// virtual-star/lib/render/tokenizer.js
import { Token, tokenTypes } from "./token.js";
// Delimiters marking a variable substitution, e.g. "<*name*>".
const VARIABLE_TOKEN_DEF = {
  start: '<*',
  end: '*>',
};
// Delimiters marking a fragment reference, e.g. "<{header}>".
const FRAGMENT_TOKEN_DEF = {
  start: '<{',
  end: '}>',
};
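/**
 * Splits raw template text into Token lists. Variables are written as
 * "<*name*>" and fragment references as "<{name}>"; everything else is
 * kept as plain text.
 *
 * Illustrative usage (token type names come from ./token.js):
 *   const tokens = new Tokenizer().tokensByVariable("Hi, <*name*>!");
 *   // -> [TEXT "Hi, ", VARIABLE "name", TEXT "!"]
 */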
export class Tokenizer {
  /**
   * Splits fragmentText into a flat list of TEXT and VARIABLE tokens,
   * using the "<*" / "*>" delimiters.
   */
  tokensByVariable(fragmentText) {
    const forward_split_tokens = fragmentText.split(VARIABLE_TOKEN_DEF.start);
    // Everything before the first "<*" is plain text.
    const tokens = [
      new Token(tokenTypes.TEXT, forward_split_tokens[0])
    ];
    for (let i = 1; i < forward_split_tokens.length; i++) {
      // Each remaining chunk originally began with "<*"; a well-formed chunk
      // is "name*>trailing text", so splitting on "*>" yields exactly two parts.
      const back_split = forward_split_tokens[i].split(VARIABLE_TOKEN_DEF.end);
      if (back_split.length !== 2) {
        console.error(`Difficulty parsing token: ${forward_split_tokens[i]}; keeping it as plain text`);
        // Restore the "<*" consumed by the outer split so no text is lost.
        tokens.push(new Token(tokenTypes.TEXT, VARIABLE_TOKEN_DEF.start + forward_split_tokens[i]));
      } else {
        tokens.push(new Token(tokenTypes.VARIABLE, back_split[0]));
        tokens.push(new Token(tokenTypes.TEXT, back_split[1]));
      }
    }
    return tokens;
  }

  /**
   * Splits fragmentText into a flat list of TEXT and FRAGMENT tokens,
   * using the "<{" / "}>" delimiters.
   */
  tokensByFragment(fragmentText) {
    const forward_split_tokens = fragmentText.split(FRAGMENT_TOKEN_DEF.start);
    // Everything before the first "<{" is plain text.
    const tokens = [
      new Token(tokenTypes.TEXT, forward_split_tokens[0])
    ];
    for (let i = 1; i < forward_split_tokens.length; i++) {
      // A well-formed chunk is "name}>trailing text": exactly two parts.
      const back_split = forward_split_tokens[i].split(FRAGMENT_TOKEN_DEF.end);
      if (back_split.length !== 2) {
        console.error(`Difficulty parsing token: ${forward_split_tokens[i]}; keeping it as plain text`);
        // Restore the "<{" consumed by the outer split so no text is lost.
        tokens.push(new Token(tokenTypes.TEXT, FRAGMENT_TOKEN_DEF.start + forward_split_tokens[i]));
      } else {
        tokens.push(new Token(tokenTypes.FRAGMENT, back_split[0]));
        tokens.push(new Token(tokenTypes.TEXT, back_split[1]));
      }
    }
    return tokens;
  }
}
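
// Illustrative: tokensByFragment("a <{header}> b") yields
// [TEXT "a ", FRAGMENT "header", TEXT " b"], while unbalanced input such as
// "a <{header b" logs an error and keeps the unmatched chunk as plain text.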