Created
August 11, 2020 23:11
-
-
Save sancarn/f193d23eb56c2d2646c08d25d352bb6d to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
//Made by https://github.com/TarVK
/*******************
 * All helper code *
 *******************/
/**
 * Tokenizes the input text using the given rules
 * @param {string} text The text to tokenize
 * @param {{[key: string]: RegExp}} rules The rules, tried in insertion order
 * @throws {Error} If no rule matches at the current position
 * @returns {{type: string, value: string, index: number}[]} The tokens
 */
function tokenize(text, rules) {
  const ruleNames = Object.keys(rules);
  let index = 0;
  const tokens = [];
  outer: while (text.length > 0) {
    for (const ruleName of ruleNames) {
      const match = rules[ruleName].exec(text);
      // Only accept a match anchored at the current position
      if (match && match.index === 0) {
        const { length } = match[0];
        tokens.push({ type: ruleName, value: match[0], index });
        text = text.substring(length);
        index += length;
        continue outer;
      }
    }
    // Bug fix: the original wrote `text[0] * " at index "` (multiplication),
    // which produced "Unexpected token NaN" instead of the offending character.
    throw new Error("Unexpected token " + text[0] + " at index " + index);
  }
  return tokens;
}
// The tokens that are analyzed: a module-level stream shared by the parser
// functions below. Populated by tokenize() (see bottom of file) and consumed
// destructively from the front via shift() in consume().
let tokens;
/**
 * Consumes a token from the front of the shared token stream
 * @param {string} token The token type to consume
 * @throws {Error} If the stream is empty or the next token has a different type
 * @returns The value of the consumed token
 */
function consume(token) {
  const firstToken = tokens.shift(); // Get the first token
  // Bug fix: the original read firstToken.type even when the stream was
  // exhausted, throwing "Cannot read properties of undefined" (a TypeError)
  // instead of a meaningful parse error.
  if (!firstToken)
    throw new Error("Unexpected end of input, expected: " + token);
  if (firstToken.type != token)
    throw new Error(
      "Unexpected token, found: " + firstToken.type + " but expected: " + token
    );
  return firstToken.value;
}
/**
 * Checks whether the first token in the stream is of the given type
 * @param {string} token The token type that is expected
 * @returns Whether the expected token was found (falsy when the stream is empty)
 */
function peek(token) {
  const [next] = tokens;
  return next && next.type == token;
}
/**
 * Combines peek and consume: consumes a token only if it matches, without
 * throwing an error when it does not
 * @param {string} token The token type that is expected
 * @returns {boolean} Whether the expected token was found (and consumed)
 */
function optConsume(token) {
  if (tokens[0] && tokens[0].type == token) {
    consume(token); // guaranteed to succeed after the check above
    return true;
  }
  return false;
}
/*********************************** | |
* All the lexer and grammar rules * | |
***********************************/ | |
// Token definitions for the arithmetic grammar. Each entry maps a token name
// to the pattern recognizing it; tokenize() tries them in this key order.
const lexer = {
  lBracket: /\(/, // opening parenthesis
  rBracket: /\)/, // closing parenthesis
  value: /\d*\.?\d+/, // integer or decimal literal, e.g. "7", "3.14", ".5"
  add: /\+/, // addition operator
  sub: /\-/, // subtraction operator
  mul: /\*/, // multiplication operator
  div: /\//, // division operator
};
/**
 * Parses and evaluates an expression: a sequence of terms joined by + or -
 * @returns The numeric value of the expression
 */
function expression() {
  let res = term();
  // Keep folding in +/- terms until neither operator is next in the stream
  while (true) {
    if (optConsume("add")) {
      res += term();
    } else if (optConsume("sub")) {
      res -= term();
    } else {
      return res;
    }
  }
}
/**
 * Parses and evaluates a term: a sequence of factors joined by * or /
 * @returns The numeric value of the term
 */
function term() {
  let res = factor();
  // Keep folding in */÷ factors until neither operator is next in the stream
  while (true) {
    if (optConsume("mul")) {
      res *= factor();
    } else if (optConsume("div")) {
      res /= factor();
    } else {
      return res;
    }
  }
}
/**
 * Parses and evaluates a factor: either a numeric literal or a
 * parenthesized sub-expression
 * @returns The numeric value of the factor
 */
function factor() {
  // A bare number literal
  if (peek("value")) return parseFloat(consume("value"));
  // Otherwise it must be "( expression )"
  consume("lBracket");
  const res = expression();
  consume("rBracket");
  return res;
}
/**************
 * Demo usage *
 **************/
// Evaluate a sample input: 3*8/2*(2+2+3) = 12 * 7 = 84
tokens = tokenize("3*8/2*(2+2+3)", lexer);
let result = expression();
console.log(result);
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment