Created
August 5, 2019 20:23
-
-
Save Tishka17/7ee8130683e670f0a3bb5d0043dcc6f3 to your computer and use it in GitHub Desktop.
Formula calculation — lexes a bracketed letter/count expression such as '(x3y2(f4v25)3)' and expands nested group multipliers into per-letter totals.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from collections import defaultdict
from enum import Enum
from itertools import groupby
class TokenType(Enum):
    """Character class of a lexed run: letters, digits, or brackets."""
    LETTER = 0  # alphabetic characters (a name)
    NUMBER = 1  # decimal digits (a multiplier/count)
    OPEN = 2    # opening bracket: one of "<({["
    CLOSE = 3   # any other character (treated as a closing bracket)
def token_type(symbol: str):
    """Classify a single character into a TokenType.

    Letters map to LETTER, digits to NUMBER, any of "<({[" to OPEN;
    every other character falls through to CLOSE.
    """
    classifiers = (
        (str.isalpha, TokenType.LETTER),
        (str.isnumeric, TokenType.NUMBER),
        ("<({[".__contains__, TokenType.OPEN),
    )
    for predicate, ttype in classifiers:
        if predicate(symbol):
            return ttype
    return TokenType.CLOSE
def parse_tokens(s):
    """Lex *s* into maximal runs of same-class characters.

    Yields ``(run, TokenType)`` pairs, e.g. ``'x12('`` produces
    ``('x', LETTER), ('12', NUMBER), ('(', OPEN)``.  An empty string
    yields nothing.

    The original hand-rolled accumulator loop is exactly what
    itertools.groupby does: group consecutive items sharing a key.
    """
    for ttype, group in groupby(s, key=token_type):
        yield "".join(group), ttype
def reduce_pairs(pairs):
    """Sum the values of duplicate keys: ``[(k, v), ...] -> {k: total}``."""
    totals = {}
    for key, value in pairs:
        totals[key] = totals.get(key, 0) + value
    return totals
def calc(tokens):
    """Expand a tokenized formula into ``{name: total count}``.

    *tokens* is a ``(text, TokenType)`` stream (see parse_tokens).
    A NUMBER after a LETTER records ``(letter, count)``; a NUMBER after
    a CLOSE multiplies every entry of the just-closed group and merges
    it into the enclosing frame.  A group closed WITHOUT a number has an
    implicit multiplier of 1 and is merged when the stack unwinds.

    NOTE(review): a NUMBER whose predecessor is neither LETTER nor CLOSE
    (e.g. a leading digit) pops an empty stack and raises IndexError,
    as in the original — malformed input is not handled here.
    """
    stack = []          # frames of enclosing, not-yet-multiplied groups
    frame = []          # (name, count) pairs of the current group
    prev_token = None
    prev_type = None
    for token, ttype in tokens:  # renamed: don't shadow token_type()
        if ttype is TokenType.OPEN:
            # Defer the enclosing group; start collecting the inner one.
            stack.append(frame)
            frame = []
        elif ttype is TokenType.NUMBER:
            count = int(token)
            if prev_type is TokenType.LETTER:
                frame.append((prev_token, count))
            else:
                # NUMBER right after CLOSE: scale the closed group and
                # splice it back into its enclosing frame.
                frame = stack.pop() + [(k, v * count) for k, v in frame]
        # LETTER is consumed by the NUMBER that follows it; CLOSE is a
        # no-op here (its multiplier, if any, arrives as the next token).
        prev_token = token
        prev_type = ttype
    # Bug fix: the original merged only ONE leftover frame (`if stack:`),
    # silently dropping outer groups when two or more groups were closed
    # without an explicit multiplier, e.g. '(a2(b3(c4)))' lost a2.
    while stack:
        frame += stack.pop()
    return reduce_pairs(frame)
# Demo: expand two sample formulas and print the resulting counts.
for x in ('(x3y2(f4v25)3)', '(x3y2{f4v25}3)2b1'):
    print(x)
    print(calc(parse_tokens(x)))
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment