Created
October 6, 2011 02:31
-
-
Save basicxman/1266352 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| require "test/unit" | |
| require "turn" | |
# A single lexed unit: its literal text (value), operator precedence
# (prec; 0 for operands), and kind (type: :lp, :rp, :static, :polish,
# :none, or :exp for extracted sub-groups).
# Assigned rather than subclassed — inheriting from an anonymous
# Struct.new creates a pointless intermediate class in the ancestry.
Token = Struct.new(:value, :prec, :type)
# Tokenizes an infix arithmetic expression and, as a side effect of
# construction, builds the Expressions group table (which in turn
# instantiates Parser and prints the rendered result to stdout).
class Lexer
  # The Expressions object built from the input — previously the result
  # was discarded; exposed so callers have more than the stdout side
  # effect to work with.
  attr_reader :expressions

  def initialize(string)
    @string = string
    parse
  end

  # Wraps a single lexeme in a Token carrying its precedence and kind.
  # Any whitespace lexeme yields nil (filtered by the caller); the
  # original only filtered a literal " ", which would have let tabs or
  # newlines through as bogus :none tokens.
  def tokenize(c)
    return if c =~ /\A\s\z/
    prec, type =
      case c
      when "(" then [4, :lp]
      when ")" then [4, :rp]
      when "^" then [3, :static]
      when "*" then [2, :static]
      when "/" then [2, :polish] # rendered prefix-style (as a \dfrac)
      when "+", "-" then [1, :static]
      else [0, :none] # identifiers and integer literals
      end
    Token.new(c, prec, type)
  end

  # Scans identifiers, integer literals, and single operator / paren /
  # whitespace characters, then hands the token list to Expressions.
  # The original regex carried a redundant `{1}` quantifier and a
  # duplicate `+` inside the character class; matching is unchanged.
  def parse
    tokens = @string.scan(/[a-z]+|[0-9]+|[()+\-^\/*\s]/).map { |c| tokenize(c) }.compact
    @expressions = Expressions.new(tokens)
  end
end
# Splits a flat token stream into parenthesis-delimited sub-groups,
# stored in a hash keyed "$0" (the root), "$1", "$2", ..., then hands
# the whole map to Parser (which prints the rendered expression).
class Expressions
  def initialize(tokens)
    @groups = {}
    @current_group = 1
    @groups["$0"] = parse(tokens)
    Parser.new(@groups)
  end

  # Returns the token list for one nesting level, replacing each
  # top-level "(...)" span with a placeholder :exp token whose value
  # names the recursively parsed sub-group in @groups.
  def parse(tokens)
    level = []
    open_at = nil
    placeholder = nil
    depth = 0
    tokens.each_with_index do |tok, idx|
      # Tokens buried inside a sub-group are skipped here (the recursive
      # call handles them); parentheses still pass through to track depth.
      next if depth != 0 && tok.type != :rp && tok.type != :lp
      case tok.type
      when :lp
        depth += 1
        next if depth > 1
        placeholder = "$#{@current_group}"
        @current_group += 1
        level << Token.new(placeholder, 0, :exp)
        open_at = idx
      when :rp
        depth -= 1
        next unless depth.zero?
        @groups[placeholder] = parse(tokens[open_at + 1...idx])
        placeholder = nil
      else
        level << tok if depth.zero?
      end
    end
    level
  end
end
# Mixin of LaTeX rendering helpers shared by Parser.
module Operations
  # Renders lhs over rhs as a LaTeX display-style fraction.
  def divide(lhs, rhs)
    "\\dfrac{#{lhs}}{#{rhs}}"
  end
end
# Renders the token groups produced by Expressions as a LaTeX-ish
# string, printing the fully expanded root group "$0" on construction.
class Parser
  include Operations

  def initialize(token_groups)
    @groups = token_groups
    puts parse("$0").join("")
  end

  # Expands the named group into its rendered pieces.
  def parse(group_key)
    parse_expression(@groups[group_key])
  end

  # Walks a flat token list: :polish operators consume both neighbours
  # (the previously emitted piece is replaced and the following token is
  # skipped), :exp tokens recurse into their sub-group, and everything
  # else passes through as its literal value.
  def parse_expression(tokens)
    pieces = []
    consumed_rhs = false
    tokens.each_with_index do |tok, i|
      if consumed_rhs
        consumed_rhs = false
        next
      end
      case tok.type
      when :polish
        pieces.pop
        pieces << polish(tok, evaluate(tokens[i - 1]), evaluate(tokens[i + 1]))
        consumed_rhs = true
      when :exp
        pieces << sub_group(tok.value)
      else
        pieces << tok.value
      end
    end
    pieces
  end

  # Applies a prefix-style operator to its operands; "/" becomes a
  # LaTeX fraction, anything else is emitted in prefix notation.
  def polish(token, lhs, rhs)
    return divide(lhs, rhs) if token.value == "/"
    "#{token.value} #{lhs} #{rhs}"
  end

  # Renders a single operand: sub-groups expand recursively, plain
  # tokens contribute their literal value.
  def evaluate(token)
    token.type == :exp ? sub_group(token.value) : token.value
  end

  # Expands a named group and wraps it in literal parentheses.
  def sub_group(group_key)
    "(" + parse_expression(@groups[group_key]).join("") + ")"
  end
end
# NOTE(review): these are smoke tests only — Parser prints its output to
# stdout and nothing here asserts on it; each case passes as long as no
# exception is raised while lexing and rendering the expression.
class ParsingTest < Test::Unit::TestCase
  def test
    Lexer.new('x + (y / (x + 4 * 3)) - 6')
  end

  def test_complex
    Lexer.new('(((test + 22)*(x ^ (y - 4))) / ((x - 3)*(x - 4))) / (x + 1)*(x + 2) / y - 3')
  end

  def test_mike
    Lexer.new('(x^4 + 3x^3 - 5x) / (3x^2)(3x)')
  end
end
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment