Last active
October 29, 2022 22:57
-
-
Save commander-trashdin/8fa45379b9b2048773380d99f519f1b1 to your computer and use it in GitHub Desktop.
My shitty forth like calc
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#include <cctype>
#include <iostream>
#include <sstream>
#include <stdexcept>
#include <string>
#include <tuple>
#include <variant>
#include <vector>
// Arithmetic operations supported by the calculator.
enum class Op {
    Add,
    Sub,
    Mul,
    Div
};

// A lexical token: either an operator or an integer literal.
// (Modernized: `using` alias instead of `typedef`.)
using Token = std::variant<Op, int>;
// Converts one complete lexeme into a Token.
//
// A lexeme is a number if it begins with a digit, or with a sign
// ('+'/'-') immediately followed by a digit; anything else must be a
// single-character operator.
//
// Throws std::runtime_error for an unrecognized operator; std::stoi may
// throw std::out_of_range for oversized numbers, and at() throws
// std::out_of_range for an empty string.
inline Token MakeLongToken(const std::string& symbols) {
    // BUG FIX: passing a plain char to isdigit is undefined behavior when
    // the value is negative; route through unsigned char.
    const auto is_digit = [](char c) {
        return std::isdigit(static_cast<unsigned char>(c)) != 0;
    };
    if (is_digit(symbols.at(0)) ||
        ((symbols.at(0) == '-' || symbols.at(0) == '+') &&
         symbols.length() > 1 && is_digit(symbols.at(1)))) {
        return std::stoi(symbols);
    }
    switch (symbols.at(0)) {
        case '+':
            return Op::Add;
        case '-':
            return Op::Sub;
        case '*':
            return Op::Mul;
        case '/':
            return Op::Div;
        default:
            throw std::runtime_error("Illegal operation");
    }
}
// Pull-style lexer: splits an istream into Tokens one at a time.
// Usage: GetToken() returns the current token, Next() advances,
// IsEnd() reports exhaustion. The constructor primes the first token.
class Tokenizer {
public:
    // Non-owning: `in` must outlive this Tokenizer.
    Tokenizer(std::istream* in)
        : this_token_(0), working_stream_(in), last_token_(false) {
        Next();  // prime this_token_ with the first token (if any)
    }

    // True once Next() was entered with no input left to read.
    bool IsEnd() {
        return last_token_;
    }

    // Scans forward and stores the next token in this_token_.
    // A '+'/'-' immediately followed by a digit is treated as the sign of
    // a number literal (e.g. "-10"); otherwise it is an operator.
    void Next() {
        this_token_ = 0;
        char cur;
        std::string accum_token;
        if (working_stream_->peek() == EOF) {
            // NOTE(review): accum_token was just default-constructed, so it
            // is always empty here — this flush branch is dead code.
            if (!accum_token.empty()) {
                this_token_ = MakeLongToken(accum_token);
                accum_token.clear();
            }
            last_token_ = true;
        }
        while (working_stream_->peek() != EOF) {
            cur = working_stream_->peek();
            if (cur == ' ' || cur == EOF || cur == '\n') {
                // Whitespace: skip it, or let it terminate the lexeme
                // accumulated so far.
                if (accum_token.empty()) {
                    working_stream_->get();
                } else {
                    this_token_ = MakeLongToken(accum_token);
                    accum_token.clear();
                    working_stream_->get();  // consume the delimiter
                    break;
                }
            } else if (isdigit(cur)) {
                accum_token += cur;
                working_stream_->get();
            } else if (cur == '*' || cur == '/') {
                if (accum_token.empty()) {
                    // Operator stands alone: emit it immediately.
                    std::string s(1, cur);
                    this_token_ = MakeLongToken(s);
                    working_stream_->get();
                    break;
                } else {
                    // Operator terminates the pending number; deliberately
                    // leave it in the stream for the following Next().
                    this_token_ = MakeLongToken(accum_token);
                    accum_token.clear();
                    break;
                }
            } else if (cur == '-' || cur == '+') {
                if (!accum_token.empty()) {
                    // Sign terminates the pending number; the sign itself
                    // stays in the stream for the following Next().
                    this_token_ = MakeLongToken(accum_token);
                    accum_token.clear();
                    break;
                } else {
                    working_stream_->get();
                    if (!isdigit(working_stream_->peek())) {
                        // Not followed by a digit: it is an operator.
                        std::string s(1, cur);
                        this_token_ = MakeLongToken(s);
                        accum_token.clear();
                        break;
                    } else {
                        // Followed by a digit: start of a signed literal.
                        accum_token += cur;
                    }
                }
            }
        }
        // Flush a trailing lexeme that ran into EOF without a delimiter.
        if (!accum_token.empty()) {
            this_token_ = MakeLongToken(accum_token);
            accum_token.clear();
        }
    }

    // Returns the token produced by the most recent Next().
    Token GetToken() {
        return this_token_;
    }

private:
    Token this_token_;              // current token (int 0 when none yet)
    std::istream* working_stream_;  // non-owning input stream
    bool last_token_;               // set when input is exhausted
};
class Evaluator { | |
public: | |
Evaluator(Tokenizer* tok) : tok_(tok) { | |
} | |
int Eval() { | |
while (!tok_->IsEnd()) { | |
auto tok = tok_->GetToken(); | |
tok_->Next(); | |
if (std::holds_alternative<int>(tok)) { | |
stack_.push_back(std::get<int>(tok)); | |
} else { | |
if (stack_.size() < 2) { | |
throw std::runtime_error("Not enough operands!"); | |
} else { | |
int res; | |
switch (std::get<Op>(tok)) { | |
case Op::Add: | |
res = stack_.at(stack_.size() - 1) + stack_.at(stack_.size() - 2); | |
break; | |
case Op::Sub: | |
res = stack_.at(stack_.size() - 1) - stack_.at(stack_.size() - 2); | |
break; | |
case Op::Mul: | |
res = stack_.at(stack_.size() - 1) * stack_.at(stack_.size() - 2); | |
break; | |
case Op::Div: | |
if (stack_.at(stack_.size() - 2) == 0) { | |
throw std::runtime_error("Division by zero detected"); | |
} | |
res = stack_.at(stack_.size() - 1) / stack_.at(stack_.size() - 2); | |
break; | |
} | |
stack_.pop_back(); | |
stack_.pop_back(); | |
stack_.push_back(res); | |
} | |
} | |
} | |
if (stack_.size() == 1) { | |
return stack_.back(); | |
} else { | |
throw std::runtime_error("Not all operands are consumed!"); | |
} | |
} | |
private: | |
Tokenizer* tok_; | |
std::vector<int> stack_; | |
}; | |
int main () { | |
std::stringstream ss; | |
ss << "2 4 -10 + *"; | |
auto tokenizer = Tokenizer(&ss); | |
auto evaluator = Evaluator(&tokenizer); | |
std::cout << evaluator.Eval(); | |
return 0; | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment