Skip to content

Instantly share code, notes, and snippets.

@skeptomai
Last active April 16, 2020 15:38
Show Gist options
  • Save skeptomai/423a78e05a3696f6dc857494b70079f0 to your computer and use it in GitHub Desktop.
prefix expression interview question in rust
// I was working on this code for a while before I discovered the article below..
// https://medium.com/@wastedintel/reference-iterators-in-rust-5603a51b5192
use env_logger;
use log::info;
use std::fmt;
use std::io;
use std::io::prelude::*;
/// Lexical categories produced by the tokenizer.
#[derive(Debug, PartialEq)]
enum TokenType {
//UNKNOWN = 0,
/// Opening parenthesis `(`.
LPAREN = 1,
/// Closing parenthesis `)`.
RPAREN = 2,
/// A run of ASCII alphanumerics (here: the numeric operands).
SYMBOL = 3,
/// One of the arithmetic operators `+ - * /`.
OPER = 4,
}
/// State of the evaluator's expression state machine.
enum EvalState {
/// No expression has been entered yet (initial state).
NOSTATE = 0,
/// Inside a parenthesized expression (set on LPAREN).
INEXPR = 1,
/// A subexpression's value has been computed; a RPAREN is expected next.
OUTEXPR = 2,
}
/// A single lexed token: its category plus the slice of the input it covers.
#[derive(Debug, PartialEq)]
struct Token<'a> {
// Lexical category of this token.
tok_type: TokenType,
// Borrowed slice of the original input string for this token.
tok_value: &'a str,
}
/// Iterator-style lexer over a borrowed input string.
struct TokenStream<'a> {
// Byte offset into `tok_string` of the next unread character.
cur_pos: usize,
// The full input being tokenized.
tok_string: &'a str,
}
impl<'a> fmt::Display for Token<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "({:?}, {})", self.tok_type, self.tok_value)
}
}
impl<'a> TokenStream<'a> {
    /// Creates a token stream positioned at the start of `s`.
    fn new(s: &'a str) -> TokenStream<'a> {
        TokenStream { tok_string: s, cur_pos: 0 }
    }
}
impl<'a> Iterator for TokenStream<'a> {
    /// Each item is either a lexed token or a static lexing-error message.
    type Item = std::result::Result<Token<'a>, &'static str>;

    /// Lexes and returns the next token, or `None` at end of input.
    fn next(&mut self) -> Option<Self::Item> {
        // Peekable view over the not-yet-consumed tail of the input.
        // `cur_pos` arithmetic below assumes single-byte (ASCII) characters,
        // which every consuming arm's guard guarantees.
        let mut ps = self.tok_string[self.cur_pos..].chars().peekable();
        loop {
            // Stream is exhausted once no characters remain.
            let c = *ps.peek()?;
            match c {
                '(' => {
                    self.cur_pos += 1;
                    return Some(Ok(Token {
                        tok_type: TokenType::LPAREN,
                        tok_value: &self.tok_string[self.cur_pos - 1..self.cur_pos],
                    }));
                }
                ')' => {
                    self.cur_pos += 1;
                    return Some(Ok(Token {
                        tok_type: TokenType::RPAREN,
                        tok_value: &self.tok_string[self.cur_pos - 1..self.cur_pos],
                    }));
                }
                '+' | '-' | '/' | '*' => {
                    ps.next();
                    self.cur_pos += 1;
                    // An operator must be followed by whitespace or end of
                    // input; "+2" is rejected (see the spacing test).
                    // BUG FIX: `ps.peek().unwrap()` panicked when the operator
                    // was the last character of the input.
                    match ps.peek() {
                        Some(cn) if !cn.is_ascii_whitespace() => {
                            return Some(Err("failed to lex oper"));
                        }
                        _ => {
                            return Some(Ok(Token {
                                tok_type: TokenType::OPER,
                                tok_value: &self.tok_string[self.cur_pos - 1..self.cur_pos],
                            }));
                        }
                    }
                }
                // Skip whitespace between tokens. Generalized from ' ' only,
                // so trailing newlines/tabs from stdin no longer reach the
                // symbol arm.
                _ if c.is_ascii_whitespace() => {
                    self.cur_pos += 1;
                    ps.next();
                }
                _ => {
                    // A SYMBOL is a maximal run of ASCII alphanumerics.
                    let start = self.cur_pos;
                    let mut end = self.cur_pos;
                    // BUG FIX: `ps.peek().unwrap()` panicked when a symbol ran
                    // to the end of the input; EOF now terminates the symbol
                    // like any other delimiter.
                    while let Some(cn) = ps.peek() {
                        if !cn.is_ascii_alphanumeric() {
                            break;
                        }
                        end += 1;
                        ps.next();
                    }
                    if end == start {
                        // BUG FIX: a character that starts no token (e.g. '.')
                        // previously yielded an empty SYMBOL without advancing
                        // `cur_pos`, looping forever on the same position.
                        self.cur_pos += c.len_utf8();
                        return Some(Err("failed to lex symbol"));
                    }
                    self.cur_pos = end;
                    return Some(Ok(Token {
                        tok_type: TokenType::SYMBOL,
                        tok_value: &self.tok_string[start..end],
                    }));
                }
            }
        }
    }
}
/// Recursive evaluator over a stream of prefix-expression tokens.
struct EvalContext<'a> {
// Value of the most recently completed subexpression.
cur_value: f64,
// Current state of the expression state machine.
cur_state: EvalState,
// Source of tokens being consumed.
tok_stream: TokenStream<'a>,
}
impl<'a> EvalContext<'a> {
    /// Creates an evaluator over `ts` with no value computed yet.
    fn new(ts: TokenStream<'a>) -> EvalContext {
        EvalContext {
            cur_value: 0.0,
            cur_state: EvalState::NOSTATE,
            tok_stream: ts,
        }
    }

    /// Recursively evaluates the next (sub)expression from the token stream.
    ///
    /// Returns the numeric value of the expression, or an error message when
    /// the input is malformed or ends prematurely.
    fn eval(&mut self) -> std::result::Result<f64, &'static str> {
        // BUG FIX: the original unwrapped both the Option and the lexer
        // Result here, panicking on truncated or unlexable input; both
        // failures are now propagated as Err to the caller.
        let t = self.tok_stream.next().ok_or("unexpected end of input")??;
        info!("in eval, t: {:?}", t);
        match t.tok_type {
            TokenType::LPAREN => {
                info!("setting inexpr");
                self.cur_state = EvalState::INEXPR;
                self.eval()
            }
            TokenType::OPER => match self.cur_state {
                EvalState::INEXPR => {
                    info!("in inexpr, evaluating");
                    // Prefix form: operator first, then its two operands.
                    // BUG FIX: recursive results were unwrap()ed; errors in a
                    // subexpression now propagate instead of panicking.
                    let op1 = self.eval()?;
                    let op2 = self.eval()?;
                    let i = self.apply_oper(t.tok_value, op1, op2)?;
                    self.cur_state = EvalState::OUTEXPR;
                    self.cur_value = i;
                    // Consume the closing paren and yield the stored result.
                    self.eval()
                }
                _ => Err("Bad expr state in oper"),
            },
            TokenType::SYMBOL => {
                info!("Parsing symbol: {:?}", t.tok_value);
                // BUG FIX: a non-numeric symbol used to panic via unwrap().
                t.tok_value.parse().map_err(|_| "failed to parse number")
            }
            TokenType::RPAREN => match self.cur_state {
                EvalState::OUTEXPR => Ok(self.cur_value),
                _ => Err("Bad expr state in rparen"),
            },
        }
    }

    /// Applies a binary arithmetic operator to its two operands.
    fn apply_oper(
        &self,
        operator: &str,
        op1: f64,
        op2: f64,
    ) -> std::result::Result<f64, &'static str> {
        match operator {
            "*" => Ok(op1 * op2),
            "+" => Ok(op1 + op2),
            "/" => Ok(op1 / op2),
            "-" => Ok(op1 - op2),
            _ => Err("failed to apply oper"),
        }
    }
}
/// Reads a prefix expression from stdin, evaluates it, and prints the result.
fn main() -> io::Result<()> {
    env_logger::init();
    // Slurp the whole expression from standard input.
    let mut input = String::new();
    io::stdin().read_to_string(&mut input)?;
    info!("string to eval is {:?}", input);
    let mut cxt = EvalContext::new(TokenStream::new(&input));
    let result = cxt.eval();
    println!("Result is: {:?}", result);
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Token sequence expected from lexing "( + 2 3 )", regardless of
    /// spacing between the parens and their neighbors.
    static EXP_TOK_VALS: [Token<'static>; 5] = [
        Token {
            tok_type: TokenType::LPAREN,
            tok_value: "(",
        },
        Token {
            tok_type: TokenType::OPER,
            tok_value: "+",
        },
        Token {
            tok_type: TokenType::SYMBOL,
            tok_value: "2",
        },
        Token {
            tok_type: TokenType::SYMBOL,
            tok_value: "3",
        },
        Token {
            tok_type: TokenType::RPAREN,
            tok_value: ")",
        },
    ];

    #[test]
    fn test_with_spaces_tokenize() {
        // Fully space-delimited input lexes to the expected five tokens.
        let lexed: Result<Vec<_>, _> = TokenStream::new("( + 2 3 )").collect();
        assert_eq!(lexed.unwrap(), EXP_TOK_VALS);
    }

    #[test]
    fn test_without_spaces_tokenize() {
        // Parens glued to their neighbors still lex to the same tokens.
        let lexed: Result<Vec<_>, _> = TokenStream::new("(+ 2 3)").collect();
        assert_eq!(lexed.unwrap(), EXP_TOK_VALS);
    }

    #[test]
    fn test_with_incorrect_spacing_tokenize() {
        // An operator glued directly to its operand ("+2") is a lexing error.
        let r: Result<Vec<_>, &'static str> = TokenStream::new("(+2 3)").collect();
        assert!(r.is_err(), "failed to lex oper");
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment