Created January 14, 2017 20:03
Example of a concurrent (simplified) JSON parser
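Before the full file, a minimal, self-contained sketch (not from the gist; the names and the fake token strings are illustrative) of the pattern it is built on: a producer thread sends tokens into an mpsc channel while the consumer on the other end processes them as soon as they arrive.

use std::sync::mpsc;
use std::thread;

fn main() {
    let (tx, rx) = mpsc::channel();
    // Producer: emits a few fake tokens, then drops `tx`, which closes the channel.
    thread::spawn(move || {
        for piece in vec!["{", "\"key\"", ":", "\"value\"", "}"] {
            tx.send(piece.to_string()).unwrap();
        }
    });
    // Consumer: iterating the receiver blocks for each item until the channel is closed.
    for token in rx {
        println!("got token: {}", token);
    }
}

The gist below builds the same pipeline with a real tokenizer as the producer and a recursive-descent evaluator as the consumer.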
use std::collections::HashMap;
// `sleep` and `time` are only needed if the commented-out throttling call in
// `Tokenizer::tokenize` is re-enabled.
use std::thread::{self, sleep};
use std::time;
use std::sync::{mpsc, Arc};
// Builds a one-character token of the given `TokenType` variant.
macro_rules! char_token {
    ($c:expr, $t:ident) => ({
        let mut value = String::new();
        value.push($c);
        Some(Token { ttype: TokenType::$t, value: value })
    })
}
#[derive(Debug)]
enum TokenType {
    BraceOpen,
    BraceClose,
    BracketOpen,
    BracketClose,
    Comma,
    Colon,
    StringToken,
}

#[derive(Debug)]
struct Token {
    ttype: TokenType,
    value: String,
}

#[derive(Debug)]
enum JsonValue {
    StringValue(String),
    Array(Vec<JsonValue>),
    Object(HashMap<String, JsonValue>),
}
// Peekable token stream backed by the receiving end of an mpsc channel.
struct StreamIterator {
    rx: mpsc::Receiver<Token>,
    peeked: Arc<Token>,
}

impl StreamIterator {
    fn new(rx: mpsc::Receiver<Token>) -> StreamIterator {
        // Block until the first token arrives so `peek` always has a value.
        let peeked = rx.recv().unwrap();
        StreamIterator {
            rx: rx,
            peeked: Arc::new(peeked),
        }
    }

    // Returns the current token and pre-fetches the next one from the channel.
    fn next(&mut self) -> Arc<Token> {
        let out = self.peeked.clone();
        println!("Consume {:?}", &out);
        if let Ok(t) = self.rx.recv() {
            self.peeked = Arc::new(t);
        }
        out
    }

    fn peek(&mut self) -> Arc<Token> {
        self.peeked.clone()
    }
}
// Lexer that pushes tokens into the channel as soon as they are recognized,
// so the evaluator on the other end can start consuming them immediately.
struct Tokenizer {
    tx: mpsc::Sender<Token>,
}

impl Tokenizer {
    fn new(tx: mpsc::Sender<Token>) -> Tokenizer {
        Tokenizer { tx: tx }
    }

    fn tokenize(&self, raw: &str) {
        let mut chars = raw.chars();
        while let Some(c) = chars.next() {
            let token: Option<Token> = match c {
                '{' => char_token!('{', BraceOpen),
                '}' => char_token!('}', BraceClose),
                '[' => char_token!('[', BracketOpen),
                ']' => char_token!(']', BracketClose),
                ':' => char_token!(':', Colon),
                ',' => char_token!(',', Comma),
                '"' => {
                    // Collect characters up to the closing quote.
                    let mut string_val = String::new();
                    while let Some(_c) = chars.next() {
                        match _c {
                            '"' => break,
                            next_char => string_val.push(next_char),
                        };
                    }
                    Some(Token {
                        ttype: TokenType::StringToken,
                        value: string_val,
                    })
                }
                // Whitespace and anything unrecognized is skipped.
                _ => None,
            };
            if let Some(t) = token {
                println!("Send token: {:?}", &t);
                self.tx.send(t).unwrap();
                // sleep(time::Duration::from_millis(100));
            }
        }
    }
}
// Recursive-descent evaluator that pulls tokens from the stream on demand.
struct Evaluator {
    tokens: StreamIterator,
}

impl Evaluator {
    fn new(tokens: StreamIterator) -> Evaluator {
        Evaluator { tokens: tokens }
    }

    fn eval_object(&mut self) -> JsonValue {
        let mut obj: HashMap<String, JsonValue> = HashMap::new();
        // Consume the opening brace.
        self.tokens.next();
        loop {
            let key = self.tokens.next();
            // Consume the colon between key and value.
            self.tokens.next();
            let value = self.eval();
            obj.insert(key.value.clone(), value);
            match self.tokens.peek().ttype {
                TokenType::BraceClose => break,
                TokenType::Comma => self.tokens.next(),
                _ => panic!("Unexpected token."),
            };
        }
        // Consume the closing brace.
        self.tokens.next();
        JsonValue::Object(obj)
    }

    fn eval_array(&mut self) -> JsonValue {
        let mut list: Vec<JsonValue> = Vec::new();
        // Consume the opening bracket.
        self.tokens.next();
        loop {
            let value = self.eval();
            list.push(value);
            match self.tokens.peek().ttype {
                TokenType::BracketClose => break,
                TokenType::Comma => self.tokens.next(),
                _ => panic!("Unexpected token."),
            };
        }
        // Consume the closing bracket.
        self.tokens.next();
        JsonValue::Array(list)
    }

    fn eval(&mut self) -> JsonValue {
        match self.tokens.peek().ttype {
            TokenType::StringToken => {
                let val = JsonValue::StringValue(self.tokens.peek().value.clone());
                self.tokens.next();
                val
            }
            TokenType::BraceOpen => self.eval_object(),
            TokenType::BracketOpen => self.eval_array(),
            _ => panic!("Unexpected token."),
        }
    }
}
fn main() {
    let (tx, rx) = mpsc::channel();
    let t = Tokenizer::new(tx);

    // Tokenize on a separate thread; tokens stream to the evaluator below.
    thread::spawn(move || {
        // t.tokenize("{\"foo\":[\"bar\"], \"baz\": {\"fur\": \"zip\"}}");
        t.tokenize("{\
            \"name\": \"John Doe\",\
            \"age\": \"72\",\
            \"hobby\": [\"cubes\", \"tea\", \"bike\"],\
            \"address\": {\
                \"city\": \"Kairo\",\
                \"zip\": [\"EST\", \"L10\"],\
                \"type\": {\
                    \"main\": \"square\",\
                    \"pos\": [\"-12.324\", \"+23.432\"]\
                }\
            }\
        }");
    });

    let si = StreamIterator::new(rx);
    let mut e = Evaluator::new(si);
    println!("{:#?}", e.eval());
}
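Not part of the gist: a short sketch of how the resulting JsonValue could be inspected once eval() returns, assuming the result of e.eval() is bound to a variable inside main instead of being printed directly (the "name" key comes from the sample input above).

    let parsed = e.eval();
    if let JsonValue::Object(ref map) = parsed {
        // `map` borrows the HashMap<String, JsonValue> behind the top-level object.
        if let Some(&JsonValue::StringValue(ref name)) = map.get("name") {
            println!("name = {}", name);
        }
    }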