use std::collections::HashMap;
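// Parses a double-quoted string starting at chars[0] and returns its contents
// together with the rest of the input. Escape sequences are kept verbatim, not
// decoded. Panics if the input does not start with '"'.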
fn parse_string(chars: &[char]) -> (String, &[char]) {
    if chars[0] != '"' {
        unreachable!()
    }
    let mut result = vec![];
    let mut index = 1;
    while index < chars.len() {
        let mut ch = chars[index];
        if ch == '"' {
            index += 1;
            break;
        }
        // When escaped, read one extra character: keep the backslash and copy
        // the following character verbatim, so an escaped '"' does not
        // terminate the string.
        if ch == '\\' {
            result.push(ch);
            index += 1;
            ch = chars[index];
        }
        result.push(ch);
        index += 1;
    }
    (result.into_iter().collect(), &chars[index..])
}
#[test]
fn test_parse_string() {
    assert_eq!(
        parse_string(r#""aaaaa""#.chars().collect::<Vec<_>>().as_slice()),
        (
            "aaaaa".to_string(),
            "".chars().collect::<Vec<_>>().as_slice()
        )
    );
    assert_eq!(
        parse_string(r#""aaaaa"bbcc"#.chars().collect::<Vec<_>>().as_slice()),
        (
            "aaaaa".to_string(),
            "bbcc".chars().collect::<Vec<_>>().as_slice()
        )
    );
}
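// Sketch of an additional check for the escape path: an escaped quote is kept
// verbatim and does not terminate the string.
#[test]
fn test_parse_string_escaped_quote() {
    assert_eq!(
        parse_string(r#""a\"b"c"#.chars().collect::<Vec<_>>().as_slice()),
        (
            r#"a\"b"#.to_string(),
            "c".chars().collect::<Vec<_>>().as_slice()
        )
    );
}
// Parses a run of ASCII digits into an f64. Only non-negative integers are
// handled: no sign, fraction, or exponent.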
fn parse_number(chars: &[char]) -> (f64, &[char]) {
    let mut result = vec![];
    let mut index = 0;
    while index < chars.len() {
        let ch = chars[index];
        if !ch.is_digit(10) {
            break;
        }
        result.push(ch.to_digit(10).unwrap());
        index += 1;
    }
    let mut acc = 0.0;
    for r in result {
        acc = r as f64 + acc * 10.0;
    }
    (acc, &chars[index..])
}
#[test]
fn test_parse_number() {
    assert_eq!(
        parse_number(r#"12345}"#.chars().collect::<Vec<_>>().as_slice()),
        (12345.0, "}".chars().collect::<Vec<_>>().as_slice())
    );
}
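// Tokens produced by the lexer: just the punctuation, strings, and numbers
// needed for objects.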
#[derive(Clone, Debug, PartialEq)]
pub enum Lexeme {
    LBrace,
    RBrace,
    Comma,
    Colon,
    Number(f64),
    String(String),
}
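// Splits the input into Lexemes. Whitespace is skipped, quoted strings go
// through parse_string, and everything else is handed to parse_number.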
fn lexer(str: &str) -> Vec<Lexeme> {
    let chars = str.chars().collect::<Vec<_>>();
    let mut tokens = vec![];
    let mut remains = chars.as_slice();
    while !remains.is_empty() {
        match remains[0] {
            ch if ch.is_whitespace() => {
                remains = &remains[1..];
            }
            '{' => {
                tokens.push(Lexeme::LBrace);
                remains = &remains[1..];
            }
            '}' => {
                tokens.push(Lexeme::RBrace);
                remains = &remains[1..];
            }
            ',' => {
                tokens.push(Lexeme::Comma);
                remains = &remains[1..];
            }
            ':' => {
                tokens.push(Lexeme::Colon);
                remains = &remains[1..];
            }
            '"' => {
                let (s, r) = parse_string(remains);
                tokens.push(Lexeme::String(s));
                remains = r;
            }
            _ => {
                let (n, r) = parse_number(remains);
                tokens.push(Lexeme::Number(n));
                remains = r;
            }
        }
    }
    tokens
}
#[test]
fn test_lexer() {
    use Lexeme::*;
    assert_eq!(lexer(r#"{}"#), vec![LBrace, RBrace]);
    assert_eq!(
        lexer(r#"{"nyan": "nyun","hoge": 123}"#),
        vec![
            LBrace,
            String("nyan".to_string()),
            Colon,
            String("nyun".to_string()),
            Comma,
            String("hoge".to_string()),
            Colon,
            Number(123.0),
            RBrace
        ]
    );
    assert_eq!(
        lexer(r#"{"aaa": {"bbb": "ccc"},"ddd": 234}"#),
        vec![
            LBrace,
            String("aaa".to_string()),
            Colon,
            LBrace,
            String("bbb".to_string()),
            Colon,
            String("ccc".to_string()),
            RBrace,
            Comma,
            String("ddd".to_string()),
            Colon,
            Number(234.0),
            RBrace
        ]
    );
}
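// Parsed JSON values. JNull, JBool, and JArray are declared but never produced
// by the parser below.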
#[derive(Debug, PartialEq)]
pub enum Json {
    JNull,
    JBool(bool),
    JNumber(f64),
    JString(String),
    JArray(Vec<Json>),
    JObject(HashMap<String, Json>),
}
impl Json {
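    // Follows a sequence of object keys and returns a reference to the nested
    // value. Panics if a key is missing or a non-object is reached mid-path.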
    pub fn path(&self, p: &[String]) -> &Json {
        if p.len() == 0 {
            return self;
        }
        match self {
            Json::JObject(o) => o[&p[0]].path(&p[1..]),
            _ => unreachable!(),
        }
    }
}
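// Items on the parser stack: either an already-parsed Json value or a raw
// token still waiting to be reduced.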
#[derive(Debug, PartialEq)]
pub enum JsonFragment {
    FJson(Json),
    FToken(Lexeme),
}
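// Pops elements off the end of `v` until `until` is found; the sentinel itself
// is discarded. Returns (what remains of `v`, the popped elements in reverse
// order of insertion).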
fn pop_until<T: PartialEq>(mut v: Vec<T>, until: T) -> (Vec<T>, Vec<T>) {
    let mut popped = vec![];
    while !v.is_empty() {
        let p = v.pop().unwrap();
        if p == until {
            break;
        }
        popped.push(p);
    }
    (v, popped)
}
#[test]
fn test_pop_until() {
    assert_eq!(
        pop_until(vec![1, 2, 3, 4, 5, 6], 4),
        (vec![1, 2, 3], vec![6, 5])
    );
}
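// Shift-reduce style parsing over the token stream: values and punctuation are
// pushed onto a stack, and each `}` reduces the entries back to the matching
// `{` into a JObject. Only objects, strings, and numbers are handled.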
fn parse_json(tokens: Vec<Lexeme>) -> Json {
    let mut stack = vec![];
    for t in tokens {
        match t {
            Lexeme::String(s) => {
                stack.push(JsonFragment::FJson(Json::JString(s)));
            }
            Lexeme::Number(n) => {
                stack.push(JsonFragment::FJson(Json::JNumber(n)));
            }
            Lexeme::RBrace => {
                let (mut others, mut tokens) =
                    pop_until(stack, JsonFragment::FToken(Lexeme::LBrace));
                let mut m = HashMap::new();
                while !tokens.is_empty() {
                    let k = match tokens.pop().unwrap() {
                        JsonFragment::FJson(Json::JString(s)) => s,
                        t => panic!("{:?}", t),
                    };
                    match tokens.pop().unwrap() {
                        JsonFragment::FToken(Lexeme::Colon) => (),
                        _ => unreachable!(),
                    }
                    let v = match tokens.pop().unwrap() {
                        JsonFragment::FJson(v) => v,
                        _ => unreachable!(),
                    };
                    // A trailing comma after the last entry of an object is tolerated
                    match tokens.pop() {
                        None => (),
                        Some(JsonFragment::FToken(Lexeme::Comma)) => (),
                        _ => unreachable!(),
                    };
                    m.insert(k, v);
                }
                others.push(JsonFragment::FJson(Json::JObject(m)));
                stack = others;
            }
            _ => {
                stack.push(JsonFragment::FToken(t));
            }
        }
    }
    if stack.len() != 1 {
        unreachable!();
    }
    match stack.pop().unwrap() {
        JsonFragment::FJson(j) => j,
        _ => unreachable!(),
    }
}
fn parse(s: &str) -> Json {
    parse_json(lexer(s))
}
#[test]
fn test_parse() {
    assert_eq!(parse(r#"{}"#), Json::JObject(HashMap::new()));
    assert_eq!(
        parse(r#"{"nyan": "nyun","hoge": 123}"#),
        Json::JObject(
            vec![
                ("nyan".to_string(), Json::JString("nyun".to_string())),
                ("hoge".to_string(), Json::JNumber(123.0))
            ]
            .into_iter()
            .collect()
        )
    );
    assert_eq!(
        parse(r#"{"aaa": {"bbb": "ccc"},"ddd": 234}"#),
        Json::JObject(
            vec![
                (
                    "aaa".to_string(),
                    Json::JObject(
                        vec![("bbb".to_string(), Json::JString("ccc".to_string()))]
                            .into_iter()
                            .collect()
                    )
                ),
                ("ddd".to_string(), Json::JNumber(234.0))
            ]
            .into_iter()
            .collect()
        )
    );
}
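// Sketch of an additional check for the trailing-comma handling in parse_json:
// a comma before the closing brace is accepted.
#[test]
fn test_parse_trailing_comma() {
    assert_eq!(
        parse(r#"{"hoge": 123,}"#),
        Json::JObject(
            vec![("hoge".to_string(), Json::JNumber(123.0))]
                .into_iter()
                .collect()
        )
    );
}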
#[test]
fn test_path() {
    assert_eq!(
        parse(r#"{"aaa": {"bbb": "ccc"},"ddd": 234}"#)
            .path(&["aaa".to_string(), "bbb".to_string()]),
        &Json::JString("ccc".to_string())
    );
}
fn main() {
    println!("Hello, world!");
}