Created
June 19, 2021 03:58
-
-
Save kasari/e7f1b612a7e3a6f3251d42eb4d2313ca to your computer and use it in GitHub Desktop.
Brainf*ck interpreter in Rust
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
use std::io::Read;
use std::num::Wrapping;
/// One lexical token of a Brainfuck program, plus an explicit end marker.
///
/// NOTE(review): `Conma` is a misspelling of "Comma"; it is kept as-is
/// because the variant name is part of this type's public interface.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum Token {
    /// `+` — increment the current cell.
    Plus,
    /// `-` — decrement the current cell.
    Minus,
    /// `>` — move the data pointer right.
    Gt,
    /// `<` — move the data pointer left.
    Lt,
    /// `[` — loop start.
    LBracket,
    /// `]` — loop end.
    RBracket,
    /// `,` — read one byte of input.
    Conma,
    /// `.` — write the current cell as output.
    Dot,
    /// Synthetic end-of-input marker appended by the lexer.
    Eof,
}
/// A node of the parsed Brainfuck AST.
///
/// Loops nest via `While`, which owns its body; all other statements are
/// leaves mapping 1:1 onto single tokens.
#[derive(Debug)]
pub enum Statement {
    /// `>` — move the data pointer right.
    IncrementPointer,
    /// `<` — move the data pointer left.
    DecrementPointer,
    /// `+` — increment the current cell.
    Increment,
    /// `-` — decrement the current cell.
    Decrement,
    /// `,` — read one byte from stdin into the current cell.
    Input,
    /// `.` — print the current cell as a character.
    Output,
    /// `[ ... ]` — run the body while the current cell is nonzero.
    While(Vec<Statement>),
}
pub fn lex(source: &str) -> Vec<Token> { | |
let mut tokens = Vec::new(); | |
for c in source.chars() { | |
match c { | |
'+' => tokens.push(Token::Plus), | |
'-' => tokens.push(Token::Minus), | |
'>' => tokens.push(Token::Gt), | |
'<' => tokens.push(Token::Lt), | |
'[' => tokens.push(Token::LBracket), | |
']' => tokens.push(Token::RBracket), | |
',' => tokens.push(Token::Conma), | |
'.' => tokens.push(Token::Dot), | |
_ => (), | |
} | |
} | |
tokens.push(Token::Eof); | |
tokens | |
} | |
pub struct Parser { | |
tokens: Vec<Token>, | |
cursor: usize, | |
} | |
impl Parser { | |
pub fn new() -> Self { | |
Self { | |
tokens: Default::default(), | |
cursor: 0, | |
} | |
} | |
pub fn parse(&mut self, tokens: Vec<Token>) -> Vec<Statement> { | |
self.tokens = tokens; | |
let mut statements = Vec::new(); | |
while !self.current_token_is(Token::Eof) { | |
let stmt = self.parse_statement(); | |
statements.push(stmt); | |
self.next_token(); | |
} | |
statements | |
} | |
fn parse_statement(&mut self) -> Statement { | |
match self.current_token() { | |
Token::Plus => return Statement::Increment, | |
Token::Minus => return Statement::Decrement, | |
Token::Gt => return Statement::IncrementPointer, | |
Token::Lt => return Statement::DecrementPointer, | |
Token::Conma => return Statement::Input, | |
Token::Dot => return Statement::Output, | |
Token::LBracket => return self.parse_while_statement(), | |
_ => panic!("compile error!"), | |
} | |
} | |
fn parse_while_statement(&mut self) -> Statement { | |
let mut statements = Vec::new(); | |
self.next_token(); | |
while !self.current_token_is(Token::RBracket) && !self.current_token_is(Token::Eof) { | |
let stmt = self.parse_statement(); | |
statements.push(stmt); | |
self.next_token(); | |
} | |
Statement::While(statements) | |
} | |
fn current_token(&self) -> Token { | |
self.tokens[self.cursor] | |
} | |
fn next_token(&mut self) { | |
self.cursor += 1; | |
} | |
fn current_token_is(&self, t: Token) -> bool { | |
self.current_token() == t | |
} | |
} | |
/// Number of byte cells on the interpreter's tape.
const MEMORY_SIZE: usize = 100;
pub struct Interpreter { | |
memory: [u8; MEMORY_SIZE], | |
ptr: usize, | |
} | |
impl Interpreter { | |
pub fn new() -> Self { | |
Self { | |
memory: [0; MEMORY_SIZE], | |
ptr: 0, | |
} | |
} | |
pub fn exec(&mut self, statements: &[Statement]) { | |
for stmt in statements { | |
match stmt { | |
Statement::IncrementPointer => self.ptr += 1, | |
Statement::DecrementPointer => self.ptr -= 1, | |
Statement::Increment => { | |
self.memory[self.ptr] = (Wrapping(self.memory[self.ptr]) + Wrapping(1)).0 | |
} | |
Statement::Decrement => { | |
self.memory[self.ptr] = (Wrapping(self.memory[self.ptr]) - Wrapping(1)).0 | |
} | |
Statement::Output => print!("{}", self.memory[self.ptr] as char), | |
Statement::Input => { | |
let mut input: [u8; 1] = [0; 1]; | |
std::io::stdin() | |
.read_exact(&mut input) | |
.expect("failed to input"); | |
self.memory[self.ptr] = input[0]; | |
} | |
Statement::While(statements) => { | |
while self.memory[self.ptr] != 0 { | |
self.exec(statements); | |
} | |
} | |
} | |
} | |
} | |
fn dump_memory(&self) { | |
let a = &(self.memory[0..25]); | |
for x in a { | |
print!(" {}", x); | |
} | |
println!() | |
} | |
} | |
fn main() { | |
let code = "+++++++[>++++++++++<-]> | |
++.---.+++++++..+++.++++++++. | |
--------.+++.------.--------."; | |
let tokens = lex(&code); | |
let mut parser = Parser::new(); | |
let statements = parser.parse(tokens); | |
let mut interpreter = Interpreter::new(); | |
interpreter.exec(&statements); | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment