From 2293c442eb7897b6c7ba1676a852f14658149dbd Mon Sep 17 00:00:00 2001
From: ishanjain28
Date: Sun, 19 Jan 2020 19:57:53 +0530
Subject: [PATCH] Fix bugs and complete prefix and infix expression parsers

1. Renamed some variants of TokenType.
2. Implemented std::fmt::Display and std::string::ToString for some types.
3. Added the prefix and infix expression parsers.
4. Added tests to verify the correctness of the new parsers.
---
 src/lexer/mod.rs      |  52 ++++----
 src/parser/ast/mod.rs | 201 ++++++++++++++++++++++-------
 src/parser/mod.rs     | 290 ++++++++++++++++++++++++++++++++++++++----
 src/repl.rs           |   7 +-
 4 files changed, 452 insertions(+), 98 deletions(-)

diff --git a/src/lexer/mod.rs b/src/lexer/mod.rs
index 9a0030e..3ac96f7 100644
--- a/src/lexer/mod.rs
+++ b/src/lexer/mod.rs
@@ -1,6 +1,5 @@
 use std::{
     collections::HashMap,
-    convert::Into,
     iter::Peekable,
     str::{self, Chars},
 };
@@ -33,10 +32,10 @@ pub enum TokenType {
     // Operators
     Assign,
     Plus,
-    Multiply,
-    Divide,
-    Subtract,
-    ExclamationMark,
+    Asterisk,
+    Slash,
+    Minus,
+    Bang,
     LessThan,
     GreaterThan,
     Equals,
@@ -60,17 +59,17 @@ pub enum TokenType {
     Return,
 }
 
-impl Into<&'static str> for TokenType {
-    fn into(self) -> &'static str {
+impl TokenType {
+    pub fn to_string(&self) -> &'static str {
         match self {
             TokenType::Assign => "=",
             TokenType::Plus => "+",
-            TokenType::Multiply => "*",
-            TokenType::Divide => "/",
-            TokenType::Subtract => "-",
-            TokenType::ExclamationMark => "!",
-            TokenType::LessThan => "<=",
-            TokenType::GreaterThan => ">=",
+            TokenType::Asterisk => "*",
+            TokenType::Slash => "/",
+            TokenType::Minus => "-",
+            TokenType::Bang => "!",
+            TokenType::LessThan => "<",
+            TokenType::GreaterThan => ">",
             TokenType::Equals => "==",
             TokenType::NotEquals => "!=",
             TokenType::Comma => ",",
@@ -86,7 +85,10 @@ impl Into<&'static str> for TokenType {
             TokenType::True => "true",
             TokenType::False => "false",
             TokenType::Return => "return",
-            _ => unreachable!(),
+            _ => {
+                eprintln!("{:?}", self);
+                unreachable!()
+            }
         }
     }
 }
@@ -113,9 +115,11 @@ impl Token {
             literal: Some(value.to_string()),
         }
     }
+}
 
-    pub fn to_string(&self) -> &'static str {
-        self.name.into()
+impl ToString for Token {
+    fn to_string(&self) -> String {
+        self.name.to_string().into()
     }
 }
 
@@ -207,9 +211,9 @@ impl<'a> Iterator for Lexer<'a> {
                 }
             }
             Some('+') => Some(Token::new(TokenType::Plus)),
-            Some('*') => Some(Token::new(TokenType::Multiply)),
-            Some('/') => Some(Token::new(TokenType::Divide)),
-            Some('-') => Some(Token::new(TokenType::Subtract)),
+            Some('*') => Some(Token::new(TokenType::Asterisk)),
+            Some('/') => Some(Token::new(TokenType::Slash)),
+            Some('-') => Some(Token::new(TokenType::Minus)),
             Some(',') => Some(Token::new(TokenType::Comma)),
             Some(';') => Some(Token::new(TokenType::Semicolon)),
             Some('(') => Some(Token::new(TokenType::LParen)),
@@ -225,7 +229,7 @@ impl<'a> Iterator for Lexer<'a> {
                     self.read_char();
                     Some(Token::new(TokenType::NotEquals))
                 } else {
-                    Some(Token::new(TokenType::ExclamationMark))
+                    Some(Token::new(TokenType::Bang))
                 }
             }
             Some('>') => Some(Token::new(TokenType::GreaterThan)),
@@ -363,10 +367,10 @@ mod tests {
             Token::with_value(TokenType::Ident, "ten"),
             Token::new(TokenType::RParen),
             Token::new(TokenType::Semicolon),
-            Token::new(TokenType::ExclamationMark),
-            Token::new(TokenType::Subtract),
-            Token::new(TokenType::Divide),
-            Token::new(TokenType::Multiply),
+            Token::new(TokenType::Bang),
+            Token::new(TokenType::Minus),
+            Token::new(TokenType::Slash),
+            Token::new(TokenType::Asterisk),
             Token::with_value(TokenType::Int, "5"),
             Token::new(TokenType::Semicolon),
             Token::with_value(TokenType::Int, "5"),
diff --git a/src/parser/ast/mod.rs b/src/parser/ast/mod.rs
index ba69042..6e63000 100644
--- a/src/parser/ast/mod.rs
+++ b/src/parser/ast/mod.rs
@@ -2,9 +2,13 @@
 use {
     crate::{
         lexer::{Token, TokenType},
-        parser::{Parser, ParserError},
+        parser::{Error as ParserError, Parser},
+    },
+    std::{
+        cmp::PartialOrd,
+        convert::From,
+        fmt::{Display, Error as FmtError, Formatter},
     },
-    std::convert::From,
 };
 
 #[derive(Debug, PartialEq)]
@@ -12,23 +16,17 @@ pub struct Program {
     pub statements: Vec<Statement>,
 }
 
-impl ToString for Program {
-    fn to_string(&self) -> String {
+impl Display for Program {
+    fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
         let mut out = String::new();
 
         for statement in &self.statements {
             out.push_str(&statement.to_string());
-            out.push('\n');
         }
-        out
+        write!(f, "{}", out)
     }
 }
 
-pub enum Node {
-    Statement(Statement),
-    Expression(Expression),
-}
-
 #[derive(Debug, PartialEq)]
 pub enum Statement {
     Let(LetStatement),
     Return(ReturnStatement),
     ExpressionStatement(ExpressionStatement),
@@ -48,12 +46,12 @@ impl<'a> Statement {
     }
 }
 
-impl ToString for Statement {
-    fn to_string(&self) -> String {
+impl Display for Statement {
+    fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
         match self {
-            Statement::Let(v) => v.to_string(),
-            Statement::Return(v) => v.to_string(),
-            Statement::ExpressionStatement(v) => v.to_string(),
+            Statement::Let(v) => write!(f, "{}", v.to_string()),
+            Statement::Return(v) => write!(f, "{}", v.to_string()),
+            Statement::ExpressionStatement(v) => write!(f, "{}", v.to_string()),
         }
     }
 }
@@ -76,7 +74,6 @@ impl LetStatement {
         let ident = parser.expect_peek(TokenType::Ident)?;
 
         stmt.name.value = ident.literal?;
-
         parser.expect_peek(TokenType::Assign)?;
 
         // TODO: Right now, We are just skipping over all the expressions
@@ -92,8 +89,8 @@ impl LetStatement {
     }
 }
 
-impl ToString for LetStatement {
-    fn to_string(&self) -> String {
+impl Display for LetStatement {
+    fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
         let mut out = format!("{} {} = ", Self::token_literal(), self.name.value);
 
         if let Some(v) = &self.value {
@@ -101,7 +98,7 @@ impl ToString for LetStatement {
             out.push_str(&a);
         }
         out.push(';');
-        out
+        write!(f, "{}", out)
     }
 }
 
@@ -119,14 +116,13 @@ impl ReturnStatement {
         return Some(stmt);
     }
 
-    // TODO: REMOVE THIS!
     const fn token_literal() -> &'static str {
         "return"
     }
 }
 
-impl ToString for ReturnStatement {
-    fn to_string(&self) -> String {
+impl Display for ReturnStatement {
+    fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
         let mut out = String::from(Self::token_literal());
 
         if let Some(v) = &self.return_value {
@@ -135,7 +131,7 @@ impl ToString for ReturnStatement {
             out.push_str(&a);
         }
         out.push(';');
-        out
+        write!(f, "{}", out)
     }
 }
 
@@ -147,7 +143,6 @@ pub struct ExpressionStatement {
 
 impl ExpressionStatement {
     fn parse(parser: &mut Parser, current_token: Token) -> Option<Self> {
-        // let expr = Expression::parse(parser, token.clone(), ExpressionPriority::Lowest)?;
         let stmt = ExpressionStatement {
             token: current_token.clone(),
             expression: Expression::parse(parser, current_token, ExpressionPriority::Lowest)?,
@@ -159,14 +154,14 @@ impl ExpressionStatement {
     }
 }
 
-impl ToString for ExpressionStatement {
-    fn to_string(&self) -> String {
-        self.expression.to_string()
+impl Display for ExpressionStatement {
+    fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
+        write!(f, "{}", self.expression.to_string())
     }
 }
 
-#[derive(Debug, PartialEq)]
-enum ExpressionPriority {
+#[derive(Debug, PartialEq, Copy, PartialOrd, Clone)]
+pub enum ExpressionPriority {
     Lowest = 0,
     Equals = 1,
     LessGreater = 2,
@@ -176,24 +171,46 @@ enum ExpressionPriority {
     Call = 6,
 }
 
-// TODO: Expressions are not going to be a struct so using this here just as a placeholder
-
 #[derive(Debug, PartialEq)]
 pub enum Expression {
     Identifier(Identifier),
     IntegerLiteral(IntegerLiteral),
+    PrefixExpression(PrefixExpression),
+    InfixExpression(InfixExpression),
     // TODO: Temporary placeholder value. Should be removed once this section is done
     None,
 }
 
 impl Expression {
-    fn parse(parser: &mut Parser, token: Token, precedence: ExpressionPriority) -> Option<Self> {
-        let prefix = parser.prefix_parse_fns.get(&token.name)?;
-
-        prefix(parser, token)
+    fn parse(parser: &mut Parser, ctoken: Token, precedence: ExpressionPriority) -> Option<Self> {
+        match parser.prefix_parse_fns.get(&ctoken.name) {
+            Some(prefix) => {
+                let mut left_expr = prefix(parser, ctoken);
+                while !parser.peek_token_is(TokenType::Semicolon)
+                    && precedence < parser.peek_precedence()
+                {
+                    let peek_token = match parser.lexer.peek() {
+                        Some(token) => token.clone(),
+                        None => return left_expr,
+                    };
+                    match parser.infix_parse_fns.get(&peek_token.name) {
+                        Some(infix) => {
+                            let next_token = parser.lexer.next()?;
+                            left_expr = infix(parser, next_token, left_expr.unwrap());
+                        }
+                        None => return left_expr,
+                    };
+                }
+                left_expr
+            }
+            None => {
+                parser.no_prefix_parse_fn_error(&ctoken.name);
+                None
+            }
+        }
     }
 
-    pub fn parse_identifier(parser: &mut Parser, token: Token) -> Option<Self> {
+    pub fn parse_identifier(_parser: &mut Parser, token: Token) -> Option<Self> {
         Some(Self::Identifier(Identifier::new(
             token.name,
             &token.literal?,
@@ -214,12 +231,46 @@ impl Expression {
         Some(Self::IntegerLiteral(IntegerLiteral::new(TokenType::Int, n)))
     }
 
-    fn to_string(&self) -> String {
-        match self {
-            Expression::Identifier(v) => v.to_string(),
-            Expression::IntegerLiteral(v) => v.value.to_string(),
-            Expression::None => "None".into(),
-        }
+    pub fn parse_prefix_expression(parser: &mut Parser, ctoken: Token) -> Option<Self> {
+        let next_token = parser.lexer.next()?;
+        let right_expr = Expression::parse(parser, next_token.clone(), ExpressionPriority::Prefix)?;
+        Some(Expression::PrefixExpression(PrefixExpression {
+            token: ctoken.clone(),
+            operator: ctoken.to_string().into(),
+            right: Box::new(right_expr),
+        }))
+    }
+
+    pub fn parse_infix_expression(
+        parser: &mut Parser,
+        token: Token,
+        left_expr: Self,
+    ) -> Option<Self> {
+        let cprecedence = parser.current_precedence(&token.name);
+        let next_token = parser.lexer.next()?;
+        let right_expr = Expression::parse(parser, next_token, cprecedence)?;
+        Some(Expression::InfixExpression(InfixExpression::new(
+            token.clone(),
+            left_expr,
+            &token.to_string(),
+            right_expr,
+        )))
+    }
+}
+
+impl Display for Expression {
+    fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
+        write!(
+            f,
+            "{}",
+            match self {
+                Expression::Identifier(v) => v.to_string(),
+                Expression::IntegerLiteral(v) => v.value.to_string(),
+                Expression::PrefixExpression(v) => v.to_string(),
+                Expression::InfixExpression(v) => v.to_string(),
+                Expression::None => "None".into(),
+            }
+        )
     }
 }
 
@@ -245,9 +296,11 @@ impl Identifier {
             value: v.to_string(),
         }
     }
+}
 
-    pub fn to_string(&self) -> String {
-        self.value.clone()
+impl Display for Identifier {
+    fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
+        write!(f, "{}", self.value.clone())
     }
 }
 
@@ -266,10 +319,64 @@ impl IntegerLiteral {
     }
 }
 
+#[derive(Debug, PartialEq)]
+pub struct PrefixExpression {
+    token: Token,
+    operator: String,
+    right: Box<Expression>,
+}
+
+impl PrefixExpression {
+    pub fn new(token: Token, operator: &str, right: Expression) -> Self {
+        Self {
+            token: token,
+            operator: operator.to_string(),
+            right: Box::new(right),
+        }
+    }
+}
+
+impl Display for PrefixExpression {
+    fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
+        write!(f, "({}{})", self.operator, self.right.to_string())
+    }
+}
+
+#[derive(Debug, PartialEq)]
+pub struct InfixExpression {
+    token: Token,
+    left: Box<Expression>,
+    operator: String,
+    right: Box<Expression>,
+}
+
+impl InfixExpression {
+    pub fn new(token: Token, left: Expression, operator: &str, right: Expression) -> Self {
+        Self {
+            token: token,
+            left: Box::new(left),
+            operator: operator.to_string(),
+            right: Box::new(right),
+        }
+    }
+}
+
+impl Display for InfixExpression {
+    fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
+        write!(
+            f,
+            "({} {} {})",
+            self.left.to_string(),
+            self.operator,
+            self.right.to_string()
+        )
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use crate::{
-        lexer::{Token, TokenType},
+        lexer::TokenType,
         parser::{
             ast::{Expression, Identifier, LetStatement, ReturnStatement, Statement},
             Program,
@@ -298,7 +405,7 @@ mod tests {
         };
         assert_eq!(
             program.to_string(),
-            "let myVar = anotherVar;\nreturn 5;\nreturn;\n"
+            "let myVar = anotherVar;return 5;return;"
         );
     }
 }
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index 7f9efab..7fdc601 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -2,7 +2,7 @@ pub mod ast;
 use {
     crate::{
         lexer::{Lexer, Token, TokenType},
-        parser::ast::{Expression, Program, Statement},
+        parser::ast::{Expression, ExpressionPriority, Program, Statement},
     },
     std::{
        collections::HashMap,
@@ -12,11 +12,26 @@ use {
 };
 
 type PrefixParseFn = fn(&mut Parser, token: Token) -> Option<Expression>;
-type InfixParseFn = fn(Expression) -> Option<Expression>;
+type InfixParseFn = fn(&mut Parser, Token, Expression) -> Option<Expression>;
+
+lazy_static! {
+    static ref PRECEDENCE_MAP: HashMap<TokenType, ExpressionPriority> = {
+        let mut m = HashMap::new();
+        m.insert(TokenType::Equals, ExpressionPriority::Equals);
+        m.insert(TokenType::NotEquals, ExpressionPriority::Equals);
+        m.insert(TokenType::LessThan, ExpressionPriority::LessGreater);
+        m.insert(TokenType::GreaterThan, ExpressionPriority::LessGreater);
+        m.insert(TokenType::Plus, ExpressionPriority::Sum);
+        m.insert(TokenType::Minus, ExpressionPriority::Sum);
+        m.insert(TokenType::Slash, ExpressionPriority::Product);
+        m.insert(TokenType::Asterisk, ExpressionPriority::Product);
+        m
+    };
+}
 
 pub struct Parser<'a> {
     lexer: Peekable<Lexer<'a>>,
-    errors: Vec<ParserError>,
+    errors: Vec<Error>,
     prefix_parse_fns: HashMap<TokenType, PrefixParseFn>,
     infix_parse_fns: HashMap<TokenType, InfixParseFn>,
 }
@@ -32,23 +47,35 @@ impl<'a> Parser<'a> {
 
         parser.register_prefix(TokenType::Ident, Expression::parse_identifier);
         parser.register_prefix(TokenType::Int, Expression::parse_integer_literal);
+        parser.register_prefix(TokenType::Bang, Expression::parse_prefix_expression);
+        parser.register_prefix(TokenType::Minus, Expression::parse_prefix_expression);
+
+        parser.register_infix(TokenType::Plus, Expression::parse_infix_expression);
+        parser.register_infix(TokenType::Minus, Expression::parse_infix_expression);
+        parser.register_infix(TokenType::Slash, Expression::parse_infix_expression);
+        parser.register_infix(TokenType::Asterisk, Expression::parse_infix_expression);
+        parser.register_infix(TokenType::Equals, Expression::parse_infix_expression);
+        parser.register_infix(TokenType::NotEquals, Expression::parse_infix_expression);
+        parser.register_infix(TokenType::LessThan, Expression::parse_infix_expression);
+        parser.register_infix(TokenType::GreaterThan, Expression::parse_infix_expression);
 
         parser
     }
 
-    pub fn parse_program(&mut self) -> Program {
+    pub fn parse_program(&mut self) -> Option<Program> {
         let mut program = Program { statements: vec![] };
 
         while let Some(token) = self.lexer.next() {
             if token.name == TokenType::EOF {
                 break;
             }
-            match Statement::parse(self, token) {
-                Some(v) => program.statements.push(v),
-                None => {} // This will happen in case of a parsing error or something
-            }
+
+            match Statement::parse(self, token.clone()) {
+                Some(x) => program.statements.push(x),
+                None => eprintln!("error in generating statement: {:?}", token),
+            };
         }
-        program
+        Some(program)
     }
 
     fn peek_token_is(&mut self, token: TokenType) -> bool {
@@ -58,11 +85,6 @@ impl<'a> Parser<'a> {
         }
     }
-    // TODO: Remove this. We most likely don't need it anywhere
-    // fn current_token_is(&self, token: TokenType) -> bool {
-    //     false
-    // }
-
     fn expect_peek(&mut self, token: TokenType) -> Option<Token> {
         if self.peek_token_is(token) {
             self.lexer.next()
         } else {
@@ -79,9 +101,12 @@ impl<'a> Parser<'a> {
     fn peek_error(&mut self, et: TokenType, gt: Option<TokenType>) {
         let msg = match gt {
             Some(v) => format!("expected next token to be {:?}, Got {:?} instead", et, v),
-            None => format!("expected next token to be {:?}, Got None instead", et),
+            None => format!(
+                "expected next token to be {}, Got None instead",
+                et.to_string()
+            ),
         };
-        self.errors.push(ParserError { reason: msg });
+        self.errors.push(Error { reason: msg });
     }
 
     fn register_prefix(&mut self, token: TokenType, f: PrefixParseFn) {
@@ -91,13 +116,37 @@ impl<'a> Parser<'a> {
     fn register_infix(&mut self, token: TokenType, f: InfixParseFn) {
         self.infix_parse_fns.insert(token, f);
     }
+
+    fn no_prefix_parse_fn_error(&mut self, token: &TokenType) {
+        self.errors.push(Error {
+            reason: format!("no prefix parse function for {} found", token.to_string()),
+        });
+    }
+
+    fn peek_precedence(&mut self) -> ExpressionPriority {
+        match self.lexer.peek() {
+            Some(token) => match PRECEDENCE_MAP.get(&token.name) {
+                Some(p) => *p,
+                None => ExpressionPriority::Lowest,
+            },
+            None => ExpressionPriority::Lowest,
+        }
+    }
+
+    fn current_precedence(&mut self, token: &TokenType) -> ExpressionPriority {
+        match PRECEDENCE_MAP.get(&token) {
+            Some(p) => *p,
+            None => ExpressionPriority::Lowest,
+        }
+    }
 }
 
-pub struct ParserError {
+#[derive(PartialEq, Debug)]
+pub struct Error {
     reason: String,
 }
 
-impl Display for ParserError {
+impl Display for Error {
     fn fmt(&self, fmt: &mut Formatter) -> Result<(), FmtError> {
         write!(fmt, "{}", self.reason)
     }
@@ -107,13 +156,7 @@ impl Display for ParserError {
 mod tests {
     use crate::{
         lexer::{Lexer, Token, TokenType},
-        parser::{
-            ast::{
-                Expression, ExpressionStatement, Identifier, IntegerLiteral, LetStatement, Program,
-                Statement,
-            },
-            Parser,
-        },
+        parser::{ast::*, Parser},
     };
 
     fn check_parser_errors(p: &Parser) {
@@ -135,6 +178,8 @@ mod tests {
         let mut parser = Parser::new(lexer);
         let program = parser.parse_program();
         check_parser_errors(&parser);
+        assert!(program.is_some());
+        let program = program.unwrap();
         assert_eq!(program.statements.len(), 3);
         assert_eq!(
             program,
@@ -170,9 +215,12 @@ mod tests {
         let program = parser.parse_program();
 
         check_parser_errors(&parser);
+        assert!(program.is_some());
+        let program = program.unwrap();
         assert_eq!(program.statements.len(), 3);
         assert_eq!(parser.errors.len(), 0);
     }
+
     #[test]
     fn identifier_expression() {
         let lexer = Lexer::new("foobar;");
@@ -180,6 +228,8 @@ mod tests {
         let program = parser.parse_program();
         check_parser_errors(&parser);
+        assert!(program.is_some());
+        let program = program.unwrap();
 
         assert_eq!(program.statements.len(), 1);
         assert_eq!(
             program.statements,
@@ -196,13 +246,201 @@ mod tests {
         let mut parser = Parser::new(lexer);
         let program = parser.parse_program();
         check_parser_errors(&parser);
+        assert!(program.is_some());
 
         assert_eq!(
-            program.statements,
+            program.unwrap().statements,
             vec![Statement::ExpressionStatement(ExpressionStatement {
                 token: Token::with_value(TokenType::Int, "5"),
                 expression: Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 5))
             })]
         );
     }
+
+    #[test]
+    fn prefix_expressions() {
+        let prefix_tests = [
+            (
+                "!5",
+                vec![Statement::ExpressionStatement(ExpressionStatement {
+                    token: Token::new(TokenType::Bang),
+                    expression: Expression::PrefixExpression(PrefixExpression::new(
+                        Token::new(TokenType::Bang),
+                        "!",
+                        Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 5)),
+                    )),
+                })],
+            ),
+            (
+                "-15;",
+                vec![Statement::ExpressionStatement(ExpressionStatement {
+                    token: Token::new(TokenType::Minus),
+                    expression: Expression::PrefixExpression(PrefixExpression::new(
+                        Token::new(TokenType::Minus),
+                        "-",
+                        Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 15)),
+                    )),
+                })],
+            ),
+            (
+                "!foobar;",
+                vec![Statement::ExpressionStatement(ExpressionStatement {
+                    token: Token::new(TokenType::Bang),
+                    expression: Expression::PrefixExpression(PrefixExpression::new(
+                        Token::new(TokenType::Bang),
+                        "!",
+                        Expression::Identifier(Identifier::new(TokenType::Ident, "foobar")),
+                    )),
+                })],
+            ),
+            // TODO: Add this test when we add function call parser
+            // (
+            //     "!isGreaterThanZero( 2);",
+            //     vec![Statement::ExpressionStatement(ExpressionStatement {
+            //         token: Token::new(TokenType::Bang),
+            //         expression: Expression::PrefixExpression(PrefixExpression::new(
+            //             Token::new(TokenType::Bang),
+            //             "!",
+            //             Expression::Identifier(Identifier::new(
+            //                 TokenType::Function,
+            //                 "",
+            //             )),
+            //         )),
+            //     })
+        ];
+
+        for test in prefix_tests.iter() {
+            let lexer = Lexer::new(test.0);
+            let mut parser = Parser::new(lexer);
+            let program = parser.parse_program();
+            check_parser_errors(&parser);
+            assert!(program.is_some());
+            assert_eq!(program.unwrap().statements, test.1);
+        }
+    }
+
+    #[test]
+    fn parsing_infix_expressions() {
+        let infix_tests = [
+            (
+                "5 + 10;",
+                vec![Statement::ExpressionStatement(ExpressionStatement {
+                    token: Token::with_value(TokenType::Int, "5"),
+                    expression: Expression::InfixExpression(InfixExpression::new(
+                        Token::new(TokenType::Plus),
+                        Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 5)),
+                        "+",
+                        Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 10)),
+                    )),
+                })],
+            ),
+            (
+                "5 - 10;",
+                vec![Statement::ExpressionStatement(ExpressionStatement {
+                    token: Token::with_value(TokenType::Int, "5"),
+                    expression: Expression::InfixExpression(InfixExpression::new(
+                        Token::new(TokenType::Minus),
+                        Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 5)),
+                        "-",
+                        Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 10)),
+                    )),
+                })],
+            ),
+            (
+                "5 * 15;",
+                vec![Statement::ExpressionStatement(ExpressionStatement {
+                    token: Token::with_value(TokenType::Int, "5"),
+                    expression: Expression::InfixExpression(InfixExpression::new(
+                        Token::new(TokenType::Asterisk),
+                        Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 5)),
+                        "*",
+                        Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 15)),
+                    )),
+                })],
+            ),
+            (
+                "15 / 3;",
+                vec![Statement::ExpressionStatement(ExpressionStatement {
+                    token: Token::with_value(TokenType::Int, "15"),
+                    expression: Expression::InfixExpression(InfixExpression::new(
+                        Token::new(TokenType::Slash),
+                        Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 15)),
+                        "/",
+                        Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 3)),
+                    )),
+                })],
+            ),
+            (
+                "5 > 15;",
+                vec![Statement::ExpressionStatement(ExpressionStatement {
+                    token: Token::with_value(TokenType::Int, "5"),
+                    expression: Expression::InfixExpression(InfixExpression::new(
+                        Token::new(TokenType::GreaterThan),
+                        Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 5)),
+                        ">",
+                        Expression::IntegerLiteral(IntegerLiteral::new(TokenType::Int, 15)),
+                    )),
+                })],
+            ),
+            (
+                "a + b + c;",
+                vec![Statement::ExpressionStatement(ExpressionStatement {
+                    token: Token::with_value(TokenType::Ident, "a"),
+                    expression: Expression::InfixExpression(InfixExpression::new(
+                        Token::new(TokenType::Plus),
+                        Expression::InfixExpression(InfixExpression::new(
+                            Token::new(TokenType::Plus),
+                            Expression::Identifier(Identifier::new(TokenType::Ident, "a")),
+                            "+",
+                            Expression::Identifier(Identifier::new(TokenType::Ident, "b")),
+                        )),
+                        "+",
+                        Expression::Identifier(Identifier::new(TokenType::Ident, "c")),
+                    )),
+                })],
+            ),
+        ];
+        for test in infix_tests.iter() {
+            let lexer = Lexer::new(test.0);
+            let mut parser = Parser::new(lexer);
+            let program = parser.parse_program();
+            check_parser_errors(&parser);
+            assert!(program.is_some());
+            assert_eq!(program.unwrap().statements, test.1);
+        }
+    }
+
+    #[test]
+    fn test_operator_precedence_parsing() {
+        let test_cases = [
+            ("-a * b", "((-a) * b)"),
+            ("!-a", "(!(-a))"),
+            ("a + b + c", "((a + b) + c)"),
+            ("a + b - c", "((a + b) - c)"),
+            ("a * b * c", "((a * b) * c)"),
+            ("a * b / c", "((a * b) / c)"),
+            ("a + b / c", "(a + (b / c))"),
+            ("a + b * c + d / e - f", "(((a + (b * c)) + (d / e)) - f)"),
+            ("3 + 4; -5 * 5", "(3 + 4)((-5) * 5)"),
+            ("5 > 4 == 3 < 4", "((5 > 4) == (3 < 4))"),
+            ("5 < 4 != 3 > 4", "((5 < 4) != (3 > 4))"),
+            (
+                "3 + 4 * 5 == 3 * 1 + 4 * 5",
+                "((3 + (4 * 5)) == ((3 * 1) + (4 * 5)))",
+            ),
+            (
+                "3 + 4 * 5 == 3 * 1 + 4 * 5",
+                "((3 + (4 * 5)) == ((3 * 1) + (4 * 5)))",
+            ),
+        ];
+
+        for test in test_cases.iter() {
+            let lexer = Lexer::new(test.0);
+            let mut parser = Parser::new(lexer);
+            let program = parser.parse_program();
+            check_parser_errors(&parser);
+            assert!(program.is_some());
+            assert_eq!(program.unwrap().to_string(), test.1);
+        }
+    }
 }
diff --git a/src/repl.rs b/src/repl.rs
index fe523ce..8875260 100644
--- a/src/repl.rs
+++ b/src/repl.rs
@@ -26,6 +26,11 @@ fn start(mut ip: R, mut out: W) {
 
         let mut parser = Parser::new(tokens);
 
-        let stmts = parser.parse_program();
+        let program = parser.parse_program();
+
+        match program {
+            Some(stmts) => println!("{}", stmts),
+            None => (),
+        }
     }
 }
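
Usage note (not part of the patch): the sketch below illustrates how the parser completed by this commit could be driven from other code. It is an assumption-laden example, not something taken from the diff itself: the crate name `monkey` and the `main` function are hypothetical, while `Lexer::new`, `Parser::new`, `parse_program` returning Option<Program>, and the Display impl on Program all come from the changes above. Its expected output mirrors the new test_operator_precedence_parsing cases.

    // Hypothetical driver; crate name `monkey` is an assumption.
    use monkey::{lexer::Lexer, parser::Parser};

    fn main() {
        // The lexer takes the source text; the parser consumes the lexer.
        let lexer = Lexer::new("3 + 4 * 5 == 3 * 1 + 4 * 5");
        let mut parser = Parser::new(lexer);

        // parse_program now returns Option<Program> rather than Program.
        match parser.parse_program() {
            // Program's Display impl renders the fully parenthesised form:
            // ((3 + (4 * 5)) == ((3 * 1) + (4 * 5)))
            Some(program) => println!("{}", program),
            None => eprintln!("failed to parse program"),
        }
    }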