add multiple error reporting
parent a67d4cb273
commit 1da18a14b7
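In short, the parser now records each failure in an internal Vec<Error> and keeps going, instead of returning on the first unexpected token; callers can inspect the collected errors through Parser::errors(). Below is a minimal usage sketch based only on the types this commit touches; the parse_and_report helper and its body are illustrative, not part of the diff.

    use crate::{lexer::Lexer, parser::Parser};

    // Hypothetical helper, not part of this commit: parse a source string and
    // report every collected error instead of stopping at the first bad token.
    fn parse_and_report(source: String) {
        let mut parser = Parser::new(Lexer::new(source));

        // parse() still returns Result<Program>, but statement-level failures
        // are now pushed into the parser's error list rather than returned as Err.
        let program = parser.parse().unwrap();

        if !parser.errors().is_empty() {
            // Error implements Display through the new impl fmt::Display for Error.
            for (i, error) in parser.errors().iter().enumerate() {
                eprintln!("{i}: {error}");
            }
            return;
        }

        println!("{program:?}");
    }
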
src/ast.rs

@@ -36,7 +36,7 @@ impl Debug for Program {
 pub struct LetStatement {
     pub token: Token,
     pub name: Identifier,
-    pub value: Box<dyn Expression>,
+    pub value: Rc<dyn Expression>,
 }
 
 impl Node for LetStatement {}
@@ -65,5 +65,4 @@ impl Expression for DummyExpression {
     fn expression_node(&self) {
         panic!("this is dummy");
     }
-
 }

src/main.rs

@@ -1,8 +1,8 @@
 mod ast;
 mod lexer;
+mod parser;
 mod repl;
 mod token;
-mod parser;
 
 fn main() {
     let stdout = std::io::stdout();

src/parser.rs (118 lines changed)

@@ -1,28 +1,53 @@
-use std::rc::Rc;
+use std::{fmt, rc::Rc};
 
-use crate::{lexer::Lexer, token::Token, ast::{Program, Statement, LetStatement, Identifier, DummyExpression}};
+use crate::{
+    ast::{DummyExpression, Expression, Identifier, LetStatement, Program, Statement},
+    lexer::Lexer,
+    token::Token,
+};
 
 pub type Result<T> = std::result::Result<T, Error>;
 
 #[derive(Debug)]
-enum Error {
-    UnexpectedToken{
+pub enum Error {
+    UnexpectedToken {
         expected: Token,
         actual: Option<Token>,
     },
 }
 
-struct Parser {
+impl fmt::Display for Error {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "parser error: ")?;
+        use Error::*;
+        match self {
+            UnexpectedToken { expected, actual } => write!(
+                f,
+                "expected token `{:?}`, actual token: `{:?}`",
+                expected, actual
+            )?,
+        }
+        Ok(())
+    }
+}
+
+pub struct Parser {
     lexer: Lexer,
     cur_token: Option<Token>,
     peek_token: Option<Token>,
+    errors: Vec<Error>,
 }
 
 impl Parser {
     pub fn new(mut lexer: Lexer) -> Self {
         let cur_token = lexer.next();
         let peek_token = lexer.next();
-        Self { lexer, cur_token, peek_token }
+        Self {
+            lexer,
+            cur_token,
+            peek_token,
+            errors: Vec::new(),
+        }
     }
 
     pub fn parse(&mut self) -> Result<Program> {
@@ -30,12 +55,12 @@ impl Parser {
             statements: Vec::new(),
         };
 
         let mut done = Some(());
 
         while done.is_some() {
-            let stmt = self.parse_statement();
-            program.statements.push(stmt?);
-
+            if let Some(stmt) = self.parse_statement() {
+                program.statements.push(stmt);
+            }
             done = self.next();
         }
 
@@ -43,53 +68,70 @@ impl Parser {
         Ok(program)
     }
 
-    fn parse_statement(&mut self) -> Result<Rc<dyn Statement>> {
+    pub fn errors(&self) -> &Vec<Error> {
+        &self.errors
+    }
+
+    fn parse_statement(&mut self) -> Option<Rc<dyn Statement>> {
         match &self.cur_token {
             Some(token) => {
                 use Token::*;
                 match token {
                     Let => self.parse_let_statement(),
-                    t => unimplemented!("{t:?} statement token not impl"),
+                    t => None,
                 }
             }
-            None => unreachable!()
+            None => unreachable!(),
        }
     }
 
-    fn parse_let_statement(&mut self) -> Result<Rc<dyn Statement>> {
+    fn parse_let_statement(&mut self) -> Option<Rc<dyn Statement>> {
         let token = self.cur_token.clone().unwrap();
 
-        self.expect_peek(&Token::Ident("".to_string()))?;
+        if !self.expect_peek(&Token::Ident("".to_string())) {
+            return None;
+        }
 
         let name = Identifier {
             token: self.cur_token.clone().unwrap(),
         };
 
-        self.expect_peek(&Token::Assign)?;
+        if !self.expect_peek(&Token::Assign) {
+            return None;
+        }
 
+        let value = self.parse_expression()?;
+
+        Some(Rc::new(LetStatement { token, name, value }))
+    }
+
+    fn parse_expression(&mut self) -> Option<Rc<dyn Expression>> {
         while !self.cur_token_is(&Token::Semicolon) {
             self.next();
         }
 
-        Ok(Rc::new(LetStatement {
-            token,
-            name,
-            value: Box::new(DummyExpression {}),
-        }))
+        Some(Rc::new(DummyExpression {}))
     }
 
-    fn expect_peek(&mut self, token: &Token) -> Result<()> {
+    fn expect_peek(&mut self, token: &Token) -> bool {
         if self.peek_token.is_none() {
-            return Err(Error::UnexpectedToken { expected: token.clone(), actual: None });
+            self.errors.push(Error::UnexpectedToken {
+                expected: token.clone(),
+                actual: None,
+            });
+            return false;
        }
 
         let peek_token = self.peek_token.clone().unwrap();
 
         if token.is_same_type(&peek_token) {
             self.next();
-            Ok(())
+            return true;
         } else {
-            Err(Error::UnexpectedToken { expected: token.clone(), actual: None })
+            self.errors.push(Error::UnexpectedToken {
+                expected: token.clone(),
+                actual: None,
+            });
+            return false;
         }
     }
 
@@ -116,7 +158,6 @@ impl Iterator for Parser {
     fn next(&mut self) -> Option<Self::Item> {
         let peek_token = self.lexer.next();
-
 
         self.cur_token = self.peek_token.clone();
         self.peek_token = peek_token;
 
@@ -131,7 +172,7 @@ impl Iterator for Parser {
 mod tests {
     use std::rc::Rc;
 
-    use crate::{lexer::Lexer, ast::Statement};
+    use crate::{ast::Statement, lexer::Lexer};
 
     use super::Parser;
 
@@ -140,21 +181,19 @@ mod tests {
         let source = "let x = 5;\
         let y = 10;\
         let foobar = 838383;\
-        ".to_string();
+        "
+        .to_string();
 
         let lexer = Lexer::new(source);
 
         let mut parser = Parser::new(lexer);
 
         let program = parser.parse().unwrap();
+        check_parser_errors(parser);
 
         assert_eq!(program.statements.len(), 3);
 
-        let expected_identifiers = vec![
-            "x",
-            "y",
-            "foobar",
-        ];
+        let expected_identifiers = vec!["x", "y", "foobar"];
         let mut statements_iter = program.statements.iter();
         for tt in expected_identifiers {
             let statement = statements_iter.next().unwrap();
@@ -169,4 +208,17 @@ mod tests {
         );
     }
 
+    fn check_parser_errors(parser: Parser) {
+        if parser.errors().len() == 0 {
+            return;
+        }
+
+        let mut err = "parser errors:".to_owned();
+
+        for (i, error) in parser.errors().iter().enumerate() {
+            err.push_str(&format!("\n\t{i}: {error}"))
+        }
+
+        panic!("{err}");
+    }
 }

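As an aside, not part of the commit: a sketch of a companion test that exercises the accumulation behaviour. With a source in which two let statements are malformed, each failed expect_peek should push an UnexpectedToken instead of aborting the parse, so errors() ends up non-empty. The test name and input are hypothetical and assume the same imports as the existing test module.

    // Hypothetical test, not in this commit: both malformed let statements
    // below should each record an error instead of aborting parsing.
    #[test]
    fn let_statement_errors_are_collected() {
        let source = "let x 5;\
        let = 10;"
            .to_string();

        let mut parser = Parser::new(Lexer::new(source));
        let _ = parser.parse();

        assert!(!parser.errors().is_empty());
    }
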
src/repl.rs

@@ -1,6 +1,6 @@
 use std::io::{BufRead, BufReader, Read, Write};
 
-use crate::{lexer::Lexer, token::Token};
+use crate::{lexer::Lexer, parser::Parser, token::Token};
 
 const PROMPT: &str = ">> ";
 
@@ -18,7 +18,7 @@ pub fn start(mut w: impl Write, r: impl Read) {
             return;
         }
 
-        let lex = Lexer::new(line);
+        let lex = Lexer::new(line.clone());
 
         for token in lex {
             if token == Token::EOF {
@@ -26,5 +26,10 @@ pub fn start(mut w: impl Write, r: impl Read) {
             }
             writeln!(w, "{token:?}").unwrap();
         }
+
+        let mut parser = Parser::new(Lexer::new(line));
+        let program = parser.parse().unwrap();
+
+        writeln!(w, "{program:?}").unwrap();
     }
 }

src/token.rs (16 lines changed)

@@ -39,8 +39,20 @@ impl Token {
     pub fn is_same_type(&self, other: &Token) -> bool {
         use Token::*;
         match self {
-            Ident(_) => if let Ident(_) = other { true } else { false },
-            Int(_) => if let Int(_) = other { true } else { false },
+            Ident(_) => {
+                if let Ident(_) = other {
+                    true
+                } else {
+                    false
+                }
+            }
+            Int(_) => {
+                if let Int(_) = other {
+                    true
+                } else {
+                    false
+                }
+            }
             tok => tok == other,
         }
     }
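Also an aside, not part of the diff: the expanded if/else blocks in is_same_type compare only the enum variant and ignore the payload, so an equivalent, more compact form using matches! would be:

    pub fn is_same_type(&self, other: &Token) -> bool {
        use Token::*;
        match self {
            // matches! checks the variant and ignores the payload, which is
            // exactly what the expanded if/else blocks do.
            Ident(_) => matches!(other, Ident(_)),
            Int(_) => matches!(other, Int(_)),
            tok => tok == other,
        }
    }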