diff --git a/src/ast.rs b/src/ast.rs index 2d921ef..8fe9215 100644 --- a/src/ast.rs +++ b/src/ast.rs @@ -4,20 +4,28 @@ pub struct Module { pub statements: Vec<Statement>, } +#[derive(Debug)] pub enum Statement { FunctionDeclaration { name: Token, parameters: Vec<ParameterDeclaration>, + statements: Vec<Statement>, }, - Expression, + Expression(Expression), } +#[derive(Debug)] pub struct ParameterDeclaration { - name: Token, - typename: Token, + pub name: Token, + pub typename: Token, } +#[derive(Debug)] pub enum Expression { Identifier(Token), - FunctionCall {}, + FunctionCall { + function: Box<Expression>, + arguments: Vec<Expression>, + return_type: Option<Token>, + }, } diff --git a/src/main.rs b/src/main.rs index 0345762..77a7cfb 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,4 +1,4 @@ -use token::Token; +use parse::Parser; mod ast; mod format; @@ -7,34 +7,21 @@ mod parse; mod token; fn main() { - println!("Hello, world!"); -} - -#[cfg(test)] -mod tests { - use crate::{ - format::Formatter, - lexer::{self, Lexer}, - }; - + use format::Formatter; + use lexer::Lexer; const BASIC: &str = r#" function hello(name: string){ - console.log("Hey, ", name); + println("Hey, ", name); } -console.log("Starting!"); +println("Starting!"); hello(); "#; - #[test] - fn lex() { - println!("Running lex"); - let lexer = Lexer::new(BASIC, Some("basic.file".to_string())); - let tokens = lexer.lex(); - println!( - "{}", - tokens.format(crate::format::FormatterOptions {}).unwrap() - ); - } + let lexer = Lexer::new(BASIC, Some("basic.file".to_string())); + let tokens = lexer.lex(); + let mut parser = Parser::new(tokens); + let statement = parser.statement(); + println!("{statement:?}"); } diff --git a/src/parse.rs b/src/parse.rs deleted file mode 100644 index 1970f90..0000000 --- a/src/parse.rs +++ /dev/null @@ -1,13 +0,0 @@ -use crate::token::Token; - -pub struct Parser { - tokens: Vec<Token>, -} - -impl Parser { - pub fn new(tokens: Vec<Token>) -> Parser { - Self { tokens } - } - - fn parse(&mut self) {} -} diff --git a/src/parse/macros.rs b/src/parse/macros.rs new file 
mode 100644 index 0000000..36c3e2a --- /dev/null +++ b/src/parse/macros.rs @@ -0,0 +1,55 @@ +#[macro_export] +macro_rules! expect_token { + ($self:ident, $expect:pat) => { + let t = $self.consume(); + if !matches!(t.kind, $expect) { + todo!("Expected token, found {:?}", t.kind) + } + }; +} + +#[macro_export] +macro_rules! expect_identifier { + ($self:ident) => {{ + let t = $self.consume(); + if !matches!(t.kind, TokenKind::Identifier(_)) { + todo!("Expected token, found {:?}", t.kind) + } + t + }}; +} + +#[macro_export] +macro_rules! expect_any_keyword { + ($self:ident) => {{ + let t = $self.consume(); + if !matches!(t.kind, TokenKind::Keyword(_)) { + todo!("Expected token, found {:?}", t.kind) + } + t + }}; +} + +#[macro_export] +macro_rules! expect_keyword { + ($self:ident, $keyword:pat) => { + let t = $self.consume(); + if !matches!(t.kind, TokenKind::Keyword($keyword)) { + todo!("Expected token, found {:?}", t.kind) + } + }; +} + +#[macro_export] +macro_rules! peek_keyword { + ($self:ident, $keyword:pat) => { + matches!($self.peek().kind, TokenKind::Keyword($keyword)) + }; +} + +#[macro_export] +macro_rules! 
peek_match { ($self:ident, $p:pat) => { matches!($self.peek().kind, $p) }; } } diff --git a/src/parse/mod.rs b/src/parse/mod.rs new file mode 100644 index 0000000..8c70495 --- /dev/null +++ b/src/parse/mod.rs @@ -0,0 +1,77 @@ +mod macros; + +use crate::{ + ast, expect_any_keyword, expect_identifier, expect_keyword, expect_token, peek_keyword, + peek_match, + token::{KeywordKind, Token, TokenKind}, +}; + +pub struct Parser { + tokens: Vec<Token>, + current: usize, +} + +impl Parser { + pub fn new(tokens: Vec<Token>) -> Parser { + Self { tokens, current: 0 } + } + + pub fn statement(&mut self) -> ast::Statement { + if peek_keyword!(self, KeywordKind::function) { + return self.function_declaration(); + } + return self.expression_statement(); + todo!("No statement"); + } + + fn function_declaration(&mut self) -> ast::Statement { + expect_keyword!(self, KeywordKind::function); + let id = expect_identifier!(self); + expect_token!(self, TokenKind::LeftParen); + + let mut parameters = Vec::new(); + while peek_match!(self, TokenKind::Identifier(_)) { + let name = expect_identifier!(self); + expect_token!(self, TokenKind::Colon); + let typename = expect_any_keyword!(self); + let parameter = ast::ParameterDeclaration { + name: name.clone(), + typename: typename.clone(), + }; + parameters.push(parameter); + } + + expect_token!(self, TokenKind::RightParen); + + expect_token!(self, TokenKind::LeftCurly); + + let mut statements = Vec::new(); + while !peek_match!(self, TokenKind::RightCurly) { + let statement = self.statement(); + statements.push(statement); + } + + expect_token!(self, TokenKind::RightCurly); + + ast::Statement::FunctionDeclaration { + name: id.clone(), + parameters, + statements, + } + } + + fn expression_statement(&mut self) -> ast::Statement { + todo!() + } +} + +impl Parser { + fn peek(&self) -> &Token { + &self.tokens[self.current] + } + fn consume(&mut self) -> Token { + let token = &self.tokens[self.current]; + self.current += 1; + token.clone() + } +} diff --git 
a/src/token.rs b/src/token.rs index 51c7e07..415ddc1 100644 --- a/src/token.rs +++ b/src/token.rs @@ -1,12 +1,12 @@ use anyhow::anyhow; -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Token { pub kind: TokenKind, pub location: TokenLocation, } -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum TokenKind { Identifier(String), Literal(LiteralKind), @@ -25,14 +25,14 @@ pub enum TokenKind { EndOfFile, } -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum CommentKind { Line, Block, } #[allow(non_camel_case_types)] -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum KeywordKind { function, string, @@ -51,19 +51,19 @@ impl TryFrom<&str> for KeywordKind { } } -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum LiteralKind { String(String), Number(Number), } -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum Number { Integer(usize), Float(f64), } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct TokenLocation { pub file: Option<String>, pub line: usize,