Skip to content

Commit

Permalink
feat: parser unary .NOT. operator
Browse files Browse the repository at this point in the history
  • Loading branch information
fcoury committed Sep 17, 2024
1 parent a1fc781 commit a9a17d7
Show file tree
Hide file tree
Showing 3 changed files with 124 additions and 6 deletions.
1 change: 1 addition & 0 deletions core/src/ir.rs
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,7 @@ pub fn emit_ir(

result
}
Exp::Unary(_, _) => todo!(),
};

Ok(val)
Expand Down
59 changes: 59 additions & 0 deletions core/src/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -129,6 +129,8 @@ pub enum Token {
Or,
#[strum(props(regex = r"^\.AND\."))]
And,
#[strum(props(regex = r"^\.NOT\."))]
Not,
#[strum(props(regex = r"^\*\*"))]
StarStar,
#[strum(props(regex = r"^%"))]
Expand All @@ -137,6 +139,16 @@ pub enum Token {
Arrow,
#[strum(props(regex = r"^\$"))]
Dollar,
#[strum(props(regex = r"^&"))]
Ampersand,
#[strum(props(regex = r"^\+\+"))]
PlusPlus,
#[strum(props(regex = r"^--"))]
MinusMinus,
#[strum(props(regex = r"^@"))]
At,
#[strum(props(regex = r"^:"))]
Colon,
}

impl Token {
Expand Down Expand Up @@ -457,4 +469,51 @@ mod tests {
]
);
}

#[test]
fn test_colon() {
let program = r#"myBrowse:pageUp()"#;
let mut lexer = Lexer::new(program);
let tokens = lexer.tokenize().unwrap();
assert_eq!(
tokens,
vec![
Token::Identifier("myBrowse".to_string()),
Token::Colon,
Token::Identifier("pageUp".to_string()),
Token::OpenParens,
Token::CloseParens,
]
);
}

#[test]
fn test_minus_minus() {
let program = r#"--a"#;
let mut lexer = Lexer::new(program);
let tokens = lexer.tokenize().unwrap();
assert_eq!(
tokens,
vec![Token::MinusMinus, Token::Identifier("a".to_string()),]
);
}

#[test]
fn test_not() {
let program = r#".NOT. a"#;
let mut lexer = Lexer::new(program);
let tokens = lexer.tokenize().unwrap();
assert_eq!(
tokens,
vec![Token::Not, Token::Identifier("a".to_string()),]
);
}

#[test]
fn test_at() {
let program = r#"@a"#;
let mut lexer = Lexer::new(program);
let tokens = lexer.tokenize().unwrap();
assert_eq!(tokens, vec![Token::At, Token::Identifier("a".to_string()),]);
}
}
70 changes: 64 additions & 6 deletions core/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,9 @@ impl<'a> Parser<'a> {
}

fn parse_exp(&mut self) -> anyhow::Result<Exp> {
let exp = match self.next_token()? {
let exp = match self.peek_token() {
Some(Token::Identifier(name)) => {
self.take_token()?; // consumes identifier
if self.peek_token() == Some(Token::ColonEqual) {
self.parse_assignment(name)?
} else if self.peek_token() == Some(Token::OpenParens) {
Expand All @@ -34,9 +35,13 @@ impl<'a> Parser<'a> {
Exp::Var(name)
}
}
Some(Token::Int(n)) => Exp::Constant(n),
Some(token) => {
anyhow::bail!("Expected expression, found {token:?}");
Some(Token::Int(n)) => {
self.take_token()?; // consumes int
Exp::Constant(n)
}
Some(_) => {
self.parse_unary()?
// anyhow::bail!("Expected expression, found {token:?}");
}
None => {
anyhow::bail!("Expected expression, found end of file");
Expand Down Expand Up @@ -70,6 +75,26 @@ impl<'a> Parser<'a> {
Ok(Exp::FunCall(name, args))
}

fn parse_unary(&mut self) -> anyhow::Result<Exp> {
let operator = match self.next_token()? {
Some(Token::Not) => UnaryOperator::Not,
Some(token) => {
return Err(anyhow::anyhow!(
"Expected unary operator, found {:?}",
token
));
}
None => {
return Err(anyhow::anyhow!(
"Expected unary operator, found end of file"
));
}
};

println!("creating unary with {operator:?}");
Ok(Exp::Unary(operator, Box::new(self.parse_exp()?)))
}

fn expect(&mut self, expected: Token) -> anyhow::Result<()> {
let Some(actual) = self.next_token()? else {
anyhow::bail!("Expected {expected:?}, found end of file");
Expand Down Expand Up @@ -102,6 +127,7 @@ impl<'a> Parser<'a> {
}
}

/// Root of the parsed AST: the ordered list of top-level statements.
#[derive(Debug, Clone, PartialEq)]
pub struct Program {
    // Statements in the order they appeared in the source.
    pub statements: Vec<Statement>,
}
Expand All @@ -112,15 +138,47 @@ impl Program {
}
}

#[derive(Debug, Clone)]
/// A single top-level statement in a [`Program`].
#[derive(Debug, Clone, PartialEq)]
pub enum Statement {
    /// A bare expression evaluated for its effect or value.
    Expression(Exp),
}

#[derive(Debug, Clone)]
/// An expression node in the AST.
#[derive(Debug, Clone, PartialEq)]
pub enum Exp {
    /// A reference to a variable by name.
    Var(String),
    /// An integer literal.
    Constant(i32),
    /// An assignment: target expression and the value assigned to it.
    Assignment(Box<Exp>, Box<Exp>),
    /// A function call: callee name and its argument expressions.
    FunCall(String, Vec<Exp>),
    /// A unary operation applied to a single operand (e.g. `.NOT. a`).
    Unary(UnaryOperator, Box<Exp>),
}

/// Operators that take a single operand.
#[derive(Debug, Clone, PartialEq)]
pub enum UnaryOperator {
    /// Logical negation, written `.NOT.` in the source language.
    Not,
}

#[cfg(test)]
mod tests {
    use crate::lexer::Lexer;

    use super::*;

    #[test]
    fn unary_not() {
        // Lex and parse `.NOT. a`, expecting one unary-expression statement.
        let mut lexer = Lexer::new(r#".NOT. a"#);
        let tokens = lexer.tokenize().unwrap();
        let mut parser = Parser::new(&tokens);
        let parsed = parser.parse().unwrap();

        let expected = Program {
            statements: vec![Statement::Expression(Exp::Unary(
                UnaryOperator::Not,
                Box::new(Exp::Var("a".into())),
            ))],
        };
        assert_eq!(parsed, expected);
    }
}

0 comments on commit a9a17d7

Please sign in to comment.