Skip to content

Commit

Permalink
structure: Restructure lexer & parser files
Browse files Browse the repository at this point in the history
  • Loading branch information
mrkajetanp committed Sep 15, 2024
1 parent 9e89fc4 commit 8304f03
Show file tree
Hide file tree
Showing 14 changed files with 265 additions and 266 deletions.
File renamed without changes.
3 changes: 0 additions & 3 deletions src/ast/mod.rs

This file was deleted.

2 changes: 1 addition & 1 deletion src/error.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use crate::ast::ParserError;
use crate::codegen::CodegenError;
use crate::lexer::LexerError;
use crate::parser::ast::ParserError;
use crate::semantic::SemanticError;
use crate::typecheck::TypeCheckError;
use thiserror::Error;
Expand Down
22 changes: 11 additions & 11 deletions src/ir.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use std::fmt;

use strum::EnumIs;

use crate::ast::{self, Statement};
use crate::parser::ast;

#[derive(Debug)]
#[allow(dead_code)]
Expand Down Expand Up @@ -162,13 +162,13 @@ impl Instruction {

pub fn generate_from_statement(statement: ast::Statement, ctx: &mut IrCtx) -> Vec<Self> {
match statement {
Statement::Return(expr) => {
ast::Statement::Return(expr) => {
let (mut instructions, val) = Self::generate_from_expr(expr, ctx);
instructions.push(Instruction::Return(val));
instructions
}
Statement::Exp(expr) => Self::generate_from_expr(expr, ctx).0,
Statement::If(cond, then_stmt, else_statement) => {
ast::Statement::Exp(expr) => Self::generate_from_expr(expr, ctx).0,
ast::Statement::If(cond, then_stmt, else_statement) => {
let end_label = ctx.label("jump_end");
let (mut instructions, cond_val) = Self::generate_from_expr(cond, ctx);

Expand All @@ -187,20 +187,20 @@ impl Instruction {
}
instructions
}
Statement::Compound(block) => Self::generate_from_block(block, ctx),
Statement::Break(label) => {
ast::Statement::Compound(block) => Self::generate_from_block(block, ctx),
ast::Statement::Break(label) => {
vec![Instruction::Jump(Identifier::new(&format!(
"break_{}",
label.unwrap()
)))]
}
Statement::Continue(label) => {
ast::Statement::Continue(label) => {
vec![Instruction::Jump(Identifier::new(&format!(
"continue_{}",
label.unwrap()
)))]
}
Statement::While(cond, body, label) => {
ast::Statement::While(cond, body, label) => {
let start_label = Identifier::new(&format!("continue_{}", label.as_ref().unwrap()));
let end_label = Identifier::new(&format!("break_{}", label.as_ref().unwrap()));

Expand All @@ -215,7 +215,7 @@ impl Instruction {
log::trace!("Emitting IR for while -> {:?}", instructions);
instructions
}
Statement::DoWhile(body, cond, label) => {
ast::Statement::DoWhile(body, cond, label) => {
let start_label = Identifier::new(&format!("start_{}", label.as_ref().unwrap()));
let break_label = Identifier::new(&format!("break_{}", label.as_ref().unwrap()));
let continue_label =
Expand All @@ -232,7 +232,7 @@ impl Instruction {
log::trace!("Emitting IR for do-while -> {:?}", instructions);
instructions
}
Statement::For(init, cond, post, body, label) => {
ast::Statement::For(init, cond, post, body, label) => {
let start_label = Identifier::new(&format!("start_{}", label.as_ref().unwrap()));
let continue_label =
Identifier::new(&format!("continue_{}", label.as_ref().unwrap()));
Expand All @@ -256,7 +256,7 @@ impl Instruction {
instructions.push(Instruction::Label(end_label));
instructions
}
Statement::Null => vec![],
ast::Statement::Null => vec![],
// _ => todo!(),
}
}
Expand Down
99 changes: 0 additions & 99 deletions src/lexer.rs → src/lexer/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -306,102 +306,3 @@ pub fn run_lexer(source: String) -> LexerResult<Vec<TokenKind>> {
let mut lexer = Lexer::new(&source.trim());
lexer.tokenize()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tokenize_identifier() {
        // Helper building the expected identifier token for a given name.
        let ident = |name: &str| Some(TokenKind::Identifier(name.to_owned()));
        assert_eq!(TokenKind::from_str("test"), ident("test"));
        assert_eq!(TokenKind::from_str("main"), ident("main"));
        assert_eq!(TokenKind::from_str("ma_n"), ident("ma_n"));
        // A leading digit or an embedded '.' does not lex as an identifier.
        assert_eq!(TokenKind::from_str("53main"), None);
        assert_eq!(TokenKind::from_str("ma.in"), None);
    }

    #[test]
    fn tokenize_constant() {
        // Integer literals lex into Constant tokens carrying their value.
        for (src, value) in [("66", 66), ("32", 32)] {
            assert_eq!(TokenKind::from_str(src), Some(TokenKind::Constant(value)));
        }
    }

    #[test]
    fn tokenize_void() {
        assert_eq!(TokenKind::from_str("void"), Some(TokenKind::Void));
    }

    #[test]
    fn tokenize_return() {
        assert_eq!(TokenKind::from_str("return"), Some(TokenKind::Return));
    }

    #[test]
    fn tokenize_decrement() {
        assert_eq!(TokenKind::from_str("--"), Some(TokenKind::Decrement));
    }

    #[test]
    fn tokenize_complement() {
        assert_eq!(TokenKind::from_str("~"), Some(TokenKind::Complement));
    }

    #[test]
    fn tokenize_semicolon() {
        assert_eq!(TokenKind::from_str(";"), Some(TokenKind::Semicolon));
    }

    #[test]
    fn tokenize_brace_close() {
        assert_eq!(TokenKind::from_str("}"), Some(TokenKind::BraceClose));
    }

    #[test]
    fn tokenize_brace_open() {
        assert_eq!(TokenKind::from_str("{"), Some(TokenKind::BraceOpen));
    }

    #[test]
    fn tokenize_paren_close() {
        assert_eq!(TokenKind::from_str(")"), Some(TokenKind::ParenClose));
    }

    #[test]
    fn tokenize_paren_open() {
        assert_eq!(TokenKind::from_str("("), Some(TokenKind::ParenOpen));
    }

    #[test]
    fn tokenize_minus() {
        assert_eq!(TokenKind::from_str("-"), Some(TokenKind::Minus));
    }

    #[test]
    fn tokenize_plus() {
        assert_eq!(TokenKind::from_str("+"), Some(TokenKind::Plus));
    }

    #[test]
    fn tokenize_asterisk() {
        assert_eq!(TokenKind::from_str("*"), Some(TokenKind::Asterisk));
    }

    #[test]
    fn tokenize_slash() {
        assert_eq!(TokenKind::from_str("/"), Some(TokenKind::Slash));
    }

    #[test]
    fn tokenize_percent() {
        assert_eq!(TokenKind::from_str("%"), Some(TokenKind::Percent));
    }
}
4 changes: 4 additions & 0 deletions src/lexer/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
// Public interface of the lexer: the implementation lives in lexer/lexer.rs
// and is re-exported here so callers keep using `crate::lexer::TokenKind` etc.
#[allow(clippy::module_inception)] // submodule intentionally shares its parent's name
pub mod lexer;
pub use lexer::*;

// Unit tests live next to the implementation, in lexer/tests.rs.
#[cfg(test)]
mod tests;
95 changes: 95 additions & 0 deletions src/lexer/tests.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
use super::*;

#[test]
fn tokenize_identifier() {
    // Helper building the expected identifier token for a given name.
    let ident = |name: &str| Some(TokenKind::Identifier(name.to_owned()));
    assert_eq!(TokenKind::from_str("test"), ident("test"));
    assert_eq!(TokenKind::from_str("main"), ident("main"));
    assert_eq!(TokenKind::from_str("ma_n"), ident("ma_n"));
    // A leading digit or an embedded '.' does not lex as an identifier.
    assert_eq!(TokenKind::from_str("53main"), None);
    assert_eq!(TokenKind::from_str("ma.in"), None);
}

#[test]
fn tokenize_constant() {
    // Integer literals lex into Constant tokens carrying their value.
    for (src, value) in [("66", 66), ("32", 32)] {
        assert_eq!(TokenKind::from_str(src), Some(TokenKind::Constant(value)));
    }
}

#[test]
fn tokenize_void() {
    // The "void" keyword lexes to its dedicated token.
    assert_eq!(TokenKind::from_str("void"), Some(TokenKind::Void));
}

#[test]
fn tokenize_return() {
    // The "return" keyword lexes to its dedicated token.
    assert_eq!(TokenKind::from_str("return"), Some(TokenKind::Return));
}

#[test]
fn tokenize_decrement() {
    // "--" lexes as the decrement operator.
    assert_eq!(TokenKind::from_str("--"), Some(TokenKind::Decrement));
}

#[test]
fn tokenize_complement() {
    // "~" lexes as the bitwise-complement operator.
    assert_eq!(TokenKind::from_str("~"), Some(TokenKind::Complement));
}

#[test]
fn tokenize_semicolon() {
    // ";" lexes as the statement terminator.
    assert_eq!(TokenKind::from_str(";"), Some(TokenKind::Semicolon));
}

#[test]
fn tokenize_brace_close() {
    // "}" lexes as a closing brace.
    assert_eq!(TokenKind::from_str("}"), Some(TokenKind::BraceClose));
}

#[test]
fn tokenize_brace_open() {
    // "{" lexes as an opening brace.
    assert_eq!(TokenKind::from_str("{"), Some(TokenKind::BraceOpen));
}

#[test]
fn tokenize_paren_close() {
    // ")" lexes as a closing parenthesis.
    assert_eq!(TokenKind::from_str(")"), Some(TokenKind::ParenClose));
}

#[test]
fn tokenize_paren_open() {
    // "(" lexes as an opening parenthesis.
    assert_eq!(TokenKind::from_str("("), Some(TokenKind::ParenOpen));
}

#[test]
fn tokenize_minus() {
    // "-" lexes as the minus operator.
    assert_eq!(TokenKind::from_str("-"), Some(TokenKind::Minus));
}

#[test]
fn tokenize_plus() {
    // "+" lexes as the plus operator.
    assert_eq!(TokenKind::from_str("+"), Some(TokenKind::Plus));
}

#[test]
fn tokenize_asterisk() {
    // "*" lexes as the multiplication operator.
    assert_eq!(TokenKind::from_str("*"), Some(TokenKind::Asterisk));
}

#[test]
fn tokenize_slash() {
    // "/" lexes as the division operator.
    assert_eq!(TokenKind::from_str("/"), Some(TokenKind::Slash));
}

#[test]
fn tokenize_percent() {
    // "%" lexes as the remainder operator.
    assert_eq!(TokenKind::from_str("%"), Some(TokenKind::Percent));
}
3 changes: 2 additions & 1 deletion src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,17 +8,18 @@ use std::process::Command;
use strum::EnumIs;
use synoptic;

pub mod ast;
pub mod codegen;
pub mod error;
pub mod ir;
pub mod lexer;
#[cfg(feature = "llvm")]
pub mod llvm_ir;
pub mod parser;
pub mod semantic;
pub mod typecheck;

use cfg_if::cfg_if;
use parser::ast;

use lexer::TokenKind;

Expand Down
Loading

0 comments on commit 8304f03

Please sign in to comment.