error: Cleanup error handling
mrkajetanp committed Sep 13, 2024
1 parent d23a43a commit 9e89fc4
Showing 6 changed files with 95 additions and 92 deletions.
46 changes: 11 additions & 35 deletions src/ast/tree.rs
@@ -8,8 +8,10 @@ use thiserror::Error;

#[derive(Debug, Error)]
pub enum ParserError {
#[error("Unexpected token in the token stream")]
UnexpectedToken,
#[error("Syntax Error: Expected {0}, got {1}")]
FailedExpect(TokenKind, TokenKind),
#[error("Syntax Error: Unexpected token {0}")]
UnexpectedToken(TokenKind),
#[error("No tokens left in the token stream")]
NoTokens,
#[error("Malformed expression")]
@@ -34,13 +36,8 @@ impl Program {
let mut body = vec![];

while !tokens.is_empty() {
- match FunctionDeclaration::parse(&mut tokens) {
- Ok(func) => body.push(func),
- Err(err) => {
- log::error!("Parser error: {}", err);
- return Err(err);
- }
- }
+ let func = FunctionDeclaration::parse(&mut tokens);
+ body.push(func?);
}

Ok(Program { body })
@@ -209,11 +206,8 @@ pub struct VariableDeclaration {
impl VariableDeclaration {
fn parse(tokens: &mut VecDeque<TokenKind>) -> ParserResult<Self> {
log_trace("Parsing declaration from", tokens);
- // Silent expect here because we can use this failing to check
- // whether we're parsing a declaration or something else
- // so we don't want to log an error.
// TODO: actually use the type
- let _ty = expect_token_silent(TokenKind::Int, tokens)?;
+ let _ty = expect_token(TokenKind::Int, tokens)?;
let ident = Identifier::parse(expect_token(TokenKind::Identifier("".to_owned()), tokens)?)?;

let init = if tokens.front().unwrap().to_owned().is_semicolon() {
@@ -396,7 +390,6 @@ impl Expression {
log_trace("Trying expr from", tokens);

if tokens.len() == 0 {
log::error!("No tokens passed to the Expression parser");
return Err(ParserError::NoTokens);
}
let mut left = Expression::parse_factor(tokens)?;
@@ -431,7 +424,6 @@ impl Expression {
log_trace("Trying factor from", tokens);

if tokens.len() == 0 {
log::error!("No tokens passed to the Expression parser");
return Err(ParserError::NoTokens);
}
let token = tokens.front().unwrap().to_owned();
@@ -536,7 +528,7 @@ impl BinaryOperator {
TokenKind::GreaterEqualThan => Ok(Self::GreaterEqualThan),
TokenKind::LessThan => Ok(Self::LessThan),
TokenKind::GreaterThan => Ok(Self::GreaterThan),
- _ => Err(ParserError::UnexpectedToken),
+ _ => Err(ParserError::UnexpectedToken(token)),
}
}

@@ -564,7 +556,7 @@ impl UnaryOperator {
TokenKind::Complement => Ok(Self::Complement),
TokenKind::Minus => Ok(Self::Negation),
TokenKind::Not => Ok(Self::Not),
- _ => Err(ParserError::UnexpectedToken),
+ _ => Err(ParserError::UnexpectedToken(token)),
}
}
}
@@ -607,33 +599,17 @@ fn log_trace(msg: &str, tokens: &mut VecDeque<TokenKind>) {
}

#[inline(always)]
- fn expect_token_silent(
- expected: TokenKind,
- tokens: &mut VecDeque<TokenKind>,
- ) -> ParserResult<TokenKind> {
+ fn expect_token(expected: TokenKind, tokens: &mut VecDeque<TokenKind>) -> ParserResult<TokenKind> {
let exp = discriminant(&expected);
let actual = discriminant(&tokens[0]);

if actual != exp {
- Err(ParserError::UnexpectedToken)
+ Err(ParserError::FailedExpect(expected, tokens[0].clone()))
} else {
Ok(tokens.pop_front().unwrap())
}
}

- #[inline(always)]
- fn expect_token(expected: TokenKind, tokens: &mut VecDeque<TokenKind>) -> ParserResult<TokenKind> {
- let result = expect_token_silent(expected.clone(), tokens);
- if let Err(_) = result {
- log::error!(
- "Syntax Error: Expected {:?}, got {:?}",
- &expected,
- &tokens[0]
- );
- }
- result
- }

#[cfg(test)]
mod tests {
use super::*;
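Note on the expect_token rework above: the function compares tokens with std::mem::discriminant, i.e. by enum variant only, which is why call sites can pass a placeholder payload such as TokenKind::Identifier("".to_owned()). A minimal self-contained sketch of that comparison, using a made-up stand-in enum rather than the crate's real TokenKind:

use std::collections::VecDeque;
use std::mem::discriminant;

// Stand-in token type; the real TokenKind has many more variants.
#[derive(Debug, Clone, PartialEq)]
enum Tok {
    Int,
    Identifier(String),
    Semicolon,
}

// Variant-only comparison: Identifier("") matches Identifier("x").
fn expect(expected: Tok, tokens: &mut VecDeque<Tok>) -> Result<Tok, String> {
    if discriminant(&expected) != discriminant(&tokens[0]) {
        // Mirrors ParserError::FailedExpect(expected, actual).
        return Err(format!("Expected {:?}, got {:?}", expected, tokens[0]));
    }
    Ok(tokens.pop_front().unwrap())
}

fn main() {
    let mut tokens: VecDeque<Tok> =
        [Tok::Int, Tok::Identifier("x".to_owned()), Tok::Semicolon].into();
    assert!(expect(Tok::Int, &mut tokens).is_ok());
    // The empty payload is ignored; only the variant matters.
    assert!(expect(Tok::Identifier("".to_owned()), &mut tokens).is_ok());
    // The next token is Semicolon, so expecting Int fails with a descriptive message.
    assert!(expect(Tok::Int, &mut tokens).is_err());
}

Because FailedExpect now carries both the expected and the actual token, the message that the old expect_token wrapper produced with log::error! can instead be rendered wherever the error is finally displayed.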
58 changes: 58 additions & 0 deletions src/error.rs
@@ -0,0 +1,58 @@
use crate::ast::ParserError;
use crate::codegen::CodegenError;
use crate::lexer::LexerError;
use crate::semantic::SemanticError;
use crate::typecheck::TypeCheckError;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum ErrorKind {
#[error("Lexer Failed\n{0}")]
LexerError(LexerError),
#[error("AST Parsing Failed\n{0}")]
ParserError(ParserError),
#[error("Semantic Analysis Failed\n{0}")]
SemanticError(SemanticError),
#[error("Type Checking Failed\n{0}")]
TypeCheckError(TypeCheckError),
#[error("Codegen Failed\n{0}")]
CodegenError(CodegenError),
#[error("IO Error\n{0}")]
IOError(std::io::Error),
}

impl From<LexerError> for ErrorKind {
fn from(error: LexerError) -> Self {
Self::LexerError(error)
}
}

impl From<ParserError> for ErrorKind {
fn from(error: ParserError) -> Self {
Self::ParserError(error)
}
}

impl From<SemanticError> for ErrorKind {
fn from(error: SemanticError) -> Self {
Self::SemanticError(error)
}
}

impl From<TypeCheckError> for ErrorKind {
fn from(error: TypeCheckError) -> Self {
Self::TypeCheckError(error)
}
}

impl From<CodegenError> for ErrorKind {
fn from(error: CodegenError) -> Self {
Self::CodegenError(error)
}
}

impl From<std::io::Error> for ErrorKind {
fn from(error: std::io::Error) -> Self {
Self::IOError(error)
}
}
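These hand-written From impls are what let the driver code later in this commit replace map_err(|_| ErrorKind::...) with a bare ?, because ? applies an Into conversion to the error it propagates. A rough, self-contained sketch of the mechanism; the stage error below is a simplified stand-in for the crate's real ParserError, and the printed text is only illustrative:

use thiserror::Error;

// Simplified stand-in for one of the per-stage error types.
#[derive(Error, Debug)]
#[error("Syntax Error: Unexpected token {0}")]
struct ParserError(String);

#[derive(Error, Debug)]
enum ErrorKind {
    #[error("AST Parsing Failed\n{0}")]
    ParserError(ParserError),
}

impl From<ParserError> for ErrorKind {
    fn from(error: ParserError) -> Self {
        Self::ParserError(error)
    }
}

fn parse() -> Result<(), ParserError> {
    Err(ParserError("}".to_owned()))
}

// `?` applies From<ParserError> for ErrorKind automatically, so no map_err is needed.
fn compile() -> Result<(), ErrorKind> {
    parse()?;
    Ok(())
}

fn main() {
    // Prints the nested Display chain:
    // AST Parsing Failed
    // Syntax Error: Unexpected token }
    println!("{}", compile().unwrap_err());
}

The same conversion covers the std::io::Error values returned by fs::File::create and write_all in emit, which is why the explicit ErrorKind::IOError mapping disappears from lib.rs.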
20 changes: 14 additions & 6 deletions src/lexer.rs
@@ -1,4 +1,5 @@
use core::panic;
+ use std::fmt;

use regex::Regex;
use strum::IntoEnumIterator;
@@ -45,15 +46,15 @@ static COMMA_RE: &str = r"\,";

#[derive(Error, Debug)]
pub enum LexerError {
#[error("Syntax error")]
SyntaxError,
#[error("Syntax error: {0}\nAt source:\n{1}")]
SyntaxError(String, String),
}

pub type LexerResult<T> = Result<T, LexerError>;

// NOTE: The tokenizer will try tokens in-order based on this list
// It *must* be ordered longest-match first
- #[derive(EnumIter, EnumIs, Debug, strum_macros::Display, PartialEq, Clone)]
+ #[derive(EnumIter, EnumIs, Debug, PartialEq, Clone)]
pub enum TokenKind {
Return,
Int,
@@ -247,6 +248,12 @@ impl TokenKind {
}
}

+ impl fmt::Display for TokenKind {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{:?}", self)
+ }
+ }

struct Lexer<'a> {
source: &'a str,
}
@@ -278,16 +285,17 @@ impl<'a> Lexer<'a> {

if !token_found {
log::debug!("Parsed tokens: {:?}", tokens);
log::error!("Could not parse token");
let error_source: String = self
.source
.to_owned()
.lines()
.take(2)
.collect::<Vec<&str>>()
.join("\n");
log::error!("At source: \n{}", error_source);
return Err(LexerError::SyntaxError);
return Err(LexerError::SyntaxError(
"Could not parse token".to_owned(),
error_source,
));
}
}
Ok(tokens)
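Two details in the lexer changes above are easy to miss: TokenKind drops the derived strum_macros::Display in favour of a manual impl that forwards to Debug, presumably so that payload-carrying tokens such as Identifier("main") print with their contents in the new parser messages, and LexerError::SyntaxError now carries a message plus a source excerpt. A small sketch of how such an error formats; both strings are invented for illustration:

use thiserror::Error;

#[derive(Error, Debug)]
enum LexerError {
    #[error("Syntax error: {0}\nAt source:\n{1}")]
    SyntaxError(String, String),
}

fn main() {
    // Hypothetical failure on an unrecognised character.
    let err = LexerError::SyntaxError(
        "Could not parse token".to_owned(),
        "int main(void) {\n    return 0 @ 1;".to_owned(),
    );
    // Prints:
    // Syntax error: Could not parse token
    // At source:
    // int main(void) {
    //     return 0 @ 1;
    println!("{}", err);
}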
38 changes: 10 additions & 28 deletions src/lib.rs
@@ -1,15 +1,16 @@
#![feature(let_chains)]

+ use error::ErrorKind;
use lliw::Fg;
use std::fs;
use std::io::{Error, Write};
use std::process::Command;
use strum::EnumIs;
use synoptic;
- use thiserror::Error;

pub mod ast;
pub mod codegen;
+ pub mod error;
pub mod ir;
pub mod lexer;
#[cfg(feature = "llvm")]
@@ -21,24 +22,6 @@ use cfg_if::cfg_if;

use lexer::TokenKind;

- #[derive(Error, Debug)]
- pub enum ErrorKind {
- #[error("Lexer Failed")]
- LexerError,
- #[error("AST Parsing Failed")]
- ParserError,
- #[error("Semantic Analysis Failed")]
- SemanticError,
- #[error("Type Checking Failed")]
- TypeCheckError,
- #[error("Codegen Failed")]
- CodegenError,
- #[error("Asm Emission Failed")]
- AsmEmitError,
- #[error("IO Error")]
- IOError,
- }

type CompileResult<T> = Result<T, ErrorKind>;

#[derive(PartialEq, EnumIs, Clone, Copy)]
@@ -72,23 +55,23 @@ impl Driver {
log::debug!("Preprocessed source:");
log::debug!("\n{}", source);

- let tokens = self.lex(source).map_err(|_| ErrorKind::LexerError)?;
+ let tokens = self.lex(source)?;
log::debug!("Tokens:\n{:?}\n", &tokens);

if stage.is_lex() {
return Ok(());
}

- let ast = self.parse(tokens).map_err(|_| ErrorKind::ParserError)?;
+ let ast = self.parse(tokens)?;
log::debug!("Parsed AST:\n{}", ast);

if stage.is_parse() {
return Ok(());
}

- let ast = ast.validate().map_err(|_| ErrorKind::SemanticError)?;
+ let ast = ast.validate()?;
log::trace!("Resolved and labelled AST:\n{}", ast);
- let ast = ast.typecheck().map_err(|_| ErrorKind::TypeCheckError)?;
+ let ast = ast.typecheck()?;
log::debug!("Validated AST:\n{}", ast);

if stage.is_validate() {
@@ -163,21 +146,20 @@ impl Driver {
}

fn codegen(&self, ir: ir::Program) -> CompileResult<codegen::Program> {
- codegen::Program::codegen(ir).map_err(|_| ErrorKind::CodegenError)
+ codegen::Program::codegen(ir).map_err(|err| ErrorKind::CodegenError(err))
}

fn emit(&self, code: codegen::Program) -> Result<String, ErrorKind> {
let output_path = format!("{}.s", self.name);
- let asm = code.emit().map_err(|_| ErrorKind::AsmEmitError)?;
+ let asm = code.emit()?;

if log::log_enabled!(log::Level::Debug) {
log::debug!("Emitted asm:");
Driver::print_asm_with_highlight(&asm);
}

- let mut file = fs::File::create(&output_path).map_err(|_| ErrorKind::IOError)?;
- file.write_all(asm.as_bytes())
- .map_err(|_| ErrorKind::IOError)?;
+ let mut file = fs::File::create(&output_path)?;
+ file.write_all(asm.as_bytes())?;

Ok(output_path)
}
13 changes: 2 additions & 11 deletions src/semantic.rs
@@ -108,14 +108,7 @@ fn clone_identifier_map(map: &IdentifierMap) -> IdentifierMap {
impl ast::Program {
pub fn validate(self) -> SemanticResult<Self> {
let mut ctx = SemanticCtx::new();
- self.resolve(&mut ctx)
- .inspect_err(|err| {
- log::error!("Variable resolution error: {}", err);
- })?
- .label(&mut ctx)
- .inspect_err(|err| {
- log::error!("Loop labelling error: {}", err);
- })
+ self.resolve(&mut ctx)?.label(&mut ctx)
}

pub fn resolve(self, ctx: &mut SemanticCtx) -> SemanticResult<Self> {
@@ -479,9 +472,7 @@ impl ast::Expression {
Box::new(right.resolve(ctx, identifiers)?),
)
} else {
- let lvalue = format!("{:?}", left);
- log::error!("Invalid assignment lvalue {}", lvalue);
- return Err(SemanticError::InvalidLvalue(lvalue));
+ return Err(SemanticError::InvalidLvalue(format!("{:?}", left)));
}
}
Self::Conditional(cond, then_exp, else_exp) => Self::Conditional(
12 changes: 0 additions & 12 deletions src/typecheck.rs
@@ -57,18 +57,6 @@ impl Program {

impl Program {
pub fn typecheck(self) -> TypeCheckResult<Self> {
- let ast = match self._typecheck() {
- Ok(ast) => Ok(ast),
- Err(err) => {
- log::error!("Type Checking Error: {}", err);
- Err(err)
- }
- };
-
- ast
- }
-
- fn _typecheck(self) -> TypeCheckResult<Self> {
log::trace!("*** Running the type checker ***");
let mut ctx = TypeCheckCtx::new();

