From 553d6e6a497f3eabe76cd75a4626a5b241a87751 Mon Sep 17 00:00:00 2001
From: keplerHaloxx
Date: Sun, 18 Aug 2024 22:44:18 +1000
Subject: [PATCH] fix oopsie

---
 .gitignore    |   3 ++
 Cargo.toml    |   9 ++++
 src/errors.rs |  32 ++++++++++++++
 src/lexer.rs  |  88 +++++++++++++++++++++++++++++++++++++
 src/main.rs   | 102 +++++++++++++++++++++++++++++++++++++++++++
 src/parser.rs | 117 ++++++++++++++++++++++++++++++++++++++++++++++++++
 src/token.rs  |  55 ++++++++++++++++++++++++
 src/traits.rs |  16 +++++++
 8 files changed, 422 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 Cargo.toml
 create mode 100644 src/errors.rs
 create mode 100644 src/lexer.rs
 create mode 100644 src/main.rs
 create mode 100644 src/parser.rs
 create mode 100644 src/token.rs
 create mode 100644 src/traits.rs

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..f54bb5e
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+/target
+/.vscode
+Cargo.lock
\ No newline at end of file
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..cfe90a0
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "math_interpreter"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+colored = "2.1.0"
+strum = "0.26"
+strum_macros = "0.26"
diff --git a/src/errors.rs b/src/errors.rs
new file mode 100644
index 0000000..a0edc5a
--- /dev/null
+++ b/src/errors.rs
@@ -0,0 +1,32 @@
+use std::fmt::Display;
+
+use colored::Colorize;
+
+#[derive(Debug, Clone)]
+pub enum ErrorReason {
+    Error(String),
+}
+
+impl Display for ErrorReason {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            ErrorReason::Error(reason) => write!(f, "{}", reason.red()),
+        }
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct ParserError {
+    #[allow(dead_code)]
+    pub error: ErrorReason,
+    pub description: String,
+}
+
+impl ParserError {
+    pub fn new(error: ErrorReason) -> Self {
+        Self {
+            description: error.to_string(),
+            error,
+        }
+    }
+}
diff --git a/src/lexer.rs b/src/lexer.rs
new file mode 100644
index 0000000..bc315ed
--- /dev/null
+++ b/src/lexer.rs
@@ -0,0 +1,88 @@
+use std::iter::Peekable;
+
+use crate::{
+    errors::ErrorReason,
+    token::{Token, TokenKind, TokenValue},
+};
+
+const DIGITS: &str = ".0123456789";
+
+pub struct Lexer {
+    source: String,
+}
+
+impl Lexer {
+    pub fn new(source: String) -> Self {
+        Self { source }
+    }
+
+    pub fn tokenize(&self) -> Result<Vec<Token>, ErrorReason> {
+        let mut tokens: Vec<Token> = Vec::new();
+        let mut chars = self
+            .source
+            .chars()
+            .filter(|c| !c.is_whitespace())
+            .peekable();
+
+        while let Some(&current) = chars.peek() {
+            // Best solution I could find to make sure it doesn't skip
+            if DIGITS.contains(current) {
+                tokens.push(self.generate_number(&mut chars)?);
+            } else {
+                let token = match current {
+                    '+' => Token::new(TokenKind::Plus, None),
+                    '-' => Token::new(TokenKind::Minus, None),
+                    '*' => Token::new(TokenKind::Multiply, None),
+                    '/' => Token::new(TokenKind::Divide, None),
+                    '^' => Token::new(TokenKind::Power, None),
+                    '(' => Token::new(TokenKind::LParen, None),
+                    ')' => Token::new(TokenKind::RParen, None),
+                    _ => Token::new(
+                        TokenKind::Unknown,
+                        Some(TokenValue::StrValue(current.to_string())),
+                    ),
+                };
+                tokens.push(token);
+                chars.next();
+            }
+        }
+
+        Ok(tokens)
+    }
+
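+    /// Consumes a run of digit/'.' characters from the stream and returns a
+    /// single Number token, rejecting more than one decimal point or a lone '.'.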
+    fn generate_number<I>(&self, chars: &mut Peekable<I>) -> Result<Token, ErrorReason>
+    where
+        I: Iterator<Item = char>,
+    {
+        let mut decimal_point_counter = 0;
+        let mut number = String::new();
+
+        while let Some(&current) = chars.peek() {
+            if current == '.' {
+                decimal_point_counter += 1;
+                if decimal_point_counter > 1 {
+                    return Err(ErrorReason::Error("Too many decimal points".into()));
+                }
+            }
+            number.push(current);
+            chars.next();
+
+            // Peek the next character and check if it's valid for a number
+            if let Some(&next_char) = chars.peek() {
+                if !DIGITS.contains(next_char) {
+                    if number.trim() == "." {
+                        return Err(ErrorReason::Error("Stray decimal point found".into()));
+                    }
+                    break;
+                }
+            }
+        }
+
+        Ok(Token::new(
+            TokenKind::Number,
+            Some(TokenValue::NumValue(number.parse::<f64>().unwrap_or_else(
+                |_| panic!("Error parsing number '{number}'"),
+            ))),
+        ))
+    }
+}
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 0000000..ee13886
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,102 @@
+mod errors;
+mod lexer;
+mod parser;
+mod token;
+mod traits;
+
+use std::io::{stdin, stdout, Write};
+
+use colored::Colorize;
+use errors::{ErrorReason, ParserError};
+use lexer::Lexer;
+use parser::Parser;
+use token::{Token, TokenKind, TokenValue};
+
+/// Prompts the user for input and returns the trimmed result
+fn prompt_input(prompt: &str) -> String {
+    let mut input = String::new();
+
+    print!("{}", prompt);
+    stdout().flush().unwrap();
+    stdin().read_line(&mut input).unwrap();
+
+    input.trim().to_owned()
+}
+
+fn main() {
+    println!("MATH INTERPRETER");
+    println!("----------------");
+
+    loop {
+        let input = prompt_input("> ");
+
+        let lexer = Lexer::new(input);
+        let tokens_result = lexer.tokenize();
+        if let Err(err) = tokens_result {
+            println!("{}", err);
+            continue;
+        }
+        let tokens = tokens_result.unwrap();
+
+        let token_errors = get_tokens_errors(tokens.clone());
+        if !token_errors.is_empty() {
+            for err in token_errors {
+                println!(
+                    "{}",
+                    ParserError::new(ErrorReason::Error(format!("Invalid sequence: '{}'", err)))
+                        .description
+                );
+            }
+            println!();
+        } else {
+            pretty_print_tokens(tokens.clone());
+            let mut parser = Parser::new(tokens.into_iter());
+            let result = parser.parse_expr();
+            println!("{:?}", result);
+            println!(
+                "{} {}",
+                "RESULT:".bright_green(),
+                result.unwrap().evaluate().to_string().bright_green()
+            );
+        }
+    }
+}
+
+fn pretty_print_tokens(tokens: Vec<Token>) {
+    let token_len = tokens.len();
+    tokens.iter().enumerate().for_each(|(i, token)| {
+        if i == token_len - 1 {
+            println!("{token}");
+        } else {
+            print!("{}, ", token);
+        }
+    })
+}
+
+fn get_tokens_errors(tokens: Vec<Token>) -> Vec<String> {
+    let mut error_str: Vec<String> = vec![];
+    let mut current_sequence: String = String::new();
+    let mut tokens_iter = tokens.iter().peekable();
+    while let Some(token) = tokens_iter.next() {
+        // Valid tokens are skipped; only unknown ones are collected
+        if token.kind != TokenKind::Unknown {
+            continue;
+        }
+        if let TokenValue::StrValue(value) = &token.value {
+            // Push illegal char into current sequence
+            current_sequence.push(value.chars().next().unwrap());
+        }
+
+        // If the next token is missing or not unknown,
+        // push the current sequence to the error list
+        let peek = tokens_iter.peek();
+        if !current_sequence.is_empty()
+            && (peek.is_none() || peek.unwrap().kind != TokenKind::Unknown)
+        {
+            // Append the current sequence
+            error_str.push(current_sequence.clone());
+            current_sequence.clear();
+        }
+    }
+    error_str
+}
diff --git a/src/parser.rs b/src/parser.rs
new file mode 100644
index 0000000..0befa95
--- /dev/null
+++ b/src/parser.rs
@@ -0,0 +1,117 @@
+// - I needed some help making the AST. I don't think I could've made this by myself 😭
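+//
+// Structure (recursive descent): parse_expr handles +, - and ^, parse_term
+// handles * and /, and parse_factor handles numbers and parenthesised
+// expressions. As written, ^ shares the precedence of + and - (binding less
+// tightly than * and /) and associates left-to-right.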
+
+use std::iter::Peekable;
+
+use crate::{
+    errors::{ErrorReason, ParserError},
+    token::{Token, TokenKind, TokenValue},
+    traits::Round,
+};
+
+#[allow(dead_code)]
+#[derive(Debug)]
+pub enum ASTNode {
+    Number(f64),
+    BinaryOp(Box<ASTNode>, TokenKind, Box<ASTNode>),
+}
+
+pub struct Parser<I: Iterator<Item = Token>> {
+    tokens: Peekable<I>,
+}
+
+impl ASTNode {
+    pub fn evaluate(&self) -> f64 {
+        match self {
+            ASTNode::Number(val) => *val,
+            ASTNode::BinaryOp(left_node, op, right_node) => {
+                let left = left_node.evaluate();
+                let right = right_node.evaluate();
+                match op {
+                    TokenKind::Plus => left + right,
+                    TokenKind::Minus => left - right,
+                    TokenKind::Multiply => left * right,
+                    TokenKind::Divide => left / right,
+                    TokenKind::Power => left.powf(right),
+                    _ => panic!("wrong operation i cbf making proper errors for this"),
+                }
+                .round_to(5)
+            }
+        }
+    }
+}
+
+impl<I: Iterator<Item = Token>> Parser<I> {
+    pub fn new(tokens: I) -> Self {
+        Self {
+            tokens: tokens.peekable(),
+        }
+    }
+
+    fn advance(&mut self) -> Option<Token> {
+        self.tokens.next()
+    }
+
+    fn peek(&mut self) -> Option<&Token> {
+        self.tokens.peek()
+    }
+
+    pub fn parse_expr(&mut self) -> Result<ASTNode, ParserError> {
+        let mut node = self.parse_term()?;
+
+        while matches!(
+            self.peek().map(|t| t.kind),
+            Some(TokenKind::Plus) | Some(TokenKind::Minus) | Some(TokenKind::Power)
+        ) {
+            let operator = self.advance().unwrap().kind;
+            let right = self.parse_term()?;
+            node = ASTNode::BinaryOp(Box::new(node), operator, Box::new(right));
+        }
+
+        Ok(node)
+    }
+
+    fn parse_term(&mut self) -> Result<ASTNode, ParserError> {
+        let mut node = self.parse_factor()?;
+
+        while matches!(
+            self.peek().map(|t| t.kind),
+            Some(TokenKind::Multiply) | Some(TokenKind::Divide)
+        ) {
+            let operator = self.advance().unwrap().kind;
+            let right = self.parse_factor()?;
+            node = ASTNode::BinaryOp(Box::new(node), operator, Box::new(right));
+        }
+
+        Ok(node)
+    }
+
+    fn parse_factor(&mut self) -> Result<ASTNode, ParserError> {
+        match self.peek().map(|t| t.kind) {
+            Some(TokenKind::Number) => {
+                let value = if let TokenValue::NumValue(val) = self.advance().unwrap().value {
+                    val
+                } else {
+                    return Err(ParserError::new(ErrorReason::Error(
+                        "Expected a number".to_string(),
+                    )));
+                };
+                Ok(ASTNode::Number(value))
+            }
+            Some(TokenKind::LParen) => {
+                self.advance(); // Consume '('
+                let node = self.parse_expr()?;
+                if self.peek().map(|t| t.kind) != Some(TokenKind::RParen) {
+                    return Err(ParserError::new(ErrorReason::Error(
+                        "Expected ')'".into(),
+                    )));
+                }
+                self.advance(); // Consume ')'
+                Ok(node)
+            }
+            _ => Err(ParserError::new(ErrorReason::Error(format!(
+                "Unexpected token: {:?}",
+                self.peek()
+            )))),
+        }
+    }
+}
diff --git a/src/token.rs b/src/token.rs
new file mode 100644
index 0000000..e5c894f
--- /dev/null
+++ b/src/token.rs
@@ -0,0 +1,55 @@
+use std::fmt::Display;
+
+use strum_macros::{Display, EnumString};
+
+#[derive(Debug, PartialEq, EnumString, Display, Clone, Copy)]
+pub enum TokenKind {
+    Number,
+    Plus,
+    Minus,
+    Multiply,
+    Divide,
+    Power,
+    LParen,
+    RParen,
+    Function,
+    Unknown,
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub enum TokenValue {
+    StrValue(String),
+    NumValue(f64),
+    None,
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct Token {
+    pub kind: TokenKind,
+    pub value: TokenValue,
+}
+
+impl Token {
+    pub fn new(kind: TokenKind, value: Option<TokenValue>) -> Token {
+        if value.is_none() {
+            return Self {
+                kind,
+                value: TokenValue::None,
+            };
+        }
+        Self {
+            kind,
+            value: value.unwrap(),
+        }
+    }
+}
+
+impl Display for Token {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        if self.value == TokenValue::None {
+            write!(f, "{}", self.kind)
+        } else {
+            write!(f, "{}:{:?}", self.kind, self.value)
+        }
+    }
+}
diff --git a/src/traits.rs b/src/traits.rs
new file mode 100644
index 0000000..6134d89
--- /dev/null
+++ b/src/traits.rs
@@ -0,0 +1,16 @@
+pub trait Round {
+    fn round_to(self, precision: u32) -> f64;
+}
+
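+// Rounds to `precision` significant figures rather than decimal places, e.g.
+// 1234.5678_f64.round_to(5) == 1234.6 and 0.0012345_f64.round_to(2) == 0.0012.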
+impl Round for f64 {
+    fn round_to(self, precision: u32) -> f64 {
+        if self == 0. || precision == 0 {
+            0.
+        } else {
+            let shift = precision as i32 - self.abs().log10().ceil() as i32;
+            let shift_factor = 10_f64.powi(shift);
+
+            (self * shift_factor).round() / shift_factor
+        }
+    }
+}