
Commit

ran clippy
sn99 committed Dec 20, 2019
1 parent 74372da commit 808e473
Showing 4 changed files with 23 additions and 54 deletions.
1 change: 0 additions & 1 deletion Cargo.toml
@@ -19,4 +19,3 @@ libc = "0.2.65"
 [build-dependencies]
 cc = "*"
 bindgen = "0.52.0"
-
2 changes: 1 addition & 1 deletion src/lexer.rs
@@ -35,7 +35,7 @@ pub enum Token {
 impl Token {
     pub fn get_string(self) -> String {
         match self {
-            Token::Ident(name) => return name,
+            Token::Ident(name) => name,
             _ => unimplemented!(),
         }
     }
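This is the pattern flagged by clippy's needless_return lint: when a match arm (or any block) ends in an expression, that expression is already the block's value, so an explicit return is noise. A minimal standalone sketch of the idiom, not code from this repository:

    // clippy::needless_return flags the explicit return on a tail expression.
    fn double_verbose(x: i32) -> i32 {
        return x * 2;
    }

    // Idiomatic form: the final expression is the function's value.
    fn double(x: i32) -> i32 {
        x * 2
    }

    fn main() {
        assert_eq!(double_verbose(3), double(3));
    }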
2 changes: 1 addition & 1 deletion src/main.rs
@@ -5,7 +5,7 @@ mod parser;
 use std::env;
 use std::fs;
 
-const USAGE: &'static str = "
+const USAGE: &str = "
 Usage: cargo run SOURCE_FILE OUTPUT_FILE
 Options:
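Dropping the explicit lifetime is what clippy's redundant_static_lifetimes lint asks for: const and static items always have the 'static lifetime, so spelling it out adds nothing. A standalone sketch with an illustrative name, not from this crate:

    // clippy::redundant_static_lifetimes would flag the explicit lifetime:
    // const GREETING: &'static str = "hello";

    // The 'static lifetime is implied for const items:
    const GREETING: &str = "hello";

    fn main() {
        println!("{}", GREETING);
    }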
72 changes: 21 additions & 51 deletions src/parser.rs
@@ -21,25 +21,15 @@ impl Program {
 
     fn get_tok_precedence(&mut self) -> isize {
         match self.cur_token {
-            Token::Plus => {
-                return 20;
-            }
-            Token::Hyphen => {
-                return 20;
-            }
-            Token::Asterisk => {
-                return 40;
-            }
-            Token::LessThan => {
-                return 10;
-            }
-            _ => {
-                return -1;
-            }
+            Token::Plus => 20,
+            Token::Hyphen => 20,
+            Token::Asterisk => 40,
+            Token::LessThan => 10,
+            _ => -1,
         }
     }
 
-    pub fn new(file_name: &String, input: &str) -> Self {
+    pub fn new(file_name: &str, input: &str) -> Self {
         let tokens = lexer::tokenize(input);
 
         Self {
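The file_name parameter changing from &String to &str matches clippy's ptr_arg lint: a &str parameter accepts both string literals and borrowed Strings, while &String forces callers to have an owned String on hand and adds a needless level of indirection. A standalone sketch with illustrative names, not this crate's API:

    // With &String, callers could only pass &some_string.
    // With &str, both owned Strings and literals work via deref coercion.
    fn greet(name: &str) {
        println!("hello, {}", name);
    }

    fn main() {
        let owned = String::from("world");
        greet(&owned);
        greet("literal");
    }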
@@ -75,9 +65,7 @@ impl Program {
         self.get_next_token();
         let v = self.parse_expression();
 
-        if v.is_none() {
-            return None;
-        }
+        v.as_ref()?;
 
         if self.cur_token != Token::CloseBracket {
             return log_error("expected ')'".to_owned());
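Rewriting if v.is_none() { return None; } as v.as_ref()?; is the shape suggested by clippy's question_mark lint: in a function returning Option, the ? operator performs the same early return of None. Calling as_ref() first means the Option is only borrowed for the check, so the value can still be used afterwards. A minimal standalone sketch of the pattern, with an example function not from this repository:

    fn first_char(s: Option<String>) -> Option<char> {
        // Early-return None via `?` instead of an explicit is_none() check;
        // as_ref() borrows the contents, so s is not moved here.
        s.as_ref()?;
        s.unwrap().chars().next()
    }

    fn main() {
        assert_eq!(first_char(Some(String::from("abc"))), Some('a'));
        assert_eq!(first_char(None), None);
    }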
@@ -135,18 +123,10 @@ impl Program {
     pub fn parse_primary(&mut self) -> Option<Box<ExprAST>> {
         let k = self.cur_token.clone();
         match k {
-            Token::Ident(e) => {
-                return self.parse_identifier_expr(e.clone());
-            }
-            Token::IntNumber(num) => {
-                return self.parse_number_expr(num as f64);
-            }
-            Token::FloatNumber(num) => {
-                return self.parse_number_expr(num);
-            }
-            Token::OpenCurly => {
-                return self.parse_paren_expr();
-            }
+            Token::Ident(e) => self.parse_identifier_expr(e),
+            Token::IntNumber(num) => self.parse_number_expr(num as f64),
+            Token::FloatNumber(num) => self.parse_number_expr(num),
+            Token::OpenCurly => self.parse_paren_expr(),
             _ => {
                 log_error("unknown token when expecting an expression".to_owned());
                 None
@@ -157,11 +137,9 @@ impl Program {
     pub fn parse_expression(&mut self) -> Option<Box<ExprAST>> {
         let lhs = self.parse_primary();
 
-        if lhs.is_none() {
-            return None;
-        }
+        lhs.as_ref()?;
 
-        return self.parse_bin_op_rhs(0, lhs);
+        self.parse_bin_op_rhs(0, lhs)
     }
 
     pub fn parse_bin_op_rhs(
@@ -179,18 +157,14 @@ impl Program {
             self.get_next_token();
 
             let mut rhs = self.parse_primary();
-            if rhs.is_none() {
-                return None;
-            }
+            rhs.as_ref()?;
 
             let next_prec = self.get_tok_precedence();
 
             if tok_prec < next_prec {
                 rhs = self.parse_bin_op_rhs(tok_prec + 1, rhs);
 
-                if rhs.is_none() {
-                    return None;
-                }
+                rhs.as_ref()?;
             }
 
             lhs = Some(Box::new(BinaryExpr(BinaryExprAST {
@@ -230,25 +204,21 @@
         self.get_next_token();
 
         Some(Box::new(PrototypeAST {
-            name: fn_name.to_owned(),
+            name: fn_name,
             args: arg_names,
         }))
     }
 
     pub fn parse_definition(&mut self) -> Option<Box<FunctionAST>> {
         self.get_next_token();
         let proto = self.parse_prototype();
-        if proto.is_none() {
-            return None;
-        }
+        proto.as_ref()?;
 
         match self.parse_expression() {
-            Some(e) => {
-                return Some(Box::new(FunctionAST {
-                    prototype: proto.unwrap(),
-                    body: e,
-                }));
-            }
+            Some(e) => Some(Box::new(FunctionAST {
+                prototype: proto.unwrap(),
+                body: e,
+            })),
             _ => None,
         }
     }
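One more change worth noting: name: fn_name.to_owned() became name: fn_name. Assuming fn_name is already an owned String, which the change implies since the field still type-checks, the to_owned() call was cloning a value that could simply be moved into the struct. A sketch with illustrative types, not this crate's real AST:

    struct Proto {
        name: String,
    }

    fn make_proto(fn_name: String) -> Proto {
        // fn_name.to_owned() would clone a String we already own;
        // moving it in directly avoids the extra allocation.
        Proto { name: fn_name }
    }

    fn main() {
        let p = make_proto(String::from("fib"));
        println!("{}", p.name);
    }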
