diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..1e6979e --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,130 @@ +use crate::units::Unit; +pub mod units; +use std::time::{Instant}; +use decimal::d128; + +#[derive(Clone, Debug)] +pub enum Operator { + Plus, + Minus, + Multiply, + Divide, + Modulo, + Caret, + LeftParen, // lexer only + RightParen, // lexer only +} + +#[derive(Clone, Debug)] +pub enum UnaryOperator { + Percent, + Factorial, +} + +#[derive(Clone, Debug)] +pub enum TextOperator { + To, + Of, +} + +#[derive(Clone, Debug)] +pub enum Constant { + Pi, + E, +} + +#[derive(Clone, Debug)] +pub enum FunctionIdentifier { + Sqrt, + Cbrt, + + Log, + Ln, + Exp, + + Round, + Ceil, + Floor, + Abs, + + Sin, + Cos, + Tan, +} + +#[derive(Clone, Debug)] +pub enum LexerKeyword { + Per, + PercentChar, + In, + DoubleQuotes, + Mercury, + Hg, + PoundForce, + PoundWord, + Force, +} + +#[derive(Clone, Debug)] +pub enum Token { + Operator(Operator), + UnaryOperator(UnaryOperator), + Number(d128), + FunctionIdentifier(FunctionIdentifier), + Constant(Constant), + Paren, // parser only + Per, // lexer only + LexerKeyword(LexerKeyword), + TextOperator(TextOperator), + Negative, // parser only + Unit(units::Unit), +} + +pub type TokenVector = Vec<Token>; + +mod lexer; +mod parser; +mod evaluator; +mod lookup; + +pub fn eval(input: &str, allow_trailing_operators: bool, default_degree: Unit, debug: bool) -> Result<evaluator::Answer, String> { + + let lex_start = Instant::now(); + + match lexer::lex(input, allow_trailing_operators, default_degree) { + Ok(tokens) => { + let lex_time = Instant::now().duration_since(lex_start).as_nanos() as f32; + + let parse_start = Instant::now(); + match parser::parse(&tokens) { + Ok(ast) => { + let parse_time = Instant::now().duration_since(parse_start).as_nanos() as f32; + + let eval_start = Instant::now(); + match evaluator::evaluate(&ast) { + Ok(answer) => { + let eval_time = Instant::now().duration_since(eval_start).as_nanos() as f32; + + if debug == true { + 
println!("Lexed TokenVector: {:?}", tokens); + println!("Parsed AstNode: {:#?}", ast); + println!("Evaluated value: {} {:?}", answer.value, answer.unit); + println!("\u{23f1} {:.3}ms lexing", lex_time/1000.0/1000.0); + println!("\u{23f1} {:.3}ms parsing", parse_time/1000.0/1000.0); + println!("\u{23f1} {:.3}ms evaluation", eval_time/1000.0/1000.0); + } + + return Ok(answer) + }, + Err(e) => Err(format!("Eval error: {}", e)), + } + + }, + Err(e) => Err(format!("Parsing error: {}", e)), + } + + }, + Err(e) => Err(format!("Lexing error: {}", e)), + } + +} diff --git a/src/main.rs b/src/main.rs index 2c44768..0bf39cf 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,92 +1,5 @@ -use crate::units::Unit; -use std::time::{Instant}; -use decimal::d128; - -#[derive(Clone, Debug)] -pub enum Operator { - Plus, - Minus, - Multiply, - Divide, - Modulo, - Caret, - LeftParen, // lexer only - RightParen, // lexer only -} - -#[derive(Clone, Debug)] -pub enum UnaryOperator { - Percent, - Factorial, -} - -#[derive(Clone, Debug)] -pub enum TextOperator { - To, - Of, -} - -#[derive(Clone, Debug)] -pub enum Constant { - Pi, - E, -} - -#[derive(Clone, Debug)] -pub enum FunctionIdentifier { - Sqrt, - Cbrt, - - Log, - Ln, - Exp, - - Round, - Ceil, - Floor, - Abs, - - Sin, - Cos, - Tan, -} - -#[derive(Clone, Debug)] -pub enum LexerKeyword { - Per, - PercentChar, - In, - DoubleQuotes, - Mercury, - Hg, - PoundForce, - PoundWord, - Force, -} - -mod units; - -#[derive(Clone, Debug)] -pub enum Token { - Operator(Operator), - UnaryOperator(UnaryOperator), - Number(d128), - FunctionIdentifier(FunctionIdentifier), - Constant(Constant), - Paren, // parser only - Per, // lexer only - LexerKeyword(LexerKeyword), - TextOperator(TextOperator), - Negative, // parser only - Unit(units::Unit), -} - -pub type TokenVector = Vec<Token>; - -mod lexer; -mod parser; -mod evaluator; -mod lookup; +use cpc::eval; +use cpc::units::Unit; fn main() { use std::env; @@ -110,45 +23,3 @@ fn main() { println!("No argument 
supplied"); } } - -pub fn eval(input: &str, allow_trailing_operators: bool, default_degree: Unit, debug: bool) -> Result<evaluator::Answer, String> { - - let lex_start = Instant::now(); - - match lexer::lex(input, allow_trailing_operators, default_degree) { - Ok(tokens) => { - let lex_time = Instant::now().duration_since(lex_start).as_nanos() as f32; - - let parse_start = Instant::now(); - match parser::parse(&tokens) { - Ok(ast) => { - let parse_time = Instant::now().duration_since(parse_start).as_nanos() as f32; - - let eval_start = Instant::now(); - match evaluator::evaluate(&ast) { - Ok(answer) => { - let eval_time = Instant::now().duration_since(eval_start).as_nanos() as f32; - - if debug == true { - println!("Lexed TokenVector: {:?}", tokens); - println!("Parsed AstNode: {:#?}", ast); - println!("Evaluated value: {} {:?}", answer.value, answer.unit); - println!("\u{23f1} {:.3}ms lexing", lex_time/1000.0/1000.0); - println!("\u{23f1} {:.3}ms parsing", parse_time/1000.0/1000.0); - println!("\u{23f1} {:.3}ms evaluation", eval_time/1000.0/1000.0); - } - - return Ok(answer) - }, - Err(e) => Err(format!("Eval error: {}", e)), - } - - }, - Err(e) => Err(format!("Parsing error: {}", e)), - } - - }, - Err(e) => Err(format!("Lexing error: {}", e)), - } - -}