Re-add PoundForce and NewtonMeter parsing

This commit is contained in:
Kasper 2021-07-05 17:49:08 +02:00
parent eaaee87b2c
commit c34a21afe1

View File

@@ -64,7 +64,7 @@ pub fn read_word(first_c: &str, lexer: &mut Lexer) -> String {
break; break;
} }
} }
let mut word = first_c.to_owned(); let mut word = first_c.trim().to_owned();
while let Some(next_char) = chars.peek() { while let Some(next_char) = chars.peek() {
if is_alphabetic_extended_str(&next_char) { if is_alphabetic_extended_str(&next_char) {
word += chars.next().unwrap(); word += chars.next().unwrap();
@@ -335,27 +335,25 @@ pub fn parse_word(word: &str, lexer: &mut Lexer) -> Result<(), String> {
"oz" | "ounces" => Token::Unit(Ounce), "oz" | "ounces" => Token::Unit(Ounce),
"lb" | "lbs" => Token::Unit(Pound), "lb" | "lbs" => Token::Unit(Pound),
"pound" | "pounds" => { "pound" | "pounds" => {
todo!(); match lexer.chars.next() {
// if chars.peek() == Some(&"-") { Some("-") => {
// let dash_chars_iter = chars.clone(); match read_word_plain(&mut lexer.chars).as_str() {
// dash_chars_iter.next(); "force" => Token::LexerKeyword(PoundForce),
// match read_word_plain(dash_chars_iter).as_str() { other => {
// "force" => { lexer.tokens.push(Token::Unit(Pound));
lexer.tokens.push(Token::Operator(Minus));
// } parse_token(&other, lexer)?;
// } return Ok(());
// chars.next(); }
// match read_word_plain(chars).as_str() { }
// "force" => Token::LexerKeyword(PoundForce), },
// string => return Err(format!("Invalid string: {}", string)), Some(c) => {
// } lexer.tokens.push(Token::Unit(Pound));
// match read_word(chars).as_str() { parse_token(c, lexer)?;
// "force" => Token::LexerKeyword(PoundForce), return Ok(());
// string => return Err(format!("Invalid string: {}", string)), },
// } None => return Ok(()),
// } else { }
// Token::Unit(Pound)
// }
}, },
"stone" | "stones" => Token::Unit(Stone), "stone" | "stones" => Token::Unit(Stone),
"st" | "ton" | "tons" => Token::Unit(ShortTon), "st" | "ton" | "tons" => Token::Unit(ShortTon),
@@ -412,9 +410,21 @@ pub fn parse_word(word: &str, lexer: &mut Lexer) -> Result<(), String> {
"j"| "joule" | "joules" => Token::Unit(Joule), "j"| "joule" | "joules" => Token::Unit(Joule),
"nm" => Token::Unit(NewtonMeter), "nm" => Token::Unit(NewtonMeter),
"newton" => { "newton" => {
todo!(); match lexer.chars.next() {
// "-meter" | "-meters" | "metre" | "metres" => Token::Unit(NewtonMeter), Some("-") => {
// "meter" | "meters" | "metre" | "metres" => Token::Unit(NewtonMeter), match read_word_plain(&mut lexer.chars).as_str() {
"meter" | "meters" | "metre" | "metres" => Token::Unit(NewtonMeter),
string => return Err(format!("Invalid string: {}", string)),
}
},
Some(c) => {
match read_word(c, lexer).as_str() {
"meter" | "meters" | "metre" | "metres" => Token::Unit(NewtonMeter),
string => return Err(format!("Invalid string: {}", string)),
}
},
None => return Err(format!("Invalid string: {}", word)),
}
}, },
"kj" | "kilojoule" | "kilojoules" => Token::Unit(Kilojoule), "kj" | "kilojoule" | "kilojoules" => Token::Unit(Kilojoule),
"mj" | "megajoule" | "megajoules" => Token::Unit(Megajoule), "mj" | "megajoule" | "megajoules" => Token::Unit(Megajoule),