Fixed lexing of pound

This commit is contained in:
Kasper 2020-11-26 18:22:17 +01:00
parent 47094869dc
commit 7826834447
2 changed files with 17 additions and 6 deletions

View File

@@ -13,7 +13,7 @@
- Added support for `Energy / Time`
- Fixed dividing a unit by `NoUnit` resulting in `NoUnit`
- Fixed interpreting of `µs`
- Fixed panics caused in Rust `1.48.0` by switching `decimal` dependency to `decimal_fixes_mirror`. - Fixed panics caused in Rust `1.48.0` by switching `decimal` dependency to `decimal_fixes_mirror`
## 1.1.0 - 2020 Nov 14
- Added units of frequency

View File

@@ -66,6 +66,7 @@ pub fn lex(input: &str, allow_trailing_operators: bool, default_degree: Unit) ->
let start_index = byte_index;
// account for chars longer than one byte
let mut end_index = byte_index + current_char.len_utf8() - 1;
while let Some(current_char) = chars.peek() {
// don't loop more than max_word_length:
if end_index >= start_index + max_word_length - 1 {
@@ -261,7 +262,21 @@ pub fn lex(input: &str, allow_trailing_operators: bool, default_degree: Unit) ->
"t" | "tonne" | "tonnes" | "metric ton" | "metric tons" | "metric tonne" | "metric tonnes" => tokens.push(Token::Unit(MetricTon)),
"oz" | "ounces" => tokens.push(Token::Unit(Ounce)),
"lb" | "lbs" | "pounds" => tokens.push(Token::Unit(Pound)),
"pound" => tokens.push(Token::LexerKeyword(PoundWord)), "pound" => {
let str_len = "-force".len();
match input.get(end_index+1..=end_index+str_len) {
Some("-force") => {
tokens.push(Token::LexerKeyword(PoundForce));
for _i in 0..str_len {
chars.next();
}
byte_index += str_len;
},
_ => {
tokens.push(Token::Unit(Pound));
}
}
},
"st" | "ton" | "tons" | "short ton" | "short tons" | "short tonne" | "short tonnes" => tokens.push(Token::Unit(ShortTon)),
"lt" | "long ton" | "long tons" | "long tonne" | "long tonnes" => tokens.push(Token::Unit(LongTon)),
@@ -542,10 +557,6 @@ pub fn lex(input: &str, allow_trailing_operators: bool, default_degree: Unit) ->
(Token::Unit(BritishThermalUnit), Token::LexerKeyword(Per), Token::Unit(Hour)) => {
tokens[token_index-2] = Token::Unit(BritishThermalUnitsPerHour);
},
// pound-force
(Token::LexerKeyword(PoundWord), Token::Operator(Minus), Token::LexerKeyword(Force)) => {
tokens[token_index-2] = Token::LexerKeyword(PoundForce);
},
// lbs/sqin
(Token::LexerKeyword(PoundForce), Token::LexerKeyword(Per), Token::Unit(SquareInch)) => {
tokens[token_index-2] = Token::Unit(PoundsPerSquareInch);