Browse Source

fix addition/subtraction with no space (#235)

pull/245/head
Alexandre GOMES 4 years ago committed by GitHub
parent
commit
448835295a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 86
      src/lib/syntax/lexer.rs

86
src/lib/syntax/lexer.rs

@@ -171,6 +171,20 @@ impl<'a> Lexer<'a> {
/// Look at the next character in the buffer without advancing the cursor.
///
/// Returns `None` once the underlying buffer is exhausted.
fn preview_next(&mut self) -> Option<char> {
    // Dereference inside the closure instead of `.copied()`; the
    // behaviour is identical — peek never consumes the char.
    self.buffer.peek().map(|&c| c)
}
/// Peek at the character `nb_next` positions ahead of the cursor,
/// without consuming any input.
///
/// Returns `None` when `nb_next` is zero. NOTE(review): when fewer
/// than `nb_next` characters remain, this yields the *last* available
/// character rather than `None` — that matches the original loop's
/// behaviour; confirm callers rely on it before tightening.
fn preview_multiple_next(&mut self, nb_next: usize) -> Option<char> {
    // Walk a clone of the iterator so the real cursor stays put:
    // take at most `nb_next` chars and keep the final one seen.
    self.buffer.clone().take(nb_next).last()
}
/// Utility Function, while ``f(char)`` is true, read chars and move curser.
/// All chars are returned as a string
@@ -416,8 +430,19 @@ impl<'a> Lexer<'a> {
break 'digitloop;
}
},
'e' | '+' | '-' => {
buf.push(self.next());
'e' => {
match self.preview_multiple_next(2).unwrap_or_default().to_digit(10) {
Some(0..=9) | None => {
buf.push(self.next());
}
_ => {
break;
}
}
buf.push(self.next());
}
'+' | '-' => {
break;
}
_ if ch.is_digit(10) => {
buf.push(self.next());
@@ -1026,4 +1051,61 @@ mod tests {
TokenData::RegularExpressionLiteral("\\/[^\\/]*\\/*".to_string(), "gmi".to_string())
);
}
/// `1+1` must lex as number, `+` punctuator, number — not a single
/// malformed numeric literal swallowing the `+`.
#[test]
fn test_addition_no_spaces() {
    let mut lx = Lexer::new("1+1");
    lx.lex().expect("failed to lex");
    let expected = [
        TokenData::NumericLiteral(1.0),
        TokenData::Punctuator(Punctuator::Add),
        TokenData::NumericLiteral(1.0),
    ];
    for (idx, want) in expected.iter().enumerate() {
        assert_eq!(lx.tokens[idx].data, *want);
    }
}
/// `1+ 1`: no space on the left of `+` only — the first `1` and the
/// operator must still come out as separate tokens.
#[test]
fn test_addition_no_spaces_left_side() {
    let mut lx = Lexer::new("1+ 1");
    lx.lex().expect("failed to lex");
    let lhs = &lx.tokens[0].data;
    let op = &lx.tokens[1].data;
    let rhs = &lx.tokens[2].data;
    assert_eq!(*lhs, TokenData::NumericLiteral(1.0));
    assert_eq!(*op, TokenData::Punctuator(Punctuator::Add));
    assert_eq!(*rhs, TokenData::NumericLiteral(1.0));
}
/// `1 +1`: no space on the right of `+` only — the operator must not
/// be absorbed into the second numeric literal.
#[test]
fn test_addition_no_spaces_right_side() {
    let mut lx = Lexer::new("1 +1");
    lx.lex().expect("failed to lex");
    assert_eq!(lx.tokens[0].data, TokenData::NumericLiteral(1.0));
    assert_eq!(lx.tokens[1].data, TokenData::Punctuator(Punctuator::Add));
    assert_eq!(lx.tokens[2].data, TokenData::NumericLiteral(1.0));
}
/// `1e2+ 1`: the exponent form on the left must terminate at `+`
/// (which here follows the exponent digits, not part of them).
#[test]
fn test_addition_no_spaces_e_number_left_side() {
    let mut lx = Lexer::new("1e2+ 1");
    lx.lex().expect("failed to lex");
    let expected = [
        TokenData::NumericLiteral(100.0),
        TokenData::Punctuator(Punctuator::Add),
        TokenData::NumericLiteral(1.0),
    ];
    for (idx, want) in expected.iter().enumerate() {
        assert_eq!(lx.tokens[idx].data, *want);
    }
}
/// `1 +1e3`: an exponent-form literal directly after `+` must still
/// lex as operator followed by one numeric token.
#[test]
fn test_addition_no_spaces_e_number_right_side() {
    let mut lx = Lexer::new("1 +1e3");
    lx.lex().expect("failed to lex");
    let op = &lx.tokens[1].data;
    assert_eq!(lx.tokens[0].data, TokenData::NumericLiteral(1.0));
    assert_eq!(*op, TokenData::Punctuator(Punctuator::Add));
    assert_eq!(lx.tokens[2].data, TokenData::NumericLiteral(1e3));
}
/// `1e3+1e11`: exponent literals on both sides of an unspaced `+` —
/// the trickiest case, since `+` can also legally appear inside an
/// exponent (e.g. `1e+3`).
#[test]
fn test_addition_no_spaces_e_number() {
    let mut lx = Lexer::new("1e3+1e11");
    lx.lex().expect("failed to lex");
    // Scientific-notation literals keep the expected values readable.
    assert_eq!(lx.tokens[0].data, TokenData::NumericLiteral(1e3));
    assert_eq!(lx.tokens[1].data, TokenData::Punctuator(Punctuator::Add));
    assert_eq!(lx.tokens[2].data, TokenData::NumericLiteral(1e11));
}
}

Loading…
Cancel
Save