diff --git a/boa/src/syntax/lexer/comment.rs b/boa/src/syntax/lexer/comment.rs
index 9f9c482d6a..88e5238839 100644
--- a/boa/src/syntax/lexer/comment.rs
+++ b/boa/src/syntax/lexer/comment.rs
@@ -31,11 +31,11 @@ impl<R> Tokenizer<R> for SingleLineComment {
 
         // Skip either to the end of the line or to the end of the input
         while let Some(ch) = cursor.peek()? {
-            if ch == b'\n' {
+            if ch == b'\n' || ch == b'\r' {
                 break;
             } else {
                 // Consume char.
-                cursor.next_byte()?.expect("Comment character vansihed");
+                cursor.next_byte()?.expect("Comment character vanished");
             }
         }
         Ok(Token::new(
diff --git a/boa/src/syntax/lexer/tests.rs b/boa/src/syntax/lexer/tests.rs
index accc078c9d..ba7f3202c4 100644
--- a/boa/src/syntax/lexer/tests.rs
+++ b/boa/src/syntax/lexer/tests.rs
@@ -41,6 +41,21 @@ fn check_single_line_comment() {
     expect_tokens(&mut lexer, &expected);
 }
 
+#[test]
+fn check_single_line_comment_with_crlf_ending() {
+    let s1 = "var \r\n//This is a comment\r\ntrue";
+    let mut lexer = Lexer::new(s1.as_bytes());
+
+    let expected = [
+        TokenKind::Keyword(Keyword::Var),
+        TokenKind::LineTerminator,
+        TokenKind::LineTerminator,
+        TokenKind::BooleanLiteral(true),
+    ];
+
+    expect_tokens(&mut lexer, &expected);
+}
+
 #[test]
 fn check_multi_line_comment() {
     let s = "var /* await \n break \n*/ x";