From 4a11ca1773837a2a6f04bf41f4238f1753dabc26 Mon Sep 17 00:00:00 2001 From: Iban Eguia Date: Mon, 7 Feb 2022 21:15:47 +0000 Subject: [PATCH] Fixed #1768 (#1820) This Pull Request fixes/closes #1768. It adds one extra slot for a peeked token in the buffered lexer, since the buffer size calculation didn't take into account that the stream might end just after the last peeked token. The panic was only happening in debug mode, but still, this was wrong. --- boa/src/syntax/parser/cursor/buffered_lexer/mod.rs | 9 +++++---- boa/src/syntax/parser/cursor/buffered_lexer/tests.rs | 8 ++++++++ boa/src/syntax/parser/expression/assignment/mod.rs | 5 +++-- 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/boa/src/syntax/parser/cursor/buffered_lexer/mod.rs b/boa/src/syntax/parser/cursor/buffered_lexer/mod.rs index 573891e16c..66a5b51307 100644 --- a/boa/src/syntax/parser/cursor/buffered_lexer/mod.rs +++ b/boa/src/syntax/parser/cursor/buffered_lexer/mod.rs @@ -17,12 +17,12 @@ const MAX_PEEK_SKIP: usize = 3; /// The fixed size of the buffer used for storing values that are peeked ahead. 
/// /// The size is calculated for a worst case scenario, where we want to peek `MAX_PEEK_SKIP` tokens -/// skipping line terminators: +/// skipping line terminators, and the stream ends just after: /// ```text -/// [\n, B, \n, C, \n, D, \n, E, \n, F] -/// 0 0 1 1 2 2 3 3 4 4 +/// [\n, B, \n, C, \n, D, \n, E, \n, F, None] +/// 0 0 1 1 2 2 3 3 4 4 5 /// ``` -const PEEK_BUF_SIZE: usize = (MAX_PEEK_SKIP + 1) * 2; +const PEEK_BUF_SIZE: usize = (MAX_PEEK_SKIP + 1) * 2 + 1; #[derive(Debug)] pub(super) struct BufferedLexer { @@ -49,6 +49,7 @@ where None::, None::, None::, + None::, ], read_index: 0, write_index: 0, diff --git a/boa/src/syntax/parser/cursor/buffered_lexer/tests.rs b/boa/src/syntax/parser/cursor/buffered_lexer/tests.rs index 57e0fd53dc..b2a301736d 100644 --- a/boa/src/syntax/parser/cursor/buffered_lexer/tests.rs +++ b/boa/src/syntax/parser/cursor/buffered_lexer/tests.rs @@ -278,3 +278,11 @@ fn skip_peeked_terminators() { // End of stream assert!(cur.peek(2, true, &mut interner).unwrap().is_none()); } + +#[test] +fn issue_1768() { + let mut cur = BufferedLexer::from(&b"\n(\nx\n)\n"[..]); + let mut interner = Interner::default(); + + assert!(cur.peek(3, true, &mut interner).unwrap().is_none()); +} diff --git a/boa/src/syntax/parser/expression/assignment/mod.rs b/boa/src/syntax/parser/expression/assignment/mod.rs index 81b67ffd9d..af4bb2eaca 100644 --- a/boa/src/syntax/parser/expression/assignment/mod.rs +++ b/boa/src/syntax/parser/expression/assignment/mod.rs @@ -153,8 +153,9 @@ where .map(Node::ArrowFunctionDecl); } TokenKind::Punctuator(Punctuator::CloseParen) => { - // Need to check if the token after the close paren is an arrow, if so then this is an ArrowFunction - // otherwise it is an expression of the form (b). + // Need to check if the token after the close paren is an + // arrow, if so then this is an ArrowFunction otherwise it + // is an expression of the form (b). if let Some(t) = cursor.peek(3, interner)? 
{ if t.kind() == &TokenKind::Punctuator(Punctuator::Arrow) {