From 06b1c3394dce14ea0a763ded415de3210560bd1b Mon Sep 17 00:00:00 2001
From: raskad <32105367+raskad@users.noreply.github.com>
Date: Mon, 18 Mar 2024 06:36:59 +0100
Subject: [PATCH] Fix invalid syntax errors for allowed uses of `let` as a
 variable name (#3743)

---
 .../parser/statement/declaration/lexical.rs   | 17 +++++++-
 .../src/parser/statement/declaration/mod.rs   |  2 +-
 .../statement/iteration/for_statement.rs      | 42 +++++++++++--------
 core/parser/src/parser/statement/mod.rs       | 21 ++++++----
 4 files changed, 56 insertions(+), 26 deletions(-)

diff --git a/core/parser/src/parser/statement/declaration/lexical.rs b/core/parser/src/parser/statement/declaration/lexical.rs
index a0e27aa6ba..279c5e2738 100644
--- a/core/parser/src/parser/statement/declaration/lexical.rs
+++ b/core/parser/src/parser/statement/declaration/lexical.rs
@@ -8,7 +8,7 @@
 //! [spec]: https://tc39.es/ecma262/#sec-let-and-const-declarations
 
 use crate::{
-    lexer::{Error as LexError, TokenKind},
+    lexer::{Error as LexError, Token, TokenKind},
     parser::{
         cursor::{Cursor, SemicolonResult},
         expression::Initializer,
@@ -123,6 +123,21 @@ where
     }
 }
 
+/// Check if the given token is valid after the `let` keyword of a lexical declaration.
+pub(crate) fn allowed_token_after_let(token: Option<&Token>) -> bool {
+    matches!(
+        token.map(Token::kind),
+        Some(
+            TokenKind::IdentifierName(_)
+                | TokenKind::Keyword((
+                    Keyword::Await | Keyword::Yield | Keyword::Let | Keyword::Async,
+                    _
+                ))
+                | TokenKind::Punctuator(Punctuator::OpenBlock | Punctuator::OpenBracket),
+        )
+    )
+}
+
 /// Parses a binding list.
 ///
 /// It will return an error if a `const` declaration is being parsed and there is no
diff --git a/core/parser/src/parser/statement/declaration/mod.rs b/core/parser/src/parser/statement/declaration/mod.rs
index 51b91228f3..26ac00de33 100644
--- a/core/parser/src/parser/statement/declaration/mod.rs
+++ b/core/parser/src/parser/statement/declaration/mod.rs
@@ -20,7 +20,7 @@ pub(in crate::parser) use self::{
         class_decl::ClassTail, ClassDeclaration, FunctionDeclaration, HoistableDeclaration,
     },
     import::ImportDeclaration,
-    lexical::LexicalDeclaration,
+    lexical::{allowed_token_after_let, LexicalDeclaration},
 };
 use crate::{
     lexer::TokenKind,
diff --git a/core/parser/src/parser/statement/iteration/for_statement.rs b/core/parser/src/parser/statement/iteration/for_statement.rs
index 84488436b8..c505048d00 100644
--- a/core/parser/src/parser/statement/iteration/for_statement.rs
+++ b/core/parser/src/parser/statement/iteration/for_statement.rs
@@ -11,8 +11,11 @@ use crate::{
     lexer::{Error as LexError, TokenKind},
     parser::{
         expression::{AssignmentExpression, Expression},
-        statement::declaration::LexicalDeclaration,
-        statement::{variable::VariableDeclarationList, Statement},
+        statement::{
+            declaration::{allowed_token_after_let, LexicalDeclaration},
+            variable::VariableDeclarationList,
+            Statement,
+        },
         AllowAwait, AllowReturn, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
     },
     source::ReadChar,
@@ -20,6 +23,7 @@ use crate::{
 };
 use ast::{
     declaration::Binding,
+    expression::Identifier,
     operations::{bound_names, var_declared_names},
 };
 use boa_ast::{
@@ -107,7 +111,7 @@ where
             }
         };
 
-        let init = match cursor.peek(0, interner).or_abrupt()?.kind() {
+        let init = match cursor.peek(0, interner).or_abrupt()?.kind().clone() {
             TokenKind::Keyword((Keyword::Var, _)) => {
                 cursor.advance(interner);
                 Some(
@@ -116,20 +120,15 @@ where
                         .into(),
                 )
             }
-            TokenKind::Keyword((Keyword::Let, _)) => Some('exit: {
-                if !cursor.strict() {
-                    if let Some(token) = cursor.peek(1, interner)? {
-                        if token.kind() == &TokenKind::Keyword((Keyword::In, false)) {
-                            cursor.advance(interner);
-                            break 'exit boa_ast::Expression::Identifier(Sym::LET.into()).into();
-                        }
-                    }
-                }
-
-                LexicalDeclaration::new(false, self.allow_yield, self.allow_await, true)
-                    .parse(cursor, interner)?
-                    .into()
-            }),
+            TokenKind::Keyword((Keyword::Let, false))
+                if allowed_token_after_let(cursor.peek(1, interner)?) =>
+            {
+                Some(
+                    LexicalDeclaration::new(false, self.allow_yield, self.allow_await, true)
+                        .parse(cursor, interner)?
+                        .into(),
+                )
+            }
             TokenKind::Keyword((Keyword::Const, _)) => Some(
                 LexicalDeclaration::new(false, self.allow_yield, self.allow_await, true)
                     .parse(cursor, interner)?
@@ -174,6 +173,15 @@ where
                 ));
             }
             (Some(init), TokenKind::Keyword((kw @ (Keyword::In | Keyword::Of), false))) => {
+                if kw == &Keyword::Of
+                    && init
+                        == ForLoopInitializer::Expression(ast::Expression::Identifier(
+                            Identifier::new(Sym::LET),
+                        ))
+                {
+                    return Err(Error::general("unexpected token", position));
+                }
+
                 let in_loop = kw == &Keyword::In;
                 let init = initializer_to_iterable_loop_initializer(
                     init,
diff --git a/core/parser/src/parser/statement/mod.rs b/core/parser/src/parser/statement/mod.rs
index ac4352ea57..cd4f3e4ca0 100644
--- a/core/parser/src/parser/statement/mod.rs
+++ b/core/parser/src/parser/statement/mod.rs
@@ -26,7 +26,7 @@ use self::{
     block::BlockStatement,
     break_stm::BreakStatement,
     continue_stm::ContinueStatement,
-    declaration::{Declaration, ExportDeclaration, ImportDeclaration},
+    declaration::{allowed_token_after_let, Declaration, ExportDeclaration, ImportDeclaration},
     expression::ExpressionStatement,
     if_stm::IfStatement,
     iteration::{DoWhileStatement, ForStatement, WhileStatement},
@@ -412,12 +412,19 @@ where
         let _timer = Profiler::global().start_event("StatementListItem", "Parsing");
 
         let tok = cursor.peek(0, interner).or_abrupt()?;
-        match *tok.kind() {
-            TokenKind::Keyword(
-                (Keyword::Function | Keyword::Class | Keyword::Const, _) | (Keyword::Let, false),
-            ) => Declaration::new(self.allow_yield, self.allow_await)
-                .parse(cursor, interner)
-                .map(ast::StatementListItem::from),
+        match tok.kind().clone() {
+            TokenKind::Keyword((Keyword::Function | Keyword::Class | Keyword::Const, _)) => {
+                Declaration::new(self.allow_yield, self.allow_await)
+                    .parse(cursor, interner)
+                    .map(ast::StatementListItem::from)
+            }
+            TokenKind::Keyword((Keyword::Let, false))
+                if allowed_token_after_let(cursor.peek(1, interner)?) =>
+            {
+                Declaration::new(self.allow_yield, self.allow_await)
+                    .parse(cursor, interner)
+                    .map(ast::StatementListItem::from)
+            }
             TokenKind::Keyword((Keyword::Async, false)) => {
                 let skip_n = if cursor.peek_is_line_terminator(0, interner).or_abrupt()? {
                     2
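
Illustrative note (not part of the patch): the new `allowed_token_after_let` helper encodes the ECMAScript rule that, outside strict mode, `let` only begins a lexical declaration when the next token can start a binding (an identifier name, `await`, `yield`, `let`, `async`, `{`, or `[`). Otherwise the parsers fall back to treating `let` as an ordinary identifier, so sources such as `let;` or `let.x = 1;` now parse, `for (let in obj) {}` keeps working, and `for (let of xs) {}` is still rejected, now via the explicit check added to `ForStatement`. The standalone sketch below mirrors that lookahead dispatch with simplified stand-in enums; `Keyword`, `Punctuator`, and `TokenKind` here are illustrative, not boa's real lexer types.

// Standalone sketch of the lookahead dispatch; compile with `rustc sketch.rs`.
#[allow(dead_code)]
enum Keyword {
    Await,
    Yield,
    Let,
    Async,
}

#[allow(dead_code)]
enum Punctuator {
    OpenBlock,   // `{`, starts an object binding pattern
    OpenBracket, // `[`, starts an array binding pattern
    Dot,
    Semicolon,
}

#[allow(dead_code)]
enum TokenKind {
    IdentifierName,
    Keyword(Keyword),
    Punctuator(Punctuator),
}

// Mirrors the patch's `allowed_token_after_let`: `let` begins a lexical declaration
// only if the peeked token can start a binding; otherwise `let` is left for the
// expression parser to handle as a plain identifier.
fn allowed_token_after_let(token: Option<&TokenKind>) -> bool {
    matches!(
        token,
        Some(
            TokenKind::IdentifierName
                | TokenKind::Keyword(
                    Keyword::Await | Keyword::Yield | Keyword::Let | Keyword::Async
                )
                | TokenKind::Punctuator(Punctuator::OpenBlock | Punctuator::OpenBracket)
        )
    )
}

fn main() {
    // `let x = 1;` and `let [a] = b;` start lexical declarations.
    assert!(allowed_token_after_let(Some(&TokenKind::IdentifierName)));
    assert!(allowed_token_after_let(Some(&TokenKind::Punctuator(
        Punctuator::OpenBracket
    ))));
    // `let;` and `let.x = 1;` (sloppy mode) fall back to `let` as an identifier.
    assert!(!allowed_token_after_let(Some(&TokenKind::Punctuator(
        Punctuator::Semicolon
    ))));
    assert!(!allowed_token_after_let(Some(&TokenKind::Punctuator(
        Punctuator::Dot
    ))));
    // End of input right after `let` also takes the identifier path.
    assert!(!allowed_token_after_let(None));
}

Keeping the check as a free predicate over the peeked token is what lets the same lookahead be shared by the `StatementListItem` and `ForStatement` parsers, which is how the patch wires it into statement/mod.rs and for_statement.rs.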