@@ -11,8 +11,11 @@ use crate::{
     lexer::{Error as LexError, TokenKind},
     parser::{
         expression::{AssignmentExpression, Expression},
-        statement::declaration::LexicalDeclaration,
-        statement::{variable::VariableDeclarationList, Statement},
+        statement::{
+            declaration::{allowed_token_after_let, LexicalDeclaration},
+            variable::VariableDeclarationList,
+            Statement,
+        },
         AllowAwait, AllowReturn, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
     },
     source::ReadChar,
@@ -20,6 +23,7 @@ use crate::{
 };
 use ast::{
     declaration::Binding,
+    expression::Identifier,
     operations::{bound_names, var_declared_names},
 };
 use boa_ast::{
@@ -107,7 +111,7 @@ where
             }
         };
 
-        let init = match cursor.peek(0, interner).or_abrupt()?.kind() {
+        let init = match cursor.peek(0, interner).or_abrupt()?.kind().clone() {
             TokenKind::Keyword((Keyword::Var, _)) => {
                 cursor.advance(interner);
                 Some(
@@ -116,20 +120,15 @@ where
                         .into(),
                 )
             }
-            TokenKind::Keyword((Keyword::Let, _)) => Some('exit: {
-                if !cursor.strict() {
-                    if let Some(token) = cursor.peek(1, interner)? {
-                        if token.kind() == &TokenKind::Keyword((Keyword::In, false)) {
-                            cursor.advance(interner);
-                            break 'exit boa_ast::Expression::Identifier(Sym::LET.into()).into();
-                        }
-                    }
-                }
-
-                LexicalDeclaration::new(false, self.allow_yield, self.allow_await, true)
-                    .parse(cursor, interner)?
-                    .into()
-            }),
+            TokenKind::Keyword((Keyword::Let, false))
+                if allowed_token_after_let(cursor.peek(1, interner)?) =>
+            {
+                Some(
+                    LexicalDeclaration::new(false, self.allow_yield, self.allow_await, true)
+                        .parse(cursor, interner)?
+                        .into(),
+                )
+            }
             TokenKind::Keyword((Keyword::Const, _)) => Some(
                 LexicalDeclaration::new(false, self.allow_yield, self.allow_await, true)
                     .parse(cursor, interner)?
@@ -174,6 +173,15 @@ where
                 ));
             }
             (Some(init), TokenKind::Keyword((kw @ (Keyword::In | Keyword::Of), false))) => {
+                if kw == &Keyword::Of
+                    && init
+                        == ForLoopInitializer::Expression(ast::Expression::Identifier(
+                            Identifier::new(Sym::LET),
+                        ))
+                {
+                    return Err(Error::general("unexpected token", position));
+                }
+
                 let in_loop = kw == &Keyword::In;
                 let init = initializer_to_iterable_loop_initializer(
                     init,