
AST nodes now have spans.

feature/node_span
Iban Eguia (Razican) 3 years ago
parent commit dddc15ff5c
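For orientation before the file-by-file diff: every former `Node` enum variant moves into a new `NodeKind` enum, and `Node` becomes a small struct pairing a `NodeKind` with the `Span` it covers in the source. Below is a minimal sketch of that shape, heavily simplified from `boa/src/syntax/ast/node/mod.rs` and `boa/src/syntax/ast/position.rs` as changed in this commit (the real `NodeKind` has dozens of variants, and the real types also derive `Trace`/`Finalize` for the garbage collector):

```rust
use std::num::NonZeroU32;

/// 1-based line/column location in the source (see position.rs below).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Position {
    line_number: NonZeroU32,
    column_number: NonZeroU32,
}

/// Start and end positions covered by a node.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Span {
    start: Position,
    end: Position,
}

/// What used to be the `Node` enum: only the syntactic kind.
#[derive(Debug, Clone, PartialEq)]
pub enum NodeKind {
    Identifier(String),
    // ... the remaining variants, unchanged apart from the rename
}

/// The new `Node`: a kind plus the span it occupies in the source.
#[derive(Debug, Clone, PartialEq)]
pub struct Node {
    kind: NodeKind,
    span: Span,
}

impl Node {
    pub fn new<K: Into<NodeKind>>(kind: K, span: Span) -> Self {
        Self { kind: kind.into(), span }
    }
    pub fn kind(&self) -> &NodeKind {
        &self.kind
    }
    pub fn span(&self) -> Span {
        self.span
    }
}
```

Callers that previously matched on a `Node` directly now match on `node.kind()`, which accounts for most of the mechanical churn in the diffs below.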
65 changed files (changed lines per file in parentheses):

  1. Cargo.lock (44)
  2. boa/src/context.rs (39)
  3. boa/src/syntax/ast/mod.rs (2)
  4. boa/src/syntax/ast/node/array/mod.rs (8)
  5. boa/src/syntax/ast/node/await_expr/mod.rs (4)
  6. boa/src/syntax/ast/node/block/mod.rs (4)
  7. boa/src/syntax/ast/node/break_node/mod.rs (6)
  8. boa/src/syntax/ast/node/call/mod.rs (14)
  9. boa/src/syntax/ast/node/conditional/conditional_op/mod.rs (6)
  10. boa/src/syntax/ast/node/conditional/if_node/mod.rs (6)
  11. boa/src/syntax/ast/node/declaration/arrow_function_decl/mod.rs (8)
  12. boa/src/syntax/ast/node/declaration/async_function_decl/mod.rs (4)
  13. boa/src/syntax/ast/node/declaration/async_function_expr/mod.rs (4)
  14. boa/src/syntax/ast/node/declaration/function_decl/mod.rs (8)
  15. boa/src/syntax/ast/node/declaration/function_expr/mod.rs (8)
  16. boa/src/syntax/ast/node/declaration/mod.rs (10)
  17. boa/src/syntax/ast/node/field/get_const_field/mod.rs (4)
  18. boa/src/syntax/ast/node/field/get_field/mod.rs (4)
  19. boa/src/syntax/ast/node/identifier/mod.rs (4)
  20. boa/src/syntax/ast/node/iteration/continue_node/mod.rs (6)
  21. boa/src/syntax/ast/node/iteration/do_while_loop/mod.rs (4)
  22. boa/src/syntax/ast/node/iteration/for_in_loop/mod.rs (18)
  23. boa/src/syntax/ast/node/iteration/for_loop/mod.rs (4)
  24. boa/src/syntax/ast/node/iteration/for_of_loop/mod.rs (18)
  25. boa/src/syntax/ast/node/iteration/while_loop/mod.rs (4)
  26. boa/src/syntax/ast/node/mod.rs (193)
  27. boa/src/syntax/ast/node/new/mod.rs (6)
  28. boa/src/syntax/ast/node/object/mod.rs (6)
  29. boa/src/syntax/ast/node/operator/assign/mod.rs (12)
  30. boa/src/syntax/ast/node/operator/bin_op/mod.rs (10)
  31. boa/src/syntax/ast/node/operator/unary_op/mod.rs (31)
  32. boa/src/syntax/ast/node/return_smt/mod.rs (8)
  33. boa/src/syntax/ast/node/spread/mod.rs (6)
  34. boa/src/syntax/ast/node/statement_list/mod.rs (62)
  35. boa/src/syntax/ast/node/switch/mod.rs (4)
  36. boa/src/syntax/ast/node/template/mod.rs (29)
  37. boa/src/syntax/ast/node/throw/mod.rs (6)
  38. boa/src/syntax/ast/node/try_node/mod.rs (4)
  39. boa/src/syntax/ast/position.rs (35)
  40. boa/src/syntax/parser/cursor/mod.rs (13)
  41. boa/src/syntax/parser/expression/assignment/arrow_function.rs (28)
  42. boa/src/syntax/parser/expression/assignment/conditional.rs (9)
  43. boa/src/syntax/parser/expression/assignment/exponentiation.rs (9)
  44. boa/src/syntax/parser/expression/assignment/mod.rs (79)
  45. boa/src/syntax/parser/expression/left_hand_side/arguments.rs (37)
  46. boa/src/syntax/parser/expression/left_hand_side/call.rs (35)
  47. boa/src/syntax/parser/expression/left_hand_side/member.rs (43)
  48. boa/src/syntax/parser/expression/left_hand_side/template.rs (13)
  49. boa/src/syntax/parser/expression/primary/array_initializer/mod.rs (34)
  50. boa/src/syntax/parser/expression/primary/async_function_expression/mod.rs (19)
  51. boa/src/syntax/parser/expression/primary/function_expression/mod.rs (19)
  52. boa/src/syntax/parser/expression/primary/mod.rs (114)
  53. boa/src/syntax/parser/expression/primary/object_initializer/mod.rs (37)
  54. boa/src/syntax/parser/expression/primary/template/mod.rs (24)
  55. boa/src/syntax/parser/mod.rs (10)
  56. boa/src/syntax/parser/statement/block/mod.rs (12)
  57. boa/src/syntax/parser/statement/declaration/hoistable/async_function_decl/mod.rs (12)
  58. boa/src/syntax/parser/statement/declaration/hoistable/function_decl/mod.rs (12)
  59. boa/src/syntax/parser/statement/declaration/hoistable/mod.rs (6)
  60. boa/src/syntax/parser/statement/iteration/for_statement.rs (2)
  61. boa/src/syntax/parser/statement/labelled_stm/mod.rs (19)
  62. boa/src/syntax/parser/statement/mod.rs (59)
  63. boa/src/syntax/parser/statement/variable/mod.rs (56)
  64. test262 (2)
  65. yarn.lock (110)

44
Cargo.lock (generated)

@ -1,5 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "Boa"
version = "0.11.0"
@ -237,7 +239,7 @@ dependencies = [
"clap",
"criterion-plot",
"csv",
"itertools 0.10.0",
"itertools 0.10.1",
"lazy_static",
"num-traits",
"oorandom",
@ -499,9 +501,9 @@ dependencies = [
[[package]]
name = "hermit-abi"
version = "0.1.18"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "322f4de77956e22ed0e5032c359a0f1273f1f7f0d79bfa3b8ffbc730d7fbcc5c"
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
dependencies = [
"libc",
]
@ -553,9 +555,9 @@ dependencies = [
[[package]]
name = "itertools"
version = "0.10.0"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37d572918e350e82412fe766d24b15e6682fb2ed2bbe018280caa810397cb319"
checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf"
dependencies = [
"either",
]
@ -613,9 +615,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.95"
version = "0.2.97"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "789da6d93f1b866ffe175afc5322a4d76c038605a1c3319bb57b06967ca98a36"
checksum = "12b8adadd720df158f4d70dfe7ccc6adb0472d7c55ca83445f6a5ab3e36f8fb6"
[[package]]
name = "libgit2-sys"
@ -823,9 +825,9 @@ checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a"
[[package]]
name = "openssl-sys"
version = "0.9.63"
version = "0.9.65"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6b0d6fb7d80f877617dfcb014e605e2b5ab2fb0afdf27935219bb6bd984cb98"
checksum = "7a7907e3bfa08bb85105209cdfcb6c63d109f8f6c1ed6ca318fff5c1853fbc1d"
dependencies = [
"autocfg",
"cc",
@ -989,9 +991,9 @@ dependencies = [
[[package]]
name = "rand_chacha"
version = "0.3.0"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e12735cf05c9e10bf21534da50a147b924d555dc7a547c42e6bb2d5b6017ae0d"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
@ -999,18 +1001,18 @@ dependencies = [
[[package]]
name = "rand_core"
version = "0.6.2"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34cf66eb183df1c5876e2dcf6b13d57340741e8dc255b48e40a26de954d06ae7"
checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
dependencies = [
"getrandom",
]
[[package]]
name = "rand_hc"
version = "0.3.0"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3190ef7066a446f2e7f42e239d161e905420ccab01eb967c9eb27d21b2322a73"
checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7"
dependencies = [
"rand_core",
]
@ -1042,9 +1044,9 @@ dependencies = [
[[package]]
name = "redox_syscall"
version = "0.2.8"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "742739e41cd49414de871ea5e549afb7e2a3ac77b589bcbebe8c82fab37147fc"
checksum = "5ab49abadf3f9e1c4bc499e8845e152ad87d2ad2d30371841171169e9d75feee"
dependencies = [
"bitflags",
]
@ -1282,9 +1284,9 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.72"
version = "1.0.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1e8cdbefb79a9a5a65e0db8b47b723ee907b7c7f8496c76a1770b5c310bab82"
checksum = "f71489ff30030d2ae598524f61326b902466f72a0fb1a8564c001cc63425bcc7"
dependencies = [
"proc-macro2",
"quote",
@ -1416,9 +1418,9 @@ checksum = "936e4b492acfd135421d8dca4b1aa80a7bfc26e702ef3af710e0752684df5372"
[[package]]
name = "vcpkg"
version = "0.2.13"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "025ce40a007e1907e58d5bc1a594def78e5573bb0b1160bc389634e8f12e4faa"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "vec_map"

39
boa/src/context.rs

@ -18,7 +18,7 @@ use crate::{
statement_list::RcStatementList, Call, FormalParameter, Identifier, New,
StatementList,
},
Const, Node,
Const, Node, NodeKind, Span,
},
Parser,
},
@ -336,8 +336,8 @@ impl Context {
{
// Runs a `new RangeError(message)`.
New::from(Call::new(
Identifier::from("RangeError"),
vec![Const::from(message.into()).into()],
Node::new(Identifier::from("RangeError"), Span::default()),
vec![Node::new(Const::from(message.into()), Span::default())],
))
.run(self)
.expect("Into<String> used as message")
@ -360,8 +360,8 @@ impl Context {
{
// Runs a `new TypeError(message)`.
New::from(Call::new(
Identifier::from("TypeError"),
vec![Const::from(message.into()).into()],
Node::new(Identifier::from("TypeError"), Span::default()),
vec![Node::new(Const::from(message.into()), Span::default())],
))
.run(self)
.expect("Into<String> used as message")
@ -383,8 +383,8 @@ impl Context {
M: Into<Box<str>>,
{
New::from(Call::new(
Identifier::from("ReferenceError"),
vec![Const::from(message.into()).into()],
Node::new(Identifier::from("ReferenceError"), Span::default()),
vec![Node::new(Const::from(message.into()), Span::default())],
))
.run(self)
.expect("Into<String> used as message")
@ -406,8 +406,8 @@ impl Context {
M: Into<Box<str>>,
{
New::from(Call::new(
Identifier::from("SyntaxError"),
vec![Const::from(message.into()).into()],
Node::new(Identifier::from("SyntaxError"), Span::default()),
vec![Node::new(Const::from(message.into()), Span::default())],
))
.run(self)
.expect("Into<String> used as message")
@ -428,8 +428,8 @@ impl Context {
M: Into<Box<str>>,
{
New::from(Call::new(
Identifier::from("EvalError"),
vec![Const::from(message.into()).into()],
Node::new(Identifier::from("EvalError"), Span::default()),
vec![Node::new(Const::from(message.into()), Span::default())],
))
.run(self)
.expect("Into<String> used as message")
@ -441,8 +441,8 @@ impl Context {
M: Into<Box<str>>,
{
New::from(Call::new(
Identifier::from("URIError"),
vec![Const::from(message.into()).into()],
Node::new(Identifier::from("URIError"), Span::default()),
vec![Node::new(Const::from(message.into()), Span::default())],
))
.run(self)
.expect("Into<String> used as message")
@ -465,15 +465,14 @@ impl Context {
}
/// Utility to create a function Value for Function Declarations, Arrow Functions or Function Expressions
pub(crate) fn create_function<P, B>(
pub(crate) fn create_function<P>(
&mut self,
params: P,
body: B,
body: StatementList,
flags: FunctionFlags,
) -> Result<Value>
where
P: Into<Box<[FormalParameter]>>,
B: Into<StatementList>,
{
let function_prototype: Value =
self.standard_objects().function_object().prototype().into();
@ -485,7 +484,7 @@ impl Context {
let params_len = params.len();
let func = Function::Ordinary {
flags,
body: RcStatementList::from(body.into()),
body: RcStatementList::from(body),
params,
environment: self.get_current_environment().clone(),
};
@ -549,12 +548,12 @@ impl Context {
#[inline]
pub(crate) fn set_value(&mut self, node: &Node, value: Value) -> Result<Value> {
match node {
Node::Identifier(ref name) => {
match node.kind() {
NodeKind::Identifier(ref name) => {
self.set_mutable_binding(name.as_ref(), value.clone(), true)?;
Ok(value)
}
Node::GetConstField(ref get_const_field_node) => Ok(get_const_field_node
NodeKind::GetConstField(ref get_const_field_node) => Ok(get_const_field_node
.obj()
.run(self)?
.set_field(get_const_field_node.field(), value, false, self)?),

2
boa/src/syntax/ast/mod.rs

@ -10,7 +10,7 @@ pub mod punctuator;
pub use self::{
constant::Const,
keyword::Keyword,
node::Node,
node::{Node, NodeKind},
position::{Position, Span},
punctuator::Punctuator,
};

8
boa/src/syntax/ast/node/array/mod.rs

@ -1,6 +1,6 @@
//! Array declaration node.
use super::{join_nodes, Node};
use super::{join_nodes, Node, NodeKind};
use crate::{
builtins::{iterable, Array},
exec::Executable,
@ -43,8 +43,8 @@ impl Executable for ArrayDecl {
let _timer = BoaProfiler::global().start_event("ArrayDecl", "exec");
let array = Array::new_array(context);
let mut elements = Vec::new();
for elem in self.as_ref() {
if let Node::Spread(ref x) = elem {
for elem in self.arr.iter().map(Node::kind) {
if let NodeKind::Spread(ref x) = elem {
let val = x.run(context)?;
let iterator_record = iterable::get_iterator(context, val)?;
// TODO after proper internal Array representation as per https://github.com/boa-dev/boa/pull/811#discussion_r502460858
@ -92,7 +92,7 @@ impl fmt::Display for ArrayDecl {
}
}
impl From<ArrayDecl> for Node {
impl From<ArrayDecl> for NodeKind {
fn from(arr: ArrayDecl) -> Self {
Self::ArrayDecl(arr)
}

4
boa/src/syntax/ast/node/await_expr/mod.rs

@ -1,6 +1,6 @@
//! Await expression node.
use super::Node;
use super::{Node, NodeKind};
use crate::{exec::Executable, BoaProfiler, Context, Result, Value};
use gc::{Finalize, Trace};
use std::fmt;
@ -50,7 +50,7 @@ impl fmt::Display for AwaitExpr {
}
}
impl From<AwaitExpr> for Node {
impl From<AwaitExpr> for NodeKind {
fn from(awaitexpr: AwaitExpr) -> Self {
Self::AwaitExpr(awaitexpr)
}

4
boa/src/syntax/ast/node/block/mod.rs

@ -1,6 +1,6 @@
//! Block AST node.
use super::{Node, StatementList};
use super::{Node, NodeKind, StatementList};
use crate::{
environment::declarative_environment_record::DeclarativeEnvironmentRecord,
exec::Executable,
@ -119,7 +119,7 @@ impl fmt::Display for Block {
}
}
impl From<Block> for Node {
impl From<Block> for NodeKind {
fn from(block: Block) -> Self {
Self::Block(block)
}

6
boa/src/syntax/ast/node/break_node/mod.rs

@ -1,4 +1,4 @@
use super::Node;
use super::NodeKind;
use crate::{
exec::Executable,
exec::InterpreterState,
@ -75,8 +75,8 @@ impl fmt::Display for Break {
}
}
impl From<Break> for Node {
fn from(break_smt: Break) -> Node {
impl From<Break> for NodeKind {
fn from(break_smt: Break) -> Self {
Self::Break(break_smt)
}
}

14
boa/src/syntax/ast/node/call/mod.rs

@ -3,7 +3,7 @@ use crate::{
exec::Executable,
exec::InterpreterState,
gc::{Finalize, Trace},
syntax::ast::node::{join_nodes, Node},
syntax::ast::node::{join_nodes, Node, NodeKind},
value::{Type, Value},
BoaProfiler, Context, Result,
};
@ -63,8 +63,8 @@ impl Call {
impl Executable for Call {
fn run(&self, context: &mut Context) -> Result<Value> {
let _timer = BoaProfiler::global().start_event("Call", "exec");
let (this, func) = match self.expr() {
Node::GetConstField(ref get_const_field) => {
let (this, func) = match self.expr().kind() {
NodeKind::GetConstField(ref get_const_field) => {
let mut obj = get_const_field.obj().run(context)?;
if obj.get_type() != Type::Object {
obj = Value::Object(obj.to_object(context)?);
@ -74,7 +74,7 @@ impl Executable for Call {
obj.get_field(get_const_field.field(), context)?,
)
}
Node::GetField(ref get_field) => {
NodeKind::GetField(ref get_field) => {
let mut obj = get_field.obj().run(context)?;
if obj.get_type() != Type::Object {
obj = Value::Object(obj.to_object(context)?);
@ -92,8 +92,8 @@ impl Executable for Call {
),
};
let mut v_args = Vec::with_capacity(self.args().len());
for arg in self.args() {
if let Node::Spread(ref x) = arg {
for arg in self.args().iter().map(Node::kind) {
if let NodeKind::Spread(ref x) = arg {
let val = x.run(context)?;
let iterator_record = iterable::get_iterator(context, val)?;
loop {
@ -130,7 +130,7 @@ impl fmt::Display for Call {
}
}
impl From<Call> for Node {
impl From<Call> for NodeKind {
fn from(call: Call) -> Self {
Self::Call(call)
}

6
boa/src/syntax/ast/node/conditional/conditional_op/mod.rs

@ -1,7 +1,7 @@
use crate::{
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
Context, Result, Value,
};
use std::fmt;
@ -81,8 +81,8 @@ impl fmt::Display for ConditionalOp {
}
}
impl From<ConditionalOp> for Node {
fn from(cond_op: ConditionalOp) -> Node {
impl From<ConditionalOp> for NodeKind {
fn from(cond_op: ConditionalOp) -> Self {
Self::ConditionalOp(cond_op)
}
}

6
boa/src/syntax/ast/node/conditional/if_node/mod.rs

@ -1,7 +1,7 @@
use crate::{
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
Context, Result, Value,
};
use std::fmt;
@ -96,8 +96,8 @@ impl fmt::Display for If {
}
}
impl From<If> for Node {
fn from(if_stm: If) -> Node {
impl From<If> for NodeKind {
fn from(if_stm: If) -> Self {
Self::If(if_stm)
}
}

8
boa/src/syntax/ast/node/declaration/arrow_function_decl/mod.rs

@ -2,7 +2,7 @@ use crate::{
builtins::function::FunctionFlags,
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::{join_nodes, FormalParameter, Node, StatementList},
syntax::ast::node::{join_nodes, FormalParameter, Node, NodeKind, StatementList},
Context, Result, Value,
};
use std::fmt;
@ -74,8 +74,8 @@ impl ArrowFunctionDecl {
impl Executable for ArrowFunctionDecl {
fn run(&self, context: &mut Context) -> Result<Value> {
context.create_function(
self.params().to_vec(),
self.body().to_vec(),
self.params.clone(),
self.body.clone(),
FunctionFlags::CALLABLE
| FunctionFlags::CONSTRUCTABLE
| FunctionFlags::LEXICAL_THIS_MODE,
@ -89,7 +89,7 @@ impl fmt::Display for ArrowFunctionDecl {
}
}
impl From<ArrowFunctionDecl> for Node {
impl From<ArrowFunctionDecl> for NodeKind {
fn from(decl: ArrowFunctionDecl) -> Self {
Self::ArrowFunctionDecl(decl)
}

4
boa/src/syntax/ast/node/declaration/async_function_decl/mod.rs

@ -2,7 +2,7 @@
use crate::{
exec::Executable,
syntax::ast::node::{join_nodes, FormalParameter, Node, StatementList},
syntax::ast::node::{join_nodes, FormalParameter, Node, NodeKind, StatementList},
BoaProfiler, Context, Result, Value,
};
use gc::{Finalize, Trace};
@ -86,7 +86,7 @@ impl Executable for AsyncFunctionDecl {
}
}
impl From<AsyncFunctionDecl> for Node {
impl From<AsyncFunctionDecl> for NodeKind {
fn from(decl: AsyncFunctionDecl) -> Self {
Self::AsyncFunctionDecl(decl)
}

4
boa/src/syntax/ast/node/declaration/async_function_expr/mod.rs

@ -2,7 +2,7 @@
use crate::{
exec::Executable,
syntax::ast::node::{join_nodes, FormalParameter, Node, StatementList},
syntax::ast::node::{join_nodes, FormalParameter, Node, NodeKind, StatementList},
Context, Result, Value,
};
use gc::{Finalize, Trace};
@ -93,7 +93,7 @@ impl fmt::Display for AsyncFunctionExpr {
}
}
impl From<AsyncFunctionExpr> for Node {
impl From<AsyncFunctionExpr> for NodeKind {
fn from(expr: AsyncFunctionExpr) -> Self {
Self::AsyncFunctionExpr(expr)
}

8
boa/src/syntax/ast/node/declaration/function_decl/mod.rs

@ -3,7 +3,7 @@ use crate::{
environment::lexical_environment::VariableScope,
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::{join_nodes, FormalParameter, Node, StatementList},
syntax::ast::node::{join_nodes, FormalParameter, Node, NodeKind, StatementList},
BoaProfiler, Context, Result, Value,
};
use std::fmt;
@ -89,8 +89,8 @@ impl Executable for FunctionDecl {
fn run(&self, context: &mut Context) -> Result<Value> {
let _timer = BoaProfiler::global().start_event("FunctionDecl", "exec");
let val = context.create_function(
self.parameters().to_vec(),
self.body().to_vec(),
self.parameters.clone(),
self.body.clone(),
FunctionFlags::CALLABLE | FunctionFlags::CONSTRUCTABLE,
)?;
@ -112,7 +112,7 @@ impl Executable for FunctionDecl {
}
}
impl From<FunctionDecl> for Node {
impl From<FunctionDecl> for NodeKind {
fn from(decl: FunctionDecl) -> Self {
Self::FunctionDecl(decl)
}

8
boa/src/syntax/ast/node/declaration/function_expr/mod.rs

@ -2,7 +2,7 @@ use crate::{
builtins::function::FunctionFlags,
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::{join_nodes, FormalParameter, Node, StatementList},
syntax::ast::node::{join_nodes, FormalParameter, Node, NodeKind, StatementList},
Context, Result, Value,
};
use std::fmt;
@ -100,8 +100,8 @@ impl FunctionExpr {
impl Executable for FunctionExpr {
fn run(&self, context: &mut Context) -> Result<Value> {
let val = context.create_function(
self.parameters().to_vec(),
self.body().to_vec(),
self.parameters.clone(),
self.body.clone(),
FunctionFlags::CALLABLE | FunctionFlags::CONSTRUCTABLE,
)?;
@ -119,7 +119,7 @@ impl fmt::Display for FunctionExpr {
}
}
impl From<FunctionExpr> for Node {
impl From<FunctionExpr> for NodeKind {
fn from(expr: FunctionExpr) -> Self {
Self::FunctionExpr(expr)
}

10
boa/src/syntax/ast/node/declaration/mod.rs

@ -3,7 +3,7 @@ use crate::{
environment::lexical_environment::VariableScope,
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::{join_nodes, Identifier, Node},
syntax::ast::node::{join_nodes, Identifier, Node, NodeKind},
Context, Result, Value,
};
use std::fmt;
@ -171,13 +171,13 @@ impl fmt::Display for DeclarationList {
}
}
impl From<DeclarationList> for Node {
impl From<DeclarationList> for NodeKind {
fn from(list: DeclarationList) -> Self {
use DeclarationList::*;
match &list {
Let(_) => Node::LetDeclList(list),
Const(_) => Node::ConstDeclList(list),
Var(_) => Node::VarDeclList(list),
Let(_) => Self::LetDeclList(list),
Const(_) => Self::ConstDeclList(list),
Var(_) => Self::VarDeclList(list),
}
}
}

4
boa/src/syntax/ast/node/field/get_const_field/mod.rs

@ -1,7 +1,7 @@
use crate::{
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
value::{Type, Value},
Context, Result,
};
@ -79,7 +79,7 @@ impl fmt::Display for GetConstField {
}
}
impl From<GetConstField> for Node {
impl From<GetConstField> for NodeKind {
fn from(get_const_field: GetConstField) -> Self {
Self::GetConstField(get_const_field)
}

4
boa/src/syntax/ast/node/field/get_field/mod.rs

@ -1,7 +1,7 @@
use crate::{
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
value::{Type, Value},
Context, Result,
};
@ -79,7 +79,7 @@ impl fmt::Display for GetField {
}
}
impl From<GetField> for Node {
impl From<GetField> for NodeKind {
fn from(get_field: GetField) -> Self {
Self::GetField(get_field)
}

4
boa/src/syntax/ast/node/identifier/mod.rs

@ -3,7 +3,7 @@
use crate::{
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::NodeKind,
BoaProfiler, Context, Result, Value,
};
use std::fmt;
@ -73,7 +73,7 @@ where
}
}
impl From<Identifier> for Node {
impl From<Identifier> for NodeKind {
fn from(local: Identifier) -> Self {
Self::Identifier(local)
}

6
boa/src/syntax/ast/node/iteration/continue_node/mod.rs

@ -1,7 +1,7 @@
use crate::{
exec::{Executable, InterpreterState},
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::NodeKind,
Context, Result, Value,
};
use std::fmt;
@ -65,8 +65,8 @@ impl fmt::Display for Continue {
}
}
impl From<Continue> for Node {
fn from(cont: Continue) -> Node {
impl From<Continue> for NodeKind {
fn from(cont: Continue) -> Self {
Self::Continue(cont)
}
}

4
boa/src/syntax/ast/node/iteration/do_while_loop/mod.rs

@ -1,7 +1,7 @@
use crate::{
exec::{Executable, InterpreterState},
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
Context, Result, Value,
};
use std::fmt;
@ -109,7 +109,7 @@ impl fmt::Display for DoWhileLoop {
}
}
impl From<DoWhileLoop> for Node {
impl From<DoWhileLoop> for NodeKind {
fn from(do_while: DoWhileLoop) -> Self {
Self::DoWhileLoop(do_while)
}

18
boa/src/syntax/ast/node/iteration/for_in_loop/mod.rs

@ -6,7 +6,7 @@ use crate::{
},
exec::{Executable, InterpreterState},
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
BoaProfiler, Context, Result, Value,
};
use std::fmt;
@ -73,8 +73,8 @@ impl fmt::Display for ForInLoop {
}
}
impl From<ForInLoop> for Node {
fn from(for_in: ForInLoop) -> Node {
impl From<ForInLoop> for NodeKind {
fn from(for_in: ForInLoop) -> Self {
Self::ForInLoop(for_in)
}
}
@ -108,8 +108,8 @@ impl Executable for ForInLoop {
}
let next_result = iterator_result.value();
match self.variable() {
Node::Identifier(ref name) => {
match self.variable().kind() {
NodeKind::Identifier(ref name) => {
if context.has_binding(name.as_ref()) {
// Binding already exists
context.set_mutable_binding(name.as_ref(), next_result.clone(), true)?;
@ -122,7 +122,7 @@ impl Executable for ForInLoop {
context.initialize_binding(name.as_ref(), next_result.clone())?;
}
}
Node::VarDeclList(ref list) => match list.as_ref() {
NodeKind::VarDeclList(ref list) => match list.as_ref() {
[var] => {
if var.init().is_some() {
return context.throw_syntax_error("a declaration in the head of a for-in loop can't have an initializer");
@ -145,7 +145,7 @@ impl Executable for ForInLoop {
)
}
},
Node::LetDeclList(ref list) => match list.as_ref() {
NodeKind::LetDeclList(ref list) => match list.as_ref() {
[var] => {
if var.init().is_some() {
return context.throw_syntax_error("a declaration in the head of a for-in loop can't have an initializer");
@ -164,7 +164,7 @@ impl Executable for ForInLoop {
)
}
},
Node::ConstDeclList(ref list) => match list.as_ref() {
NodeKind::ConstDeclList(ref list) => match list.as_ref() {
[var] => {
if var.init().is_some() {
return context.throw_syntax_error("a declaration in the head of a for-in loop can't have an initializer");
@ -183,7 +183,7 @@ impl Executable for ForInLoop {
)
}
},
Node::Assign(_) => {
NodeKind::Assign(_) => {
return context.throw_syntax_error(
"a declaration in the head of a for-in loop can't have an initializer",
);

4
boa/src/syntax/ast/node/iteration/for_loop/mod.rs

@ -2,7 +2,7 @@ use crate::{
environment::declarative_environment_record::DeclarativeEnvironmentRecord,
exec::{Executable, InterpreterState},
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
BoaProfiler, Context, Result, Value,
};
use std::fmt;
@ -155,7 +155,7 @@ impl fmt::Display for ForLoop {
}
}
impl From<ForLoop> for Node {
impl From<ForLoop> for NodeKind {
fn from(for_loop: ForLoop) -> Self {
Self::ForLoop(for_loop)
}

18
boa/src/syntax/ast/node/iteration/for_of_loop/mod.rs

@ -6,7 +6,7 @@ use crate::{
},
exec::{Executable, InterpreterState},
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
BoaProfiler, Context, Result, Value,
};
use std::fmt;
@ -73,8 +73,8 @@ impl fmt::Display for ForOfLoop {
}
}
impl From<ForOfLoop> for Node {
fn from(for_of: ForOfLoop) -> Node {
impl From<ForOfLoop> for NodeKind {
fn from(for_of: ForOfLoop) -> Self {
Self::ForOfLoop(for_of)
}
}
@ -98,8 +98,8 @@ impl Executable for ForOfLoop {
}
let next_result = iterator_result.value();
match self.variable() {
Node::Identifier(ref name) => {
match self.variable().kind() {
NodeKind::Identifier(ref name) => {
if context.has_binding(name.as_ref()) {
// Binding already exists
context.set_mutable_binding(name.as_ref(), next_result.clone(), true)?;
@ -112,7 +112,7 @@ impl Executable for ForOfLoop {
context.initialize_binding(name.as_ref(), next_result.clone())?;
}
}
Node::VarDeclList(ref list) => match list.as_ref() {
NodeKind::VarDeclList(ref list) => match list.as_ref() {
[var] => {
if var.init().is_some() {
return context.throw_syntax_error("a declaration in the head of a for-of loop can't have an initializer");
@ -135,7 +135,7 @@ impl Executable for ForOfLoop {
)
}
},
Node::LetDeclList(ref list) => match list.as_ref() {
NodeKind::LetDeclList(ref list) => match list.as_ref() {
[var] => {
if var.init().is_some() {
return context.throw_syntax_error("a declaration in the head of a for-of loop can't have an initializer");
@ -155,7 +155,7 @@ impl Executable for ForOfLoop {
)
}
},
Node::ConstDeclList(ref list) => match list.as_ref() {
NodeKind::ConstDeclList(ref list) => match list.as_ref() {
[var] => {
if var.init().is_some() {
return context.throw_syntax_error("a declaration in the head of a for-of loop can't have an initializer");
@ -174,7 +174,7 @@ impl Executable for ForOfLoop {
)
}
},
Node::Assign(_) => {
NodeKind::Assign(_) => {
return context.throw_syntax_error(
"a declaration in the head of a for-of loop can't have an initializer",
);

4
boa/src/syntax/ast/node/iteration/while_loop/mod.rs

@ -1,7 +1,7 @@
use crate::{
exec::{Executable, InterpreterState},
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
Context, Result, Value,
};
use std::fmt;
@ -104,7 +104,7 @@ impl fmt::Display for WhileLoop {
}
}
impl From<WhileLoop> for Node {
impl From<WhileLoop> for NodeKind {
fn from(while_loop: WhileLoop) -> Self {
Self::WhileLoop(while_loop)
}

193
boa/src/syntax/ast/node/mod.rs

@ -46,7 +46,7 @@ pub use self::{
throw::Throw,
try_node::{Catch, Finally, Try},
};
use super::Const;
use super::{Const, Span};
use crate::{
exec::Executable,
gc::{empty_trace, Finalize, Trace},
@ -60,9 +60,76 @@ use std::{
#[cfg(feature = "deser")]
use serde::{Deserialize, Serialize};
/// An AST node.
#[cfg_attr(feature = "deser", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
pub enum Node {
pub struct Node {
kind: NodeKind,
span: Span,
}
impl Node {
/// Creates a new node.
#[inline]
pub fn new<K>(kind: K, span: Span) -> Self
where
K: Into<NodeKind>,
{
Self {
kind: kind.into(),
span,
}
}
/// Retrieves the kind of node.
#[inline]
pub fn kind(&self) -> &NodeKind {
&self.kind
}
/// Discards the span information.
#[inline]
pub(crate) fn into_kind(self) -> NodeKind {
self.kind
}
/// Retrieves the span of the node.
#[inline]
pub fn span(&self) -> Span {
self.span
}
/// Implements the display formatting with indentation.
#[inline]
fn display(&self, f: &mut fmt::Formatter<'_>, indentation: usize) -> fmt::Result {
self.kind.display(f, indentation)
}
/// Returns a node ordering based on the hoistability of each node.
#[inline]
pub(crate) fn hoistable_order(a: &Node, b: &Node) -> Ordering {
NodeKind::hoistable_order(a.kind(), b.kind())
}
}
impl Executable for Node {
#[inline]
fn run(&self, context: &mut Context) -> Result<Value> {
self.kind.run(context)
}
}
impl Display for Node {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.kind.display(f, 0)
}
}
/// The kind of node, with all the relevant information.
#[cfg_attr(feature = "deser", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
pub enum NodeKind {
/// Array declaration node. [More information](./array/struct.ArrayDecl.html).
ArrayDecl(ArrayDecl),
@ -210,30 +277,44 @@ pub enum Node {
Empty,
}
impl Display for Node {
impl Display for NodeKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.display(f, 0)
}
}
impl From<Const> for Node {
impl From<Const> for NodeKind {
fn from(c: Const) -> Self {
Self::Const(c)
}
}
impl Node {
impl NodeKind {
/// Returns a node ordering based on the hoistability of each node.
pub(crate) fn hoistable_order(a: &Node, b: &Node) -> Ordering {
pub(crate) fn hoistable_order(a: &NodeKind, b: &NodeKind) -> Ordering {
match (a, b) {
(Node::FunctionDecl(_), Node::FunctionDecl(_)) => Ordering::Equal,
(_, Node::FunctionDecl(_)) => Ordering::Greater,
(Node::FunctionDecl(_), _) => Ordering::Less,
(NodeKind::FunctionDecl(_), NodeKind::FunctionDecl(_)) => Ordering::Equal,
(_, NodeKind::FunctionDecl(_)) => Ordering::Greater,
(NodeKind::FunctionDecl(_), _) => Ordering::Less,
(_, _) => Ordering::Equal,
}
}
/// Returns true if as per the [spec][spec] the node can be assigned a value.
///
/// [spec]: https://tc39.es/ecma262/#sec-assignment-operators-static-semantics-early-errors
pub(crate) fn is_assignable(&self) -> bool {
matches!(
self,
Self::GetConstField(_)
| Self::GetField(_)
| Self::Assign(_)
| Self::Call(_)
| Self::Identifier(_)
| Self::Object(_)
)
}
/// Creates a `This` AST node.
pub fn this() -> Self {
Self::This
@ -305,63 +386,63 @@ impl Node {
}
}
impl Executable for Node {
impl Executable for NodeKind {
fn run(&self, context: &mut Context) -> Result<Value> {
let _timer = BoaProfiler::global().start_event("Executable", "exec");
match *self {
Node::AsyncFunctionDecl(ref decl) => decl.run(context),
Node::AsyncFunctionExpr(ref function_expr) => function_expr.run(context),
Node::AwaitExpr(ref expr) => expr.run(context),
Node::Call(ref call) => call.run(context),
Node::Const(Const::Null) => Ok(Value::null()),
Node::Const(Const::Num(num)) => Ok(Value::rational(num)),
Node::Const(Const::Int(num)) => Ok(Value::integer(num)),
Node::Const(Const::BigInt(ref num)) => Ok(Value::from(num.clone())),
Node::Const(Const::Undefined) => Ok(Value::Undefined),
Self::AsyncFunctionDecl(ref decl) => decl.run(context),
Self::AsyncFunctionExpr(ref function_expr) => function_expr.run(context),
Self::AwaitExpr(ref expr) => expr.run(context),
Self::Call(ref call) => call.run(context),
Self::Const(Const::Null) => Ok(Value::null()),
Self::Const(Const::Num(num)) => Ok(Value::rational(num)),
Self::Const(Const::Int(num)) => Ok(Value::integer(num)),
Self::Const(Const::BigInt(ref num)) => Ok(Value::from(num.clone())),
Self::Const(Const::Undefined) => Ok(Value::Undefined),
// we can't move String from Const into value, because const is a garbage collected value
// Which means Drop() get's called on Const, but str will be gone at that point.
// Do Const values need to be garbage collected? We no longer need them once we've generated Values
Node::Const(Const::String(ref value)) => Ok(Value::string(value.to_string())),
Node::Const(Const::Bool(value)) => Ok(Value::boolean(value)),
Node::Block(ref block) => block.run(context),
Node::Identifier(ref identifier) => identifier.run(context),
Node::GetConstField(ref get_const_field_node) => get_const_field_node.run(context),
Node::GetField(ref get_field) => get_field.run(context),
Node::WhileLoop(ref while_loop) => while_loop.run(context),
Node::DoWhileLoop(ref do_while) => do_while.run(context),
Node::ForLoop(ref for_loop) => for_loop.run(context),
Node::ForOfLoop(ref for_of_loop) => for_of_loop.run(context),
Node::ForInLoop(ref for_in_loop) => for_in_loop.run(context),
Node::If(ref if_smt) => if_smt.run(context),
Node::ConditionalOp(ref op) => op.run(context),
Node::Switch(ref switch) => switch.run(context),
Node::Object(ref obj) => obj.run(context),
Node::ArrayDecl(ref arr) => arr.run(context),
Self::Const(Const::String(ref value)) => Ok(Value::string(value.to_string())),
Self::Const(Const::Bool(value)) => Ok(Value::boolean(value)),
Self::Block(ref block) => block.run(context),
Self::Identifier(ref identifier) => identifier.run(context),
Self::GetConstField(ref get_const_field_node) => get_const_field_node.run(context),
Self::GetField(ref get_field) => get_field.run(context),
Self::WhileLoop(ref while_loop) => while_loop.run(context),
Self::DoWhileLoop(ref do_while) => do_while.run(context),
Self::ForLoop(ref for_loop) => for_loop.run(context),
Self::ForOfLoop(ref for_of_loop) => for_of_loop.run(context),
Self::ForInLoop(ref for_in_loop) => for_in_loop.run(context),
Self::If(ref if_smt) => if_smt.run(context),
Self::ConditionalOp(ref op) => op.run(context),
Self::Switch(ref switch) => switch.run(context),
Self::Object(ref obj) => obj.run(context),
Self::ArrayDecl(ref arr) => arr.run(context),
// <https://tc39.es/ecma262/#sec-createdynamicfunction>
Node::FunctionDecl(ref decl) => decl.run(context),
Self::FunctionDecl(ref decl) => decl.run(context),
// <https://tc39.es/ecma262/#sec-createdynamicfunction>
Node::FunctionExpr(ref function_expr) => function_expr.run(context),
Node::ArrowFunctionDecl(ref decl) => decl.run(context),
Node::BinOp(ref op) => op.run(context),
Node::UnaryOp(ref op) => op.run(context),
Node::New(ref call) => call.run(context),
Node::Return(ref ret) => ret.run(context),
Node::TaggedTemplate(ref template) => template.run(context),
Node::TemplateLit(ref template) => template.run(context),
Node::Throw(ref throw) => throw.run(context),
Node::Assign(ref op) => op.run(context),
Node::VarDeclList(ref decl) => decl.run(context),
Node::LetDeclList(ref decl) => decl.run(context),
Node::ConstDeclList(ref decl) => decl.run(context),
Node::Spread(ref spread) => spread.run(context),
Node::This => {
Self::FunctionExpr(ref function_expr) => function_expr.run(context),
Self::ArrowFunctionDecl(ref decl) => decl.run(context),
Self::BinOp(ref op) => op.run(context),
Self::UnaryOp(ref op) => op.run(context),
Self::New(ref call) => call.run(context),
Self::Return(ref ret) => ret.run(context),
Self::TaggedTemplate(ref template) => template.run(context),
Self::TemplateLit(ref template) => template.run(context),
Self::Throw(ref throw) => throw.run(context),
Self::Assign(ref op) => op.run(context),
Self::VarDeclList(ref decl) => decl.run(context),
Self::LetDeclList(ref decl) => decl.run(context),
Self::ConstDeclList(ref decl) => decl.run(context),
Self::Spread(ref spread) => spread.run(context),
Self::This => {
// Will either return `this` binding or undefined
context.get_this_binding()
}
Node::Try(ref try_node) => try_node.run(context),
Node::Break(ref break_node) => break_node.run(context),
Node::Continue(ref continue_node) => continue_node.run(context),
Node::Empty => Ok(Value::Undefined),
Self::Try(ref try_node) => try_node.run(context),
Self::Break(ref break_node) => break_node.run(context),
Self::Continue(ref continue_node) => continue_node.run(context),
Self::Empty => Ok(Value::Undefined),
}
}
}

6
boa/src/syntax/ast/node/new/mod.rs

@ -2,7 +2,7 @@ use crate::{
builtins::iterable,
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::{Call, Node},
syntax::ast::node::{Call, Node, NodeKind},
value::Value,
BoaProfiler, Context, Result,
};
@ -54,7 +54,7 @@ impl Executable for New {
let func_object = self.expr().run(context)?;
let mut v_args = Vec::with_capacity(self.args().len());
for arg in self.args() {
if let Node::Spread(ref x) = arg {
if let NodeKind::Spread(ref x) = arg.kind() {
let val = x.run(context)?;
let iterator_record = iterable::get_iterator(context, val)?;
loop {
@ -91,7 +91,7 @@ impl fmt::Display for New {
}
}
impl From<New> for Node {
impl From<New> for NodeKind {
fn from(new: New) -> Self {
Self::New(new)
}

6
boa/src/syntax/ast/node/object/mod.rs

@ -4,7 +4,7 @@ use crate::{
exec::Executable,
gc::{Finalize, Trace},
property::{AccessorDescriptor, Attribute, DataDescriptor, PropertyDescriptor},
syntax::ast::node::{join_nodes, MethodDefinitionKind, Node, PropertyDefinition},
syntax::ast::node::{join_nodes, MethodDefinitionKind, NodeKind, PropertyDefinition},
BoaProfiler, Context, Result, Value,
};
use std::fmt;
@ -64,7 +64,7 @@ impl Object {
}
PropertyDefinition::Property(key, value) => {
write!(f, "{}{}: ", indentation, key,)?;
value.display_no_indent(f, indent + 1)?;
value.kind().display_no_indent(f, indent + 1)?;
writeln!(f, ",")?;
}
PropertyDefinition::SpreadObject(key) => {
@ -190,7 +190,7 @@ where
}
}
impl From<Object> for Node {
impl From<Object> for NodeKind {
fn from(obj: Object) -> Self {
Self::Object(obj)
}

12
boa/src/syntax/ast/node/operator/assign/mod.rs

@ -2,7 +2,7 @@ use crate::{
environment::lexical_environment::VariableScope,
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
BoaProfiler, Context, Result, Value,
};
use std::fmt;
@ -56,8 +56,8 @@ impl Executable for Assign {
fn run(&self, context: &mut Context) -> Result<Value> {
let _timer = BoaProfiler::global().start_event("Assign", "exec");
let val = self.rhs().run(context)?;
match self.lhs() {
Node::Identifier(ref name) => {
match self.lhs().kind() {
NodeKind::Identifier(ref name) => {
if context.has_binding(name.as_ref()) {
// Binding already exists
context.set_mutable_binding(name.as_ref(), val.clone(), true)?;
@ -70,11 +70,11 @@ impl Executable for Assign {
context.initialize_binding(name.as_ref(), val.clone())?;
}
}
Node::GetConstField(ref get_const_field) => {
NodeKind::GetConstField(ref get_const_field) => {
let val_obj = get_const_field.obj().run(context)?;
val_obj.set_field(get_const_field.field(), val.clone(), false, context)?;
}
Node::GetField(ref get_field) => {
NodeKind::GetField(ref get_field) => {
let object = get_field.obj().run(context)?;
let field = get_field.field().run(context)?;
let key = field.to_property_key(context)?;
@ -92,7 +92,7 @@ impl fmt::Display for Assign {
}
}
impl From<Assign> for Node {
impl From<Assign> for NodeKind {
fn from(op: Assign) -> Self {
Self::Assign(op)
}

10
boa/src/syntax/ast/node/operator/bin_op/mod.rs

@ -3,7 +3,7 @@ use crate::{
gc::{Finalize, Trace},
symbol::WellKnownSymbols,
syntax::ast::{
node::Node,
node::{Node, NodeKind},
op::{self, AssignOp, BitOp, CompOp, LogOp, NumOp},
},
Context, Result, Value,
@ -204,15 +204,15 @@ impl Executable for BinOp {
}
}
}),
op::BinOp::Assign(op) => match self.lhs() {
Node::Identifier(ref name) => {
op::BinOp::Assign(op) => match self.lhs().kind() {
NodeKind::Identifier(ref name) => {
let v_a = context.get_binding_value(name.as_ref())?;
let value = Self::run_assign(op, v_a, self.rhs(), context)?;
context.set_mutable_binding(name.as_ref(), value.clone(), true)?;
Ok(value)
}
Node::GetConstField(ref get_const_field) => {
NodeKind::GetConstField(ref get_const_field) => {
let v_r_a = get_const_field.obj().run(context)?;
let v_a = v_r_a.get_field(get_const_field.field(), context)?;
let value = Self::run_assign(op, v_a, self.rhs(), context)?;
@ -285,7 +285,7 @@ impl fmt::Display for BinOp {
}
}
impl From<BinOp> for Node {
impl From<BinOp> for NodeKind {
fn from(op: BinOp) -> Self {
Self::BinOp(op)
}

31
boa/src/syntax/ast/node/operator/unary_op/mod.rs

@ -1,7 +1,10 @@
use crate::{
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::{node::Node, op},
syntax::ast::{
node::{Node, NodeKind},
op,
},
Context, Result, Value,
};
use std::fmt;
@ -94,15 +97,15 @@ impl Executable for UnaryOp {
self.target().run(context)?;
Value::undefined()
}
op::UnaryOp::Delete => match *self.target() {
Node::GetConstField(ref get_const_field) => Value::boolean(
op::UnaryOp::Delete => match self.target().kind() {
NodeKind::GetConstField(ref get_const_field) => Value::boolean(
get_const_field
.obj()
.run(context)?
.to_object(context)?
.delete(&get_const_field.field().into()),
),
Node::GetField(ref get_field) => {
NodeKind::GetField(ref get_field) => {
let obj = get_field.obj().run(context)?;
let field = &get_field.field().run(context)?;
let res = obj
@ -110,15 +113,15 @@ impl Executable for UnaryOp {
.delete(&field.to_property_key(context)?);
return Ok(Value::boolean(res));
}
Node::Identifier(_) => Value::boolean(false),
Node::ArrayDecl(_)
| Node::Block(_)
| Node::Const(_)
| Node::FunctionDecl(_)
| Node::FunctionExpr(_)
| Node::New(_)
| Node::Object(_)
| Node::UnaryOp(_) => Value::boolean(true),
NodeKind::Identifier(_) => Value::boolean(false),
NodeKind::ArrayDecl(_)
| NodeKind::Block(_)
| NodeKind::Const(_)
| NodeKind::FunctionDecl(_)
| NodeKind::FunctionExpr(_)
| NodeKind::New(_)
| NodeKind::Object(_)
| NodeKind::UnaryOp(_) => Value::boolean(true),
_ => return context.throw_syntax_error(format!("wrong delete argument {}", self)),
},
op::UnaryOp::TypeOf => Value::from(self.target().run(context)?.get_type().as_str()),
@ -132,7 +135,7 @@ impl fmt::Display for UnaryOp {
}
}
impl From<UnaryOp> for Node {
impl From<UnaryOp> for NodeKind {
fn from(op: UnaryOp) -> Self {
Self::UnaryOp(op)
}

8
boa/src/syntax/ast/node/return_smt/mod.rs

@ -1,7 +1,7 @@
use crate::{
exec::{Executable, InterpreterState},
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
Context, Result, Value,
};
use std::fmt;
@ -74,9 +74,9 @@ impl Executable for Return {
}
}
impl From<Return> for Node {
fn from(return_smt: Return) -> Node {
Node::Return(return_smt)
impl From<Return> for NodeKind {
fn from(return_smt: Return) -> Self {
Self::Return(return_smt)
}
}

6
boa/src/syntax/ast/node/spread/mod.rs

@ -1,7 +1,7 @@
use crate::{
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
Context, Result, Value,
};
use std::fmt;
@ -64,8 +64,8 @@ impl fmt::Display for Spread {
}
}
impl From<Spread> for Node {
fn from(spread: Spread) -> Node {
impl From<Spread> for NodeKind {
fn from(spread: Spread) -> Self {
Self::Spread(spread)
}
}

62
boa/src/syntax/ast/node/statement_list/mod.rs

@ -3,7 +3,7 @@
use crate::{
exec::{Executable, InterpreterState},
gc::{empty_trace, Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::{Node, NodeKind, Span},
BoaProfiler, Context, Result, Value,
};
use std::{collections::HashSet, fmt, ops::Deref, rc::Rc};
@ -25,16 +25,34 @@ use crate::vm::{compilation::CodeGen, Compiler};
#[cfg_attr(feature = "deser", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
pub struct StatementList {
#[cfg_attr(feature = "deser", serde(flatten))]
items: Box<[Node]>,
span: Span,
}
impl StatementList {
/// Creates a new statement list
pub(crate) fn new<N>(items: N, span: Span) -> Self
where
N: Into<Box<[Node]>>,
{
let items = items.into();
debug_assert_ne!(items.len(), 0, "empty statement list created");
Self { items, span }
}
/// Gets the list of items.
#[inline]
pub fn items(&self) -> &[Node] {
&self.items
}
/// Gets the span of the statement list.
#[inline]
pub fn span(&self) -> Span {
self.span
}
/// Implements the display formatting with indentation.
pub(in crate::syntax::ast::node) fn display(
&self,
@ -42,13 +60,15 @@ impl StatementList {
indentation: usize,
) -> fmt::Result {
// Print statements
for node in self.items.iter() {
for node in self.items.iter().map(Node::kind) {
// We rely on the node to add the correct indent.
node.display(f, indentation)?;
match node {
Node::Block(_) | Node::If(_) | Node::Switch(_) | Node::WhileLoop(_) => {}
_ => write!(f, ";")?,
if !matches!(
node,
NodeKind::Block(_) | NodeKind::If(_) | NodeKind::Switch(_) | NodeKind::WhileLoop(_)
) {
write!(f, ";")?
}
writeln!(f)?;
}
@ -58,7 +78,9 @@ impl StatementList {
pub fn lexically_declared_names(&self) -> HashSet<&str> {
let mut set = HashSet::new();
for stmt in self.items() {
if let Node::LetDeclList(decl_list) | Node::ConstDeclList(decl_list) = stmt {
if let NodeKind::LetDeclList(decl_list) | NodeKind::ConstDeclList(decl_list) =
stmt.kind()
{
for decl in decl_list.as_ref() {
if !set.insert(decl.name()) {
// It is a Syntax Error if the LexicallyDeclaredNames of StatementList contains any duplicate entries.
@ -72,19 +94,22 @@ impl StatementList {
}
pub fn function_declared_names(&self) -> HashSet<&str> {
let mut set = HashSet::new();
for stmt in self.items() {
if let Node::FunctionDecl(decl) = stmt {
set.insert(decl.name());
self.items
.iter()
.filter_map(|node| {
if let NodeKind::FunctionDecl(decl) = node.kind() {
Some(decl.name())
} else {
None
}
}
set
})
.collect::<HashSet<_>>()
}
pub fn var_declared_names(&self) -> HashSet<&str> {
let mut set = HashSet::new();
for stmt in self.items() {
if let Node::VarDeclList(decl_list) = stmt {
if let NodeKind::VarDeclList(decl_list) = stmt.kind() {
for decl in decl_list.as_ref() {
set.insert(decl.name());
}
@ -145,15 +170,6 @@ impl CodeGen for StatementList {
}
}
impl<T> From<T> for StatementList
where
T: Into<Box<[Node]>>,
{
fn from(stm: T) -> Self {
Self { items: stm.into() }
}
}
impl fmt::Display for StatementList {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.display(f, 0)

4
boa/src/syntax/ast/node/switch/mod.rs

@ -3,7 +3,7 @@
use crate::{
exec::{Executable, InterpreterState},
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
Context, Result, Value,
};
use std::fmt;
@ -208,7 +208,7 @@ impl fmt::Display for Switch {
}
}
impl From<Switch> for Node {
impl From<Switch> for NodeKind {
fn from(switch: Switch) -> Self {
Self::Switch(switch)
}

29
boa/src/syntax/ast/node/template/mod.rs

@ -1,8 +1,13 @@
//! Template literal node.
use super::Node;
use crate::{builtins::Array, exec::Executable, value::Type, BoaProfiler, Context, Result, Value};
use gc::{Finalize, Trace};
use super::{Node, NodeKind};
use crate::{
builtins::Array,
exec::Executable,
gc::{Finalize, Trace},
value::Type,
BoaProfiler, Context, Result, Value,
};
#[cfg(feature = "deser")]
use serde::{Deserialize, Serialize};
@ -64,6 +69,14 @@ impl fmt::Display for TemplateLit {
write!(f, "`")
}
}
impl From<TemplateLit> for NodeKind {
#[inline]
fn from(templ: TemplateLit) -> Self {
Self::TemplateLit(templ)
}
}
#[cfg_attr(feature = "deser", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
pub struct TaggedTemplate {
@ -109,8 +122,8 @@ impl Executable for TaggedTemplate {
}
template_object.set_field("raw", raw_array, false, context)?;
let (this, func) = match *self.tag {
Node::GetConstField(ref get_const_field) => {
let (this, func) = match self.tag.kind() {
NodeKind::GetConstField(ref get_const_field) => {
let mut obj = get_const_field.obj().run(context)?;
if obj.get_type() != Type::Object {
obj = Value::Object(obj.to_object(context)?);
@ -120,7 +133,7 @@ impl Executable for TaggedTemplate {
obj.get_field(get_const_field.field(), context)?,
)
}
Node::GetField(ref get_field) => {
NodeKind::GetField(ref get_field) => {
let obj = get_field.obj().run(context)?;
let field = get_field.field().run(context)?;
(
@ -150,9 +163,9 @@ impl fmt::Display for TaggedTemplate {
}
}
impl From<TaggedTemplate> for Node {
impl From<TaggedTemplate> for NodeKind {
fn from(template: TaggedTemplate) -> Self {
Node::TaggedTemplate(template)
Self::TaggedTemplate(template)
}
}

6
boa/src/syntax/ast/node/throw/mod.rs

@ -1,7 +1,7 @@
use crate::{
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::Node,
syntax::ast::node::{Node, NodeKind},
Context, Result, Value,
};
use std::fmt;
@ -61,8 +61,8 @@ impl fmt::Display for Throw {
}
}
impl From<Throw> for Node {
fn from(trw: Throw) -> Node {
impl From<Throw> for NodeKind {
fn from(trw: Throw) -> Self {
Self::Throw(trw)
}
}

4
boa/src/syntax/ast/node/try_node/mod.rs

@ -5,7 +5,7 @@ use crate::{
},
exec::Executable,
gc::{Finalize, Trace},
syntax::ast::node::{Block, Identifier, Node},
syntax::ast::node::{Block, Identifier, NodeKind},
BoaProfiler, Context, Result, Value,
};
use std::fmt;
@ -141,7 +141,7 @@ impl fmt::Display for Try {
}
}
impl From<Try> for Node {
impl From<Try> for NodeKind {
fn from(try_catch: Try) -> Self {
Self::Try(try_catch)
}

35
boa/src/syntax/ast/position.rs

@ -5,6 +5,8 @@ use std::{cmp::Ordering, fmt, num::NonZeroU32};
#[cfg(feature = "deser")]
use serde::{Deserialize, Serialize};
use crate::gc::{empty_trace, Finalize, Trace};
/// A position in the JavaScript source code.
///
/// Stores both the column number and the line number.
@ -15,7 +17,7 @@ use serde::{Deserialize, Serialize};
/// ## Similar Implementations
/// [V8: Location](https://cs.chromium.org/chromium/src/v8/src/parsing/scanner.h?type=cs&q=isValid+Location&g=0&l=216)
#[cfg_attr(feature = "deser", derive(Serialize, Deserialize))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Finalize)]
pub struct Position {
/// Line number.
line_number: NonZeroU32,
@ -23,6 +25,10 @@ pub struct Position {
column_number: NonZeroU32,
}
unsafe impl Trace for Position {
empty_trace!();
}
impl Position {
/// Creates a new `Position`.
#[inline]
@ -47,6 +53,16 @@ impl Position {
}
}
impl Default for Position {
#[inline]
fn default() -> Self {
Self {
line_number: NonZeroU32::new(1).unwrap(),
column_number: NonZeroU32::new(1).unwrap(),
}
}
}
impl fmt::Display for Position {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}:{}", self.line_number, self.column_number)
@ -57,12 +73,16 @@ impl fmt::Display for Position {
///
/// Stores a start position and an end position.
#[cfg_attr(feature = "deser", derive(Serialize, Deserialize))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Finalize)]
pub struct Span {
start: Position,
end: Position,
}
unsafe impl Trace for Span {
empty_trace!();
}
impl Span {
/// Creates a new `Span`.
#[inline]
@ -97,6 +117,7 @@ impl Span {
}
impl From<Position> for Span {
#[inline]
fn from(pos: Position) -> Self {
Self {
start: pos,
@ -105,6 +126,16 @@ impl From<Position> for Span {
}
}
impl Default for Span {
#[inline]
fn default() -> Self {
Self {
start: Position::default(),
end: Position::default(),
}
}
}
impl PartialOrd for Span {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
if self == other {
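The `Default` impls added above exist so that code building synthetic AST nodes, such as the `Context::construct_*_error` helpers changed in `context.rs` earlier in this diff, can attach a placeholder 1:1 span. A standalone sketch of that behaviour, using simplified stand-ins for the real `Position`/`Span` types:

```rust
use std::num::NonZeroU32;

// Simplified mirror of boa/src/syntax/ast/position.rs.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Position {
    line_number: NonZeroU32,
    column_number: NonZeroU32,
}

impl Default for Position {
    // Line and column numbers are 1-based, so the "empty" position is 1:1.
    fn default() -> Self {
        Self {
            line_number: NonZeroU32::new(1).unwrap(),
            column_number: NonZeroU32::new(1).unwrap(),
        }
    }
}

#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
struct Span {
    start: Position,
    end: Position,
}

fn main() {
    // A defaulted span covers only the very first character: 1:1..1:1.
    let span = Span::default();
    assert_eq!(span.start, Position::default());
    assert_eq!(span.end, Position::default());
    println!(
        "default span: {}:{}..{}:{}",
        span.start.line_number, span.start.column_number,
        span.end.line_number, span.end.column_number
    );
}
```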

13
boa/src/syntax/parser/cursor/mod.rs

@ -113,16 +113,19 @@ where
///
/// [spec]: https://tc39.es/ecma262/#sec-automatic-semicolon-insertion
#[inline]
pub(super) fn expect_semicolon(&mut self, context: &'static str) -> Result<(), ParseError> {
pub(super) fn expect_semicolon(
&mut self,
context: &'static str,
) -> Result<Option<Token>, ParseError> {
match self.peek_semicolon()? {
SemicolonResult::Found(Some(tk)) => match *tk.kind() {
TokenKind::Punctuator(Punctuator::Semicolon) | TokenKind::LineTerminator => {
let _ = self.buffered_lexer.next(false)?;
Ok(())
let tk = self.buffered_lexer.next(false)?;
Ok(tk)
}
_ => Ok(()),
_ => Ok(None),
},
SemicolonResult::Found(None) => Ok(()),
SemicolonResult::Found(None) => Ok(None),
SemicolonResult::NotFound(tk) => Err(ParseError::expected(
vec![TokenKind::Punctuator(Punctuator::Semicolon)],
tk.clone(),

28
boa/src/syntax/parser/expression/assignment/arrow_function.rs

@ -12,7 +12,7 @@ use crate::{
syntax::{
ast::{
node::{ArrowFunctionDecl, FormalParameter, Node, Return, StatementList},
Punctuator,
Punctuator, Span,
},
lexer::{Error as LexError, Position, TokenKind},
parser::{
@ -66,11 +66,13 @@ impl<R> TokenParser<R> for ArrowFunction
where
R: Read,
{
type Output = ArrowFunctionDecl;
type Output = (ArrowFunctionDecl, Span);
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
let _timer = BoaProfiler::global().start_event("ArrowFunction", "Parsing");
let next_token = cursor.peek(0)?.ok_or(ParseError::AbruptEnd)?;
let span_start = next_token.span().start();
let params = if let TokenKind::Punctuator(Punctuator::OpenParen) = &next_token.kind() {
// CoverParenthesizedExpressionAndArrowParameterList
@ -80,7 +82,7 @@ where
cursor.expect(Punctuator::CloseParen, "arrow function")?;
params
} else {
let param = BindingIdentifier::new(self.allow_yield, self.allow_await)
let (param, _span) = BindingIdentifier::new(self.allow_yield, self.allow_await)
.parse(cursor)
.context("arrow function")?;
Box::new([FormalParameter::new(param, None, false)])
@ -109,7 +111,9 @@ where
}
}
Ok(ArrowFunctionDecl::new(params, body))
let span = Span::new(span_start, body.span().end());
Ok((ArrowFunctionDecl::new(params, body), span))
}
}
@ -138,18 +142,22 @@ where
type Output = StatementList;
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
match cursor.peek(0)?.ok_or(ParseError::AbruptEnd)?.kind() {
let next = cursor.peek(0)?.ok_or(ParseError::AbruptEnd)?;
match next.kind() {
TokenKind::Punctuator(Punctuator::OpenBlock) => {
let _ = cursor.next();
let body = FunctionBody::new(false, false).parse(cursor)?;
cursor.expect(Punctuator::CloseBlock, "arrow function")?;
Ok(body)
}
_ => Ok(StatementList::from(vec![Return::new(
ExpressionBody::new(self.allow_in, false).parse(cursor)?,
None,
)
.into()])),
_ => {
let expr = ExpressionBody::new(self.allow_in, false).parse(cursor)?;
Ok(StatementList::new(
vec![Node::new(Return::new(expr, None), expr.span())],
expr.span(),
))
}
}
}
}

9
boa/src/syntax/parser/expression/assignment/conditional.rs

@ -10,7 +10,7 @@
use crate::syntax::lexer::TokenKind;
use crate::{
syntax::{
ast::{node::ConditionalOp, Node, Punctuator},
ast::{node::ConditionalOp, Node, Punctuator, Span},
parser::{
expression::{AssignmentExpression, ShortCircuitExpression},
AllowAwait, AllowIn, AllowYield, Cursor, ParseResult, TokenParser,
@ -79,7 +79,12 @@ where
let else_clause =
AssignmentExpression::new(self.allow_in, self.allow_yield, self.allow_await)
.parse(cursor)?;
return Ok(ConditionalOp::new(lhs, then_clause, else_clause).into());
let span = Span::new(lhs.span().start(), else_clause.span().end());
return Ok(Node::new(
ConditionalOp::new(lhs, then_clause, else_clause),
span,
));
}
}

9
boa/src/syntax/parser/expression/assignment/exponentiation.rs

@ -14,7 +14,7 @@ use crate::{
ast::{
node::{BinOp, Node},
op::NumOp,
Keyword, Punctuator,
Keyword, Punctuator, Span,
},
parser::{
expression::{unary::UnaryExpression, update::UpdateExpression},
@ -92,7 +92,12 @@ where
if let Some(tok) = cursor.peek(0)? {
if let TokenKind::Punctuator(Punctuator::Exp) = tok.kind() {
cursor.next()?.expect("** token vanished"); // Consume the token.
return Ok(BinOp::new(NumOp::Exp, lhs, self.parse(cursor)?).into());
let span = Span::new(lhs.span().start(), tok.span().end());
return Ok(Node::new(
BinOp::new(NumOp::Exp, lhs, self.parse(cursor)?),
span,
));
}
}
Ok(lhs)

79
boa/src/syntax/parser/expression/assignment/mod.rs

@ -16,8 +16,8 @@ use crate::syntax::lexer::{Error as LexError, InputElement, TokenKind};
use crate::{
syntax::{
ast::{
node::{Assign, BinOp, Node},
Keyword, Punctuator,
node::{Assign, BinOp, Node, NodeKind},
Keyword, Punctuator, Span,
},
parser::{AllowAwait, AllowIn, AllowYield, Cursor, ParseError, ParseResult, TokenParser},
},
@ -90,13 +90,11 @@ where
| TokenKind::Keyword(Keyword::Await) => {
if let Ok(tok) = cursor.peek_expect_no_lineterminator(1, "assignment expression") {
if tok.kind() == &TokenKind::Punctuator(Punctuator::Arrow) {
return ArrowFunction::new(
self.allow_in,
self.allow_yield,
self.allow_await,
)
.parse(cursor)
.map(Node::ArrowFunctionDecl);
let (decl, span) =
ArrowFunction::new(self.allow_in, self.allow_yield, self.allow_await)
.parse(cursor)?;
return Ok(Node::new(NodeKind::ArrowFunctionDecl(decl), span));
}
}
}
@ -110,37 +108,43 @@ where
// otherwise it is an expression of the form (b).
if let Some(t) = cursor.peek(2)? {
if t.kind() == &TokenKind::Punctuator(Punctuator::Arrow) {
return ArrowFunction::new(
let (decl, span) = ArrowFunction::new(
self.allow_in,
self.allow_yield,
self.allow_await,
)
.parse(cursor)
.map(Node::ArrowFunctionDecl);
.parse(cursor)?;
return Ok(Node::new(NodeKind::ArrowFunctionDecl(decl), span));
}
}
}
TokenKind::Punctuator(Punctuator::Spread) => {
return ArrowFunction::new(
let (decl, span) = ArrowFunction::new(
self.allow_in,
self.allow_yield,
self.allow_await,
)
.parse(cursor)
.map(Node::ArrowFunctionDecl);
.parse(cursor)?;
return Ok(Node::new(NodeKind::ArrowFunctionDecl(decl), span));
}
TokenKind::Identifier(_) => {
if let Some(t) = cursor.peek(2)? {
match *t.kind() {
TokenKind::Punctuator(Punctuator::Comma) => {
// This must be an argument list and therefore (a, b) => {}
return ArrowFunction::new(
let (decl, span) = ArrowFunction::new(
self.allow_in,
self.allow_yield,
self.allow_await,
)
.parse(cursor)
.map(Node::ArrowFunctionDecl);
.parse(cursor)?;
return Ok(Node::new(
NodeKind::ArrowFunctionDecl(decl),
span,
));
}
TokenKind::Punctuator(Punctuator::CloseParen) => {
// Need to check if the token after the close paren is an arrow, if so then this is an ArrowFunction
@ -148,13 +152,17 @@ where
if let Some(t) = cursor.peek(3)? {
if t.kind() == &TokenKind::Punctuator(Punctuator::Arrow)
{
return ArrowFunction::new(
let (decl, span) = ArrowFunction::new(
self.allow_in,
self.allow_yield,
self.allow_await,
)
.parse(cursor)
.map(Node::ArrowFunctionDecl);
.parse(cursor)?;
return Ok(Node::new(
NodeKind::ArrowFunctionDecl(decl),
span,
));
}
}
}
@ -181,8 +189,11 @@ where
match tok.kind() {
TokenKind::Punctuator(Punctuator::Assign) => {
cursor.next()?.expect("= token vanished"); // Consume the token.
if is_assignable(&lhs) {
lhs = Assign::new(lhs, self.parse(cursor)?).into();
if lhs.kind().is_assignable() {
let expr = self.parse(cursor)?;
let span = Span::new(lhs.span().start(), expr.span().end());
lhs = Node::new(Assign::new(lhs, expr), span);
} else {
return Err(ParseError::lex(LexError::Syntax(
"Invalid left-hand side in assignment".into(),
@ -192,11 +203,13 @@ where
}
TokenKind::Punctuator(p) if p.as_binop().is_some() && p != &Punctuator::Comma => {
cursor.next()?.expect("token vanished"); // Consume the token.
if is_assignable(&lhs) {
if lhs.kind().is_assignable() {
let binop = p.as_binop().expect("binop disappeared");
let expr = self.parse(cursor)?;
lhs = BinOp::new(binop, lhs, expr).into();
let span = Span::new(lhs.span().start(), expr.span().end());
lhs = Node::new(BinOp::new(binop, lhs, expr), span);
} else {
return Err(ParseError::lex(LexError::Syntax(
"Invalid left-hand side in assignment".into(),
@ -211,19 +224,3 @@ where
Ok(lhs)
}
}
/// Returns true if as per spec[spec] the node can be assigned a value.
///
/// [spec]: https://tc39.es/ecma262/#sec-assignment-operators-static-semantics-early-errors
#[inline]
pub(crate) fn is_assignable(node: &Node) -> bool {
matches!(
node,
Node::GetConstField(_)
| Node::GetField(_)
| Node::Assign(_)
| Node::Call(_)
| Node::Identifier(_)
| Node::Object(_)
)
}
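
The free `is_assignable` helper removed above has become a method on the node kind, called as `lhs.kind().is_assignable()` in the assignment parser. The method body is not part of this hunk, so the following stand-alone sketch only mirrors the deleted match arms on a simplified `NodeKind` stand-in (an assumption, not the crate's actual definition):

    // Simplified NodeKind stand-in; only the variants relevant to assignability
    // are modelled, each with a placeholder payload.
    #[allow(dead_code)]
    enum NodeKind {
        GetConstField(String),
        GetField(String),
        Assign(String),
        Call(String),
        Identifier(String),
        Object(String),
        Const(i64),
    }

    impl NodeKind {
        /// Mirrors the deleted `is_assignable` free function:
        /// <https://tc39.es/ecma262/#sec-assignment-operators-static-semantics-early-errors>
        fn is_assignable(&self) -> bool {
            matches!(
                self,
                Self::GetConstField(_)
                    | Self::GetField(_)
                    | Self::Assign(_)
                    | Self::Call(_)
                    | Self::Identifier(_)
                    | Self::Object(_)
            )
        }
    }

    fn main() {
        assert!(NodeKind::Identifier("a".into()).is_assignable());
        assert!(!NodeKind::Const(1).is_assignable());
    }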

37
boa/src/syntax/parser/expression/left_hand_side/arguments.rs

@ -10,7 +10,7 @@
use crate::syntax::lexer::TokenKind;
use crate::{
syntax::{
ast::{node::Spread, Node, Punctuator},
ast::{node::Spread, Node, Punctuator, Span},
lexer::InputElement,
parser::{
expression::AssignmentExpression, AllowAwait, AllowYield, Cursor, ParseError,
@ -54,20 +54,23 @@ impl<R> TokenParser<R> for Arguments
where
R: Read,
{
type Output = Box<[Node]>;
type Output = (Box<[Node]>, Span);
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
let _timer = BoaProfiler::global().start_event("Arguments", "Parsing");
cursor.expect(Punctuator::OpenParen, "arguments")?;
let start_span = cursor
.expect(Punctuator::OpenParen, "arguments")?
.span()
.start();
let mut args = Vec::new();
loop {
let end_span = loop {
let next_token = cursor.peek(0)?.ok_or(ParseError::AbruptEnd)?;
match next_token.kind() {
TokenKind::Punctuator(Punctuator::CloseParen) => {
cursor.next()?.expect(") token vanished"); // Consume the token.
break;
break cursor.next()?.expect(") token vanished").span().end();
// Consume the token.
}
TokenKind::Punctuator(Punctuator::Comma) => {
let next_token = cursor.next()?.expect(", token vanished"); // Consume the token.
@ -76,8 +79,8 @@ where
return Err(ParseError::unexpected(next_token, None));
}
if cursor.next_if(Punctuator::CloseParen)?.is_some() {
break;
if let Some(tok) = cursor.next_if(Punctuator::CloseParen)? {
break tok.span().end();
}
}
_ => {
@ -95,13 +98,13 @@ where
}
if cursor.next_if(Punctuator::Spread)?.is_some() {
args.push(
Spread::new(
let assignment_expr =
AssignmentExpression::new(true, self.allow_yield, self.allow_await)
.parse(cursor)?,
)
.into(),
);
.parse(cursor)?;
args.push(Node::new(
Spread::new(assignment_expr),
assignment_expr.span(),
));
} else {
cursor.set_goal(InputElement::RegExp);
args.push(
@ -109,7 +112,9 @@ where
.parse(cursor)?,
);
}
}
Ok(args.into_boxed_slice())
};
let span = Span::new(start_span, end_span);
Ok((args.into_boxed_slice(), span))
}
}
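
To capture where the argument list ends, the loop above now uses Rust's `break`-with-value: whichever arm consumes the closing parenthesis breaks with that token's end position, which then feeds straight into `Span::new`. A small stand-alone illustration of the pattern (token stream and columns invented for the example):

    fn main() {
        // Pretend token stream for `f(a, b)`, as (text, end column) pairs.
        let tokens = vec![("a", 4), (",", 5), ("b", 7), (")", 8)];
        let mut iter = tokens.into_iter();

        // `loop` evaluates to whatever is handed to `break`, so the closing
        // token's end position flows out of the loop, mirroring
        // `break tok.span().end()` above.
        let end_column = loop {
            match iter.next() {
                Some((text, end)) if text == ")" => break end,
                Some(_) => continue,
                None => panic!("unterminated argument list"),
            }
        };

        assert_eq!(end_column, 8);
    }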

35
boa/src/syntax/parser/expression/left_hand_side/call.rs

@ -13,9 +13,9 @@ use crate::{
ast::{
node::{
field::{GetConstField, GetField},
Call, Node,
Call,
},
Punctuator,
Node, Punctuator, Span,
},
lexer::TokenKind,
parser::{
@ -68,8 +68,8 @@ where
let token = cursor.peek(0)?.ok_or(ParseError::AbruptEnd)?;
let mut lhs = if token.kind() == &TokenKind::Punctuator(Punctuator::OpenParen) {
let args = Arguments::new(self.allow_yield, self.allow_await).parse(cursor)?;
Node::from(Call::new(self.first_member_expr, args))
let (args, span) = Arguments::new(self.allow_yield, self.allow_await).parse(cursor)?;
Node::new(Call::new(self.first_member_expr, args), span)
} else {
let next_token = cursor.next()?.expect("token vanished");
return Err(ParseError::expected(
@ -83,18 +83,22 @@ where
let token = tok.clone();
match token.kind() {
TokenKind::Punctuator(Punctuator::OpenParen) => {
let args = Arguments::new(self.allow_yield, self.allow_await).parse(cursor)?;
lhs = Node::from(Call::new(lhs, args));
let (args, span) =
Arguments::new(self.allow_yield, self.allow_await).parse(cursor)?;
lhs = Node::new(Call::new(lhs, args), span);
}
TokenKind::Punctuator(Punctuator::Dot) => {
cursor.next()?.ok_or(ParseError::AbruptEnd)?; // We move the parser forward.
let span_start = cursor.next()?.ok_or(ParseError::AbruptEnd)?.span().start(); // We move the parser forward.
let next_token = cursor.next()?.ok_or(ParseError::AbruptEnd)?;
match &cursor.next()?.ok_or(ParseError::AbruptEnd)?.kind() {
let span = Span::new(span_start, next_token.span().end());
match next_token.kind() {
TokenKind::Identifier(name) => {
lhs = GetConstField::new(lhs, name.clone()).into();
lhs = Node::new(GetConstField::new(lhs, name.clone()), span);
}
TokenKind::Keyword(kw) => {
lhs = GetConstField::new(lhs, kw.to_string()).into();
lhs = Node::new(GetConstField::new(lhs, kw.to_string()), span);
}
_ => {
return Err(ParseError::expected(
@ -106,11 +110,18 @@ where
}
}
TokenKind::Punctuator(Punctuator::OpenBracket) => {
let span_start = token.span().start();
let _ = cursor.next()?.ok_or(ParseError::AbruptEnd)?; // We move the parser.
let idx =
Expression::new(true, self.allow_yield, self.allow_await).parse(cursor)?;
cursor.expect(Punctuator::CloseBracket, "call expression")?;
lhs = GetField::new(lhs, idx).into();
let span_end = cursor
.expect(Punctuator::CloseBracket, "call expression")?
.span()
.end();
let span = Span::new(span_start, span_end);
lhs = Node::new(GetField::new(lhs, idx), span);
}
TokenKind::TemplateNoSubstitution { .. } | TokenKind::TemplateMiddle { .. } => {
lhs = TaggedTemplateLiteral::new(

43
boa/src/syntax/parser/expression/left_hand_side/member.rs

@ -11,9 +11,9 @@ use crate::{
ast::{
node::{
field::{GetConstField, GetField},
Call, New, Node,
Call, New,
},
Keyword, Punctuator,
Keyword, Node, Punctuator, Span,
},
lexer::TokenKind,
parser::{
@ -67,28 +67,40 @@ where
let mut lhs = if cursor.peek(0)?.ok_or(ParseError::AbruptEnd)?.kind()
== &TokenKind::Keyword(Keyword::New)
{
let _ = cursor.next().expect("new keyword disappeared");
let start_span = cursor
.next()?
.expect("new keyword disappeared")
.span()
.start();
let lhs = self.parse(cursor)?;
let args = Arguments::new(self.allow_yield, self.allow_await).parse(cursor)?;
let (args, args_span) =
Arguments::new(self.allow_yield, self.allow_await).parse(cursor)?;
let call_node = Call::new(lhs, args);
Node::from(New::from(call_node))
let span = Span::new(start_span, args_span.end());
Node::new(New::from(call_node), span)
} else {
PrimaryExpression::new(self.allow_yield, self.allow_await).parse(cursor)?
};
while let Some(tok) = cursor.peek(0)? {
match tok.kind() {
TokenKind::Punctuator(Punctuator::Dot) => {
cursor.next()?.expect("dot punctuator token disappeared"); // We move the parser forward.
let span_start = cursor
.next()?
.expect("dot punctuator token disappeared")
.span()
.start(); // We move the parser forward.
let token = cursor.next()?.ok_or(ParseError::AbruptEnd)?;
let span = Span::new(span_start, token.span().end());
match token.kind() {
TokenKind::Identifier(name) => {
lhs = GetConstField::new(lhs, name.clone()).into()
lhs = Node::new(GetConstField::new(lhs, name.clone()), span);
}
TokenKind::Keyword(kw) => {
lhs = GetConstField::new(lhs, kw.to_string()).into()
lhs = Node::new(GetConstField::new(lhs, kw.to_string()), span);
}
_ => {
return Err(ParseError::expected(
@ -100,13 +112,20 @@ where
}
}
TokenKind::Punctuator(Punctuator::OpenBracket) => {
cursor
let span_start = cursor
.next()?
.expect("open bracket punctuator token disappeared"); // We move the parser forward.
.expect("open bracket punctuator token disappeared")
.span()
.start(); // We move the parser forward.
let idx =
Expression::new(true, self.allow_yield, self.allow_await).parse(cursor)?;
cursor.expect(Punctuator::CloseBracket, "member expression")?;
lhs = GetField::new(lhs, idx).into();
let span_end = cursor
.expect(Punctuator::CloseBracket, "member expression")?
.span()
.end();
lhs = Node::new(GetField::new(lhs, idx), Span::new(span_start, span_end));
}
TokenKind::TemplateNoSubstitution { .. } | TokenKind::TemplateMiddle { .. } => {
lhs = TaggedTemplateLiteral::new(

13
boa/src/syntax/parser/expression/left_hand_side/template.rs

@ -1,8 +1,7 @@
use crate::{
profiler::BoaProfiler,
syntax::{
ast::node::TaggedTemplate,
ast::{Node, Position, Punctuator},
ast::{node::TaggedTemplate, Node, Position, Punctuator, Span},
lexer::TokenKind,
parser::{
cursor::Cursor, expression::Expression, AllowAwait, AllowYield, ParseError,
@ -56,6 +55,7 @@ where
let mut exprs = Vec::new();
let mut token = cursor.next()?.ok_or(ParseError::AbruptEnd)?;
let span_start = token.span().start();
loop {
match token.kind() {
@ -73,9 +73,12 @@ where
TokenKind::TemplateNoSubstitution(template_string) => {
raws.push(template_string.as_raw().to_owned().into_boxed_str());
cookeds.push(template_string.to_owned_cooked().ok());
return Ok(Node::from(TaggedTemplate::new(
self.tag, raws, cookeds, exprs,
)));
let span = Span::new(span_start, token.span().end());
return Ok(Node::new(
TaggedTemplate::new(self.tag, raws, cookeds, exprs),
span,
));
}
_ => {
return Err(ParseError::general(

34
boa/src/syntax/parser/expression/primary/array_initializer/mod.rs

@ -13,8 +13,8 @@ mod tests;
use crate::{
syntax::{
ast::{
node::{ArrayDecl, Node, Spread},
Const, Punctuator,
node::{ArrayDecl, Spread},
Const, Node, Punctuator, Span,
},
parser::{
expression::AssignmentExpression, AllowAwait, AllowYield, Cursor, ParseError,
@ -58,28 +58,38 @@ impl<R> TokenParser<R> for ArrayLiteral
where
R: Read,
{
type Output = ArrayDecl;
type Output = (ArrayDecl, Span);
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
let _timer = BoaProfiler::global().start_event("ArrayLiteral", "Parsing");
let span_start = cursor
.expect(Punctuator::OpenBracket, "array literal")?
.span()
.start();
let mut elements = Vec::new();
loop {
let span_end = loop {
// TODO: Support all features.
while cursor.next_if(Punctuator::Comma)?.is_some() {
elements.push(Node::Const(Const::Undefined));
while let Some(tok) = cursor.next_if(Punctuator::Comma)? {
elements.push(Node::new(Const::Undefined, tok.span()));
}
if cursor.next_if(Punctuator::CloseBracket)?.is_some() {
break;
if let Some(tok) = cursor.next_if(Punctuator::CloseBracket)? {
break tok.span().end();
}
let _ = cursor.peek(0)?.ok_or(ParseError::AbruptEnd); // Check that there are more tokens to read.
if cursor.next_if(Punctuator::Spread)?.is_some() {
if let Some(tok) = cursor.next_if(Punctuator::Spread)? {
let span_start = tok.span().start();
let node = AssignmentExpression::new(true, self.allow_yield, self.allow_await)
.parse(cursor)?;
elements.push(Spread::new(node).into());
elements.push(Node::new(
Spread::new(node),
Span::new(span_start, node.span().end()),
));
} else {
elements.push(
AssignmentExpression::new(true, self.allow_yield, self.allow_await)
@ -87,8 +97,8 @@ where
);
}
cursor.next_if(Punctuator::Comma)?;
}
};
Ok(elements.into())
Ok((elements.into(), Span::new(span_start, span_end)))
}
}

19
boa/src/syntax/parser/expression/primary/async_function_expression/mod.rs

@ -3,7 +3,7 @@ mod tests;
use crate::{
syntax::{
ast::{node::AsyncFunctionExpr, Keyword, Punctuator},
ast::{node::AsyncFunctionExpr, Keyword, Punctuator, Span},
lexer::{Error as LexError, Position, TokenKind},
parser::{
function::{FormalParameters, FunctionBody},
@ -45,10 +45,15 @@ impl<R> TokenParser<R> for AsyncFunctionExpression
where
R: Read,
{
type Output = AsyncFunctionExpr;
type Output = (AsyncFunctionExpr, Span);
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
let _timer = BoaProfiler::global().start_event("AsyncFunctionExpression", "Parsing");
let span_start = cursor
.expect(Keyword::Async, "async function expression")?
.span()
.end();
cursor.peek_expect_no_lineterminator(0, "async function expression")?;
cursor.expect(Keyword::Function, "async function expression")?;
@ -72,7 +77,10 @@ where
let body = FunctionBody::new(false, true).parse(cursor)?;
cursor.expect(Punctuator::CloseBlock, "async function expression")?;
let span_end = cursor
.expect(Punctuator::CloseBlock, "async function expression")?
.span()
.end();
// It is a Syntax Error if any element of the BoundNames of FormalParameters
// also occurs in the LexicallyDeclaredNames of FunctionBody.
@ -92,6 +100,9 @@ where
}
}
Ok(AsyncFunctionExpr::new(name, params, body))
Ok((
AsyncFunctionExpr::new(name.map(|name| name.0), params, body),
Span::new(span_start, span_end),
))
}
}

19
boa/src/syntax/parser/expression/primary/function_expression/mod.rs

@ -12,7 +12,7 @@ mod tests;
use crate::{
syntax::{
ast::{node::FunctionExpr, Keyword, Punctuator},
ast::{node::FunctionExpr, Keyword, Punctuator, Span},
lexer::{Error as LexError, Position, TokenKind},
parser::{
function::{FormalParameters, FunctionBody},
@ -40,11 +40,16 @@ impl<R> TokenParser<R> for FunctionExpression
where
R: Read,
{
type Output = FunctionExpr;
type Output = (FunctionExpr, Span);
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
let _timer = BoaProfiler::global().start_event("FunctionExpression", "Parsing");
let span_start = cursor
.expect(Keyword::Function, "function expression")?
.span()
.start();
let name = if let Some(token) = cursor.peek(0)? {
match token.kind() {
TokenKind::Identifier(_)
@ -67,7 +72,10 @@ where
let body = FunctionBody::new(false, false).parse(cursor)?;
cursor.expect(Punctuator::CloseBlock, "function expression")?;
let span_end = cursor
.expect(Punctuator::CloseBlock, "function expression")?
.span()
.end();
// It is a Syntax Error if any element of the BoundNames of FormalParameters
// also occurs in the LexicallyDeclaredNames of FunctionBody.
@ -87,6 +95,9 @@ where
}
}
Ok(FunctionExpr::new(name, params, body))
Ok((
FunctionExpr::new(name.map(|name| name.0), params, body),
Span::new(span_start, span_end),
))
}
}
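
Because `BindingIdentifier` and the expression sub-parsers now return `(value, Span)` tuples, call sites either destructure the pair or drop the span with a `map`. A tiny stand-alone illustration of the two styles seen throughout the diff (the helper below is a made-up stand-in, not boa's parser):

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct Span { start: u32, end: u32 }

    // Stand-in for a sub-parser that now yields its value together with a span,
    // like `BindingIdentifier` returning `(Box<str>, Span)`.
    fn binding_identifier() -> Result<(Box<str>, Span), ()> {
        Ok(("answer".into(), Span { start: 10, end: 16 }))
    }

    fn main() -> Result<(), ()> {
        // Destructuring style, as in `let (param, _span) = BindingIdentifier::new(..).parse(..)?`.
        let (name, _span) = binding_identifier()?;
        assert_eq!(&*name, "answer");

        // Mapping style: keep only the identifier and drop the span, as in
        // `name.map(|name| name.0)` when building the function expression.
        let maybe_name: Option<(Box<str>, Span)> = Some(binding_identifier()?);
        let only_name = maybe_name.map(|name| name.0);
        assert_eq!(only_name.as_deref(), Some("answer"));
        Ok(())
    }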

114
boa/src/syntax/parser/expression/primary/mod.rs

@ -24,8 +24,8 @@ use crate::{
profiler::BoaProfiler,
syntax::{
ast::{
node::{Call, Identifier, New, Node},
Const, Keyword, Punctuator,
node::{Call, Identifier, New},
Const, Keyword, Node, NodeKind, Punctuator, Span,
},
lexer::{token::Numeric, InputElement, TokenKind},
parser::{
@ -75,71 +75,99 @@ where
fn parse(self, cursor: &mut Cursor<R>) -> ParseResult {
let _timer = BoaProfiler::global().start_event("PrimaryExpression", "Parsing");
let tok = cursor.next()?.ok_or(ParseError::AbruptEnd)?;
let tok = cursor.peek(0)?.ok_or(ParseError::AbruptEnd)?;
match tok.kind() {
TokenKind::Keyword(Keyword::This) => Ok(Node::This),
TokenKind::Keyword(Keyword::This) => Ok(Node::new(
NodeKind::This,
cursor.next()?.expect("token disappeared").span(),
)),
TokenKind::Keyword(Keyword::Function) => {
FunctionExpression.parse(cursor).map(Node::from)
let (expr, span) = FunctionExpression.parse(cursor)?;
Ok(Node::new(NodeKind::from(expr), span))
}
TokenKind::Keyword(Keyword::Async) => {
let (expr, span) = AsyncFunctionExpression::new(self.allow_yield).parse(cursor)?;
Ok(Node::new(NodeKind::from(expr), span))
}
TokenKind::Keyword(Keyword::Async) => AsyncFunctionExpression::new(self.allow_yield)
.parse(cursor)
.map(Node::from),
TokenKind::Punctuator(Punctuator::OpenParen) => {
let span_start = cursor.next()?.expect("token disappeared").span().start();
cursor.set_goal(InputElement::RegExp);
let expr =
Expression::new(true, self.allow_yield, self.allow_await).parse(cursor)?;
cursor.expect(Punctuator::CloseParen, "primary expression")?;
Ok(expr)
let span_end = cursor
.expect(Punctuator::CloseParen, "primary expression")?
.span()
.end();
Ok(Node::new(expr.into_kind(), Span::new(span_start, span_end)))
}
TokenKind::Punctuator(Punctuator::OpenBracket) => {
cursor.set_goal(InputElement::RegExp);
ArrayLiteral::new(self.allow_yield, self.allow_await)
.parse(cursor)
.map(Node::ArrayDecl)
let (expr, span) =
ArrayLiteral::new(self.allow_yield, self.allow_await).parse(cursor)?;
Ok(Node::new(expr, span))
}
TokenKind::Punctuator(Punctuator::OpenBlock) => {
cursor.set_goal(InputElement::RegExp);
Ok(ObjectLiteral::new(self.allow_yield, self.allow_await)
.parse(cursor)?
.into())
}
TokenKind::BooleanLiteral(boolean) => Ok(Const::from(*boolean).into()),
TokenKind::NullLiteral => Ok(Const::Null.into()),
TokenKind::Identifier(ident) => Ok(Identifier::from(ident.as_ref()).into()), // TODO: IdentifierReference
TokenKind::StringLiteral(s) => Ok(Const::from(s.as_ref()).into()),
TokenKind::TemplateNoSubstitution(template_string) => {
Ok(Const::from(template_string.to_owned_cooked().map_err(ParseError::lex)?).into())
}
TokenKind::NumericLiteral(Numeric::Integer(num)) => Ok(Const::from(*num).into()),
TokenKind::NumericLiteral(Numeric::Rational(num)) => Ok(Const::from(*num).into()),
TokenKind::NumericLiteral(Numeric::BigInt(num)) => Ok(Const::from(num.clone()).into()),
TokenKind::RegularExpressionLiteral(body, flags) => {
Ok(Node::from(New::from(Call::new(
Identifier::from("RegExp"),
let (expr, span) =
ObjectLiteral::new(self.allow_yield, self.allow_await).parse(cursor)?;
Ok(Node::new(expr, span))
}
TokenKind::BooleanLiteral(boolean) => Ok(Node::new(Const::from(*boolean), tok.span())),
TokenKind::NullLiteral => Ok(Node::new(Const::Null, tok.span())),
TokenKind::Identifier(ident) => {
Ok(Node::new(Identifier::from(ident.as_ref()), tok.span()))
} // TODO: IdentifierReference
TokenKind::StringLiteral(s) => Ok(Node::new(Const::from(s.as_ref()), tok.span())),
TokenKind::TemplateNoSubstitution(template_string) => Ok(Node::new(
Const::from(template_string.to_owned_cooked().map_err(ParseError::lex)?),
tok.span(),
)),
TokenKind::NumericLiteral(Numeric::Integer(num)) => {
Ok(Node::new(Const::from(*num), tok.span()))
}
TokenKind::NumericLiteral(Numeric::Rational(num)) => {
Ok(Node::new(Const::from(*num), tok.span()))
}
TokenKind::NumericLiteral(Numeric::BigInt(num)) => {
Ok(Node::new(Const::from(num.clone()), tok.span()))
}
TokenKind::RegularExpressionLiteral(body, flags) => Ok(Node::new(
// FIXME: properly use flags and body spans, maybe a new AST node.
New::from(Call::new(
Node::new(Identifier::from("RegExp"), tok.span()),
vec![
Const::from(body.as_ref()).into(),
Const::from(flags.to_string()).into(),
Node::new(Const::from(body.as_ref()), tok.span()),
Node::new(Const::from(flags.to_string()), tok.span()),
],
))))
}
)),
tok.span(),
)),
TokenKind::Punctuator(Punctuator::Div) => {
let tok = cursor.lex_regex(tok.span().start())?;
if let TokenKind::RegularExpressionLiteral(body, flags) = tok.kind() {
Ok(Node::from(New::from(Call::new(
Identifier::from("RegExp"),
// FIXME: properly use flags and body spans, maybe a new AST node.
Ok(Node::new(
New::from(Call::new(
Node::new(Identifier::from("RegExp"), tok.span()),
vec![
Const::from(body.as_ref()).into(),
Const::from(flags.to_string()).into(),
Node::new(Const::from(body.as_ref()), tok.span()),
Node::new(Const::from(flags.to_string()), tok.span()),
],
))))
)),
tok.span(),
))
} else {
// A regex was expected and nothing else.
Err(ParseError::unexpected(tok, "regular expression literal"))
}
}
TokenKind::TemplateMiddle(template_string) => TemplateLiteral::new(
TokenKind::TemplateMiddle(template_string) => {
let (expr, span) = TemplateLiteral::new(
self.allow_yield,
self.allow_await,
tok.span().start(),
@ -148,8 +176,10 @@ where
.map_err(ParseError::lex)?
.as_ref(),
)
.parse(cursor)
.map(Node::TemplateLit),
.parse(cursor)?;
Ok(Node::new(expr, span))
}
_ => Err(ParseError::unexpected(tok.clone(), "primary expression")),
}
}
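
Primary expressions now always come wrapped: literal and keyword variants that used to be converted straight into the old `Node` enum are paired with the span of the token they came from, and a parenthesised expression is re-spanned via `into_kind()` so the parentheses are included. A simplified, stand-alone model of the `Node`/`NodeKind` split this relies on; the real definitions are in `boa/src/syntax/ast/node/mod.rs` and will differ in detail:

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct Position { line: u32, column: u32 }

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct Span { start: Position, end: Position }

    #[allow(dead_code)]
    #[derive(Debug, Clone, PartialEq)]
    enum NodeKind {
        Const(i64),
        This,
    }

    // Assumption: a thin wrapper pairing the syntactic kind with its source span.
    #[derive(Debug, Clone, PartialEq)]
    struct Node { kind: NodeKind, span: Span }

    impl Node {
        fn new<K: Into<NodeKind>>(kind: K, span: Span) -> Self {
            Node { kind: kind.into(), span }
        }
        fn kind(&self) -> &NodeKind { &self.kind }
        fn into_kind(self) -> NodeKind { self.kind }
        fn span(&self) -> Span { self.span }
    }

    impl From<i64> for NodeKind {
        fn from(value: i64) -> Self { NodeKind::Const(value) }
    }

    fn main() {
        let tok_span = Span {
            start: Position { line: 1, column: 2 },
            end: Position { line: 1, column: 4 },
        };
        // Mirrors `Node::new(Const::from(*num), tok.span())`.
        let literal = Node::new(42_i64, tok_span);
        assert_eq!(literal.kind(), &NodeKind::Const(42));
        assert_eq!(literal.span(), tok_span);

        // Mirrors `Node::new(expr.into_kind(), Span::new(span_start, span_end))`
        // for a parenthesised expression: same kind, wider span.
        let paren_span = Span {
            start: Position { line: 1, column: 1 },
            end: Position { line: 1, column: 5 },
        };
        let respanned = Node::new(literal.into_kind(), paren_span);
        assert_eq!(respanned.span(), paren_span);
    }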

37
boa/src/syntax/parser/expression/primary/object_initializer/mod.rs

@ -13,8 +13,8 @@ use crate::syntax::lexer::TokenKind;
use crate::{
syntax::{
ast::{
node::{self, FunctionExpr, MethodDefinitionKind, Node, Object},
Punctuator,
node::{self, FunctionExpr, MethodDefinitionKind, Object},
Node, Punctuator, Span,
},
parser::{
expression::AssignmentExpression,
@ -58,22 +58,28 @@ impl<R> TokenParser<R> for ObjectLiteral
where
R: Read,
{
type Output = Object;
type Output = (Object, Span);
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
let _timer = BoaProfiler::global().start_event("ObjectLiteral", "Parsing");
let span_start = cursor
.expect(Punctuator::OpenBlock, "object literal")?
.span()
.start();
let mut elements = Vec::new();
loop {
if cursor.next_if(Punctuator::CloseBlock)?.is_some() {
break;
let span_end = loop {
if let Some(tok) = cursor.next_if(Punctuator::CloseBlock)? {
break tok.span().end();
}
elements
.push(PropertyDefinition::new(self.allow_yield, self.allow_await).parse(cursor)?);
if cursor.next_if(Punctuator::CloseBlock)?.is_some() {
break;
if let Some(tok) = cursor.next_if(Punctuator::CloseBlock)? {
break tok.span().end();
}
if cursor.next_if(Punctuator::Comma)?.is_none() {
@ -87,9 +93,9 @@ where
"object literal",
));
}
}
};
Ok(Object::from(elements))
Ok((Object::from(elements), Span::new(span_start, span_end)))
}
}
@ -312,7 +318,14 @@ where
fn parse(self, cursor: &mut Cursor<R>) -> ParseResult {
let _timer = BoaProfiler::global().start_event("Initializer", "Parsing");
cursor.expect(Punctuator::Assign, "initializer")?;
AssignmentExpression::new(self.allow_in, self.allow_yield, self.allow_await).parse(cursor)
let span_start = cursor
.expect(Punctuator::Assign, "initializer")?
.span()
.start();
let node = AssignmentExpression::new(self.allow_in, self.allow_yield, self.allow_await)
.parse(cursor)?;
let span_end = node.span().end();
Ok(Node::new(node.into_kind(), Span::new(span_start, span_end)))
}
}

24
boa/src/syntax/parser/expression/primary/template/mod.rs

@ -10,13 +10,12 @@
use crate::{
profiler::BoaProfiler,
syntax::{
ast::node::template::{TemplateElement, TemplateLit},
ast::Position,
ast::Punctuator,
ast::{
node::template::{TemplateElement, TemplateLit},
Position, Punctuator, Span,
},
lexer::TokenKind,
parser::cursor::Cursor,
parser::expression::Expression,
parser::{AllowAwait, AllowYield, ParseError, TokenParser},
parser::{expression::Expression, AllowAwait, AllowYield, Cursor, ParseError, TokenParser},
},
};
use std::io::Read;
@ -57,7 +56,7 @@ impl<R> TokenParser<R> for TemplateLiteral
where
R: Read,
{
type Output = TemplateLit;
type Output = (TemplateLit, Span);
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
let _timer = BoaProfiler::global().start_event("TemplateLiteral", "Parsing");
@ -73,8 +72,9 @@ where
"template literal",
)?;
loop {
match cursor.lex_template(self.start)?.kind() {
let span_end = loop {
let tok = cursor.lex_template(self.start)?;
match tok.kind() {
TokenKind::TemplateMiddle(template_string) => {
let cooked = template_string.to_owned_cooked().map_err(ParseError::lex)?;
@ -91,7 +91,7 @@ where
let cooked = template_string.to_owned_cooked().map_err(ParseError::lex)?;
elements.push(TemplateElement::String(cooked));
return Ok(TemplateLit::new(elements));
break tok.span().end();
}
_ => {
return Err(ParseError::general(
@ -100,6 +100,8 @@ where
))
}
}
}
};
Ok((TemplateLit::new(elements), Span::new(self.start, span_end)))
}
}

10
boa/src/syntax/parser/mod.rs

@ -8,10 +8,12 @@ mod statement;
#[cfg(test)]
mod tests;
use self::cursor::Cursor;
pub use self::error::{ParseError, ParseResult};
use crate::syntax::{ast::node::StatementList, lexer::TokenKind};
use cursor::Cursor;
use crate::syntax::{
ast::{node::StatementList, Position},
lexer::TokenKind,
};
use std::io::Read;
@ -132,7 +134,7 @@ where
}
ScriptBody.parse(cursor)
}
None => Ok(StatementList::from(Vec::new())),
None => Ok(StatementList::new(Vec::new(), Position::new(0, 0).into())),
}
}
}

12
boa/src/syntax/parser/statement/block/mod.rs

@ -16,7 +16,7 @@ use crate::syntax::lexer::TokenKind;
use crate::{
profiler::BoaProfiler,
syntax::{
ast::{node, Punctuator},
ast::{node, Punctuator, Span},
parser::{AllowAwait, AllowReturn, AllowYield, Cursor, ParseError, TokenParser},
},
};
@ -73,11 +73,17 @@ where
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
let _timer = BoaProfiler::global().start_event("Block", "Parsing");
cursor.expect(Punctuator::OpenBlock, "block")?;
let start_token = cursor.expect(Punctuator::OpenBlock, "block")?;
if let Some(tk) = cursor.peek(0)? {
if tk.kind() == &TokenKind::Punctuator(Punctuator::CloseBlock) {
cursor.next()?.expect("} token vanished");
return Ok(node::Block::from(vec![]));
let span = Span::new(start_token.span().start(), tk.span().end());
return Ok(node::Block::from(node::StatementList::new(
Vec::new(),
span,
)));
}
}

12
boa/src/syntax/parser/statement/declaration/hoistable/async_function_decl/mod.rs

@ -2,7 +2,7 @@
mod tests;
use crate::syntax::{
ast::{node::AsyncFunctionDecl, Keyword, Punctuator},
ast::{node::AsyncFunctionDecl, Keyword, Punctuator, Span},
lexer::TokenKind,
parser::{
function::FormalParameters,
@ -48,10 +48,10 @@ impl<R> TokenParser<R> for AsyncFunctionDeclaration
where
R: Read,
{
type Output = AsyncFunctionDecl;
type Output = (AsyncFunctionDecl, Span);
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
cursor.expect(Keyword::Async, "async function declaration")?;
let start_token = cursor.expect(Keyword::Async, "async function declaration")?;
cursor.peek_expect_no_lineterminator(0, "async function declaration")?;
cursor.expect(Keyword::Function, "async function declaration")?;
let tok = cursor.peek(0)?;
@ -84,7 +84,7 @@ where
let body = FunctionBody::new(false, true).parse(cursor)?;
cursor.expect(Punctuator::CloseBlock, "async function declaration")?;
let end_token = cursor.expect(Punctuator::CloseBlock, "async function declaration")?;
// It is a Syntax Error if any element of the BoundNames of FormalParameters
// also occurs in the LexicallyDeclaredNames of FunctionBody.
@ -104,6 +104,8 @@ where
}
}
Ok(AsyncFunctionDecl::new(name, params, body))
let span = Span::new(start_token.span().start(), end_token.span().end());
Ok((AsyncFunctionDecl::new(name, params, body), span))
}
}

12
boa/src/syntax/parser/statement/declaration/hoistable/function_decl/mod.rs

@ -2,7 +2,7 @@
mod tests;
use crate::syntax::{
ast::{node::FunctionDecl, Keyword, Punctuator},
ast::{node::FunctionDecl, Keyword, Punctuator, Span},
parser::{
function::FormalParameters,
function::FunctionBody,
@ -48,10 +48,10 @@ impl<R> TokenParser<R> for FunctionDeclaration
where
R: Read,
{
type Output = FunctionDecl;
type Output = (FunctionDecl, Span);
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
cursor.expect(Keyword::Function, "function declaration")?;
let start_token = cursor.expect(Keyword::Function, "function declaration")?;
// TODO: If self.is_default, then this can be empty.
let name = BindingIdentifier::new(self.allow_yield, self.allow_await).parse(cursor)?;
@ -65,7 +65,7 @@ where
let body = FunctionBody::new(self.allow_yield, self.allow_await).parse(cursor)?;
cursor.expect(Punctuator::CloseBlock, "function declaration")?;
let end_token = cursor.expect(Punctuator::CloseBlock, "function declaration")?;
// It is a Syntax Error if any element of the BoundNames of FormalParameters
// also occurs in the LexicallyDeclaredNames of FunctionBody.
@ -85,6 +85,8 @@ where
}
}
Ok(FunctionDecl::new(name, params, body))
let span = Span::new(start_token.span().start(), end_token.span().end());
Ok((FunctionDecl::new(name, params, body), span))
}
}

6
boa/src/syntax/parser/statement/declaration/hoistable/mod.rs

@ -65,16 +65,18 @@ where
let _timer = BoaProfiler::global().start_event("HoistableDeclaration", "Parsing");
let tok = cursor.peek(0)?.ok_or(ParseError::AbruptEnd)?;
let start = tok.span().start();
match tok.kind() {
TokenKind::Keyword(Keyword::Function) => {
FunctionDeclaration::new(self.allow_yield, self.allow_await, self.is_default)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
TokenKind::Keyword(Keyword::Async) => {
AsyncFunctionDeclaration::new(self.allow_yield, self.allow_await, false)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
_ => unreachable!("unknown token found: {:?}", tok),
}

2
boa/src/syntax/parser/statement/iteration/for_statement.rs

@ -78,7 +78,7 @@ where
Some(
VariableDeclarationList::new(false, self.allow_yield, self.allow_await)
.parse(cursor)
.map(Node::from)?,
.map(|(kind, span)| Node::new(kind, span))?,
)
}
TokenKind::Keyword(Keyword::Let) | TokenKind::Keyword(Keyword::Const) => {

19
boa/src/syntax/parser/statement/labelled_stm/mod.rs

@ -2,9 +2,8 @@ use std::io::Read;
use super::{LabelIdentifier, Statement};
use crate::{
syntax::ast::Node,
syntax::{
ast::Punctuator,
ast::{Node, NodeKind, Punctuator},
parser::{
cursor::Cursor, error::ParseError, AllowAwait, AllowReturn, AllowYield, TokenParser,
},
@ -49,7 +48,9 @@ where
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
let _timer = BoaProfiler::global().start_event("Label", "Parsing");
let name = LabelIdentifier::new(self.allow_yield, self.allow_await).parse(cursor)?;
let (name, name_span) =
LabelIdentifier::new(self.allow_yield, self.allow_await).parse(cursor)?;
cursor.expect(Punctuator::Colon, "Labelled Statement")?;
let mut stmt =
Statement::new(self.allow_yield, self.allow_await, self.allow_return).parse(cursor)?;
@ -60,12 +61,12 @@ where
}
fn set_label_for_node(stmt: &mut Node, name: Box<str>) {
match stmt {
Node::ForLoop(ref mut for_loop) => for_loop.set_label(name),
Node::ForOfLoop(ref mut for_of_loop) => for_of_loop.set_label(name),
Node::ForInLoop(ref mut for_in_loop) => for_in_loop.set_label(name),
Node::DoWhileLoop(ref mut do_while_loop) => do_while_loop.set_label(name),
Node::WhileLoop(ref mut while_loop) => while_loop.set_label(name),
match stmt.kind() {
NodeKind::ForLoop(ref mut for_loop) => for_loop.set_label(name),
NodeKind::ForOfLoop(ref mut for_of_loop) => for_of_loop.set_label(name),
NodeKind::ForInLoop(ref mut for_in_loop) => for_in_loop.set_label(name),
NodeKind::DoWhileLoop(ref mut do_while_loop) => do_while_loop.set_label(name),
NodeKind::WhileLoop(ref mut while_loop) => while_loop.set_label(name),
_ => (),
}
}

59
boa/src/syntax/parser/statement/mod.rs

@ -40,7 +40,7 @@ use super::{AllowAwait, AllowReturn, AllowYield, Cursor, ParseError, TokenParser
use crate::{
syntax::{
ast::{node, Keyword, Node, Punctuator},
ast::{node, Keyword, Node, NodeKind, Punctuator, Span},
lexer::{Error as LexError, InputElement, Position, TokenKind},
parser::expression::await_expr::AwaitExpression,
},
@ -106,7 +106,7 @@ where
{
type Output = Node;
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
fn parse(self, cursor: &mut Cursor<R>) -> ParseResult {
let _timer = BoaProfiler::global().start_event("Statement", "Parsing");
// TODO: add BreakableStatement and divide Whiles, fors and so on to another place.
let tok = cursor.peek(0)?.ok_or(ParseError::AbruptEnd)?;
@ -114,37 +114,37 @@ where
match tok.kind() {
TokenKind::Keyword(Keyword::Await) => AwaitExpression::new(self.allow_yield)
.parse(cursor)
.map(Node::from),
.map(|(kind, span)| Node::new(kind, span)),
TokenKind::Keyword(Keyword::If) => {
IfStatement::new(self.allow_yield, self.allow_await, self.allow_return)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
TokenKind::Keyword(Keyword::Var) => {
VariableStatement::new(self.allow_yield, self.allow_await)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
TokenKind::Keyword(Keyword::While) => {
WhileStatement::new(self.allow_yield, self.allow_await, self.allow_return)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
TokenKind::Keyword(Keyword::Do) => {
DoWhileStatement::new(self.allow_yield, self.allow_await, self.allow_return)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
TokenKind::Keyword(Keyword::For) => {
ForStatement::new(self.allow_yield, self.allow_await, self.allow_return)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
TokenKind::Keyword(Keyword::Return) => {
if self.allow_return.0 {
ReturnStatement::new(self.allow_yield, self.allow_await)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
} else {
Err(ParseError::unexpected(tok.clone(), "statement"))
}
@ -152,37 +152,37 @@ where
TokenKind::Keyword(Keyword::Break) => {
BreakStatement::new(self.allow_yield, self.allow_await)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
TokenKind::Keyword(Keyword::Continue) => {
ContinueStatement::new(self.allow_yield, self.allow_await)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
TokenKind::Keyword(Keyword::Try) => {
TryStatement::new(self.allow_yield, self.allow_await, self.allow_return)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
TokenKind::Keyword(Keyword::Throw) => {
ThrowStatement::new(self.allow_yield, self.allow_await)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
TokenKind::Keyword(Keyword::Switch) => {
SwitchStatement::new(self.allow_yield, self.allow_await, self.allow_return)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
TokenKind::Punctuator(Punctuator::OpenBlock) => {
BlockStatement::new(self.allow_yield, self.allow_await, self.allow_return)
.parse(cursor)
.map(Node::from)
.map(|(kind, span)| Node::new(kind, span))
}
TokenKind::Punctuator(Punctuator::Semicolon) => {
// parse the EmptyStatement
cursor.next().expect("semicolon disappeared");
Ok(Node::Empty)
Ok(Node::new(NodeKind::Empty, tok.span()))
}
TokenKind::Identifier(_) => {
// Labelled Statement check
@ -200,7 +200,7 @@ where
self.allow_return,
)
.parse(cursor)
.map(Node::from);
.map(|(kind, span)| Node::new(kind, span));
}
ExpressionStatement::new(self.allow_yield, self.allow_await).parse(cursor)
@ -254,7 +254,7 @@ impl<R> TokenParser<R> for StatementList
where
R: Read,
{
type Output = node::StatementList;
type Output = Option<node::StatementList>;
/// The function parses a node::StatementList using the StatementList's
/// break_nodes to know when to terminate.
@ -297,9 +297,9 @@ where
let mut var_declared_names: HashSet<&str> = HashSet::new();
// TODO: Use more helpful positions in errors when spans are added to Nodes
for item in &items {
for item in items.iter().map(Node::kind) {
match item {
Node::LetDeclList(decl_list) | Node::ConstDeclList(decl_list) => {
NodeKind::LetDeclList(decl_list) | NodeKind::ConstDeclList(decl_list) => {
for decl in decl_list.as_ref() {
// if name in VarDeclaredNames or can't be added to
// LexicallyDeclaredNames, raise an error
@ -316,7 +316,7 @@ where
}
}
}
Node::VarDeclList(decl_list) => {
NodeKind::VarDeclList(decl_list) => {
for decl in decl_list.as_ref() {
// if name in LexicallyDeclaredNames, raise an error
if lexically_declared_names.contains(decl.name()) {
@ -339,7 +339,14 @@ where
items.sort_by(Node::hoistable_order);
Ok(items.into())
if items.is_empty() {
Ok(None)
} else {
let start = items.first().expect("item disappeared").span().start();
let end = items.last().expect("item disappeared").span().end();
Ok(Some(node::StatementList::new(items, Span::new(start, end))))
}
}
}
@ -449,7 +456,7 @@ impl<R> TokenParser<R> for BindingIdentifier
where
R: Read,
{
type Output = Box<str>;
type Output = (Box<str>, Span);
/// Strict mode parsing as per <https://tc39.es/ecma262/#sec-identifiers-static-semantics-early-errors>.
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
@ -457,7 +464,7 @@ where
let next_token = cursor.next()?.ok_or(ParseError::AbruptEnd)?;
match next_token.kind() {
let ident = match next_token.kind() {
TokenKind::Identifier(ref s) => Ok(s.clone()),
TokenKind::Keyword(k @ Keyword::Yield) if !self.allow_yield.0 => {
if cursor.strict_mode() {
@ -484,6 +491,8 @@ where
next_token,
"binding identifier",
)),
}
}?;
Ok((ident, next_token.span()))
}
}
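
Because a `node::StatementList` now carries a span computed from its first and last items, the parser returns `Option<node::StatementList>` and yields `None` when nothing was parsed. A stand-alone sketch of that span derivation over a slice of item spans (simplified types, positions flattened to integers):

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct Span { start: u32, end: u32 }

    // Mirrors taking `items.first()...span().start()` and `items.last()...span().end()`
    // above, but returns `None` instead of unwrapping when the list is empty.
    fn list_span(item_spans: &[Span]) -> Option<Span> {
        let first = item_spans.first()?;
        let last = item_spans.last()?;
        Some(Span { start: first.start, end: last.end })
    }

    fn main() {
        let items = [Span { start: 1, end: 5 }, Span { start: 7, end: 20 }];
        assert_eq!(list_span(&items), Some(Span { start: 1, end: 20 }));
        assert_eq!(list_span(&[]), None);
    }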

56
boa/src/syntax/parser/statement/variable/mod.rs

@ -4,7 +4,7 @@ use crate::{
syntax::{
ast::{
node::{Declaration, DeclarationList},
Keyword, Punctuator,
Keyword, Punctuator, Span,
},
lexer::TokenKind,
parser::{
@ -52,18 +52,25 @@ impl<R> TokenParser<R> for VariableStatement
where
R: Read,
{
type Output = DeclarationList;
type Output = (DeclarationList, Span);
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
let _timer = BoaProfiler::global().start_event("VariableStatement", "Parsing");
cursor.expect(Keyword::Var, "variable statement")?;
let decl_list =
let start_token = cursor.expect(Keyword::Var, "variable statement")?;
let (decl_list, list_span) =
VariableDeclarationList::new(true, self.allow_yield, self.allow_await).parse(cursor)?;
cursor.expect_semicolon("variable statement")?;
let end_token = cursor.expect_semicolon("variable statement")?;
let end_pos = if let Some(tk) = end_token {
tk.span().end()
} else {
list_span.end()
};
let span = Span::new(start_token.span().start(), end_pos);
Ok(decl_list)
Ok((decl_list, span))
}
}
@ -106,16 +113,21 @@ impl<R> TokenParser<R> for VariableDeclarationList
where
R: Read,
{
type Output = DeclarationList;
type Output = (DeclarationList, Span);
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
let mut list = Vec::new();
let mut start_pos = None;
let mut end_pos = None;
loop {
list.push(
let (decl, span) =
VariableDeclaration::new(self.allow_in, self.allow_yield, self.allow_await)
.parse(cursor)?,
);
.parse(cursor)?;
list.push(decl);
start_pos.get_or_insert(span.start());
end_pos.replace(span.end());
match cursor.peek_semicolon()? {
SemicolonResult::NotFound(tk)
@ -127,7 +139,12 @@ where
}
}
Ok(DeclarationList::Var(list.into()))
let span = Span::new(
start_pos.expect("no starting position"),
end_pos.expect("no ending position"),
);
Ok((DeclarationList::Var(list.into()), span))
}
}
@ -164,16 +181,22 @@ impl<R> TokenParser<R> for VariableDeclaration
where
R: Read,
{
type Output = Declaration;
type Output = (Declaration, Span);
fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
// TODO: BindingPattern
let name = BindingIdentifier::new(self.allow_yield, self.allow_await).parse(cursor)?;
let (name, span) =
BindingIdentifier::new(self.allow_yield, self.allow_await).parse(cursor)?;
let span_start = span.start();
let mut span_end = span.end();
let init = if let Some(t) = cursor.peek(0)? {
if *t.kind() == TokenKind::Punctuator(Punctuator::Assign) {
Some(Initializer::new(true, self.allow_yield, self.allow_await).parse(cursor)?)
let init =
Initializer::new(true, self.allow_yield, self.allow_await).parse(cursor)?;
span_end = init.span().end();
Some(init)
} else {
None
}
@ -181,6 +204,9 @@ where
None
};
Ok(Declaration::new(name, init))
Ok((
Declaration::new(name, init),
Span::new(span_start, span_end),
))
}
}
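
The declaration-list parser builds its span incrementally: the first declaration pins the start position and every subsequent declaration moves the end forward. A small stand-alone version of that `Option` bookkeeping with `get_or_insert` and `replace`, positions reduced to plain integers for the example:

    fn main() {
        // (start, end) column pairs for the declarations in `var a = 1, b = 2, c = 3;`.
        let decl_spans = [(5_u32, 10), (12, 17), (19, 24)];

        let mut start_pos: Option<u32> = None;
        let mut end_pos: Option<u32> = None;
        for (start, end) in decl_spans {
            // Keeps the first start seen, as in `start_pos.get_or_insert(span.start())`.
            start_pos.get_or_insert(start);
            // Always advances to the latest end, as in `end_pos.replace(span.end())`.
            end_pos.replace(end);
        }

        assert_eq!(start_pos, Some(5));
        assert_eq!(end_pos, Some(24));
    }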

2
test262

@ -1 +1 @@
Subproject commit e793512b55c199de6abc392d1be4de7325dae544
Subproject commit ebb6c34fa5dd76a6bea01c54ed7b182596492176

110
yarn.lock

@ -73,9 +73,9 @@
integrity sha512-1z8k4wzFnNjVK/tlxvrWuK5WMt6mydWWP7+zvH5eFep4oj+UkrfiJTRtjCeBXNpwaA/FYqqtb4/QS4ianFpIRA==
"@types/node@*":
version "15.12.1"
resolved "https://registry.yarnpkg.com/@types/node/-/node-15.12.1.tgz#9b60797dee1895383a725f828a869c86c6caa5c2"
integrity sha512-zyxJM8I1c9q5sRMtVF+zdd13Jt6RU4r4qfhTd7lQubyThvLfx6yYekWSQjGCGV2Tkecgxnlpl/DNlb6Hg+dmEw==
version "15.12.5"
resolved "https://registry.yarnpkg.com/@types/node/-/node-15.12.5.tgz#9a78318a45d75c9523d2396131bd3cca54b2d185"
integrity sha512-se3yX7UHv5Bscf8f1ERKvQOD6sTyycH3hdaoozvaLxgUiY5lIGEeH37AD0G0Qi9kPqihPn0HOfd2yaIEN9VwEg==
"@types/source-list-map@*":
version "0.1.2"
@ -282,9 +282,9 @@ accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7:
negotiator "0.6.2"
acorn@^8.2.1:
version "8.3.0"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.3.0.tgz#1193f9b96c4e8232f00b11a9edff81b2c8b98b88"
integrity sha512-tqPKHZ5CaBJw0Xmy0ZZvLs1qTV+BNFSyvn77ASXkpBNfIRk8ev26fKrD9iLGwGA9zedPao52GSHzq8lyZG0NUw==
version "8.4.1"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.4.1.tgz#56c36251fc7cabc7096adc18f05afe814321a28c"
integrity sha512-asabaBSkEKosYKMITunzX177CXxQ4Q8BSSzMTKD+FefUhipQC70gfW5SiUDhYQ3vk8G+81HqQk7Fv9OXwwn9KA==
ajv-errors@^1.0.0:
version "1.0.1"
@ -341,7 +341,7 @@ anymatch@^2.0.0:
micromatch "^3.1.4"
normalize-path "^2.1.1"
anymatch@^3.0.0, anymatch@~3.1.1:
anymatch@^3.0.0, anymatch@~3.1.2:
version "3.1.2"
resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716"
integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==
@ -605,9 +605,9 @@ camelcase@^5.0.0:
integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
caniuse-lite@^1.0.30001219:
version "1.0.30001235"
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001235.tgz#ad5ca75bc5a1f7b12df79ad806d715a43a5ac4ed"
integrity sha512-zWEwIVqnzPkSAXOUlQnPW2oKoYb2aLQ4Q5ejdjBcnH63rfypaW34CxaeBn1VMya2XaEU3P/R2qHpWyj+l0BT1A==
version "1.0.30001240"
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001240.tgz#ec15d125b590602c8731545c5351ff054ad2d52f"
integrity sha512-nb8mDzfMdxBDN7ZKx8chWafAdBp5DAAlpWvNyUGe5tcDWd838zpzDN3Rah9cjCqhfOKkrvx40G2SDtP0qiWX/w==
chalk@^2.4.1:
version "2.4.2"
@ -638,19 +638,19 @@ chokidar@^2.1.8:
fsevents "^1.2.7"
chokidar@^3.4.1:
version "3.5.1"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.1.tgz#ee9ce7bbebd2b79f49f304799d5468e31e14e68a"
integrity sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==
version "3.5.2"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.2.tgz#dba3976fcadb016f66fd365021d91600d01c1e75"
integrity sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ==
dependencies:
anymatch "~3.1.1"
anymatch "~3.1.2"
braces "~3.0.2"
glob-parent "~5.1.0"
glob-parent "~5.1.2"
is-binary-path "~2.1.0"
is-glob "~4.0.1"
normalize-path "~3.0.0"
readdirp "~3.5.0"
readdirp "~3.6.0"
optionalDependencies:
fsevents "~2.3.1"
fsevents "~2.3.2"
chrome-trace-event@^1.0.2:
version "1.0.3"
@ -1068,9 +1068,9 @@ ee-first@1.1.1:
integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=
electron-to-chromium@^1.3.723:
version "1.3.749"
resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.749.tgz#0ecebc529ceb49dd2a7c838ae425236644c3439a"
integrity sha512-F+v2zxZgw/fMwPz/VUGIggG4ZndDsYy0vlpthi3tjmDZlcfbhN5mYW0evXUsBr2sUtuDANFtle410A9u/sd/4A==
version "1.3.759"
resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.759.tgz#b0d652d376831470a4c230ba721da2427bfb996a"
integrity sha512-nM76xH0t2FBH5iMEZDVc3S/qbdKjGH7TThezxC8k1Q7w7WHvIAyJh8lAe2UamGfdRqBTjHfPDn82LJ0ksCiB9g==
emoji-regex@^7.0.1:
version "7.0.3"
@ -1298,16 +1298,15 @@ fast-deep-equal@^3.1.1:
integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==
fast-glob@^3.1.1, fast-glob@^3.2.5:
version "3.2.5"
resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.5.tgz#7939af2a656de79a4f1901903ee8adcaa7cb9661"
integrity sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg==
version "3.2.6"
resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.6.tgz#434dd9529845176ea049acc9343e8282765c6e1a"
integrity sha512-GnLuqj/pvQ7pX8/L4J84nijv6sAnlwvSDpMkJi9i7nPmPxGtRPkBSStfvDW5l6nMdX9VWe+pkKWFTgD+vF2QSQ==
dependencies:
"@nodelib/fs.stat" "^2.0.2"
"@nodelib/fs.walk" "^1.2.3"
glob-parent "^5.1.0"
glob-parent "^5.1.2"
merge2 "^1.3.0"
micromatch "^4.0.2"
picomatch "^2.2.1"
micromatch "^4.0.4"
fast-json-stable-stringify@^2.0.0:
version "2.1.0"
@ -1431,7 +1430,7 @@ fsevents@^1.2.7:
bindings "^1.5.0"
nan "^2.12.1"
fsevents@~2.3.1:
fsevents@~2.3.2:
version "2.3.2"
resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a"
integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==
@ -1480,7 +1479,7 @@ glob-parent@^3.1.0:
is-glob "^3.1.0"
path-dirname "^1.0.0"
glob-parent@^5.1.0, glob-parent@~5.1.0:
glob-parent@^5.1.2, glob-parent@~5.1.2:
version "5.1.2"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
@ -1512,9 +1511,9 @@ glob@^7.0.3, glob@^7.1.3:
path-is-absolute "^1.0.0"
globby@^11.0.3:
version "11.0.3"
resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb"
integrity sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg==
version "11.0.4"
resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.4.tgz#2cbaff77c2f2a62e71e9b2813a67b97a3a3001a5"
integrity sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg==
dependencies:
array-union "^2.1.0"
dir-glob "^3.0.1"
@ -2186,7 +2185,7 @@ micromatch@^3.1.10, micromatch@^3.1.4:
snapdragon "^0.8.1"
to-regex "^3.0.2"
micromatch@^4.0.2:
micromatch@^4.0.4:
version "4.0.4"
resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9"
integrity sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==
@ -2347,9 +2346,9 @@ node-forge@^0.10.0:
integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==
node-releases@^1.1.71:
version "1.1.72"
resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.72.tgz#14802ab6b1039a79a0c7d662b610a5bbd76eacbe"
integrity sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw==
version "1.1.73"
resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.73.tgz#dd4e81ddd5277ff846b80b52bb40c49edf7a7b20"
integrity sha512-uW7fodD6pyW2FZNZnp/Z3hvWKeEW1Y8R1+1CnErE8cXFXzl5blBOoVB41CvMer6P6Q0S5FXDwcHgFd1Wj0U9zg==
normalize-path@^2.1.1:
version "2.1.1"
@ -2693,9 +2692,9 @@ postcss-value-parser@^4.1.0:
integrity sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ==
postcss@^8.2.15:
version "8.3.0"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.3.0.tgz#b1a713f6172ca427e3f05ef1303de8b65683325f"
integrity sha512-+ogXpdAjWGa+fdYY5BQ96V/6tAo+TdSSIMP5huJBIygdWwKtVoB5JWZ7yUd4xZ8r+8Kvvx4nyg/PQ071H4UtcQ==
version "8.3.5"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.3.5.tgz#982216b113412bc20a86289e91eb994952a5b709"
integrity sha512-NxTuJocUhYGsMiMFHDUkmjSKT3EdH4/WbGF6GCi1NDGk+vbcUTun4fpbOqaPtD8IIsztA2ilZm2DhYCuyN58gA==
dependencies:
colorette "^1.2.2"
nanoid "^3.1.23"
@ -2823,10 +2822,10 @@ readdirp@^2.2.1:
micromatch "^3.1.10"
readable-stream "^2.0.2"
readdirp@~3.5.0:
version "3.5.0"
resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.5.0.tgz#9ba74c019b15d365278d2e91bb8c48d7b4d42c9e"
integrity sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==
readdirp@~3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7"
integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==
dependencies:
picomatch "^2.2.1"
@ -3053,13 +3052,6 @@ send@0.17.1:
range-parser "~1.2.1"
statuses "~1.5.0"
serialize-javascript@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-5.0.1.tgz#7886ec848049a462467a97d3d918ebb2aaf934f4"
integrity sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==
dependencies:
randombytes "^2.1.0"
serialize-javascript@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8"
@ -3378,14 +3370,14 @@ tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0:
integrity sha512-FBk4IesMV1rBxX2tfiK8RAmogtWn53puLOQlvO8XuwlgxcYbP4mVPS9Ph4aeamSyyVjOl24aYWAuc8U5kCVwMw==
terser-webpack-plugin@^5.1.3:
version "5.1.3"
resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.1.3.tgz#30033e955ca28b55664f1e4b30a1347e61aa23af"
integrity sha512-cxGbMqr6+A2hrIB5ehFIF+F/iST5ZOxvOmy9zih9ySbP1C2oEWQSOUS+2SNBTjzx5xLKO4xnod9eywdfq1Nb9A==
version "5.1.4"
resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.1.4.tgz#c369cf8a47aa9922bd0d8a94fe3d3da11a7678a1"
integrity sha512-C2WkFwstHDhVEmsmlCxrXUtVklS+Ir1A7twrYzrDrQQOIMOaVAYykaoo/Aq1K0QRkMoY2hhvDQY1cm4jnIMFwA==
dependencies:
jest-worker "^27.0.2"
p-limit "^3.1.0"
schema-utils "^3.0.0"
serialize-javascript "^5.0.1"
serialize-javascript "^6.0.0"
source-map "^0.6.1"
terser "^5.7.0"
@ -3399,9 +3391,9 @@ terser@^4.6.3:
source-map-support "~0.5.12"
terser@^5.7.0:
version "5.7.0"
resolved "https://registry.yarnpkg.com/terser/-/terser-5.7.0.tgz#a761eeec206bc87b605ab13029876ead938ae693"
integrity sha512-HP5/9hp2UaZt5fYkuhNBR8YyRcT8juw8+uFbAme53iN9hblvKnLUTKkmwJG6ocWpIKf8UK4DoeWG4ty0J6S6/g==
version "5.7.1"
resolved "https://registry.yarnpkg.com/terser/-/terser-5.7.1.tgz#2dc7a61009b66bb638305cb2a824763b116bf784"
integrity sha512-b3e+d5JbHAe/JSjwsC3Zn55wsBIM7AsHLjKxT31kGCldgbpFePaFo+PiddtO6uwRZWRw7sPXmAN8dTW61xmnSg==
dependencies:
commander "^2.20.0"
source-map "~0.7.2"
@ -3450,9 +3442,9 @@ toidentifier@1.0.0:
integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==
tslib@^2.0.3:
version "2.2.0"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.2.0.tgz#fb2c475977e35e241311ede2693cee1ec6698f5c"
integrity sha512-gS9GVHRU+RGn5KQM2rllAlR3dU6m7AcpJKdtH8gFvQiC4Otgk98XnmMU+nZenHt/+VhnBPWwgrJsyrdcw6i23w==
version "2.3.0"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.0.tgz#803b8cdab3e12ba581a4ca41c8839bbb0dacb09e"
integrity sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==
type-is@~1.6.17, type-is@~1.6.18:
version "1.6.18"
