Browse Source

Restructure lints in multiple crates (#2447)

This Pull Request restructures the lint deny/warn/allow lists in almost all crates. `boa_engine` will be done in a follow-up PR as the changes there are pretty extensive.
pull/2450/head
raskad 2 years ago
parent
commit
a5e8111fa2
  1. 3
      Cargo.lock
  2. 1
      boa_cli/Cargo.toml
  3. 136
      boa_cli/src/main.rs
  4. 3
      boa_gc/Cargo.toml
  5. 41
      boa_gc/src/cell.rs
  6. 29
      boa_gc/src/internals/ephemeron_box.rs
  7. 10
      boa_gc/src/internals/gc_box.rs
  8. 5
      boa_gc/src/internals/mod.rs
  9. 105
      boa_gc/src/lib.rs
  10. 3
      boa_gc/src/pointers/ephemeron.rs
  11. 28
      boa_gc/src/pointers/gc.rs
  12. 3
      boa_gc/src/test/cell.rs
  13. 3
      boa_gc/src/test/weak.rs
  14. 28
      boa_gc/src/trace.rs
  15. 2
      boa_interner/src/interned_str.rs
  16. 120
      boa_interner/src/lib.rs
  17. 6
      boa_interner/src/raw.rs
  18. 1
      boa_interner/src/sym.rs
  19. 68
      boa_macros/src/lib.rs
  20. 67
      boa_parser/src/error.rs
  21. 3
      boa_parser/src/lexer/comment.rs
  22. 24
      boa_parser/src/lexer/cursor.rs
  23. 1
      boa_parser/src/lexer/error.rs
  24. 35
      boa_parser/src/lexer/identifier.rs
  25. 13
      boa_parser/src/lexer/mod.rs
  26. 10
      boa_parser/src/lexer/number.rs
  27. 12
      boa_parser/src/lexer/operator.rs
  28. 5
      boa_parser/src/lexer/private_identifier.rs
  29. 18
      boa_parser/src/lexer/regex.rs
  30. 7
      boa_parser/src/lexer/spread.rs
  31. 5
      boa_parser/src/lexer/string.rs
  32. 7
      boa_parser/src/lexer/template.rs
  33. 16
      boa_parser/src/lexer/tests.rs
  34. 30
      boa_parser/src/lexer/token.rs
  35. 81
      boa_parser/src/lib.rs
  36. 2
      boa_parser/src/parser/cursor/buffered_lexer/mod.rs
  37. 49
      boa_parser/src/parser/cursor/mod.rs
  38. 3
      boa_parser/src/parser/expression/assignment/mod.rs
  39. 14
      boa_parser/src/parser/expression/left_hand_side/optional/mod.rs
  40. 2
      boa_parser/src/parser/expression/mod.rs
  41. 6
      boa_parser/src/parser/expression/primary/array_initializer/mod.rs
  42. 7
      boa_parser/src/parser/expression/primary/async_function_expression/mod.rs
  43. 7
      boa_parser/src/parser/expression/primary/async_generator_expression/mod.rs
  44. 7
      boa_parser/src/parser/expression/primary/function_expression/mod.rs
  45. 7
      boa_parser/src/parser/expression/primary/generator_expression/mod.rs
  46. 4
      boa_parser/src/parser/expression/primary/mod.rs
  47. 7
      boa_parser/src/parser/expression/primary/object_initializer/mod.rs
  48. 8
      boa_parser/src/parser/expression/update.rs
  49. 4
      boa_parser/src/parser/function/mod.rs
  50. 9
      boa_parser/src/parser/mod.rs
  51. 6
      boa_parser/src/parser/statement/block/mod.rs
  52. 9
      boa_parser/src/parser/statement/break_stm/mod.rs
  53. 6
      boa_parser/src/parser/statement/declaration/hoistable/class_decl/mod.rs
  54. 7
      boa_parser/src/parser/statement/declaration/hoistable/mod.rs
  55. 7
      boa_parser/src/parser/statement/if_stm/mod.rs
  56. 10
      boa_parser/src/parser/statement/iteration/do_while_statement.rs
  57. 4
      boa_parser/src/parser/statement/labelled_stm/mod.rs
  58. 9
      boa_parser/src/parser/statement/return_stm/mod.rs
  59. 81
      boa_profiler/src/lib.rs
  60. 1
      boa_tester/Cargo.toml
  61. 11
      boa_tester/src/exec/js262.rs
  62. 6
      boa_tester/src/exec/mod.rs
  63. 121
      boa_tester/src/main.rs
  64. 12
      boa_tester/src/read.rs
  65. 98
      boa_unicode/src/lib.rs
  66. 101
      boa_wasm/src/lib.rs

3
Cargo.lock generated

@ -107,7 +107,6 @@ version = "0.16.0"
dependencies = [
"boa_ast",
"boa_engine",
"boa_interner",
"boa_parser",
"clap 4.0.26",
"colored",
@ -179,7 +178,6 @@ version = "0.16.0"
dependencies = [
"boa_macros",
"boa_profiler",
"measureme",
]
[[package]]
@ -237,7 +235,6 @@ dependencies = [
"bitflags",
"boa_engine",
"boa_gc",
"boa_interner",
"boa_parser",
"clap 4.0.26",
"color-eyre",

1
boa_cli/Cargo.toml

@ -14,7 +14,6 @@ rust-version.workspace = true
[dependencies]
boa_engine = { workspace = true, features = ["deser", "console"] }
boa_ast = { workspace = true, features = ["serde"]}
boa_interner.workspace = true
boa_parser.workspace = true
rustyline = "10.0.0"
rustyline-derive = "0.7.0"

136
boa_cli/src/main.rs

@ -1,63 +1,64 @@
//! An ECMAScript REPL implementation based on boa_engine.
#![doc(
html_logo_url = "https://raw.githubusercontent.com/boa-dev/boa/main/assets/logo.svg",
html_favicon_url = "https://raw.githubusercontent.com/boa-dev/boa/main/assets/logo.svg"
)]
#![cfg_attr(not(test), deny(clippy::unwrap_used))]
#![warn(
clippy::perf,
clippy::single_match_else,
clippy::dbg_macro,
clippy::doc_markdown,
clippy::wildcard_imports,
clippy::struct_excessive_bools,
clippy::doc_markdown,
clippy::semicolon_if_nothing_returned,
clippy::pedantic
)]
#![warn(missing_docs, clippy::dbg_macro)]
#![deny(
clippy::all,
clippy::cast_lossless,
clippy::redundant_closure_for_method_calls,
clippy::use_self,
clippy::unnested_or_patterns,
clippy::trivially_copy_pass_by_ref,
clippy::needless_pass_by_value,
clippy::match_wildcard_for_single_variants,
clippy::map_unwrap_or,
unused_qualifications,
unused_import_braces,
unused_lifetimes,
unreachable_pub,
trivial_numeric_casts,
// rustdoc,
missing_debug_implementations,
missing_copy_implementations,
deprecated_in_future,
meta_variable_misuse,
non_ascii_idents,
rust_2018_compatibility,
rust_2018_idioms,
// rustc lint groups https://doc.rust-lang.org/rustc/lints/groups.html
warnings,
future_incompatible,
let_underscore,
nonstandard_style,
rust_2018_compatibility,
rust_2018_idioms,
rust_2021_compatibility,
unused,
// rustc allowed-by-default lints https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html
macro_use_extern_crate,
meta_variable_misuse,
missing_abi,
missing_copy_implementations,
missing_debug_implementations,
non_ascii_idents,
noop_method_call,
single_use_lifetimes,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unsafe_op_in_unsafe_fn,
unused_crate_dependencies,
unused_import_braces,
unused_lifetimes,
unused_qualifications,
unused_tuple_struct_fields,
variant_size_differences,
// rustdoc lints https://doc.rust-lang.org/rustdoc/lints.html
rustdoc::broken_intra_doc_links,
rustdoc::private_intra_doc_links,
rustdoc::missing_crate_level_docs,
rustdoc::private_doc_tests,
rustdoc::invalid_codeblock_attributes,
rustdoc::invalid_rust_codeblocks,
rustdoc::bare_urls,
// clippy categories https://doc.rust-lang.org/clippy/
clippy::all,
clippy::correctness,
clippy::suspicious,
clippy::style,
clippy::complexity,
clippy::perf,
clippy::pedantic,
clippy::nursery,
)]
#![allow(
clippy::module_name_repetitions,
clippy::cast_possible_truncation,
clippy::cast_sign_loss,
clippy::cast_precision_loss,
clippy::cast_possible_wrap,
clippy::cast_ptr_alignment,
clippy::missing_panics_doc,
clippy::too_many_lines,
clippy::unreadable_literal,
clippy::missing_inline_in_public_items,
clippy::cognitive_complexity,
clippy::must_use_candidate,
clippy::missing_errors_doc,
clippy::as_conversions,
clippy::let_unit_value,
rustdoc::missing_doc_code_examples
)]
#![allow(clippy::option_if_let_else, clippy::redundant_pub_crate)]
mod helper;
use boa_ast::StatementList;
use boa_engine::Context;
@ -65,7 +66,6 @@ use clap::{Parser, ValueEnum, ValueHint};
use colored::{Color, Colorize};
use rustyline::{config::Config, error::ReadlineError, EditMode, Editor};
use std::{fs::read, fs::OpenOptions, io, path::PathBuf};
mod helper;
#[cfg(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu"))]
#[cfg_attr(
@ -82,7 +82,6 @@ const READLINE_COLOR: Color = Color::Cyan;
// Added #[allow(clippy::option_option)] because to StructOpt an Option<Option<T>>
// is an optional argument that optionally takes a value ([--opt=[val]]).
// https://docs.rs/structopt/0.3.11/structopt/#type-magic
#[allow(clippy::option_option)]
#[derive(Debug, Parser)]
#[command(author, version, about, name = "boa")]
struct Opt {
@ -98,6 +97,7 @@ struct Opt {
ignore_case = true,
value_enum
)]
#[allow(clippy::option_option)]
dump_ast: Option<Option<DumpFormat>>,
/// Dump the AST to stdout with the given format.
@ -111,7 +111,7 @@ struct Opt {
impl Opt {
/// Returns whether a dump flag has been used.
fn has_dump_flag(&self) -> bool {
const fn has_dump_flag(&self) -> bool {
self.dump_ast.is_some()
}
}
@ -162,29 +162,23 @@ where
let ast = parse_tokens(src, context)?;
match arg {
Some(format) => match format {
DumpFormat::Debug => println!("{ast:#?}"),
DumpFormat::Json => println!(
"{}",
serde_json::to_string(&ast).expect("could not convert AST to a JSON string")
),
DumpFormat::JsonPretty => {
println!(
"{}",
serde_json::to_string_pretty(&ast)
.expect("could not convert AST to a pretty JSON string")
);
}
},
// Default ast dumping format.
None => println!("{ast:#?}"),
Some(DumpFormat::Json) => println!(
"{}",
serde_json::to_string(&ast).expect("could not convert AST to a JSON string")
),
Some(DumpFormat::JsonPretty) => println!(
"{}",
serde_json::to_string_pretty(&ast)
.expect("could not convert AST to a pretty JSON string")
),
Some(DumpFormat::Debug) | None => println!("{ast:#?}"),
}
}
Ok(())
}
pub fn main() -> Result<(), io::Error> {
fn main() -> Result<(), io::Error> {
let args = Opt::parse();
let mut context = Context::default();

3
boa_gc/Cargo.toml

@ -13,6 +13,3 @@ rust-version.workspace = true
[dependencies]
boa_profiler.workspace = true
boa_macros.workspace = true
# Optional Dependencies
measureme = { version = "10.1.0", optional = true }

41
boa_gc/src/cell.rs

@ -1,11 +1,13 @@
//! A garbage collected cell implementation
use std::cell::{Cell, UnsafeCell};
use std::cmp::Ordering;
use std::fmt::{self, Debug, Display};
use std::hash::Hash;
use std::ops::{Deref, DerefMut};
use crate::trace::{Finalize, Trace};
use std::{
cell::{Cell, UnsafeCell},
cmp::Ordering,
fmt::{self, Debug, Display},
hash::Hash,
ops::{Deref, DerefMut},
};
/// `BorrowFlag` represent the internal state of a `GcCell` and
/// keeps track of the amount of current borrows.
@ -34,7 +36,7 @@ pub(crate) const BORROWFLAG_INIT: BorrowFlag = BorrowFlag(ROOT);
impl BorrowFlag {
/// Check the current `BorrowState` of `BorrowFlag`.
#[inline]
pub(crate) fn borrowed(self) -> BorrowState {
pub(crate) const fn borrowed(self) -> BorrowState {
match self.0 & !ROOT {
UNUSED => BorrowState::Unused,
WRITING => BorrowState::Writing,
@ -44,20 +46,20 @@ impl BorrowFlag {
/// Check whether the borrow bit is flagged.
#[inline]
pub(crate) fn rooted(self) -> bool {
pub(crate) const fn rooted(self) -> bool {
self.0 & ROOT > 0
}
/// Set the `BorrowFlag`'s state to writing.
#[inline]
pub(crate) fn set_writing(self) -> Self {
pub(crate) const fn set_writing(self) -> Self {
// Set every bit other than the root bit, which is preserved
Self(self.0 | WRITING)
}
/// Remove the root flag on `BorrowFlag`
#[inline]
pub(crate) fn set_unused(self) -> Self {
pub(crate) const fn set_unused(self) -> Self {
// Clear every bit other than the root bit, which is preserved
Self(self.0 & ROOT)
}
@ -130,7 +132,7 @@ pub struct GcCell<T: ?Sized + 'static> {
impl<T: Trace> GcCell<T> {
/// Creates a new `GcCell` containing `value`.
#[inline]
pub fn new(value: T) -> Self {
pub const fn new(value: T) -> Self {
Self {
flags: Cell::new(BORROWFLAG_INIT),
cell: UnsafeCell::new(value),
@ -402,7 +404,7 @@ impl<'a, T: ?Sized> GcCellRef<'a, T> {
}
}
impl<'a, T: ?Sized> Deref for GcCellRef<'a, T> {
impl<T: ?Sized> Deref for GcCellRef<'_, T> {
type Target = T;
#[inline]
@ -411,20 +413,20 @@ impl<'a, T: ?Sized> Deref for GcCellRef<'a, T> {
}
}
impl<'a, T: ?Sized> Drop for GcCellRef<'a, T> {
impl<T: ?Sized> Drop for GcCellRef<'_, T> {
fn drop(&mut self) {
debug_assert!(self.flags.get().borrowed() == BorrowState::Reading);
self.flags.set(self.flags.get().sub_reading());
}
}
impl<'a, T: ?Sized + Debug> Debug for GcCellRef<'a, T> {
impl<T: ?Sized + Debug> Debug for GcCellRef<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Debug::fmt(&**self, f)
}
}
impl<'a, T: ?Sized + Display> Display for GcCellRef<'a, T> {
impl<T: ?Sized + Display> Display for GcCellRef<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&**self, f)
}
@ -452,6 +454,7 @@ impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> {
F: FnOnce(&mut U) -> &mut V,
{
// SAFETY: This is safe as `GcCellRefMut` is already borrowed, so the value is rooted.
#[allow(trivial_casts)]
let value = unsafe { &mut *(orig.value as *mut U) };
let ret = GcCellRefMut {
@ -467,7 +470,7 @@ impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> {
}
}
impl<'a, T: Trace + ?Sized, U: ?Sized> Deref for GcCellRefMut<'a, T, U> {
impl<T: Trace + ?Sized, U: ?Sized> Deref for GcCellRefMut<'_, T, U> {
type Target = U;
#[inline]
@ -476,14 +479,14 @@ impl<'a, T: Trace + ?Sized, U: ?Sized> Deref for GcCellRefMut<'a, T, U> {
}
}
impl<'a, T: Trace + ?Sized, U: ?Sized> DerefMut for GcCellRefMut<'a, T, U> {
impl<T: Trace + ?Sized, U: ?Sized> DerefMut for GcCellRefMut<'_, T, U> {
#[inline]
fn deref_mut(&mut self) -> &mut U {
self.value
}
}
impl<'a, T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'a, T, U> {
impl<T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'_, T, U> {
#[inline]
fn drop(&mut self) {
debug_assert!(self.gc_cell.flags.get().borrowed() == BorrowState::Writing);
@ -502,13 +505,13 @@ impl<'a, T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'a, T, U> {
}
}
impl<'a, T: Trace + ?Sized, U: Debug + ?Sized> Debug for GcCellRefMut<'a, T, U> {
impl<T: Trace + ?Sized, U: Debug + ?Sized> Debug for GcCellRefMut<'_, T, U> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Debug::fmt(&**self, f)
}
}
impl<'a, T: Trace + ?Sized, U: Display + ?Sized> Display for GcCellRefMut<'a, T, U> {
impl<T: Trace + ?Sized, U: Display + ?Sized> Display for GcCellRefMut<'_, T, U> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
Display::fmt(&**self, f)
}

29
boa_gc/src/internals/ephemeron_box.rs

@ -1,8 +1,5 @@
use crate::trace::Trace;
use crate::{finalizer_safe, GcBox};
use crate::{Finalize, Gc};
use std::cell::Cell;
use std::ptr::NonNull;
use crate::{finalizer_safe, trace::Trace, Finalize, Gc, GcBox};
use std::{cell::Cell, ptr::NonNull};
/// The inner allocation of an [`Ephemeron`][crate::Ephemeron] pointer.
pub(crate) struct EphemeronBox<K: Trace + ?Sized + 'static, V: Trace + ?Sized + 'static> {
@ -23,11 +20,7 @@ impl<K: Trace + ?Sized, V: Trace + ?Sized> EphemeronBox<K, V> {
/// Checks if the key pointer is marked by Trace
#[inline]
pub(crate) fn is_marked(&self) -> bool {
if let Some(key) = self.inner_key() {
key.is_marked()
} else {
false
}
self.inner_key().map_or(false, GcBox::is_marked)
}
/// Returns some pointer to the `key`'s `GcBox` or None
@ -46,28 +39,18 @@ impl<K: Trace + ?Sized, V: Trace + ?Sized> EphemeronBox<K, V> {
// fetch either a live `GcBox` or None. The value of `key` is set
// to None in the case where `EphemeronBox` and `key`'s `GcBox`
// entered into `Collector::sweep()` as unmarked.
unsafe {
if let Some(inner_key) = self.inner_key_ptr() {
Some(&*inner_key)
} else {
None
}
}
unsafe { self.inner_key_ptr().map(|inner_key| &*inner_key) }
}
/// Returns a reference to the value of `key`'s `GcBox`
#[inline]
pub(crate) fn key(&self) -> Option<&K> {
if let Some(key_box) = self.inner_key() {
Some(key_box.value())
} else {
None
}
self.inner_key().map(GcBox::value)
}
/// Returns a reference to `value`
#[inline]
pub(crate) fn value(&self) -> &V {
pub(crate) const fn value(&self) -> &V {
&self.value
}

10
boa_gc/src/internals/gc_box.rs

@ -1,7 +1,9 @@
use crate::Trace;
use std::cell::Cell;
use std::fmt;
use std::ptr::{self, NonNull};
use std::{
cell::Cell,
fmt,
ptr::{self, NonNull},
};
// Age and Weak Flags
const MARK_MASK: usize = 1 << (usize::BITS - 2);
@ -183,7 +185,7 @@ impl<T: Trace + ?Sized> GcBox<T> {
/// Returns a reference to the `GcBox`'s value.
#[inline]
pub(crate) fn value(&self) -> &T {
pub(crate) const fn value(&self) -> &T {
&self.value
}

5
boa_gc/src/internals/mod.rs

@ -1,5 +1,4 @@
mod ephemeron_box;
pub(crate) use ephemeron_box::EphemeronBox;
mod gc_box;
pub(crate) use gc_box::GcBox;
pub(crate) use self::{ephemeron_box::EphemeronBox, gc_box::GcBox};

105
boa_gc/src/lib.rs

@ -1,69 +1,84 @@
//! Garbage collector for the Boa JavaScript engine.
#![warn(
clippy::perf,
clippy::single_match_else,
clippy::dbg_macro,
clippy::doc_markdown,
clippy::wildcard_imports,
clippy::struct_excessive_bools,
clippy::doc_markdown,
clippy::semicolon_if_nothing_returned,
clippy::pedantic
)]
#![cfg_attr(not(test), forbid(clippy::unwrap_used))]
#![warn(missing_docs, clippy::dbg_macro)]
#![deny(
clippy::all,
clippy::cast_lossless,
clippy::redundant_closure_for_method_calls,
clippy::use_self,
clippy::unnested_or_patterns,
clippy::trivially_copy_pass_by_ref,
clippy::needless_pass_by_value,
clippy::match_wildcard_for_single_variants,
clippy::map_unwrap_or,
clippy::undocumented_unsafe_blocks,
clippy::missing_safety_doc,
// rustc lint groups https://doc.rust-lang.org/rustc/lints/groups.html
warnings,
future_incompatible,
let_underscore,
nonstandard_style,
rust_2018_compatibility,
rust_2018_idioms,
rust_2021_compatibility,
unused,
// rustc allowed-by-default lints https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html
macro_use_extern_crate,
meta_variable_misuse,
missing_abi,
missing_copy_implementations,
missing_debug_implementations,
non_ascii_idents,
noop_method_call,
single_use_lifetimes,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unsafe_op_in_unsafe_fn,
unused_qualifications,
unused_crate_dependencies,
unused_import_braces,
unused_lifetimes,
unreachable_pub,
trivial_numeric_casts,
unused_qualifications,
unused_tuple_struct_fields,
variant_size_differences,
// rustdoc lints https://doc.rust-lang.org/rustdoc/lints.html
rustdoc::broken_intra_doc_links,
missing_debug_implementations,
missing_copy_implementations,
deprecated_in_future,
meta_variable_misuse,
non_ascii_idents,
rust_2018_compatibility,
rust_2018_idioms,
future_incompatible,
nonstandard_style,
missing_docs
rustdoc::private_intra_doc_links,
rustdoc::missing_crate_level_docs,
rustdoc::private_doc_tests,
rustdoc::invalid_codeblock_attributes,
rustdoc::invalid_rust_codeblocks,
rustdoc::bare_urls,
// clippy categories https://doc.rust-lang.org/clippy/
clippy::all,
clippy::correctness,
clippy::suspicious,
clippy::style,
clippy::complexity,
clippy::perf,
clippy::pedantic,
clippy::nursery,
)]
#![allow(
clippy::module_name_repetitions,
clippy::redundant_pub_crate,
clippy::let_unit_value
)]
#![allow(clippy::let_unit_value, clippy::module_name_repetitions)]
extern crate self as boa_gc;
use boa_profiler::Profiler;
use std::cell::{Cell, RefCell};
use std::mem;
use std::ptr::NonNull;
mod cell;
mod pointers;
mod trace;
pub(crate) mod internals;
mod cell;
mod pointers;
use boa_profiler::Profiler;
use internals::GcBox;
use std::{
cell::{Cell, RefCell},
mem,
ptr::NonNull,
};
pub use crate::trace::{Finalize, Trace};
pub use boa_macros::{Finalize, Trace};
pub use cell::{GcCell, GcCellRef, GcCellRefMut};
pub use pointers::{Ephemeron, Gc, WeakGc};
use internals::GcBox;
type GcPointer = NonNull<GcBox<dyn Trace>>;
thread_local!(static EPHEMERON_QUEUE: Cell<Option<Vec<GcPointer>>> = Cell::new(None));

3
boa_gc/src/pointers/ephemeron.rs

@ -4,8 +4,7 @@ use crate::{
trace::{Finalize, Trace},
Allocator, Gc, GcBox, EPHEMERON_QUEUE,
};
use std::cell::Cell;
use std::ptr::NonNull;
use std::{cell::Cell, ptr::NonNull};
#[derive(Debug)]
/// A key-value pair where the value becomes inaccessible when the key is garbage collected.

28
boa_gc/src/pointers/gc.rs

@ -1,15 +1,19 @@
use std::cell::Cell;
use std::cmp::Ordering;
use std::fmt::{self, Debug, Display};
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use std::ops::Deref;
use std::ptr::{self, addr_of_mut, NonNull};
use std::rc::Rc;
use crate::internals::GcBox;
use crate::trace::{Finalize, Trace};
use crate::{finalizer_safe, Allocator};
use crate::{
finalizer_safe,
internals::GcBox,
trace::{Finalize, Trace},
Allocator,
};
use std::{
cell::Cell,
cmp::Ordering,
fmt::{self, Debug, Display},
hash::{Hash, Hasher},
marker::PhantomData,
ops::Deref,
ptr::{self, addr_of_mut, NonNull},
rc::Rc,
};
// Technically, this function is safe, since we're just modifying the address of a pointer without
// dereferencing it.

3
boa_gc/src/test/cell.rs

@ -1,6 +1,5 @@
use boa_gc::{Gc, GcCell};
use super::run_test;
use crate::{Gc, GcCell};
#[test]
fn boa_borrow_mut_test() {

3
boa_gc/src/test/weak.rs

@ -1,6 +1,5 @@
use boa_gc::{force_collect, Ephemeron, Gc, WeakGc};
use super::run_test;
use crate::{force_collect, Ephemeron, Gc, WeakGc};
#[test]
fn eph_weak_gc_test() {

28
boa_gc/src/trace.rs

@ -1,16 +1,18 @@
use std::borrow::{Cow, ToOwned};
use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque};
use std::hash::{BuildHasher, Hash};
use std::marker::PhantomData;
use std::num::{
NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroIsize, NonZeroU128,
NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize,
};
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::atomic::{
AtomicBool, AtomicI16, AtomicI32, AtomicI64, AtomicI8, AtomicIsize, AtomicU16, AtomicU32,
AtomicU64, AtomicU8, AtomicUsize,
use std::{
borrow::{Cow, ToOwned},
collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque},
hash::{BuildHasher, Hash},
marker::PhantomData,
num::{
NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroIsize, NonZeroU128,
NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize,
},
path::{Path, PathBuf},
rc::Rc,
sync::atomic::{
AtomicBool, AtomicI16, AtomicI32, AtomicI64, AtomicI8, AtomicIsize, AtomicU16, AtomicU32,
AtomicU64, AtomicU8, AtomicUsize,
},
};
/// Substitute for the [`Drop`] trait for garbage collected types.

2
boa_interner/src/interned_str.rs

@ -27,7 +27,7 @@ impl<Char> InternedStr<Char> {
/// Not maintaining the invariants specified on the struct definition
/// could cause Undefined Behaviour.
#[inline]
pub(super) unsafe fn new(ptr: NonNull<[Char]>) -> Self {
pub(super) const unsafe fn new(ptr: NonNull<[Char]>) -> Self {
Self { ptr }
}

120
boa_interner/src/lib.rs

@ -13,62 +13,60 @@
html_favicon_url = "https://raw.githubusercontent.com/boa-dev/boa/main/assets/logo.svg"
)]
#![cfg_attr(not(test), forbid(clippy::unwrap_used))]
#![warn(
clippy::perf,
clippy::single_match_else,
clippy::dbg_macro,
clippy::doc_markdown,
clippy::wildcard_imports,
clippy::struct_excessive_bools,
clippy::doc_markdown,
clippy::semicolon_if_nothing_returned,
clippy::pedantic
)]
#![warn(missing_docs, clippy::dbg_macro)]
#![deny(
clippy::all,
clippy::cast_lossless,
clippy::redundant_closure_for_method_calls,
clippy::use_self,
clippy::unnested_or_patterns,
clippy::trivially_copy_pass_by_ref,
clippy::needless_pass_by_value,
clippy::match_wildcard_for_single_variants,
clippy::map_unwrap_or,
unused_qualifications,
unused_import_braces,
unused_lifetimes,
unreachable_pub,
trivial_numeric_casts,
// rustdoc,
missing_debug_implementations,
missing_copy_implementations,
deprecated_in_future,
meta_variable_misuse,
non_ascii_idents,
rust_2018_compatibility,
rust_2018_idioms,
// rustc lint groups https://doc.rust-lang.org/rustc/lints/groups.html
warnings,
future_incompatible,
let_underscore,
nonstandard_style,
unsafe_op_in_unsafe_fn
rust_2018_compatibility,
rust_2018_idioms,
rust_2021_compatibility,
unused,
// rustc allowed-by-default lints https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html
macro_use_extern_crate,
meta_variable_misuse,
missing_abi,
missing_copy_implementations,
missing_debug_implementations,
non_ascii_idents,
noop_method_call,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unsafe_op_in_unsafe_fn,
unused_crate_dependencies,
unused_import_braces,
unused_lifetimes,
unused_qualifications,
unused_tuple_struct_fields,
variant_size_differences,
// rustdoc lints https://doc.rust-lang.org/rustdoc/lints.html
rustdoc::broken_intra_doc_links,
rustdoc::private_intra_doc_links,
rustdoc::missing_crate_level_docs,
rustdoc::private_doc_tests,
rustdoc::invalid_codeblock_attributes,
rustdoc::invalid_rust_codeblocks,
rustdoc::bare_urls,
// clippy categories https://doc.rust-lang.org/clippy/
clippy::all,
clippy::correctness,
clippy::suspicious,
clippy::style,
clippy::complexity,
clippy::perf,
clippy::pedantic,
clippy::nursery,
)]
#![allow(
clippy::module_name_repetitions,
clippy::cast_possible_truncation,
clippy::cast_sign_loss,
clippy::cast_precision_loss,
clippy::cast_possible_wrap,
clippy::cast_ptr_alignment,
clippy::missing_panics_doc,
clippy::too_many_lines,
clippy::unreadable_literal,
clippy::missing_inline_in_public_items,
clippy::cognitive_complexity,
clippy::must_use_candidate,
clippy::missing_errors_doc,
clippy::as_conversions,
clippy::let_unit_value,
clippy::redundant_pub_crate,
// TODO deny once false positive is fixed (https://github.com/rust-lang/rust-clippy/issues/9626).
clippy::trait_duplication_in_bounds,
clippy::trait_duplication_in_bounds
)]
extern crate static_assertions as sa;
@ -77,12 +75,13 @@ mod fixed_string;
mod interned_str;
mod raw;
mod sym;
#[cfg(test)]
mod tests;
use raw::RawInterner;
use std::borrow::Cow;
use raw::RawInterner;
pub use sym::*;
/// An enumeration of all slice types [`Interner`] can internally store.
@ -91,7 +90,10 @@ pub use sym::*;
/// encodings [`Interner`] can store.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum JStrRef<'a> {
/// A `UTF-8` string reference.
Utf8(&'a str),
/// A `UTF-16` string reference.
Utf16(&'a [u16]),
}
@ -128,12 +130,16 @@ pub struct JSInternedStrRef<'a, 'b> {
impl<'a, 'b> JSInternedStrRef<'a, 'b> {
/// Returns the inner reference to the interned string in `UTF-8` encoding.
/// if the string is not representable in `UTF-8`, returns [`None`]
pub fn utf8(&self) -> Option<&'a str> {
#[inline]
#[must_use]
pub const fn utf8(&self) -> Option<&'a str> {
self.utf8
}
/// Returns the inner reference to the interned string in `UTF-16` encoding.
pub fn utf16(&self) -> &'b [u16] {
#[inline]
#[must_use]
pub const fn utf16(&self) -> &'b [u16] {
self.utf16
}
@ -186,7 +192,7 @@ impl<'a, 'b> JSInternedStrRef<'a, 'b> {
}
}
impl<'a, 'b> std::fmt::Display for JSInternedStrRef<'a, 'b> {
impl std::fmt::Display for JSInternedStrRef<'_, '_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.join_with_context(
std::fmt::Display::fmt,
@ -215,12 +221,14 @@ pub struct Interner {
impl Interner {
/// Creates a new [`Interner`].
#[inline]
#[must_use]
pub fn new() -> Self {
Self::default()
}
/// Creates a new [`Interner`] with the specified capacity.
#[inline]
#[must_use]
pub fn with_capacity(capacity: usize) -> Self {
Self {
utf8_interner: RawInterner::with_capacity(capacity),
@ -230,6 +238,7 @@ impl Interner {
/// Returns the number of strings interned by the interner.
#[inline]
#[must_use]
pub fn len(&self) -> usize {
// `utf16_interner.len()` == `utf8_interner.len()`,
// so we can use any of them.
@ -238,6 +247,7 @@ impl Interner {
/// Returns `true` if the [`Interner`] contains no interned strings.
#[inline]
#[must_use]
pub fn is_empty(&self) -> bool {
COMMON_STRINGS_UTF8.is_empty() && self.utf16_interner.is_empty()
}
@ -342,6 +352,7 @@ impl Interner {
/// Returns the string for the given symbol if any.
#[inline]
#[must_use]
pub fn resolve(&self, symbol: Sym) -> Option<JSInternedStrRef<'_, '_>> {
let index = symbol.get() - 1;
@ -385,6 +396,7 @@ impl Interner {
///
/// If the interner cannot resolve the given symbol.
#[inline]
#[must_use]
pub fn resolve_expect(&self, symbol: Sym) -> JSInternedStrRef<'_, '_> {
self.resolve(symbol).expect("string disappeared")
}

6
boa_interner/src/raw.rs

@ -1,8 +1,6 @@
use std::hash::Hash;
use rustc_hash::FxHashMap;
use crate::{fixed_string::FixedString, interned_str::InternedStr};
use rustc_hash::FxHashMap;
use std::hash::Hash;
/// Raw string interner, generic by a char type.
#[derive(Debug)]

1
boa_interner/src/sym.rs

@ -131,6 +131,7 @@ impl Sym {
/// Returns the internal value of the [`Sym`]
#[inline]
#[must_use]
pub const fn get(self) -> usize {
self.value.get()
}

68
boa_macros/src/lib.rs

@ -1,3 +1,58 @@
//! Macros for the Boa JavaScript engine.
#![cfg_attr(not(test), forbid(clippy::unwrap_used))]
#![warn(missing_docs, clippy::dbg_macro)]
#![deny(
// rustc lint groups https://doc.rust-lang.org/rustc/lints/groups.html
warnings,
future_incompatible,
let_underscore,
nonstandard_style,
rust_2018_compatibility,
rust_2018_idioms,
rust_2021_compatibility,
unused,
// rustc allowed-by-default lints https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html
macro_use_extern_crate,
meta_variable_misuse,
missing_abi,
missing_copy_implementations,
missing_debug_implementations,
non_ascii_idents,
noop_method_call,
single_use_lifetimes,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unsafe_op_in_unsafe_fn,
unused_crate_dependencies,
unused_import_braces,
unused_lifetimes,
unused_qualifications,
unused_tuple_struct_fields,
variant_size_differences,
// rustdoc lints https://doc.rust-lang.org/rustdoc/lints.html
rustdoc::broken_intra_doc_links,
rustdoc::private_intra_doc_links,
rustdoc::missing_crate_level_docs,
rustdoc::private_doc_tests,
rustdoc::invalid_codeblock_attributes,
rustdoc::invalid_rust_codeblocks,
rustdoc::bare_urls,
// clippy categories https://doc.rust-lang.org/clippy/
clippy::all,
clippy::correctness,
clippy::suspicious,
clippy::style,
clippy::complexity,
clippy::perf,
clippy::pedantic,
clippy::nursery,
)]
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, LitStr};
@ -15,7 +70,11 @@ pub fn utf16(input: TokenStream) -> TokenStream {
.into()
}
decl_derive!([Trace, attributes(unsafe_ignore_trace)] => derive_trace);
decl_derive! {
[Trace, attributes(unsafe_ignore_trace)] =>
/// Derive the Trace trait.
derive_trace
}
fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream {
s.filter(|bi| {
@ -109,8 +168,13 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream {
}
}
decl_derive!([Finalize] => derive_finalize);
decl_derive! {
[Finalize] =>
/// Derive the Finalize trait.
derive_finalize
}
#[allow(clippy::needless_pass_by_value)]
fn derive_finalize(s: Structure<'_>) -> proc_macro2::TokenStream {
s.unbound_impl(quote!(::boa_gc::Finalize), quote!())
}

67
boa_parser/src/error.rs

@ -1,7 +1,6 @@
//! Error and result implementation for the parser.
use crate::lexer::Error as LexError;
use boa_ast::{Position, Span};
use std::fmt;
@ -26,34 +25,51 @@ impl From<LexError> for Error {
}
}
/// An enum which represents errors encounted during parsing an expression
/// An enum which represents errors encountered during parsing an expression
#[derive(Debug)]
pub enum Error {
/// When it expected a certain kind of token, but got another as part of something
Expected {
/// The token(s) that were expected.
expected: Box<[String]>,
/// The token that was not expected.
found: Box<str>,
span: Span,
/// The parsing context in which the error occurred.
context: &'static str,
/// Position of the source code where the error occurred.
span: Span,
},
/// When a token is unexpected
Unexpected {
/// The error message.
message: Option<&'static str>,
/// The token that was not expected.
found: Box<str>,
/// Position of the source code where the error occurred.
span: Span,
message: Option<&'static str>,
},
/// When there is an abrupt end to the parsing
AbruptEnd,
/// A lexing error.
Lex { err: LexError },
Lex {
/// The error that occurred during lexing.
err: LexError,
},
/// Catch all General Error
General {
/// The error message.
message: &'static str,
position: Position,
},
/// Unimplemented syntax error
Unimplemented {
message: &'static str,
/// Position of the source code where the error occurred.
position: Position,
},
}
@ -73,6 +89,7 @@ impl Error {
}
/// Creates an `Expected` parsing error.
#[inline]
pub(crate) fn expected<E, F>(expected: E, found: F, span: Span, context: &'static str) -> Self
where
E: Into<Box<[String]>>,
@ -87,6 +104,7 @@ impl Error {
}
/// Creates an `Expected` parsing error.
#[inline]
pub(crate) fn unexpected<F, C>(found: F, span: Span, message: C) -> Self
where
F: Into<Box<str>>,
@ -100,12 +118,14 @@ impl Error {
}
/// Creates a "general" parsing error.
pub(crate) fn general(message: &'static str, position: Position) -> Self {
#[inline]
pub(crate) const fn general(message: &'static str, position: Position) -> Self {
Self::General { message, position }
}
/// Creates a "general" parsing error with the specific error message for a wrong function declaration in non-strict mode.
pub(crate) fn wrong_function_declaration_non_strict(position: Position) -> Self {
#[inline]
pub(crate) const fn wrong_function_declaration_non_strict(position: Position) -> Self {
Self::General {
message: "In non-strict mode code, functions can only be declared at top level, inside a block, or as the body of an if statement.",
position
@ -114,7 +134,7 @@ impl Error {
/// Creates a "general" parsing error with the specific error message for a wrong function declaration with label.
#[inline]
pub(crate) fn wrong_labelled_function_declaration(position: Position) -> Self {
pub(crate) const fn wrong_labelled_function_declaration(position: Position) -> Self {
Self::General {
message: "Labelled functions can only be declared at top level or inside a block",
position,
@ -122,15 +142,10 @@ impl Error {
}
/// Creates a parsing error from a lexing error.
pub(crate) fn lex(e: LexError) -> Self {
#[inline]
pub(crate) const fn lex(e: LexError) -> Self {
Self::Lex { err: e }
}
/// Creates a new `Unimplemented` parsing error.
#[allow(dead_code)]
pub(crate) fn unimplemented(message: &'static str, position: Position) -> Self {
Self::Unimplemented { message, position }
}
}
impl fmt::Display for Error {
@ -180,11 +195,7 @@ impl fmt::Display for Error {
} => write!(
f,
"unexpected token '{found}'{} at line {}, col {}",
if let Some(m) = message {
format!(", {m}")
} else {
String::new()
},
message.map_or_else(String::new, |m| format!(", {m}")),
span.start().line_number(),
span.start().column_number()
),
@ -196,12 +207,6 @@ impl fmt::Display for Error {
position.column_number()
),
Self::Lex { err } => fmt::Display::fmt(err, f),
Self::Unimplemented { message, position } => write!(
f,
"{message} not yet implemented at line {}, col {}",
position.line_number(),
position.column_number()
),
}
}
}

3
boa_parser/src/lexer/comment.rs

@ -1,7 +1,6 @@
//! This module implements lexing for comments used in the JavaScript programming language.
use super::{Cursor, Error, Tokenizer};
use crate::lexer::{Token, TokenKind};
use crate::lexer::{Cursor, Error, Token, TokenKind, Tokenizer};
use boa_ast::{Position, Span};
use boa_interner::Interner;
use boa_profiler::Profiler;

24
boa_parser/src/lexer/cursor.rs

@ -14,9 +14,10 @@ pub(super) struct Cursor<R> {
impl<R> Cursor<R> {
/// Gets the current position of the cursor in the source code.
#[inline]
pub(super) fn pos(&self) -> Position {
pub(super) const fn pos(&self) -> Position {
self.pos
}
/// Advances the position to the next column.
#[inline]
pub(super) fn next_column(&mut self) {
@ -34,7 +35,7 @@ impl<R> Cursor<R> {
#[inline]
/// Returns if strict mode is currently active.
pub(super) fn strict_mode(&self) -> bool {
pub(super) const fn strict_mode(&self) -> bool {
self.strict_mode
}
@ -120,11 +121,8 @@ where
let _timer = Profiler::global().start_event("cursor::next_is_ascii_pred()", "Lexing");
Ok(match self.peek()? {
Some(byte) => match byte {
0..=0x7F => pred(char::from(byte)),
_ => false,
},
None => false,
Some(byte) if (0..=0x7F).contains(&byte) => pred(char::from(byte)),
Some(_) | None => false,
})
}
@ -141,11 +139,7 @@ where
{
let _timer = Profiler::global().start_event("cursor::next_is_char_pred()", "Lexing");
Ok(if let Some(peek) = self.peek_char()? {
pred(peek)
} else {
false
})
Ok(self.peek_char()?.map_or(false, pred))
}
/// Fills the buffer with all bytes until the stop byte is found.
@ -303,7 +297,7 @@ struct InnerIter<R> {
impl<R> InnerIter<R> {
/// Creates a new inner iterator.
#[inline]
fn new(iter: Bytes<R>) -> Self {
const fn new(iter: Bytes<R>) -> Self {
Self {
iter,
num_peeked_bytes: 0,
@ -503,7 +497,7 @@ fn utf8_acc_cont_byte(ch: u32, byte: u8) -> u32 {
/// Checks whether the byte is a UTF-8 first byte (i.e., ascii byte or starts with the
/// bits `11`).
#[inline]
fn utf8_is_first_byte(byte: u8) -> bool {
const fn utf8_is_first_byte(byte: u8) -> bool {
byte <= 0x7F || (byte >> 6) == 0x11
}
@ -513,7 +507,7 @@ fn unwrap_or_0(opt: Option<u8>) -> u8 {
}
#[inline]
fn utf8_len(ch: u32) -> u32 {
const fn utf8_len(ch: u32) -> u32 {
if ch <= 0x7F {
1
} else if ch <= 0x7FF {

1
boa_parser/src/lexer/error.rs

@ -8,6 +8,7 @@
use boa_ast::Position;
use std::{error::Error as StdError, fmt, io};
/// An error that occurred during the lexing.
#[derive(Debug)]
pub enum Error {
/// An IO error is raised to indicate an issue when the lexer is reading data that isn't

35
boa_parser/src/lexer/identifier.rs

@ -1,7 +1,6 @@
//! This module implements lexing for identifiers (foo, myvar, etc.) used in the JavaScript programming language.
use super::{Cursor, Error, Tokenizer};
use crate::lexer::{StringLiteral, Token, TokenKind};
use crate::lexer::{Cursor, Error, StringLiteral, Token, TokenKind, Tokenizer};
use boa_ast::{Keyword, Position, Span};
use boa_interner::Interner;
use boa_profiler::Profiler;
@ -23,7 +22,8 @@ pub(super) struct Identifier {
impl Identifier {
/// Creates a new identifier/keyword lexer.
pub(super) fn new(init: char) -> Self {
#[inline]
pub(super) const fn new(init: char) -> Self {
Self { init }
}
@ -33,13 +33,10 @@ impl Identifier {
/// - [ECMAScript reference][spec]
///
/// [spec]: https://tc39.es/ecma262/#sec-names-and-keywords
#[inline]
pub(super) fn is_identifier_start(ch: u32) -> bool {
matches!(ch, 0x0024 /* $ */ | 0x005F /* _ */)
|| if let Ok(ch) = char::try_from(ch) {
ch.is_id_start()
} else {
false
}
|| char::try_from(ch).map_or(false, char::is_id_start)
}
/// Checks if a character is `IdentifierPart` as per ECMAScript standards.
@ -48,15 +45,12 @@ impl Identifier {
/// - [ECMAScript reference][spec]
///
/// [spec]: https://tc39.es/ecma262/#sec-names-and-keywords
#[inline]
fn is_identifier_part(ch: u32) -> bool {
matches!(
ch,
0x0024 /* $ */ | 0x005F /* _ */ | 0x200C /* <ZWNJ> */ | 0x200D /* <ZWJ> */
) || if let Ok(ch) = char::try_from(ch) {
ch.is_id_continue()
} else {
false
}
) || char::try_from(ch).map_or(false, char::is_id_continue)
}
}
@ -75,15 +69,12 @@ impl<R> Tokenizer<R> for Identifier {
let (identifier_name, contains_escaped_chars) =
Self::take_identifier_name(cursor, start_pos, self.init)?;
let token_kind = if let Ok(keyword) = identifier_name.parse() {
match keyword {
Keyword::True => TokenKind::BooleanLiteral(true),
Keyword::False => TokenKind::BooleanLiteral(false),
Keyword::Null => TokenKind::NullLiteral,
_ => TokenKind::Keyword((keyword, contains_escaped_chars)),
}
} else {
TokenKind::identifier(interner.get_or_intern(identifier_name.as_str()))
let token_kind = match identifier_name.parse() {
Ok(Keyword::True) => TokenKind::BooleanLiteral(true),
Ok(Keyword::False) => TokenKind::BooleanLiteral(false),
Ok(Keyword::Null) => TokenKind::NullLiteral,
Ok(keyword) => TokenKind::Keyword((keyword, contains_escaped_chars)),
_ => TokenKind::identifier(interner.get_or_intern(identifier_name.as_str())),
};
Ok(Token::new(token_kind, Span::new(start_pos, cursor.pos())))

13
boa_parser/src/lexer/mod.rs

@ -14,18 +14,19 @@
//!
//! [spec]: https://tc39.es/ecma262/#sec-ecmascript-language-lexical-grammar
pub mod error;
pub mod regex;
pub mod token;
mod comment;
mod cursor;
pub mod error;
mod identifier;
mod number;
mod operator;
mod private_identifier;
pub mod regex;
mod spread;
mod string;
mod template;
pub mod token;
#[cfg(test)]
mod tests;
@ -80,7 +81,7 @@ impl<R> Lexer<R> {
/// * ECMAScript standard uses `\{Space_Separator}` + `\u{0009}`, `\u{000B}`, `\u{000C}`, `\u{FEFF}`
///
/// [More information](https://tc39.es/ecma262/#table-32)
fn is_whitespace(ch: u32) -> bool {
const fn is_whitespace(ch: u32) -> bool {
matches!(
ch,
0x0020 | 0x0009 | 0x000B | 0x000C | 0x00A0 | 0xFEFF |
@ -97,13 +98,13 @@ impl<R> Lexer<R> {
/// Gets the goal symbol the lexer is currently using.
#[inline]
pub(crate) fn get_goal(&self) -> InputElement {
pub(crate) const fn get_goal(&self) -> InputElement {
self.goal_symbol
}
#[inline]
/// Returns if strict mode is currently active.
pub(super) fn strict_mode(&self) -> bool {
pub(super) const fn strict_mode(&self) -> bool {
self.cursor.strict_mode()
}

10
boa_parser/src/lexer/number.rs

@ -25,7 +25,7 @@ pub(super) struct NumberLiteral {
impl NumberLiteral {
/// Creates a new string literal lexer.
pub(super) fn new(init: u8) -> Self {
pub(super) const fn new(init: u8) -> Self {
Self { init }
}
}
@ -42,7 +42,7 @@ enum NumericKind {
impl NumericKind {
/// Get the base of the number kind.
fn base(self) -> u32 {
const fn base(self) -> u32 {
match self {
Self::Rational => 10,
Self::Integer(base) | Self::BigInt(base) => base,
@ -402,12 +402,10 @@ impl<R> Tokenizer<R> for NumberLiteral {
}
},
NumericKind::Integer(base) => {
if let Ok(num) = i32::from_str_radix(num_str, base) {
Numeric::Integer(num)
} else {
i32::from_str_radix(num_str, base).map_or_else(|_| {
let num = BigInt::parse_bytes(num_str.as_bytes(), base).expect("Failed to parse integer after checks");
Numeric::Rational(num.to_f64().unwrap_or(f64::INFINITY))
}
}, Numeric::Integer)
}
};

12
boa_parser/src/lexer/operator.rs

@ -37,16 +37,6 @@ macro_rules! vop {
_ => $op,
}
});
($cursor:ident, $op:expr, {$($case:pat => $block:expr),+}) => {
match $cursor.peek().ok_or_else(|| Error::syntax("could not preview next value", $cursor.pos()))? {
$($case => {
$cursor.next_byte()?;
$cursor.next_column();
$block
})+,
_ => $op
}
}
}
/// The `op` macro handles binary operations or assignment operations and converts them into tokens.
@ -83,7 +73,7 @@ pub(super) struct Operator {
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators
impl Operator {
/// Creates a new operator lexer.
pub(super) fn new(init: u8) -> Self {
pub(super) const fn new(init: u8) -> Self {
Self { init }
}
}

5
boa_parser/src/lexer/private_identifier.rs

@ -1,7 +1,6 @@
//! This module implements lexing for private identifiers (#foo, #myvar, etc.) used in the JavaScript programming language.
use super::{identifier::Identifier, Cursor, Error, Tokenizer};
use crate::lexer::{Token, TokenKind};
use crate::lexer::{identifier::Identifier, Cursor, Error, Token, TokenKind, Tokenizer};
use boa_ast::{Position, Span};
use boa_interner::Interner;
use boa_profiler::Profiler;
@ -18,7 +17,7 @@ pub(super) struct PrivateIdentifier;
impl PrivateIdentifier {
/// Creates a new private identifier lexer.
pub(super) fn new() -> Self {
pub(super) const fn new() -> Self {
Self
}
}

18
boa_parser/src/lexer/regex.rs

@ -1,7 +1,6 @@
//! This module implements lexing for regex literals used in the JavaScript programming language.
use super::{Cursor, Error, Span, Tokenizer};
use crate::lexer::{Token, TokenKind};
use crate::lexer::{Cursor, Error, Span, Token, TokenKind, Tokenizer};
use bitflags::bitflags;
use boa_ast::Position;
use boa_interner::{Interner, Sym};
@ -141,12 +140,27 @@ bitflags! {
/// Flags of a regular expression.
#[derive(Default)]
pub struct RegExpFlags: u8 {
/// Whether to test the regular expression against all possible matches in a string,
/// or only against the first.
const GLOBAL = 0b0000_0001;
/// Whether to ignore case while attempting a match in a string.
const IGNORE_CASE = 0b0000_0010;
/// Whether or not to search in strings across multiple lines.
const MULTILINE = 0b0000_0100;
/// Whether `.` matches newlines or not.
const DOT_ALL = 0b0000_1000;
/// Whether or not Unicode features are enabled.
const UNICODE = 0b0001_0000;
/// Whether or not the search is sticky.
const STICKY = 0b0010_0000;
/// Whether the regular expression result exposes the start and end indices of
/// captured substrings.
const HAS_INDICES = 0b0100_0000;
}
}

7
boa_parser/src/lexer/spread.rs

@ -1,7 +1,6 @@
//! This module implements lexing for spread (...) literals used in the JavaScript programming language.
use super::{Cursor, Error, Tokenizer};
use crate::lexer::Token;
use crate::lexer::{Cursor, Error, Token, Tokenizer};
use boa_ast::{Position, Punctuator, Span};
use boa_interner::Interner;
use boa_profiler::Profiler;
@ -22,8 +21,8 @@ pub(super) struct SpreadLiteral;
impl SpreadLiteral {
/// Creates a new string literal lexer.
pub(super) fn new() -> Self {
Self {}
pub(super) const fn new() -> Self {
Self
}
}

5
boa_parser/src/lexer/string.rs

@ -1,7 +1,6 @@
//! This module implements lexing for string literals used in the JavaScript programming language.
use super::{Cursor, Error, Tokenizer};
use crate::lexer::{Token, TokenKind};
use crate::lexer::{Cursor, Error, Token, TokenKind, Tokenizer};
use boa_ast::{Position, Span};
use boa_interner::Interner;
use boa_profiler::Profiler;
@ -109,7 +108,7 @@ impl StringLiteral {
///
/// [spec]: https://tc39.es/ecma262/#prod-LineTerminator
#[inline]
pub(super) fn is_line_terminator(ch: u32) -> bool {
pub(super) const fn is_line_terminator(ch: u32) -> bool {
matches!(
ch,
0x000A /* <LF> */ | 0x000D /* <CR> */ | 0x2028 /* <LS> */ | 0x2029 /* <PS> */

7
boa_parser/src/lexer/template.rs

@ -1,9 +1,8 @@
//! This module implements lexing for template literals used in the JavaScript programming language.
use super::{Cursor, Error, Tokenizer};
use crate::lexer::{
string::{StringLiteral, UTF16CodeUnitsBuffer},
Token, TokenKind,
Cursor, Error, Token, TokenKind, Tokenizer,
};
use boa_ast::{Position, Span};
use boa_interner::{Interner, Sym};
@ -22,7 +21,7 @@ pub struct TemplateString {
impl TemplateString {
/// Creates a new `TemplateString` with the given raw template and start position.
pub fn new(raw: Sym, start_pos: Position) -> Self {
pub const fn new(raw: Sym, start_pos: Position) -> Self {
Self { raw, start_pos }
}
@ -32,7 +31,7 @@ impl TemplateString {
/// - [ECMAScript reference][spec]
///
/// [spec]: https://tc39.es/ecma262/#sec-static-semantics-templatestrings
pub fn as_raw(self) -> Sym {
pub const fn as_raw(self) -> Sym {
self.raw
}

16
boa_parser/src/lexer/tests.rs

@ -5,11 +5,9 @@ use crate::lexer::{
template::TemplateString, token::Numeric, Cursor, Error, Interner, Lexer, Position, Punctuator,
Read, Span, TokenKind,
};
use boa_ast::Keyword;
use boa_interner::Sym;
use boa_macros::utf16;
use std::str;
fn span(start: (u32, u32), end: (u32, u32)) -> Span {
@ -610,11 +608,9 @@ fn single_number_without_semicolon() {
let mut lexer = Lexer::new(&b"1"[..]);
let interner = &mut Interner::default();
if let Some(x) = lexer.next(interner).unwrap() {
assert_eq!(x.kind(), &TokenKind::numeric_literal(Numeric::Integer(1)));
} else {
panic!("Failed to lex 1 without semicolon");
}
let expected = [TokenKind::numeric_literal(Numeric::Integer(1))];
expect_tokens(&mut lexer, &expected, interner);
}
#[test]
@ -864,11 +860,7 @@ fn take_while_char_pred_utf8_char() {
let mut buf: Vec<u8> = Vec::new();
cur.take_while_char_pred(&mut buf, &|c| {
if let Ok(c) = char::try_from(c) {
c == 'a' || c == 'b' || c == 'c' || c == '😀'
} else {
false
}
char::try_from(c).map_or(false, |c| c == 'a' || c == 'b' || c == 'c' || c == '😀')
})
.unwrap();

30
boa_parser/src/lexer/token.rs

@ -29,21 +29,21 @@ impl Token {
/// Create a new detailed token from the token data, line number and column number
#[inline]
#[must_use]
pub fn new(kind: TokenKind, span: Span) -> Self {
pub const fn new(kind: TokenKind, span: Span) -> Self {
Self { kind, span }
}
/// Gets the kind of the token.
#[inline]
#[must_use]
pub fn kind(&self) -> &TokenKind {
pub const fn kind(&self) -> &TokenKind {
&self.kind
}
/// Gets the token span in the original source code.
#[inline]
#[must_use]
pub fn span(&self) -> Span {
pub const fn span(&self) -> Span {
self.span
}
@ -63,7 +63,7 @@ pub enum Numeric {
/// An integer
Integer(i32),
// A BigInt
/// A BigInt
BigInt(Box<BigInt>),
}
@ -162,19 +162,19 @@ impl From<Numeric> for TokenKind {
impl TokenKind {
/// Creates a `BooleanLiteral` token kind.
#[must_use]
pub fn boolean_literal(lit: bool) -> Self {
pub const fn boolean_literal(lit: bool) -> Self {
Self::BooleanLiteral(lit)
}
/// Creates an `EOF` token kind.
#[must_use]
pub fn eof() -> Self {
pub const fn eof() -> Self {
Self::EOF
}
/// Creates an `Identifier` token type.
#[must_use]
pub fn identifier(ident: Sym) -> Self {
pub const fn identifier(ident: Sym) -> Self {
Self::Identifier(ident)
}
@ -188,41 +188,43 @@ impl TokenKind {
/// Creates a `Punctuator` token type.
#[must_use]
pub fn punctuator(punc: Punctuator) -> Self {
pub const fn punctuator(punc: Punctuator) -> Self {
Self::Punctuator(punc)
}
/// Creates a `StringLiteral` token type.
#[must_use]
pub fn string_literal(lit: Sym) -> Self {
pub const fn string_literal(lit: Sym) -> Self {
Self::StringLiteral(lit)
}
/// Creates a `TemplateMiddle` token type.
#[must_use]
pub fn template_middle(template_string: TemplateString) -> Self {
pub const fn template_middle(template_string: TemplateString) -> Self {
Self::TemplateMiddle(template_string)
}
/// Creates a `TemplateNoSubstitution` token type.
#[must_use]
pub fn template_no_substitution(template_string: TemplateString) -> Self {
pub const fn template_no_substitution(template_string: TemplateString) -> Self {
Self::TemplateNoSubstitution(template_string)
}
/// Creates a `RegularExpressionLiteral` token kind.
#[must_use]
pub fn regular_expression_literal(body: Sym, flags: Sym) -> Self {
pub const fn regular_expression_literal(body: Sym, flags: Sym) -> Self {
Self::RegularExpressionLiteral(body, flags)
}
/// Creates a `LineTerminator` token kind.
#[must_use]
pub fn line_terminator() -> Self {
pub const fn line_terminator() -> Self {
Self::LineTerminator
}
/// Creates a 'Comment' token kind.
#[must_use]
pub fn comment() -> Self {
pub const fn comment() -> Self {
Self::Comment
}

81
boa_parser/src/lib.rs

@ -9,46 +9,63 @@
//! [grammar]: https://tc39.es/ecma262/#sec-ecmascript-language-expressions
#![cfg_attr(not(test), forbid(clippy::unwrap_used))]
#![warn(
clippy::perf,
clippy::single_match_else,
clippy::dbg_macro,
clippy::doc_markdown,
clippy::wildcard_imports,
clippy::struct_excessive_bools,
clippy::doc_markdown,
clippy::semicolon_if_nothing_returned,
clippy::pedantic
)]
#![warn(missing_docs, clippy::dbg_macro)]
#![deny(
clippy::all,
clippy::cast_lossless,
clippy::redundant_closure_for_method_calls,
clippy::unnested_or_patterns,
clippy::trivially_copy_pass_by_ref,
clippy::needless_pass_by_value,
clippy::match_wildcard_for_single_variants,
clippy::map_unwrap_or,
unused_qualifications,
// rustc lint groups https://doc.rust-lang.org/rustc/lints/groups.html
warnings,
future_incompatible,
let_underscore,
nonstandard_style,
rust_2018_compatibility,
rust_2018_idioms,
rust_2021_compatibility,
unused,
// rustc allowed-by-default lints https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html
macro_use_extern_crate,
meta_variable_misuse,
missing_abi,
missing_copy_implementations,
missing_debug_implementations,
non_ascii_idents,
noop_method_call,
single_use_lifetimes,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unsafe_op_in_unsafe_fn,
unused_crate_dependencies,
unused_import_braces,
unused_lifetimes,
unreachable_pub,
trivial_numeric_casts,
unused_qualifications,
unused_tuple_struct_fields,
variant_size_differences,
// rustdoc lints https://doc.rust-lang.org/rustdoc/lints.html
rustdoc::broken_intra_doc_links,
missing_debug_implementations,
missing_copy_implementations,
deprecated_in_future,
meta_variable_misuse,
non_ascii_idents,
rust_2018_compatibility,
rust_2018_idioms,
future_incompatible,
nonstandard_style
rustdoc::private_intra_doc_links,
rustdoc::missing_crate_level_docs,
rustdoc::private_doc_tests,
rustdoc::invalid_codeblock_attributes,
rustdoc::invalid_rust_codeblocks,
rustdoc::bare_urls,
// clippy categories https://doc.rust-lang.org/clippy/
clippy::all,
clippy::correctness,
clippy::suspicious,
clippy::style,
clippy::complexity,
clippy::perf,
clippy::pedantic,
clippy::nursery,
)]
#![allow(
clippy::module_name_repetitions,
clippy::too_many_lines,
clippy::let_unit_value
clippy::cognitive_complexity,
clippy::let_unit_value,
clippy::redundant_pub_crate
)]
pub mod error;

2
boa_parser/src/parser/cursor/buffered_lexer/mod.rs

@ -105,7 +105,7 @@ where
}
#[inline]
pub(super) fn strict_mode(&self) -> bool {
pub(super) const fn strict_mode(&self) -> bool {
self.lexer.strict_mode()
}

49
boa_parser/src/parser/cursor/mod.rs

@ -87,10 +87,8 @@ where
/// This function will panic if there is no further token in the cursor.
#[inline]
#[track_caller]
#[allow(clippy::let_underscore_drop)]
pub(super) fn advance(&mut self, interner: &mut Interner) {
let _ = self
.next(interner)
self.next(interner)
.expect("tried to advance cursor, but the buffer was empty");
}
@ -108,7 +106,7 @@ where
/// Gets the current strict mode for the cursor.
#[inline]
pub(super) fn strict_mode(&self) -> bool {
pub(super) const fn strict_mode(&self) -> bool {
self.buffered_lexer.strict_mode()
}
@ -120,7 +118,7 @@ where
/// Returns if the cursor is currently in an arrow function declaration.
#[inline]
pub(super) fn arrow(&self) -> bool {
pub(super) const fn arrow(&self) -> bool {
self.arrow
}
@ -132,7 +130,7 @@ where
/// Returns if the cursor is currently used in `JSON.parse`.
#[inline]
pub(super) fn json_parse(&self) -> bool {
pub(super) const fn json_parse(&self) -> bool {
self.json_parse
}
@ -156,15 +154,18 @@ where
identifier: Sym,
position: Position,
) -> ParseResult<()> {
if let Some(env) = self.private_environments_stack.last_mut() {
env.entry(identifier).or_insert(position);
Ok(())
} else {
Err(Error::general(
"private identifier declared outside of class",
position,
))
}
self.private_environments_stack.last_mut().map_or_else(
|| {
Err(Error::general(
"private identifier declared outside of class",
position,
))
},
|env| {
env.entry(identifier).or_insert(position);
Ok(())
},
)
}
/// Pop the last private environment.
@ -231,14 +232,14 @@ where
&mut self,
interner: &mut Interner,
) -> ParseResult<SemicolonResult<'_>> {
match self.buffered_lexer.peek(0, false, interner)? {
Some(tk) => match tk.kind() {
self.buffered_lexer.peek(0, false, interner)?.map_or(
Ok(SemicolonResult::Found(None)),
|tk| match tk.kind() {
TokenKind::Punctuator(Punctuator::Semicolon | Punctuator::CloseBlock)
| TokenKind::LineTerminator => Ok(SemicolonResult::Found(Some(tk))),
_ => Ok(SemicolonResult::NotFound(tk)),
},
None => Ok(SemicolonResult::Found(None)),
}
)
}
/// Consumes the next token if it is a semicolon, or returns an `Error` if it's not.
@ -306,11 +307,11 @@ where
skip_n: usize,
interner: &mut Interner,
) -> ParseResult<Option<bool>> {
if let Some(t) = self.buffered_lexer.peek(skip_n, false, interner)? {
Ok(Some(t.kind() == &TokenKind::LineTerminator))
} else {
Ok(None)
}
self.buffered_lexer
.peek(skip_n, false, interner)?
.map_or(Ok(None), |t| {
Ok(Some(t.kind() == &TokenKind::LineTerminator))
})
}
/// Advance the cursor to the next token and retrieve it, only if it's of `kind` type.

3
boa_parser/src/parser/expression/assignment/mod.rs

@ -13,6 +13,7 @@ mod conditional;
mod exponentiation;
mod r#yield;
use super::check_strict_arguments_or_eval;
use crate::{
lexer::{Error as LexError, InputElement, TokenKind},
parser::{
@ -41,8 +42,6 @@ use std::io::Read;
pub(super) use exponentiation::ExponentiationExpression;
use super::check_strict_arguments_or_eval;
/// Assignment expression parsing.
///
/// This can be one of the following:

14
boa_parser/src/parser/expression/left_hand_side/optional/mod.rs

@ -1,26 +1,22 @@
#[cfg(test)]
mod tests;
use std::io::Read;
use boa_interner::{Interner, Sym};
use boa_profiler::Profiler;
use crate::{
lexer::{Token, TokenKind},
parser::{
cursor::Cursor, expression::Expression, AllowAwait, AllowYield, OrAbrupt, ParseResult,
TokenParser,
cursor::Cursor, expression::left_hand_side::arguments::Arguments, expression::Expression,
AllowAwait, AllowYield, OrAbrupt, ParseResult, TokenParser,
},
Error,
};
use super::arguments::Arguments;
use boa_ast::{
self as ast,
expression::{access::PropertyAccessField, Optional, OptionalOperation, OptionalOperationKind},
Punctuator,
};
use boa_interner::{Interner, Sym};
use boa_profiler::Profiler;
use std::io::Read;
/// Parses an optional expression.
///

2
boa_parser/src/parser/expression/mod.rs

@ -736,7 +736,7 @@ expression!(
);
/// Returns an error if `arguments` or `eval` are used as identifier in strict mode.
fn check_strict_arguments_or_eval(ident: Identifier, position: Position) -> ParseResult<()> {
const fn check_strict_arguments_or_eval(ident: Identifier, position: Position) -> ParseResult<()> {
match ident.sym() {
Sym::ARGUMENTS => Err(Error::general(
"unexpected identifier 'arguments' in strict mode",

6
boa_parser/src/parser/expression/primary/array_initializer/mod.rs

@ -124,10 +124,8 @@ where
}
}
if last_spread {
if let Some(None) = elements.last() {
has_trailing_comma_spread = true;
}
if last_spread && elements.last() == Some(&None) {
has_trailing_comma_spread = true;
}
Ok(literal::ArrayLiteral::new(

7
boa_parser/src/parser/expression/primary/async_function_expression/mod.rs

@ -77,10 +77,9 @@ where
if cursor.strict_mode() && [Sym::EVAL, Sym::ARGUMENTS].contains(&name.sym()) {
return Err(Error::lex(LexError::Syntax(
"Unexpected eval or arguments in strict mode".into(),
match cursor.peek(0, interner)? {
Some(token) => token.span().end(),
None => Position::new(1, 1),
},
cursor
.peek(0, interner)?
.map_or_else(|| Position::new(1, 1), |token| token.span().end()),
)));
}
}

7
boa_parser/src/parser/expression/primary/async_generator_expression/mod.rs

@ -86,10 +86,9 @@ where
if cursor.strict_mode() && [Sym::EVAL, Sym::ARGUMENTS].contains(&name.sym()) {
return Err(Error::lex(LexError::Syntax(
"Unexpected eval or arguments in strict mode".into(),
match cursor.peek(0, interner)? {
Some(token) => token.span().end(),
None => Position::new(1, 1),
},
cursor
.peek(0, interner)?
.map_or_else(|| Position::new(1, 1), |token| token.span().end()),
)));
}
}

7
boa_parser/src/parser/expression/primary/function_expression/mod.rs

@ -79,10 +79,9 @@ where
if cursor.strict_mode() && [Sym::EVAL, Sym::ARGUMENTS].contains(&name.sym()) {
return Err(Error::lex(LexError::Syntax(
"Unexpected eval or arguments in strict mode".into(),
match cursor.peek(0, interner)? {
Some(token) => token.span().end(),
None => Position::new(1, 1),
},
cursor
.peek(0, interner)?
.map_or_else(|| Position::new(1, 1), |token| token.span().end()),
)));
}
}

7
boa_parser/src/parser/expression/primary/generator_expression/mod.rs

@ -82,10 +82,9 @@ where
if cursor.strict_mode() && [Sym::EVAL, Sym::ARGUMENTS].contains(&name.sym()) {
return Err(Error::lex(LexError::Syntax(
"Unexpected eval or arguments in strict mode".into(),
match cursor.peek(0, interner)? {
Some(token) => token.span().end(),
None => Position::new(1, 1),
},
cursor
.peek(0, interner)?
.map_or_else(|| Position::new(1, 1), |token| token.span().end()),
)));
}
}

4
boa_parser/src/parser/expression/primary/mod.rs

@ -450,8 +450,8 @@ where
}
};
let is_arrow = if let Some(TokenKind::Punctuator(Punctuator::Arrow)) =
cursor.peek(0, interner)?.map(Token::kind)
let is_arrow = if cursor.peek(0, interner)?.map(Token::kind)
== Some(&TokenKind::Punctuator(Punctuator::Arrow))
{
!cursor.peek_is_line_terminator(0, interner).or_abrupt()?
} else {

7
boa_parser/src/parser/expression/primary/object_initializer/mod.rs

@ -121,10 +121,9 @@ where
if let Some(position) = duplicate_proto_position {
if !cursor.json_parse()
&& match cursor.peek(0, interner)? {
Some(token) => token.kind() != &TokenKind::Punctuator(Punctuator::Assign),
None => true,
}
&& cursor.peek(0, interner)?.map_or(true, |token| {
token.kind() != &TokenKind::Punctuator(Punctuator::Assign)
})
{
return Err(Error::general(
"Duplicate __proto__ fields are not allowed in object literals.",

8
boa_parser/src/parser/expression/update.rs

@ -5,12 +5,14 @@
//!
//! [spec]: https://tc39.es/ecma262/#sec-update-expressions
use super::{check_strict_arguments_or_eval, left_hand_side::LeftHandSideExpression};
use crate::{
lexer::{Error as LexError, TokenKind},
parser::{
expression::unary::UnaryExpression, AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult,
TokenParser,
expression::{
check_strict_arguments_or_eval, left_hand_side::LeftHandSideExpression,
unary::UnaryExpression,
},
AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
},
Error,
};

4
boa_parser/src/parser/function/mod.rs

@ -15,7 +15,7 @@ use crate::{
parser::{
expression::{BindingIdentifier, Initializer},
statement::{ArrayBindingPattern, ObjectBindingPattern, StatementList},
AllowAwait, AllowYield, Cursor, TokenParser,
AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
},
Error,
};
@ -30,8 +30,6 @@ use boa_macros::utf16;
use boa_profiler::Profiler;
use std::io::Read;
use super::{OrAbrupt, ParseResult};
/// Formal parameters parsing.
///
/// More information:

9
boa_parser/src/parser/mod.rs

@ -9,9 +9,6 @@ pub(crate) mod function;
#[cfg(test)]
mod tests;
use rustc_hash::FxHashSet;
use std::io::Read;
use crate::{
error::ParseResult,
lexer::TokenKind,
@ -31,6 +28,8 @@ use boa_ast::{
};
use boa_interner::Interner;
use boa_macros::utf16;
use rustc_hash::FxHashSet;
use std::io::Read;
/// Trait implemented by parsers.
///
@ -240,7 +239,7 @@ pub struct Script {
impl Script {
/// Create a new `Script` parser.
#[inline]
fn new(direct_eval: bool) -> Self {
const fn new(direct_eval: bool) -> Self {
Self { direct_eval }
}
}
@ -315,7 +314,7 @@ pub struct ScriptBody {
impl ScriptBody {
/// Create a new `ScriptBody` parser.
#[inline]
fn new(direct_eval: bool) -> Self {
const fn new(direct_eval: bool) -> Self {
Self { direct_eval }
}
}

6
boa_parser/src/parser/statement/block/mod.rs

@ -10,10 +10,12 @@
#[cfg(test)]
mod tests;
use super::StatementList;
use crate::{
lexer::TokenKind,
parser::{AllowAwait, AllowReturn, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser},
parser::{
statement::StatementList, AllowAwait, AllowReturn, AllowYield, Cursor, OrAbrupt,
ParseResult, TokenParser,
},
Error,
};
use boa_ast::{

9
boa_parser/src/parser/statement/break_stm/mod.rs

@ -11,7 +11,7 @@
mod tests;
use crate::{
lexer::TokenKind,
lexer::{Token, TokenKind},
parser::{
cursor::{Cursor, SemicolonResult},
expression::LabelIdentifier,
@ -62,11 +62,8 @@ where
cursor.expect((Keyword::Break, false), "break statement", interner)?;
let label = if let SemicolonResult::Found(tok) = cursor.peek_semicolon(interner)? {
match tok {
Some(tok) if tok.kind() == &TokenKind::Punctuator(Punctuator::Semicolon) => {
cursor.advance(interner);
}
_ => {}
if tok.map(Token::kind) == Some(&TokenKind::Punctuator(Punctuator::Semicolon)) {
cursor.advance(interner);
}
None

6
boa_parser/src/parser/statement/declaration/hoistable/class_decl/mod.rs

@ -660,7 +660,7 @@ where
match class_element_name {
ClassElementName::PropertyName(property_name) if r#static => {
if let Some(Sym::PROTOTYPE) = property_name.prop_name() {
if property_name.prop_name() == Some(Sym::PROTOTYPE) {
return Err(Error::general(
"class may not have static method definitions named 'prototype'",
name_position,
@ -717,7 +717,7 @@ where
cursor.set_strict_mode(strict);
match class_element_name {
ClassElementName::PropertyName(property_name) if r#static => {
if let Some(Sym::PROTOTYPE) = property_name.prop_name() {
if property_name.prop_name() == Some(Sym::PROTOTYPE) {
return Err(Error::general(
"class may not have static method definitions named 'prototype'",
name_position,
@ -762,7 +762,7 @@ where
match class_element_name {
ClassElementName::PropertyName(property_name) if r#static => {
if let Some(Sym::PROTOTYPE) = property_name.prop_name() {
if property_name.prop_name() == Some(Sym::PROTOTYPE) {
return Err(Error::general(
"class may not have static method definitions named 'prototype'",
name_position,

7
boa_parser/src/parser/statement/declaration/hoistable/mod.rs

@ -165,10 +165,9 @@ fn parse_callable_declaration<R: Read, C: CallableDeclaration>(
if cursor.strict_mode() && [Sym::EVAL, Sym::ARGUMENTS].contains(&name.sym()) {
return Err(Error::lex(LexError::Syntax(
"Unexpected eval or arguments in strict mode".into(),
match cursor.peek(0, interner)? {
Some(token) => token.span().end(),
None => Position::new(1, 1),
},
cursor
.peek(0, interner)?
.map_or_else(|| Position::new(1, 1), |token| token.span().end()),
)));
}

7
boa_parser/src/parser/statement/if_stm/mod.rs

@ -4,8 +4,9 @@ mod tests;
use crate::{
lexer::TokenKind,
parser::{
expression::Expression, statement::declaration::FunctionDeclaration, AllowAwait,
AllowReturn, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
expression::Expression,
statement::{declaration::FunctionDeclaration, Statement},
AllowAwait, AllowReturn, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
},
Error,
};
@ -17,8 +18,6 @@ use boa_interner::Interner;
use boa_profiler::Profiler;
use std::io::Read;
use super::Statement;
/// If statement parsing.
///
/// An `if` statement will have a condition, a block statement, and an optional `else` statement.

10
boa_parser/src/parser/statement/iteration/do_while_statement.rs

@ -8,7 +8,7 @@
//! [spec]: https://tc39.es/ecma262/#sec-do-while-statement
use crate::{
lexer::TokenKind,
lexer::{Token, TokenKind},
parser::{
expression::Expression, statement::Statement, AllowAwait, AllowReturn, AllowYield, Cursor,
OrAbrupt, ParseResult, TokenParser,
@ -107,10 +107,10 @@ where
// Here, we only care to read the next token if it's a semicolon. If it's not, we
// automatically "enter" or assume a semicolon, since we have just read the `)` token:
// https://tc39.es/ecma262/#sec-automatic-semicolon-insertion
if let Some(tok) = cursor.peek(0, interner)? {
if let TokenKind::Punctuator(Punctuator::Semicolon) = *tok.kind() {
cursor.advance(interner);
}
if cursor.peek(0, interner)?.map(Token::kind)
== Some(&TokenKind::Punctuator(Punctuator::Semicolon))
{
cursor.advance(interner);
}
Ok(DoWhileLoop::new(body, cond))

4
boa_parser/src/parser/statement/labelled_stm/mod.rs

@ -3,7 +3,7 @@ use crate::{
parser::{
cursor::Cursor,
expression::LabelIdentifier,
statement::{AllowAwait, AllowReturn, Statement},
statement::{declaration::FunctionDeclaration, AllowAwait, AllowReturn, Statement},
AllowYield, OrAbrupt, ParseResult, TokenParser,
},
Error,
@ -13,8 +13,6 @@ use boa_interner::Interner;
use boa_profiler::Profiler;
use std::io::Read;
use super::declaration::FunctionDeclaration;
/// Labelled Statement Parsing
///
/// More information

9
boa_parser/src/parser/statement/return_stm/mod.rs

@ -1,5 +1,5 @@
use crate::{
lexer::TokenKind,
lexer::{Token, TokenKind},
parser::{
cursor::{Cursor, SemicolonResult},
expression::Expression,
@ -50,11 +50,8 @@ where
cursor.expect((Keyword::Return, false), "return statement", interner)?;
if let SemicolonResult::Found(tok) = cursor.peek_semicolon(interner)? {
match tok {
Some(tok) if tok.kind() == &TokenKind::Punctuator(Punctuator::Semicolon) => {
cursor.advance(interner);
}
_ => {}
if tok.map(Token::kind) == Some(&TokenKind::Punctuator(Punctuator::Semicolon)) {
cursor.advance(interner);
}
return Ok(Return::new(None));

81
boa_profiler/src/lib.rs

@ -1,16 +1,71 @@
#![allow(missing_copy_implementations, missing_debug_implementations)]
//! Profiler for the Boa JavaScript engine.
#![cfg_attr(not(test), forbid(clippy::unwrap_used))]
#![warn(missing_docs, clippy::dbg_macro)]
#![deny(
// rustc lint groups https://doc.rust-lang.org/rustc/lints/groups.html
warnings,
future_incompatible,
let_underscore,
nonstandard_style,
rust_2018_compatibility,
rust_2018_idioms,
rust_2021_compatibility,
unused,
// rustc allowed-by-default lints https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html
macro_use_extern_crate,
meta_variable_misuse,
missing_abi,
missing_copy_implementations,
missing_debug_implementations,
non_ascii_idents,
noop_method_call,
single_use_lifetimes,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unsafe_op_in_unsafe_fn,
unused_crate_dependencies,
unused_import_braces,
unused_lifetimes,
unused_qualifications,
unused_tuple_struct_fields,
variant_size_differences,
// rustdoc lints https://doc.rust-lang.org/rustdoc/lints.html
rustdoc::broken_intra_doc_links,
rustdoc::private_intra_doc_links,
rustdoc::missing_crate_level_docs,
rustdoc::private_doc_tests,
rustdoc::invalid_codeblock_attributes,
rustdoc::invalid_rust_codeblocks,
rustdoc::bare_urls,
// clippy categories https://doc.rust-lang.org/clippy/
clippy::all,
clippy::correctness,
clippy::suspicious,
clippy::style,
clippy::complexity,
clippy::perf,
clippy::pedantic,
clippy::nursery,
)]
use std::fmt::{self, Debug};
#[cfg(feature = "profiler")]
use measureme::{EventId, Profiler as MeasuremeProfiler, TimingGuard};
#[cfg(feature = "profiler")]
use once_cell::sync::OnceCell;
use std::fmt::{self, Debug};
#[cfg(feature = "profiler")]
use std::{
path::Path,
thread::{current, ThreadId},
};
/// Profiler for the Boa JavaScript engine.
#[cfg(feature = "profiler")]
pub struct Profiler {
profiler: MeasuremeProfiler,
@ -24,6 +79,7 @@ static mut INSTANCE: OnceCell<Profiler> = OnceCell::new();
#[cfg(feature = "profiler")]
impl Profiler {
/// Start a new profiled event.
pub fn start_event(&self, label: &str, category: &str) -> TimingGuard<'_> {
let kind = self.profiler.alloc_string(category);
let id = EventId::from_label(self.profiler.alloc_string(label));
@ -32,16 +88,19 @@ impl Profiler {
.start_recording_interval_event(kind, id, thread_id)
}
pub fn default() -> Self {
fn default() -> Self {
let profiler =
MeasuremeProfiler::new(Path::new("./my_trace")).expect("must be able to create file");
Self { profiler }
}
/// Return the global instance of the profiler.
#[must_use]
pub fn global() -> &'static Self {
unsafe { INSTANCE.get_or_init(Self::default) }
}
/// Drop the global instance of the profiler.
pub fn drop(&self) {
// In order to drop the INSTANCE we need to get ownership of it, which isn't possible on a static unless you make it a mutable static
// mutating statics is unsafe, so we need to wrap it as so.
@ -58,6 +117,7 @@ impl Profiler {
// Once `as_64()` is in stable we can do this:
// https://github.com/rust-lang/rust/pull/68531/commits/ea42b1c5b85f649728e3a3b334489bac6dce890a
// Until then our options are: use rust-nightly or use unsafe {}
#[allow(clippy::cast_possible_truncation)]
fn thread_id_to_u32(tid: ThreadId) -> u32 {
unsafe { std::mem::transmute::<ThreadId, u64>(tid) as u32 }
}
@ -69,17 +129,24 @@ impl Debug for Profiler {
}
}
/// An empty profiler that does nothing.
#[cfg(not(feature = "profiler"))]
#[derive(Copy, Clone)]
pub struct Profiler;
#[allow(clippy::unused_unit, clippy::unused_self)]
//#[allow(clippy::unused_unit, clippy::unused_self)]
#[cfg(not(feature = "profiler"))]
impl Profiler {
pub fn start_event(&self, _label: &str, _category: &str) -> () {}
/// Does nothing.
#[allow(clippy::unused_unit)]
pub const fn start_event(&self, _label: &str, _category: &str) -> () {}
pub fn drop(&self) {}
/// Does nothing.
pub const fn drop(&self) {}
pub fn global() -> Self {
/// Does nothing.
#[must_use]
pub const fn global() -> Self {
Self
}
}

1
boa_tester/Cargo.toml

@ -13,7 +13,6 @@ rust-version.workspace = true
[dependencies]
boa_engine = { workspace = true, features = ["intl"] }
boa_interner.workspace = true
boa_gc.workspace = true
boa_parser.workspace = true
clap = { version = "4.0.26", features = ["derive"] }

11
boa_tester/src/exec/js262.rs

@ -78,8 +78,9 @@ fn detach_array_buffer(_this: &JsValue, args: &[JsValue], _: &mut Context) -> Js
///
/// Accepts a string value as its first argument and executes it as an ECMAScript script.
fn eval_script(_this: &JsValue, args: &[JsValue], context: &mut Context) -> JsResult<JsValue> {
if let Some(source_text) = args.get(0).and_then(JsValue::as_string) {
match context.parse(source_text.to_std_string_escaped()) {
args.get(0).and_then(JsValue::as_string).map_or_else(
|| Ok(JsValue::undefined()),
|source_text| match context.parse(source_text.to_std_string_escaped()) {
// TODO: check strict
Err(e) => Err(JsNativeError::typ()
.with_message(format!("Uncaught Syntax Error: {e}"))
@ -87,10 +88,8 @@ fn eval_script(_this: &JsValue, args: &[JsValue], context: &mut Context) -> JsRe
// Calling eval here parses the code a second time.
// TODO: We can fix this after we have have defined the public api for the vm executer.
Ok(_) => context.eval(source_text.to_std_string_escaped()),
}
} else {
Ok(JsValue::undefined())
}
},
)
}
/// The `$262.gc()` function.

6
boa_tester/src/exec/mod.rs

@ -2,13 +2,10 @@
mod js262;
use std::borrow::Cow;
use crate::read::ErrorType;
use super::{
Harness, Outcome, Phase, SuiteResult, Test, TestFlags, TestOutcomeResult, TestResult, TestSuite,
};
use crate::read::ErrorType;
use boa_engine::{
builtins::JsArgs, object::FunctionBuilder, property::Attribute, Context, JsNativeErrorKind,
JsResult, JsValue,
@ -17,6 +14,7 @@ use boa_gc::{Finalize, Gc, GcCell, Trace};
use boa_parser::Parser;
use colored::Colorize;
use rayon::prelude::*;
use std::borrow::Cow;
impl TestSuite {
/// Runs the test suite.

121
boa_tester/src/main.rs

@ -2,64 +2,69 @@
//!
//! This crate will run the full ECMAScript test suite (Test262) and report compliance of the
//! `boa` wrap_err.
#![doc(
html_logo_url = "https://raw.githubusercontent.com/boa-dev/boa/main/assets/logo.svg",
html_favicon_url = "https://raw.githubusercontent.com/boa-dev/boa/main/assets/logo.svg"
)]
#![cfg_attr(not(test), deny(clippy::unwrap_used))]
#![warn(
clippy::perf,
clippy::single_match_else,
clippy::dbg_macro,
clippy::doc_markdown,
clippy::wildcard_imports,
clippy::struct_excessive_bools,
clippy::doc_markdown,
clippy::semicolon_if_nothing_returned,
clippy::pedantic
)]
#![warn(missing_docs, clippy::dbg_macro)]
#![deny(
clippy::all,
clippy::cast_lossless,
clippy::redundant_closure_for_method_calls,
clippy::unnested_or_patterns,
clippy::trivially_copy_pass_by_ref,
clippy::needless_pass_by_value,
clippy::match_wildcard_for_single_variants,
clippy::map_unwrap_or,
unused_qualifications,
unused_import_braces,
unused_lifetimes,
unreachable_pub,
trivial_numeric_casts,
// rustdoc,
missing_debug_implementations,
missing_copy_implementations,
deprecated_in_future,
meta_variable_misuse,
non_ascii_idents,
rust_2018_compatibility,
rust_2018_idioms,
// rustc lint groups https://doc.rust-lang.org/rustc/lints/groups.html
warnings,
future_incompatible,
let_underscore,
nonstandard_style,
rust_2018_compatibility,
rust_2018_idioms,
rust_2021_compatibility,
unused,
// rustc allowed-by-default lints https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html
macro_use_extern_crate,
meta_variable_misuse,
missing_abi,
missing_copy_implementations,
missing_debug_implementations,
non_ascii_idents,
noop_method_call,
single_use_lifetimes,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unsafe_op_in_unsafe_fn,
unused_crate_dependencies,
unused_import_braces,
unused_lifetimes,
unused_qualifications,
unused_tuple_struct_fields,
variant_size_differences,
// rustdoc lints https://doc.rust-lang.org/rustdoc/lints.html
rustdoc::broken_intra_doc_links,
rustdoc::private_intra_doc_links,
rustdoc::missing_crate_level_docs,
rustdoc::private_doc_tests,
rustdoc::invalid_codeblock_attributes,
rustdoc::invalid_rust_codeblocks,
rustdoc::bare_urls,
// clippy categories https://doc.rust-lang.org/clippy/
clippy::all,
clippy::correctness,
clippy::suspicious,
clippy::style,
clippy::complexity,
clippy::perf,
clippy::pedantic,
clippy::nursery,
)]
#![allow(
clippy::use_self, // TODO: deny once false positives are fixed
clippy::module_name_repetitions,
clippy::cast_possible_truncation,
clippy::cast_sign_loss,
clippy::cast_precision_loss,
clippy::cast_possible_wrap,
clippy::cast_ptr_alignment,
clippy::missing_panics_doc,
clippy::use_self,
clippy::too_many_lines,
clippy::unreadable_literal,
clippy::missing_inline_in_public_items,
clippy::cognitive_complexity,
clippy::must_use_candidate,
clippy::missing_errors_doc,
clippy::as_conversions,
clippy::let_unit_value,
clippy::redundant_pub_crate,
clippy::cast_precision_loss,
clippy::cast_possible_wrap
)]
mod exec;
@ -123,7 +128,7 @@ impl Ignored {
.unwrap_or_default()
}
pub(crate) fn contains_any_flag(&self, flags: TestFlags) -> bool {
pub(crate) const fn contains_any_flag(&self, flags: TestFlags) -> bool {
flags.intersects(self.flags)
}
}
@ -427,15 +432,15 @@ impl From<Option<Negative>> for Outcome {
bitflags! {
struct TestFlags: u16 {
const STRICT = 0b000000001;
const NO_STRICT = 0b000000010;
const MODULE = 0b000000100;
const RAW = 0b000001000;
const ASYNC = 0b000010000;
const GENERATED = 0b000100000;
const CAN_BLOCK_IS_FALSE = 0b001000000;
const CAN_BLOCK_IS_TRUE = 0b010000000;
const NON_DETERMINISTIC = 0b100000000;
const STRICT = 0b0_0000_0001;
const NO_STRICT = 0b0_0000_0010;
const MODULE = 0b0_0000_0100;
const RAW = 0b0_0000_1000;
const ASYNC = 0b0_0001_0000;
const GENERATED = 0b0_0010_0000;
const CAN_BLOCK_IS_FALSE = 0b0_0100_0000;
const CAN_BLOCK_IS_TRUE = 0b0_1000_0000;
const NON_DETERMINISTIC = 0b1_0000_0000;
}
}
@ -512,7 +517,7 @@ impl<'de> Deserialize<'de> for TestFlags {
struct RawFlagsVisitor;
impl<'de> Visitor<'de> for RawFlagsVisitor {
impl Visitor<'_> for RawFlagsVisitor {
type Value = TestFlags;
fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {

12
boa_tester/src/read.rs

@ -53,13 +53,13 @@ pub(super) enum ErrorType {
}
impl ErrorType {
pub(super) fn as_str(self) -> &'static str {
pub(super) const fn as_str(self) -> &'static str {
match self {
ErrorType::Test262Error => "Test262Error",
ErrorType::SyntaxError => "SyntaxError",
ErrorType::ReferenceError => "ReferenceError",
ErrorType::RangeError => "RangeError",
ErrorType::TypeError => "TypeError",
Self::Test262Error => "Test262Error",
Self::SyntaxError => "SyntaxError",
Self::ReferenceError => "ReferenceError",
Self::RangeError => "RangeError",
Self::TypeError => "TypeError",
}
}
}

98
boa_unicode/src/lib.rs

@ -11,60 +11,58 @@
html_favicon_url = "https://raw.githubusercontent.com/boa-dev/boa/main/assets/logo.svg"
)]
#![cfg_attr(not(test), forbid(clippy::unwrap_used))]
#![warn(
clippy::perf,
clippy::single_match_else,
clippy::dbg_macro,
clippy::doc_markdown,
clippy::wildcard_imports,
clippy::struct_excessive_bools,
clippy::doc_markdown,
clippy::semicolon_if_nothing_returned,
clippy::pedantic
)]
#![warn(missing_docs, clippy::dbg_macro)]
#![deny(
clippy::all,
clippy::cast_lossless,
clippy::redundant_closure_for_method_calls,
clippy::use_self,
clippy::unnested_or_patterns,
clippy::trivially_copy_pass_by_ref,
clippy::needless_pass_by_value,
clippy::match_wildcard_for_single_variants,
clippy::map_unwrap_or,
unused_qualifications,
unused_import_braces,
unused_lifetimes,
unreachable_pub,
trivial_numeric_casts,
// rustdoc,
missing_debug_implementations,
missing_copy_implementations,
deprecated_in_future,
meta_variable_misuse,
non_ascii_idents,
rust_2018_compatibility,
rust_2018_idioms,
// rustc lint groups https://doc.rust-lang.org/rustc/lints/groups.html
warnings,
future_incompatible,
let_underscore,
nonstandard_style,
rust_2018_compatibility,
rust_2018_idioms,
rust_2021_compatibility,
unused,
// rustc allowed-by-default lints https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html
macro_use_extern_crate,
meta_variable_misuse,
missing_abi,
missing_copy_implementations,
missing_debug_implementations,
non_ascii_idents,
noop_method_call,
single_use_lifetimes,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unsafe_op_in_unsafe_fn,
unused_crate_dependencies,
unused_import_braces,
unused_lifetimes,
unused_qualifications,
unused_tuple_struct_fields,
variant_size_differences,
// rustdoc lints https://doc.rust-lang.org/rustdoc/lints.html
rustdoc::broken_intra_doc_links,
rustdoc::private_intra_doc_links,
rustdoc::missing_crate_level_docs,
rustdoc::private_doc_tests,
rustdoc::invalid_codeblock_attributes,
rustdoc::invalid_rust_codeblocks,
rustdoc::bare_urls,
// clippy categories https://doc.rust-lang.org/clippy/
clippy::all,
clippy::correctness,
clippy::suspicious,
clippy::style,
clippy::complexity,
clippy::perf,
clippy::pedantic,
clippy::nursery,
)]
#![allow(
clippy::module_name_repetitions,
clippy::cast_possible_truncation,
clippy::cast_sign_loss,
clippy::cast_precision_loss,
clippy::cast_possible_wrap,
clippy::cast_ptr_alignment,
clippy::missing_panics_doc,
clippy::too_many_lines,
clippy::unreadable_literal,
clippy::missing_inline_in_public_items,
clippy::cognitive_complexity,
clippy::must_use_candidate,
clippy::missing_errors_doc,
clippy::as_conversions,
clippy::let_unit_value
)]
#![allow(clippy::redundant_pub_crate)]
mod tables;
#[cfg(test)]

101
boa_wasm/src/lib.rs

@ -1,66 +1,67 @@
//! A ECMAScript WASM implementation based on boa_engine.
#![doc(
html_logo_url = "https://raw.githubusercontent.com/boa-dev/boa/main/assets/logo.svg",
html_favicon_url = "https://raw.githubusercontent.com/boa-dev/boa/main/assets/logo.svg"
)]
#![cfg_attr(not(test), forbid(clippy::unwrap_used))]
#![warn(
clippy::perf,
clippy::single_match_else,
clippy::dbg_macro,
clippy::doc_markdown,
clippy::wildcard_imports,
clippy::struct_excessive_bools,
clippy::doc_markdown,
clippy::semicolon_if_nothing_returned,
clippy::pedantic
)]
#![warn(missing_docs, clippy::dbg_macro)]
#![deny(
clippy::all,
clippy::cast_lossless,
clippy::redundant_closure_for_method_calls,
clippy::use_self,
clippy::unnested_or_patterns,
clippy::trivially_copy_pass_by_ref,
clippy::needless_pass_by_value,
clippy::match_wildcard_for_single_variants,
clippy::map_unwrap_or,
unused_qualifications,
unused_import_braces,
unused_lifetimes,
unreachable_pub,
trivial_numeric_casts,
// rustdoc,
missing_debug_implementations,
missing_copy_implementations,
deprecated_in_future,
meta_variable_misuse,
non_ascii_idents,
rust_2018_compatibility,
rust_2018_idioms,
// rustc lint groups https://doc.rust-lang.org/rustc/lints/groups.html
warnings,
future_incompatible,
let_underscore,
nonstandard_style,
)]
#![allow(
clippy::module_name_repetitions,
clippy::cast_possible_truncation,
clippy::cast_sign_loss,
clippy::cast_precision_loss,
clippy::cast_possible_wrap,
clippy::cast_ptr_alignment,
clippy::missing_panics_doc,
clippy::too_many_lines,
clippy::unreadable_literal,
clippy::missing_inline_in_public_items,
clippy::cognitive_complexity,
clippy::must_use_candidate,
clippy::missing_errors_doc,
clippy::as_conversions,
clippy::let_unit_value
rust_2018_compatibility,
rust_2018_idioms,
rust_2021_compatibility,
unused,
// rustc allowed-by-default lints https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html
macro_use_extern_crate,
meta_variable_misuse,
missing_abi,
missing_copy_implementations,
missing_debug_implementations,
non_ascii_idents,
noop_method_call,
single_use_lifetimes,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unsafe_op_in_unsafe_fn,
unused_crate_dependencies,
unused_import_braces,
unused_lifetimes,
unused_qualifications,
unused_tuple_struct_fields,
variant_size_differences,
// rustdoc lints https://doc.rust-lang.org/rustdoc/lints.html
rustdoc::broken_intra_doc_links,
rustdoc::private_intra_doc_links,
rustdoc::missing_crate_level_docs,
rustdoc::private_doc_tests,
rustdoc::invalid_codeblock_attributes,
rustdoc::invalid_rust_codeblocks,
rustdoc::bare_urls,
// clippy categories https://doc.rust-lang.org/clippy/
clippy::all,
clippy::correctness,
clippy::suspicious,
clippy::style,
clippy::complexity,
clippy::perf,
clippy::pedantic,
clippy::nursery,
)]
use boa_engine::Context;
use getrandom as _;
use wasm_bindgen::prelude::*;
/// Evaluate the given ECMAScript code.
#[wasm_bindgen]
pub fn evaluate(src: &str) -> Result<String, JsValue> {
// Setup executor

Loading…
Cancel
Save