
Parser fixes #225 #240 #273 (#281)

New parser!
Plus loads of tidy-up in various places.

Co-authored-by: Jason Williams <jwilliams720@bloomberg.net>
Co-authored-by: HalidOdat <halidodat@gmail.com>
Co-authored-by: Iban Eguia <iban.eguia@cern.ch>
Co-authored-by: Iban Eguia <razican@protonmail.ch>
Jason Williams, committed via GitHub
commit 48c6e886d4
48 changed files (changed lines, then path):

  1.  30    .vscode/tasks.json
  2.  275   Cargo.lock
  3.  19    Dockerfile
  4.  8     boa/Cargo.toml
  5.  2     boa/benches/parser.rs
  6.  2     boa/benches/string.rs
  7.  36    boa/src/builtins/array/tests.rs
  8.  9     boa/src/builtins/boolean/mod.rs
  9.  6     boa/src/builtins/boolean/tests.rs
  10. 15    boa/src/builtins/console.rs
  11. 25    boa/src/builtins/function/mod.rs
  12. 2     boa/src/builtins/function/tests.rs
  13. 4     boa/src/builtins/json.rs
  14. 54    boa/src/builtins/math/tests.rs
  15. 3     boa/src/builtins/number/mod.rs
  16. 14    boa/src/builtins/number/tests.rs
  17. 4     boa/src/builtins/property.rs
  18. 16    boa/src/builtins/regexp/mod.rs
  19. 8     boa/src/builtins/regexp/tests.rs
  20. 20    boa/src/builtins/string/mod.rs
  21. 20    boa/src/builtins/string/tests.rs
  22. 4     boa/src/builtins/symbol/tests.rs
  23. 35    boa/src/builtins/value/mod.rs
  24. 18    boa/src/environment/declarative_environment_record.rs
  25. 6     boa/src/environment/environment_record_trait.rs
  26. 13    boa/src/environment/function_environment_record.rs
  27. 2     boa/src/environment/global_environment_record.rs
  28. 6     boa/src/environment/lexical_environment.rs
  29. 2     boa/src/environment/object_environment_record.rs
  30. 181   boa/src/exec/mod.rs
  31. 37    boa/src/lib.rs
  32. 5     boa/src/syntax/ast/constant.rs
  33. 246   boa/src/syntax/ast/expr.rs
  34. 9     boa/src/syntax/ast/keyword.rs
  35. 2     boa/src/syntax/ast/mod.rs
  36. 372   boa/src/syntax/ast/node.rs
  37. 81    boa/src/syntax/ast/op.rs
  38. 11    boa/src/syntax/ast/pos.rs
  39. 42    boa/src/syntax/ast/punc.rs
  40. 47    boa/src/syntax/ast/token.rs
  41. 64    boa/src/syntax/lexer/mod.rs
  42. 423   boa/src/syntax/lexer/tests.rs
  43. 102   boa/src/syntax/parser/cursor.rs
  44. 2189  boa/src/syntax/parser/mod.rs
  45. 540   boa/src/syntax/parser/tests.rs
  46. 10    boa/src/wasm.rs
  47. 2     boa_cli/Cargo.toml
  48. 6     boa_cli/src/main.rs

30
.vscode/tasks.json vendored

@ -7,7 +7,35 @@
"type": "process",
"label": "Cargo Run",
"command": "cargo",
"args": ["run"],
"args": ["run", "./tests/js/test.js"],
"problemMatcher": ["$rustc"],
"group": {
"kind": "build",
"isDefault": true
},
"presentation": {
"clear": true
}
},
{
"type": "process",
"label": "Get Tokens",
"command": "cargo",
"args": ["run", "--", "-t=Debug", "./tests/js/test.js"],
"problemMatcher": ["$rustc"],
"group": {
"kind": "build",
"isDefault": true
},
"presentation": {
"clear": true
}
},
{
"type": "process",
"label": "Get AST",
"command": "cargo",
"args": ["run", "--", "-a=Debug", "./tests/js/test.js"],
"problemMatcher": ["$rustc"],
"group": {
"kind": "build",

275
Cargo.lock generated

@ -16,9 +16,9 @@ dependencies = [
[[package]]
name = "aho-corasick"
version = "0.7.8"
version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "743ad5a418686aad3b87fd14c43badd828cf26e214a00f92a384291cf22e1811"
checksum = "8716408b8bc624ed7f65d223ddb9ac2d044c0547b6fa4b0d554f3a9540496ada"
dependencies = [
"memchr",
]
@ -32,12 +32,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "anyhow"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7825f6833612eb2414095684fcf6c635becf3ce97fe48cf6421321e93bfbd53c"
[[package]]
name = "atty"
version = "0.2.14"
@ -49,12 +43,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "autocfg"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2"
[[package]]
name = "autocfg"
version = "1.0.0"
@ -77,9 +65,9 @@ dependencies = [
[[package]]
name = "bstr"
version = "0.2.11"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "502ae1441a0a5adb8fbd38a5955a6416b9493e92b465de5e4a9bde6a539c2c48"
checksum = "2889e6d50f394968c8bf4240dc3f2a7eb4680844d27308f798229ac9d4725f41"
dependencies = [
"lazy_static",
"memchr",
@ -89,24 +77,15 @@ dependencies = [
[[package]]
name = "bumpalo"
version = "3.1.2"
version = "3.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fb8038c1ddc0a5f73787b130f4cc75151e96ed33e417fde765eb5a81e3532f4"
checksum = "12ae9db68ad7fac5fe51304d20f016c911539251075a214f8e663babefa35187"
[[package]]
name = "byteorder"
version = "1.3.2"
version = "1.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5"
[[package]]
name = "c2-chacha"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb"
dependencies = [
"ppv-lite86",
]
checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
[[package]]
name = "cast"
@ -175,24 +154,26 @@ dependencies = [
[[package]]
name = "crossbeam-deque"
version = "0.7.2"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3aa945d63861bfe624b55d153a39684da1e8c0bc8fba932f7ee3a3c16cea3ca"
checksum = "9f02af974daeee82218205558e51ec8768b48cf524bd01d550abe5573a608285"
dependencies = [
"crossbeam-epoch",
"crossbeam-utils",
"maybe-uninit",
]
[[package]]
name = "crossbeam-epoch"
version = "0.8.0"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5064ebdbf05ce3cb95e45c8b086f72263f4166b29b97f6baff7ef7fe047b55ac"
checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace"
dependencies = [
"autocfg 0.1.7",
"autocfg",
"cfg-if",
"crossbeam-utils",
"lazy_static",
"maybe-uninit",
"memoffset",
"scopeguard",
]
@ -209,11 +190,11 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
version = "0.7.0"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce446db02cdc3165b94ae73111e570793400d0794e46125cc4056c81cbb039f4"
checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
dependencies = [
"autocfg 0.1.7",
"autocfg",
"cfg-if",
"lazy_static",
]
@ -233,9 +214,9 @@ dependencies = [
[[package]]
name = "csv-core"
version = "0.1.6"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b5cadb6b25c77aeff80ba701712494213f4a8418fcda2ee11b6560c3ad0bf4c"
checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90"
dependencies = [
"memchr",
]
@ -285,9 +266,9 @@ dependencies = [
[[package]]
name = "hermit-abi"
version = "0.1.6"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eff2656d88f158ce120947499e971d743c05dbcbed62e5bd2f38f1698bbc3772"
checksum = "1010591b26bbfe835e9faeabeb11866061cc7dcebffd56ad7d0942d0e61aefd8"
dependencies = [
"libc",
]
@ -309,9 +290,9 @@ checksum = "b8b7a7c0c47db5545ed3fef7468ee7bb5b74691498139e4b3f6a20685dc6dd8e"
[[package]]
name = "js-sys"
version = "0.3.35"
version = "0.3.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7889c7c36282151f6bf465be4700359318aef36baa951462382eae49e9577cf9"
checksum = "6a27d435371a2fa5b6d2b028a74bbdb1234f308da363226a2854ca3ff8ba7055"
dependencies = [
"wasm-bindgen",
]
@ -324,9 +305,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.66"
version = "0.2.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d515b1f41455adea1313a4a2ac8a8a477634fbae63cc6100e3aebb207ce61558"
checksum = "dea0c0405123bba743ee3f91f49b1c7cfb684eef0da0a50110f758ccf24cdff0"
[[package]]
name = "log"
@ -338,31 +319,24 @@ dependencies = [
]
[[package]]
name = "memchr"
version = "2.3.0"
name = "maybe-uninit"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3197e20c7edb283f87c071ddfc7a2cca8f8e0b888c242959846a6fce03c72223"
dependencies = [
"libc",
]
checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
[[package]]
name = "memoffset"
version = "0.5.3"
name = "memchr"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75189eb85871ea5c2e2c15abbdd541185f63b408415e5051f5cac122d8c774b9"
dependencies = [
"rustc_version",
]
checksum = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400"
[[package]]
name = "nom"
version = "4.2.3"
name = "memoffset"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6"
checksum = "b4fc2c02a7e374099d4ee95a193111f72d2110197fe200272371758f6c3643d8"
dependencies = [
"memchr",
"version_check",
"autocfg",
]
[[package]]
@ -371,7 +345,7 @@ version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c62be47e61d1842b9170f0fdeec8eba98e60e90e5446449a0545e5152acd7096"
dependencies = [
"autocfg 1.0.0",
"autocfg",
]
[[package]]
@ -410,35 +384,35 @@ checksum = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b"
[[package]]
name = "proc-macro-error"
version = "0.4.8"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "875077759af22fa20b610ad4471d8155b321c89c3f2785526c9839b099be4e0a"
checksum = "18f33027081eba0a6d8aba6d1b1c3a3be58cbb12106341c2d5759fcd9b5277e7"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote 1.0.2",
"rustversion",
"syn 1.0.14",
"quote 1.0.3",
"syn 1.0.17",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "0.4.8"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5717d9fa2664351a01ed73ba5ef6df09c01a521cb42cb65a061432a826f3c7a"
checksum = "8a5b4b77fdb63c1eca72173d68d24501c54ab1269409f6b672c85deb18af69de"
dependencies = [
"proc-macro2",
"quote 1.0.2",
"rustversion",
"syn 1.0.14",
"quote 1.0.3",
"syn 1.0.17",
"syn-mid",
"version_check",
]
[[package]]
name = "proc-macro2"
version = "1.0.8"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3acb317c6ff86a4e579dfa00fc5e6cca91ecbb4e7eb2df0468805b674eb88548"
checksum = "6c09721c6781493a2a492a96b5a5bf19b65917fe6728884e7c44dd0c60ca3435"
dependencies = [
"unicode-xid 0.2.0",
]
@ -451,9 +425,9 @@ checksum = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
[[package]]
name = "quote"
version = "1.0.2"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
checksum = "2bdc6c187c65bca4260c9011c9e3132efe4909da44726bad24cf7572ae338d7f"
dependencies = [
"proc-macro2",
]
@ -473,11 +447,11 @@ dependencies = [
[[package]]
name = "rand_chacha"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853"
checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
dependencies = [
"c2-chacha",
"ppv-lite86",
"rand_core",
]
@ -525,9 +499,9 @@ dependencies = [
[[package]]
name = "regex"
version = "1.3.4"
version = "1.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "322cf97724bea3ee221b78fe25ac9c46114ebb51747ad5babd51a2fc6a8235a8"
checksum = "7f6946991529684867e47d86474e3a6d0c0ab9b82d5821e314b1ede31fa3a4b3"
dependencies = [
"aho-corasick",
"memchr",
@ -537,18 +511,18 @@ dependencies = [
[[package]]
name = "regex-automata"
version = "0.1.8"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92b73c2a1770c255c240eaa4ee600df1704a38dc3feaa6e949e7fcd4f8dc09f9"
checksum = "ae1ded71d66a4a97f5e961fd0cb25a5f366a42a41570d16a763a69c092c26ae4"
dependencies = [
"byteorder",
]
[[package]]
name = "regex-syntax"
version = "0.6.14"
version = "0.6.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b28dfe3fe9badec5dbf0a79a9cccad2cfc2ab5484bdb3e44cbd1ae8b3ba2be06"
checksum = "7fe5bd57d1d7414c6b5ed48563a2c855d995ff777729dcd91c369ec7fea395ae"
[[package]]
name = "rustc_version"
@ -559,22 +533,11 @@ dependencies = [
"semver",
]
[[package]]
name = "rustversion"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3bba175698996010c4f6dce5e7f173b6eb781fce25d2cfc45e27091ce0b79f6"
dependencies = [
"proc-macro2",
"quote 1.0.2",
"syn 1.0.14",
]
[[package]]
name = "ryu"
version = "1.0.2"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa8506c1de11c9c4e4c38863ccbe02a305c8188e85a05a784c9e11e1c3910c8"
checksum = "535622e6be132bccd223f4bb2b8ac8d53cda3c7a6394944d3b2b33fb974f9d76"
[[package]]
name = "same-file"
@ -587,9 +550,9 @@ dependencies = [
[[package]]
name = "scopeguard"
version = "1.0.0"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "semver"
@ -608,41 +571,35 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
[[package]]
name = "serde"
version = "1.0.104"
version = "1.0.105"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "414115f25f818d7dfccec8ee535d76949ae78584fc4f79a6f45a904bf8ab4449"
checksum = "e707fbbf255b8fc8c3b99abb91e7257a622caeb20a9818cbadbeeede4e0932ff"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.104"
version = "1.0.105"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "128f9e303a5a29922045a830221b8f78ec74a5f544944f3d5984f8ec3895ef64"
checksum = "ac5d00fc561ba2724df6758a17de23df5914f20e41cb00f94d5b7ae42fffaff8"
dependencies = [
"proc-macro2",
"quote 1.0.2",
"syn 1.0.14",
"quote 1.0.3",
"syn 1.0.17",
]
[[package]]
name = "serde_json"
version = "1.0.46"
version = "1.0.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21b01d7f0288608a01dca632cf1df859df6fd6ffa885300fc275ce2ba6221953"
checksum = "9371ade75d4c2d6cb154141b9752cf3781ec9c05e0e5cf35060e1e70ee7b9c25"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "sourcefile"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bf77cb82ba8453b42b6ae1d692e4cdc92f9a47beaf89a847c8be83f4e328ad3"
[[package]]
name = "strsim"
version = "0.8.0"
@ -651,9 +608,9 @@ checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
[[package]]
name = "structopt"
version = "0.3.9"
version = "0.3.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1bcbed7d48956fcbb5d80c6b95aedb553513de0a1b451ea92679d999c010e98"
checksum = "c8faa2719539bbe9d77869bfb15d4ee769f99525e707931452c97b693b3f159d"
dependencies = [
"clap",
"lazy_static",
@ -662,15 +619,15 @@ dependencies = [
[[package]]
name = "structopt-derive"
version = "0.4.2"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "095064aa1f5b94d14e635d0a5684cf140c43ae40a0fd990708d38f5d669e5f64"
checksum = "3f88b8e18c69496aad6f9ddf4630dd7d585bcaf765786cb415b9aec2fe5a0430"
dependencies = [
"heck",
"proc-macro-error",
"proc-macro2",
"quote 1.0.2",
"syn 1.0.14",
"quote 1.0.3",
"syn 1.0.17",
]
[[package]]
@ -686,12 +643,12 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.14"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af6f3550d8dff9ef7dc34d384ac6f107e5d31c8f57d9f28e0081503f547ac8f5"
checksum = "0df0eb663f387145cab623dea85b09c2c5b4b0aef44e945d928e682fce71bb03"
dependencies = [
"proc-macro2",
"quote 1.0.2",
"quote 1.0.3",
"unicode-xid 0.2.0",
]
@ -702,8 +659,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7be3539f6c128a931cf19dcee741c1af532c7fd387baa739c03dd2e96479338a"
dependencies = [
"proc-macro2",
"quote 1.0.2",
"syn 1.0.14",
"quote 1.0.3",
"syn 1.0.17",
]
[[package]]
@ -785,9 +742,9 @@ checksum = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
[[package]]
name = "version_check"
version = "0.1.5"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
checksum = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce"
[[package]]
name = "walkdir"
@ -808,9 +765,9 @@ checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
[[package]]
name = "wasm-bindgen"
version = "0.2.58"
version = "0.2.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5205e9afdf42282b192e2310a5b463a6d1c1d774e30dc3c791ac37ab42d2616c"
checksum = "2cc57ce05287f8376e998cbddfb4c8cb43b84a7ec55cf4551d7c00eef317a47f"
dependencies = [
"cfg-if",
"wasm-bindgen-macro",
@ -818,84 +775,56 @@ dependencies = [
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.58"
version = "0.2.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11cdb95816290b525b32587d76419facd99662a07e59d3cdb560488a819d9a45"
checksum = "d967d37bf6c16cca2973ca3af071d0a2523392e4a594548155d89a678f4237cd"
dependencies = [
"bumpalo",
"lazy_static",
"log",
"proc-macro2",
"quote 1.0.2",
"syn 1.0.14",
"quote 1.0.3",
"syn 1.0.17",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.58"
version = "0.2.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "574094772ce6921576fb6f2e3f7497b8a76273b6db092be18fc48a082de09dc3"
checksum = "8bd151b63e1ea881bb742cd20e1d6127cef28399558f3b5d415289bc41eee3a4"
dependencies = [
"quote 1.0.2",
"quote 1.0.3",
"wasm-bindgen-macro-support",
]
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.58"
version = "0.2.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e85031354f25eaebe78bb7db1c3d86140312a911a106b2e29f9cc440ce3e7668"
checksum = "d68a5b36eef1be7868f668632863292e37739656a80fc4b9acec7b0bd35a4931"
dependencies = [
"proc-macro2",
"quote 1.0.2",
"syn 1.0.14",
"quote 1.0.3",
"syn 1.0.17",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.58"
version = "0.2.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f5e7e61fc929f4c0dddb748b102ebf9f632e2b8d739f2016542b4de2965a9601"
[[package]]
name = "wasm-bindgen-webidl"
version = "0.2.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef012a0d93fc0432df126a8eaf547b2dce25a8ce9212e1d3cbeef5c11157975d"
dependencies = [
"anyhow",
"heck",
"log",
"proc-macro2",
"quote 1.0.2",
"syn 1.0.14",
"wasm-bindgen-backend",
"weedle",
]
checksum = "daf76fe7d25ac79748a37538b7daeed1c7a6867c92d3245c12c6222e4a20d639"
[[package]]
name = "web-sys"
version = "0.3.35"
version = "0.3.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aaf97caf6aa8c2b1dac90faf0db529d9d63c93846cca4911856f78a83cebf53b"
checksum = "2d6f51648d8c56c366144378a33290049eafdd784071077f6fe37dae64c1c4cb"
dependencies = [
"anyhow",
"js-sys",
"sourcefile",
"wasm-bindgen",
"wasm-bindgen-webidl",
]
[[package]]
name = "weedle"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3bb43f70885151e629e2a19ce9e50bd730fd436cfd4b666894c9ce4de9141164"
dependencies = [
"nom",
]
[[package]]

19
Dockerfile

@ -1,24 +1,7 @@
FROM rust:latest
WORKDIR /usr/src/myapp
COPY . .
# LLDB Server
EXPOSE 9228
RUN apt-get -y update && \
apt-get -y upgrade && \
apt-get install -y sudo software-properties-common libpython2.7
# codelldb depends on libpython2.7
# https://stackoverflow.com/questions/20842732/libpython2-7-so-1-0-cannot-open-shared-object-file-no-such-file-or-directory
# https://askubuntu.com/questions/787383/how-to-install-llvm-3-9
# http://apt.llvm.org/
RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
RUN apt-add-repository "deb http://apt.llvm.org/stretch/ llvm-toolchain-stretch-6.0 main"
RUN apt-get -y update
RUN sudo apt-get install -y lldb
RUN apt-get -y update && apt-get -y upgrade
CMD ["/bin/bash"]

8
boa/Cargo.toml

@ -17,13 +17,13 @@ default = ["wasm-bindgen"]
[dependencies]
gc = "0.3.3"
gc_derive = "0.3.2"
serde_json = "1.0.46"
serde_json = "1.0.48"
rand = "0.7.3"
regex = "1.3.4"
regex = "1.3.6"
# Optional Dependencies
wasm-bindgen = { version = "0.2.58", optional = true }
serde = { version = "1.0", features = ["derive"], optional = true }
wasm-bindgen = { version = "0.2.59", optional = true }
serde = { version = "1.0.105", features = ["derive"], optional = true }
[dev-dependencies]
criterion = "0.3.1"

2
boa/benches/parser.rs

@ -19,7 +19,7 @@ fn expression_parser(c: &mut Criterion) {
"Expression (Parser)",
move |b, tok| {
b.iter(|| {
Parser::new(black_box(tok.to_vec())).parse_all().unwrap();
Parser::new(&black_box(tok.to_vec())).parse_all().unwrap();
})
},
vec![tokens],

2
boa/benches/string.rs

@ -29,7 +29,7 @@ fn hello_world_parser(c: &mut Criterion) {
"Hello World (Parser)",
move |b, tok| {
b.iter(|| {
Parser::new(black_box(tok.to_vec())).parse_all().unwrap();
Parser::new(&black_box(tok.to_vec())).parse_all().unwrap();
})
},
vec![tokens],
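
Both benchmarks now hand the parser a borrowed token list instead of an owned Vec. A minimal sketch of the new call shape, mirroring the updated benchmarks; the module paths are assumed from the file layout above, and producing the tokens via the lexer is omitted:

use boa::syntax::{ast::token::Token, parser::Parser};

fn parse_tokens(tokens: Vec<Token>) {
    // Parser::new now borrows the tokens rather than consuming the Vec,
    // so callers (like the benchmarks above) keep ownership after parsing.
    Parser::new(&tokens).parse_all().expect("parsing failed");
    // `tokens` is still available for reuse here.
}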

36
boa/src/builtins/array/tests.rs

@ -11,7 +11,7 @@ fn is_array() {
var new_arr = new Array();
var many = ["a", "b", "c"];
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(forward(&mut engine, "Array.isArray(empty)"), "true");
assert_eq!(forward(&mut engine, "Array.isArray(new_arr)"), "true");
assert_eq!(forward(&mut engine, "Array.isArray(many)"), "true");
@ -51,7 +51,7 @@ fn concat() {
// var empty = new Array();
// var one = new Array(1);
// "#;
// forward(&mut engine, init);
// eprintln!("{}", forward(&mut engine, init));
// // Empty ++ Empty
// let ee = forward(&mut engine, "empty.concat(empty)");
// assert_eq!(ee, String::from("[]"));
@ -75,7 +75,7 @@ fn join() {
var one = ["a"];
var many = ["a", "b", "c"];
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
// Empty
let empty = forward(&mut engine, "empty.join('.')");
assert_eq!(empty, String::from(""));
@ -96,7 +96,7 @@ fn to_string() {
var one = ["a"];
var many = ["a", "b", "c"];
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
// Empty
let empty = forward(&mut engine, "empty.toString()");
assert_eq!(empty, String::from(""));
@ -136,7 +136,7 @@ fn every() {
return elem < 3;
}
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let result = forward(&mut engine, "array.every(callback);");
assert_eq!(result, "true");
@ -163,7 +163,7 @@ fn find() {
}
var many = ["a", "b", "c"];
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let found = forward(&mut engine, "many.find(comp)");
assert_eq!(found, String::from("a"));
}
@ -201,7 +201,7 @@ fn push() {
let init = r#"
var arr = [1, 2];
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(forward(&mut engine, "arr.push()"), "2");
assert_eq!(forward(&mut engine, "arr.push(3, 4)"), "4");
@ -218,7 +218,7 @@ fn pop() {
var one = [1];
var many = [1, 2, 3, 4];
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(
forward(&mut engine, "empty.pop()"),
@ -240,7 +240,7 @@ fn shift() {
var one = [1];
var many = [1, 2, 3, 4];
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(
forward(&mut engine, "empty.shift()"),
@ -260,7 +260,7 @@ fn unshift() {
let init = r#"
var arr = [3, 4];
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(forward(&mut engine, "arr.unshift()"), "2");
assert_eq!(forward(&mut engine, "arr.unshift(1, 2)"), "4");
@ -276,7 +276,7 @@ fn reverse() {
var arr = [1, 2];
var reversed = arr.reverse();
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(forward(&mut engine, "reversed[0]"), "2");
assert_eq!(forward(&mut engine, "reversed[1]"), "1");
assert_eq!(forward(&mut engine, "arr[0]"), "2");
@ -293,7 +293,7 @@ fn index_of() {
var many = ["a", "b", "c"];
var duplicates = ["a", "b", "c", "a", "b"];
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
// Empty
let empty = forward(&mut engine, "empty.indexOf('a')");
@ -357,7 +357,7 @@ fn last_index_of() {
var many = ["a", "b", "c"];
var duplicates = ["a", "b", "c", "a", "b"];
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
// Empty
let empty = forward(&mut engine, "empty.lastIndexOf('a')");
@ -509,7 +509,7 @@ fn fill() {
}
#[test]
fn inclues_value() {
fn includes_value() {
let realm = Realm::create();
let mut engine = Executor::new(realm);
let init = r#"
@ -519,7 +519,7 @@ fn inclues_value() {
var duplicates = ["a", "b", "c", "a", "b"];
var undefined = [undefined];
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
// Empty
let empty = forward(&mut engine, "empty.includes('a')");
@ -619,7 +619,7 @@ fn slice() {
var many2 = ["a", "b", "c", "d"].slice(2, 3);
var many3 = ["a", "b", "c", "d"].slice(7);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(forward(&mut engine, "empty.length"), "0");
assert_eq!(forward(&mut engine, "one[0]"), "a");
@ -648,7 +648,7 @@ fn for_each() {
}
a.forEach(callingCallback);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(forward(&mut engine, "sum"), "14");
assert_eq!(forward(&mut engine, "indexSum"), "6");
@ -666,7 +666,7 @@ fn for_each_push_value() {
}
a.forEach(callingCallback);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
// [ 1, 2, 3, 4, 2, 4, 6, 8 ]
assert_eq!(forward(&mut engine, "a.length"), "8");
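
Throughout the test files, the bare forward(&mut engine, init) calls are wrapped in eprintln! so that any error value produced while evaluating the init script is printed in the test output rather than silently discarded. A sketch of the pattern as it appears inside boa's own test modules (the exact import paths are an assumption):

// Inside one of boa's test modules.
use crate::{exec::Executor, forward, realm::Realm};

#[test]
fn init_script_errors_are_visible() {
    let realm = Realm::create();
    let mut engine = Executor::new(realm);
    let init = r#"
        var arr = [1, 2];
    "#;
    // Printing the result means a failing init script shows up in the
    // test output instead of being swallowed.
    eprintln!("{}", forward(&mut engine, init));
    assert_eq!(forward(&mut engine, "arr.length"), "2");
}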

9
boa/src/builtins/boolean/mod.rs

@ -16,14 +16,11 @@ pub fn construct_boolean(this: &Value, args: &[Value], _: &mut Interpreter) -> R
this.set_kind(ObjectKind::Boolean);
// Get the argument, if any
match args.get(0) {
Some(ref value) => {
if let Some(ref value) = args.get(0) {
this.set_internal_slot("BooleanData", to_boolean(value));
}
None => {
} else {
this.set_internal_slot("BooleanData", to_boolean(&to_value(false)));
}
}
// no need to return `this` as its passed by reference
Ok(this.clone())
@ -69,7 +66,7 @@ pub fn create_constructor(global: &Value) -> Value {
}
// === Utility Functions ===
/// [toBoolean](https://tc39.github.io/ecma262/#sec-toboolean)
/// [toBoolean](https://tc39.es/ecma262/#sec-toboolean)
/// Creates a new boolean value from the input
pub fn to_boolean(value: &Value) -> Value {
match *value.deref().borrow() {
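
This change, like the later ones in regexp, value, and the environment records, replaces a two-arm match on an Option with if let ... else, which reads more directly when only the Some binding matters. A minimal standalone illustration of the pattern:

fn describe(value: Option<i32>) -> String {
    // Before: match value { Some(v) => ..., None => ... }
    if let Some(v) = value {
        format!("got {}", v)
    } else {
        String::from("got nothing")
    }
}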

6
boa/src/builtins/boolean/tests.rs

@ -20,7 +20,7 @@ fn construct_and_call() {
var one = new Boolean(1);
var zero = Boolean(0);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let one = forward_val(&mut engine, "one").unwrap();
let zero = forward_val(&mut engine, "zero").unwrap();
@ -39,7 +39,7 @@ fn constructor_gives_true_instance() {
var trueBool = new Boolean(trueVal);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let true_val = forward_val(&mut engine, "trueVal").expect("value expected");
let true_num = forward_val(&mut engine, "trueNum").expect("value expected");
let true_string = forward_val(&mut engine, "trueString").expect("value expected");
@ -67,7 +67,7 @@ fn instances_have_correct_proto_set() {
var boolProto = Boolean.prototype;
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let bool_instance = forward_val(&mut engine, "boolInstance").expect("value expected");
let bool_prototype = forward_val(&mut engine, "boolProto").expect("value expected");

15
boa/src/builtins/console.rs

@ -1,11 +1,14 @@
use crate::builtins::function::NativeFunctionData;
use crate::builtins::value::{
from_value, log_string_from, to_value, ResultValue, Value, ValueData,
#![allow(clippy::print_stdout)]
use crate::{
builtins::{
function::NativeFunctionData,
value::{from_value, log_string_from, to_value, ResultValue, Value, ValueData},
},
exec::Interpreter,
};
use crate::exec::Interpreter;
use gc::Gc;
use std::iter::FromIterator;
use std::ops::Deref;
use std::{iter::FromIterator, ops::Deref};
/// Print a javascript value to the standard output stream
/// <https://console.spec.whatwg.org/#logger>

25
boa/src/builtins/function/mod.rs

@ -8,18 +8,19 @@ use crate::{
value::{to_value, ResultValue, Value, ValueData},
},
exec::Interpreter,
syntax::ast::expr::Expr,
syntax::ast::node::{FormalParameter, Node},
};
use gc::{custom_trace, Gc};
use gc_derive::{Finalize, Trace};
use std::fmt::{self, Debug};
use std::ops::Deref;
/// fn(this, arguments, ctx)
pub type NativeFunctionData = fn(&Value, &[Value], &mut Interpreter) -> ResultValue;
/// A Javascript function
/// A member of the Object type that may be invoked as a subroutine
/// <https://tc39.github.io/ecma262/#sec-terms-and-definitions-function>
/// <https://tc39.es/ecma262/#sec-terms-and-definitions-function>
/// In our implementation, Function is extending Object by holding an object field which some extra data
/// A Javascript function
@ -37,26 +38,36 @@ pub struct RegularFunction {
/// The fields associated with the function
pub object: Object,
/// This function's expression
pub expr: Expr,
pub node: Node,
/// The argument declarations of the function
pub args: Vec<Expr>,
pub args: Vec<Node>,
}
impl RegularFunction {
/// Make a new regular function
#[allow(clippy::cast_possible_wrap)]
pub fn new(expr: Expr, args: Vec<Expr>) -> Self {
pub fn new(node: Node, f_args: Vec<FormalParameter>) -> Self {
let mut args = vec![];
for i in f_args {
let node = if let Some(init) = &i.init {
init.deref().clone()
} else {
Node::Local(i.name.clone())
};
args.push(node);
}
let mut object = Object::default();
object.properties.insert(
"arguments".to_string(),
Property::default().value(Gc::new(ValueData::Integer(args.len() as i32))),
);
Self { object, expr, args }
Self { object, node, args }
}
}
#[derive(Finalize, Clone)]
/// Represents a native javascript function in memory
#[derive(Finalize, Clone)]
pub struct NativeFunction {
/// The fields associated with the function
pub object: Object,
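
RegularFunction::new now takes the parsed FormalParameter list and flattens it into argument nodes: a parameter's initializer is used when present, otherwise the parameter collapses to a plain local reference. A simplified standalone sketch of that mapping, using stand-in types (boa's real Node and FormalParameter are richer than this):

#[derive(Debug)]
enum Node {
    Local(String),
    Const(i32),
}

#[derive(Debug)]
struct FormalParameter {
    name: String,
    init: Option<Box<Node>>,
}

fn flatten_params(params: Vec<FormalParameter>) -> Vec<Node> {
    params
        .into_iter()
        .map(|p| match p.init {
            // Use the parsed default-value expression when one is present...
            Some(init) => *init,
            // ...otherwise fall back to a reference to the parameter name.
            None => Node::Local(p.name),
        })
        .collect()
}

fn main() {
    let params = vec![
        FormalParameter { name: "a".to_string(), init: None },
        FormalParameter { name: "b".to_string(), init: Some(Box::new(Node::Const(1))) },
    ];
    println!("{:?}", flatten_params(params));
}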

2
boa/src/builtins/function/tests.rs

@ -14,7 +14,7 @@ fn check_arguments_object() {
var val = jason(100, 6);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let expected_return_val: f64 = 100.0;
let return_val = forward_val(&mut engine, "val").expect("value expected");
assert_eq!(return_val.is_double(), true);

4
boa/src/builtins/json.rs

@ -1,13 +1,13 @@
use crate::builtins::function::NativeFunctionData;
use crate::builtins::object::{Object, ObjectKind, PROTOTYPE};
/// The JSON Object
/// <https://tc39.github.io/ecma262/#sec-json-object>
/// <https://tc39.es/ecma262/#sec-json-object>
use crate::builtins::value::{to_value, ResultValue, Value, ValueData};
use crate::exec::Interpreter;
use serde_json::{self, to_string_pretty, Value as JSONValue};
/// Parse a JSON string into a Javascript object
/// <https://tc39.github.io/ecma262/#sec-json.parse>
/// <https://tc39.es/ecma262/#sec-json.parse>
pub fn parse(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {
match serde_json::from_str::<JSONValue>(
&args
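
JSON.parse here delegates to serde_json::from_str, and the module also imports to_string_pretty for the stringify side. A minimal standalone example of the underlying serde_json calls the builtin wraps (error handling simplified):

use serde_json::Value as JSONValue;

fn main() {
    let parsed: JSONValue =
        serde_json::from_str(r#"{"answer": 42}"#).expect("invalid JSON");
    // Pretty-printing mirrors the to_string_pretty import in the diff above.
    println!("{}", serde_json::to_string_pretty(&parsed).expect("serialization failed"));
}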

54
boa/src/builtins/math/tests.rs

@ -10,7 +10,7 @@ fn abs() {
var b = Math.abs(1.23456 - 7.89012);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -30,7 +30,7 @@ fn acos() {
var d = Math.acos(2);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward(&mut engine, "b");
@ -53,7 +53,7 @@ fn acosh() {
var c = Math.acosh(0.5);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward(&mut engine, "b");
@ -73,7 +73,7 @@ fn asin() {
var b = Math.asin(5 / 3);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward(&mut engine, "b");
@ -91,7 +91,7 @@ fn asinh() {
var b = Math.asinh(0);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -110,7 +110,7 @@ fn atan() {
var c = Math.atan(-0);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -130,7 +130,7 @@ fn atan2() {
var b = Math.atan2(15, 90);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -149,7 +149,7 @@ fn cbrt() {
var c = Math.cbrt(1);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -170,7 +170,7 @@ fn ceil() {
var c = Math.ceil(-7.004);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -190,7 +190,7 @@ fn cos() {
var b = Math.cos(1);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -209,7 +209,7 @@ fn cosh() {
var c = Math.cosh(-1);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -230,7 +230,7 @@ fn exp() {
var c = Math.exp(2);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -251,7 +251,7 @@ fn floor() {
var c = Math.floor(3.01);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -272,7 +272,7 @@ fn log() {
var c = Math.log(-1);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -293,7 +293,7 @@ fn log10() {
var c = Math.log10(-2);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -314,7 +314,7 @@ fn log2() {
var c = Math.log2(-2);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -335,7 +335,7 @@ fn max() {
var c = Math.max(-10, 20);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -356,7 +356,7 @@ fn min() {
var c = Math.min(-10, 20);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -378,7 +378,7 @@ fn pow() {
var d = Math.pow(7, -2);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -400,7 +400,7 @@ fn round() {
var b = Math.round(-20.3);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -419,7 +419,7 @@ fn sign() {
var c = Math.sign(0);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -439,7 +439,7 @@ fn sin() {
var b = Math.sin(1);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -457,7 +457,7 @@ fn sinh() {
var b = Math.sinh(1);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -476,7 +476,7 @@ fn sqrt() {
var c = Math.sqrt(9);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -497,7 +497,7 @@ fn sqrt() {
// var a = Math.tan(1.1);
// "#;
// forward(&mut engine, init);
// eprintln!("{}", forward(&mut engine, init));
// let a = forward_val(&mut engine, "a").unwrap();
@ -513,7 +513,7 @@ fn tanh() {
var b = Math.tanh(0);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();
@ -531,7 +531,7 @@ fn trunc() {
var b = Math.trunc(0.123);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let a = forward_val(&mut engine, "a").unwrap();
let b = forward_val(&mut engine, "b").unwrap();

3
boa/src/builtins/number/mod.rs

@ -108,7 +108,6 @@ pub fn to_locale_string(this: &Value, _args: &[Value], _ctx: &mut Interpreter) -
///
/// https://tc39.es/ecma262/#sec-number.prototype.toprecision
pub fn to_precision(this: &Value, args: &[Value], _ctx: &mut Interpreter) -> ResultValue {
println!("Number::to_precision()");
let this_num = to_number(this);
let _num_str_len = format!("{}", this_num.to_num()).len();
let _precision = match args.get(0) {
@ -119,7 +118,7 @@ pub fn to_precision(this: &Value, args: &[Value], _ctx: &mut Interpreter) -> Res
None => 0,
};
// TODO: Implement toPrecision
unimplemented!();
unimplemented!("TODO: Implement toPrecision");
}
/// Number().toString()
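
The bare unimplemented!() becomes unimplemented!("TODO: Implement toPrecision"); the message is embedded in the resulting panic, so the missing feature is identifiable from the panic output alone. For example:

fn to_precision_stub() {
    // Panics with: "not implemented: TODO: Implement toPrecision"
    unimplemented!("TODO: Implement toPrecision")
}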

14
boa/src/builtins/number/tests.rs

@ -24,7 +24,7 @@ fn call_number() {
var from_exp = Number("2.34e+2");
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let default_zero = forward_val(&mut engine, "default_zero").unwrap();
let int_one = forward_val(&mut engine, "int_one").unwrap();
let float_two = forward_val(&mut engine, "float_two").unwrap();
@ -57,7 +57,7 @@ fn to_exponential() {
var noop_exp = Number("1.23e+2").toExponential();
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let default_exp = forward(&mut engine, "default_exp");
let int_exp = forward(&mut engine, "int_exp");
let float_exp = forward(&mut engine, "float_exp");
@ -85,7 +85,7 @@ fn to_fixed() {
var nan_fixed = Number("I am not a number").toFixed();
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let default_fixed = forward(&mut engine, "default_fixed");
let pos_fixed = forward(&mut engine, "pos_fixed");
let neg_fixed = forward(&mut engine, "neg_fixed");
@ -113,7 +113,7 @@ fn to_locale_string() {
// TODO: We don't actually do any locale checking here
// To honor the spec we should print numbers according to user locale.
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let default_locale = forward(&mut engine, "default_locale");
let small_locale = forward(&mut engine, "small_locale");
let big_locale = forward(&mut engine, "big_locale");
@ -139,7 +139,7 @@ fn to_precision() {
var neg_precision = Number(-123456789).toPrecision(4);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let default_precision = forward(&mut engine, "default_precision");
let low_precision = forward(&mut engine, "low_precision");
let more_precision = forward(&mut engine, "more_precision");
@ -170,7 +170,7 @@ fn to_string() {
var neg_string = Number(-1.2).toString();
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let default_string = forward(&mut engine, "default_string");
let int_string = forward(&mut engine, "int_string");
let float_string = forward(&mut engine, "float_string");
@ -198,7 +198,7 @@ fn value_of() {
var neg_val = Number("-1.2e+4").valueOf()
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let default_val = forward_val(&mut engine, "default_val").unwrap();
let int_val = forward_val(&mut engine, "int_val").unwrap();
let float_val = forward_val(&mut engine, "float_val").unwrap();

4
boa/src/builtins/property.rs

@ -2,8 +2,8 @@ use crate::builtins::value::{from_value, to_value, FromValue, ToValue, Value, Va
use gc_derive::{Finalize, Trace};
/// A Javascript Property AKA The Property Descriptor
/// [[SPEC] - The Property Descriptor Specification Type](https://tc39.github.io/ecma262/#sec-property-descriptor-specification-type)
/// [[SPEC] - Default Attribute Values](https://tc39.github.io/ecma262/#table-4)
/// [[SPEC] - The Property Descriptor Specification Type](https://tc39.es/ecma262/#sec-property-descriptor-specification-type)
/// [[SPEC] - Default Attribute Values](https://tc39.es/ecma262/#table-4)
///
/// Any field in a JavaScript Property may be present or absent.
#[derive(Trace, Finalize, Clone, Debug)]

16
boa/src/builtins/regexp/mod.rs

@ -192,19 +192,16 @@ pub fn test(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {
let mut last_index =
from_value::<usize>(this.get_field_slice("lastIndex")).map_err(to_value)?;
let result = this.with_internal_state_ref(|regex: &RegExp| {
let result = match regex.matcher.find_at(arg_str.as_str(), last_index) {
Some(m) => {
let result = if let Some(m) = regex.matcher.find_at(arg_str.as_str(), last_index) {
if regex.use_last_index {
last_index = m.end();
}
true
}
None => {
} else {
if regex.use_last_index {
last_index = 0;
}
false
}
};
Ok(Gc::new(ValueData::Boolean(result)))
});
@ -219,12 +216,11 @@ pub fn exec(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {
from_value::<usize>(this.get_field_slice("lastIndex")).map_err(to_value)?;
let result = this.with_internal_state_ref(|regex: &RegExp| {
let mut locations = regex.matcher.capture_locations();
let result =
match regex
let result = if let Some(m) =
regex
.matcher
.captures_read_at(&mut locations, arg_str.as_str(), last_index)
{
Some(m) => {
if regex.use_last_index {
last_index = m.end();
}
@ -242,13 +238,11 @@ pub fn exec(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {
result.set_prop_slice("index", Property::default().value(to_value(m.start())));
result.set_prop_slice("input", Property::default().value(to_value(arg_str)));
result
}
None => {
} else {
if regex.use_last_index {
last_index = 0;
}
Gc::new(ValueData::Null)
}
};
Ok(result)
});

8
boa/src/builtins/regexp/tests.rs

@ -13,7 +13,7 @@ fn test_constructors() {
var ctor_literal = new RegExp(/[0-9]+(\.[0-9]+)?/);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(forward(&mut engine, "constructed.test('1.0')"), "true");
assert_eq!(forward(&mut engine, "literal.test('1.0')"), "true");
assert_eq!(forward(&mut engine, "ctor_literal.test('1.0')"), "true");
@ -36,7 +36,7 @@ fn check_regexp_constructor_is_function() {
// var re_sm = /test/sm;
// "#;
//
// forward(&mut engine, init);
// eprintln!("{}", forward(&mut engine, init));
// assert_eq!(forward(&mut engine, "re_gi.global"), "true");
// assert_eq!(forward(&mut engine, "re_gi.ignoreCase"), "true");
// assert_eq!(forward(&mut engine, "re_gi.multiline"), "false");
@ -62,7 +62,7 @@ fn test_last_index() {
var regex = /[0-9]+(\.[0-9]+)?/g;
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(forward(&mut engine, "regex.lastIndex"), "0");
assert_eq!(forward(&mut engine, "regex.test('1.0foo')"), "true");
assert_eq!(forward(&mut engine, "regex.lastIndex"), "3");
@ -79,7 +79,7 @@ fn test_exec() {
var result = re.exec('The Quick Brown Fox Jumps Over The Lazy Dog');
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(forward(&mut engine, "result[0]"), "Quick Brown Fox Jumps");
assert_eq!(forward(&mut engine, "result[1]"), "Brown");
assert_eq!(forward(&mut engine, "result[2]"), "Jumps");

20
boa/src/builtins/string/mod.rs

@ -65,7 +65,7 @@ pub fn to_string(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue
/// Returns a single element String containing the code unit at index pos within the String value
/// resulting from converting this object to a String. If there is no element at that index, the
/// result is the empty String. The result is a String value, not a String object.
/// <https://tc39.github.io/ecma262/#sec-string.prototype.charat>
/// <https://tc39.es/ecma262/#sec-string.prototype.charat>
pub fn char_at(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {
// First we get it the actual string a private field stored on the object only the engine has access to.
// Then we convert it into a Rust String by wrapping it in from_value
@ -99,7 +99,7 @@ pub fn char_at(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultVal
/// Returns a Number (a nonnegative integer less than 216) that is the numeric value of the code
/// unit at index pos within the String resulting from converting this object to a String. If there
/// is no element at that index, the result is NaN.
/// <https://tc39.github.io/ecma262/#sec-string.prototype.charcodeat>
/// <https://tc39.es/ecma262/#sec-string.prototype.charcodeat>
pub fn char_code_at(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {
// First we get it the actual string a private field stored on the object only the engine has access to.
// Then we convert it into a Rust String by wrapping it in from_value
@ -130,7 +130,7 @@ pub fn char_code_at(this: &Value, args: &[Value], ctx: &mut Interpreter) -> Resu
/// Returns a String that is the result of concatenating this String and all strings provided as
/// arguments
/// <https://tc39.github.io/ecma262/#sec-string.prototype.concat>
/// <https://tc39.es/ecma262/#sec-string.prototype.concat>
pub fn concat(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {
// First we get it the actual string a private field stored on the object only the engine has access to.
// Then we convert it into a Rust String by wrapping it in from_value
@ -146,7 +146,7 @@ pub fn concat(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValu
/// Returns a String that is the result of repeating this String the number of times given by the
/// first argument
/// <https://tc39.github.io/ecma262/#sec-string.prototype.repeat>
/// <https://tc39.es/ecma262/#sec-string.prototype.repeat>
pub fn repeat(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {
// First we get it the actual string a private field stored on the object only the engine has access to.
// Then we convert it into a Rust String by wrapping it in from_value
@ -163,7 +163,7 @@ pub fn repeat(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValu
/// Returns a String which contains the slice of the JS String from character at "start" index up
/// to but not including character at "end" index
/// <https://tc39.github.io/ecma262/#sec-string.prototype.slice>
/// <https://tc39.es/ecma262/#sec-string.prototype.slice>
pub fn slice(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {
// First we get it the actual string a private field stored on the object only the engine has access to.
// Then we convert it into a Rust String by wrapping it in from_value
@ -214,7 +214,7 @@ pub fn slice(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue
/// Returns a Boolean indicating whether the sequence of code units of the
/// "search string" is the same as the corresponding code units of this string
/// starting at index "position"
/// <https://tc39.github.io/ecma262/#sec-string.prototype.startswith>
/// <https://tc39.es/ecma262/#sec-string.prototype.startswith>
pub fn starts_with(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {
// First we get it the actual string a private field stored on the object only the engine has access to.
// Then we convert it into a Rust String by wrapping it in from_value
@ -253,7 +253,7 @@ pub fn starts_with(this: &Value, args: &[Value], ctx: &mut Interpreter) -> Resul
/// Returns a Boolean indicating whether the sequence of code units of the
/// "search string" is the same as the corresponding code units of this string
/// starting at position "end position" - length
/// <https://tc39.github.io/ecma262/#sec-string.prototype.endswith>
/// <https://tc39.es/ecma262/#sec-string.prototype.endswith>
pub fn ends_with(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {
// First we get it the actual string a private field stored on the object only the engine has access to.
// Then we convert it into a Rust String by wrapping it in from_value
@ -295,7 +295,7 @@ pub fn ends_with(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultV
/// the result of converting this object to a String, at one or more indices
/// that are greater than or equal to position. If position is undefined, 0 is
/// assumed, so as to search all of the String.
/// <https://tc39.github.io/ecma262/#sec-string.prototype.includes>
/// <https://tc39.es/ecma262/#sec-string.prototype.includes>
pub fn includes(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {
// First we get it the actual string a private field stored on the object only the engine has access to.
// Then we convert it into a Rust String by wrapping it in from_value
@ -442,7 +442,7 @@ pub fn replace(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultVal
/// position, then the smallest such index is returned; otherwise, -1 is
/// returned. If position is undefined, 0 is assumed, so as to search all of the
/// String.
/// <https://tc39.github.io/ecma262/#sec-string.prototype.includes>
/// <https://tc39.es/ecma262/#sec-string.prototype.includes>
pub fn index_of(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {
// First we get it the actual string a private field stored on the object only the engine has access to.
// Then we convert it into a Rust String by wrapping it in from_value
@ -488,7 +488,7 @@ pub fn index_of(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultVa
/// position, then the greatest such index is returned; otherwise, -1 is
/// returned. If position is undefined, the length of the String value is
/// assumed, so as to search all of the String.
/// <https://tc39.github.io/ecma262/#sec-string.prototype.lastindexof>
/// <https://tc39.es/ecma262/#sec-string.prototype.lastindexof>
pub fn last_index_of(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {
// First we get it the actual string a private field stored on the object only the engine has access to.
// Then we convert it into a Rust String by wrapping it in from_value

20
boa/src/builtins/string/tests.rs

@ -21,7 +21,7 @@ fn check_string_constructor_is_function() {
// const c = new String(' \b ');
// cosnt d = new String('中文长度')
// "#;
// forward(&mut engine, init);
// eprintln!("{}", forward(&mut engine, init));
// let a = forward(&mut engine, "a.length");
// assert_eq!(a, String::from("1"));
// let b = forward(&mut engine, "b.length");
@ -44,7 +44,7 @@ fn concat() {
var world = new String('world! ');
var nice = new String('Have a nice day.');
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
// Todo: fix this
let _a = forward(&mut engine, "hello.concat(world, nice)");
@ -63,7 +63,7 @@ fn construct_and_call() {
var hello = new String('Hello');
var world = String('world');
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let hello = forward_val(&mut engine, "hello").unwrap();
let world = forward_val(&mut engine, "world").unwrap();
@ -80,7 +80,7 @@ fn repeat() {
var en = new String('english');
var zh = new String('');
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let empty = String::from("");
assert_eq!(forward(&mut engine, "empty.repeat(0)"), empty);
@ -108,7 +108,7 @@ fn replace() {
a = a.replace("a", "2");
a
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let empty = String::from("2bc");
assert_eq!(forward(&mut engine, "a"), empty);
@ -131,7 +131,7 @@ fn replace_with_function() {
a = a.replace(/c(o)(o)(l)/, replacer);
a;
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(
forward(&mut engine, "a"),
String::from("ecmascript is awesome!")
@ -155,7 +155,7 @@ fn starts_with() {
var enLiteral = 'english';
var zhLiteral = '';
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let pass = String::from("true");
assert_eq!(forward(&mut engine, "empty.startsWith('')"), pass);
assert_eq!(forward(&mut engine, "en.startsWith('e')"), pass);
@ -179,7 +179,7 @@ fn ends_with() {
var enLiteral = 'english';
var zhLiteral = '';
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let pass = String::from("true");
assert_eq!(forward(&mut engine, "empty.endsWith('')"), pass);
assert_eq!(forward(&mut engine, "en.endsWith('h')"), pass);
@ -250,7 +250,7 @@ fn match_all() {
var str = 'table football, foosball';
var matches = str.matchAll(regexp);
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(
forward(&mut engine, "matches[0][0]"),
String::from("football")
@ -275,7 +275,7 @@ fn test_match() {
var result4 = str.match(RegExp("B", 'g'));
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
assert_eq!(forward(&mut engine, "result1[0]"), "Quick Brown Fox Jumps");
assert_eq!(forward(&mut engine, "result1[1]"), "Brown");
assert_eq!(forward(&mut engine, "result1[2]"), "Jumps");

4
boa/src/builtins/symbol/tests.rs

@ -17,7 +17,7 @@ fn call_symbol_and_check_return_type() {
let init = r#"
var sym = Symbol();
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let sym = forward_val(&mut engine, "sym").unwrap();
assert_eq!(sym.is_symbol(), true);
}
@ -29,7 +29,7 @@ fn print_symbol_expect_description() {
let init = r#"
var sym = Symbol("Hello");
"#;
forward(&mut engine, init);
eprintln!("{}", forward(&mut engine, init));
let sym = forward_val(&mut engine, "sym.toString()").unwrap();
assert_eq!(sym.to_string(), "Symbol(Hello)");
}

35
boa/src/builtins/value/mod.rs

@ -21,8 +21,8 @@ use std::{
str::FromStr,
};
#[must_use]
/// The result of a Javascript expression is represented like this so it can succeed (`Ok`) or fail (`Err`)
#[must_use]
pub type ResultValue = Result<Value, Value>;
/// A Garbage-collected Javascript value as represented in the interpreter
pub type Value = Gc<ValueData>;
@ -57,19 +57,16 @@ pub enum ValueData {
impl ValueData {
/// Returns a new empty object
pub fn new_obj(global: Option<&Value>) -> Value {
match global {
Some(glob) => {
if let Some(glob) = global {
let obj_proto = glob.get_field_slice("Object").get_field_slice(PROTOTYPE);
let obj = Object::create(obj_proto);
Gc::new(ValueData::Object(GcCell::new(obj)))
}
None => {
} else {
let obj = Object::default();
Gc::new(ValueData::Object(GcCell::new(obj)))
}
}
}
/// Similar to `new_obj`, but you can pass a prototype to create from,
/// plus a kind
@ -170,7 +167,7 @@ impl ValueData {
}
/// Returns true if the value is true
/// [toBoolean](https://tc39.github.io/ecma262/#sec-toboolean)
/// [toBoolean](https://tc39.es/ecma262/#sec-toboolean)
pub fn is_true(&self) -> bool {
match *self {
ValueData::Object(_) => true,
@ -341,9 +338,9 @@ impl ValueData {
};
// If the getter is populated, use that. If not use [[Value]] instead
match prop_getter {
Some(val) => val,
None => {
if let Some(val) = prop_getter {
val
} else {
let val = prop
.value
.as_ref()
@ -351,7 +348,6 @@ impl ValueData {
val.clone()
}
}
}
None => Gc::new(ValueData::Undefined),
}
}
@ -789,8 +785,7 @@ fn display_obj(v: &ValueData, print_internals: bool) -> String {
indent: usize,
print_internals: bool,
) -> String {
match *data {
ValueData::Object(ref v) => {
if let ValueData::Object(ref v) = *data {
// The in-memory address of the current object
let addr = address_of(v.borrow().deref());
@ -818,10 +813,9 @@ fn display_obj(v: &ValueData, print_internals: bool) -> String {
.expect("Could not create the closing brace's indentation string");
format!("{{\n{}\n{}}}", result, closing_indent)
}
} else {
// Every other type of data is printed as is
_ => format!("{}", data),
format!("{}", data)
}
}
@ -829,7 +823,7 @@ fn display_obj(v: &ValueData, print_internals: bool) -> String {
}
impl Display for ValueData {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ValueData::Null => write!(f, "null"),
ValueData::Undefined => write!(f, "undefined"),
@ -855,15 +849,14 @@ impl Display for ValueData {
ValueData::Function(ref v) => match *v.borrow() {
Function::NativeFunc(_) => write!(f, "function() {{ [native code] }}"),
Function::RegularFunc(ref rf) => {
write!(f, "function(")?;
let last_index = rf.args.len() - 1;
write!(f, "function{}(", if rf.args.is_empty() { "" } else { " " })?;
for (index, arg) in rf.args.iter().enumerate() {
write!(f, "{}", arg)?;
if index != last_index {
if index + 1 != rf.args.len() {
write!(f, ", ")?;
}
}
write!(f, "){}", rf.expr)
write!(f, ") {}", rf.node)
}
},
}
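The function `Display` change above also fixes the argument separator: the old code computed `rf.args.len() - 1` up front, which underflows (panicking in debug builds) when the function has no arguments, while the new `index + 1 != rf.args.len()` check needs no subtraction. A self-contained sketch of the same comma-joining pattern:
// Safe comma-joining: compare `index + 1` with the length instead of
// precomputing `len() - 1`, which would underflow for an empty slice.
fn join_args(args: &[String]) -> String {
    let mut out = String::new();
    for (index, arg) in args.iter().enumerate() {
        out.push_str(arg);
        if index + 1 != args.len() {
            out.push_str(", ");
        }
    }
    out
}
fn main() {
    assert_eq!(join_args(&[]), "");
    assert_eq!(join_args(&["a".to_string(), "b".to_string()]), "a, b");
}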

18
boa/src/environment/declarative_environment_record.rs

@ -3,7 +3,7 @@
//! Each declarative Environment Record is associated with an ECMAScript program scope containing variable,
//! `constant`, `let`, `class`, `module`, `import`, and/or function declarations.
//! A declarative Environment Record binds the set of identifiers defined by the declarations contained within its scope.
//! More info: [ECMA-262 sec-declarative-environment-records](https://tc39.github.io/ecma262/#sec-declarative-environment-records)
//! More info: [ECMA-262 sec-declarative-environment-records](https://tc39.es/ecma262/#sec-declarative-environment-records)
use crate::{
builtins::value::{Value, ValueData},
@ -80,13 +80,12 @@ impl EnvironmentRecordTrait for DeclarativeEnvironmentRecord {
fn initialize_binding(&mut self, name: &str, value: Value) {
if let Some(ref mut record) = self.env_rec.get_mut(name) {
match record.value {
Some(_) => {
if record.value.is_none() {
record.value = Some(value);
} else {
// TODO: change this when error handling comes into play
panic!("Identifier {} has already been defined", name);
}
None => record.value = Some(value),
}
}
}
@ -121,18 +120,17 @@ impl EnvironmentRecordTrait for DeclarativeEnvironmentRecord {
}
fn get_binding_value(&self, name: &str, _strict: bool) -> Value {
match self.env_rec.get(name) {
Some(binding) => binding
if let Some(binding) = self.env_rec.get(name) {
binding
.value
.as_ref()
.expect("Could not get record as reference")
.clone(),
None => {
.clone()
} else {
// TODO: change this when error handling comes into play
panic!("ReferenceError: Cannot get binding value for {}", name);
}
}
}
fn delete_binding(&mut self, name: &str) -> bool {
match self.env_rec.get(name) {
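The `initialize_binding` and `get_binding_value` hunks above replace a `match` on an `Option` with `if let`/`else`, which reads better when one arm is just a panic. A simplified, self-contained sketch of the same shape (the real record is a struct with a `value` field; here it is reduced to an `Option<i32>`):
use std::collections::HashMap;
// Only set the binding if it has not been initialised yet; otherwise panic,
// mirroring the TODO'd error handling in the hunk above.
fn initialize_binding(env: &mut HashMap<String, Option<i32>>, name: &str, value: i32) {
    if let Some(record) = env.get_mut(name) {
        if record.is_none() {
            *record = Some(value);
        } else {
            panic!("Identifier {} has already been defined", name);
        }
    }
}
fn main() {
    let mut env = HashMap::new();
    env.insert("x".to_string(), None);
    initialize_binding(&mut env, "x", 1);
    assert_eq!(env["x"], Some(1));
}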

6
boa/src/environment/environment_record_trait.rs

@ -1,7 +1,7 @@
//! # Environment Records
//!
//! <https://tc39.github.io/ecma262/#sec-environment-records>
//! <https://tc39.github.io/ecma262/#sec-lexical-environments>
//! <https://tc39.es/ecma262/#sec-environment-records>
//! <https://tc39.es/ecma262/#sec-lexical-environments>
//!
//! Some environments are stored as `JSObjects`. This is for GC, i.e we want to keep an environment if a variable is closed-over (a closure is returned).
//! All of the logic to handle scope/environment records are stored in here.
@ -15,7 +15,7 @@ use crate::{
use gc::{Finalize, Trace};
use std::fmt::Debug;
/// <https://tc39.github.io/ecma262/#sec-environment-records>
/// <https://tc39.es/ecma262/#sec-environment-records>
///
/// In the ECMAScript specification Environment Records are hierarchical and have a base class with abstract methods.
/// In this implementation we have a trait which represents the behaviour of all `EnvironmentRecord` types.

13
boa/src/environment/function_environment_record.rs

@ -6,7 +6,7 @@
//! If a function is not an `ArrowFunction` function and references super,
//! its function Environment Record also contains the state that is used to perform super method invocations
//! from within the function.
//! More info: <https://tc39.github.io/ecma262/#sec-function-environment-records>
//! More info: <https://tc39.es/ecma262/#sec-function-environment-records>
use crate::{
builtins::value::{Value, ValueData},
@ -32,7 +32,7 @@ pub enum BindingStatus {
Uninitialized,
}
/// <https://tc39.github.io/ecma262/#table-16>
/// <https://tc39.es/ecma262/#table-16>
#[derive(Debug, Trace, Finalize, Clone)]
pub struct FunctionEnvironmentRecord {
pub env_rec: HashMap<String, DeclarativeEnvironmentRecordBinding>,
@ -179,18 +179,17 @@ impl EnvironmentRecordTrait for FunctionEnvironmentRecord {
}
fn get_binding_value(&self, name: &str, _strict: bool) -> Value {
match self.env_rec.get(name) {
Some(binding) => binding
if let Some(binding) = self.env_rec.get(name) {
binding
.value
.as_ref()
.expect("Could not get record as reference")
.clone(),
None => {
.clone()
} else {
// TODO: change this when error handling comes into play
panic!("ReferenceError: Cannot get binding value for {}", name);
}
}
}
fn delete_binding(&mut self, name: &str) -> bool {
match self.env_rec.get(name) {

2
boa/src/environment/global_environment_record.rs

@ -5,7 +5,7 @@
//! A global Environment Record provides the bindings for built-in globals (clause 18),
//! properties of the global object, and for all top-level declarations (13.2.8, 13.2.10)
//! that occur within a Script.
//! More info: <https://tc39.github.io/ecma262/#sec-global-environment-records>
//! More info: <https://tc39.es/ecma262/#sec-global-environment-records>
use crate::{
builtins::value::{Value, ValueData},

6
boa/src/environment/lexical_environment.rs

@ -1,6 +1,6 @@
//! # Lexical Environment
//!
//! <https://tc39.github.io/ecma262/#sec-lexical-environment-operations>
//! <https://tc39.es/ecma262/#sec-lexical-environment-operations>
//!
//! The following operations are used to operate upon lexical environments
//! This is the entrypoint to lexical environments.
@ -61,7 +61,7 @@ impl EnvironmentError {
}
impl fmt::Display for EnvironmentError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.details)
}
}
@ -237,7 +237,7 @@ pub fn new_function_environment(
this_binding_status: BindingStatus::Uninitialized, // hardcoding to uninitialized for now until short functions are properly supported
home_object: Gc::new(ValueData::Undefined),
new_target,
outer_env: outer, // this will come from Environment set as a private property of F - https://tc39.github.io/ecma262/#sec-ecmascript-function-objects
outer_env: outer, // this will come from Environment set as a private property of F - https://tc39.es/ecma262/#sec-ecmascript-function-objects
this_value: Gc::new(ValueData::Undefined), // TODO: this_value should start as an Option as its not always there to begin with
})))
}
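The `Formatter` to `Formatter<'_>` change here (and throughout this diff) spells out the anonymous lifetime, which is what the `rust_2018_idioms` lint group newly denied in `lib.rs` asks for (specifically `elided_lifetimes_in_paths`). A stand-alone sketch with a hypothetical error type of the same shape:
use std::fmt;
// Hypothetical error type mirroring `EnvironmentError` above.
struct EnvironmentError {
    details: String,
}
impl fmt::Display for EnvironmentError {
    // Writing `Formatter<'_>` names the elided lifetime explicitly, which is
    // what `elided_lifetimes_in_paths` (part of `rust_2018_idioms`) expects.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.details)
    }
}
fn main() {
    let err = EnvironmentError { details: "binding not found".to_string() };
    println!("{}", err);
}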

2
boa/src/environment/object_environment_record.rs

@ -4,7 +4,7 @@
//! An object Environment Record binds the set of string identifier names that directly
//! correspond to the property names of its binding object.
//! Property keys that are not strings in the form of an `IdentifierName` are not included in the set of bound identifiers.
//! More info: [Object Records](https://tc39.github.io/ecma262/#sec-object-environment-records)
//! More info: [Object Records](https://tc39.es/ecma262/#sec-object-environment-records)
use crate::{
builtins::{

181
boa/src/exec/mod.rs

@ -17,7 +17,7 @@ use crate::{
realm::Realm,
syntax::ast::{
constant::Const,
expr::{Expr, ExprDef},
node::{MethodDefinitionKind, Node, PropertyDefinition},
op::{AssignOp, BinOp, BitOp, CompOp, LogOp, NumOp, UnaryOp},
},
};
@ -32,7 +32,7 @@ pub trait Executor {
/// Make a new execution engine
fn new(realm: Realm) -> Self;
/// Run an expression
fn run(&mut self, expr: &Expr) -> ResultValue;
fn run(&mut self, expr: &Node) -> ResultValue;
}
/// A Javascript interpreter
@ -48,7 +48,7 @@ fn exec_assign_op(op: &AssignOp, v_a: ValueData, v_b: ValueData) -> Value {
AssignOp::Add => v_a + v_b,
AssignOp::Sub => v_a - v_b,
AssignOp::Mul => v_a * v_b,
AssignOp::Pow => v_a.as_num_to_power(v_b),
AssignOp::Exp => v_a.as_num_to_power(v_b),
AssignOp::Div => v_a / v_b,
AssignOp::Mod => v_a % v_b,
AssignOp::And => v_a & v_b,
@ -68,18 +68,18 @@ impl Executor for Interpreter {
}
#[allow(clippy::match_same_arms)]
fn run(&mut self, expr: &Expr) -> ResultValue {
match expr.def {
ExprDef::Const(Const::Null) => Ok(to_value(None::<()>)),
ExprDef::Const(Const::Undefined) => Ok(Gc::new(ValueData::Undefined)),
ExprDef::Const(Const::Num(num)) => Ok(to_value(num)),
ExprDef::Const(Const::Int(num)) => Ok(to_value(num)),
fn run(&mut self, node: &Node) -> ResultValue {
match *node {
Node::Const(Const::Null) => Ok(to_value(None::<()>)),
Node::Const(Const::Undefined) => Ok(Gc::new(ValueData::Undefined)),
Node::Const(Const::Num(num)) => Ok(to_value(num)),
Node::Const(Const::Int(num)) => Ok(to_value(num)),
// we can't move String from Const into value, because const is a garbage collected value
// Which means Drop() gets called on Const, but str will be gone at that point.
// Do Const values need to be garbage collected? We no longer need them once we've generated Values
ExprDef::Const(Const::String(ref str)) => Ok(to_value(str.to_owned())),
ExprDef::Const(Const::Bool(val)) => Ok(to_value(val)),
ExprDef::Block(ref es) => {
Node::Const(Const::String(ref str)) => Ok(to_value(str.to_owned())),
Node::Const(Const::Bool(val)) => Ok(to_value(val)),
Node::Block(ref es) => {
{
let env = &mut self.realm.environment;
env.push(new_declarative_environment(Some(
@ -105,31 +105,31 @@ impl Executor for Interpreter {
Ok(obj)
}
ExprDef::Local(ref name) => {
Node::Local(ref name) => {
let val = self.realm.environment.get_binding_value(name);
Ok(val)
}
ExprDef::GetConstField(ref obj, ref field) => {
Node::GetConstField(ref obj, ref field) => {
let val_obj = self.run(obj)?;
Ok(val_obj.borrow().get_field_slice(field))
}
ExprDef::GetField(ref obj, ref field) => {
Node::GetField(ref obj, ref field) => {
let val_obj = self.run(obj)?;
let val_field = self.run(field)?;
Ok(val_obj
.borrow()
.get_field_slice(&val_field.borrow().to_string()))
}
ExprDef::Call(ref callee, ref args) => {
let (this, func) = match callee.def {
ExprDef::GetConstField(ref obj, ref field) => {
Node::Call(ref callee, ref args) => {
let (this, func) = match callee.deref() {
Node::GetConstField(ref obj, ref field) => {
let mut obj = self.run(obj)?;
if obj.get_type() != "object" || obj.get_type() != "symbol" {
obj = self.to_object(&obj).expect("failed to convert to object");
}
(obj.clone(), obj.borrow().get_field_slice(field))
}
ExprDef::GetField(ref obj, ref field) => {
Node::GetField(ref obj, ref field) => {
let obj = self.run(obj)?;
let field = self.run(field)?;
(
@ -141,7 +141,7 @@ impl Executor for Interpreter {
};
let mut v_args = Vec::with_capacity(args.len());
for arg in args.iter() {
if let ExprDef::UnaryOp(UnaryOp::Spread, ref x) = arg.def {
if let Node::Spread(ref x) = arg.deref() {
let val = self.run(x)?;
let mut vals = self.extract_array_properties(&val).unwrap();
v_args.append(&mut vals);
@ -158,31 +158,31 @@ impl Executor for Interpreter {
fnct_result
}
ExprDef::WhileLoop(ref cond, ref expr) => {
Node::WhileLoop(ref cond, ref expr) => {
let mut result = Gc::new(ValueData::Undefined);
while self.run(cond)?.borrow().is_true() {
result = self.run(expr)?;
}
Ok(result)
}
ExprDef::If(ref cond, ref expr, None) => Ok(if self.run(cond)?.borrow().is_true() {
Node::If(ref cond, ref expr, None) => Ok(if self.run(cond)?.borrow().is_true() {
self.run(expr)?
} else {
Gc::new(ValueData::Undefined)
}),
ExprDef::If(ref cond, ref expr, Some(ref else_e)) => {
Node::If(ref cond, ref expr, Some(ref else_e)) => {
Ok(if self.run(cond)?.borrow().is_true() {
self.run(expr)?
} else {
self.run(else_e)?
})
}
ExprDef::Switch(ref val_e, ref vals, ref default) => {
Node::Switch(ref val_e, ref vals, ref default) => {
let val = self.run(val_e)?;
let mut result = Gc::new(ValueData::Null);
let mut matched = false;
for tup in vals.iter() {
let tup: &(Expr, Vec<Expr>) = tup;
let tup: &(Node, Vec<Node>) = tup;
let cond = &tup.0;
let block = &tup.1;
if val == self.run(cond)? {
@ -205,23 +205,39 @@ impl Executor for Interpreter {
}
Ok(result)
}
ExprDef::ObjectDecl(ref map) => {
Node::Object(ref properties) => {
let global_val = &self
.realm
.environment
.get_global_object()
.expect("Could not get the global object");
let obj = ValueData::new_obj(Some(global_val));
for (key, val) in map.iter() {
obj.borrow().set_field_slice(&key.clone(), self.run(val)?);
// TODO: Implement the rest of the property types.
for property in properties {
match property {
PropertyDefinition::Property(key, value) => {
obj.borrow().set_field_slice(&key.clone(), self.run(value)?);
}
PropertyDefinition::MethodDefinition(kind, name, func) => {
if let MethodDefinitionKind::Ordinary = kind {
obj.borrow().set_field_slice(&name.clone(), self.run(func)?);
} else {
// TODO: Implement other types of MethodDefinitionKinds.
unimplemented!("other types of property method definitions.");
}
}
i => unimplemented!("{:?} type of property", i),
}
}
Ok(obj)
}
ExprDef::ArrayDecl(ref arr) => {
Node::ArrayDecl(ref arr) => {
let array = array::new_array(self)?;
let mut elements: Vec<Value> = vec![];
for elem in arr.iter() {
if let ExprDef::UnaryOp(UnaryOp::Spread, ref x) = elem.def {
if let Node::Spread(ref x) = elem.deref() {
let val = self.run(x)?;
let mut vals = self.extract_array_properties(&val).unwrap();
elements.append(&mut vals);
@ -232,9 +248,9 @@ impl Executor for Interpreter {
array::add_to_array_object(&array, &elements)?;
Ok(array)
}
ExprDef::FunctionDecl(ref name, ref args, ref expr) => {
Node::FunctionDecl(ref name, ref args, ref expr) => {
let function =
Function::RegularFunc(RegularFunction::new(*expr.clone(), args.clone()));
Function::RegularFunc(RegularFunction::new(*expr.clone(), args.to_vec()));
let val = Gc::new(ValueData::Function(Box::new(GcCell::new(function))));
if name.is_some() {
self.realm.environment.create_mutable_binding(
@ -249,14 +265,14 @@ impl Executor for Interpreter {
}
Ok(val)
}
ExprDef::ArrowFunctionDecl(ref args, ref expr) => {
Node::ArrowFunctionDecl(ref args, ref expr) => {
let function =
Function::RegularFunc(RegularFunction::new(*expr.clone(), args.clone()));
Function::RegularFunc(RegularFunction::new(*expr.clone(), args.to_vec()));
Ok(Gc::new(ValueData::Function(Box::new(GcCell::new(
function,
)))))
}
ExprDef::BinOp(BinOp::Num(ref op), ref a, ref b) => {
Node::BinOp(BinOp::Num(ref op), ref a, ref b) => {
let v_r_a = self.run(a)?;
let v_r_b = self.run(b)?;
let v_a = (*v_r_a).clone();
@ -265,12 +281,12 @@ impl Executor for Interpreter {
NumOp::Add => v_a + v_b,
NumOp::Sub => v_a - v_b,
NumOp::Mul => v_a * v_b,
NumOp::Pow => v_a.as_num_to_power(v_b),
NumOp::Exp => v_a.as_num_to_power(v_b),
NumOp::Div => v_a / v_b,
NumOp::Mod => v_a % v_b,
}))
}
ExprDef::UnaryOp(ref op, ref a) => {
Node::UnaryOp(ref op, ref a) => {
let v_r_a = self.run(a)?;
let v_a = (*v_r_a).clone();
Ok(match *op {
@ -286,11 +302,10 @@ impl Executor for Interpreter {
!(num_v_a as i32)
})
}
UnaryOp::Spread => Gc::new(v_a), // for now we can do nothing but return the value as-is
_ => unreachable!(),
})
}
ExprDef::BinOp(BinOp::Bit(ref op), ref a, ref b) => {
Node::BinOp(BinOp::Bit(ref op), ref a, ref b) => {
let v_r_a = self.run(a)?;
let v_r_b = self.run(b)?;
let v_a = (*v_r_a).clone();
@ -301,9 +316,11 @@ impl Executor for Interpreter {
BitOp::Xor => v_a ^ v_b,
BitOp::Shl => v_a << v_b,
BitOp::Shr => v_a >> v_b,
// TODO Fix
BitOp::UShr => v_a >> v_b,
}))
}
ExprDef::BinOp(BinOp::Comp(ref op), ref a, ref b) => {
Node::BinOp(BinOp::Comp(ref op), ref a, ref b) => {
let v_r_a = self.run(a)?;
let v_r_b = self.run(b)?;
let v_a = v_r_a.borrow();
@ -323,7 +340,7 @@ impl Executor for Interpreter {
CompOp::LessThanOrEqual => v_a.to_num() <= v_b.to_num(),
}))
}
ExprDef::BinOp(BinOp::Log(ref op), ref a, ref b) => {
Node::BinOp(BinOp::Log(ref op), ref a, ref b) => {
// turn a `Value` into a `bool`
let to_bool =
|val| from_value::<bool>(val).expect("Could not convert JS value to bool");
@ -332,8 +349,8 @@ impl Executor for Interpreter {
LogOp::Or => to_value(to_bool(self.run(a)?) || to_bool(self.run(b)?)),
})
}
ExprDef::BinOp(BinOp::Assign(ref op), ref a, ref b) => match a.def {
ExprDef::Local(ref name) => {
Node::BinOp(BinOp::Assign(ref op), ref a, ref b) => match a.deref() {
Node::Local(ref name) => {
let v_a = (*self.realm.environment.get_binding_value(&name)).clone();
let v_b = (*self.run(b)?).clone();
let value = exec_assign_op(op, v_a, v_b);
@ -342,7 +359,7 @@ impl Executor for Interpreter {
.set_mutable_binding(&name, value.clone(), true);
Ok(value)
}
ExprDef::GetConstField(ref obj, ref field) => {
Node::GetConstField(ref obj, ref field) => {
let v_r_a = self.run(obj)?;
let v_a = (*v_r_a.borrow().get_field_slice(field)).clone();
let v_b = (*self.run(b)?).clone();
@ -354,7 +371,12 @@ impl Executor for Interpreter {
}
_ => Ok(Gc::new(ValueData::Undefined)),
},
ExprDef::Construct(ref callee, ref args) => {
Node::New(ref call) => {
let (callee, args) = match call.as_ref() {
Node::Call(callee, args) => (callee, args),
_ => unreachable!("Node::New(ref call): 'call' must only be Node::Call type."),
};
let func_object = self.run(callee)?;
let mut v_args = Vec::with_capacity(args.len());
for arg in args.iter() {
@ -390,8 +412,8 @@ impl Executor for Interpreter {
for i in 0..data.args.len() {
let arg_expr =
data.args.get(i).expect("Could not get data argument");
let name = match arg_expr.def {
ExprDef::Local(ref n) => Some(n),
let name = match arg_expr.deref() {
Node::Local(ref n) => Some(n),
_ => None,
}
.expect("Could not get argument");
@ -403,7 +425,7 @@ impl Executor for Interpreter {
);
env.initialize_binding(name, expr.to_owned());
}
let result = self.run(&data.expr);
let result = self.run(&data.node);
self.realm.environment.pop();
result
}
@ -411,7 +433,7 @@ impl Executor for Interpreter {
_ => Ok(Gc::new(ValueData::Undefined)),
}
}
ExprDef::Return(ref ret) => {
Node::Return(ref ret) => {
let result = match *ret {
Some(ref v) => self.run(v),
None => Ok(Gc::new(ValueData::Undefined)),
@ -420,11 +442,11 @@ impl Executor for Interpreter {
self.is_return = true;
result
}
ExprDef::Throw(ref ex) => Err(self.run(ex)?),
ExprDef::Assign(ref ref_e, ref val_e) => {
Node::Throw(ref ex) => Err(self.run(ex)?),
Node::Assign(ref ref_e, ref val_e) => {
let val = self.run(val_e)?;
match ref_e.def {
ExprDef::Local(ref name) => {
match ref_e.deref() {
Node::Local(ref name) => {
if self.realm.environment.has_binding(name) {
// Binding already exists
self.realm
@ -439,13 +461,13 @@ impl Executor for Interpreter {
self.realm.environment.initialize_binding(name, val.clone());
}
}
ExprDef::GetConstField(ref obj, ref field) => {
Node::GetConstField(ref obj, ref field) => {
let val_obj = self.run(obj)?;
val_obj
.borrow()
.set_field_slice(&field.clone(), val.clone());
}
ExprDef::GetField(ref obj, ref field) => {
Node::GetField(ref obj, ref field) => {
let val_obj = self.run(obj)?;
let val_field = self.run(field)?;
val_obj.borrow().set_field(val_field, val.clone());
@ -454,7 +476,7 @@ impl Executor for Interpreter {
}
Ok(val)
}
ExprDef::VarDecl(ref vars) => {
Node::VarDecl(ref vars) => {
for var in vars.iter() {
let (name, value) = var.clone();
let val = match value {
@ -470,7 +492,7 @@ impl Executor for Interpreter {
}
Ok(Gc::new(ValueData::Undefined))
}
ExprDef::LetDecl(ref vars) => {
Node::LetDecl(ref vars) => {
for var in vars.iter() {
let (name, value) = var.clone();
let val = match value {
@ -486,7 +508,7 @@ impl Executor for Interpreter {
}
Ok(Gc::new(ValueData::Undefined))
}
ExprDef::ConstDecl(ref vars) => {
Node::ConstDecl(ref vars) => {
for (name, value) in vars.iter() {
self.realm.environment.create_immutable_binding(
name.clone(),
@ -498,7 +520,7 @@ impl Executor for Interpreter {
}
Ok(Gc::new(ValueData::Undefined))
}
ExprDef::TypeOf(ref val_e) => {
Node::TypeOf(ref val_e) => {
let val = self.run(val_e)?;
Ok(to_value(match *val {
ValueData::Undefined => "undefined",
@ -510,6 +532,37 @@ impl Executor for Interpreter {
ValueData::Function(_) => "function",
}))
}
Node::StatementList(ref list) => {
{
let env = &mut self.realm.environment;
env.push(new_declarative_environment(Some(
env.get_current_environment_ref().clone(),
)));
}
let mut obj = to_value(None::<()>);
for (i, item) in list.iter().enumerate() {
let val = self.run(item)?;
// early return
if self.is_return {
obj = val;
break;
}
if i + 1 == list.len() {
obj = val;
}
}
// pop the block env
let _ = self.realm.environment.pop();
Ok(obj)
}
Node::Spread(ref node) => {
// TODO: for now we can do nothing but return the value as-is
Ok(Gc::new((*self.run(node)?).clone()))
}
ref i => unimplemented!("{}", i),
}
}
}
@ -549,8 +602,8 @@ impl Interpreter {
));
for i in 0..data.args.len() {
let arg_expr = data.args.get(i).expect("Could not get data argument");
match arg_expr.def {
ExprDef::Local(ref name) => {
match arg_expr.deref() {
Node::Local(ref name) => {
let expr: &Value =
arguments_list.get(i).expect("Could not get argument");
self.realm.environment.create_mutable_binding(
@ -562,8 +615,8 @@ impl Interpreter {
.environment
.initialize_binding(name, expr.clone());
}
ExprDef::UnaryOp(UnaryOp::Spread, ref expr) => {
if let ExprDef::Local(ref name) = expr.def {
Node::Spread(ref expr) => {
if let Node::Local(ref name) = expr.deref() {
let array = array::new_array(self)?;
array::add_to_array_object(&array, &arguments_list[i..])?;
@ -592,7 +645,7 @@ impl Interpreter {
.environment
.initialize_binding("arguments", arguments_obj);
let result = self.run(&data.expr);
let result = self.run(&data.node);
self.realm.environment.pop();
result
}
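The new `Node::StatementList` arm above evaluates each item, keeps the value of the last one, and stops early once a `return` has set `is_return`. A self-contained sketch of that control flow, with simplified stand-ins for boa's interpreter and value types:
#[derive(Clone, Debug, PartialEq)]
enum Value {
    Undefined,
    Int(i64),
}
struct Interp {
    is_return: bool,
}
impl Interp {
    // Stand-in for `Interpreter::run` on a single statement.
    fn run_item(&mut self, item: &Value) -> Value {
        item.clone()
    }
    // Value of the last statement, unless a `return` was hit part-way through.
    fn run_list(&mut self, list: &[Value]) -> Value {
        let mut obj = Value::Undefined;
        for (i, item) in list.iter().enumerate() {
            let val = self.run_item(item);
            if self.is_return {
                obj = val;
                break;
            }
            if i + 1 == list.len() {
                obj = val;
            }
        }
        obj
    }
}
fn main() {
    let mut normal = Interp { is_return: false };
    assert_eq!(normal.run_list(&[Value::Int(1), Value::Int(2)]), Value::Int(2));
    let mut early = Interp { is_return: true };
    assert_eq!(early.run_list(&[Value::Int(1), Value::Int(2)]), Value::Int(1));
}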

37
boa/src/lib.rs

@ -1,6 +1,30 @@
#![deny(unused_qualifications, clippy::correctness, clippy::style)]
#![warn(clippy::perf)]
#![allow(clippy::cognitive_complexity)]
#![deny(
unused_qualifications,
clippy::all,
unused_qualifications,
unused_import_braces,
unused_lifetimes,
unreachable_pub,
trivial_numeric_casts,
rustdoc,
missing_debug_implementations,
missing_copy_implementations,
deprecated_in_future,
meta_variable_misuse,
non_ascii_idents,
rust_2018_compatibility,
rust_2018_idioms,
future_incompatible,
nonstandard_style
)]
#![warn(clippy::perf, clippy::single_match_else, clippy::dbg_macro)]
#![allow(
clippy::missing_inline_in_public_items,
clippy::cognitive_complexity,
clippy::must_use_candidate,
clippy::missing_errors_doc,
clippy::as_conversions
)]
pub mod builtins;
pub mod environment;
@ -16,17 +40,18 @@ use crate::{
builtins::value::ResultValue,
exec::{Executor, Interpreter},
realm::Realm,
syntax::{ast::expr::Expr, lexer::Lexer, parser::Parser},
syntax::{ast::node::Node, lexer::Lexer, parser::Parser},
};
#[cfg(feature = "serde-ast")]
pub use serde_json;
fn parser_expr(src: &str) -> Result<Expr, String> {
fn parser_expr(src: &str) -> Result<Node, String> {
let mut lexer = Lexer::new(src);
lexer.lex().map_err(|e| format!("SyntaxError: {}", e))?;
let tokens = lexer.tokens;
Parser::new(tokens)
// dbg!(&tokens);
Parser::new(&tokens)
.parse_all()
.map_err(|e| format!("ParsingError: {}", e))
}
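`parser_expr` above now lexes into a token buffer and hands the parser a reference to it, returning a `Node` instead of an `Expr`. A hedged usage sketch of that pipeline from outside the crate; it assumes the `syntax` modules and the `tokens` field are publicly reachable (not shown in this hunk) and simply pretty-prints the AST via `Node`'s `Display` impl, which could then be fed to `Interpreter::run` as in `exec/mod.rs`:
use boa::syntax::{ast::node::Node, lexer::Lexer, parser::Parser};
// Mirrors `parser_expr`: lex, then parse the borrowed token buffer.
fn parse(src: &str) -> Result<Node, String> {
    let mut lexer = Lexer::new(src);
    lexer.lex().map_err(|e| format!("SyntaxError: {}", e))?;
    Parser::new(&lexer.tokens)
        .parse_all()
        .map_err(|e| format!("ParsingError: {}", e))
}
fn main() {
    match parse("var a = 5; a + 1") {
        Ok(ast) => println!("{}", ast), // pretty-printed AST
        Err(msg) => eprintln!("{}", msg),
    }
}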

5
boa/src/syntax/ast/constant.rs

@ -3,10 +3,9 @@ use std::fmt::{Display, Formatter, Result};
#[cfg(feature = "serde-ast")]
use serde::{Deserialize, Serialize};
/// A Javascript Constant.
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
/// A Javascript Constant
pub enum Const {
/// A UTF-8 string, such as `"Hello, world"`
String(String),
@ -23,7 +22,7 @@ pub enum Const {
}
impl Display for Const {
fn fmt(&self, f: &mut Formatter) -> Result {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
match *self {
Const::String(ref st) => write!(f, "\"{}\"", st),
Const::Num(num) => write!(f, "{}", num),

246
boa/src/syntax/ast/expr.rs

@ -1,246 +0,0 @@
use crate::syntax::ast::{
constant::Const,
op::{BinOp, Operator, UnaryOp},
};
use gc_derive::{Finalize, Trace};
use std::{
collections::btree_map::BTreeMap,
fmt::{Display, Formatter, Result},
};
#[cfg(feature = "serde-ast")]
use serde::{Deserialize, Serialize};
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Trace, Finalize, Debug, PartialEq)]
pub struct Expr {
/// The expression definition
pub def: ExprDef,
}
impl Expr {
/// Create a new expression with a starting and ending position
pub fn new(def: ExprDef) -> Self {
Self { def }
}
}
impl Display for Expr {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "{}", self.def)
}
}
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
/// A Javascript Expression
pub enum ExprDef {
/// Run a operation between 2 expressions
BinOp(BinOp, Box<Expr>, Box<Expr>),
/// Run an operation on a value
UnaryOp(UnaryOp, Box<Expr>),
/// Make a constant value
Const(Const),
/// Const declaration
ConstDecl(Vec<(String, Expr)>),
/// Construct an object from the function and arg{
Construct(Box<Expr>, Vec<Expr>),
/// Run several expressions from top-to-bottom
Block(Vec<Expr>),
/// Load a reference to a value, or a function argument
Local(String),
/// Gets the constant field of a value
GetConstField(Box<Expr>, String),
/// Gets the field of a value
GetField(Box<Expr>, Box<Expr>),
/// Call a function with some values
Call(Box<Expr>, Vec<Expr>),
/// Repeatedly run an expression while the conditional expression resolves to true
WhileLoop(Box<Expr>, Box<Expr>),
/// Check if a conditional expression is true and run an expression if it is and another expression if it isn't
If(Box<Expr>, Box<Expr>, Option<Box<Expr>>),
/// Run blocks whose cases match the expression
Switch(Box<Expr>, Vec<(Expr, Vec<Expr>)>, Option<Box<Expr>>),
/// Create an object out of the binary tree given
ObjectDecl(Box<BTreeMap<String, Expr>>),
/// Create an array with items inside
ArrayDecl(Vec<Expr>),
/// Create a function with the given name, arguments, and expression
FunctionDecl(Option<String>, Vec<Expr>, Box<Expr>),
/// Create an arrow function with the given arguments and expression
ArrowFunctionDecl(Vec<Expr>, Box<Expr>),
/// Return the expression from a function
Return(Option<Box<Expr>>),
/// Throw a value
Throw(Box<Expr>),
/// Assign an expression to a value
Assign(Box<Expr>, Box<Expr>),
/// {
/// A variable declaratio
/// }
VarDecl(Vec<(String, Option<Expr>)>),
/// Let declaraton
LetDecl(Vec<(String, Option<Expr>)>),
/// Return a string representing the type of the given expression
TypeOf(Box<Expr>),
}
impl Operator for ExprDef {
fn get_assoc(&self) -> bool {
match *self {
ExprDef::Construct(_, _)
| ExprDef::UnaryOp(_, _)
| ExprDef::TypeOf(_)
| ExprDef::If(_, _, _)
| ExprDef::Assign(_, _) => false,
_ => true,
}
}
fn get_precedence(&self) -> u64 {
match self {
ExprDef::GetField(_, _) | ExprDef::GetConstField(_, _) => 1,
ExprDef::Call(_, _) | ExprDef::Construct(_, _) => 2,
ExprDef::UnaryOp(UnaryOp::IncrementPost, _)
| ExprDef::UnaryOp(UnaryOp::IncrementPre, _)
| ExprDef::UnaryOp(UnaryOp::DecrementPost, _)
| ExprDef::UnaryOp(UnaryOp::DecrementPre, _) => 3,
ExprDef::UnaryOp(UnaryOp::Not, _)
| ExprDef::UnaryOp(UnaryOp::Tilde, _)
| ExprDef::UnaryOp(UnaryOp::Minus, _)
| ExprDef::TypeOf(_) => 4,
ExprDef::BinOp(op, _, _) => op.get_precedence(),
ExprDef::If(_, _, _) => 15,
// 16 should be yield
ExprDef::Assign(_, _) => 17,
_ => 19,
}
}
}
impl Display for ExprDef {
fn fmt(&self, f: &mut Formatter) -> Result {
match *self {
ExprDef::Const(ref c) => write!(f, "{}", c),
ExprDef::Block(ref block) => {
write!(f, "{{")?;
for expr in block.iter() {
write!(f, "{};", expr)?;
}
write!(f, "}}")
}
ExprDef::Local(ref s) => write!(f, "{}", s),
ExprDef::GetConstField(ref ex, ref field) => write!(f, "{}.{}", ex, field),
ExprDef::GetField(ref ex, ref field) => write!(f, "{}[{}]", ex, field),
ExprDef::Call(ref ex, ref args) => {
write!(f, "{}(", ex)?;
let arg_strs: Vec<String> = args.iter().map(ToString::to_string).collect();
write!(f, "{})", arg_strs.join(","))
}
ExprDef::Construct(ref func, ref args) => {
f.write_fmt(format_args!("new {}", func))?;
f.write_str("(")?;
let mut first = true;
for e in args.iter() {
if !first {
f.write_str(", ")?;
}
first = false;
Display::fmt(e, f)?;
}
f.write_str(")")
}
ExprDef::WhileLoop(ref cond, ref expr) => write!(f, "while({}) {}", cond, expr),
ExprDef::If(ref cond, ref expr, None) => write!(f, "if({}) {}", cond, expr),
ExprDef::If(ref cond, ref expr, Some(ref else_e)) => {
write!(f, "if({}) {} else {}", cond, expr, else_e)
}
ExprDef::Switch(ref val, ref vals, None) => {
f.write_fmt(format_args!("switch({})", val))?;
f.write_str(" {")?;
for e in vals.iter() {
f.write_fmt(format_args!("case {}: \n", e.0))?;
join_expr(f, &e.1)?;
}
f.write_str("}")
}
ExprDef::Switch(ref val, ref vals, Some(ref def)) => {
f.write_fmt(format_args!("switch({})", val))?;
f.write_str(" {")?;
for e in vals.iter() {
f.write_fmt(format_args!("case {}: \n", e.0))?;
join_expr(f, &e.1)?;
}
f.write_str("default: \n")?;
Display::fmt(def, f)?;
f.write_str("}")
}
ExprDef::ObjectDecl(ref map) => {
f.write_str("{")?;
for (key, value) in map.iter() {
f.write_fmt(format_args!("{}: {},", key, value))?;
}
f.write_str("}")
}
ExprDef::ArrayDecl(ref arr) => {
f.write_str("[")?;
join_expr(f, arr)?;
f.write_str("]")
}
ExprDef::FunctionDecl(ref name, ref args, ref expr) => {
write!(f, "function ")?;
if let Some(func_name) = name {
f.write_fmt(format_args!("{}", func_name))?;
}
write!(f, "{{")?;
join_expr(f, args)?;
write!(f, "}} {}", expr)
}
ExprDef::ArrowFunctionDecl(ref args, ref expr) => {
write!(f, "(")?;
join_expr(f, args)?;
write!(f, ") => {}", expr)
}
ExprDef::BinOp(ref op, ref a, ref b) => write!(f, "{} {} {}", a, op, b),
ExprDef::UnaryOp(ref op, ref a) => write!(f, "{}{}", op, a),
ExprDef::Return(Some(ref ex)) => write!(f, "return {}", ex),
ExprDef::Return(None) => write!(f, "return"),
ExprDef::Throw(ref ex) => write!(f, "throw {}", ex),
ExprDef::Assign(ref ref_e, ref val) => write!(f, "{} = {}", ref_e, val),
ExprDef::VarDecl(ref vars) | ExprDef::LetDecl(ref vars) => {
if let ExprDef::VarDecl(_) = *self {
f.write_str("var ")?;
} else {
f.write_str("let ")?;
}
for (key, val) in vars.iter() {
match val {
Some(x) => f.write_fmt(format_args!("{} = {}", key, x))?,
None => f.write_fmt(format_args!("{}", key))?,
}
}
Ok(())
}
ExprDef::ConstDecl(ref vars) => {
f.write_str("const ")?;
for (key, val) in vars.iter() {
f.write_fmt(format_args!("{} = {}", key, val))?
}
Ok(())
}
ExprDef::TypeOf(ref e) => write!(f, "typeof {}", e),
}
}
}
/// `join_expr` - Utility to join multiple Expressions into a single string
fn join_expr(f: &mut Formatter, expr: &[Expr]) -> Result {
let mut first = true;
for e in expr.iter() {
if !first {
f.write_str(", ")?;
}
first = false;
Display::fmt(e, f)?;
}
Ok(())
}

9
boa/src/syntax/ast/keyword.rs

@ -7,10 +7,11 @@ use std::{
#[cfg(feature = "serde-ast")]
use serde::{Deserialize, Serialize};
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Copy, PartialEq, Debug)]
/// A Javascript Keyword
///
/// As specified by <https://www.ecma-international.org/ecma-262/#sec-keywords>
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum Keyword {
/// The `await` keyword
Await,
@ -89,7 +90,7 @@ pub enum Keyword {
#[derive(Debug, Clone, Copy)]
pub struct KeywordError;
impl Display for KeywordError {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
write!(f, "invalid token")
}
}
@ -150,7 +151,7 @@ impl FromStr for Keyword {
}
}
impl Display for Keyword {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
write!(
f,
"{}",

2
boa/src/syntax/ast/mod.rs

@ -1,6 +1,6 @@
pub mod constant;
pub mod expr;
pub mod keyword;
pub mod node;
pub mod op;
pub mod pos;
pub mod punc;

372
boa/src/syntax/ast/node.rs

@ -0,0 +1,372 @@
use crate::syntax::ast::{
constant::Const,
op::{BinOp, Operator, UnaryOp},
};
use gc_derive::{Finalize, Trace};
use std::fmt;
#[cfg(feature = "serde-ast")]
use serde::{Deserialize, Serialize};
/// A Javascript AST Node.
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
pub enum Node {
/// Create an array with items inside.
ArrayDecl(Vec<Node>),
/// Create an arrow function with the given arguments and internal AST node.
ArrowFunctionDecl(Vec<FormalParameter>, Box<Node>),
/// Assign an AST node result to an AST node.
Assign(Box<Node>, Box<Node>),
/// Run an operation between 2 AST nodes.
BinOp(BinOp, Box<Node>, Box<Node>),
/// Run several AST nodes from top-to-bottom.
Block(Vec<Node>),
/// Break statement with an optional label.
Break(Option<String>),
/// Call a function with some values.
Call(Box<Node>, Vec<Node>),
/// Conditional Operator (`{condition} ? {if true} : {if false}`).
ConditionalOp(Box<Node>, Box<Node>, Box<Node>),
/// Make a constant value.
Const(Const),
/// Const declaration.
ConstDecl(Vec<(String, Node)>),
/// Continue with an optional label.
Continue(Option<String>),
/// Create a function with the given name, arguments, and internal AST node.
FunctionDecl(Option<String>, Vec<FormalParameter>, Box<Node>),
/// Gets the constant field of a value.
GetConstField(Box<Node>, String),
/// Gets the [field] of a value.
GetField(Box<Node>, Box<Node>),
/// [init], [cond], [step], body
ForLoop(
Option<Box<Node>>,
Option<Box<Node>>,
Option<Box<Node>>,
Box<Node>,
),
/// Check if a conditional expression is true and run an expression if it is and another expression if it isn't
If(Box<Node>, Box<Node>, Option<Box<Node>>),
/// Let declaration
LetDecl(Vec<(String, Option<Node>)>),
/// Load a reference to a value, or a function argument
Local(String),
/// New
New(Box<Node>),
/// Object Declaration
Object(Vec<PropertyDefinition>),
/// Return the expression from a function
Return(Option<Box<Node>>),
/// Run blocks whose cases match the expression
Switch(Box<Node>, Vec<(Node, Vec<Node>)>, Option<Box<Node>>),
/// `...a` - spread an iterable value
Spread(Box<Node>),
// Similar to Block but without the braces
StatementList(Vec<Node>),
/// Throw a value
Throw(Box<Node>),
/// Return a string representing the type of the given expression
TypeOf(Box<Node>),
/// Try / Catch
Try(
Box<Node>,
Option<Box<Node>>,
Option<Box<Node>>,
Option<Box<Node>>,
),
/// The JavaScript `this` keyword refers to the object it belongs to.
///
/// A property of an execution context (global, function or eval) that,
/// in non–strict mode, is always a reference to an object and in strict
/// mode can be any value.
///
/// For more information, please check: <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/this>
This,
/// Run an operation on a value
UnaryOp(UnaryOp, Box<Node>),
/// A variable declaration
VarDecl(Vec<(String, Option<Node>)>),
/// Repeatedly run an expression while the conditional expression resolves to true
WhileLoop(Box<Node>, Box<Node>),
}
impl Operator for Node {
fn get_assoc(&self) -> bool {
match *self {
Node::UnaryOp(_, _) | Node::TypeOf(_) | Node::If(_, _, _) | Node::Assign(_, _) => false,
_ => true,
}
}
fn get_precedence(&self) -> u64 {
match self {
Node::GetField(_, _) | Node::GetConstField(_, _) => 1,
Node::Call(_, _) => 2,
Node::UnaryOp(UnaryOp::IncrementPost, _)
| Node::UnaryOp(UnaryOp::IncrementPre, _)
| Node::UnaryOp(UnaryOp::DecrementPost, _)
| Node::UnaryOp(UnaryOp::DecrementPre, _) => 3,
Node::UnaryOp(UnaryOp::Not, _)
| Node::UnaryOp(UnaryOp::Tilde, _)
| Node::UnaryOp(UnaryOp::Minus, _)
| Node::TypeOf(_) => 4,
Node::BinOp(op, _, _) => op.get_precedence(),
Node::If(_, _, _) => 15,
// 16 should be yield
Node::Assign(_, _) => 17,
_ => 19,
}
}
}
impl fmt::Display for Node {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.display(f, 0)
}
}
impl Node {
/// Implements the display formatting with indentation.
fn display(&self, f: &mut fmt::Formatter<'_>, indentation: usize) -> fmt::Result {
let indent = " ".repeat(indentation);
match *self {
Self::Block(_) => {}
_ => write!(f, "{}", indent)?,
}
match *self {
Self::Const(ref c) => write!(f, "{}", c),
Self::ConditionalOp(_, _, _) => write!(f, "Conditional op"), // TODO
Self::ForLoop(_, _, _, _) => write!(f, "for loop"), // TODO
Self::This => write!(f, "this"), // TODO
Self::Try(_, _, _, _) => write!(f, "try/catch/finally"), // TODO
Self::Break(_) => write!(f, "break"), // TODO: add potential value
Self::Continue(_) => write!(f, "continue"), // TODO: add potential value
Self::Spread(ref node) => write!(f, "...{}", node),
Self::Block(ref block) => {
writeln!(f, "{{")?;
for node in block.iter() {
node.display(f, indentation + 1)?;
match node {
Self::Block(_)
| Self::If(_, _, _)
| Self::Switch(_, _, _)
| Self::FunctionDecl(_, _, _)
| Self::WhileLoop(_, _)
| Self::StatementList(_) => {}
_ => write!(f, ";")?,
}
writeln!(f)?;
}
write!(f, "{}}}", indent)
}
Node::StatementList(ref list) => {
for node in list.iter() {
node.display(f, indentation + 1)?;
match node {
Self::Block(_)
| Self::If(_, _, _)
| Self::Switch(_, _, _)
| Self::FunctionDecl(_, _, _)
| Self::WhileLoop(_, _)
| Self::StatementList(_) => {}
_ => write!(f, ";")?,
}
writeln!(f)?;
}
Ok(())
}
Self::Local(ref s) => write!(f, "{}", s),
Self::GetConstField(ref ex, ref field) => write!(f, "{}.{}", ex, field),
Self::GetField(ref ex, ref field) => write!(f, "{}[{}]", ex, field),
Self::Call(ref ex, ref args) => {
write!(f, "{}(", ex)?;
let arg_strs: Vec<String> = args.iter().map(ToString::to_string).collect();
write!(f, "{})", arg_strs.join(", "))
}
Self::New(ref call) => {
let (func, args) = match call.as_ref() {
Node::Call(func, args) => (func, args),
_ => unreachable!("Node::New(ref call): 'call' must only be Node::Call type."),
};
write!(f, "new {}", func)?;
f.write_str("(")?;
let mut first = true;
for e in args.iter() {
if !first {
f.write_str(", ")?;
}
first = false;
write!(f, "{}", e)?;
}
f.write_str(")")
}
Self::WhileLoop(ref cond, ref node) => {
write!(f, "while ({}) ", cond)?;
node.display(f, indentation)
}
Self::If(ref cond, ref node, None) => {
write!(f, "if ({}) ", cond)?;
node.display(f, indentation)
}
Self::If(ref cond, ref node, Some(ref else_e)) => {
write!(f, "if ({}) ", cond)?;
node.display(f, indentation)?;
f.write_str(" else ")?;
else_e.display(f, indentation)
}
Self::Switch(ref val, ref vals, None) => {
writeln!(f, "switch ({}) {{", val)?;
for e in vals.iter() {
writeln!(f, "{}case {}:", indent, e.0)?;
join_nodes(f, &e.1)?;
}
writeln!(f, "{}}}", indent)
}
Self::Switch(ref val, ref vals, Some(ref def)) => {
writeln!(f, "switch ({}) {{", val)?;
for e in vals.iter() {
writeln!(f, "{}case {}:", indent, e.0)?;
join_nodes(f, &e.1)?;
}
writeln!(f, "{}default:", indent)?;
def.display(f, indentation + 1)?;
write!(f, "{}}}", indent)
}
Self::Object(ref properties) => {
f.write_str("{\n")?;
for property in properties {
match property {
PropertyDefinition::IdentifierReference(key) => {
write!(f, "{} {},", indent, key)?;
}
PropertyDefinition::Property(key, value) => {
write!(f, "{} {}: {},", indent, key, value)?;
}
PropertyDefinition::SpreadObject(key) => {
write!(f, "{} ...{},", indent, key)?;
}
PropertyDefinition::MethodDefinition(_kind, _key, _node) => {
// TODO: Implement display for PropertyDefinition::MethodDefinition.
unimplemented!("Display for PropertyDefinition::MethodDefinition");
}
}
}
f.write_str("}")
}
Self::ArrayDecl(ref arr) => {
f.write_str("[")?;
join_nodes(f, arr)?;
f.write_str("]")
}
Self::FunctionDecl(ref name, ref _args, ref node) => {
write!(f, "function ")?;
if let Some(func_name) = name {
write!(f, "{}", func_name)?;
}
write!(f, "{{")?;
//join_nodes(f, args)?; TODO: port
f.write_str("} ")?;
node.display(f, indentation + 1)
}
Self::ArrowFunctionDecl(ref _args, ref node) => {
write!(f, "(")?;
//join_nodes(f, args)?; TODO: port
f.write_str(") => ")?;
node.display(f, indentation)
}
Self::BinOp(ref op, ref a, ref b) => write!(f, "{} {} {}", a, op, b),
Self::UnaryOp(ref op, ref a) => write!(f, "{}{}", op, a),
Self::Return(Some(ref ex)) => write!(f, "return {}", ex),
Self::Return(None) => write!(f, "return"),
Self::Throw(ref ex) => write!(f, "throw {}", ex),
Self::Assign(ref ref_e, ref val) => write!(f, "{} = {}", ref_e, val),
Self::VarDecl(ref vars) | Self::LetDecl(ref vars) => {
if let Self::VarDecl(_) = *self {
f.write_str("var ")?;
} else {
f.write_str("let ")?;
}
for (key, val) in vars.iter() {
match val {
Some(x) => write!(f, "{} = {}", key, x)?,
None => write!(f, "{}", key)?,
}
}
Ok(())
}
Self::ConstDecl(ref vars) => {
f.write_str("const ")?;
for (key, val) in vars.iter() {
write!(f, "{} = {}", key, val)?
}
Ok(())
}
Self::TypeOf(ref e) => write!(f, "typeof {}", e),
}
}
}
/// Utility to join multiple Nodes into a single string.
fn join_nodes(f: &mut fmt::Formatter<'_>, nodes: &[Node]) -> fmt::Result {
let mut first = true;
for e in nodes {
if !first {
f.write_str(", ")?;
}
first = false;
write!(f, "{}", e)?;
}
Ok(())
}
/// "Formal parameter" is a fancy way of saying "function parameter".
///
/// In the declaration of a function, the parameters must be identifiers,
/// not any value like numbers, strings, or objects.
///```javascript
///function foo(formalParameter1, formalParameter2) {
///}
///```
/// For more information, please check <https://tc39.es/ecma262/#prod-FormalParameter>
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, PartialEq, Trace, Finalize)]
pub struct FormalParameter {
pub name: String,
pub init: Option<Box<Node>>,
pub is_rest_param: bool,
}
pub type FormalParameters = Vec<FormalParameter>;
impl FormalParameter {
pub fn new(name: String, init: Option<Box<Node>>, is_rest_param: bool) -> FormalParameter {
FormalParameter {
name,
init,
is_rest_param,
}
}
}
// TODO: Support all features: https://tc39.es/ecma262/#prod-PropertyDefinition
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, PartialEq, Trace, Finalize)]
pub enum PropertyDefinition {
IdentifierReference(String),
Property(String, Node),
MethodDefinition(MethodDefinitionKind, String, Node),
SpreadObject(Node),
}
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, PartialEq, Trace, Finalize)]
pub enum MethodDefinitionKind {
Get,
Set,
Ordinary,
}
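`Node` replaces the old `Expr`/`ExprDef` pair with a single enum, so an AST is now built directly from `Node` variants. A hedged sketch of constructing the `new Foo(bar)` shape that the interpreter's `Node::New` arm expects; the module path is assumed to be public:
use boa::syntax::ast::node::Node;
fn main() {
    // `new Foo(bar)`: a `New` node wrapping a `Call` node, which is exactly
    // the shape `Node::New` is unwrapped into by the interpreter above.
    let ast = Node::New(Box::new(Node::Call(
        Box::new(Node::Local("Foo".to_string())),
        vec![Node::Local("bar".to_string())],
    )));
    // The Display impl shown above prints this back as `new Foo(bar)`.
    println!("{}", ast);
}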

81
boa/src/syntax/ast/op.rs

@ -16,9 +16,9 @@ pub trait Operator {
}
}
/// A numeric operation between 2 values
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
/// A numeric operation between 2 values
pub enum NumOp {
/// `a + b` - Addition
Add,
@ -29,13 +29,13 @@ pub enum NumOp {
/// `a * b` - Multiplication
Mul,
/// `a ** b` - Exponentiation
Pow,
Exp,
/// `a % b` - Modulus
Mod,
}
impl Display for NumOp {
fn fmt(&self, f: &mut Formatter) -> Result {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
write!(
f,
"{}",
@ -44,16 +44,18 @@ impl Display for NumOp {
NumOp::Sub => "-",
NumOp::Div => "/",
NumOp::Mul => "*",
NumOp::Pow => "**",
NumOp::Exp => "**",
NumOp::Mod => "%",
}
)
}
}
/// A unary operation on a single value
///
/// For more information, please check: <https://tc39.es/ecma262/#prod-UnaryExpression>
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
/// A unary operation on a single value
pub enum UnaryOp {
/// `a++` - increment the value
IncrementPost,
@ -71,12 +73,38 @@ pub enum UnaryOp {
Not,
/// `~a` - bitwise-not of the value
Tilde,
/// `...a` - spread an iterable value
Spread,
/// `typeof` - Get the type of object
TypeOf,
/// The JavaScript `delete` operator removes a property from an object.
///
/// Unlike what common belief suggests, the delete operator has nothing to do with
/// directly freeing memory. Memory management is done indirectly via breaking references.
/// If no more references to the same property are held, it is eventually released automatically.
///
/// The `delete` operator returns `true` for all cases except when the property is an
/// [own](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/hasOwnProperty)
/// [non-configurable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Cant_delete)
/// property, in which case, `false` is returned in non-strict mode.
///
/// For more information, please check: <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/delete>
Delete,
/// The `void` operator evaluates the given `expression` and then returns `undefined`.
///
/// This operator allows evaluating expressions that produce a value into places where an
/// expression that evaluates to `undefined` is desired.
/// The `void` operator is often used merely to obtain the `undefined` primitive value, usually using `void(0)`
/// (which is equivalent to `void 0`). In these cases, the global variable undefined can be used.
///
/// When using an [immediately-invoked function expression](https://developer.mozilla.org/en-US/docs/Glossary/IIFE),
/// `void` can be used to force the function keyword to be treated as an expression instead of a declaration.
///
/// For more information, please check: <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/void>
Void,
}
impl Display for UnaryOp {
fn fmt(&self, f: &mut Formatter) -> Result {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
write!(
f,
"{}",
@ -87,15 +115,17 @@ impl Display for UnaryOp {
UnaryOp::Minus => "-",
UnaryOp::Not => "!",
UnaryOp::Tilde => "~",
UnaryOp::Spread => "...",
UnaryOp::Delete => "delete",
UnaryOp::TypeOf => "typeof",
UnaryOp::Void => "void",
}
)
}
}
/// A bitwise operation between 2 values
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
/// A bitwise operation between 2 values
pub enum BitOp {
/// `a & b` - Bitwise and
And,
@ -107,10 +137,12 @@ pub enum BitOp {
Shl,
/// `a >> b` - Bit-shift right
Shr,
/// `a >>> b` - Zero-fill right shift
UShr,
}
impl Display for BitOp {
fn fmt(&self, f: &mut Formatter) -> Result {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
write!(
f,
"{}",
@ -120,14 +152,15 @@ impl Display for BitOp {
BitOp::Xor => "^",
BitOp::Shl => "<<",
BitOp::Shr => ">>",
BitOp::UShr => ">>>",
}
)
}
}
/// A comparative operation between 2 values
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
/// A comparitive operation between 2 values
pub enum CompOp {
/// `a == b` - Equality
Equal,
@ -148,7 +181,7 @@ pub enum CompOp {
}
impl Display for CompOp {
fn fmt(&self, f: &mut Formatter) -> Result {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
write!(
f,
"{}",
@ -166,9 +199,9 @@ impl Display for CompOp {
}
}
/// A logical operation between 2 boolean values
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
/// A logical operation between 2 boolean values
pub enum LogOp {
/// `a && b` - Logical and
And,
@ -177,7 +210,7 @@ pub enum LogOp {
}
impl Display for LogOp {
fn fmt(&self, f: &mut Formatter) -> Result {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
write!(
f,
"{}",
@ -189,9 +222,9 @@ impl Display for LogOp {
}
}
/// A binary operation between 2 values
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
/// A binary operation between 2 values
pub enum BinOp {
/// Numeric operation
Num(NumOp),
@ -211,10 +244,10 @@ impl Operator for BinOp {
}
fn get_precedence(&self) -> u64 {
match *self {
BinOp::Num(NumOp::Pow) => 4,
BinOp::Num(NumOp::Exp) => 4,
BinOp::Num(NumOp::Mul) | BinOp::Num(NumOp::Div) | BinOp::Num(NumOp::Mod) => 5,
BinOp::Num(NumOp::Add) | BinOp::Num(NumOp::Sub) => 6,
BinOp::Bit(BitOp::Shl) | BinOp::Bit(BitOp::Shr) => 7,
BinOp::Bit(BitOp::Shl) | BinOp::Bit(BitOp::Shr) | BinOp::Bit(BitOp::UShr) => 7,
BinOp::Comp(CompOp::LessThan)
| BinOp::Comp(CompOp::LessThanOrEqual)
| BinOp::Comp(CompOp::GreaterThan)
@ -234,7 +267,7 @@ impl Operator for BinOp {
}
impl Display for BinOp {
fn fmt(&self, f: &mut Formatter) -> Result {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
write!(
f,
"{}",
@ -249,9 +282,11 @@ impl Display for BinOp {
}
}
/// A binary operation between 2 values
///
/// <https://tc39.es/ecma262/#prod-AssignmentOperator>
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
/// A binary operation between 2 values
pub enum AssignOp {
/// `a += b` - Add assign
Add,
@ -260,7 +295,7 @@ pub enum AssignOp {
/// `a *= b` - Mul assign
Mul,
/// `a **= b` - Exponent assign
Pow,
Exp,
/// `a /= b` - Div assign
Div,
/// `a %= b` - Modulus assign
@ -278,7 +313,7 @@ pub enum AssignOp {
}
impl Display for AssignOp {
fn fmt(&self, f: &mut Formatter) -> Result {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
write!(
f,
"{}",
@ -286,7 +321,7 @@ impl Display for AssignOp {
AssignOp::Add => "+=",
AssignOp::Sub => "-=",
AssignOp::Mul => "*=",
AssignOp::Pow => "**=",
AssignOp::Exp => "**=",
AssignOp::Div => "/=",
AssignOp::Mod => "%=",
AssignOp::And => "&=",
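`BitOp::UShr` is new here, and the interpreter above still carries a `// TODO Fix` that evaluates `>>>` as a signed shift. A self-contained sketch of roughly what the zero-fill right shift needs; the `ToUint32` conversion is simplified to saturating casts:
// JavaScript's `>>>`: treat the left operand as an unsigned 32-bit integer
// and shift in zeros from the left.
fn unsigned_shift_right(a: f64, b: f64) -> f64 {
    let lhs = a as i32 as u32; // reinterpret the 32-bit pattern as unsigned
    let shift = (b as u32) & 31; // the shift count is taken modulo 32
    f64::from(lhs >> shift)
}
fn main() {
    assert_eq!(unsigned_shift_right(-1.0, 0.0), 4_294_967_295.0);
    assert_eq!(unsigned_shift_right(16.0, 2.0), 4.0);
}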

11
boa/src/syntax/ast/pos.rs

@ -1,13 +1,14 @@
#[cfg(feature = "serde-ast")]
use serde::{Deserialize, Serialize};
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Copy, PartialEq, Debug)]
/// A position in the Javascript source code
/// A position in the Javascript source code.
///
/// Stores both the column number and the line number
///
/// ## Similar Implementations
/// [V8: Location](https://cs.chromium.org/chromium/src/v8/src/parsing/scanner.h?type=cs&q=isValid+Location&g=0&l=216)
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct Position {
// Column number
pub column_number: u64,
@ -16,9 +17,9 @@ pub struct Position {
}
impl Position {
/// Create a new position, positions are usually created by Tokens..
/// Creates a new `Position`.
///
/// See [Token](struct.token/Token.html) for example usage
/// Positions are usually created by a [`Token`](struct.token/Token.html).
///
/// # Arguments
///

42
boa/src/syntax/ast/punc.rs

@ -1,11 +1,12 @@
use crate::syntax::ast::op::{BinOp, BitOp, CompOp, LogOp, NumOp};
use std::fmt::{Display, Error, Formatter};
#[cfg(feature = "serde-ast")]
use serde::{Deserialize, Serialize};
/// Punctuation
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(PartialEq, Clone, Copy, Debug)]
/// Punctuation
pub enum Punctuator {
/// `+`
Add,
@ -92,7 +93,7 @@ pub enum Punctuator {
/// `|`
Or,
/// `**`
Pow,
Exp,
/// `?`
Question,
/// `>>`
@ -112,8 +113,41 @@ pub enum Punctuator {
/// `^`
Xor,
}
impl Punctuator {
/// Attempts to convert a punctuator (`+`, `=`...) to a Binary Operator
///
/// If there is no match, `None` will be returned.
pub fn as_binop(self) -> Option<BinOp> {
match self {
Punctuator::Add => Some(BinOp::Num(NumOp::Add)),
Punctuator::Sub => Some(BinOp::Num(NumOp::Sub)),
Punctuator::Mul => Some(BinOp::Num(NumOp::Mul)),
Punctuator::Div => Some(BinOp::Num(NumOp::Div)),
Punctuator::Mod => Some(BinOp::Num(NumOp::Mod)),
Punctuator::And => Some(BinOp::Bit(BitOp::And)),
Punctuator::Or => Some(BinOp::Bit(BitOp::Or)),
Punctuator::Xor => Some(BinOp::Bit(BitOp::Xor)),
Punctuator::BoolAnd => Some(BinOp::Log(LogOp::And)),
Punctuator::BoolOr => Some(BinOp::Log(LogOp::Or)),
Punctuator::Eq => Some(BinOp::Comp(CompOp::Equal)),
Punctuator::NotEq => Some(BinOp::Comp(CompOp::NotEqual)),
Punctuator::StrictEq => Some(BinOp::Comp(CompOp::StrictEqual)),
Punctuator::StrictNotEq => Some(BinOp::Comp(CompOp::StrictNotEqual)),
Punctuator::LessThan => Some(BinOp::Comp(CompOp::LessThan)),
Punctuator::GreaterThan => Some(BinOp::Comp(CompOp::GreaterThan)),
Punctuator::GreaterThanOrEq => Some(BinOp::Comp(CompOp::GreaterThanOrEqual)),
Punctuator::LessThanOrEq => Some(BinOp::Comp(CompOp::LessThanOrEqual)),
Punctuator::LeftSh => Some(BinOp::Bit(BitOp::Shl)),
Punctuator::RightSh => Some(BinOp::Bit(BitOp::Shr)),
Punctuator::URightSh => Some(BinOp::Bit(BitOp::UShr)),
_ => None,
}
}
}
impl Display for Punctuator {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
write!(
f,
"{}",
@ -160,7 +194,7 @@ impl Display for Punctuator {
Punctuator::OpenBracket => "[",
Punctuator::OpenParen => "(",
Punctuator::Or => "|",
Punctuator::Pow => "**",
Punctuator::Exp => "**",
Punctuator::Question => "?",
Punctuator::RightSh => ">>",
Punctuator::Semicolon => ";",
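The new `Punctuator::as_binop` helper lets the parser map a punctuator token straight to a binary operator instead of repeating the table inline. A hedged usage sketch, assuming the modules are publicly reachable:
use boa::syntax::ast::{
    op::{BinOp, CompOp},
    punc::Punctuator,
};
fn main() {
    // `==` maps to the equality comparison operator.
    assert_eq!(Punctuator::Eq.as_binop(), Some(BinOp::Comp(CompOp::Equal)));
    // Punctuators with no binary-operator meaning (e.g. `;`) map to None.
    assert_eq!(Punctuator::Semicolon.as_binop(), None);
}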

47
boa/src/syntax/ast/token.rs

@ -4,36 +4,36 @@ use std::fmt::{Debug, Display, Formatter, Result};
#[cfg(feature = "serde-ast")]
use serde::{Deserialize, Serialize};
/// Represents a token
/// Represents a token.
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Debug, Clone, PartialEq)]
pub struct Token {
/// The token Data
pub data: TokenData,
pub kind: TokenKind,
/// Token position from original source code
pub pos: Position,
}
impl Token {
/// Create a new detailed token from the token data, line number and column number
pub fn new(data: TokenData, line_number: u64, column_number: u64) -> Self {
pub fn new(kind: TokenKind, line_number: u64, column_number: u64) -> Self {
Self {
data,
kind,
pos: Position::new(line_number, column_number),
}
}
}
impl Display for Token {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "{}", self.data)
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
write!(f, "{}", self.kind)
}
}
pub struct VecToken(Vec<Token>);
impl Debug for VecToken {
fn fmt(&self, f: &mut Formatter) -> Result {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
let mut buffer = String::new();
for token in &self.0 {
buffer.push_str(&token.to_string());
@ -41,10 +41,11 @@ impl Debug for VecToken {
write!(f, "{}", buffer)
}
}
/// Represents the type of Token and the data it has inside.
#[cfg_attr(feature = "serde-ast", derive(Serialize, Deserialize))]
#[derive(Clone, PartialEq, Debug)]
/// Represents the type of Token
pub enum TokenData {
pub enum TokenKind {
/// A boolean literal, which is either `true` or `false`
BooleanLiteral(bool),
/// The end of the file
@ -63,25 +64,25 @@ pub enum TokenData {
StringLiteral(String),
/// A regular expression, consisting of body and flags
RegularExpressionLiteral(String, String),
/// A comment
Comment(String),
/// Indicates the end of a line \n
LineTerminator,
}
impl Display for TokenData {
fn fmt(&self, f: &mut Formatter) -> Result {
impl Display for TokenKind {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
match *self {
TokenData::BooleanLiteral(ref val) => write!(f, "{}", val),
TokenData::EOF => write!(f, "end of file"),
TokenData::Identifier(ref ident) => write!(f, "{}", ident),
TokenData::Keyword(ref word) => write!(f, "{}", word),
TokenData::NullLiteral => write!(f, "null"),
TokenData::NumericLiteral(ref num) => write!(f, "{}", num),
TokenData::Punctuator(ref punc) => write!(f, "{}", punc),
TokenData::StringLiteral(ref lit) => write!(f, "{}", lit),
TokenData::RegularExpressionLiteral(ref body, ref flags) => {
TokenKind::BooleanLiteral(ref val) => write!(f, "{}", val),
TokenKind::EOF => write!(f, "end of file"),
TokenKind::Identifier(ref ident) => write!(f, "{}", ident),
TokenKind::Keyword(ref word) => write!(f, "{}", word),
TokenKind::NullLiteral => write!(f, "null"),
TokenKind::NumericLiteral(ref num) => write!(f, "{}", num),
TokenKind::Punctuator(ref punc) => write!(f, "{}", punc),
TokenKind::StringLiteral(ref lit) => write!(f, "{}", lit),
TokenKind::RegularExpressionLiteral(ref body, ref flags) => {
write!(f, "/{}/{}", body, flags)
}
TokenData::Comment(ref comm) => write!(f, "/*{}*/", comm),
TokenKind::LineTerminator => write!(f, "line terminator"),
}
}
}
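With `TokenData` renamed to `TokenKind` (and the field on `Token` renamed to `kind`), constructing and displaying a token looks like the sketch below; paths are assumed public as in the other examples:
use boa::syntax::ast::token::{Token, TokenKind};
fn main() {
    // A numeric literal token at line 1, column 1.
    let tok = Token::new(TokenKind::NumericLiteral(42.0), 1, 1);
    // `Display` for `Token` delegates to the kind, as shown above.
    assert_eq!(tok.to_string(), "42");
}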

64
boa/src/syntax/lexer/mod.rs

@ -8,7 +8,7 @@ mod tests;
use crate::syntax::ast::{
punc::Punctuator,
token::{Token, TokenData},
token::{Token, TokenKind},
};
use std::{
char::{decode_utf16, from_u32},
@ -84,7 +84,7 @@ impl LexerError {
}
impl fmt::Display for LexerError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.details)
}
}
@ -136,14 +136,14 @@ impl<'a> Lexer<'a> {
}
}
/// Push tokens onto the token queue
fn push_token(&mut self, tk: TokenData) {
fn push_token(&mut self, tk: TokenKind) {
self.tokens
.push(Token::new(tk, self.line_number, self.column_number))
}
/// Push a punctuation token
fn push_punc(&mut self, punc: Punctuator) {
self.push_token(TokenData::Punctuator(punc));
self.push_token(TokenKind::Punctuator(punc));
}
/// next fetches the next token and return it, or a LexerError if there are no more.
@ -154,23 +154,6 @@ impl<'a> Lexer<'a> {
}
}
/// read_line attempts to read until the end of the line and returns the String object or a LexerError
fn read_line(&mut self) -> Result<String, LexerError> {
let mut buf = String::new();
while self.preview_next().is_some() {
let ch = self.next();
match ch {
_ if ch.is_ascii_control() => {
break;
}
_ => {
buf.push(ch);
}
}
}
Ok(buf)
}
/// Preview the next character but don't actually increment
fn preview_next(&mut self) -> Option<char> {
self.buffer.peek().copied()
@ -368,7 +351,7 @@ impl<'a> Lexer<'a> {
}
}
let str_length = buf.len() as u64;
self.push_token(TokenData::StringLiteral(buf));
self.push_token(TokenKind::StringLiteral(buf));
// Why +1? Quotation marks are not included,
// So technically it would be +2 (for both " "), but we want to be 1 less
// to compensate for the incrementing at the top
@ -379,7 +362,7 @@ impl<'a> Lexer<'a> {
let num = match self.preview_next() {
None => {
self.push_token(TokenData::NumericLiteral(0 as f64));
self.push_token(TokenKind::NumericLiteral(0_f64));
return Ok(());
}
Some('x') | Some('X') => {
@ -423,7 +406,7 @@ impl<'a> Lexer<'a> {
}
};
self.push_token(TokenData::NumericLiteral(num));
self.push_token(TokenKind::NumericLiteral(num));
//11.8.3
if let Err(e) = self.check_after_numeric_literal() {
@ -481,7 +464,7 @@ impl<'a> Lexer<'a> {
}
}
// TODO make this a bit more safe -------------------------------VVVV
self.push_token(TokenData::NumericLiteral(
self.push_token(TokenKind::NumericLiteral(
f64::from_str(&buf).map_err(|_| LexerError::new("Could not convert value to f64"))?,
))
}
@ -497,14 +480,14 @@ impl<'a> Lexer<'a> {
// Match won't compare &String to &str, so we need to convert it first :(
let buf_compare: &str = &buf;
self.push_token(match buf_compare {
"true" => TokenData::BooleanLiteral(true),
"false" => TokenData::BooleanLiteral(false),
"null" => TokenData::NullLiteral,
"true" => TokenKind::BooleanLiteral(true),
"false" => TokenKind::BooleanLiteral(false),
"null" => TokenKind::NullLiteral,
slice => {
if let Ok(keyword) = FromStr::from_str(slice) {
TokenData::Keyword(keyword)
TokenKind::Keyword(keyword)
} else {
TokenData::Identifier(buf.clone())
TokenKind::Identifier(buf.clone())
}
}
});
@ -540,14 +523,16 @@ impl<'a> Lexer<'a> {
match ch {
// line comment
'/' => {
let comment = "/".to_owned() + &self.read_line()?;
self.push_token(TokenData::Comment(comment));
while self.preview_next().is_some() {
if self.next() == '\n' {
break;
}
}
self.line_number += 1;
self.column_number = 0;
}
// block comment
'*' => {
let mut buf = "/".to_owned();
let mut lines = 0;
loop {
if self.preview_next().is_none() {
@ -555,9 +540,7 @@ impl<'a> Lexer<'a> {
}
match self.next() {
'*' => {
buf.push('*');
if self.next_is('/') {
buf.push('/');
break;
}
}
@ -565,11 +548,9 @@ impl<'a> Lexer<'a> {
if next_ch == '\n' {
lines += 1;
}
buf.push(next_ch)
},
}
}
self.push_token(TokenData::Comment(buf));
self.line_number += lines;
self.column_number = 0;
}
@ -609,7 +590,7 @@ impl<'a> Lexer<'a> {
if regex {
// body was parsed, now look for flags
let flags = self.take_char_while(char::is_alphabetic)?;
self.push_token(TokenData::RegularExpressionLiteral(
self.push_token(TokenKind::RegularExpressionLiteral(
body, flags,
));
} else {
@ -617,11 +598,11 @@ impl<'a> Lexer<'a> {
// parse either div or assigndiv
self.buffer = original_buffer;
if self.next_is('=') {
self.push_token(TokenData::Punctuator(
self.push_token(TokenKind::Punctuator(
Punctuator::AssignDiv,
));
} else {
self.push_token(TokenData::Punctuator(Punctuator::Div));
self.push_token(TokenKind::Punctuator(Punctuator::Div));
}
}
}
@ -631,7 +612,7 @@ impl<'a> Lexer<'a> {
}
}
'*' => op!(self, Punctuator::AssignMul, Punctuator::Mul, {
'*' => vop!(self, Punctuator::AssignPow, Punctuator::Pow)
'*' => vop!(self, Punctuator::AssignPow, Punctuator::Exp)
}),
'+' => op!(self, Punctuator::AssignAdd, Punctuator::Add, {
'+' => Punctuator::Inc
@ -673,6 +654,7 @@ impl<'a> Lexer<'a> {
),
'~' => self.push_punc(Punctuator::Neg),
'\n' | '\u{2028}' | '\u{2029}' => {
self.push_token(TokenKind::LineTerminator);
self.line_number += 1;
self.column_number = 0;
}
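
The net effect of the lexer changes above is that comments never reach the token stream any more, while stand-alone newlines arrive as `TokenKind::LineTerminator`. Below is a hedged, caller-side sketch of filtering those out when newlines are not significant; the helper name, and the assumption that `Lexer::tokens` and `Token::kind` are reachable from the calling code, are illustrative only.

```rust
// Hypothetical caller-side helper; assumes `Lexer::tokens` and `Token::kind`
// are accessible from here.
use boa::syntax::{
    ast::token::{Token, TokenKind},
    lexer::Lexer,
};

/// Lexes `src` and drops the line-terminator tokens, leaving only the
/// tokens a simple consumer would care about.
fn significant_tokens(src: &str) -> Vec<Token> {
    let mut lexer = Lexer::new(src);
    lexer.lex().expect("failed to lex");
    lexer
        .tokens
        .into_iter()
        .filter(|token| token.kind != TokenKind::LineTerminator)
        .collect()
}
```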

423
boa/src/syntax/lexer/tests.rs

@ -6,12 +6,12 @@ use crate::syntax::ast::keyword::Keyword;
#[test]
fn check_single_line_comment() {
let s1 = "var \n//=\nx";
let s1 = "var \n//This is a comment\ntrue";
let mut lexer = Lexer::new(s1);
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].data, TokenData::Keyword(Keyword::Var));
assert_eq!(lexer.tokens[1].data, TokenData::Comment("//=".to_owned()));
assert_eq!(lexer.tokens[2].data, TokenData::Identifier("x".to_string()));
assert_eq!(lexer.tokens[0].kind, TokenKind::Keyword(Keyword::Var));
assert_eq!(lexer.tokens[1].kind, TokenKind::LineTerminator);
assert_eq!(lexer.tokens[2].kind, TokenKind::BooleanLiteral(true));
}
#[test]
@ -19,12 +19,8 @@ fn check_multi_line_comment() {
let s = "var /* await \n break \n*/ x";
let mut lexer = Lexer::new(s);
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].data, TokenData::Keyword(Keyword::Var));
assert_eq!(
lexer.tokens[1].data,
TokenData::Comment("/* await \n break \n*/".to_owned())
);
assert_eq!(lexer.tokens[2].data, TokenData::Identifier("x".to_string()));
assert_eq!(lexer.tokens[0].kind, TokenKind::Keyword(Keyword::Var));
assert_eq!(lexer.tokens[1].kind, TokenKind::Identifier("x".to_string()));
}
#[test]
@ -33,13 +29,13 @@ fn check_string() {
let mut lexer = Lexer::new(s);
lexer.lex().expect("failed to lex");
assert_eq!(
lexer.tokens[0].data,
TokenData::StringLiteral("aaa".to_string())
lexer.tokens[0].kind,
TokenKind::StringLiteral("aaa".to_string())
);
assert_eq!(
lexer.tokens[1].data,
TokenData::StringLiteral("bbb".to_string())
lexer.tokens[1].kind,
TokenKind::StringLiteral("bbb".to_string())
);
}
@ -52,191 +48,191 @@ fn check_punctuators() {
let mut lexer = Lexer::new(s);
lexer.lex().expect("failed to lex");
assert_eq!(
lexer.tokens[0].data,
TokenData::Punctuator(Punctuator::OpenBlock)
lexer.tokens[0].kind,
TokenKind::Punctuator(Punctuator::OpenBlock)
);
assert_eq!(
lexer.tokens[1].data,
TokenData::Punctuator(Punctuator::OpenParen)
lexer.tokens[1].kind,
TokenKind::Punctuator(Punctuator::OpenParen)
);
assert_eq!(
lexer.tokens[2].data,
TokenData::Punctuator(Punctuator::CloseParen)
lexer.tokens[2].kind,
TokenKind::Punctuator(Punctuator::CloseParen)
);
assert_eq!(
lexer.tokens[3].data,
TokenData::Punctuator(Punctuator::OpenBracket)
lexer.tokens[3].kind,
TokenKind::Punctuator(Punctuator::OpenBracket)
);
assert_eq!(
lexer.tokens[4].data,
TokenData::Punctuator(Punctuator::CloseBracket)
lexer.tokens[4].kind,
TokenKind::Punctuator(Punctuator::CloseBracket)
);
assert_eq!(lexer.tokens[5].data, TokenData::Punctuator(Punctuator::Dot));
assert_eq!(lexer.tokens[5].kind, TokenKind::Punctuator(Punctuator::Dot));
assert_eq!(
lexer.tokens[6].data,
TokenData::Punctuator(Punctuator::Spread)
lexer.tokens[6].kind,
TokenKind::Punctuator(Punctuator::Spread)
);
assert_eq!(
lexer.tokens[7].data,
TokenData::Punctuator(Punctuator::Semicolon)
lexer.tokens[7].kind,
TokenKind::Punctuator(Punctuator::Semicolon)
);
assert_eq!(
lexer.tokens[8].data,
TokenData::Punctuator(Punctuator::Comma)
lexer.tokens[8].kind,
TokenKind::Punctuator(Punctuator::Comma)
);
assert_eq!(
lexer.tokens[9].data,
TokenData::Punctuator(Punctuator::LessThan)
lexer.tokens[9].kind,
TokenKind::Punctuator(Punctuator::LessThan)
);
assert_eq!(
lexer.tokens[10].data,
TokenData::Punctuator(Punctuator::GreaterThan)
lexer.tokens[10].kind,
TokenKind::Punctuator(Punctuator::GreaterThan)
);
assert_eq!(
lexer.tokens[11].data,
TokenData::Punctuator(Punctuator::LessThanOrEq)
lexer.tokens[11].kind,
TokenKind::Punctuator(Punctuator::LessThanOrEq)
);
assert_eq!(
lexer.tokens[12].data,
TokenData::Punctuator(Punctuator::GreaterThanOrEq)
lexer.tokens[12].kind,
TokenKind::Punctuator(Punctuator::GreaterThanOrEq)
);
assert_eq!(lexer.tokens[13].data, TokenData::Punctuator(Punctuator::Eq));
assert_eq!(lexer.tokens[13].kind, TokenKind::Punctuator(Punctuator::Eq));
assert_eq!(
lexer.tokens[14].data,
TokenData::Punctuator(Punctuator::NotEq)
lexer.tokens[14].kind,
TokenKind::Punctuator(Punctuator::NotEq)
);
assert_eq!(
lexer.tokens[15].data,
TokenData::Punctuator(Punctuator::StrictEq)
lexer.tokens[15].kind,
TokenKind::Punctuator(Punctuator::StrictEq)
);
assert_eq!(
lexer.tokens[16].data,
TokenData::Punctuator(Punctuator::StrictNotEq)
lexer.tokens[16].kind,
TokenKind::Punctuator(Punctuator::StrictNotEq)
);
assert_eq!(
lexer.tokens[17].data,
TokenData::Punctuator(Punctuator::Add)
lexer.tokens[17].kind,
TokenKind::Punctuator(Punctuator::Add)
);
assert_eq!(
lexer.tokens[18].data,
TokenData::Punctuator(Punctuator::Sub)
lexer.tokens[18].kind,
TokenKind::Punctuator(Punctuator::Sub)
);
assert_eq!(
lexer.tokens[19].data,
TokenData::Punctuator(Punctuator::Mul)
lexer.tokens[19].kind,
TokenKind::Punctuator(Punctuator::Mul)
);
assert_eq!(
lexer.tokens[20].data,
TokenData::Punctuator(Punctuator::Mod)
lexer.tokens[20].kind,
TokenKind::Punctuator(Punctuator::Mod)
);
assert_eq!(
lexer.tokens[21].data,
TokenData::Punctuator(Punctuator::Dec)
lexer.tokens[21].kind,
TokenKind::Punctuator(Punctuator::Dec)
);
assert_eq!(
lexer.tokens[22].data,
TokenData::Punctuator(Punctuator::LeftSh)
lexer.tokens[22].kind,
TokenKind::Punctuator(Punctuator::LeftSh)
);
assert_eq!(
lexer.tokens[23].data,
TokenData::Punctuator(Punctuator::RightSh)
lexer.tokens[23].kind,
TokenKind::Punctuator(Punctuator::RightSh)
);
assert_eq!(
lexer.tokens[24].data,
TokenData::Punctuator(Punctuator::URightSh)
lexer.tokens[24].kind,
TokenKind::Punctuator(Punctuator::URightSh)
);
assert_eq!(
lexer.tokens[25].data,
TokenData::Punctuator(Punctuator::And)
lexer.tokens[25].kind,
TokenKind::Punctuator(Punctuator::And)
);
assert_eq!(lexer.tokens[26].data, TokenData::Punctuator(Punctuator::Or));
assert_eq!(lexer.tokens[26].kind, TokenKind::Punctuator(Punctuator::Or));
assert_eq!(
lexer.tokens[27].data,
TokenData::Punctuator(Punctuator::Xor)
lexer.tokens[27].kind,
TokenKind::Punctuator(Punctuator::Xor)
);
assert_eq!(
lexer.tokens[28].data,
TokenData::Punctuator(Punctuator::Not)
lexer.tokens[28].kind,
TokenKind::Punctuator(Punctuator::Not)
);
assert_eq!(
lexer.tokens[29].data,
TokenData::Punctuator(Punctuator::Neg)
lexer.tokens[29].kind,
TokenKind::Punctuator(Punctuator::Neg)
);
assert_eq!(
lexer.tokens[30].data,
TokenData::Punctuator(Punctuator::BoolAnd)
lexer.tokens[30].kind,
TokenKind::Punctuator(Punctuator::BoolAnd)
);
assert_eq!(
lexer.tokens[31].data,
TokenData::Punctuator(Punctuator::BoolOr)
lexer.tokens[31].kind,
TokenKind::Punctuator(Punctuator::BoolOr)
);
assert_eq!(
lexer.tokens[32].data,
TokenData::Punctuator(Punctuator::Question)
lexer.tokens[32].kind,
TokenKind::Punctuator(Punctuator::Question)
);
assert_eq!(
lexer.tokens[33].data,
TokenData::Punctuator(Punctuator::Colon)
lexer.tokens[33].kind,
TokenKind::Punctuator(Punctuator::Colon)
);
assert_eq!(
lexer.tokens[34].data,
TokenData::Punctuator(Punctuator::Assign)
lexer.tokens[34].kind,
TokenKind::Punctuator(Punctuator::Assign)
);
assert_eq!(
lexer.tokens[35].data,
TokenData::Punctuator(Punctuator::AssignAdd)
lexer.tokens[35].kind,
TokenKind::Punctuator(Punctuator::AssignAdd)
);
assert_eq!(
lexer.tokens[36].data,
TokenData::Punctuator(Punctuator::AssignSub)
lexer.tokens[36].kind,
TokenKind::Punctuator(Punctuator::AssignSub)
);
assert_eq!(
lexer.tokens[37].data,
TokenData::Punctuator(Punctuator::AssignMul)
lexer.tokens[37].kind,
TokenKind::Punctuator(Punctuator::AssignMul)
);
assert_eq!(
lexer.tokens[38].data,
TokenData::Punctuator(Punctuator::AssignAnd)
lexer.tokens[38].kind,
TokenKind::Punctuator(Punctuator::AssignAnd)
);
assert_eq!(
lexer.tokens[39].data,
TokenData::Punctuator(Punctuator::AssignPow)
lexer.tokens[39].kind,
TokenKind::Punctuator(Punctuator::AssignPow)
);
assert_eq!(
lexer.tokens[40].data,
TokenData::Punctuator(Punctuator::Inc)
lexer.tokens[40].kind,
TokenKind::Punctuator(Punctuator::Inc)
);
assert_eq!(
lexer.tokens[41].data,
TokenData::Punctuator(Punctuator::Pow)
lexer.tokens[41].kind,
TokenKind::Punctuator(Punctuator::Exp)
);
assert_eq!(
lexer.tokens[42].data,
TokenData::Punctuator(Punctuator::AssignLeftSh)
lexer.tokens[42].kind,
TokenKind::Punctuator(Punctuator::AssignLeftSh)
);
assert_eq!(
lexer.tokens[43].data,
TokenData::Punctuator(Punctuator::AssignRightSh)
lexer.tokens[43].kind,
TokenKind::Punctuator(Punctuator::AssignRightSh)
);
assert_eq!(
lexer.tokens[44].data,
TokenData::Punctuator(Punctuator::AssignURightSh)
lexer.tokens[44].kind,
TokenKind::Punctuator(Punctuator::AssignURightSh)
);
assert_eq!(
lexer.tokens[45].data,
TokenData::Punctuator(Punctuator::AssignAnd)
lexer.tokens[45].kind,
TokenKind::Punctuator(Punctuator::AssignAnd)
);
assert_eq!(
lexer.tokens[46].data,
TokenData::Punctuator(Punctuator::AssignOr)
lexer.tokens[46].kind,
TokenKind::Punctuator(Punctuator::AssignOr)
);
assert_eq!(
lexer.tokens[47].data,
TokenData::Punctuator(Punctuator::AssignXor)
lexer.tokens[47].kind,
TokenKind::Punctuator(Punctuator::AssignXor)
);
assert_eq!(
lexer.tokens[48].data,
TokenData::Punctuator(Punctuator::Arrow)
lexer.tokens[48].kind,
TokenKind::Punctuator(Punctuator::Arrow)
);
}
@ -249,43 +245,43 @@ fn check_keywords() {
let mut lexer = Lexer::new(s);
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].data, TokenData::Keyword(Keyword::Await));
assert_eq!(lexer.tokens[1].data, TokenData::Keyword(Keyword::Break));
assert_eq!(lexer.tokens[2].data, TokenData::Keyword(Keyword::Case));
assert_eq!(lexer.tokens[3].data, TokenData::Keyword(Keyword::Catch));
assert_eq!(lexer.tokens[4].data, TokenData::Keyword(Keyword::Class));
assert_eq!(lexer.tokens[5].data, TokenData::Keyword(Keyword::Const));
assert_eq!(lexer.tokens[6].data, TokenData::Keyword(Keyword::Continue));
assert_eq!(lexer.tokens[7].data, TokenData::Keyword(Keyword::Debugger));
assert_eq!(lexer.tokens[8].data, TokenData::Keyword(Keyword::Default));
assert_eq!(lexer.tokens[9].data, TokenData::Keyword(Keyword::Delete));
assert_eq!(lexer.tokens[10].data, TokenData::Keyword(Keyword::Do));
assert_eq!(lexer.tokens[11].data, TokenData::Keyword(Keyword::Else));
assert_eq!(lexer.tokens[12].data, TokenData::Keyword(Keyword::Export));
assert_eq!(lexer.tokens[13].data, TokenData::Keyword(Keyword::Extends));
assert_eq!(lexer.tokens[14].data, TokenData::Keyword(Keyword::Finally));
assert_eq!(lexer.tokens[15].data, TokenData::Keyword(Keyword::For));
assert_eq!(lexer.tokens[16].data, TokenData::Keyword(Keyword::Function));
assert_eq!(lexer.tokens[17].data, TokenData::Keyword(Keyword::If));
assert_eq!(lexer.tokens[18].data, TokenData::Keyword(Keyword::Import));
assert_eq!(lexer.tokens[19].data, TokenData::Keyword(Keyword::In));
assert_eq!(
lexer.tokens[20].data,
TokenData::Keyword(Keyword::InstanceOf)
);
assert_eq!(lexer.tokens[21].data, TokenData::Keyword(Keyword::New));
assert_eq!(lexer.tokens[22].data, TokenData::Keyword(Keyword::Return));
assert_eq!(lexer.tokens[23].data, TokenData::Keyword(Keyword::Super));
assert_eq!(lexer.tokens[24].data, TokenData::Keyword(Keyword::Switch));
assert_eq!(lexer.tokens[25].data, TokenData::Keyword(Keyword::This));
assert_eq!(lexer.tokens[26].data, TokenData::Keyword(Keyword::Throw));
assert_eq!(lexer.tokens[27].data, TokenData::Keyword(Keyword::Try));
assert_eq!(lexer.tokens[28].data, TokenData::Keyword(Keyword::TypeOf));
assert_eq!(lexer.tokens[29].data, TokenData::Keyword(Keyword::Var));
assert_eq!(lexer.tokens[30].data, TokenData::Keyword(Keyword::Void));
assert_eq!(lexer.tokens[31].data, TokenData::Keyword(Keyword::While));
assert_eq!(lexer.tokens[32].data, TokenData::Keyword(Keyword::With));
assert_eq!(lexer.tokens[33].data, TokenData::Keyword(Keyword::Yield));
assert_eq!(lexer.tokens[0].kind, TokenKind::Keyword(Keyword::Await));
assert_eq!(lexer.tokens[1].kind, TokenKind::Keyword(Keyword::Break));
assert_eq!(lexer.tokens[2].kind, TokenKind::Keyword(Keyword::Case));
assert_eq!(lexer.tokens[3].kind, TokenKind::Keyword(Keyword::Catch));
assert_eq!(lexer.tokens[4].kind, TokenKind::Keyword(Keyword::Class));
assert_eq!(lexer.tokens[5].kind, TokenKind::Keyword(Keyword::Const));
assert_eq!(lexer.tokens[6].kind, TokenKind::Keyword(Keyword::Continue));
assert_eq!(lexer.tokens[7].kind, TokenKind::Keyword(Keyword::Debugger));
assert_eq!(lexer.tokens[8].kind, TokenKind::Keyword(Keyword::Default));
assert_eq!(lexer.tokens[9].kind, TokenKind::Keyword(Keyword::Delete));
assert_eq!(lexer.tokens[10].kind, TokenKind::Keyword(Keyword::Do));
assert_eq!(lexer.tokens[11].kind, TokenKind::Keyword(Keyword::Else));
assert_eq!(lexer.tokens[12].kind, TokenKind::Keyword(Keyword::Export));
assert_eq!(lexer.tokens[13].kind, TokenKind::Keyword(Keyword::Extends));
assert_eq!(lexer.tokens[14].kind, TokenKind::Keyword(Keyword::Finally));
assert_eq!(lexer.tokens[15].kind, TokenKind::Keyword(Keyword::For));
assert_eq!(lexer.tokens[16].kind, TokenKind::Keyword(Keyword::Function));
assert_eq!(lexer.tokens[17].kind, TokenKind::Keyword(Keyword::If));
assert_eq!(lexer.tokens[18].kind, TokenKind::Keyword(Keyword::Import));
assert_eq!(lexer.tokens[19].kind, TokenKind::Keyword(Keyword::In));
assert_eq!(
lexer.tokens[20].kind,
TokenKind::Keyword(Keyword::InstanceOf)
);
assert_eq!(lexer.tokens[21].kind, TokenKind::Keyword(Keyword::New));
assert_eq!(lexer.tokens[22].kind, TokenKind::Keyword(Keyword::Return));
assert_eq!(lexer.tokens[23].kind, TokenKind::Keyword(Keyword::Super));
assert_eq!(lexer.tokens[24].kind, TokenKind::Keyword(Keyword::Switch));
assert_eq!(lexer.tokens[25].kind, TokenKind::Keyword(Keyword::This));
assert_eq!(lexer.tokens[26].kind, TokenKind::Keyword(Keyword::Throw));
assert_eq!(lexer.tokens[27].kind, TokenKind::Keyword(Keyword::Try));
assert_eq!(lexer.tokens[28].kind, TokenKind::Keyword(Keyword::TypeOf));
assert_eq!(lexer.tokens[29].kind, TokenKind::Keyword(Keyword::Var));
assert_eq!(lexer.tokens[30].kind, TokenKind::Keyword(Keyword::Void));
assert_eq!(lexer.tokens[31].kind, TokenKind::Keyword(Keyword::While));
assert_eq!(lexer.tokens[32].kind, TokenKind::Keyword(Keyword::With));
assert_eq!(lexer.tokens[33].kind, TokenKind::Keyword(Keyword::Yield));
}
#[test]
@ -293,15 +289,15 @@ fn check_variable_definition_tokens() {
let s = "let a = 'hello';";
let mut lexer = Lexer::new(s);
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].data, TokenData::Keyword(Keyword::Let));
assert_eq!(lexer.tokens[1].data, TokenData::Identifier("a".to_string()));
assert_eq!(lexer.tokens[0].kind, TokenKind::Keyword(Keyword::Let));
assert_eq!(lexer.tokens[1].kind, TokenKind::Identifier("a".to_string()));
assert_eq!(
lexer.tokens[2].data,
TokenData::Punctuator(Punctuator::Assign)
lexer.tokens[2].kind,
TokenKind::Punctuator(Punctuator::Assign)
);
assert_eq!(
lexer.tokens[3].data,
TokenData::StringLiteral("hello".to_string())
lexer.tokens[3].kind,
TokenKind::StringLiteral("hello".to_string())
);
}
@ -332,37 +328,26 @@ fn check_positions() {
// Semi Colon token starts on column 27
assert_eq!(lexer.tokens[6].pos.column_number, 27);
assert_eq!(lexer.tokens[6].pos.line_number, 1);
// Comment start on column 29
// Semi Colon token starts on column 27
assert_eq!(lexer.tokens[7].pos.column_number, 29);
assert_eq!(lexer.tokens[7].pos.line_number, 1);
}
#[test]
fn check_line_numbers() {
let s = "// Copyright (C) 2017 Ecma International. All rights reserved.\n\
// This code is governed by the BSD license found in the LICENSE file.\n\
/*---\n\
description: |\n \
Collection of assertion functions used throughout test262\n\
defines: [assert]\n\
---*/\n\n\n\
function assert(mustBeTrue, message) {";
let s = "x\ny\n";
let mut lexer = Lexer::new(s);
lexer.lex().expect("failed to lex");
// The first column is 1 (not zero indexed), first line is also 1
assert_eq!(lexer.tokens[0].pos.column_number, 1);
assert_eq!(lexer.tokens[0].pos.line_number, 1);
// Second comment starts on line 2
assert_eq!(lexer.tokens[1].pos.column_number, 1);
assert_eq!(lexer.tokens[1].pos.line_number, 2);
// Multiline comment starts on line 3
assert_eq!(lexer.tokens[1].pos.column_number, 2);
assert_eq!(lexer.tokens[1].pos.line_number, 1);
assert_eq!(lexer.tokens[2].pos.column_number, 1);
assert_eq!(lexer.tokens[2].pos.line_number, 3);
// Function Token is on line 10
assert_eq!(lexer.tokens[3].pos.column_number, 1);
assert_eq!(lexer.tokens[3].pos.line_number, 10);
assert_eq!(lexer.tokens[2].pos.line_number, 2);
assert_eq!(lexer.tokens[3].pos.column_number, 2);
assert_eq!(lexer.tokens[3].pos.line_number, 2);
}
// Increment/Decrement
@ -372,12 +357,12 @@ fn check_decrement_advances_lexer_2_places() {
let s = "let a = b--;";
let mut lexer = Lexer::new(s);
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[4].data, TokenData::Punctuator(Punctuator::Dec));
assert_eq!(lexer.tokens[4].kind, TokenKind::Punctuator(Punctuator::Dec));
// Decrementing means adding 2 characters '--', the lexer should consume it as a single token
// and move the cursor forward by 2, meaning the next token should be a semicolon
assert_eq!(
lexer.tokens[5].data,
TokenData::Punctuator(Punctuator::Semicolon)
lexer.tokens[5].kind,
TokenKind::Punctuator(Punctuator::Semicolon)
);
}
@ -388,24 +373,24 @@ fn numbers() {
);
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].data, TokenData::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].data, TokenData::NumericLiteral(2.0));
assert_eq!(lexer.tokens[2].data, TokenData::NumericLiteral(52.0));
assert_eq!(lexer.tokens[3].data, TokenData::NumericLiteral(46.0));
assert_eq!(lexer.tokens[4].data, TokenData::NumericLiteral(7.89));
assert_eq!(lexer.tokens[5].data, TokenData::NumericLiteral(42.0));
assert_eq!(lexer.tokens[6].data, TokenData::NumericLiteral(5000.0));
assert_eq!(lexer.tokens[7].data, TokenData::NumericLiteral(5000.0));
assert_eq!(lexer.tokens[8].data, TokenData::NumericLiteral(0.005));
assert_eq!(lexer.tokens[9].data, TokenData::NumericLiteral(2.0));
assert_eq!(lexer.tokens[10].data, TokenData::NumericLiteral(83.0));
assert_eq!(lexer.tokens[11].data, TokenData::NumericLiteral(999.0));
assert_eq!(lexer.tokens[12].data, TokenData::NumericLiteral(10.0));
assert_eq!(lexer.tokens[13].data, TokenData::NumericLiteral(0.1));
assert_eq!(lexer.tokens[14].data, TokenData::NumericLiteral(10.0));
assert_eq!(lexer.tokens[15].data, TokenData::NumericLiteral(10.0));
assert_eq!(lexer.tokens[16].data, TokenData::NumericLiteral(0.0));
assert_eq!(lexer.tokens[17].data, TokenData::NumericLiteral(0.12));
assert_eq!(lexer.tokens[0].kind, TokenKind::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].kind, TokenKind::NumericLiteral(2.0));
assert_eq!(lexer.tokens[2].kind, TokenKind::NumericLiteral(52.0));
assert_eq!(lexer.tokens[3].kind, TokenKind::NumericLiteral(46.0));
assert_eq!(lexer.tokens[4].kind, TokenKind::NumericLiteral(7.89));
assert_eq!(lexer.tokens[5].kind, TokenKind::NumericLiteral(42.0));
assert_eq!(lexer.tokens[6].kind, TokenKind::NumericLiteral(5000.0));
assert_eq!(lexer.tokens[7].kind, TokenKind::NumericLiteral(5000.0));
assert_eq!(lexer.tokens[8].kind, TokenKind::NumericLiteral(0.005));
assert_eq!(lexer.tokens[9].kind, TokenKind::NumericLiteral(2.0));
assert_eq!(lexer.tokens[10].kind, TokenKind::NumericLiteral(83.0));
assert_eq!(lexer.tokens[11].kind, TokenKind::NumericLiteral(999.0));
assert_eq!(lexer.tokens[12].kind, TokenKind::NumericLiteral(10.0));
assert_eq!(lexer.tokens[13].kind, TokenKind::NumericLiteral(0.1));
assert_eq!(lexer.tokens[14].kind, TokenKind::NumericLiteral(10.0));
assert_eq!(lexer.tokens[15].kind, TokenKind::NumericLiteral(10.0));
assert_eq!(lexer.tokens[16].kind, TokenKind::NumericLiteral(0.0));
assert_eq!(lexer.tokens[17].kind, TokenKind::NumericLiteral(0.12));
}
#[test]
@ -418,8 +403,8 @@ fn test_single_number_without_semicolon() {
fn test_number_followed_by_dot() {
let mut lexer = Lexer::new("1..");
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].data, TokenData::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].data, TokenData::Punctuator(Punctuator::Dot));
assert_eq!(lexer.tokens[0].kind, TokenKind::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Dot));
}
#[test]
@ -427,8 +412,8 @@ fn test_regex_literal() {
let mut lexer = Lexer::new("/(?:)/");
lexer.lex().expect("failed to lex");
assert_eq!(
lexer.tokens[0].data,
TokenData::RegularExpressionLiteral("(?:)".to_string(), "".to_string())
lexer.tokens[0].kind,
TokenKind::RegularExpressionLiteral("(?:)".to_string(), "".to_string())
);
}
@ -437,8 +422,8 @@ fn test_regex_literal_flags() {
let mut lexer = Lexer::new(r"/\/[^\/]*\/*/gmi");
lexer.lex().expect("failed to lex");
assert_eq!(
lexer.tokens[0].data,
TokenData::RegularExpressionLiteral("\\/[^\\/]*\\/*".to_string(), "gmi".to_string())
lexer.tokens[0].kind,
TokenKind::RegularExpressionLiteral("\\/[^\\/]*\\/*".to_string(), "gmi".to_string())
);
}
@ -446,55 +431,55 @@ fn test_regex_literal_flags() {
fn test_addition_no_spaces() {
let mut lexer = Lexer::new("1+1");
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].data, TokenData::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].data, TokenData::Punctuator(Punctuator::Add));
assert_eq!(lexer.tokens[2].data, TokenData::NumericLiteral(1.0));
assert_eq!(lexer.tokens[0].kind, TokenKind::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Add));
assert_eq!(lexer.tokens[2].kind, TokenKind::NumericLiteral(1.0));
}
#[test]
fn test_addition_no_spaces_left_side() {
let mut lexer = Lexer::new("1+ 1");
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].data, TokenData::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].data, TokenData::Punctuator(Punctuator::Add));
assert_eq!(lexer.tokens[2].data, TokenData::NumericLiteral(1.0));
assert_eq!(lexer.tokens[0].kind, TokenKind::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Add));
assert_eq!(lexer.tokens[2].kind, TokenKind::NumericLiteral(1.0));
}
#[test]
fn test_addition_no_spaces_right_side() {
let mut lexer = Lexer::new("1 +1");
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].data, TokenData::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].data, TokenData::Punctuator(Punctuator::Add));
assert_eq!(lexer.tokens[2].data, TokenData::NumericLiteral(1.0));
assert_eq!(lexer.tokens[0].kind, TokenKind::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Add));
assert_eq!(lexer.tokens[2].kind, TokenKind::NumericLiteral(1.0));
}
#[test]
fn test_addition_no_spaces_e_number_left_side() {
let mut lexer = Lexer::new("1e2+ 1");
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].data, TokenData::NumericLiteral(100.0));
assert_eq!(lexer.tokens[1].data, TokenData::Punctuator(Punctuator::Add));
assert_eq!(lexer.tokens[2].data, TokenData::NumericLiteral(1.0));
assert_eq!(lexer.tokens[0].kind, TokenKind::NumericLiteral(100.0));
assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Add));
assert_eq!(lexer.tokens[2].kind, TokenKind::NumericLiteral(1.0));
}
#[test]
fn test_addition_no_spaces_e_number_right_side() {
let mut lexer = Lexer::new("1 +1e3");
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].data, TokenData::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].data, TokenData::Punctuator(Punctuator::Add));
assert_eq!(lexer.tokens[2].data, TokenData::NumericLiteral(1000.0));
assert_eq!(lexer.tokens[0].kind, TokenKind::NumericLiteral(1.0));
assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Add));
assert_eq!(lexer.tokens[2].kind, TokenKind::NumericLiteral(1000.0));
}
#[test]
fn test_addition_no_spaces_e_number() {
let mut lexer = Lexer::new("1e3+1e11");
lexer.lex().expect("failed to lex");
assert_eq!(lexer.tokens[0].data, TokenData::NumericLiteral(1000.0));
assert_eq!(lexer.tokens[1].data, TokenData::Punctuator(Punctuator::Add));
assert_eq!(lexer.tokens[0].kind, TokenKind::NumericLiteral(1000.0));
assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Add));
assert_eq!(
lexer.tokens[2].data,
TokenData::NumericLiteral(100_000_000_000.0)
lexer.tokens[2].kind,
TokenKind::NumericLiteral(100_000_000_000.0)
);
}

102
boa/src/syntax/parser/cursor.rs

@ -0,0 +1,102 @@
//! Cursor implementation for the parser.
use crate::syntax::ast::token::Token;
/// Token cursor.
///
/// This internal structure gives basic testable operations to the parser.
#[derive(Debug, Clone, Default)]
pub(super) struct Cursor<'a> {
/// The tokens being input.
tokens: &'a [Token],
/// The current position within the tokens.
pos: usize,
}
impl<'a> Cursor<'a> {
/// Creates a new cursor.
pub(super) fn new(tokens: &'a [Token]) -> Self {
Self {
tokens,
..Self::default()
}
}
/// Retrieves the current position of the cursor in the token stream.
pub(super) fn pos(&self) -> usize {
self.pos
}
/// Moves the cursor to the given position.
/// This is intended to always be used together with `Cursor::pos()`.
pub(super) fn seek(&mut self, pos: usize) {
self.pos = pos
}
/// Moves the cursor to the next token and returns the token.
pub(super) fn next(&mut self) -> Option<&'a Token> {
let token = self.tokens.get(self.pos);
if self.pos != self.tokens.len() {
self.pos += 1;
}
token
}
/// Moves the cursor to the next token after skipping tokens based on the predicate.
pub(super) fn next_skip<P>(&mut self, mut skip: P) -> Option<&'a Token>
where
P: FnMut(&Token) -> bool,
{
while let Some(token) = self.tokens.get(self.pos) {
self.pos += 1;
if !skip(token) {
return Some(token);
}
}
None
}
/// Peeks the token `skip` positions ahead without moving the cursor.
pub(super) fn peek(&self, skip: usize) -> Option<&'a Token> {
self.tokens.get(self.pos + skip)
}
/// Peeks the next token after skipping tokens based on the predicate.
pub(super) fn peek_skip<P>(&self, mut skip: P) -> Option<&'a Token>
where
P: FnMut(&Token) -> bool,
{
let mut current = self.pos;
while let Some(token) = self.tokens.get(current) {
if !skip(token) {
return Some(token);
}
current += 1;
}
None
}
/// Moves the cursor back to the previous token.
pub(super) fn back(&mut self) {
assert!(
self.pos > 0,
"cannot go back in a cursor that is at the beginning of the list of tokens"
);
self.pos -= 1;
}
/// Peeks the previous token without moving the cursor.
pub(super) fn peek_prev(&self) -> Option<&'a Token> {
if self.pos == 0 {
None
} else {
self.tokens.get(self.pos - 1)
}
}
}
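
A hedged sketch of how a parsing routine might drive this cursor: save the position, look ahead with a skip predicate, and rewind with `seek` on failure. Since `Cursor` is `pub(super)`, this would have to live inside `boa/src/syntax/parser`; the function name and the `let`-keyword check are made up for illustration.

```rust
// Sketch assumed to live in `boa/src/syntax/parser/mod.rs`, next to `mod cursor;`.
use self::cursor::Cursor;
use crate::syntax::ast::{
    keyword::Keyword,
    token::{Token, TokenKind},
};

/// Tries to consume a `let` keyword, rewinding the cursor if the next
/// significant token is something else.
fn try_let_keyword<'a>(cursor: &mut Cursor<'a>) -> Option<&'a Token> {
    // Remember where we started so we can backtrack later.
    let start = cursor.pos();

    // Peek past insignificant line terminators without consuming anything.
    let is_let = cursor
        .peek_skip(|tok| tok.kind == TokenKind::LineTerminator)
        .map_or(false, |tok| tok.kind == TokenKind::Keyword(Keyword::Let));

    if is_let {
        // Consume tokens up to and including the keyword.
        cursor.next_skip(|tok| tok.kind == TokenKind::LineTerminator)
    } else {
        // Not a `let`: restore the saved position and report failure.
        cursor.seek(start);
        None
    }
}
```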

2189
boa/src/syntax/parser/mod.rs

File diff suppressed because it is too large

540
boa/src/syntax/parser/tests.rs

@ -1,24 +1,24 @@
//! Tests for the parser.
use super::*;
use crate::syntax::ast::{constant::Const, op::BinOp};
use crate::syntax::ast::{constant::Const, op::BinOp, op::BitOp};
use crate::syntax::{
ast::expr::{Expr, ExprDef},
ast::node::{FormalParameter, Node},
lexer::Lexer,
};
fn create_bin_op(op: BinOp, exp1: Expr, exp2: Expr) -> Expr {
Expr::new(ExprDef::BinOp(op, Box::new(exp1), Box::new(exp2)))
fn create_bin_op(op: BinOp, exp1: Node, exp2: Node) -> Node {
Node::BinOp(op, Box::new(exp1), Box::new(exp2))
}
#[allow(clippy::result_unwrap_used)]
fn check_parser(js: &str, expr: &[Expr]) {
fn check_parser(js: &str, expr: &[Node]) {
let mut lexer = Lexer::new(js);
lexer.lex().expect("failed to lex");
assert_eq!(
Parser::new(lexer.tokens).parse_all().unwrap(),
Expr::new(ExprDef::Block(expr.into()))
Parser::new(&lexer.tokens).parse_all().unwrap(),
Node::StatementList(expr.into())
);
}
@ -26,7 +26,7 @@ fn check_invalid(js: &str) {
let mut lexer = Lexer::new(js);
lexer.lex().expect("failed to lex");
assert!(Parser::new(lexer.tokens).parse_all().is_err());
assert!(Parser::new(&lexer.tokens).parse_all().is_err());
}
#[test]
@ -34,70 +34,86 @@ fn check_string() {
use crate::syntax::ast::constant::Const;
// Check empty string
check_parser(
"\"\"",
&[Expr::new(ExprDef::Const(Const::String(String::new())))],
);
check_parser("\"\"", &[Node::Const(Const::String(String::new()))]);
// Check non-empty string
check_parser(
"\"hello\"",
&[Expr::new(ExprDef::Const(Const::String(String::from(
"hello",
))))],
&[Node::Const(Const::String(String::from("hello")))],
);
}
#[test]
fn check_object_literal() {
let object_properties = vec![
PropertyDefinition::Property(String::from("a"), Node::Const(Const::Bool(true))),
PropertyDefinition::Property(String::from("b"), Node::Const(Const::Bool(false))),
];
check_parser(
"const x = {
a: true,
b: false,
};
",
&[Node::ConstDecl(vec![(
String::from("x"),
Node::Object(object_properties),
)])],
);
}
#[test]
fn check_object_short_function() {
// Testing short function syntax
let mut object_properties: BTreeMap<String, Expr> = BTreeMap::new();
object_properties.insert(
String::from("a"),
Expr::new(ExprDef::Const(Const::Bool(true))),
);
object_properties.insert(
let object_properties = vec![
PropertyDefinition::Property(String::from("a"), Node::Const(Const::Bool(true))),
PropertyDefinition::MethodDefinition(
MethodDefinitionKind::Ordinary,
String::from("b"),
Expr::new(ExprDef::FunctionDecl(
None,
vec![],
Box::new(Expr::new(ExprDef::Block(vec![]))),
)),
);
Node::FunctionDecl(None, Vec::new(), Box::new(Node::StatementList(Vec::new()))),
),
];
check_parser(
"{
"const x = {
a: true,
b() {}
b() {},
};
",
&[Expr::new(ExprDef::ObjectDecl(Box::new(object_properties)))],
&[Node::ConstDecl(vec![(
String::from("x"),
Node::Object(object_properties),
)])],
);
}
#[test]
fn check_object_short_function_arguments() {
// Testing short function syntax
let mut object_properties: BTreeMap<String, Expr> = BTreeMap::new();
object_properties.insert(
String::from("a"),
Expr::new(ExprDef::Const(Const::Bool(true))),
);
object_properties.insert(
let object_properties = vec![
PropertyDefinition::Property(String::from("a"), Node::Const(Const::Bool(true))),
PropertyDefinition::MethodDefinition(
MethodDefinitionKind::Ordinary,
String::from("b"),
Expr::new(ExprDef::FunctionDecl(
Node::FunctionDecl(
None,
vec![Expr::new(ExprDef::Local(String::from("test")))],
Box::new(Expr::new(ExprDef::Block(vec![]))),
)),
);
vec![FormalParameter::new(String::from("test"), None, false)],
Box::new(Node::StatementList(Vec::new())),
),
),
];
check_parser(
"{
"const x = {
a: true,
b(test) {}
};
",
&[Expr::new(ExprDef::ObjectDecl(Box::new(object_properties)))],
&[Node::ConstDecl(vec![(
String::from("x"),
Node::Object(object_properties),
)])],
);
}
#[test]
@ -105,77 +121,75 @@ fn check_array() {
use crate::syntax::ast::constant::Const;
// Check empty array
check_parser("[]", &[Expr::new(ExprDef::ArrayDecl(vec![]))]);
check_parser("[]", &[Node::ArrayDecl(vec![])]);
// Check array with empty slot
check_parser(
"[,]",
&[Expr::new(ExprDef::ArrayDecl(vec![Expr::new(
ExprDef::Const(Const::Undefined),
)]))],
&[Node::ArrayDecl(vec![Node::Const(Const::Undefined)])],
);
// Check numeric array
check_parser(
"[1, 2, 3]",
&[Expr::new(ExprDef::ArrayDecl(vec![
Expr::new(ExprDef::Const(Const::Num(1.0))),
Expr::new(ExprDef::Const(Const::Num(2.0))),
Expr::new(ExprDef::Const(Const::Num(3.0))),
]))],
&[Node::ArrayDecl(vec![
Node::Const(Const::Num(1.0)),
Node::Const(Const::Num(2.0)),
Node::Const(Const::Num(3.0)),
])],
);
// Check numeric array with trailing comma
check_parser(
"[1, 2, 3,]",
&[Expr::new(ExprDef::ArrayDecl(vec![
Expr::new(ExprDef::Const(Const::Num(1.0))),
Expr::new(ExprDef::Const(Const::Num(2.0))),
Expr::new(ExprDef::Const(Const::Num(3.0))),
]))],
&[Node::ArrayDecl(vec![
Node::Const(Const::Num(1.0)),
Node::Const(Const::Num(2.0)),
Node::Const(Const::Num(3.0)),
])],
);
// Check numeric array with an elision
check_parser(
"[1, 2, , 3]",
&[Expr::new(ExprDef::ArrayDecl(vec![
Expr::new(ExprDef::Const(Const::Num(1.0))),
Expr::new(ExprDef::Const(Const::Num(2.0))),
Expr::new(ExprDef::Const(Const::Undefined)),
Expr::new(ExprDef::Const(Const::Num(3.0))),
]))],
&[Node::ArrayDecl(vec![
Node::Const(Const::Num(1.0)),
Node::Const(Const::Num(2.0)),
Node::Const(Const::Undefined),
Node::Const(Const::Num(3.0)),
])],
);
// Check numeric array with repeated elision
check_parser(
"[1, 2, ,, 3]",
&[Expr::new(ExprDef::ArrayDecl(vec![
Expr::new(ExprDef::Const(Const::Num(1.0))),
Expr::new(ExprDef::Const(Const::Num(2.0))),
Expr::new(ExprDef::Const(Const::Undefined)),
Expr::new(ExprDef::Const(Const::Undefined)),
Expr::new(ExprDef::Const(Const::Num(3.0))),
]))],
&[Node::ArrayDecl(vec![
Node::Const(Const::Num(1.0)),
Node::Const(Const::Num(2.0)),
Node::Const(Const::Undefined),
Node::Const(Const::Undefined),
Node::Const(Const::Num(3.0)),
])],
);
// Check combined array
check_parser(
"[1, \"a\", 2]",
&[Expr::new(ExprDef::ArrayDecl(vec![
Expr::new(ExprDef::Const(Const::Num(1.0))),
Expr::new(ExprDef::Const(Const::String(String::from("a")))),
Expr::new(ExprDef::Const(Const::Num(2.0))),
]))],
&[Node::ArrayDecl(vec![
Node::Const(Const::Num(1.0)),
Node::Const(Const::String(String::from("a"))),
Node::Const(Const::Num(2.0)),
])],
);
// Check combined array with empty string
check_parser(
"[1, \"\", 2]",
&[Expr::new(ExprDef::ArrayDecl(vec![
Expr::new(ExprDef::Const(Const::Num(1.0))),
Expr::new(ExprDef::Const(Const::String(String::new()))),
Expr::new(ExprDef::Const(Const::Num(2.0))),
]))],
&[Node::ArrayDecl(vec![
Node::Const(Const::Num(1.0)),
Node::Const(Const::String(String::new())),
Node::Const(Const::Num(2.0)),
])],
);
}
@ -186,99 +200,81 @@ fn check_declarations() {
// Check `var` declaration
check_parser(
"var a = 5;",
&[Expr::new(ExprDef::VarDecl(vec![(
&[Node::VarDecl(vec![(
String::from("a"),
Some(Expr::new(ExprDef::Const(Const::Num(5.0)))),
)]))],
Some(Node::Const(Const::Num(5.0))),
)])],
);
// Check `var` declaration with no spaces
check_parser(
"var a=5;",
&[Expr::new(ExprDef::VarDecl(vec![(
&[Node::VarDecl(vec![(
String::from("a"),
Some(Expr::new(ExprDef::Const(Const::Num(5.0)))),
)]))],
Some(Node::Const(Const::Num(5.0))),
)])],
);
// Check empty `var` declaration
check_parser(
"var a;",
&[Expr::new(ExprDef::VarDecl(vec![(String::from("a"), None)]))],
);
check_parser("var a;", &[Node::VarDecl(vec![(String::from("a"), None)])]);
// Check multiple `var` declaration
check_parser(
"var a = 5, b, c = 6;",
&[Expr::new(ExprDef::VarDecl(vec![
(
String::from("a"),
Some(Expr::new(ExprDef::Const(Const::Num(5.0)))),
),
&[Node::VarDecl(vec![
(String::from("a"), Some(Node::Const(Const::Num(5.0)))),
(String::from("b"), None),
(
String::from("c"),
Some(Expr::new(ExprDef::Const(Const::Num(6.0)))),
),
]))],
(String::from("c"), Some(Node::Const(Const::Num(6.0)))),
])],
);
// Check `let` declaration
check_parser(
"let a = 5;",
&[Expr::new(ExprDef::LetDecl(vec![(
&[Node::LetDecl(vec![(
String::from("a"),
Some(Expr::new(ExprDef::Const(Const::Num(5.0)))),
)]))],
Some(Node::Const(Const::Num(5.0))),
)])],
);
// Check `let` declaration with no spaces
check_parser(
"let a=5;",
&[Expr::new(ExprDef::LetDecl(vec![(
&[Node::LetDecl(vec![(
String::from("a"),
Some(Expr::new(ExprDef::Const(Const::Num(5.0)))),
)]))],
Some(Node::Const(Const::Num(5.0))),
)])],
);
// Check empty `let` declaration
check_parser(
"let a;",
&[Expr::new(ExprDef::LetDecl(vec![(String::from("a"), None)]))],
);
check_parser("let a;", &[Node::LetDecl(vec![(String::from("a"), None)])]);
// Check multiple `let` declaration
check_parser(
"let a = 5, b, c = 6;",
&[Expr::new(ExprDef::LetDecl(vec![
(
String::from("a"),
Some(Expr::new(ExprDef::Const(Const::Num(5.0)))),
),
&[Node::LetDecl(vec![
(String::from("a"), Some(Node::Const(Const::Num(5.0)))),
(String::from("b"), None),
(
String::from("c"),
Some(Expr::new(ExprDef::Const(Const::Num(6.0)))),
),
]))],
(String::from("c"), Some(Node::Const(Const::Num(6.0)))),
])],
);
// Check `const` declaration
check_parser(
"const a = 5;",
&[Expr::new(ExprDef::ConstDecl(vec![(
&[Node::ConstDecl(vec![(
String::from("a"),
Expr::new(ExprDef::Const(Const::Num(5.0))),
)]))],
Node::Const(Const::Num(5.0)),
)])],
);
// Check `const` declaration with no spaces
check_parser(
"const a=5;",
&[Expr::new(ExprDef::ConstDecl(vec![(
&[Node::ConstDecl(vec![(
String::from("a"),
Expr::new(ExprDef::Const(Const::Num(5.0))),
)]))],
Node::Const(Const::Num(5.0)),
)])],
);
// Check empty `const` declaration
@ -287,16 +283,10 @@ fn check_declarations() {
// Check multiple `const` declaration
check_parser(
"const a = 5, c = 6;",
&[Expr::new(ExprDef::ConstDecl(vec![
(
String::from("a"),
Expr::new(ExprDef::Const(Const::Num(5.0))),
),
(
String::from("c"),
Expr::new(ExprDef::Const(Const::Num(6.0))),
),
]))],
&[Node::ConstDecl(vec![
(String::from("a"), Node::Const(Const::Num(5.0))),
(String::from("c"), Node::Const(Const::Num(6.0))),
])],
);
}
@ -304,8 +294,8 @@ fn check_declarations() {
fn check_operations() {
use crate::syntax::ast::{constant::Const, op::BinOp};
fn create_bin_op(op: BinOp, exp1: Expr, exp2: Expr) -> Expr {
Expr::new(ExprDef::BinOp(op, Box::new(exp1), Box::new(exp2)))
fn create_bin_op(op: BinOp, exp1: Node, exp2: Node) -> Node {
Node::BinOp(op, Box::new(exp1), Box::new(exp2))
}
// Check numeric operations
@ -313,96 +303,96 @@ fn check_operations() {
"a + b",
&[create_bin_op(
BinOp::Num(NumOp::Add),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a+1",
&[create_bin_op(
BinOp::Num(NumOp::Add),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Const(Const::Num(1.0))),
Node::Local(String::from("a")),
Node::Const(Const::Num(1.0)),
)],
);
check_parser(
"a - b",
&[create_bin_op(
BinOp::Num(NumOp::Sub),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a-1",
&[create_bin_op(
BinOp::Num(NumOp::Sub),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Const(Const::Num(1.0))),
Node::Local(String::from("a")),
Node::Const(Const::Num(1.0)),
)],
);
check_parser(
"a / b",
&[create_bin_op(
BinOp::Num(NumOp::Div),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a/2",
&[create_bin_op(
BinOp::Num(NumOp::Div),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Const(Const::Num(2.0))),
Node::Local(String::from("a")),
Node::Const(Const::Num(2.0)),
)],
);
check_parser(
"a * b",
&[create_bin_op(
BinOp::Num(NumOp::Mul),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a*2",
&[create_bin_op(
BinOp::Num(NumOp::Mul),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Const(Const::Num(2.0))),
Node::Local(String::from("a")),
Node::Const(Const::Num(2.0)),
)],
);
check_parser(
"a ** b",
&[create_bin_op(
BinOp::Num(NumOp::Pow),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
BinOp::Num(NumOp::Exp),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a**2",
&[create_bin_op(
BinOp::Num(NumOp::Pow),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Const(Const::Num(2.0))),
BinOp::Num(NumOp::Exp),
Node::Local(String::from("a")),
Node::Const(Const::Num(2.0)),
)],
);
check_parser(
"a % b",
&[create_bin_op(
BinOp::Num(NumOp::Mod),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a%2",
&[create_bin_op(
BinOp::Num(NumOp::Mod),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Const(Const::Num(2.0))),
Node::Local(String::from("a")),
Node::Const(Const::Num(2.0)),
)],
);
@ -411,21 +401,20 @@ fn check_operations() {
"a + d*(b-3)+1",
&[create_bin_op(
BinOp::Num(NumOp::Add),
Expr::new(ExprDef::Local(String::from("a"))),
create_bin_op(
BinOp::Num(NumOp::Add),
// FIXME: shouldn't the last addition be on the right?
Expr::new(ExprDef::Const(Const::Num(1.0))),
Node::Local(String::from("a")),
create_bin_op(
BinOp::Num(NumOp::Mul),
Expr::new(ExprDef::Local(String::from("d"))),
Node::Local(String::from("d")),
create_bin_op(
BinOp::Num(NumOp::Sub),
Expr::new(ExprDef::Local(String::from("b"))),
Expr::new(ExprDef::Const(Const::Num(3.0))),
Node::Local(String::from("b")),
Node::Const(Const::Num(3.0)),
),
),
),
Node::Const(Const::Num(1.0)),
)],
);
@ -434,16 +423,16 @@ fn check_operations() {
"a & b",
&[create_bin_op(
BinOp::Bit(BitOp::And),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a&b",
&[create_bin_op(
BinOp::Bit(BitOp::And),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
@ -451,16 +440,16 @@ fn check_operations() {
"a | b",
&[create_bin_op(
BinOp::Bit(BitOp::Or),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a|b",
&[create_bin_op(
BinOp::Bit(BitOp::Or),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
@ -468,16 +457,16 @@ fn check_operations() {
"a ^ b",
&[create_bin_op(
BinOp::Bit(BitOp::Xor),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a^b",
&[create_bin_op(
BinOp::Bit(BitOp::Xor),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
@ -485,16 +474,16 @@ fn check_operations() {
"a << b",
&[create_bin_op(
BinOp::Bit(BitOp::Shl),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a<<b",
&[create_bin_op(
BinOp::Bit(BitOp::Shl),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
@ -502,16 +491,16 @@ fn check_operations() {
"a >> b",
&[create_bin_op(
BinOp::Bit(BitOp::Shr),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a>>b",
&[create_bin_op(
BinOp::Bit(BitOp::Shr),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
@ -520,99 +509,99 @@ fn check_operations() {
"a += b",
&[create_bin_op(
BinOp::Assign(AssignOp::Add),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a -= b",
&[create_bin_op(
BinOp::Assign(AssignOp::Sub),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a *= b",
&[create_bin_op(
BinOp::Assign(AssignOp::Mul),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a **= b",
&[create_bin_op(
BinOp::Assign(AssignOp::Pow),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
BinOp::Assign(AssignOp::Exp),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a /= b",
&[create_bin_op(
BinOp::Assign(AssignOp::Div),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a %= b",
&[create_bin_op(
BinOp::Assign(AssignOp::Mod),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a &= b",
&[create_bin_op(
BinOp::Assign(AssignOp::And),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a |= b",
&[create_bin_op(
BinOp::Assign(AssignOp::Or),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a ^= b",
&[create_bin_op(
BinOp::Assign(AssignOp::Xor),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a <<= b",
&[create_bin_op(
BinOp::Assign(AssignOp::Shl),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a >>= b",
&[create_bin_op(
BinOp::Assign(AssignOp::Shr),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Node::Local(String::from("a")),
Node::Local(String::from("b")),
)],
);
check_parser(
"a %= 10 / 2",
&[create_bin_op(
BinOp::Assign(AssignOp::Mod),
Expr::new(ExprDef::Local(String::from("a"))),
Node::Local(String::from("a")),
create_bin_op(
BinOp::Num(NumOp::Div),
Expr::new(ExprDef::Const(Const::Num(10.0))),
Expr::new(ExprDef::Const(Const::Num(2.0))),
Node::Const(Const::Num(10.0)),
Node::Const(Const::Num(2.0)),
),
)],
);
@ -622,118 +611,101 @@ fn check_operations() {
fn check_function_declarations() {
check_parser(
"function foo(a) { return a; }",
&[Expr::new(ExprDef::FunctionDecl(
&[Node::FunctionDecl(
Some(String::from("foo")),
vec![Expr::new(ExprDef::Local(String::from("a")))],
Box::new(Expr::new(ExprDef::Block(vec![Expr::new(ExprDef::Return(
Some(Box::new(Expr::new(ExprDef::Local(String::from("a"))))),
))]))),
))],
vec![FormalParameter::new(String::from("a"), None, false)],
Box::new(Node::StatementList(vec![Node::Return(Some(Box::new(
Node::Local(String::from("a")),
)))])),
)],
);
check_parser(
"function foo(a) { return; }",
&[Expr::new(ExprDef::FunctionDecl(
&[Node::FunctionDecl(
Some(String::from("foo")),
vec![Expr::new(ExprDef::Local(String::from("a")))],
Box::new(Expr::new(ExprDef::Block(vec![Expr::new(ExprDef::Return(
None,
))]))),
))],
vec![FormalParameter::new(String::from("a"), None, false)],
Box::new(Node::StatementList(vec![Node::Return(None)])),
)],
);
check_parser(
"function foo(a) { return }",
&[Expr::new(ExprDef::FunctionDecl(
&[Node::FunctionDecl(
Some(String::from("foo")),
vec![Expr::new(ExprDef::Local(String::from("a")))],
Box::new(Expr::new(ExprDef::Block(vec![Expr::new(ExprDef::Return(
None,
))]))),
))],
vec![FormalParameter::new(String::from("a"), None, false)],
Box::new(Node::StatementList(vec![Node::Return(None)])),
)],
);
check_parser(
"function (a, ...b) {}",
&[Expr::new(ExprDef::FunctionDecl(
None,
"function foo(a, ...b) {}",
&[Node::FunctionDecl(
Some(String::from("foo")),
vec![
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::UnaryOp(
UnaryOp::Spread,
Box::new(Expr::new(ExprDef::Local(String::from("b")))),
)),
FormalParameter::new(String::from("a"), None, false),
FormalParameter::new(String::from("b"), None, true),
],
Box::new(Expr::new(ExprDef::ObjectDecl(Box::new(BTreeMap::new())))),
))],
Box::new(Node::StatementList(Vec::new())),
)],
);
check_parser(
"(...a) => {}",
&[Expr::new(ExprDef::ArrowFunctionDecl(
vec![Expr::new(ExprDef::UnaryOp(
UnaryOp::Spread,
Box::new(Expr::new(ExprDef::Local(String::from("a")))),
))],
Box::new(Expr::new(ExprDef::ObjectDecl(Box::new(BTreeMap::new())))),
))],
&[Node::ArrowFunctionDecl(
vec![FormalParameter::new(String::from("a"), None, true)],
Box::new(Node::StatementList(Vec::new())),
)],
);
check_parser(
"(a, b, ...c) => {}",
&[Expr::new(ExprDef::ArrowFunctionDecl(
&[Node::ArrowFunctionDecl(
vec![
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
Expr::new(ExprDef::UnaryOp(
UnaryOp::Spread,
Box::new(Expr::new(ExprDef::Local(String::from("c")))),
)),
FormalParameter::new(String::from("a"), None, false),
FormalParameter::new(String::from("b"), None, false),
FormalParameter::new(String::from("c"), None, true),
],
Box::new(Expr::new(ExprDef::ObjectDecl(Box::new(BTreeMap::new())))),
))],
Box::new(Node::StatementList(Vec::new())),
)],
);
check_parser(
"(a, b) => { return a + b; }",
&[Expr::new(ExprDef::ArrowFunctionDecl(
&[Node::ArrowFunctionDecl(
vec![
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
FormalParameter::new(String::from("a"), None, false),
FormalParameter::new(String::from("b"), None, false),
],
Box::new(Expr::new(ExprDef::Block(vec![Expr::new(ExprDef::Return(
Some(Box::new(create_bin_op(
Box::new(Node::StatementList(vec![Node::Return(Some(Box::new(
create_bin_op(
BinOp::Num(NumOp::Add),
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
))),
))]))),
))],
Node::Local(String::from("a")),
Node::Local(String::from("b")),
),
)))])),
)],
);
check_parser(
"(a, b) => { return; }",
&[Expr::new(ExprDef::ArrowFunctionDecl(
&[Node::ArrowFunctionDecl(
vec![
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
FormalParameter::new(String::from("a"), None, false),
FormalParameter::new(String::from("b"), None, false),
],
Box::new(Expr::new(ExprDef::Block(vec![Expr::new(ExprDef::Return(
None,
))]))),
))],
Box::new(Node::StatementList(vec![Node::Return(None)])),
)],
);
check_parser(
"(a, b) => { return }",
&[Expr::new(ExprDef::ArrowFunctionDecl(
&[Node::ArrowFunctionDecl(
vec![
Expr::new(ExprDef::Local(String::from("a"))),
Expr::new(ExprDef::Local(String::from("b"))),
FormalParameter::new(String::from("a"), None, false),
FormalParameter::new(String::from("b"), None, false),
],
Box::new(Expr::new(ExprDef::Block(vec![Expr::new(ExprDef::Return(
None,
))]))),
))],
Box::new(Node::StatementList(vec![Node::Return(None)])),
)],
);
}

10
boa/src/wasm.rs

@ -1,7 +1,7 @@
use crate::{
exec::{Executor, Interpreter},
realm::Realm,
syntax::{ast::expr::Expr, lexer::Lexer, parser::Parser},
syntax::{ast::node::Node, lexer::Lexer, parser::Parser},
};
use wasm_bindgen::prelude::*;
@ -25,11 +25,11 @@ pub fn evaluate(src: &str) -> String {
let tokens = lexer.tokens;
// Setup executor
let expr: Expr;
let node: Node;
match Parser::new(tokens).parse_all() {
match Parser::new(&tokens).parse_all() {
Ok(v) => {
expr = v;
node = v;
}
Err(_v) => {
log("parsing fail");
@ -39,7 +39,7 @@ pub fn evaluate(src: &str) -> String {
// Create new Realm
let realm = Realm::create();
let mut engine: Interpreter = Executor::new(realm);
let result = engine.run(&expr);
let result = engine.run(&node);
match result {
Ok(v) => v.to_string(),
Err(v) => format!("{}: {}", "error", v.to_string()),

2
boa_cli/Cargo.toml

@ -12,4 +12,4 @@ edition = "2018"
[dependencies]
Boa = { path = "../boa", features = ["serde-ast"], default-features = false }
structopt = "0.3.9"
structopt = "0.3.12"

6
boa_cli/src/main.rs

@ -4,7 +4,7 @@
use boa::builtins::console::log;
use boa::serde_json;
use boa::syntax::ast::{expr::Expr, token::Token};
use boa::syntax::ast::{node::Node, token::Token};
use boa::{exec::Executor, forward_val, realm::Realm};
use std::io::{self, Write};
use std::{fs::read_to_string, path::PathBuf};
@ -91,10 +91,10 @@ fn lex_source(src: &str) -> Result<Vec<Token>, String> {
///
/// Returns an error of type String with a message,
/// if the token stream has a parsing error.
fn parse_tokens(tokens: Vec<Token>) -> Result<Expr, String> {
fn parse_tokens(tokens: Vec<Token>) -> Result<Node, String> {
use boa::syntax::parser::Parser;
Parser::new(tokens)
Parser::new(&tokens)
.parse_all()
.map_err(|e| format!("ParsingError: {}", e))
}
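
A small, hypothetical glue function (not in the diff) showing how the two helpers above compose now that the parser borrows the token slice instead of taking ownership of the `Vec<Token>`.

```rust
// Hypothetical glue; `lex_source` and `parse_tokens` are the helpers above.
fn parse_source(src: &str) -> Result<Node, String> {
    let tokens = lex_source(src)?; // Vec<Token> from the lexer
    parse_tokens(tokens)           // Parser::new(&tokens).parse_all() under the hood
}
```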
