Browse Source

Add ECMAScript test suite (test262) (#567)

pull/687/head
Iban Eguia 4 years ago committed by GitHub
parent
commit
8fde98afa9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 69
      .github/workflows/test262.yml
  2. 5
      .github/workflows/webassembly.yml
  3. 3
      .gitignore
  4. 3
      .gitmodules
  5. 2
      .prettierignore
  6. 19
      CONTRIBUTING.md
  7. 58
      Cargo.lock
  8. 1
      Cargo.toml
  9. 5
      boa/src/builtins/array/mod.rs
  10. 2
      boa/src/builtins/math/tests.rs
  11. 20
      boa/src/exec/tests.rs
  12. 39
      boa/src/lib.rs
  13. 2
      boa/src/syntax/ast/position.rs
  14. 74
      boa/src/syntax/lexer/tests.rs
  15. 28
      boa/src/syntax/parser/cursor/buffered_lexer/tests.rs
  16. 33
      boa/src/syntax/parser/tests.rs
  17. 2
      boa_cli/Cargo.toml
  18. 4
      boa_cli/src/main.rs
  19. 22
      boa_wasm/src/lib.rs
  20. 1
      test262
  21. 28
      test_ignore.txt
  22. 23
      tester/Cargo.toml
  23. 194
      tester/src/exec.rs
  24. 303
      tester/src/main.rs
  25. 257
      tester/src/read.rs
  26. 97
      tester/src/results.rs

69
.github/workflows/test262.yml

@ -0,0 +1,69 @@
name: EcmaScript official test suite (test262)
on:
push:
branches:
- master
tags:
- v*
pull_request:
branches:
- master
jobs:
run_test262:
name: Run the test262 test suite
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v2
with:
submodules: true
- name: Install the Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
profile: minimal
- name: Cache cargo registry
uses: actions/cache@v1
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo index
uses: actions/cache@v1
with:
path: ~/.cargo/git
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo build
uses: actions/cache@v1
with:
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
# Run the test suite and upload the results
- name: Checkout GitHub pages
if: github.event_name == 'push'
uses: actions/checkout@v2
with:
ref: gh-pages
path: gh-pages
- run: mkdir -p gh-pages/test262
- name: Run the test262 test suite
run: cargo run --release --bin boa_tester -- -o gh-pages/test262
- name: Commit files
if: github.event_name == 'push'
run: |
cd gh-pages
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
git pull
git commit -m "Add new test262 results" -a
cd ..
- name: Upload results
if: github.event_name == 'push'
uses: ad-m/github-push-action@v0.6.0
with:
directory: gh-pages
github_token: ${{ secrets.GITHUB_TOKEN }}

5
.github/workflows/webassembly.yml

@ -16,10 +16,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v2
- name: Check code formatting
uses: creyD/prettier_action@v3.0
with:
dry: true
prettier_options: --check .
run: npx prettier --check .
build:
name: Build webassembly demo

3
.gitignore vendored

@ -24,3 +24,6 @@ tests/js/test.js
*.string_index
*.events
chrome_profiler.json
# Logs
*.log

3
.gitmodules vendored

@ -0,0 +1,3 @@
[submodule "test262"]
path = test262
url = https://github.com/tc39/test262.git

2
.prettierignore

@ -6,3 +6,5 @@ boa/benches/bench_scripts/mini_js.js
boa/benches/bench_scripts/clean_js.js
boa_wasm/pkg
dist
test262
tests/js/test.js

19
CONTRIBUTING.md

@ -65,6 +65,25 @@ There are some pre-defined tasks in [tasks.json](.vscode/tasks.json)
If you don't want to install everything on your machine, you can use the Dockerfile.
Start VSCode in container mode (you may need the docker container plugin) and use the Dockerfile.
## Testing
Boa provides its own test suite, and can also run the official ECMAScript test suite. To run the Boa test
suite, you can just run the normal `cargo test`, and to run the full ECMAScript test suite, you can run it
with this command:
```
cargo run --release --bin boa_tester -- -v 2> error.log
```
Note that this requires the `test262` submodule to be checked out, so you will need to run the following first:
```
git submodule init && git submodule update
```
This will run the test suite in verbose mode (you can remove the `-- -v` part to run it in non-verbose mode),
and output nice colorings in the terminal. It will also output any panic information into the `error.log` file.
## Communication
We have a Discord server, feel free to ask questions here:

58
Cargo.lock generated

@ -109,6 +109,22 @@ dependencies = [
"structopt",
]
[[package]]
name = "boa_tester"
version = "0.9.0"
dependencies = [
"Boa",
"bitflags",
"colored",
"fxhash",
"once_cell",
"regex",
"serde",
"serde_json",
"serde_yaml",
"structopt",
]
[[package]]
name = "boa_wasm"
version = "0.9.0"
@ -339,6 +355,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "dtoa"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "134951f4028bdadb9b84baf4232681efbf277da25144b9b0ad65df75946c422b"
[[package]]
name = "either"
version = "1.6.0"
@ -360,6 +382,15 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394"
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]]
name = "gc"
version = "0.3.6"
@ -486,6 +517,12 @@ version = "0.2.76"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "755456fae044e6fa1ebbbd1b3e902ae19e73097ed4ed87bb79934a867c007bc3"
[[package]]
name = "linked-hash-map"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8dd5a6d5999d9907cda8ed67bbd137d3af8085216c2ac62de5be860bd41f304a"
[[package]]
name = "lock_api"
version = "0.3.4"
@ -938,6 +975,18 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_yaml"
version = "0.8.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae3e2dd40a7cdc18ca80db804b7f461a39bb721160a85c9a1fa30134bf3c02a5"
dependencies = [
"dtoa",
"linked-hash-map",
"serde",
"yaml-rust",
]
[[package]]
name = "smallvec"
version = "0.6.13"
@ -1198,3 +1247,12 @@ name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "yaml-rust"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39f0c922f1a334134dc2f7a8b67dc5d25f0735263feec974345ff706bcf20b0d"
dependencies = [
"linked-hash-map",
]

1
Cargo.toml

@ -3,6 +3,7 @@ members = [
"boa",
"boa_cli",
"boa_wasm",
"tester",
]
# The release profile, used for `cargo build --release`.

5
boa/src/builtins/array/mod.rs

@ -655,7 +655,10 @@ impl Array {
}
let search_element = args[0].clone();
let len = this.get_field("length").as_number().unwrap() as i32;
let len = this
.get_field("length")
.as_number()
.expect("length was not a number") as i32;
let mut idx = match args.get(1) {
Some(from_idx_ptr) => {

2
boa/src/builtins/math/tests.rs

@ -721,7 +721,7 @@ fn tan() {
assert!(float_cmp::approx_eq!(
f64,
a.to_number(&mut engine).unwrap(),
f64::from(1.964_759_657_248_652_5)
1.964_759_657_248_652_5
));
}

20
boa/src/exec/tests.rs

@ -1303,7 +1303,7 @@ fn assignment_to_non_assignable() {
for case in test_cases.iter() {
let string = forward(&mut engine, case);
assert!(string.starts_with("Syntax Error: "));
assert!(string.starts_with("Uncaught \"SyntaxError\": "));
assert!(string.contains("1:3"));
}
}
@ -1318,9 +1318,9 @@ fn multicharacter_assignment_to_non_assignable() {
let test_cases = ["3 **= 5", "3 <<= 5", "3 >>= 5"];
for case in test_cases.iter() {
let string = forward(&mut engine, case);
let string = dbg!(forward(&mut engine, case));
assert!(string.starts_with("Syntax Error: "));
assert!(string.starts_with("Uncaught \"SyntaxError\": "));
assert!(string.contains("1:3"));
}
}
@ -1335,9 +1335,9 @@ fn multicharacter_bitwise_assignment_to_non_assignable() {
let test_cases = ["3 >>>= 5", "3 &&= 5", "3 ||= 5", "3 ??= 5"];
for case in test_cases.iter() {
let string = forward(&mut engine, case);
let string = dbg!(forward(&mut engine, case));
assert!(string.starts_with("Syntax Error: "));
assert!(string.starts_with("Uncaught \"SyntaxError\": "));
assert!(string.contains("1:3"));
}
}
@ -1347,10 +1347,10 @@ fn assign_to_array_decl() {
let realm = Realm::create();
let mut engine = Interpreter::new(realm);
assert!(forward(&mut engine, "[1] = [2]").starts_with("Syntax Error: "));
assert!(forward(&mut engine, "[3, 5] = [7, 8]").starts_with("Syntax Error: "));
assert!(forward(&mut engine, "[6, 8] = [2]").starts_with("Syntax Error: "));
assert!(forward(&mut engine, "[6] = [2, 9]").starts_with("Syntax Error: "));
assert!(forward(&mut engine, "[1] = [2]").starts_with("Uncaught \"SyntaxError\": "));
assert!(forward(&mut engine, "[3, 5] = [7, 8]").starts_with("Uncaught \"SyntaxError\": "));
assert!(forward(&mut engine, "[6, 8] = [2]").starts_with("Uncaught \"SyntaxError\": "));
assert!(forward(&mut engine, "[6] = [2, 9]").starts_with("Uncaught \"SyntaxError\": "));
}
#[test]
@ -1359,7 +1359,7 @@ fn assign_to_object_decl() {
let mut engine = Interpreter::new(realm);
const ERR_MSG: &str =
"expected token \';\', got \':\' in expression statement at line 1, col 3";
"Uncaught \"SyntaxError\": \"expected token \';\', got \':\' in expression statement at line 1, col 3\"";
assert_eq!(forward(&mut engine, "{a: 3} = {a: 5};"), ERR_MSG);
}

39
boa/src/lib.rs

@ -60,22 +60,33 @@ pub use gc::{custom_trace, unsafe_empty_trace, Finalize, Trace};
#[must_use]
pub type Result<T> = StdResult<T, Value>;
fn parser_expr(src: &str) -> StdResult<StatementList, String> {
Parser::new(src.as_bytes())
.parse_all()
.map_err(|e| e.to_string())
/// Parses the given source code.
///
/// It will return either the statement list AST node for the code, or a parsing error if something
/// goes wrong.
#[inline]
pub fn parse(src: &str) -> StdResult<StatementList, ParseError> {
Parser::new(src.as_bytes()).parse_all()
}
/// Execute the code using an existing Interpreter
/// The str is consumed and the state of the Interpreter is changed
pub fn forward(engine: &mut Interpreter, src: &str) -> String {
// Setup executor
let expr = match parser_expr(src) {
let expr = match parse(src) {
Ok(res) => res,
Err(e) => return e,
Err(e) => {
return format!(
"Uncaught {}",
engine
.throw_syntax_error(e.to_string())
.expect_err("interpreter.throw_syntax_error() did not return an error")
.display()
);
}
};
expr.run(engine).map_or_else(
|e| format!("Error: {}", e.display()),
|e| format!("Uncaught {}", e.display()),
|v| v.display().to_string(),
)
}
@ -88,13 +99,13 @@ pub fn forward(engine: &mut Interpreter, src: &str) -> String {
pub fn forward_val(engine: &mut Interpreter, src: &str) -> Result<Value> {
let main_timer = BoaProfiler::global().start_event("Main", "Main");
// Setup executor
let result = match parser_expr(src) {
Ok(expr) => expr.run(engine),
Err(e) => {
eprintln!("{}", e);
panic!();
}
};
let result = parse(src)
.map_err(|e| {
engine
.throw_syntax_error(e.to_string())
.expect_err("interpreter.throw_syntax_error() did not return an error")
})
.and_then(|expr| expr.run(engine));
// The main_timer needs to be dropped before the BoaProfiler is.
drop(main_timer);

2
boa/src/syntax/ast/position.rs

@ -26,6 +26,7 @@ pub struct Position {
impl Position {
/// Creates a new `Position`.
#[inline]
#[track_caller]
pub fn new(line_number: u32, column_number: u32) -> Self {
Self {
line_number: NonZeroU32::new(line_number).expect("line number cannot be 0"),
@ -65,6 +66,7 @@ pub struct Span {
impl Span {
/// Creates a new `Span`.
#[inline]
#[track_caller]
pub fn new(start: Position, end: Position) -> Self {
assert!(start <= end, "a span cannot start after its end");

74
boa/src/syntax/lexer/tests.rs

@ -312,7 +312,7 @@ fn check_line_numbers() {
#[test]
fn check_decrement_advances_lexer_2_places() {
// Here we want an example of decrementing an integer
let mut lexer = Lexer::new(&b"let a = b--;"[0..]);
let mut lexer = Lexer::new(&b"let a = b--;"[..]);
for _ in 0..4 {
lexer.next().unwrap();
@ -333,7 +333,7 @@ fn check_decrement_advances_lexer_2_places() {
#[test]
fn single_int() {
let mut lexer = Lexer::new(&b"52"[0..]);
let mut lexer = Lexer::new(&b"52"[..]);
let expected = [TokenKind::numeric_literal(52)];
@ -375,7 +375,7 @@ fn numbers() {
#[test]
fn big_exp_numbers() {
let mut lexer = Lexer::new(&b"1.0e25 1.0e36 9.0e50"[0..]);
let mut lexer = Lexer::new(&b"1.0e25 1.0e36 9.0e50"[..]);
let expected = [
TokenKind::numeric_literal(10000000000000000000000000.0),
@ -389,7 +389,7 @@ fn big_exp_numbers() {
#[test]
#[ignore]
fn big_literal_numbers() {
let mut lexer = Lexer::new(&b"10000000000000000000000000"[0..]);
let mut lexer = Lexer::new(&b"10000000000000000000000000"[..]);
let expected = [TokenKind::numeric_literal(10000000000000000000000000.0)];
@ -398,7 +398,7 @@ fn big_literal_numbers() {
#[test]
fn implicit_octal_edge_case() {
let mut lexer = Lexer::new(&b"044.5 094.5"[0..]);
let mut lexer = Lexer::new(&b"044.5 094.5"[..]);
let expected = [
TokenKind::numeric_literal(36),
@ -412,7 +412,7 @@ fn implicit_octal_edge_case() {
#[test]
fn hexadecimal_edge_case() {
let mut lexer = Lexer::new(&b"0xffff.ff 0xffffff"[0..]);
let mut lexer = Lexer::new(&b"0xffff.ff 0xffffff"[..]);
let expected = [
TokenKind::numeric_literal(0xffff),
@ -426,7 +426,7 @@ fn hexadecimal_edge_case() {
#[test]
fn single_number_without_semicolon() {
let mut lexer = Lexer::new(&b"1"[0..]);
let mut lexer = Lexer::new(&b"1"[..]);
if let Some(x) = lexer.next().unwrap() {
assert_eq!(x.kind(), &TokenKind::numeric_literal(Numeric::Integer(1)));
} else {
@ -436,7 +436,7 @@ fn single_number_without_semicolon() {
#[test]
fn number_followed_by_dot() {
let mut lexer = Lexer::new(&b"1.."[0..]);
let mut lexer = Lexer::new(&b"1.."[..]);
let expected = [
TokenKind::numeric_literal(1),
@ -448,7 +448,7 @@ fn number_followed_by_dot() {
#[test]
fn regex_literal() {
let mut lexer = Lexer::new(&b"/(?:)/"[0..]);
let mut lexer = Lexer::new(&b"/(?:)/"[..]);
let expected = [TokenKind::regular_expression_literal(
"(?:)",
@ -460,7 +460,7 @@ fn regex_literal() {
#[test]
fn regex_literal_flags() {
let mut lexer = Lexer::new(&br"/\/[^\/]*\/*/gmi"[0..]);
let mut lexer = Lexer::new(&br"/\/[^\/]*\/*/gmi"[..]);
let mut flags = RegExpFlags::default();
flags.insert(RegExpFlags::GLOBAL);
@ -477,7 +477,7 @@ fn regex_literal_flags() {
#[test]
fn addition_no_spaces() {
let mut lexer = Lexer::new(&b"1+1"[0..]);
let mut lexer = Lexer::new(&b"1+1"[..]);
let expected = [
TokenKind::numeric_literal(1),
@ -490,7 +490,7 @@ fn addition_no_spaces() {
#[test]
fn addition_no_spaces_left_side() {
let mut lexer = Lexer::new(&b"1+ 1"[0..]);
let mut lexer = Lexer::new(&b"1+ 1"[..]);
let expected = [
TokenKind::numeric_literal(1),
@ -503,7 +503,7 @@ fn addition_no_spaces_left_side() {
#[test]
fn addition_no_spaces_right_side() {
let mut lexer = Lexer::new(&b"1 +1"[0..]);
let mut lexer = Lexer::new(&b"1 +1"[..]);
let expected = [
TokenKind::numeric_literal(1),
@ -516,7 +516,7 @@ fn addition_no_spaces_right_side() {
#[test]
fn addition_no_spaces_e_number_left_side() {
let mut lexer = Lexer::new(&b"1e2+ 1"[0..]);
let mut lexer = Lexer::new(&b"1e2+ 1"[..]);
let expected = [
TokenKind::numeric_literal(100),
@ -529,7 +529,7 @@ fn addition_no_spaces_e_number_left_side() {
#[test]
fn addition_no_spaces_e_number_right_side() {
let mut lexer = Lexer::new(&b"1 +1e3"[0..]);
let mut lexer = Lexer::new(&b"1 +1e3"[..]);
let expected = [
TokenKind::numeric_literal(1),
@ -542,7 +542,7 @@ fn addition_no_spaces_e_number_right_side() {
#[test]
fn addition_no_spaces_e_number() {
let mut lexer = Lexer::new(&b"1e3+1e11"[0..]);
let mut lexer = Lexer::new(&b"1e3+1e11"[..]);
let expected = [
TokenKind::numeric_literal(1000),
@ -555,7 +555,7 @@ fn addition_no_spaces_e_number() {
#[test]
fn take_while_pred_simple() {
let mut cur = Cursor::new(&b"abcdefghijk"[0..]);
let mut cur = Cursor::new(&b"abcdefghijk"[..]);
let mut buf: String = String::new();
@ -567,7 +567,7 @@ fn take_while_pred_simple() {
#[test]
fn take_while_pred_immediate_stop() {
let mut cur = Cursor::new(&b"abcdefghijk"[0..]);
let mut cur = Cursor::new(&b"abcdefghijk"[..]);
let mut buf: String = String::new();
@ -578,7 +578,7 @@ fn take_while_pred_immediate_stop() {
#[test]
fn take_while_pred_entire_str() {
let mut cur = Cursor::new(&b"abcdefghijk"[0..]);
let mut cur = Cursor::new(&b"abcdefghijk"[..]);
let mut buf: String = String::new();
@ -594,7 +594,7 @@ fn illegal_following_numeric_literal() {
// be immediately followed by an IdentifierStart or DecimalDigit.
// Decimal Digit
let mut lexer = Lexer::new(&b"11.6n3"[0..]);
let mut lexer = Lexer::new(&b"11.6n3"[..]);
let err = lexer
.next()
.expect_err("DecimalDigit following NumericLiteral not rejected as expected");
@ -605,28 +605,30 @@ fn illegal_following_numeric_literal() {
}
// Identifier Start
let mut lexer = Lexer::new(&b"17.4$"[0..]);
match lexer.next() {
Err(Error::Syntax(_, pos)) => assert_eq!(pos, Position::new(1, 5)),
_ => assert!(
false,
"IdentifierStart '$' following NumericLiteral not rejected as expected"
),
let mut lexer = Lexer::new(&b"17.4$"[..]);
if let Error::Syntax(_, pos) = lexer
.next()
.expect_err("IdentifierStart '$' following NumericLiteral not rejected as expected")
{
assert_eq!(pos, Position::new(1, 5));
} else {
panic!("invalid error type");
}
let mut lexer = Lexer::new(&b"17.4_"[0..]);
match lexer.next() {
Err(Error::Syntax(_, pos)) => assert_eq!(pos, Position::new(1, 5)),
_ => assert!(
false,
"IdentifierStart '_' following NumericLiteral not rejected as expected"
),
let mut lexer = Lexer::new(&b"17.4_"[..]);
if let Error::Syntax(_, pos) = lexer
.next()
.expect_err("IdentifierStart '_' following NumericLiteral not rejected as expected")
{
assert_eq!(pos, Position::new(1, 5));
} else {
panic!("invalid error type");
}
}
#[test]
fn codepoint_with_no_braces() {
let mut lexer = Lexer::new(r#""test\uD83Dtest""#.as_bytes());
let mut lexer = Lexer::new(&br#""test\uD83Dtest""#[..]);
assert!(lexer.next().is_ok());
}
@ -635,7 +637,7 @@ fn codepoint_with_no_braces() {
fn illegal_code_point_following_numeric_literal() {
// Checks as per https://tc39.es/ecma262/#sec-literals-numeric-literals that a NumericLiteral cannot
// be immediately followed by an IdentifierStart where the IdentifierStart
let mut lexer = Lexer::new(r#"17.4\u{{2764}}"#.as_bytes());
let mut lexer = Lexer::new(&br#"17.4\u{{2764}}"#[..]);
assert!(
lexer.next().is_err(),
"IdentifierStart \\u{{2764}} following NumericLiteral not rejected as expected"

28
boa/src/syntax/parser/cursor/buffered_lexer/tests.rs

@ -3,9 +3,7 @@ use crate::syntax::lexer::{Token, TokenKind};
#[test]
fn peek_skip_accending() {
let buf: &[u8] = "a b c d e f g h i".as_bytes();
let mut cur = BufferedLexer::from(buf);
let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]);
assert_eq!(
*cur.peek(0, false)
@ -53,9 +51,7 @@ fn peek_skip_accending() {
#[test]
fn peek_skip_next() {
let buf: &[u8] = "a b c d e f g h i".as_bytes();
let mut cur = BufferedLexer::from(buf);
let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]);
assert_eq!(
*cur.peek(0, false)
@ -138,9 +134,7 @@ fn peek_skip_next() {
#[test]
fn peek_skip_next_alternating() {
let buf: &[u8] = "a b c d e f g h i".as_bytes();
let mut cur = BufferedLexer::from(buf);
let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]);
assert_eq!(
*cur.peek(0, false)
@ -195,9 +189,7 @@ fn peek_skip_next_alternating() {
#[test]
fn peek_next_till_end() {
let buf: &[u8] = "a b c d e f g h i".as_bytes();
let mut cur = BufferedLexer::from(buf);
let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]);
loop {
let peek = cur.peek(0, false).unwrap().cloned();
@ -213,18 +205,18 @@ fn peek_next_till_end() {
#[test]
fn peek_skip_next_till_end() {
let mut cur = BufferedLexer::from("a b c d e f g h i".as_bytes());
let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]);
let mut peeked: [Option<Token>; super::MAX_PEEK_SKIP + 1] =
[None::<Token>, None::<Token>, None::<Token>];
loop {
for i in 0..super::MAX_PEEK_SKIP {
peeked[i] = cur.peek(i, false).unwrap().cloned();
for (i, peek) in peeked.iter_mut().enumerate() {
*peek = cur.peek(i, false).unwrap().cloned();
}
for i in 0..super::MAX_PEEK_SKIP {
assert_eq!(cur.next(false).unwrap(), peeked[i]);
for peek in &peeked {
assert_eq!(&cur.next(false).unwrap(), peek);
}
if peeked[super::MAX_PEEK_SKIP - 1].is_none() {
@ -235,7 +227,7 @@ fn peek_skip_next_till_end() {
#[test]
fn skip_peeked_terminators() {
let mut cur = BufferedLexer::from("A \n B".as_bytes());
let mut cur = BufferedLexer::from(&b"A \n B"[..]);
assert_eq!(
*cur.peek(0, false)
.unwrap()

33
boa/src/syntax/parser/tests.rs

@ -144,14 +144,12 @@ fn comment_semi_colon_insertion() {
LetDeclList::from(vec![LetDecl::new::<&str, Option<Node>>(
"a",
Some(Const::Int(10).into()),
)
.into()])
)])
.into(),
LetDeclList::from(vec![LetDecl::new::<&str, Option<Node>>(
"b",
Some(Const::Int(20).into()),
)
.into()])
)])
.into(),
],
);
@ -172,14 +170,12 @@ fn multiline_comment_semi_colon_insertion() {
LetDeclList::from(vec![LetDecl::new::<&str, Option<Node>>(
"a",
Some(Const::Int(10).into()),
)
.into()])
)])
.into(),
LetDeclList::from(vec![LetDecl::new::<&str, Option<Node>>(
"b",
Some(Const::Int(20).into()),
)
.into()])
)])
.into(),
],
);
@ -197,14 +193,12 @@ fn multiline_comment_no_lineterminator() {
LetDeclList::from(vec![LetDecl::new::<&str, Option<Node>>(
"a",
Some(Const::Int(10).into()),
)
.into()])
)])
.into(),
LetDeclList::from(vec![LetDecl::new::<&str, Option<Node>>(
"b",
Some(Const::Int(20).into()),
)
.into()])
)])
.into(),
],
);
@ -225,8 +219,7 @@ fn assignment_line_terminator() {
LetDeclList::from(vec![LetDecl::new::<&str, Option<Node>>(
"a",
Some(Const::Int(3).into()),
)
.into()])
)])
.into(),
Assign::new(Identifier::from("a"), Const::from(5)).into(),
],
@ -239,18 +232,9 @@ fn assignment_multiline_terminator() {
let a = 3;
a =
5;
"#;
@ -260,8 +244,7 @@ fn assignment_multiline_terminator() {
LetDeclList::from(vec![LetDecl::new::<&str, Option<Node>>(
"a",
Some(Const::Int(3).into()),
)
.into()])
)])
.into(),
Assign::new(Identifier::from("a"), Const::from(5)).into(),
],

2
boa_cli/Cargo.toml

@ -14,7 +14,7 @@ edition = "2018"
Boa = { path = "../boa", features = ["serde"] }
rustyline = "6.2.0"
rustyline-derive = "0.3.1"
structopt = "0.3.16"
structopt = "0.3.17"
serde_json = "1.0.57"
colored = "2.0.0"
regex = "1.3.9"

4
boa_cli/src/main.rs

@ -152,8 +152,8 @@ pub fn main() -> Result<(), std::io::Error> {
}
} else {
match forward_val(&mut engine, &buffer) {
Ok(v) => print!("{}", v.display()),
Err(v) => eprint!("{}", v.display()),
Ok(v) => println!("{}", v.display()),
Err(v) => eprintln!("Uncaught {}", v.display()),
}
}
}

22
boa_wasm/src/lib.rs

@ -1,18 +1,26 @@
use boa::{Executable, Interpreter, Parser, Realm};
use boa::{parse, Executable, Interpreter, Realm};
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
pub fn evaluate(src: &str) -> Result<String, JsValue> {
let expr = Parser::new(src.as_bytes())
.parse_all()
.map_err(|e| JsValue::from(format!("Parsing Error: {}", e)))?;
// Setup executor
let realm = Realm::create();
let mut engine = Interpreter::new(realm);
// Setup executor
let expr = match parse(src) {
Ok(res) => res,
Err(e) => {
return Err(format!(
"Uncaught {}",
engine
.throw_syntax_error(e.to_string())
.expect_err("interpreter.throw_syntax_error() did not return an error")
.display()
)
.into());
}
};
expr.run(&mut engine)
.map_err(|e| JsValue::from(format!("Error: {}", e.display())))
.map_err(|e| JsValue::from(format!("Uncaught {}", e.display())))
.map(|v| v.display().to_string())
}

1
test262

@ -0,0 +1 @@
Subproject commit 896994413cad849f470cec7757c4bb7d1b4ffc12

28
test_ignore.txt

@ -0,0 +1,28 @@
// This does not break the tester but it does iterate from 0 to u32::MAX,
// because of an incorrect implementation of `Array.prototype.indexOf`.
// TODO: Fix it to iterate on the elements in the array **in insertion order**, not from
// 0 to u32::MAX until it reaches the element.
15.4.4.14-5-13
// New errors:
// Stack overflows:
tco-non-eval-function
tco-non-eval-global
value-tojson-array-circular
value-array-circular
value-tojson-object-circular
value-object-circular
// This does not stack overflow, but freezes the computer:
arg-length-exceeding-integer-limit
// These seem to run forever:
15.4.4.22-9-b-9
15.4.4.22-7-11
15.4.4.22-9-5
15.4.4.22-8-b-iii-1-30
15.4.4.22-10-3
15.4.4.19-8-c-ii-1
fill-string-empty
S15.4.4.10_A3_T2
S15.4.4.10_A3_T1

23
tester/Cargo.toml

@ -0,0 +1,23 @@
[package]
name = "boa_tester"
version = "0.9.0"
authors = ["Iban Eguia Moraza <razican@protonmail.ch>"]
description = "Test runner for Boa, checking compliance with the official ECMAScript test suite (Test262)."
repository = "https://github.com/boa-dev/boa"
keywords = ["javascript", "compiler", "test262", "tester", "js"]
categories = ["parser-implementations", "wasm"]
license = "Unlicense/MIT"
exclude = ["../.vscode/*", "../Dockerfile", "../Makefile", "../.editorConfig"]
edition = "2018"
[dependencies]
Boa = { path = "../boa" }
structopt = "0.3.17"
serde = { version = "1.0.115", features = ["derive"] }
serde_yaml = "0.8.13"
serde_json = "1.0.57"
bitflags = "1.2.1"
regex = "1.3.9"
once_cell = "1.4.1"
colored = "2.0.0"
fxhash = "0.2.1"

194
tester/src/exec.rs

@ -0,0 +1,194 @@
//! Execution module for the test runner.
use super::{Harness, Outcome, Phase, SuiteResult, Test, TestFlags, TestResult, TestSuite, CLI};
use boa::{forward_val, parse, Interpreter, Realm};
use colored::Colorize;
use fxhash::FxHashSet;
use once_cell::sync::Lazy;
use std::{fs, panic, path::Path};
/// List of ignored tests.
///
/// Lazily loaded on first access from the `test_ignore.txt` file in the
/// working directory. Blank lines and `//` comment lines are skipped; every
/// remaining line is treated as the name of a test to ignore. If the file
/// does not exist, no tests are ignored.
static IGNORED: Lazy<FxHashSet<Box<str>>> = Lazy::new(|| {
let path = Path::new("test_ignore.txt");
if path.exists() {
// Panics if the file exists but cannot be read (e.g. bad permissions).
let filtered = fs::read_to_string(path).expect("could not read test filters");
filtered
.lines()
// Skip blank lines and `//`-style comment lines.
.filter(|line| !line.is_empty() && !line.starts_with("//"))
.map(|line| line.to_owned().into_boxed_str())
.collect::<FxHashSet<_>>()
} else {
FxHashSet::default()
}
});
impl TestSuite {
/// Runs the test suite.
///
/// Recursively runs every sub-suite and every test in this suite, then
/// aggregates the totals into a [`SuiteResult`]. In verbose mode it also
/// prints per-suite statistics.
pub(crate) fn run(&self, harness: &Harness) -> SuiteResult {
if CLI.verbose() {
println!("Suite {}:", self.name);
}
// TODO: in parallel
let suites: Vec<_> = self.suites.iter().map(|suite| suite.run(harness)).collect();
// TODO: in parallel
let tests: Vec<_> = self.tests.iter().map(|test| test.run(harness)).collect();
if CLI.verbose() {
println!();
}
// Count passed tests
// `passed == Some(true)` means passed, `Some(false)` failed, and `None`
// means the test was ignored.
let mut passed = 0;
let mut ignored = 0;
for test in &tests {
if let Some(true) = test.passed {
passed += 1;
} else if test.passed.is_none() {
ignored += 1;
}
}
// Count total tests
// Fold the sub-suite results into this suite's totals.
let mut total = tests.len();
for suite in &suites {
total += suite.total;
passed += suite.passed;
ignored += suite.ignored;
}
if CLI.verbose() {
println!(
"Results: total: {}, passed: {}, ignored: {}, conformance: {:.2}%",
total,
passed,
ignored,
(passed as f64 / total as f64) * 100.0
);
}
SuiteResult {
name: self.name.clone(),
total,
passed,
ignored,
suites,
tests: tests.into_boxed_slice(),
}
}
}
impl Test {
/// Runs the test.
///
/// Async and module tests, as well as tests listed in `test_ignore.txt`,
/// are skipped (`passed: None`). Everything else runs inside
/// `panic::catch_unwind`, so a panic in the engine is counted as a failed
/// test instead of aborting the whole run. Prints one colored dot per
/// test: green for pass, red for fail, yellow for ignored.
pub(crate) fn run(&self, harness: &Harness) -> TestResult {
// println!("Starting `{}`", self.name);
let passed = if !self.flags.intersects(TestFlags::ASYNC | TestFlags::MODULE)
&& !IGNORED.contains(&self.name)
{
let res = panic::catch_unwind(|| {
match self.expected_outcome {
Outcome::Positive => {
// The test is expected to execute without errors. Depending
// on the flags it runs raw, in strict mode, in non-strict
// mode, or in both modes.
let mut passed = true;
if self.flags.contains(TestFlags::RAW) {
let mut engine = self.set_up_env(&harness, false);
let res = forward_val(&mut engine, &self.content);
passed = res.is_ok()
} else {
if self.flags.contains(TestFlags::STRICT) {
let mut engine = self.set_up_env(&harness, true);
let res = forward_val(&mut engine, &self.content);
passed = res.is_ok()
}
// Only run the non-strict variant if strict mode passed.
if passed && self.flags.contains(TestFlags::NO_STRICT) {
let mut engine = self.set_up_env(&harness, false);
let res = forward_val(&mut engine, &self.content);
passed = res.is_ok()
}
}
passed
}
Outcome::Negative {
phase: Phase::Parse,
ref error_type,
} => {
// Parse-phase failures must be SyntaxErrors; the test
// passes if parsing actually fails.
assert_eq!(
error_type.as_ref(),
"SyntaxError",
"non-SyntaxError parsing error found in {}",
self.name
);
parse(&self.content).is_err()
}
Outcome::Negative {
phase: _,
error_type: _,
} => {
// TODO: check the phase
false
}
}
});
// A panic inside the engine counts as a failed test.
let passed = res.unwrap_or_else(|_| {
eprintln!("last panic was on test \"{}\"", self.name);
false
});
print!("{}", if passed { ".".green() } else { ".".red() });
Some(passed)
} else {
// Ignoring async tests for now.
// TODO: implement async and add `harness/doneprintHandle.js` to the includes.
print!("{}", ".".yellow());
None
};
TestResult {
name: self.name.clone(),
passed,
}
}
/// Sets the environment up to run the test.
///
/// Creates a fresh interpreter, optionally enables strict mode, then runs
/// the harness bootstrap scripts (`assert.js` and `sta.js`) and every
/// include file requested by the test's metadata. Panics if any of the
/// harness scripts fail to run.
fn set_up_env(&self, harness: &Harness, strict: bool) -> Interpreter {
// Create new Realm
// TODO: in parallel.
let realm = Realm::create();
let mut engine = Interpreter::new(realm);
// TODO: set up the environment.
if strict {
forward_val(&mut engine, r#""use strict";"#).expect("could not set strict mode");
}
forward_val(&mut engine, &harness.assert).expect("could not run assert.js");
forward_val(&mut engine, &harness.sta).expect("could not run sta.js");
self.includes.iter().for_each(|include| {
let res = forward_val(
&mut engine,
&harness
.includes
.get(include)
.expect("could not find include file"),
);
if let Err(e) = res {
eprintln!("could not run the {} include file.", include);
panic!("Uncaught {}", e.display());
}
});
engine
}
}

303
tester/src/main.rs

@ -0,0 +1,303 @@
//! Test262 test runner
//!
//! This crate will run the full ECMAScript test suite (Test262) and report compliance of the
//! `boa` engine.
#![doc(
html_logo_url = "https://raw.githubusercontent.com/jasonwilliams/boa/master/assets/logo.svg",
html_favicon_url = "https://raw.githubusercontent.com/jasonwilliams/boa/master/assets/logo.svg"
)]
#![deny(
unused_qualifications,
clippy::all,
unused_qualifications,
unused_import_braces,
unused_lifetimes,
unreachable_pub,
trivial_numeric_casts,
// rustdoc,
missing_debug_implementations,
missing_copy_implementations,
deprecated_in_future,
meta_variable_misuse,
non_ascii_idents,
rust_2018_compatibility,
rust_2018_idioms,
future_incompatible,
nonstandard_style,
)]
#![warn(clippy::perf, clippy::single_match_else, clippy::dbg_macro)]
#![allow(
clippy::missing_inline_in_public_items,
clippy::cognitive_complexity,
clippy::must_use_candidate,
clippy::missing_errors_doc,
clippy::as_conversions,
clippy::let_unit_value,
missing_doc_code_examples
)]
mod exec;
mod read;
mod results;
use self::{
read::{read_global_suite, read_harness, MetaData, Negative, TestFlag},
results::write_json,
};
use bitflags::bitflags;
use fxhash::FxHashMap;
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use std::{
fs,
path::{Path, PathBuf},
};
use structopt::StructOpt;
/// CLI information.
///
/// Lazily parsed from the command-line arguments on first access.
static CLI: Lazy<Cli> = Lazy::new(Cli::from_args);
/// Boa test262 tester
#[derive(StructOpt, Debug)]
#[structopt(name = "Boa test262 tester")]
struct Cli {
/// Whether to show verbose output.
#[structopt(short, long)]
verbose: bool,
/// Path to the Test262 suite.
#[structopt(long, parse(from_os_str), default_value = "./test262")]
test262_path: PathBuf,
/// Optional output folder for the full results information.
#[structopt(short, long, parse(from_os_str))]
output: Option<PathBuf>,
}
impl Cli {
/// Whether to show verbose output.
fn verbose(&self) -> bool {
self.verbose
}
/// Path to the Test262 suite.
fn test262_path(&self) -> &Path {
self.test262_path.as_path()
}
/// Optional output folder for the full results information.
fn output(&self) -> Option<&Path> {
self.output.as_deref()
}
}
/// Program entry point.
///
/// Validates the output directory (if one was requested), reads the harness
/// and the full test suite, runs the suite, and writes the results as JSON.
fn main() {
// Make sure the output path, if given, exists and is a directory.
if let Some(path) = CLI.output() {
if path.exists() {
if !path.is_dir() {
eprintln!("The output path must be a directory.");
std::process::exit(1);
}
} else {
fs::create_dir_all(path).expect("could not create the output directory");
}
}
if CLI.verbose() {
println!("Loading the test suite...");
}
let harness = read_harness().expect("could not read initialization bindings");
let global_suite = read_global_suite().expect("could not get the list of tests to run");
if CLI.verbose() {
println!("Test suite loaded, starting tests...");
}
let results = global_suite.run(&harness);
println!();
if CLI.verbose() {
println!("Results:");
println!("Total tests: {}", results.total);
println!("Passed tests: {}", results.passed);
// NOTE(review): if `results.total` is 0 this prints NaN — confirm an
// empty global suite is impossible here.
println!(
"Conformance: {:.2}%",
(results.passed as f64 / results.total as f64) * 100.0
)
}
write_json(results).expect("could not write the results to the output JSON file");
}
/// All the harness include files.
///
/// Populated by `read_harness` from the `harness` directory of the suite.
#[derive(Debug, Clone)]
struct Harness {
    // Contents of `harness/assert.js`, stored apart from the other includes.
    assert: Box<str>,
    // Contents of `harness/sta.js`, stored apart from the other includes.
    sta: Box<str>,
    // Every other harness file, keyed by its file name.
    includes: FxHashMap<Box<str>, Box<str>>,
}
/// Represents a test suite.
///
/// Mirrors a directory on disk: one nested suite per subdirectory, one test
/// per test file (see `read_suite`).
#[derive(Debug, Clone)]
struct TestSuite {
    // Directory name (file stem) of the suite.
    name: Box<str>,
    // Sub-suites, one per subdirectory.
    suites: Box<[TestSuite]>,
    // Tests directly contained in this directory.
    tests: Box<[Test]>,
}
/// Outcome of a test suite.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct SuiteResult {
    name: Box<str>,
    // NOTE(review): presumably aggregate counts over this suite and its
    // sub-suites — confirm against `exec.rs`, where these are filled in.
    total: usize,
    passed: usize,
    ignored: usize,
    // Results of nested sub-suites; omitted from the JSON output when empty.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    suites: Vec<SuiteResult>,
    tests: Box<[TestResult]>,
}
/// Outcome of a test.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TestResult {
    name: Box<str>,
    // NOTE(review): `None` presumably marks a test that was ignored rather
    // than run — confirm against `exec.rs`.
    passed: Option<bool>,
}
/// Represents a test.
///
/// Most fields are copied straight from the YAML metadata block of the test
/// file (see `Test::new` and `read::MetaData`).
#[derive(Debug, Clone)]
struct Test {
    // File stem of the test file.
    name: Box<str>,
    description: Box<str>,
    esid: Option<Box<str>>,
    // Combined flags, derived from the metadata `flags` list.
    flags: TestFlags,
    information: Box<str>,
    features: Box<[Box<str>]>,
    // Whether the test is expected to pass or to fail with a given error.
    expected_outcome: Outcome,
    includes: Box<[Box<str>]>,
    locale: Locale,
    // Full source code of the test file.
    content: Box<str>,
}
impl Test {
/// Creates a new test.
#[inline]
fn new<N, C>(name: N, content: C, metadata: MetaData) -> Self
where
N: Into<Box<str>>,
C: Into<Box<str>>,
{
Self {
name: name.into(),
description: metadata.description,
esid: metadata.esid,
flags: metadata.flags.into(),
information: metadata.info,
features: metadata.features,
expected_outcome: Outcome::from(metadata.negative),
includes: metadata.includes,
locale: metadata.locale,
content: content.into(),
}
}
}
/// An outcome for a test.
#[derive(Debug, Clone)]
enum Outcome {
    // The test is expected to complete without errors.
    Positive,
    // The test is expected to fail, as described by the `negative` metadata
    // field: in which phase and with which error type.
    Negative { phase: Phase, error_type: Box<str> },
}

impl Default for Outcome {
    // Tests without a `negative` metadata field are expected to pass.
    fn default() -> Self {
        Self::Positive
    }
}
impl From<Option<Negative>> for Outcome {
    /// A present `negative` metadata field maps to a negative outcome;
    /// a missing one means the test is expected to pass.
    fn from(neg: Option<Negative>) -> Self {
        match neg {
            Some(neg) => Self::Negative {
                phase: neg.phase,
                error_type: neg.error_type,
            },
            None => Self::default(),
        }
    }
}
bitflags! {
    // Bit set of test flags; each bit corresponds to one `TestFlag` variant
    // (see the `From<TestFlag>` impl below).
    struct TestFlags: u16 {
        const STRICT = 0b000000001;
        const NO_STRICT = 0b000000010;
        const MODULE = 0b000000100;
        const RAW = 0b000001000;
        const ASYNC = 0b000010000;
        const GENERATED = 0b000100000;
        const CAN_BLOCK_IS_FALSE = 0b001000000;
        const CAN_BLOCK_IS_TRUE = 0b010000000;
        const NON_DETERMINISTIC = 0b100000000;
    }
}
impl Default for TestFlags {
    /// The default flag set is both strictness modes (`STRICT | NO_STRICT`).
    fn default() -> Self {
        Self::STRICT | Self::NO_STRICT
    }
}
impl From<TestFlag> for TestFlags {
    /// One-to-one mapping from a single metadata flag to its bit.
    fn from(flag: TestFlag) -> Self {
        match flag {
            TestFlag::OnlyStrict => Self::STRICT,
            TestFlag::NoStrict => Self::NO_STRICT,
            TestFlag::Module => Self::MODULE,
            TestFlag::Raw => Self::RAW,
            TestFlag::Async => Self::ASYNC,
            TestFlag::Generated => Self::GENERATED,
            TestFlag::CanBlockIsFalse => Self::CAN_BLOCK_IS_FALSE,
            TestFlag::CanBlockIsTrue => Self::CAN_BLOCK_IS_TRUE,
            TestFlag::NonDeterministic => Self::NON_DETERMINISTIC,
        }
    }
}
impl<T> From<T> for TestFlags
where
    T: AsRef<[TestFlag]>,
{
    /// Combines a list of metadata flags into a single bit set.
    fn from(flags: T) -> Self {
        let flags = flags.as_ref();

        // An empty list means just the default strictness flags.
        if flags.is_empty() {
            return Self::default();
        }

        let mut result = flags
            .iter()
            .copied()
            .map(Self::from)
            .fold(Self::empty(), |acc, flag| acc | flag);

        // If no strictness-related bit was set, add the default ones.
        if !result.intersects(Self::default()) {
            result |= Self::default();
        }
        result
    }
}
/// Phase for an error.
///
/// Deserialized from the `negative.phase` metadata field (lowercase in YAML).
#[derive(Debug, Clone, Copy, Deserialize)]
#[serde(rename_all = "lowercase")]
enum Phase {
    Parse,
    Early,
    Resolution,
    Runtime,
}
/// Locale information structure.
///
/// `transparent` makes it (de)serialize directly as the inner list of
/// locale strings rather than as a wrapping object.
#[derive(Debug, Default, Clone, Deserialize)]
#[serde(transparent)]
struct Locale {
    locale: Box<[Box<str>]>,
}

257
tester/src/read.rs

@ -0,0 +1,257 @@
//! Module to read the list of test suites from disk.
use super::{Harness, Locale, Phase, Test, TestSuite, CLI};
use fxhash::FxHashMap;
use serde::{Deserialize, Serialize};
use std::{fs, io, path::Path};
/// Representation of the YAML metadata in Test262 tests.
///
/// Parsed by `read_metadata` from the `/*--- ... ---*/` block of a test file.
#[derive(Debug, Clone, Deserialize)]
pub(super) struct MetaData {
    pub(super) description: Box<str>,
    pub(super) esid: Option<Box<str>>,
    // Legacy spec identifiers; parsed but not stored in `Test::new`.
    pub(super) es5id: Option<Box<str>>,
    pub(super) es6id: Option<Box<str>>,
    #[serde(default)]
    pub(super) info: Box<str>,
    #[serde(default)]
    pub(super) features: Box<[Box<str>]>,
    #[serde(default)]
    pub(super) includes: Box<[Box<str>]>,
    #[serde(default)]
    pub(super) flags: Box<[TestFlag]>,
    // Present only for tests expected to fail.
    #[serde(default)]
    pub(super) negative: Option<Negative>,
    #[serde(default)]
    pub(super) locale: Locale,
}
/// Negative test information structure.
///
/// Describes the error a failing test is expected to produce.
#[derive(Debug, Clone, Deserialize)]
pub(super) struct Negative {
    pub(super) phase: Phase,
    // The YAML key is `type`, which is a reserved word in Rust.
    #[serde(rename = "type")]
    pub(super) error_type: Box<str>,
}
/// Individual test flag.
///
/// Deserialized from the `flags` metadata list; most variants are camelCase
/// in YAML, with three explicit renames for the non-conforming names.
#[derive(Debug, Clone, Copy, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(super) enum TestFlag {
    OnlyStrict,
    NoStrict,
    Module,
    Raw,
    Async,
    Generated,
    #[serde(rename = "CanBlockIsFalse")]
    CanBlockIsFalse,
    #[serde(rename = "CanBlockIsTrue")]
    CanBlockIsTrue,
    #[serde(rename = "non-deterministic")]
    NonDeterministic,
}
/// Test information structure.
///
/// A reduced view of a test's metadata (description + info), written to the
/// `info.json` file in the output directory.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct TestInfo {
    desc: Box<str>,
    info: Box<str>,
}
impl TestInfo {
    /// Creates a test information structure from the full metadata,
    /// trimming surrounding whitespace from both texts.
    fn from_metadata(metadata: &MetaData) -> Self {
        let desc = metadata.description.trim();
        let info = metadata.info.trim();
        Self {
            desc: Box::from(desc),
            info: Box::from(info),
        }
    }
}
/// Name of the "test information" file, created inside the output directory.
const INFO_FILE_NAME: &str = "info.json";
/// Reads the Test262 defined bindings.
///
/// Loads `harness/assert.js` and `harness/sta.js` into their own fields, and
/// every other file in the `harness` directory as a named include.
///
/// # Errors
///
/// Returns an I/O error if the directory or any of its files cannot be read.
pub(super) fn read_harness() -> io::Result<Harness> {
    let harness_path = CLI.test262_path().join("harness");

    let mut includes = FxHashMap::default();
    for entry in fs::read_dir(&harness_path)? {
        let entry = entry?;

        // Only plain files are harness includes; reading a directory with
        // `read_to_string` would fail.
        if !entry.file_type()?.is_file() {
            continue;
        }

        let file_name = entry.file_name();
        let file_name = file_name.to_string_lossy();

        // These two are stored in their own fields below.
        if file_name == "assert.js" || file_name == "sta.js" {
            continue;
        }

        let content = fs::read_to_string(entry.path())?;
        includes.insert(
            file_name.into_owned().into_boxed_str(),
            content.into_boxed_str(),
        );
    }

    let assert = fs::read_to_string(harness_path.join("assert.js"))?.into_boxed_str();
    let sta = fs::read_to_string(harness_path.join("sta.js"))?.into_boxed_str();

    Ok(Harness {
        assert,
        sta,
        includes,
    })
}
/// Reads the global suite from disk.
///
/// Walks the `test` directory of the Test262 suite. When an output directory
/// is configured, it also collects per-test information and writes it to the
/// `info.json` file there, seeding the map from an existing `info.json` when
/// one is present.
pub(super) fn read_global_suite() -> io::Result<TestSuite> {
    let path = CLI.test262_path().join("test");

    // `Some(map)` iff an output directory was requested; `read_suite` fills
    // the map in as it walks the tree.
    let mut info = if let Some(path) = CLI.output() {
        let path = path.join(INFO_FILE_NAME);
        if path.exists() {
            // Reuse previously collected information as the starting point.
            Some(serde_json::from_reader(io::BufReader::new(
                fs::File::open(path)?,
            ))?)
        } else {
            Some(FxHashMap::default())
        }
    } else {
        None
    };

    let suite = read_suite(path.as_path(), &mut info)?;

    // Only writes when an output directory exists; in that case `info` is
    // necessarily `Some` (see above), and serializing the `Option` directly
    // produces the inner map.
    if let (Some(path), info) = (CLI.output(), info) {
        let path = path.join(INFO_FILE_NAME);
        if CLI.verbose() {
            println!("Writing the test information file at {}...", path.display());
        }

        let output = io::BufWriter::new(fs::File::create(path)?);
        serde_json::to_writer(output, &info)?;

        if CLI.verbose() {
            println!("Test information file written.");
        }
    }

    Ok(suite)
}
/// Reads a test suite in the given path.
///
/// Recurses into subdirectories as nested suites and reads every non-ignored
/// file as a test, recording its information in `test_info` when present.
fn read_suite(
    path: &Path,
    test_info: &mut Option<FxHashMap<Box<str>, TestInfo>>,
) -> io::Result<TestSuite> {
    use std::ffi::OsStr;

    /// Checks whether a file must be skipped entirely.
    fn ignored(file_name: &OsStr) -> bool {
        let name = file_name.to_string_lossy();
        // `*_FIXTURE.js` files are support files for other tests.
        name.ends_with("_FIXTURE.js")
            // TODO: see if we can fix this.
            || name == "line-terminator-normalisation-CR.js"
    }

    let stem = path.file_stem().ok_or_else(|| {
        io::Error::new(
            io::ErrorKind::InvalidInput,
            format!("test suite with no name found: {}", path.display()),
        )
    })?;
    let name = stem.to_str().ok_or_else(|| {
        io::Error::new(
            io::ErrorKind::InvalidInput,
            format!("non-UTF-8 suite name found: {}", path.display()),
        )
    })?;

    let mut suites = Vec::new();
    let mut tests = Vec::new();

    // TODO: iterate in parallel
    for entry in path.read_dir()? {
        let entry = entry?;
        if entry.file_type()?.is_dir() {
            suites.push(read_suite(entry.path().as_path(), test_info)?);
        } else if !ignored(&entry.file_name()) {
            tests.push(read_test(entry.path().as_path(), test_info)?);
        }
    }

    Ok(TestSuite {
        name: name.into(),
        suites: suites.into_boxed_slice(),
        tests: tests.into_boxed_slice(),
    })
}
/// Reads information about a given test case.
///
/// Parses the file's metadata block and, when `test_info` is present, records
/// the test's reduced information keyed by its path relative to the suite.
fn read_test(
    path: &Path,
    test_info: &mut Option<FxHashMap<Box<str>, TestInfo>>,
) -> io::Result<Test> {
    let stem = path.file_stem().ok_or_else(|| {
        io::Error::new(
            io::ErrorKind::InvalidInput,
            format!("test with no file name found: {}", path.display()),
        )
    })?;
    let name = stem.to_str().ok_or_else(|| {
        io::Error::new(
            io::ErrorKind::InvalidInput,
            format!("non-UTF-8 file name found: {}", path.display()),
        )
    })?;

    let content = fs::read_to_string(path)?;
    let metadata = read_metadata(&content)?;

    if let Some(all_info) = test_info.as_mut() {
        // Key the information by the path relative to the suite root.
        let relative = path
            .strip_prefix(CLI.test262_path())
            .expect("could not get test path string");
        let path_str = relative.to_str().ok_or_else(|| {
            io::Error::new(
                io::ErrorKind::InvalidInput,
                format!("non-UTF-8 path found: {}", path.display()),
            )
        })?;

        all_info.insert(Box::from(path_str), TestInfo::from_metadata(&metadata));
    }

    Ok(Test::new(name, content, metadata))
}
/// Reads the metadata from the input test code.
///
/// Extracts the YAML between the `/*---` and `---*/` markers and parses it
/// into a [`MetaData`] structure.
fn read_metadata(code: &str) -> io::Result<MetaData> {
    use once_cell::sync::Lazy;
    use regex::Regex;

    /// Regular expression to retrieve the metadata of a test.
    static META_REGEX: Lazy<Regex> = Lazy::new(|| {
        Regex::new(r#"/\*\-{3}((?:.|\n)*)\-{3}\*/"#)
            .expect("could not compile metadata regular expression")
    });

    // Both "no match" and "no capture group" mean the metadata is missing.
    let missing = || io::Error::new(io::ErrorKind::InvalidData, "no metadata found");

    let captures = META_REGEX.captures(code).ok_or_else(missing)?;
    let yaml = captures.get(1).ok_or_else(missing)?.as_str();

    serde_yaml::from_str(yaml).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}

97
tester/src/results.rs

@ -0,0 +1,97 @@
use super::{SuiteResult, CLI};
use serde::{Deserialize, Serialize};
use std::{
env, fs,
io::{self, BufReader, BufWriter},
};
/// Structure to store full result information.
///
/// Written as `latest.json`: the commit that produced the run plus the full
/// suite result tree.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct ResultInfo {
    // Commit hash, taken from the `GITHUB_SHA` environment variable.
    commit: Box<str>,
    results: SuiteResult,
}
/// Structure to store reduced result information.
///
/// One entry of the `results.json` history: only the top-level counters of a
/// run, without the per-suite/per-test breakdown.
#[derive(Debug, Clone, Deserialize, Serialize)]
struct ReducedResultInfo {
    commit: Box<str>,
    total: usize,
    passed: usize,
    ignored: usize,
}
impl From<ResultInfo> for ReducedResultInfo {
/// Creates a new reduced suite result from a full suite result.
fn from(info: ResultInfo) -> Self {
Self {
commit: info.commit,
total: info.results.total,
passed: info.results.passed,
ignored: info.results.ignored,
}
}
}
/// File name of the "latest results" JSON file (full tree of the last run).
const LATEST_FILE_NAME: &str = "latest.json";

/// File name of the "all results" JSON file (reduced history of every run).
const RESULTS_FILE_NAME: &str = "results.json";
/// Writes the results of running the test suite to the given JSON output file.
///
/// It will append the results to the ones already present, in an array.
///
/// Writes nothing when no output directory was configured. Otherwise two
/// files are produced inside a per-branch subfolder: `latest.json` (full
/// results of this run) and `results.json` (history of reduced results).
pub(crate) fn write_json(results: SuiteResult) -> io::Result<()> {
    if let Some(path) = CLI.output() {
        // Branch name comes from the CI environment; all pull requests are
        // collapsed into a single "pull" folder.
        let mut branch = env::var("GITHUB_REF").unwrap_or_default();
        if branch.starts_with("refs/pull") {
            branch = "pull".to_owned();
        }
        // Outside CI (no GITHUB_REF) write directly into the output folder.
        let path = if branch.is_empty() {
            path.to_path_buf()
        } else {
            let folder = path.join(branch);
            fs::create_dir_all(&folder)?;
            folder
        };
        if CLI.verbose() {
            println!("Writing the results to {}...", path.display());
        }
        // Write the latest results.
        let latest_path = path.join(LATEST_FILE_NAME);
        let new_results = ResultInfo {
            // Commit hash of the run, provided by the CI environment.
            commit: env::var("GITHUB_SHA").unwrap_or_default().into_boxed_str(),
            results,
        };
        let latest_output = BufWriter::new(fs::File::create(latest_path)?);
        serde_json::to_writer(latest_output, &new_results)?;
        // Write the full list of results, retrieving the existing ones first.
        let all_path = path.join(RESULTS_FILE_NAME);
        let mut all_results: Vec<ReducedResultInfo> = if all_path.exists() {
            serde_json::from_reader(BufReader::new(fs::File::open(&all_path)?))?
        } else {
            Vec::new()
        };
        // Append this run as a reduced entry and rewrite the whole history.
        all_results.push(new_results.into());
        let output = BufWriter::new(fs::File::create(&all_path)?);
        serde_json::to_writer(output, &all_results)?;
        if CLI.verbose() {
            println!("Results written correctly");
        }
    }
    Ok(())
}
Loading…
Cancel
Save