
Improve tester display for multiple editions (#2720)

This Pull Request improves our test results display per edition and cleans up our edition detection logic.

It changes the following:

- Adds a new `edition` flag to limit the maximum edition that will be tested (see the sketch after this list).
- Adds a new `versioned` flag to display all tested editions in a table.
- Adds utility methods to `SpecEdition` to detect the edition of a test and get all the available editions.
- Cleans up the edition detection logic.
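
Below is a minimal, standalone sketch (not the tester's actual types, which live in `boa_tester/src/edition.rs`) of the idea behind the `edition` flag: deriving `Ord` on the edition enum lets the runner skip any test whose minimum required edition is newer than the requested maximum, mirroring the `filter(|test| test.edition <= max_edition)` calls in `exec/mod.rs`.

```rust
// Simplified stand-in for `SpecEdition`; the real enum has more variants and derives.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Edition {
    ES5,
    ES6,
    ES13,
    ESNext,
}

struct Test {
    name: &'static str,
    edition: Edition,
}

fn main() {
    let tests = [
        Test { name: "let-declaration", edition: Edition::ES6 },
        Test { name: "array-at", edition: Edition::ES13 },
    ];
    let max_edition = Edition::ES6;

    // Because variants are declared oldest-to-newest, `<=` expresses
    // "required edition is not newer than the maximum we want to test".
    for test in tests.iter().filter(|t| t.edition <= max_edition) {
        println!("running {}", test.name);
    }
}
```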

Output with this PR ~(We only collect ES5, ES6 and ES13 stats, so all other editions are a WIP)~:

![image](https://user-images.githubusercontent.com/38230983/227010384-883f0934-47be-4be7-84c2-a21feb9de8a9.png)

~Marking as a draft since I need to determine the version of the remaining features, but feel free to review everything else.~ Finished!
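
For reference, the `versioned` display is rendered with the newly added `comfy-table` dependency (version 6.1.4 per the Cargo.toml change). A rough sketch of the table layout follows; the row values are placeholders, not real conformance numbers, and the header columns match the ones set in `main.rs`.

```rust
use comfy_table::{presets::UTF8_HORIZONTAL_ONLY, CellAlignment, Table};

fn main() {
    let mut table = Table::new();
    table.load_preset(UTF8_HORIZONTAL_ONLY);
    table.set_header(vec![
        "Edition", "Total", "Passed", "Ignored", "Failed", "Panics", "%",
    ]);
    // Right-align every column except the edition name, as the tester does.
    for column in table.column_iter_mut().skip(1) {
        column.set_cell_alignment(CellAlignment::Right);
    }
    // Placeholder row; the tester adds one row per edition up to the maximum.
    table.add_row(vec!["ECMAScript 5.1", "0", "0", "0", "0", "0", "0.00"]);
    println!("{table}");
}
```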
Branch: pull/2724/head
José Julián Espina · 2 years ago
Commit: 9b53859126
  1. Cargo.lock (69)
  2. boa_tester/Cargo.toml (2)
  3. boa_tester/src/edition.rs (358)
  4. boa_tester/src/exec/mod.rs (89)
  5. boa_tester/src/main.rs (299)
  6. boa_tester/src/read.rs (19)
  7. boa_tester/src/results.rs (151)

69
Cargo.lock generated

@ -473,8 +473,10 @@ dependencies = [
"clap 4.1.11", "clap 4.1.11",
"color-eyre", "color-eyre",
"colored", "colored",
"comfy-table",
"fxhash", "fxhash",
"once_cell", "once_cell",
"phf",
"rayon", "rayon",
"regex", "regex",
"serde", "serde",
@ -787,6 +789,18 @@ dependencies = [
"winapi", "winapi",
] ]
[[package]]
name = "comfy-table"
version = "6.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e7b787b0dc42e8111badfdbe4c3059158ccb2db8780352fa1b01e8ccf45cc4d"
dependencies = [
"crossterm",
"strum",
"strum_macros",
"unicode-width",
]
[[package]] [[package]]
name = "concurrent-queue" name = "concurrent-queue"
version = "2.1.0" version = "2.1.0"
@ -952,6 +966,31 @@ dependencies = [
"cfg-if 1.0.0", "cfg-if 1.0.0",
] ]
[[package]]
name = "crossterm"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e64e6c0fbe2c17357405f7c758c1ef960fce08bdfb2c03d88d2a18d7e09c4b67"
dependencies = [
"bitflags 1.3.2",
"crossterm_winapi",
"libc",
"mio",
"parking_lot 0.12.1",
"signal-hook",
"signal-hook-mio",
"winapi",
]
[[package]]
name = "crossterm_winapi"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ae1b35a484aa10e07fe0638d02301c5ad24de82d310ccbd2f3693da5f09bf1c"
dependencies = [
"winapi",
]
[[package]] [[package]]
name = "crypto-common" name = "crypto-common"
version = "0.1.6" version = "0.1.6"
@ -3595,6 +3634,17 @@ dependencies = [
"signal-hook-registry", "signal-hook-registry",
] ]
[[package]]
name = "signal-hook-mio"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af"
dependencies = [
"libc",
"mio",
"signal-hook",
]
[[package]] [[package]]
name = "signal-hook-registry" name = "signal-hook-registry"
version = "1.4.1" version = "1.4.1"
@ -3794,6 +3844,25 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "strum"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f"
[[package]]
name = "strum_macros"
version = "0.24.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59"
dependencies = [
"heck",
"proc-macro2",
"quote",
"rustversion",
"syn 1.0.109",
]
[[package]] [[package]]
name = "syn" name = "syn"
version = "1.0.109" version = "1.0.109"

2
boa_tester/Cargo.toml

@ -26,6 +26,8 @@ fxhash = "0.2.1"
rayon = "1.7.0" rayon = "1.7.0"
toml = "0.7.3" toml = "0.7.3"
color-eyre = "0.6.2" color-eyre = "0.6.2"
phf = { version = "0.11.1", features = ["macros"] }
comfy-table = "6.1.4"
[features] [features]
default = ["intl"] default = ["intl"]

358
boa_tester/src/edition.rs

@ -0,0 +1,358 @@
//! Edition detection utilities.
//!
//! This module contains the [`SpecEdition`] struct, which is used in the tester to
//! classify all tests per minimum required ECMAScript edition.
use std::fmt::Display;
use serde::{Deserialize, Serialize};
use crate::read::{MetaData, TestFlag};
// TODO: Open PR in https://github.com/tc39/test262 to add "exp-operator" and "Array.prototype.includes"
// features.
/// Minimum edition required by a specific feature in the `test262` repository.
static FEATURE_EDITION: phf::Map<&'static str, SpecEdition> = phf::phf_map! {
// Proposed language features
// Hashbang Grammar
// https://github.com/tc39/proposal-hashbang
"hashbang" => SpecEdition::ESNext,
// Intl.Locale Info
// https://github.com/tc39/proposal-intl-locale-info
"Intl.Locale-info" => SpecEdition::ESNext,
// FinalizationRegistry#cleanupSome
// https://github.com/tc39/proposal-cleanup-some
"FinalizationRegistry.prototype.cleanupSome" => SpecEdition::ESNext,
// Intl.NumberFormat V3
// https://github.com/tc39/proposal-intl-numberformat-v3
"Intl.NumberFormat-v3" => SpecEdition::ESNext,
// Legacy RegExp features
// https://github.com/tc39/proposal-regexp-legacy-features
"legacy-regexp" => SpecEdition::ESNext,
// Atomics.waitAsync
// https://github.com/tc39/proposal-atomics-wait-async
"Atomics.waitAsync" => SpecEdition::ESNext,
// Import Assertions
// https://github.com/tc39/proposal-import-assertions/
"import-assertions" => SpecEdition::ESNext,
// JSON modules
// https://github.com/tc39/proposal-json-modules
"json-modules" => SpecEdition::ESNext,
// Resizable Arraybuffer
// https://github.com/tc39/proposal-resizablearraybuffer
"resizable-arraybuffer" => SpecEdition::ESNext,
// ArrayBuffer transfer
// https://github.com/tc39/proposal-arraybuffer-transfer
"arraybuffer-transfer" => SpecEdition::ESNext,
// Temporal
// https://github.com/tc39/proposal-temporal
"Temporal" => SpecEdition::ESNext,
// ShadowRealm, née Callable Boundary Realms
// https://github.com/tc39/proposal-realms
"ShadowRealm" => SpecEdition::ESNext,
// Array.prototype.findLast & Array.prototype.findLastIndex
// https://github.com/tc39/proposal-array-find-from-last
"array-find-from-last" => SpecEdition::ESNext,
// Array.prototype.group & Array.prototype.groupToMap
// https://github.com/tc39/proposal-array-grouping
"array-grouping" => SpecEdition::ESNext,
// Intl.DurationFormat
// https://github.com/tc39/proposal-intl-duration-format
"Intl.DurationFormat" => SpecEdition::ESNext,
// RegExp set notation + properties of strings
// https://github.com/tc39/proposal-regexp-set-notation
"regexp-v-flag" => SpecEdition::ESNext,
// Decorators
// https://github.com/tc39/proposal-decorators
"decorators" => SpecEdition::ESNext,
// Duplicate named capturing groups
// https://github.com/tc39/proposal-duplicate-named-capturing-groups
"regexp-duplicate-named-groups" => SpecEdition::ESNext,
// Symbols as WeakMap keys
// https://github.com/tc39/proposal-symbols-as-weakmap-keys
"symbols-as-weakmap-keys" => SpecEdition::ESNext,
// Array.prototype.toReversed, Array.prototype.toSorted, Array.prototype.toSpliced,
// Array.prototype.with and the equivalent TypedArray methods.
// https://github.com/tc39/proposal-change-array-by-copy/
"change-array-by-copy" => SpecEdition::ESNext,
// https://tc39.es/proposal-array-from-async/
"Array.fromAsync" => SpecEdition::ESNext,
// Well-formed Unicode strings
// https://github.com/tc39/proposal-is-usv-string
"String.prototype.isWellFormed" => SpecEdition::ESNext,
"String.prototype.toWellFormed" => SpecEdition::ESNext,
// https://github.com/tc39/proposal-intl-enumeration
"Intl-enumeration" => SpecEdition::ESNext,
// Part of the next ES14 edition
"Intl.DateTimeFormat-extend-timezonename" => SpecEdition::ESNext,
"Intl.DisplayNames-v2" => SpecEdition::ESNext,
"Intl.Segmenter" => SpecEdition::ESNext,
// Standard language features
"AggregateError" => SpecEdition::ES12,
"align-detached-buffer-semantics-with-web-reality" => SpecEdition::ES12,
"arbitrary-module-namespace-names" => SpecEdition::ES13,
"ArrayBuffer" => SpecEdition::ES6,
"Array.prototype.at" => SpecEdition::ES13,
"Array.prototype.flat" => SpecEdition::ES10,
"Array.prototype.flatMap" => SpecEdition::ES10,
"Array.prototype.values" => SpecEdition::ES6,
"arrow-function" => SpecEdition::ES6,
"async-iteration" => SpecEdition::ES9,
"async-functions" => SpecEdition::ES8,
"Atomics" => SpecEdition::ES8,
"BigInt" => SpecEdition::ES11,
"caller" => SpecEdition::ES5,
"class" => SpecEdition::ES6,
"class-fields-private" => SpecEdition::ES13,
"class-fields-private-in" => SpecEdition::ES13,
"class-fields-public" => SpecEdition::ES13,
"class-methods-private" => SpecEdition::ES13,
"class-static-block" => SpecEdition::ES13,
"class-static-fields-private" => SpecEdition::ES13,
"class-static-fields-public" => SpecEdition::ES13,
"class-static-methods-private" => SpecEdition::ES13,
"coalesce-expression" => SpecEdition::ES11,
"computed-property-names" => SpecEdition::ES6,
"const" => SpecEdition::ES6,
"cross-realm" => SpecEdition::ES6,
"DataView" => SpecEdition::ES6,
"DataView.prototype.getFloat32" => SpecEdition::ES6,
"DataView.prototype.getFloat64" => SpecEdition::ES6,
"DataView.prototype.getInt16" => SpecEdition::ES6,
"DataView.prototype.getInt32" => SpecEdition::ES6,
"DataView.prototype.getInt8" => SpecEdition::ES6,
"DataView.prototype.getUint16" => SpecEdition::ES6,
"DataView.prototype.getUint32" => SpecEdition::ES6,
"DataView.prototype.setUint8" => SpecEdition::ES6,
"default-parameters" => SpecEdition::ES6,
"destructuring-assignment" => SpecEdition::ES6,
"destructuring-binding" => SpecEdition::ES6,
"dynamic-import" => SpecEdition::ES11,
"error-cause" => SpecEdition::ES13,
"export-star-as-namespace-from-module" => SpecEdition::ES11,
"FinalizationRegistry" => SpecEdition::ES12,
"for-in-order" => SpecEdition::ES11,
"for-of" => SpecEdition::ES6,
"Float32Array" => SpecEdition::ES6,
"Float64Array" => SpecEdition::ES6,
"generators" => SpecEdition::ES6,
"globalThis" => SpecEdition::ES11,
"import.meta" => SpecEdition::ES11,
"Int8Array" => SpecEdition::ES6,
"Int16Array" => SpecEdition::ES6,
"Int32Array" => SpecEdition::ES6,
"intl-normative-optional" => SpecEdition::ES8,
"Intl.DateTimeFormat-datetimestyle" => SpecEdition::ES12,
"Intl.DateTimeFormat-dayPeriod" => SpecEdition::ES8,
"Intl.DateTimeFormat-formatRange" => SpecEdition::ES12,
"Intl.DateTimeFormat-fractionalSecondDigits" => SpecEdition::ES12,
"Intl.DisplayNames" => SpecEdition::ES12,
"Intl.ListFormat" => SpecEdition::ES12,
"Intl.Locale" => SpecEdition::ES12,
"Intl.NumberFormat-unified" => SpecEdition::ES11,
"Intl.RelativeTimeFormat" => SpecEdition::ES11,
"json-superset" => SpecEdition::ES10,
"let" => SpecEdition::ES6,
"logical-assignment-operators" => SpecEdition::ES12,
"Map" => SpecEdition::ES6,
"new.target" => SpecEdition::ES6,
"numeric-separator-literal" => SpecEdition::ES12,
"object-rest" => SpecEdition::ES9,
"object-spread" => SpecEdition::ES9,
"Object.fromEntries" => SpecEdition::ES10,
"Object.hasOwn" => SpecEdition::ES13,
"Object.is" => SpecEdition::ES6,
"optional-catch-binding" => SpecEdition::ES10,
"optional-chaining" => SpecEdition::ES11,
"Promise" => SpecEdition::ES6,
"Promise.allSettled" => SpecEdition::ES11,
"Promise.any" => SpecEdition::ES12,
"Promise.prototype.finally" => SpecEdition::ES9,
"Proxy" => SpecEdition::ES6,
"proxy-missing-checks" => SpecEdition::ES6,
"Reflect" => SpecEdition::ES6,
"Reflect.construct" => SpecEdition::ES6,
"Reflect.set" => SpecEdition::ES6,
"Reflect.setPrototypeOf" => SpecEdition::ES6,
"regexp-dotall" => SpecEdition::ES9,
"regexp-lookbehind" => SpecEdition::ES9,
"regexp-match-indices" => SpecEdition::ES13,
"regexp-named-groups" => SpecEdition::ES9,
"regexp-unicode-property-escapes" => SpecEdition::ES9,
"rest-parameters" => SpecEdition::ES6,
"Set" => SpecEdition::ES6,
"SharedArrayBuffer" => SpecEdition::ES8,
"string-trimming" => SpecEdition::ES10,
"String.fromCodePoint" => SpecEdition::ES6,
"String.prototype.at" => SpecEdition::ES13,
"String.prototype.endsWith" => SpecEdition::ES6,
"String.prototype.includes" => SpecEdition::ES6,
"String.prototype.matchAll" => SpecEdition::ES11,
"String.prototype.replaceAll" => SpecEdition::ES12,
"String.prototype.trimEnd" => SpecEdition::ES10,
"String.prototype.trimStart" => SpecEdition::ES10,
"super" => SpecEdition::ES6,
"Symbol" => SpecEdition::ES6,
"Symbol.asyncIterator" => SpecEdition::ES9,
"Symbol.hasInstance" => SpecEdition::ES6,
"Symbol.isConcatSpreadable" => SpecEdition::ES6,
"Symbol.iterator" => SpecEdition::ES6,
"Symbol.match" => SpecEdition::ES6,
"Symbol.matchAll" => SpecEdition::ES11,
"Symbol.prototype.description" => SpecEdition::ES10,
"Symbol.replace" => SpecEdition::ES6,
"Symbol.search" => SpecEdition::ES6,
"Symbol.species" => SpecEdition::ES6,
"Symbol.split" => SpecEdition::ES6,
"Symbol.toPrimitive" => SpecEdition::ES6,
"Symbol.toStringTag" => SpecEdition::ES6,
"Symbol.unscopables" => SpecEdition::ES6,
"tail-call-optimization" => SpecEdition::ES6,
"template" => SpecEdition::ES6,
"top-level-await" => SpecEdition::ES13,
"TypedArray" => SpecEdition::ES6,
"TypedArray.prototype.at" => SpecEdition::ES13,
"u180e" => SpecEdition::ES7,
"Uint8Array" => SpecEdition::ES6,
"Uint16Array" => SpecEdition::ES6,
"Uint32Array" => SpecEdition::ES6,
"Uint8ClampedArray" => SpecEdition::ES6,
"WeakMap" => SpecEdition::ES6,
"WeakRef" => SpecEdition::ES12,
"WeakSet" => SpecEdition::ES6,
"well-formed-json-stringify" => SpecEdition::ES10,
"__proto__" => SpecEdition::ES6,
"__getter__" => SpecEdition::ES8,
"__setter__" => SpecEdition::ES8,
// Test-Harness Features
"IsHTMLDDA" => SpecEdition::ES9,
"host-gc-required" => SpecEdition::ES5,
};
/// List of ECMAScript editions that can be tested in the `test262` repository.
#[derive(
Debug,
Clone,
Copy,
PartialEq,
Eq,
PartialOrd,
Ord,
Default,
Serialize,
Deserialize,
clap::ValueEnum,
)]
#[serde(untagged)]
pub(crate) enum SpecEdition {
/// [ECMAScript 5.1 Edition](https://262.ecma-international.org/5.1)
ES5 = 5,
/// [ECMAScript 6th Edition](https://262.ecma-international.org/6.0)
ES6,
/// [ECMAScript 7th Edition](https://262.ecma-international.org/7.0)
ES7,
/// [ECMAScript 8th Edition](https://262.ecma-international.org/8.0)
ES8,
/// [ECMAScript 9th Edition](https://262.ecma-international.org/9.0)
ES9,
/// [ECMAScript 10th Edition](https://262.ecma-international.org/10.0)
ES10,
/// [ECMAScript 11th Edition](https://262.ecma-international.org/11.0)
ES11,
/// [ECMAScript 12th Edition](https://262.ecma-international.org/12.0)
ES12,
/// [ECMAScript 13th Edition](https://262.ecma-international.org/13.0)
ES13,
/// The edition being worked on right now.
///
/// A draft is currently available at <https://tc39.es/ecma262>.
#[default]
ESNext,
}
impl Display for SpecEdition {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match *self {
Self::ESNext => write!(f, "ECMAScript Next"),
Self::ES5 => write!(f, "ECMAScript 5.1"),
v => write!(f, "ECMAScript {}", v as u8),
}
}
}
impl SpecEdition {
/// Gets the minimum required ECMAScript edition of a test from its metadata.
///
/// If the function finds unknown features in `metadata`, returns an `Err(Vec<&str>)` containing
/// the list of unknown features.
pub(crate) fn from_test_metadata(metadata: &MetaData) -> Result<Self, Vec<&str>> {
let mut min_edition = if metadata.flags.contains(&TestFlag::Async) {
Self::ES8
} else if metadata.es6id.is_some() || metadata.flags.contains(&TestFlag::Module) {
Self::ES6
} else {
Self::ES5
};
let mut unknowns = Vec::new();
for feature in &*metadata.features {
let Some(feature_edition) = FEATURE_EDITION.get(feature).copied() else {
unknowns.push(&**feature);
continue;
};
min_edition = std::cmp::max(min_edition, feature_edition);
}
if unknowns.is_empty() {
Ok(min_edition)
} else {
Err(unknowns)
}
}
/// Gets an iterator of all currently available editions.
pub(crate) fn all_editions() -> impl Iterator<Item = Self> {
[
Self::ES5,
Self::ES6,
Self::ES7,
Self::ES8,
Self::ES9,
Self::ES10,
Self::ES11,
Self::ES12,
Self::ES13,
Self::ESNext,
]
.into_iter()
}
}

89
boa_tester/src/exec/mod.rs

@ -3,8 +3,8 @@
mod js262; mod js262;
use crate::{ use crate::{
read::ErrorType, Harness, Outcome, Phase, SpecVersion, Statistics, SuiteResult, Test, read::ErrorType, Harness, Outcome, Phase, SpecEdition, Statistics, SuiteResult, Test,
TestFlags, TestOutcomeResult, TestResult, TestSuite, TestFlags, TestOutcomeResult, TestResult, TestSuite, VersionedStats,
}; };
use boa_engine::{ use boa_engine::{
context::ContextBuilder, job::SimpleJobQueue, native_function::NativeFunction, context::ContextBuilder, job::SimpleJobQueue, native_function::NativeFunction,
@ -12,12 +12,19 @@ use boa_engine::{
JsValue, Source, JsValue, Source,
}; };
use colored::Colorize; use colored::Colorize;
use fxhash::FxHashSet;
use rayon::prelude::*; use rayon::prelude::*;
use std::{cell::RefCell, rc::Rc}; use std::{cell::RefCell, rc::Rc};
impl TestSuite { impl TestSuite {
/// Runs the test suite. /// Runs the test suite.
pub(crate) fn run(&self, harness: &Harness, verbose: u8, parallel: bool) -> SuiteResult { pub(crate) fn run(
&self,
harness: &Harness,
verbose: u8,
parallel: bool,
max_edition: SpecEdition,
) -> SuiteResult {
if verbose != 0 { if verbose != 0 {
println!("Suite {}:", self.path.display()); println!("Suite {}:", self.path.display());
} }
@ -25,32 +32,32 @@ impl TestSuite {
let suites: Vec<_> = if parallel { let suites: Vec<_> = if parallel {
self.suites self.suites
.par_iter() .par_iter()
.map(|suite| suite.run(harness, verbose, parallel)) .map(|suite| suite.run(harness, verbose, parallel, max_edition))
.collect() .collect()
} else { } else {
self.suites self.suites
.iter() .iter()
.map(|suite| suite.run(harness, verbose, parallel)) .map(|suite| suite.run(harness, verbose, parallel, max_edition))
.collect() .collect()
}; };
let tests: Vec<_> = if parallel { let tests: Vec<_> = if parallel {
self.tests self.tests
.par_iter() .par_iter()
.filter(|test| test.edition <= max_edition)
.flat_map(|test| test.run(harness, verbose)) .flat_map(|test| test.run(harness, verbose))
.collect() .collect()
} else { } else {
self.tests self.tests
.iter() .iter()
.filter(|test| test.edition <= max_edition)
.flat_map(|test| test.run(harness, verbose)) .flat_map(|test| test.run(harness, verbose))
.collect() .collect()
}; };
let mut features = Vec::new(); let mut features = FxHashSet::default();
for test_iter in self.tests.iter() { for test_iter in self.tests.iter() {
for feature_iter in test_iter.features.iter() { features.extend(test_iter.features.iter().map(ToString::to_string));
features.push(feature_iter.to_string());
}
} }
if verbose != 0 { if verbose != 0 {
@ -58,74 +65,68 @@ impl TestSuite {
} }
// Count passed tests and es specs // Count passed tests and es specs
let mut all = Statistics::default(); let mut versioned_stats = VersionedStats::default();
let mut es5 = Statistics::default(); let mut es_next = Statistics::default();
let mut es6 = Statistics::default();
let mut append_stats = |spec_version: SpecVersion, f: &dyn Fn(&mut Statistics)| {
f(&mut all);
if spec_version == SpecVersion::ES5 {
f(&mut es5);
} else if spec_version == SpecVersion::ES6 {
f(&mut es6);
}
};
for test in &tests { for test in &tests {
match test.result { match test.result {
TestOutcomeResult::Passed => { TestOutcomeResult::Passed => {
append_stats(test.spec_version, &|stats| { versioned_stats.apply(test.edition, |stats| {
stats.passed += 1; stats.passed += 1;
}); });
es_next.passed += 1;
} }
TestOutcomeResult::Ignored => { TestOutcomeResult::Ignored => {
append_stats(test.spec_version, &|stats| { versioned_stats.apply(test.edition, |stats| {
stats.ignored += 1; stats.ignored += 1;
}); });
es_next.ignored += 1;
} }
TestOutcomeResult::Panic => { TestOutcomeResult::Panic => {
append_stats(test.spec_version, &|stats| { versioned_stats.apply(test.edition, |stats| {
stats.panic += 1; stats.panic += 1;
}); });
es_next.panic += 1;
} }
TestOutcomeResult::Failed => {} TestOutcomeResult::Failed => {}
} }
append_stats(test.spec_version, &|stats| { versioned_stats.apply(test.edition, |stats| {
stats.total += 1; stats.total += 1;
}); });
es_next.total += 1;
} }
// Count total tests // Count total tests
for suite in &suites { for suite in &suites {
all = all + suite.all_stats.clone(); versioned_stats += suite.versioned_stats;
es5 = es5 + suite.es5_stats.clone(); es_next += suite.stats;
es6 = es6 + suite.es6_stats.clone(); features.extend(suite.features.iter().cloned());
features.append(&mut suite.features.clone());
} }
if verbose != 0 { if verbose != 0 {
println!( println!(
"Suite {} results: total: {}, passed: {}, ignored: {}, failed: {} (panics: \ "Suite {} results: total: {}, passed: {}, ignored: {}, failed: {} (panics: \
{}{}), conformance: {:.2}%", {}{}), conformance: {:.2}%",
all.total, es_next.total,
self.path.display(), self.path.display(),
all.passed.to_string().green(), es_next.passed.to_string().green(),
all.ignored.to_string().yellow(), es_next.ignored.to_string().yellow(),
(all.total - all.passed - all.ignored).to_string().red(), (es_next.total - es_next.passed - es_next.ignored)
if all.panic == 0 { .to_string()
.red(),
if es_next.panic == 0 {
"0".normal() "0".normal()
} else { } else {
all.panic.to_string().red() es_next.panic.to_string().red()
}, },
if all.panic == 0 { "" } else { " ⚠" }.red(), if es_next.panic == 0 { "" } else { " ⚠" }.red(),
(all.passed as f64 / all.total as f64) * 100.0 (es_next.passed as f64 / es_next.total as f64) * 100.0
); );
} }
SuiteResult { SuiteResult {
name: self.name.clone(), name: self.name.clone(),
all_stats: all, stats: es_next,
es5_stats: es5, versioned_stats,
es6_stats: es6,
suites, suites,
tests, tests,
features, features,
@ -163,7 +164,7 @@ impl Test {
} }
return TestResult { return TestResult {
name: self.name.clone(), name: self.name.clone(),
spec_version: self.spec_version, edition: self.edition,
strict, strict,
result: TestOutcomeResult::Failed, result: TestOutcomeResult::Failed,
result_text: Box::from("Could not read test file.") result_text: Box::from("Could not read test file.")
@ -182,7 +183,7 @@ impl Test {
} }
return TestResult { return TestResult {
name: self.name.clone(), name: self.name.clone(),
spec_version: self.spec_version, edition: self.edition,
strict, strict,
result: TestOutcomeResult::Ignored, result: TestOutcomeResult::Ignored,
result_text: Box::default(), result_text: Box::default(),
@ -381,7 +382,7 @@ impl Test {
TestResult { TestResult {
name: self.name.clone(), name: self.name.clone(),
spec_version: self.spec_version, edition: self.edition,
strict, strict,
result, result,
result_text: result_text.into_boxed_str(), result_text: result_text.into_boxed_str(),
@ -428,7 +429,7 @@ impl Test {
.map_err(|e| format!("could not run doneprintHandle.js:\n{e}"))?; .map_err(|e| format!("could not run doneprintHandle.js:\n{e}"))?;
} }
for include_name in self.includes.iter() { for include_name in &self.includes {
let include = harness let include = harness
.includes .includes
.get(include_name) .get(include_name)

299
boa_tester/src/main.rs

@ -66,6 +66,7 @@
clippy::cast_possible_wrap clippy::cast_possible_wrap
)] )]
mod edition;
mod exec; mod exec;
mod read; mod read;
mod results; mod results;
@ -77,10 +78,11 @@ use self::{
use bitflags::bitflags; use bitflags::bitflags;
use clap::{ArgAction, Parser, ValueHint}; use clap::{ArgAction, Parser, ValueHint};
use color_eyre::{ use color_eyre::{
eyre::{bail, WrapErr}, eyre::{bail, eyre, WrapErr},
Result, Result,
}; };
use colored::Colorize; use colored::Colorize;
use edition::SpecEdition;
use fxhash::{FxHashMap, FxHashSet}; use fxhash::{FxHashMap, FxHashSet};
use read::ErrorType; use read::ErrorType;
use serde::{ use serde::{
@ -90,7 +92,7 @@ use serde::{
use std::{ use std::{
fs::{self, File}, fs::{self, File},
io::Read, io::Read,
ops::Add, ops::{Add, AddAssign},
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
@ -172,6 +174,14 @@ enum Cli {
/// Path to a TOML file with the ignored tests, features, flags and/or files. /// Path to a TOML file with the ignored tests, features, flags and/or files.
#[arg(short, long, default_value = "test_ignore.toml", value_hint = ValueHint::FilePath)] #[arg(short, long, default_value = "test_ignore.toml", value_hint = ValueHint::FilePath)]
ignored: PathBuf, ignored: PathBuf,
/// Maximum ECMAScript edition to test for.
#[arg(long)]
edition: Option<SpecEdition>,
/// Displays the conformance results per ECMAScript edition.
#[arg(long)]
versioned: bool,
}, },
/// Compare two test suite results. /// Compare two test suite results.
Compare { Compare {
@ -200,6 +210,8 @@ fn main() -> Result<()> {
output, output,
disable_parallelism, disable_parallelism,
ignored: ignore, ignored: ignore,
edition,
versioned,
} => run_test_suite( } => run_test_suite(
verbose, verbose,
!disable_parallelism, !disable_parallelism,
@ -207,6 +219,8 @@ fn main() -> Result<()> {
suite.as_path(), suite.as_path(),
output.as_deref(), output.as_deref(),
ignore.as_path(), ignore.as_path(),
edition.unwrap_or_default(),
versioned,
), ),
Cli::Compare { Cli::Compare {
base, base,
@ -217,13 +231,16 @@ fn main() -> Result<()> {
} }
/// Runs the full test suite. /// Runs the full test suite.
#[allow(clippy::too_many_arguments)]
fn run_test_suite( fn run_test_suite(
verbose: u8, verbose: u8,
parallel: bool, parallel: bool,
test262_path: &Path, test262: &Path,
suite: &Path, suite: &Path,
output: Option<&Path>, output: Option<&Path>,
ignored: &Path, ignore: &Path,
edition: SpecEdition,
versioned: bool,
) -> Result<()> { ) -> Result<()> {
if let Some(path) = output { if let Some(path) = output {
if path.exists() { if path.exists() {
@ -237,7 +254,7 @@ fn run_test_suite(
let ignored = { let ignored = {
let mut input = String::new(); let mut input = String::new();
let mut f = File::open(ignored).wrap_err("could not open ignored tests file")?; let mut f = File::open(ignore).wrap_err("could not open ignored tests file")?;
f.read_to_string(&mut input) f.read_to_string(&mut input)
.wrap_err("could not read ignored tests file")?; .wrap_err("could not read ignored tests file")?;
toml::from_str(&input).wrap_err("could not decode ignored tests file")? toml::from_str(&input).wrap_err("could not decode ignored tests file")?
@ -246,22 +263,28 @@ fn run_test_suite(
if verbose != 0 { if verbose != 0 {
println!("Loading the test suite..."); println!("Loading the test suite...");
} }
let harness = read_harness(test262_path).wrap_err("could not read harness")?; let harness = read_harness(test262).wrap_err("could not read harness")?;
if suite.to_string_lossy().ends_with(".js") { if suite.to_string_lossy().ends_with(".js") {
let test = read_test(&test262_path.join(suite)).wrap_err_with(|| { let test = read_test(&test262.join(suite)).wrap_err_with(|| {
let suite = suite.display(); let suite = suite.display();
format!("could not read the test {suite}") format!("could not read the test {suite}")
})?; })?;
if verbose != 0 { if test.edition <= edition {
println!("Test loaded, starting..."); if verbose != 0 {
println!("Test loaded, starting...");
}
test.run(&harness, verbose);
} else {
println!(
"Minimum spec edition of test is bigger than the specified edition. Skipping."
);
} }
test.run(&harness, verbose);
println!(); println!();
} else { } else {
let suite = read_suite(&test262_path.join(suite), &ignored, false).wrap_err_with(|| { let suite = read_suite(&test262.join(suite), &ignored, false).wrap_err_with(|| {
let suite = suite.display(); let suite = suite.display();
format!("could not read the suite {suite}") format!("could not read the suite {suite}")
})?; })?;
@ -269,38 +292,71 @@ fn run_test_suite(
if verbose != 0 { if verbose != 0 {
println!("Test suite loaded, starting tests..."); println!("Test suite loaded, starting tests...");
} }
let results = suite.run(&harness, verbose, parallel); let results = suite.run(&harness, verbose, parallel, edition);
let total = results.all_stats.total; if versioned {
let passed = results.all_stats.passed; let mut table = comfy_table::Table::new();
let ignored = results.all_stats.ignored; table.load_preset(comfy_table::presets::UTF8_HORIZONTAL_ONLY);
let panicked = results.all_stats.panic; table.set_header(vec![
"Edition", "Total", "Passed", "Ignored", "Failed", "Panics", "%",
]);
for column in table.column_iter_mut().skip(1) {
column.set_cell_alignment(comfy_table::CellAlignment::Right);
}
for (v, stats) in SpecEdition::all_editions()
.filter(|v| *v <= edition)
.map(|v| {
let stats = results.versioned_stats.get(v).unwrap_or(results.stats);
(v, stats)
})
{
let Statistics {
total,
passed,
ignored,
panic,
} = stats;
let failed = total - passed - ignored;
let conformance = (passed as f64 / total as f64) * 100.0;
let conformance = format!("{conformance:.2}");
table.add_row(vec![
v.to_string(),
total.to_string(),
passed.to_string(),
ignored.to_string(),
failed.to_string(),
panic.to_string(),
conformance,
]);
}
println!("\n\nResults\n");
println!("{table}");
} else {
let Statistics {
total,
passed,
ignored,
panic,
} = results.stats;
println!("\n\nResults ({edition}):");
println!("Total tests: {total}");
println!("Passed tests: {}", passed.to_string().green());
println!("Ignored tests: {}", ignored.to_string().yellow());
println!(
"Failed tests: {} (panics: {})",
(total - passed - ignored).to_string().red(),
panic.to_string().red()
);
println!(
"Conformance: {:.2}%",
(passed as f64 / total as f64) * 100.0
);
}
println!(); if let Some(output) = output {
println!("Results:"); write_json(results, output, verbose)
println!("Total tests: {total}"); .wrap_err("could not write the results to the output JSON file")?;
println!("Passed tests: {}", passed.to_string().green()); }
println!("Ignored tests: {}", ignored.to_string().yellow());
println!(
"Failed tests: {} (panics: {})",
(total - passed - ignored).to_string().red(),
panicked.to_string().red()
);
println!(
"Conformance: {:.2}%",
(passed as f64 / total as f64) * 100.0
);
println!(
"ES5 Conformance: {:.2}%",
(results.es5_stats.passed as f64 / results.es5_stats.total as f64) * 100.0
);
println!(
"ES6 Conformance: {:.2}%",
(results.es6_stats.passed as f64 / results.es6_stats.total as f64) * 100.0
);
write_json(results, output, verbose)
.wrap_err("could not write the results to the output JSON file")?;
} }
Ok(()) Ok(())
@ -331,7 +387,7 @@ struct TestSuite {
} }
/// Represents a tests statistic /// Represents a tests statistic
#[derive(Default, Debug, Clone, Serialize, Deserialize)] #[derive(Default, Debug, Copy, Clone, Serialize, Deserialize)]
struct Statistics { struct Statistics {
#[serde(rename = "t")] #[serde(rename = "t")]
total: usize, total: usize,
@ -356,26 +412,127 @@ impl Add for Statistics {
} }
} }
impl AddAssign for Statistics {
fn add_assign(&mut self, rhs: Self) {
self.total += rhs.total;
self.passed += rhs.passed;
self.ignored += rhs.ignored;
self.panic += rhs.panic;
}
}
/// Represents tests statistics separated by ECMAScript edition
#[derive(Default, Debug, Copy, Clone, Serialize, Deserialize)]
struct VersionedStats {
es5: Statistics,
es6: Statistics,
es7: Statistics,
es8: Statistics,
es9: Statistics,
es10: Statistics,
es11: Statistics,
es12: Statistics,
es13: Statistics,
}
impl VersionedStats {
/// Applies `f` to all the statistics whose edition is greater than or equal
/// to `min_edition`.
fn apply(&mut self, min_edition: SpecEdition, f: fn(&mut Statistics)) {
for edition in SpecEdition::all_editions().filter(|&edition| min_edition <= edition) {
if let Some(stats) = self.get_mut(edition) {
f(stats);
}
}
}
/// Gets the statistics corresponding to `edition`, returning `None` if `edition`
/// is `SpecEdition::ESNext`.
const fn get(&self, edition: SpecEdition) -> Option<Statistics> {
let stats = match edition {
SpecEdition::ES5 => self.es5,
SpecEdition::ES6 => self.es6,
SpecEdition::ES7 => self.es7,
SpecEdition::ES8 => self.es8,
SpecEdition::ES9 => self.es9,
SpecEdition::ES10 => self.es10,
SpecEdition::ES11 => self.es11,
SpecEdition::ES12 => self.es12,
SpecEdition::ES13 => self.es13,
SpecEdition::ESNext => return None,
};
Some(stats)
}
/// Gets a mutable reference to the statistics corresponding to `edition`, returning `None` if
/// `edition` is `SpecEdition::ESNext`.
fn get_mut(&mut self, edition: SpecEdition) -> Option<&mut Statistics> {
let stats = match edition {
SpecEdition::ES5 => &mut self.es5,
SpecEdition::ES6 => &mut self.es6,
SpecEdition::ES7 => &mut self.es7,
SpecEdition::ES8 => &mut self.es8,
SpecEdition::ES9 => &mut self.es9,
SpecEdition::ES10 => &mut self.es10,
SpecEdition::ES11 => &mut self.es11,
SpecEdition::ES12 => &mut self.es12,
SpecEdition::ES13 => &mut self.es13,
SpecEdition::ESNext => return None,
};
Some(stats)
}
}
impl Add for VersionedStats {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Self {
es5: self.es5 + rhs.es5,
es6: self.es6 + rhs.es6,
es7: self.es7 + rhs.es7,
es8: self.es8 + rhs.es8,
es9: self.es9 + rhs.es9,
es10: self.es10 + rhs.es10,
es11: self.es11 + rhs.es11,
es12: self.es12 + rhs.es12,
es13: self.es13 + rhs.es13,
}
}
}
impl AddAssign for VersionedStats {
fn add_assign(&mut self, rhs: Self) {
self.es5 += rhs.es5;
self.es6 += rhs.es6;
self.es7 += rhs.es7;
self.es8 += rhs.es8;
self.es9 += rhs.es9;
self.es10 += rhs.es10;
self.es11 += rhs.es11;
self.es12 += rhs.es12;
self.es13 += rhs.es13;
}
}
/// Outcome of a test suite. /// Outcome of a test suite.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
struct SuiteResult { struct SuiteResult {
#[serde(rename = "n")] #[serde(rename = "n")]
name: Box<str>, name: Box<str>,
#[serde(rename = "a")] #[serde(rename = "a")]
all_stats: Statistics, stats: Statistics,
#[serde(rename = "a5", default)] #[serde(rename = "av", default)]
es5_stats: Statistics, versioned_stats: VersionedStats,
#[serde(rename = "a6", default)]
es6_stats: Statistics,
#[serde(skip_serializing_if = "Vec::is_empty", default)] #[serde(skip_serializing_if = "Vec::is_empty", default)]
#[serde(rename = "s")] #[serde(rename = "s")]
suites: Vec<SuiteResult>, suites: Vec<SuiteResult>,
#[serde(rename = "t")]
#[serde(skip_serializing_if = "Vec::is_empty", default)] #[serde(skip_serializing_if = "Vec::is_empty", default)]
#[serde(rename = "t")]
tests: Vec<TestResult>, tests: Vec<TestResult>,
#[serde(skip_serializing_if = "FxHashSet::is_empty", default)]
#[serde(rename = "f")] #[serde(rename = "f")]
#[serde(skip_serializing_if = "Vec::is_empty", default)] features: FxHashSet<String>,
features: Vec<String>,
} }
/// Outcome of a test. /// Outcome of a test.
@ -385,7 +542,7 @@ struct TestResult {
#[serde(rename = "n")] #[serde(rename = "n")]
name: Box<str>, name: Box<str>,
#[serde(rename = "v", default)] #[serde(rename = "v", default)]
spec_version: SpecVersion, edition: SpecEdition,
#[serde(rename = "s", default)] #[serde(rename = "s", default)]
strict: bool, strict: bool,
#[serde(skip)] #[serde(skip)]
@ -406,62 +563,48 @@ enum TestOutcomeResult {
Panic, Panic,
} }
#[derive(Debug, Serialize, Clone, Copy, Deserialize, PartialEq, Default)]
#[serde(untagged)]
enum SpecVersion {
ES5 = 5,
ES6 = 6,
#[default]
ES13 = 13,
}
/// Represents a test. /// Represents a test.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
#[allow(dead_code)] #[allow(dead_code)]
struct Test { struct Test {
name: Box<str>, name: Box<str>,
path: Box<Path>,
description: Box<str>, description: Box<str>,
esid: Option<Box<str>>, esid: Option<Box<str>>,
spec_version: SpecVersion, edition: SpecEdition,
flags: TestFlags, flags: TestFlags,
information: Box<str>, information: Box<str>,
features: Box<[Box<str>]>,
expected_outcome: Outcome, expected_outcome: Outcome,
includes: Box<[Box<str>]>, features: FxHashSet<Box<str>>,
includes: FxHashSet<Box<str>>,
locale: Locale, locale: Locale,
path: Box<Path>,
ignored: bool, ignored: bool,
} }
impl Test { impl Test {
/// Creates a new test. /// Creates a new test.
fn new<N, C>(name: N, path: C, metadata: MetaData) -> Self fn new<N, C>(name: N, path: C, metadata: MetaData) -> Result<Self>
where where
N: Into<Box<str>>, N: Into<Box<str>>,
C: Into<Box<Path>>, C: Into<Box<Path>>,
{ {
let spec_version = if metadata.es5id.is_some() { let edition = SpecEdition::from_test_metadata(&metadata)
SpecVersion::ES5 .map_err(|feats| eyre!("test metadata contained unknown features: {feats:?}"))?;
} else if metadata.es6id.is_some() {
SpecVersion::ES6
} else {
SpecVersion::ES13
};
Self { Ok(Self {
edition,
name: name.into(), name: name.into(),
description: metadata.description, description: metadata.description,
esid: metadata.esid, esid: metadata.esid,
spec_version,
flags: metadata.flags.into(), flags: metadata.flags.into(),
information: metadata.info, information: metadata.info,
features: metadata.features, features: metadata.features.into_vec().into_iter().collect(),
expected_outcome: Outcome::from(metadata.negative), expected_outcome: Outcome::from(metadata.negative),
includes: metadata.includes, includes: metadata.includes.into_vec().into_iter().collect(),
locale: metadata.locale, locale: metadata.locale,
path: path.into(), path: path.into(),
ignored: false, ignored: false,
} })
} }
/// Sets the test as ignored. /// Sets the test as ignored.

19
boa_tester/src/read.rs

@ -68,7 +68,7 @@ impl ErrorType {
} }
/// Individual test flag. /// Individual test flag.
#[derive(Debug, Clone, Copy, Deserialize)] #[derive(Debug, Clone, Copy, Eq, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub(super) enum TestFlag { pub(super) enum TestFlag {
OnlyStrict, OnlyStrict,
@ -187,26 +187,21 @@ pub(super) fn read_suite(
} }
/// Reads information about a given test case. /// Reads information about a given test case.
pub(super) fn read_test(path: &Path) -> io::Result<Test> { pub(super) fn read_test(path: &Path) -> Result<Test> {
let name = path let name = path
.file_stem() .file_stem()
.ok_or_else(|| { .ok_or_else(|| eyre!("path for test `{}` has no file name", path.display()))?
io::Error::new(
io::ErrorKind::InvalidInput,
format!("test with no file name found: {}", path.display()),
)
})?
.to_str() .to_str()
.ok_or_else(|| { .ok_or_else(|| {
io::Error::new( eyre!(
io::ErrorKind::InvalidInput, "path for test `{}` is not a valid UTF-8 string",
format!("non-UTF-8 file name found: {}", path.display()), path.display()
) )
})?; })?;
let metadata = read_metadata(path)?; let metadata = read_metadata(path)?;
Ok(Test::new(name, path, metadata)) Test::new(name, path, metadata).wrap_err("failed to read test")
} }
/// Reads the metadata from the input test code. /// Reads the metadata from the input test code.

151
boa_tester/src/results.rs

@ -1,7 +1,8 @@
use crate::Statistics; use crate::{Statistics, VersionedStats};
use super::SuiteResult; use super::SuiteResult;
use color_eyre::{eyre::WrapErr, Result}; use color_eyre::{eyre::WrapErr, Result};
use fxhash::FxHashSet;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{ use std::{
env, fs, env, fs,
@ -28,11 +29,9 @@ struct ReducedResultInfo {
#[serde(rename = "u")] #[serde(rename = "u")]
test262_commit: Box<str>, test262_commit: Box<str>,
#[serde(rename = "a")] #[serde(rename = "a")]
all_stats: Statistics, stats: Statistics,
#[serde(rename = "a5", default)] #[serde(rename = "av", default)]
es5_stats: Statistics, versioned_stats: VersionedStats,
#[serde(rename = "a6", default)]
es6_stats: Statistics,
} }
impl From<ResultInfo> for ReducedResultInfo { impl From<ResultInfo> for ReducedResultInfo {
@ -41,9 +40,8 @@ impl From<ResultInfo> for ReducedResultInfo {
Self { Self {
commit: info.commit, commit: info.commit,
test262_commit: info.test262_commit, test262_commit: info.test262_commit,
all_stats: info.results.all_stats, stats: info.results.stats,
es5_stats: info.results.es5_stats, versioned_stats: info.results.versioned_stats,
es6_stats: info.results.es6_stats,
} }
} }
} }
@ -57,14 +55,7 @@ struct FeaturesInfo {
#[serde(rename = "n")] #[serde(rename = "n")]
suite_name: Box<str>, suite_name: Box<str>,
#[serde(rename = "f")] #[serde(rename = "f")]
features: Vec<String>, features: FxHashSet<String>,
}
fn remove_duplicates(features_vec: &[String]) -> Vec<String> {
let mut result = features_vec.to_vec();
result.sort();
result.dedup();
result
} }
impl From<ResultInfo> for FeaturesInfo { impl From<ResultInfo> for FeaturesInfo {
@ -73,7 +64,7 @@ impl From<ResultInfo> for FeaturesInfo {
commit: info.commit, commit: info.commit,
test262_commit: info.test262_commit, test262_commit: info.test262_commit,
suite_name: info.results.name, suite_name: info.results.name,
features: remove_duplicates(&info.results.features), features: info.results.features,
} }
} }
} }
@ -90,82 +81,76 @@ const FEATURES_FILE_NAME: &str = "features.json";
/// Writes the results of running the test suite to the given JSON output file. /// Writes the results of running the test suite to the given JSON output file.
/// ///
/// It will append the results to the ones already present, in an array. /// It will append the results to the ones already present, in an array.
pub(crate) fn write_json( pub(crate) fn write_json(results: SuiteResult, output_dir: &Path, verbose: u8) -> io::Result<()> {
results: SuiteResult, let mut branch = env::var("GITHUB_REF").unwrap_or_default();
output: Option<&Path>, if branch.starts_with("refs/pull") {
verbose: u8, branch = "pull".to_owned();
) -> io::Result<()> { }
if let Some(path) = output {
let mut branch = env::var("GITHUB_REF").unwrap_or_default();
if branch.starts_with("refs/pull") {
branch = "pull".to_owned();
}
let path = if branch.is_empty() { let output_dir = if branch.is_empty() {
path.to_path_buf() output_dir.to_path_buf()
} else { } else {
let folder = path.join(branch); let folder = output_dir.join(branch);
fs::create_dir_all(&folder)?; fs::create_dir_all(&folder)?;
folder folder
}; };
// We make sure we are using the latest commit information in GitHub pages: // We make sure we are using the latest commit information in GitHub pages:
update_gh_pages_repo(path.as_path(), verbose); update_gh_pages_repo(output_dir.as_path(), verbose);
if verbose != 0 { if verbose != 0 {
println!("Writing the results to {}...", path.display()); println!("Writing the results to {}...", output_dir.display());
} }
// Write the latest results. // Write the latest results.
let latest_path = path.join(LATEST_FILE_NAME); let latest = output_dir.join(LATEST_FILE_NAME);
let new_results = ResultInfo { let new_results = ResultInfo {
commit: env::var("GITHUB_SHA").unwrap_or_default().into_boxed_str(), commit: env::var("GITHUB_SHA").unwrap_or_default().into_boxed_str(),
test262_commit: get_test262_commit(), test262_commit: get_test262_commit(),
results, results,
}; };
let latest_output = BufWriter::new(fs::File::create(latest_path)?); let latest = BufWriter::new(fs::File::create(latest)?);
serde_json::to_writer(latest_output, &new_results)?; serde_json::to_writer(latest, &new_results)?;
// Write the full list of results, retrieving the existing ones first. // Write the full list of results, retrieving the existing ones first.
let all_path = path.join(RESULTS_FILE_NAME); let all_path = output_dir.join(RESULTS_FILE_NAME);
let mut all_results: Vec<ReducedResultInfo> = if all_path.exists() { let mut all_results: Vec<ReducedResultInfo> = if all_path.exists() {
serde_json::from_reader(BufReader::new(fs::File::open(&all_path)?))? serde_json::from_reader(BufReader::new(fs::File::open(&all_path)?))?
} else { } else {
Vec::new() Vec::new()
}; };
all_results.push(new_results.clone().into()); all_results.push(new_results.clone().into());
let output = BufWriter::new(fs::File::create(&all_path)?); let output = BufWriter::new(fs::File::create(&all_path)?);
serde_json::to_writer(output, &all_results)?; serde_json::to_writer(output, &all_results)?;
if verbose != 0 { if verbose != 0 {
println!("Results written correctly"); println!("Results written correctly");
} }
// Write the full list of features, existing features go first. // Write the full list of features, existing features go first.
let features_path = path.join(FEATURES_FILE_NAME); let features = output_dir.join(FEATURES_FILE_NAME);
let mut all_features: Vec<FeaturesInfo> = if features_path.exists() { let mut all_features: Vec<FeaturesInfo> = if features.exists() {
serde_json::from_reader(BufReader::new(fs::File::open(&features_path)?))? serde_json::from_reader(BufReader::new(fs::File::open(&features)?))?
} else { } else {
Vec::new() Vec::new()
}; };
all_features.push(new_results.into()); all_features.push(new_results.into());
let features_output = BufWriter::new(fs::File::create(&features_path)?); let features = BufWriter::new(fs::File::create(&features)?);
serde_json::to_writer(features_output, &all_features)?; serde_json::to_writer(features, &all_features)?;
if verbose != 0 { if verbose != 0 {
println!("Features written correctly"); println!("Features written correctly");
}
} }
Ok(()) Ok(())
@ -219,24 +204,24 @@ pub(crate) fn compare_results(base: &Path, new: &Path, markdown: bool) -> Result
)) ))
.wrap_err("could not read the new results")?; .wrap_err("could not read the new results")?;
let base_total = base_results.results.all_stats.total as isize; let base_total = base_results.results.stats.total as isize;
let new_total = new_results.results.all_stats.total as isize; let new_total = new_results.results.stats.total as isize;
let total_diff = new_total - base_total; let total_diff = new_total - base_total;
let base_passed = base_results.results.all_stats.passed as isize; let base_passed = base_results.results.stats.passed as isize;
let new_passed = new_results.results.all_stats.passed as isize; let new_passed = new_results.results.stats.passed as isize;
let passed_diff = new_passed - base_passed; let passed_diff = new_passed - base_passed;
let base_ignored = base_results.results.all_stats.ignored as isize; let base_ignored = base_results.results.stats.ignored as isize;
let new_ignored = new_results.results.all_stats.ignored as isize; let new_ignored = new_results.results.stats.ignored as isize;
let ignored_diff = new_ignored - base_ignored; let ignored_diff = new_ignored - base_ignored;
let base_failed = base_total - base_passed - base_ignored; let base_failed = base_total - base_passed - base_ignored;
let new_failed = new_total - new_passed - new_ignored; let new_failed = new_total - new_passed - new_ignored;
let failed_diff = new_failed - base_failed; let failed_diff = new_failed - base_failed;
let base_panics = base_results.results.all_stats.panic as isize; let base_panics = base_results.results.stats.panic as isize;
let new_panics = new_results.results.all_stats.panic as isize; let new_panics = new_results.results.stats.panic as isize;
let panic_diff = new_panics - base_panics; let panic_diff = new_panics - base_panics;
let base_conformance = (base_passed as f64 / base_total as f64) * 100_f64; let base_conformance = (base_passed as f64 / base_total as f64) * 100_f64;
