Unify logic for running parser tests from external suites (#10444)

Nicolò Ribaudo 2019-09-16 00:07:10 +02:00
parent 5440ae1cae
commit 7195f0d8cf
11 changed files with 582 additions and 757 deletions

View File

@@ -159,13 +159,13 @@ bootstrap-flow:
cd build/flow && git checkout $(FLOW_COMMIT)
test-flow:
node scripts/tests/flow/run_babel_parser_flow_tests.js
node scripts/tests/flow
test-flow-ci: build-bundle-ci bootstrap-flow
$(MAKE) test-flow
test-flow-update-whitelist:
node scripts/tests/flow/run_babel_parser_flow_tests.js --update-whitelist
node scripts/tests/flow --update-whitelist
bootstrap-test262:
rm -rf build/test262
@@ -174,13 +174,13 @@ bootstrap-test262:
cd build/test262 && git checkout $(TEST262_COMMIT)
test-test262:
node scripts/tests/test262/run_babel_parser_test262.js
node scripts/tests/test262
test-test262-ci: build-bundle-ci bootstrap-test262
$(MAKE) test-test262
test-test262-update-whitelist:
node scripts/tests/test262/run_babel_parser_test262.js --update-whitelist
node scripts/tests/test262 --update-whitelist
# Does not work on Windows
clone-license:

View File

@@ -60,6 +60,7 @@
"lerna-changelog": "^0.5.0",
"lint-staged": "^9.2.0",
"lodash": "^4.17.13",
"mergeiterator": "^1.2.5",
"output-file-sync": "^2.0.0",
"prettier": "^1.19.1",
"pump": "^3.0.0",

scripts/tests/flow/index.js Normal file
View File

@@ -0,0 +1,132 @@
const fs = require("fs").promises;
const path = require("path");
const merge = require("mergeiterator");
const TestRunner = require("../utils/parser-test-runner");
const flowOptionsMapping = {
esproposal_class_instance_fields: "classProperties",
esproposal_class_static_fields: "classProperties",
esproposal_export_star_as: "exportNamespaceFrom",
esproposal_decorators: "decorators-legacy",
esproposal_nullish_coalescing: "nullishCoalescingOperator",
esproposal_optional_chaining: "optionalChaining",
types: "flowComments",
intern_comments: false,
};
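// Builds the plugin list for a test: starts from the default set below and adds or
// removes entries according to the test's Flow options; enabled options missing from
// flowOptionsMapping throw.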
function getPlugins(test) {
const plugins = [
"dynamicImport",
["flow", { all: true }],
"flowComments",
"jsx",
"classProperties",
"classPrivateProperties",
"classPrivateMethods",
"bigInt",
"numericSeparator",
];
if (!test.options) return plugins;
for (const [option, enabled] of Object.entries(test.options)) {
if (!enabled) {
const idx = plugins.indexOf(flowOptionsMapping[option]);
if (idx !== -1) plugins.splice(idx, 1);
} else if (!(option in flowOptionsMapping)) {
throw new Error("Parser options not mapped " + option);
} else if (flowOptionsMapping[option]) {
plugins.push(flowOptionsMapping[option]);
}
}
return plugins;
}
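// Asynchronously yields file paths relative to `root`; subdirectory traversals are
// recursive async iterators interleaved via mergeiterator.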
async function* readdirRecursive(root, dir = ".") {
const names = await fs.readdir(path.join(root, dir));
const dirs = [];
for (const name of names) {
const file = path.join(dir, name);
const stats = await fs.stat(path.join(root, file));
if (!stats.isDirectory()) {
if (!file) continue;
yield file;
} else {
dirs.push(readdirRecursive(root, file));
}
}
yield* merge(dirs);
}
async function* loadTests(root) {
for await (const file of readdirRecursive(root)) {
if (file.slice(-3) === ".js") {
const noExt = path.join(root, file).slice(0, -3);
const [contents, tree, options] = await Promise.all([
fs.readFile(noExt + ".js", "utf8"),
fs.readFile(noExt + ".tree.json", "utf8").catch(() => null),
fs.readFile(noExt + ".options.json", "utf8").catch(() => null),
]);
yield {
file,
contents,
tree: JSON.parse(tree),
options: JSON.parse(options),
};
}
}
}
const runner = new TestRunner({
testDir: path.join(__dirname, "../../../build/flow/src/parser/test/flow"),
whitelist: path.join(__dirname, "whitelist.txt"),
shouldUpdate: process.argv.includes("--update-whitelist"),
async *getTests() {
for await (const test of loadTests(this.testDir)) {
const shouldSuccess =
test.tree && (!test.tree.errors || !test.tree.errors.length);
yield {
contents: test.contents,
fileName: test.file,
id: test.file,
expectedError: !shouldSuccess,
plugins: getPlugins(test),
};
}
},
parse(test, parser) {
try {
parser(test.contents, {
sourceType: "module",
plugins: test.plugins,
});
} catch (e) {
// lets retry in script mode
if (!test.expectedError) {
try {
parser(test.contents, {
sourceType: "script",
plugins: test.plugins,
});
return;
} catch {}
}
throw e;
}
},
});
runner.run().catch(err => {
console.error(err);
process.exitCode = 1;
});

View File

@@ -1,311 +0,0 @@
"use strict";
const path = require("path");
const fs = require("fs");
const chalk = require("chalk");
const parse = require("../../../packages/babel-parser").parse;
const TESTS_FOLDER = path.join(
__dirname,
"../../../build/flow/src/parser/test/flow"
);
const WHITELIST_PATH = path.join(__dirname, "./flow_tests_whitelist.txt");
const shouldUpdateWhitelist = process.argv.indexOf("--update-whitelist") > 0;
function map_get_default(map, key, defaultConstructor) {
if (map.has(key)) {
return map.get(key);
}
const value = new defaultConstructor();
map.set(key, value);
return value;
}
function get_whitelist(filename) {
return fs
.readFileSync(filename, "utf8")
.split("\n")
.map(line => line.replace(/#.*$/, "").trim())
.filter(Boolean);
}
function list_files(root, dir) {
const files = fs.readdirSync(dir ? path.join(root, dir) : root);
let result = [];
for (let i = 0; i < files.length; i++) {
const file = dir ? path.join(dir, files[i]) : files[i];
const stats = fs.statSync(path.join(root, file));
if (stats.isDirectory()) {
result = result.concat(list_files(root, file));
} else {
result.push(file);
}
}
return result.sort();
}
function get_tests(root_dir) {
const files = list_files(root_dir);
const tests = new Map();
for (let i = 0; i < files.length; i++) {
const file = files[i];
const test_name = path.dirname(file);
const case_parts = path.basename(file).split(".");
const case_name = case_parts[0];
// Hack to ignore hidden files.
if (case_name === "") {
continue;
}
const cases = map_get_default(tests, test_name, Map);
const case_ = map_get_default(cases, case_name, Object);
const content = fs.readFileSync(path.join(root_dir, file), "utf8");
const ext = case_parts[case_parts.length - 1];
const kind =
case_parts.length > 2 ? case_parts[case_parts.length - 2] : null;
if (ext === "js") {
case_.file = file;
case_.content = content;
} else if (ext === "json" && kind === "tree") {
case_.expected_ast = JSON.parse(content);
} else if (ext === "json" && kind === "options") {
case_.options = JSON.parse(content);
}
}
return tests;
}
function update_whitelist(summary) {
const contains = (tests, file) =>
tests.some(({ test }) => test.file === file);
const disallowed = summary.disallowed.success.concat(
summary.disallowed.failure
);
const oldLines = fs
.readFileSync(WHITELIST_PATH, "utf8")
.trim()
.split("\n")
.filter(line => {
const file = line.replace(/#.*$/, "").trim();
return (
!contains(disallowed, file) && summary.unrecognized.indexOf(file) === -1
);
});
const newLines = summary.disallowed.failure
.map(({ test }) => test.file)
.filter(test => oldLines.indexOf(test) === -1);
const result = oldLines.concat(newLines).join("\n") + "\n";
fs.writeFileSync(WHITELIST_PATH, result);
}
const options = {
plugins: [
"dynamicImport",
["flow", { all: true }],
"flowComments",
"jsx",
"classProperties",
"classPrivateProperties",
"classPrivateMethods",
"bigInt",
"numericSeparator",
],
sourceType: "module",
ranges: true,
};
const flowOptionsMapping = {
esproposal_class_instance_fields: "classProperties",
esproposal_class_static_fields: "classProperties",
esproposal_export_star_as: "exportNamespaceFrom",
esproposal_decorators: "decorators-legacy",
esproposal_nullish_coalescing: "nullishCoalescingOperator",
esproposal_optional_chaining: "optionalChaining",
types: "flowComments",
intern_comments: false,
};
const summary = {
passed: true,
allowed: {
success: [],
failure: [],
},
disallowed: {
success: [],
failure: [],
},
unrecognized: [],
};
const tests = get_tests(TESTS_FOLDER);
const whitelist = get_whitelist(WHITELIST_PATH);
const unrecognized = new Set(whitelist);
tests.forEach(section => {
section.forEach(test => {
const shouldSuccess =
test.expected_ast &&
(!Array.isArray(test.expected_ast.errors) ||
test.expected_ast.errors.length === 0);
const inWhitelist = whitelist.indexOf(test.file) > -1;
const babelParserOptions = Object.assign({}, options);
babelParserOptions.plugins = babelParserOptions.plugins.slice();
if (test.options) {
Object.keys(test.options).forEach(option => {
if (!test.options[option]) {
const idx = babelParserOptions.plugins.indexOf(
flowOptionsMapping[option]
);
if (idx) {
babelParserOptions.plugins.splice(idx, 1);
}
return;
}
if (!(option in flowOptionsMapping)) {
throw new Error("Parser options not mapped " + option);
}
if (flowOptionsMapping[option]) {
babelParserOptions.plugins.push(flowOptionsMapping[option]);
}
});
}
let failed = false;
let exception = null;
try {
parse(test.content, babelParserOptions);
} catch (e) {
exception = e;
failed = true;
// lets retry in script mode
if (shouldSuccess) {
try {
parse(
test.content,
Object.assign({}, babelParserOptions, { sourceType: "script" })
);
exception = null;
failed = false;
} catch (e) {}
}
}
const isSuccess = shouldSuccess !== failed;
const isAllowed = isSuccess !== inWhitelist;
summary[isAllowed ? "allowed" : "disallowed"][
isSuccess ? "success" : "failure"
].push({ test, exception, shouldSuccess, babelParserOptions });
summary.passed &= isAllowed;
unrecognized.delete(test.file);
process.stdout.write(chalk.gray("."));
});
});
summary.unrecognized = Array.from(unrecognized);
summary.passed &= summary.unrecognized.length === 0;
// This is needed because, after the dots written using
// `process.stdout.write(".")` there is no final newline
console.log();
if (summary.disallowed.failure.length || summary.disallowed.success.length) {
console.log("\n-- FAILED TESTS --");
summary.disallowed.failure.forEach(
({ test, shouldSuccess, exception, babelParserOptions }) => {
console.log(chalk.red(`${test.file}`));
if (shouldSuccess) {
console.log(chalk.yellow(" Should parse successfully, but did not"));
console.log(chalk.yellow(` Failed with: \`${exception.message}\``));
} else {
console.log(chalk.yellow(" Should fail parsing, but did not"));
}
console.log(
chalk.yellow(
` Active plugins: ${JSON.stringify(babelParserOptions.plugins)}`
)
);
}
);
summary.disallowed.success.forEach(
({ test, shouldSuccess, babelParserOptions }) => {
console.log(chalk.red(`${test.file}`));
if (shouldSuccess) {
console.log(
chalk.yellow(
" Correctly parsed successfully, but" +
" was disallowed by the whitelist"
)
);
} else {
console.log(
chalk.yellow(
" Correctly failed parsing, but" +
" was disallowed by the whitelist"
)
);
}
console.log(
chalk.yellow(
` Active plugins: ${JSON.stringify(babelParserOptions.plugins)}`
)
);
}
);
}
console.log("-- SUMMARY --");
console.log(
chalk.green("✔ " + summary.allowed.success.length + " tests passed")
);
console.log(
chalk.green(
"✔ " +
summary.allowed.failure.length +
" tests failed but were allowed in the whitelist"
)
);
console.log(
chalk.red("✘ " + summary.disallowed.failure.length + " tests failed")
);
console.log(
chalk.red(
"✘ " +
summary.disallowed.success.length +
" tests passed but were disallowed in the whitelist"
)
);
console.log(
chalk.red(
"✘ " +
summary.unrecognized.length +
" tests specified in the whitelist were not found"
)
);
// Some padding to separate the output from the message `make`
// adds at the end of failing scripts
console.log();
if (shouldUpdateWhitelist) {
update_whitelist(summary);
console.log("\nWhitelist updated");
} else {
process.exit(summary.passed ? 0 : 1);
}

View File

@@ -1,13 +1,4 @@
# This file lists tests that are known to produce incorrect results when parsed
# with the babel parser:
#
# - Tests that are expected to parse successfully but for which the babel parser reports
# a syntax error
# - Tests that contain invalid syntax but for which the babel parser reports no syntax
# error
#
# Entries should be removed incrementally as the babel parser is improved.
ES6/modules/migrated_0020.js
JSX_invalid/migrated_0000.js
arrow_function_invalid/migrated_0002.js
async_await/migrated_0007.js
@@ -15,18 +6,18 @@ async_await/migrated_0020.js
async_await/migrated_0024.js
async_await/migrated_0027.js
async_generators/migrated_0007.js
class_method_kinds/polymorphic_getter.js
class_properties/migrated_0021.js
class_properties/migrated_0026.js
decorators/migrated_0003.js
private_class_properties/multiple.js
private_class_properties/getter_and_field.js
private_class_properties/getter_duplicate.js
private_class_properties/setter_and_field.js
private_class_properties/setter_duplicate.js
types/member/reserved_words.js
class_method_kinds/polymorphic_getter.js
ES6/modules/migrated_0020.js
export_import_reserved_words/migrated_0003.js
export_statements/export_trailing_comma.js
nullish_coalescing/precedence_and.js
nullish_coalescing/precedence_or.js
private_class_properties/getter_and_field.js
private_class_properties/getter_duplicate.js
private_class_properties/multiple.js
private_class_properties/multiple.js
private_class_properties/setter_and_field.js
private_class_properties/setter_duplicate.js
types/member/reserved_words.js

View File

@@ -0,0 +1,193 @@
const path = require("path");
const TestStream = require("test262-stream");
const TestRunner = require("../utils/parser-test-runner");
const ignoredFeatures = [
"Array.prototype.flat",
"Array.prototype.flatMap",
"Array.prototype.values",
"ArrayBuffer",
"async-functions",
"async-iteration",
"arrow-function",
"Atomics",
"caller",
"class",
"computed-property-names",
"const",
"cross-realm",
"DataView",
"DataView.prototype.getFloat32",
"DataView.prototype.getFloat64",
"DataView.prototype.getInt8",
"DataView.prototype.getInt16",
"DataView.prototype.getInt32",
"DataView.prototype.getUint16",
"DataView.prototype.getUint32",
"DataView.prototype.setUint8",
"default-parameters",
"destructuring-assignment",
"destructuring-binding",
"FinalizationGroup",
"Float32Array",
"Float64Array",
"for-of",
"generators",
"globalThis",
"hashbang",
"host-gc-required",
"Int8Array",
"Int32Array",
"Intl.DateTimeFormat-datetimestyle",
"Intl.DateTimeFormat-dayPeriod",
"Intl.DateTimeFormat-fractionalSecondDigits",
"Intl.DateTimeFormat-formatRange",
"Intl.ListFormat",
"Intl.Locale",
"Intl.NumberFormat-unified",
"Intl.RelativeTimeFormat",
"Intl.Segmenter",
"IsHTMLDDA",
"json-superset",
"let",
"Map",
"new.target",
"Object.fromEntries",
"Object.is",
"object-rest",
"object-spread",
"optional-catch-binding",
"Promise.allSettled",
"Promise.prototype.finally",
"Proxy",
"proxy-missing-checks",
"Reflect",
"Reflect.construct",
"Reflect.set",
"Reflect.setPrototypeOf",
"regexp-dotall",
"regexp-lookbehind",
"regexp-named-groups",
"regexp-unicode-property-escapes",
"rest-parameters",
"SharedArrayBuffer",
"Set",
"String.fromCodePoint",
"String.prototype.endsWith",
"String.prototype.includes",
"String.prototype.matchAll",
"String.prototype.trimEnd",
"String.prototype.trimStart",
"string-trimming",
"super",
"Symbol",
"Symbol.asyncIterator",
"Symbol.hasInstance",
"Symbol.isConcatSpreadable",
"Symbol.iterator",
"Symbol.match",
"Symbol.matchAll",
"Symbol.prototype.description",
"Symbol.replace",
"Symbol.search",
"Symbol.split",
"Symbol.species",
"Symbol.toPrimitive",
"Symbol.toStringTag",
"Symbol.unscopables",
"tail-call-optimization",
"template",
"TypedArray",
"u180e",
"Uint8Array",
"Uint8ClampedArray",
"Uint16Array",
"WeakMap",
"WeakSet",
"WeakRef",
"well-formed-json-stringify",
];
const ignoredTests = ["built-ins/RegExp/", "language/literals/regexp/"];
const featuresToPlugins = {
BigInt: "bigInt",
"class-fields-private": "classPrivateProperties",
"class-fields-public": "classProperties",
"class-methods-private": "classPrivateMethods",
"class-static-fields-public": "classProperties",
"class-static-fields-private": "classPrivateProperties",
"class-static-methods-private": "classPrivateMethods",
"dynamic-import": "dynamicImport",
"export-star-as-namespace-from-module": "exportNamespaceFrom",
"import.meta": "importMeta",
"numeric-separator-literal": "numericSeparator",
"optional-chaining": "optionalChaining",
"top-level-await": "topLevelAwait",
};
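// Features that are neither mapped to a plugin nor listed in ignoredFeatures are
// collected here and reported after the run.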
const unmappedFeatures = new Set();
function* getPlugins(features) {
if (!features) return;
for (const f of features) {
if (featuresToPlugins[f]) {
yield featuresToPlugins[f];
} else if (!ignoredFeatures.includes(f)) {
unmappedFeatures.add(f);
}
}
}
const runner = new TestRunner({
testDir: path.join(__dirname, "../../../build/test262"),
whitelist: path.join(__dirname, "whitelist.txt"),
logInterval: 500,
shouldUpdate: process.argv.includes("--update-whitelist"),
async *getTests() {
const stream = new TestStream(this.testDir, {
omitRuntime: true,
});
for await (const test of stream) {
// strip test/
const fileName = test.file.substr(5);
if (ignoredTests.some(start => fileName.startsWith(start))) continue;
yield {
contents: test.contents,
fileName,
id: `${fileName}(${test.scenario})`,
sourceType: test.attrs.flags.module ? "module" : "script",
plugins: Array.from(getPlugins(test.attrs.features)),
expectedError:
!!test.attrs.negative &&
(test.attrs.negative.phase === "parse" ||
test.attrs.negative.phase === "early"),
};
}
},
});
runner
.run()
.then(() => {
if (unmappedFeatures.size) {
console.log("");
console.log(
"The following Features are not currently mapped or ignored:"
);
console.log(
Array.from(unmappedFeatures)
.join("\n")
.replace(/^/gm, " ")
);
}
})
.catch(err => {
console.error(err);
process.exitCode = 1;
});

View File

@@ -1,126 +0,0 @@
"use strict";
const path = require("path");
const chalk = require("chalk");
const utils = require("./run_babel_parser_test262_utils");
const testDir = path.join(__dirname, "../../../build/test262");
const whitelistFile = path.join(__dirname, "test262_whitelist.txt");
const shouldUpdate = process.argv.indexOf("--update-whitelist") > -1;
Promise.all([utils.getTests(testDir), utils.getWhitelist(whitelistFile)])
.then(function([tests, whitelist]) {
const total = tests.length;
const reportInc = Math.floor(total / 20);
console.log(`Now running ${total} tests...`);
const results = tests.map(function(test, idx) {
if (idx % reportInc === 0) {
console.log(`> ${Math.round((100 * idx) / total)}% complete`);
}
return utils.runTest(test);
});
return utils.interpret(results, whitelist);
})
.then(function(summary) {
const goodnews = [
summary.allowed.success.length + " valid programs parsed without error",
summary.allowed.failure.length +
" invalid programs produced a parsing error",
summary.allowed.falsePositive.length +
" invalid programs did not produce a parsing error" +
" (and allowed by the whitelist file)",
summary.allowed.falseNegative.length +
" valid programs produced a parsing error" +
" (and allowed by the whitelist file)",
];
const badnews = [];
const badnewsDetails = [];
void [
{
tests: summary.disallowed.success,
label:
"valid programs parsed without error" +
" (in violation of the whitelist file)",
},
{
tests: summary.disallowed.failure,
label:
"invalid programs produced a parsing error" +
" (in violation of the whitelist file)",
},
{
tests: summary.disallowed.falsePositive,
label:
"invalid programs did not produce a parsing error" +
" (without a corresponding entry in the whitelist file)",
},
{
tests: summary.disallowed.falseNegative,
label:
"valid programs produced a parsing error" +
" (without a corresponding entry in the whitelist file)",
},
{
tests: summary.unrecognized,
label: "non-existent programs specified in the whitelist file",
},
].forEach(function({ tests, label }) {
if (!tests.length) {
return;
}
const desc = tests.length + " " + label;
badnews.push(desc);
badnewsDetails.push(desc + ":");
badnewsDetails.push(
...tests.map(function(test) {
return test.id || test;
})
);
});
console.log("Testing complete.");
console.log("Summary:");
console.log(chalk.green(goodnews.join("\n").replace(/^/gm, " ✔ ")));
if (!summary.passed) {
console.log("");
console.log(chalk.red(badnews.join("\n").replace(/^/gm, " ✘ ")));
console.log("");
console.log("Details:");
console.log(badnewsDetails.join("\n").replace(/^/gm, " "));
}
if (shouldUpdate) {
return utils.updateWhitelist(whitelistFile, summary).then(function() {
console.log("");
console.log("Whitelist file updated.");
});
} else {
process.exitCode = summary.passed ? 0 : 1;
}
const unmappedFeatures = utils.getUnmappedFeatures();
if (unmappedFeatures.size) {
console.log("");
console.log(
"The following Features are not currently mapped or ignored:"
);
console.log(
Array.from(unmappedFeatures)
.join("\n")
.replace(/^/gm, " ")
);
}
})
.catch(function(err) {
console.error(err);
process.exitCode = 1;
});

View File

@@ -1,297 +0,0 @@
"use strict";
const fs = require("graceful-fs");
const promisify = require("util").promisify;
const TestStream = require("test262-stream");
const pfs = {
readFile: promisify(fs.readFile),
writeFile: promisify(fs.writeFile),
readdir: promisify(fs.readdir),
stat: promisify(fs.stat),
};
const parse = require("../../../packages/babel-parser").parse;
const ignoredFeatures = [
"Array.prototype.flat",
"Array.prototype.flatMap",
"Array.prototype.values",
"ArrayBuffer",
"async-functions",
"async-iteration",
"arrow-function",
"Atomics",
"caller",
"class",
"computed-property-names",
"const",
"cross-realm",
"DataView",
"DataView.prototype.getFloat32",
"DataView.prototype.getFloat64",
"DataView.prototype.getInt8",
"DataView.prototype.getInt16",
"DataView.prototype.getInt32",
"DataView.prototype.getUint16",
"DataView.prototype.getUint32",
"DataView.prototype.setUint8",
"default-parameters",
"destructuring-assignment",
"destructuring-binding",
"Float32Array",
"Float64Array",
"for-of",
"generators",
"globalThis",
"hashbang",
"Int8Array",
"Int32Array",
"Intl.ListFormat",
"Intl.Locale",
"Intl.NumberFormat-unified",
"Intl.RelativeTimeFormat",
"Intl.Segmenter",
"IsHTMLDDA",
"json-superset",
"let",
"Map",
"new.target",
"Object.fromEntries",
"Object.is",
"object-rest",
"object-spread",
"optional-catch-binding",
"Promise.prototype.finally",
"Proxy",
"Reflect",
"Reflect.construct",
"Reflect.set",
"Reflect.setPrototypeOf",
"regexp-dotall",
"regexp-lookbehind",
"regexp-named-groups",
"regexp-unicode-property-escapes",
"SharedArrayBuffer",
"Set",
"String.fromCodePoint",
"String.prototype.endsWith",
"String.prototype.includes",
"String.prototype.matchAll",
"String.prototype.trimEnd",
"String.prototype.trimStart",
"string-trimming",
"super",
"Symbol",
"Symbol.asyncIterator",
"Symbol.hasInstance",
"Symbol.isConcatSpreadable",
"Symbol.iterator",
"Symbol.match",
"Symbol.matchAll",
"Symbol.prototype.description",
"Symbol.replace",
"Symbol.search",
"Symbol.split",
"Symbol.species",
"Symbol.toPrimitive",
"Symbol.toStringTag",
"Symbol.unscopables",
"tail-call-optimization",
"template",
"TypedArray",
"u180e",
"Uint8Array",
"Uint8ClampedArray",
"Uint16Array",
"WeakMap",
"WeakSet",
"well-formed-json-stringify",
];
const ignoredTests = ["built-ins/RegExp/", "language/literals/regexp/"];
const featuresToPlugins = {
BigInt: "bigInt",
"class-fields-private": "classPrivateProperties",
"class-fields-public": "classProperties",
"class-methods-private": "classPrivateMethods",
"class-static-fields-public": "classProperties",
"class-static-fields-private": "classPrivateProperties",
"class-static-methods-private": "classPrivateMethods",
"dynamic-import": "dynamicImport",
"export-star-as-namespace-from-module": "exportNamespaceFrom",
"import.meta": "importMeta",
"numeric-separator-literal": "numericSeparator",
"optional-chaining": "optionalChaining",
"top-level-await": "topLevelAwait",
};
function getPlugins(features) {
return (
features &&
features
.map(f => {
if (!featuresToPlugins[f] && !ignoredFeatures.includes(f)) {
unmappedFeatures.add(f);
}
return featuresToPlugins[f];
})
.filter(Boolean)
);
}
const unmappedFeatures = new Set();
exports.getUnmappedFeatures = function() {
return unmappedFeatures;
};
exports.getTests = function(testDir) {
const stream = new TestStream(testDir, {
omitRuntime: true,
});
const tests = [];
stream.on("data", test => {
// strip test/
const fileName = test.file.substr(5);
if (ignoredTests.some(start => fileName.startsWith(start))) return;
tests.push({
contents: test.contents,
fileName,
id: `${fileName}(${test.scenario})`,
sourceType: test.attrs.flags.module ? "module" : "script",
plugins: getPlugins(test.attrs.features),
expectedError:
!!test.attrs.negative &&
(test.attrs.negative.phase === "parse" ||
test.attrs.negative.phase === "early"),
});
});
return new Promise((resolve, reject) => {
stream.on("end", () => resolve(tests));
stream.on("error", reject);
});
};
exports.runTest = function(test) {
try {
parse(test.contents, {
sourceType: test.sourceType,
plugins: test.plugins,
});
test.actualError = false;
} catch (err) {
test.actualError = true;
}
test.result = test.expectedError !== test.actualError ? "fail" : "pass";
return test;
};
exports.getWhitelist = function(filename) {
return pfs.readFile(filename, "utf-8").then(function(contents) {
return contents
.split("\n")
.map(function(line) {
return line.replace(/#.*$/, "").trim();
})
.filter(function(line) {
return line.length > 0;
})
.reduce(function(table, filename) {
table[filename] = true;
return table;
}, Object.create(null));
});
};
exports.updateWhitelist = function(filename, summary) {
return pfs.readFile(filename, "utf-8").then(function(contents) {
const toRemove = summary.disallowed.success
.concat(summary.disallowed.failure)
.map(function(test) {
return test.id;
})
.concat(summary.unrecognized);
const toAdd = summary.disallowed.falsePositive
.concat(summary.disallowed.falseNegative)
.map(function(test) {
return test.id;
});
const newContents = contents
.split("\n")
.map(function(line) {
const testId = line.replace(/#.*$/, "").trim();
if (toRemove.indexOf(testId) > -1) {
return null;
}
return line;
})
.filter(function(line) {
return line !== null && line !== "";
})
.concat(toAdd)
.sort()
.join("\n");
return pfs.writeFile(filename, newContents, "utf-8");
});
};
exports.interpret = function(results, whitelist) {
const summary = {
passed: true,
allowed: {
success: [],
failure: [],
falsePositive: [],
falseNegative: [],
},
disallowed: {
success: [],
failure: [],
falsePositive: [],
falseNegative: [],
},
unrecognized: null,
};
results.forEach(function(result) {
let classification, isAllowed;
const inWhitelist = result.id in whitelist;
delete whitelist[result.id];
if (!result.expectedError) {
if (!result.actualError) {
classification = "success";
isAllowed = !inWhitelist;
} else {
classification = "falseNegative";
isAllowed = inWhitelist;
}
} else {
if (!result.actualError) {
classification = "falsePositive";
isAllowed = inWhitelist;
} else {
classification = "failure";
isAllowed = !inWhitelist;
}
}
summary.passed &= isAllowed;
summary[isAllowed ? "allowed" : "disallowed"][classification].push(result);
});
summary.unrecognized = Object.keys(whitelist);
summary.passed = !!summary.passed && summary.unrecognized.length === 0;
return summary;
};

View File

@@ -191,4 +191,4 @@ language/statements/class/elements/syntax/early-errors/invalid-names/method-outt
language/statements/class/elements/syntax/early-errors/invalid-names/method-outter-member-expression-bad-reference.js(default)
language/statements/class/elements/syntax/early-errors/invalid-names/method-outter-member-expression-bad-reference.js(strict mode)
language/statements/class/elements/syntax/early-errors/invalid-names/method-outter-member-expression-this.js(default)
language/statements/class/elements/syntax/early-errors/invalid-names/method-outter-member-expression-this.js(strict mode)
language/statements/class/elements/syntax/early-errors/invalid-names/method-outter-member-expression-this.js(strict mode)

View File

@@ -0,0 +1,237 @@
"use strict";
const fs = require("fs").promises;
const chalk = require("chalk");
const { parse: parser } = require("../../../packages/babel-parser");
const dot = chalk.gray(".");
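// Shared driver for the external-suite runners (flow, test262): a suite supplies an
// async generator of test cases and may override parse(); the runner handles whitelist
// bookkeeping, reporting, and --update-whitelist.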
class TestRunner {
constructor({
testDir,
whitelist,
logInterval = 1,
shouldUpdate,
getTests,
parse = this.parse,
}) {
this.testDir = testDir;
this.whitelist = whitelist;
this.logInterval = logInterval;
this.shouldUpdate = shouldUpdate;
this.getTests = getTests;
this.parse = parse;
}
async run() {
const whitelistP = this.getWhitelist();
console.log(`Now running tests...`);
const results = [];
for await (const result of this.runTests()) {
results.push(result);
if (results.length % this.logInterval === 0) process.stdout.write(dot);
}
process.stdout.write("\n");
const summary = this.interpret(results, await whitelistP);
await this.output(summary);
}
async *runTests() {
for await (const test of this.getTests()) {
yield this.runTest(test);
}
}
runTest(test) {
try {
this.parse(test, parser);
test.actualError = false;
} catch (err) {
test.actualError = true;
}
test.result = test.expectedError !== test.actualError ? "fail" : "pass";
return test;
}
parse(test, parser) {
parser(test.contents, {
sourceType: test.sourceType,
plugins: test.plugins,
});
}
async getWhitelist() {
const contents = await fs.readFile(this.whitelist, "utf-8");
const table = new Set();
for (const line of contents.split("\n")) {
const filename = line.replace(/#.*$/, "").trim();
if (filename) table.add(filename);
}
return table;
}
async updateWhitelist(summary) {
const contents = await fs.readFile(this.whitelist, "utf-8");
const toRemove = summary.disallowed.success
.concat(summary.disallowed.failure)
.map(test => test.id)
.concat(summary.unrecognized);
const updated = summary.disallowed.falsePositive
.concat(summary.disallowed.falseNegative)
.map(test => test.id);
for (const line of contents.split("\n")) {
const testId = line.replace(/#.*$/, "").trim();
if (!toRemove.includes(testId) && line) {
updated.push(line);
}
}
updated.sort();
await fs.writeFile(this.whitelist, updated.join("\n") + "\n", "utf8");
}
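// Outcomes matching the expectation (success/failure) are allowed unless whitelisted;
// mismatches (falsePositive/falseNegative) are allowed only when whitelisted.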
interpret(results, whitelist) {
const summary = {
passed: true,
allowed: {
success: [],
failure: [],
falsePositive: [],
falseNegative: [],
},
disallowed: {
success: [],
failure: [],
falsePositive: [],
falseNegative: [],
},
unrecognized: null,
count: results.length,
};
results.forEach(function(result) {
let classification, isAllowed;
const inWhitelist = whitelist.has(result.id);
whitelist.delete(result.id);
if (!result.expectedError) {
if (!result.actualError) {
classification = "success";
isAllowed = !inWhitelist;
} else {
classification = "falseNegative";
isAllowed = inWhitelist;
}
} else {
if (!result.actualError) {
classification = "falsePositive";
isAllowed = inWhitelist;
} else {
classification = "failure";
isAllowed = !inWhitelist;
}
}
summary.passed &= isAllowed;
summary[isAllowed ? "allowed" : "disallowed"][classification].push(
result
);
});
summary.unrecognized = Array.from(whitelist);
summary.passed = !!summary.passed && summary.unrecognized.length === 0;
return summary;
}
async output(summary) {
const goodnews = [
summary.allowed.success.length + " valid programs parsed without error",
summary.allowed.failure.length +
" invalid programs produced a parsing error",
summary.allowed.falsePositive.length +
" invalid programs did not produce a parsing error" +
" (and allowed by the whitelist file)",
summary.allowed.falseNegative.length +
" valid programs produced a parsing error" +
" (and allowed by the whitelist file)",
];
const badnews = [];
const badnewsDetails = [];
void [
{
tests: summary.disallowed.success,
label:
"valid programs parsed without error" +
" (in violation of the whitelist file)",
},
{
tests: summary.disallowed.failure,
label:
"invalid programs produced a parsing error" +
" (in violation of the whitelist file)",
},
{
tests: summary.disallowed.falsePositive,
label:
"invalid programs did not produce a parsing error" +
" (without a corresponding entry in the whitelist file)",
},
{
tests: summary.disallowed.falseNegative,
label:
"valid programs produced a parsing error" +
" (without a corresponding entry in the whitelist file)",
},
{
tests: summary.unrecognized,
label: "non-existent programs specified in the whitelist file",
},
].forEach(function({ tests, label }) {
if (!tests.length) {
return;
}
const desc = tests.length + " " + label;
badnews.push(desc);
badnewsDetails.push(desc + ":");
badnewsDetails.push(...tests.map(test => ` ${test.id || test}`));
});
console.log(`Testing complete (${summary.count} tests).`);
console.log("Summary:");
console.log(chalk.green(goodnews.join("\n").replace(/^/gm, " ✔ ")));
if (!summary.passed) {
console.log("");
console.log(chalk.red(badnews.join("\n").replace(/^/gm, " ✘ ")));
console.log("");
console.log("Details:");
console.log(badnewsDetails.join("\n").replace(/^/gm, " "));
}
if (this.shouldUpdate) {
await this.updateWhitelist(summary);
console.log("");
console.log("Whitelist file updated.");
} else {
process.exitCode = summary.passed ? 0 : 1;
}
}
}
module.exports = exports = TestRunner;
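For context, a new external suite would plug into this runner along the lines of the sketch below. This is illustrative only and not part of the commit: the my-suite directory, whitelist file, and inline test case are assumptions; the real adapters are scripts/tests/flow/index.js and scripts/tests/test262/index.js shown above.
// Hypothetical adapter, e.g. scripts/tests/my-suite/index.js (illustrative only)
const path = require("path");
const TestRunner = require("../utils/parser-test-runner");
const runner = new TestRunner({
  testDir: path.join(__dirname, "../../../build/my-suite"),
  // whitelist.txt must already exist (it may be empty); the runner reads and rewrites it.
  whitelist: path.join(__dirname, "whitelist.txt"),
  shouldUpdate: process.argv.includes("--update-whitelist"),
  async *getTests() {
    // Yield one object per test case; the default parse() uses contents,
    // sourceType and plugins, while interpret() keys on id and expectedError.
    yield {
      contents: "let x = 1;",
      fileName: "example.js",
      id: "example.js",
      sourceType: "script",
      plugins: [],
      expectedError: false,
    };
  },
});
runner.run().catch(err => {
  console.error(err);
  process.exitCode = 1;
});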

View File

@@ -7438,6 +7438,11 @@ merge2@^1.2.3:
resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.3.0.tgz#5b366ee83b2f1582c48f87e47cf1a9352103ca81"
integrity sha512-2j4DAdlBOkiSZIsaXk4mTE3sRS02yBHAtfy127xRV3bQUFqXkjHCHLW6Scv7DwNRbIWNHH8zpnz9zMaKXIdvYw==
mergeiterator@^1.2.5:
version "1.2.5"
resolved "https://registry.yarnpkg.com/mergeiterator/-/mergeiterator-1.2.5.tgz#40f0a27888f2eb85d80fa4c29632cf400a0b8c7b"
integrity sha512-KVZWGnRkEgmVewnpVIEUm/KuQ2kHx4T0zYQlPbGypBk4kM5sJ1+7bPDR2N4yg3EoS1CiGKk2GRb5PkAsMEoh3w==
micromatch@^2.1.5:
version "2.3.11"
resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565"