feat(testing): add batch support for jest (#6234)

* feat(testing): add batch support for jest

* chore(testing): add missing deps

* fix(testing): properly structure the data without reduce

* fix(testing): use results.startTime instead of performance.now

* chore(testing): fix typo

* feat(testing): use overrides to pass to the jest process

* feat(testing): get all configs before building results

* feat(testing): use index to get config
This commit is contained in:
Jonathan Cammisuli 2021-07-06 10:29:55 -04:00 committed by GitHub
parent 0fd0fc4d93
commit d491b3eb66
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 274 additions and 12 deletions

View File

@ -9,6 +9,7 @@
"executors": {
"jest": {
"implementation": "./src/executors/jest/jest.impl",
"batchImplementation": "./src/executors/jest/jest.impl#batchJest",
"schema": "./src/executors/jest/schema.json",
"description": "Run Jest unit tests"
}

View File

@ -34,8 +34,13 @@
"dependencies": {
"@nrwl/devkit": "*",
"identity-obj-proxy": "3.0.0",
"jest-resolve": "^26.6.2",
"jest-resolve": "27.0.6",
"rxjs": "^6.5.4",
"tslib": "^2.0.0"
"tslib": "^2.0.0",
"@jest/reporters": "27.0.6",
"@jest/test-result": "27.0.6",
"chalk": "4.1.0",
"jest-config": "27.0.6",
"jest-util": "27.0.6"
}
}

View File

@ -1,8 +1,13 @@
import { runCLI } from 'jest';
import { readConfig } from 'jest-config';
import { utils as jestReporterUtils } from '@jest/reporters';
import { makeEmptyAggregatedTestResult, addResult } from '@jest/test-result';
import * as path from 'path';
import { JestExecutorOptions } from './schema';
import { Config } from '@jest/types';
import { ExecutorContext } from '@nrwl/devkit';
import { ExecutorContext, TaskGraph } from '@nrwl/devkit';
import { join } from 'path';
import { getSummary } from './summary';
try {
require('dotenv').config();
@ -27,16 +32,22 @@ export async function jestExecutor(
export function jestConfigParser(
options: JestExecutorOptions,
context: ExecutorContext
context: ExecutorContext,
multiProjects = false
): Config.Argv {
options.jestConfig = path.resolve(context.root, options.jestConfig);
let jestConfig:
| {
transform: any;
globals: any;
setupFilesAfterEnv: any;
}
| undefined;
const jestConfig: {
transform: any;
globals: any;
setupFilesAfterEnv: any;
// eslint-disable-next-line @typescript-eslint/no-var-requires
} = require(options.jestConfig);
if (!multiProjects) {
options.jestConfig = path.resolve(context.root, options.jestConfig);
jestConfig = require(options.jestConfig);
}
const config: Config.Argv = {
$0: undefined,
@ -69,7 +80,7 @@ export function jestConfigParser(
};
// for backwards compatibility
if (options.setupFile) {
if (options.setupFile && !multiProjects) {
const setupFilesAfterEnvSet = new Set([
...(jestConfig.setupFilesAfterEnv ?? []),
path.resolve(context.root, options.setupFile),
@ -115,3 +126,58 @@ export function jestConfigParser(
}
export default jestExecutor;
/**
 * Batch executor for Jest: runs Jest once over every task root in the graph,
 * then splits the batch-wide results back out into per-task results.
 *
 * @param taskGraph graph whose roots identify the tasks in this batch
 * @param inputs    per-task executor options (source of each task's jestConfig path)
 * @param overrides options applied to the whole batch, parsed into Jest CLI args
 * @param context   workspace executor context (supplies context.root)
 * @returns map of task root -> { success, terminalOutput }
 */
export async function batchJest(
  taskGraph: TaskGraph,
  inputs: Record<string, JestExecutorOptions>,
  overrides: JestExecutorOptions,
  context: ExecutorContext
): Promise<Record<string, { success: boolean; terminalOutput: string }>> {
  // Resolve every task's jest config relative to the workspace root.
  const configPaths = taskGraph.roots.map((root) =>
    path.resolve(context.root, inputs[root].jestConfig)
  );

  // Single Jest invocation over all projects; `true` tells jestConfigParser
  // to skip the per-project handling (jestConfig require, setupFile).
  const { globalConfig, results } = await runCLI(
    jestConfigParser(overrides, context, true),
    [...configPaths]
  );

  const jestTaskExecutionResults: Record<
    string,
    { success: boolean; terminalOutput: string }
  > = {};

  // Read each project's resolved config up front so result headers can be
  // rendered with the matching projectConfig below (configs[i] pairs with
  // taskGraph.roots[i]). Callback param renamed so it no longer shadows the
  // `path` module import; readConfig already returns a promise, so the
  // extra `async` wrapper was dropped.
  const configs = await Promise.all(
    configPaths.map((configPath) =>
      readConfig({ $0: '', _: undefined }, configPath)
    )
  );

  for (let i = 0; i < taskGraph.roots.length; i++) {
    const root = taskGraph.roots[i];

    // Rebuild a per-project aggregate from the batch-wide run.
    const aggregatedResults = makeEmptyAggregatedTestResult();
    aggregatedResults.startTime = results.startTime;

    const projectRoot = join(context.root, taskGraph.tasks[root].projectRoot);
    let resultOutput = '';
    for (const testResult of results.testResults) {
      // A test file belongs to this task if it lives under the project root.
      if (testResult.testFilePath.startsWith(projectRoot)) {
        addResult(aggregatedResults, testResult);
        resultOutput +=
          '\n\r' +
          jestReporterUtils.getResultHeader(
            testResult,
            globalConfig,
            configs[i].projectConfig
          );
      }
    }
    aggregatedResults.numTotalTestSuites = aggregatedResults.testResults.length;

    jestTaskExecutionResults[root] = {
      // NOTE(review): success only checks failed tests, not suites that
      // failed to load (numRuntimeErrorTestSuites) — confirm this is intended.
      success: aggregatedResults.numFailedTests === 0,
      terminalOutput: resultOutput + '\n\r\n\r' + getSummary(aggregatedResults),
    };
  }

  return jestTaskExecutionResults;
}

View File

@ -0,0 +1,190 @@
import { AggregatedResult } from '@jest/reporters';
import { pluralize, formatTime } from 'jest-util';
import * as chalk from 'chalk';
/**
* Copied from the jest repo because these utility functions are not exposed through the package
* https://github.com/facebook/jest/blob/7a64ede2163eba4ecc725f448cd92102cd8c14aa/packages/jest-reporters/src/utils.ts
*/
// Maximum number of characters used when rendering the progress bar.
const PROGRESS_BAR_WIDTH = 40;
// Tallies in-flight test cases by status. Every case counts toward the
// total; only the four recognized statuses increment a specific bucket.
const getValuesCurrentTestCases = (currentTestCases = []) => {
  const counts = {
    numFailingTests: 0,
    numPassingTests: 0,
    numPendingTests: 0,
    numTodoTests: 0,
    numTotalTests: 0,
  };
  for (const { testCaseResult } of currentTestCases) {
    const status = testCaseResult.status;
    if (status === 'failed') {
      counts.numFailingTests++;
    } else if (status === 'passed') {
      counts.numPassingTests++;
    } else if (status === 'skipped') {
      counts.numPendingTests++;
    } else if (status === 'todo') {
      counts.numTodoTests++;
    }
    counts.numTotalTests++;
  }
  return counts;
};
// Renders the "Time:" line, optionally followed by a progress bar when the
// run is still under a non-trivial estimate.
const renderTime = (runTime: number, estimatedTime: number, width: number) => {
  // Highlight the run time when it overshoots the estimate by a second or more.
  const overBudget = estimatedTime && runTime >= estimatedTime + 1;
  const renderedTime = overBudget
    ? chalk.bold.yellow(formatTime(runTime, 0))
    : formatTime(runTime, 0);

  let time = `${chalk.bold(`Time:`)} ${renderedTime}`;
  if (runTime < estimatedTime) {
    time += `, estimated ${formatTime(estimatedTime, 0)}`;
  }

  // Only draw a progress bar when the run will take some time and is still
  // under its estimate, and the caller supplied a terminal width.
  if (estimatedTime > 2 && runTime < estimatedTime && width) {
    const availableWidth = Math.min(PROGRESS_BAR_WIDTH, width);
    const filled = Math.min(
      Math.floor((runTime / estimatedTime) * availableWidth),
      availableWidth
    );
    if (availableWidth >= 2) {
      const bar =
        chalk.green('█').repeat(filled) +
        chalk.white('█').repeat(availableWidth - filled);
      time += '\n' + bar;
    }
  }
  return time;
};
/**
 * Builds the "Test Suites / Tests / Snapshots / Time" summary footer for an
 * AggregatedResult. Copied from jest-reporters (see file header) because jest
 * does not export it; kept close to upstream so it can be diffed/re-synced.
 *
 * @param aggregatedResults result object whose counters feed the summary
 * @param options optional rendering tweaks: in-flight test cases to fold into
 *        the "Tests:" counts, an estimated time, whether to floor the run
 *        time, and a terminal width for the progress bar
 * @returns four newline-joined lines of (chalk-colored) summary text
 */
export const getSummary = (
  aggregatedResults: AggregatedResult,
  options?: {
    currentTestCases?: any;
    estimatedTime?: number;
    roundTime?: boolean;
    width?: number;
  }
): string => {
  // Elapsed seconds since the run started.
  let runTime = (Date.now() - aggregatedResults.startTime) / 1000;
  if (options && options.roundTime) {
    runTime = Math.floor(runTime);
  }

  // Counts for test cases still in flight; all zeros when none are given.
  const valuesForCurrentTestCases = getValuesCurrentTestCases(
    options?.currentTestCases
  );

  const estimatedTime = (options && options.estimatedTime) || 0;

  // Unpack the aggregate counters into locals used by the lines below.
  const snapshotResults = aggregatedResults.snapshot;
  const snapshotsAdded = snapshotResults.added;
  const snapshotsFailed = snapshotResults.unmatched;
  const snapshotsOutdated = snapshotResults.unchecked;
  const snapshotsFilesRemoved = snapshotResults.filesRemoved;
  const snapshotsDidUpdate = snapshotResults.didUpdate;
  const snapshotsPassed = snapshotResults.matched;
  const snapshotsTotal = snapshotResults.total;
  const snapshotsUpdated = snapshotResults.updated;
  const suitesFailed = aggregatedResults.numFailedTestSuites;
  const suitesPassed = aggregatedResults.numPassedTestSuites;
  const suitesPending = aggregatedResults.numPendingTestSuites;
  const suitesRun = suitesFailed + suitesPassed;
  const suitesTotal = aggregatedResults.numTotalTestSuites;
  const testsFailed = aggregatedResults.numFailedTests;
  const testsPassed = aggregatedResults.numPassedTests;
  const testsPending = aggregatedResults.numPendingTests;
  const testsTodo = aggregatedResults.numTodoTests;
  const testsTotal = aggregatedResults.numTotalTests;
  const width = (options && options.width) || 0;

  // "Test Suites:" line — zero counts are omitted via the truthiness checks.
  const suites =
    chalk.bold('Test Suites: ') +
    (suitesFailed ? chalk.bold.red(`${suitesFailed} failed`) + ', ' : '') +
    (suitesPending
      ? chalk.bold.yellow(`${suitesPending} skipped`) + ', '
      : '') +
    (suitesPassed ? chalk.bold.green(`${suitesPassed} passed`) + ', ' : '') +
    (suitesRun !== suitesTotal
      ? suitesRun + ' of ' + suitesTotal
      : suitesTotal) +
    ` total`;

  // "Tests:" counts include the in-flight test cases tallied above.
  const updatedTestsFailed =
    testsFailed + valuesForCurrentTestCases.numFailingTests;
  const updatedTestsPending =
    testsPending + valuesForCurrentTestCases.numPendingTests;
  const updatedTestsTodo = testsTodo + valuesForCurrentTestCases.numTodoTests;
  const updatedTestsPassed =
    testsPassed + valuesForCurrentTestCases.numPassingTests;
  const updatedTestsTotal =
    testsTotal + valuesForCurrentTestCases.numTotalTests;

  const tests =
    chalk.bold('Tests: ') +
    (updatedTestsFailed > 0
      ? chalk.bold.red(`${updatedTestsFailed} failed`) + ', '
      : '') +
    (updatedTestsPending > 0
      ? chalk.bold.yellow(`${updatedTestsPending} skipped`) + ', '
      : '') +
    (updatedTestsTodo > 0
      ? chalk.bold.magenta(`${updatedTestsTodo} todo`) + ', '
      : '') +
    (updatedTestsPassed > 0
      ? chalk.bold.green(`${updatedTestsPassed} passed`) + ', '
      : '') +
    `${updatedTestsTotal} total`;

  // "Snapshots:" line — wording flips between obsolete/removed depending on
  // whether the run updated snapshots (snapshotsDidUpdate).
  const snapshots =
    chalk.bold('Snapshots: ') +
    (snapshotsFailed
      ? chalk.bold.red(`${snapshotsFailed} failed`) + ', '
      : '') +
    (snapshotsOutdated && !snapshotsDidUpdate
      ? chalk.bold.yellow(`${snapshotsOutdated} obsolete`) + ', '
      : '') +
    (snapshotsOutdated && snapshotsDidUpdate
      ? chalk.bold.green(`${snapshotsOutdated} removed`) + ', '
      : '') +
    (snapshotsFilesRemoved && !snapshotsDidUpdate
      ? chalk.bold.yellow(
          pluralize('file', snapshotsFilesRemoved) + ' obsolete'
        ) + ', '
      : '') +
    (snapshotsFilesRemoved && snapshotsDidUpdate
      ? chalk.bold.green(
          pluralize('file', snapshotsFilesRemoved) + ' removed'
        ) + ', '
      : '') +
    (snapshotsUpdated
      ? chalk.bold.green(`${snapshotsUpdated} updated`) + ', '
      : '') +
    (snapshotsAdded
      ? chalk.bold.green(`${snapshotsAdded} written`) + ', '
      : '') +
    (snapshotsPassed
      ? chalk.bold.green(`${snapshotsPassed} passed`) + ', '
      : '') +
    `${snapshotsTotal} total`;

  const time = renderTime(runTime, estimatedTime, width);
  return [suites, tests, snapshots, time].join('\n');
};