feat(core): implement caching
parent 6411a29683
commit d2d9f1cca0
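No commit description was provided; as a rough sketch of what this change enables (assembled from the e2e test and the new tasks-runner code in this diff, so illustrative rather than authoritative), a workspace opts into computation caching by pointing nx.json at the new runner and listing which targets are cacheable:

tasksRunnerOptions: {
  default: {
    runner: '@nrwl/workspace/src/tasks-runner/tasks-runner-v2',
    options: {
      cacheableOperations: ['build', 'lint']
    }
  }
}

With this in place, terminal output and build artifacts for cacheable targets are stored under node_modules/.cache/nx (or under options.cacheDirectory when set) and replayed on a cache hit instead of re-running the task.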
138
e2e/cache.test.ts
Normal file
@ -0,0 +1,138 @@
|
||||
import {
|
||||
ensureProject,
|
||||
forEachCli,
|
||||
listFiles,
|
||||
rmDist,
|
||||
runCLI,
|
||||
runCommand,
|
||||
uniq,
|
||||
updateFile
|
||||
} from './utils';
|
||||
|
||||
forEachCli(() => {
|
||||
describe('Cache', () => {
|
||||
it('should not use cache when it is not enabled', async () => {
|
||||
ensureProject();
|
||||
|
||||
const myapp1 = uniq('myapp1');
|
||||
const myapp2 = uniq('myapp2');
|
||||
runCLI(`generate @nrwl/web:app ${myapp1}`);
|
||||
runCLI(`generate @nrwl/web:app ${myapp2}`);
|
||||
const files = `--files="apps/${myapp1}/src/main.ts,apps/${myapp2}/src/main.ts"`;
|
||||
|
||||
// run without caching
|
||||
// --------------------------------------------
|
||||
const outputWithoutCachingEnabled1 = runCommand(
|
||||
`npm run affected:build -- ${files}`
|
||||
);
|
||||
const filesApp1 = listFiles(`dist/apps/${myapp1}`);
|
||||
const filesApp2 = listFiles(`dist/apps/${myapp2}`);
|
||||
|
||||
expect(outputWithoutCachingEnabled1).not.toContain(
|
||||
'read the output from cache'
|
||||
);
|
||||
|
||||
const outputWithoutCachingEnabled2 = runCommand(
|
||||
`npm run affected:build -- ${files}`
|
||||
);
|
||||
expect(outputWithoutCachingEnabled2).not.toContain(
|
||||
'read the output from cache'
|
||||
);
|
||||
|
||||
// enable caching
|
||||
// --------------------------------------------
|
||||
updateFile('nx.json', c => {
|
||||
const nxJson = JSON.parse(c);
|
||||
nxJson.tasksRunnerOptions = {
|
||||
default: {
|
||||
runner: '@nrwl/workspace/src/tasks-runner/tasks-runner-v2',
|
||||
options: {
|
||||
cacheableOperations: ['build', 'lint']
|
||||
}
|
||||
}
|
||||
};
|
||||
return JSON.stringify(nxJson, null, 2);
|
||||
});
|
||||
|
||||
// run build with caching
|
||||
// --------------------------------------------
|
||||
const outputThatPutsDataIntoCache = runCommand(
|
||||
`npm run affected:build -- ${files}`
|
||||
);
|
||||
// now the data is in cache
|
||||
expect(outputThatPutsDataIntoCache).not.toContain(
|
||||
'read the output from cache'
|
||||
);
|
||||
|
||||
rmDist();
|
||||
|
||||
const outputWithBothBuildTasksCached = runCommand(
|
||||
`npm run affected:build -- ${files}`
|
||||
);
|
||||
expect(outputWithBothBuildTasksCached).toContain(
|
||||
'read the output from cache'
|
||||
);
|
||||
expectCached(outputWithBothBuildTasksCached, [myapp1, myapp2]);
|
||||
expect(listFiles(`dist/apps/${myapp1}`)).toEqual(filesApp1);
|
||||
expect(listFiles(`dist/apps/${myapp2}`)).toEqual(filesApp2);
|
||||
|
||||
// touch myapp1
|
||||
// --------------------------------------------
|
||||
updateFile(`apps/${myapp1}/src/main.ts`, c => {
|
||||
return `${c}\n//some comment`;
|
||||
});
|
||||
const outputWithBuildApp2Cached = runCommand(
|
||||
`npm run affected:build -- ${files}`
|
||||
);
|
||||
expect(outputWithBuildApp2Cached).toContain('read the output from cache');
|
||||
expectCached(outputWithBuildApp2Cached, [myapp2]);
|
||||
|
||||
// touch package.json
|
||||
// --------------------------------------------
|
||||
updateFile(`package.json`, c => {
|
||||
const r = JSON.parse(c);
|
||||
r.description = 'different';
|
||||
return JSON.stringify(r);
|
||||
});
|
||||
const outputWithNoBuildCached = runCommand(
|
||||
`npm run affected:build -- ${files}`
|
||||
);
|
||||
expect(outputWithNoBuildCached).not.toContain(
|
||||
'read the output from cache'
|
||||
);
|
||||
|
||||
// run lint with caching
|
||||
// --------------------------------------------
|
||||
const outputWithNoLintCached = runCommand(
|
||||
`npm run affected:lint -- ${files}`
|
||||
);
|
||||
expect(outputWithNoLintCached).not.toContain(
|
||||
'read the output from cache'
|
||||
);
|
||||
|
||||
const outputWithBothLintTasksCached = runCommand(
|
||||
`npm run affected:lint -- ${files}`
|
||||
);
|
||||
expect(outputWithBothLintTasksCached).toContain(
|
||||
'read the output from cache'
|
||||
);
|
||||
expectCached(outputWithBothLintTasksCached, [
|
||||
myapp1,
|
||||
myapp2,
|
||||
`${myapp1}-e2e`,
|
||||
`${myapp2}-e2e`
|
||||
]);
|
||||
}, 120000);
|
||||
});
|
||||
|
||||
function expectCached(actual: string, expected: string[]) {
|
||||
const section = actual.split('read the output from cache')[1];
|
||||
const r = section
|
||||
.split('\n')
|
||||
.filter(l => l.trim().startsWith('-'))
|
||||
.map(l => l.split('- ')[1].trim());
|
||||
r.sort((a, b) => a.localeCompare(b));
|
||||
expected.sort((a, b) => a.localeCompare(b));
|
||||
expect(r).toEqual(expected);
|
||||
}
|
||||
});
|
||||
@ -1,9 +1,9 @@
|
||||
import { ensureProject, forEachCli } from './utils';
|
||||
import { ensureProject, forEachCli, newProject, runCLI } from './utils';
|
||||
|
||||
forEachCli(() => {
|
||||
forEachCli('angular', () => {
|
||||
describe('create playground', () => {
|
||||
it('create playground', () => {
|
||||
ensureProject();
|
||||
newProject();
|
||||
}, 120000);
|
||||
});
|
||||
});
|
||||
|
||||
@ -2,6 +2,7 @@ import { exec, execSync } from 'child_process';
|
||||
import { readFileSync, renameSync, statSync, writeFileSync } from 'fs';
|
||||
import { ensureDirSync } from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
|
||||
export let cli;
|
||||
|
||||
@ -456,6 +457,10 @@ export function checkFilesExist(...expectedFiles: string[]) {
|
||||
});
|
||||
}
|
||||
|
||||
export function listFiles(dirName: string) {
|
||||
return fs.readdirSync(tmpProjPath(dirName));
|
||||
}
|
||||
|
||||
export function readJson(f: string): any {
|
||||
return JSON.parse(readFile(f));
|
||||
}
|
||||
@ -469,6 +474,10 @@ export function cleanup() {
|
||||
execSync(`rm -rf ${tmpProjPath()}`);
|
||||
}
|
||||
|
||||
export function rmDist() {
|
||||
execSync(`rm -rf ${tmpProjPath()}/dist`);
|
||||
}
|
||||
|
||||
export function getCwd(): string {
|
||||
return process.cwd();
|
||||
}
|
||||
|
||||
@ -80,7 +80,7 @@
|
||||
"@types/jasmine": "~2.8.6",
|
||||
"@types/jasminewd2": "~2.0.3",
|
||||
"@types/jest": "24.0.9",
|
||||
"@types/node": "~8.9.4",
|
||||
"@types/node": "10.17.13",
|
||||
"@types/prettier": "^1.10.0",
|
||||
"@types/react": "16.9.17",
|
||||
"@types/react-dom": "16.9.4",
|
||||
@ -164,7 +164,6 @@
|
||||
"next": "9.1.5",
|
||||
"ng-packagr": "5.7.0",
|
||||
"ngrx-store-freeze": "0.2.4",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"open": "6.4.0",
|
||||
"opn": "^5.3.0",
|
||||
"parse5": "4.0.0",
|
||||
@ -224,7 +223,10 @@
|
||||
"worker-plugin": "3.2.0",
|
||||
"yargs": "^11.0.0",
|
||||
"yargs-parser": "10.0.0",
|
||||
"zone.js": "^0.9.0"
|
||||
"zone.js": "^0.9.0",
|
||||
"tar": "5.0.5",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"hasha": "5.1.0"
|
||||
},
|
||||
"author": "Victor Savkin",
|
||||
"license": "MIT",
|
||||
|
||||
@ -24,6 +24,7 @@
|
||||
"@nrwl/workspace": "*"
|
||||
},
|
||||
"dependencies": {
|
||||
"axios": "^0.19.0"
|
||||
"axios": "^0.19.0",
|
||||
"tar": "5.0.5"
|
||||
}
|
||||
}
|
||||
|
||||
@ -6,12 +6,18 @@ import {
|
||||
} from '@nrwl/workspace/src/tasks-runner/tasks-runner';
|
||||
import { Observable, Subject } from 'rxjs';
|
||||
import {
|
||||
defaultTasksRunner,
|
||||
DefaultTasksRunnerOptions
|
||||
} from '@nrwl/workspace/src/tasks-runner/default-tasks-runner';
|
||||
tasksRunnerV2,
|
||||
DefaultTasksRunnerOptions,
|
||||
RemoteCache
|
||||
} from '@nrwl/workspace/src/tasks-runner/tasks-runner-v2';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import { ProjectGraph } from '@nrwl/workspace/src/core/project-graph';
|
||||
import { NxJson } from '@nrwl/workspace/src/core/shared-interfaces';
|
||||
import { writeFileSync } from 'fs';
|
||||
|
||||
const axios = require('axios');
|
||||
const tar = require('tar');
|
||||
|
||||
interface InsightsTaskRunnerOptions extends DefaultTasksRunnerOptions {
|
||||
insightsUrl?: string;
|
||||
@ -20,8 +26,68 @@ interface InsightsTaskRunnerOptions extends DefaultTasksRunnerOptions {
|
||||
type Context = {
|
||||
projectGraph: ProjectGraph;
|
||||
target: string;
|
||||
nxJson: NxJson;
|
||||
};
|
||||
|
||||
class InsightsRemoteCache implements RemoteCache {
|
||||
constructor(private readonly axiosInstance: any) {}
|
||||
|
||||
async retrieve(hash: string, cacheDirectory: string): Promise<boolean> {
|
||||
try {
|
||||
const resp = await this.axiosInstance({
|
||||
method: 'get',
|
||||
url: `/nx-cache/${hash}`,
|
||||
maxContentLength: 1000 * 1000 * 100
|
||||
});
|
||||
const tg = path.join(cacheDirectory, `${hash}.tg`);
|
||||
writeFileSync(tg, resp.data, { encoding: 'base64' });
|
||||
await tar.x({
|
||||
file: tg,
|
||||
cwd: cacheDirectory
|
||||
});
|
||||
writeFileSync(path.join(cacheDirectory, `${hash}.commit`), 'true');
|
||||
return true;
|
||||
} catch (e) {
|
||||
if (e.response && e.response.status === 404) {
|
||||
// cache miss. print nothing
|
||||
} else if (e.code === 'ECONNREFUSED') {
|
||||
console.error(`Error: Cannot connect to remote cache.`);
|
||||
} else {
|
||||
console.error(e.message);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async store(hash: string, cacheDirectory: string): Promise<boolean> {
|
||||
const tg = path.join(cacheDirectory, `${hash}.tg`);
|
||||
try {
|
||||
await tar.c(
|
||||
{
|
||||
gzip: false,
|
||||
file: tg,
|
||||
cwd: cacheDirectory
|
||||
},
|
||||
[hash]
|
||||
);
|
||||
await this.axiosInstance({
|
||||
method: 'post',
|
||||
url: `/nx-cache/${hash}`,
|
||||
data: { tgz: fs.readFileSync(tg).toString('base64') },
|
||||
maxContentLength: 1000 * 1000 * 50
|
||||
});
|
||||
return true;
|
||||
} catch (e) {
|
||||
if (e.code === 'ECONNREFUSED') {
|
||||
console.error(`Error: Cannot connect to remote cache.`);
|
||||
} else {
|
||||
console.error(e.message);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const insightsTaskRunner: TasksRunner<InsightsTaskRunnerOptions> = (
|
||||
tasks: Task[],
|
||||
options: InsightsTaskRunnerOptions,
|
||||
@ -30,10 +96,11 @@ const insightsTaskRunner: TasksRunner<InsightsTaskRunnerOptions> = (
|
||||
const res = new Subject<AffectedEvent>();
|
||||
|
||||
const notifier = createNotifier(options, context);
|
||||
const remoteCache = createRemoteCache(options);
|
||||
|
||||
let commandResult = true;
|
||||
notifier.startCommand(tasks).then(() => {
|
||||
defaultTasksRunner(tasks, options).subscribe({
|
||||
tasksRunnerV2(tasks, { ...options, remoteCache }, context).subscribe({
|
||||
next: (t: TaskCompleteEvent) => {
|
||||
commandResult = commandResult && t.success;
|
||||
res.next(t);
|
||||
@ -51,6 +118,13 @@ const insightsTaskRunner: TasksRunner<InsightsTaskRunnerOptions> = (
|
||||
return res;
|
||||
};
|
||||
|
||||
function createRemoteCache(options: InsightsTaskRunnerOptions) {
|
||||
// if (!process.env.NX_INSIGHTS_AUTH_TOKEN) {
|
||||
// return undefined;
|
||||
// }
|
||||
return new InsightsRemoteCache(createAxios(options));
|
||||
}
|
||||
|
||||
function createNotifier(
|
||||
options: InsightsTaskRunnerOptions,
|
||||
context: Context
|
||||
@ -70,7 +144,15 @@ function createNotifier(
|
||||
reportSetupError(`NX_INSIGHTS_RUN_ID env variable is not set.`);
|
||||
return new EmptyNotifier();
|
||||
}
|
||||
return new InsightsNotifier(options, context);
|
||||
return new InsightsNotifier(createAxios(options), context);
|
||||
}
|
||||
|
||||
function createAxios(options: InsightsTaskRunnerOptions) {
|
||||
return axios.create({
|
||||
baseURL: options.insightsUrl || 'https://nrwl.api.io',
|
||||
timeout: 30000,
|
||||
headers: { authorization: `auth ${process.env.NX_INSIGHTS_AUTH_TOKEN}` }
|
||||
});
|
||||
}
|
||||
|
||||
function reportSetupError(reason: string) {
|
||||
@ -124,22 +206,16 @@ class EmptyNotifier implements Notifier {
|
||||
}
|
||||
|
||||
class InsightsNotifier implements Notifier {
|
||||
axiosInstance: any;
|
||||
errors: string[] = [];
|
||||
endTaskNotifications = [];
|
||||
|
||||
commandId: string;
|
||||
|
||||
constructor(
|
||||
private readonly options: InsightsTaskRunnerOptions,
|
||||
private readonly axiosInstance: any,
|
||||
private readonly context: Context
|
||||
) {
|
||||
this.commandId = this.generateCommandId();
|
||||
this.axiosInstance = axios.create({
|
||||
baseURL: options.insightsUrl || 'https://nrwl.api.io',
|
||||
timeout: 30000,
|
||||
headers: { authorization: `auth ${this.envOptions.authToken}` }
|
||||
});
|
||||
}
|
||||
|
||||
startCommand(tasks: Task[]) {
|
||||
@ -184,8 +260,7 @@ class InsightsNotifier implements Notifier {
|
||||
private get envOptions() {
|
||||
return {
|
||||
branchId: process.env.NX_INSIGHTS_BRANCH_ID,
|
||||
runId: process.env.NX_INSIGHTS_RUN_ID,
|
||||
authToken: process.env.NX_INSIGHTS_AUTH_TOKEN
|
||||
runId: process.env.NX_INSIGHTS_RUN_ID
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@ -38,7 +38,6 @@
|
||||
"fs-extra": "6.0.0",
|
||||
"graphviz": "0.0.8",
|
||||
"ignore": "5.0.4",
|
||||
"npm-run-all": "4.1.5",
|
||||
"opn": "^5.3.0",
|
||||
"rxjs": "^6.4.0",
|
||||
"semver": "5.4.1",
|
||||
|
||||
@ -78,7 +78,7 @@ function run(
|
||||
protocol: serveOptions.ssl ? 'https' : 'http',
|
||||
hostname: serveOptions.host,
|
||||
port: serveOptions.port.toString(),
|
||||
path: path
|
||||
pathname: path
|
||||
});
|
||||
|
||||
context.logger.info(stripIndents`
|
||||
|
||||
@ -55,6 +55,7 @@
|
||||
"fs-extra": "6.0.0",
|
||||
"ignore": "5.0.4",
|
||||
"npm-run-all": "4.1.5",
|
||||
"hasha": "5.1.0",
|
||||
"opn": "^5.3.0",
|
||||
"rxjs": "^6.4.0",
|
||||
"semver": "5.4.1",
|
||||
|
||||
@ -2,7 +2,7 @@ import { ProjectGraph, ProjectGraphNode } from '../core/project-graph';
|
||||
import { Task } from '../tasks-runner/tasks-runner';
|
||||
import { createTask } from '../tasks-runner/run-command';
|
||||
import { basename } from 'path';
|
||||
import { getCommand, getOutputs } from '../tasks-runner/utils';
|
||||
import { getCommandAsString, getOutputs } from '../tasks-runner/utils';
|
||||
import * as yargs from 'yargs';
|
||||
import { NxArgs } from './utils';
|
||||
import { cliCommand } from '../core/file-utils';
|
||||
@ -54,7 +54,11 @@ function createTasks(
|
||||
id: task.id,
|
||||
overrides: overrides,
|
||||
target: task.target,
|
||||
command: `${isYarn ? 'yarn' : 'npm run'} ${getCommand(cli, isYarn, task)}`,
|
||||
command: `${isYarn ? 'yarn' : 'npm run'} ${getCommandAsString(
|
||||
cli,
|
||||
isYarn,
|
||||
task
|
||||
)}`,
|
||||
outputs: getOutputs(projectGraph.nodes, task)
|
||||
}));
|
||||
}
|
||||
|
||||
142
packages/workspace/src/tasks-runner/cache.ts
Normal file
@ -0,0 +1,142 @@
|
||||
import { appRootPath } from '../utils/app-root';
|
||||
import { ProjectGraph } from '../core/project-graph';
|
||||
import { NxJson } from '../core/shared-interfaces';
|
||||
import { Task } from './tasks-runner';
|
||||
import {
|
||||
existsSync,
|
||||
mkdirSync,
|
||||
readFileSync,
|
||||
rmdirSync,
|
||||
writeFileSync
|
||||
} from 'fs';
|
||||
import { join } from 'path';
|
||||
import { Hasher } from './hasher';
|
||||
import * as fsExtra from 'fs-extra';
|
||||
import { DefaultTasksRunnerOptions } from './tasks-runner-v2';
|
||||
|
||||
export type CachedResult = { terminalOutput: string; outputsPath: string };
|
||||
export type TaskWithCachedResult = { task: Task; cachedResult: CachedResult };
|
||||
|
||||
export class Cache {
|
||||
root = appRootPath;
|
||||
cachePath = this.createCacheDir();
|
||||
hasher = new Hasher(this.projectGraph, this.nxJson);
|
||||
|
||||
constructor(
|
||||
private readonly projectGraph: ProjectGraph,
|
||||
private readonly nxJson: NxJson,
|
||||
private readonly options: DefaultTasksRunnerOptions
|
||||
) {}
|
||||
|
||||
async get(task: Task): Promise<CachedResult> {
|
||||
if (!this.cacheable(task)) return null;
|
||||
|
||||
const res = await this.getFromLocalDir(task);
|
||||
|
||||
// didn't find it locally but we have a remote cache
|
||||
if (!res && this.options.remoteCache) {
|
||||
// attempt remote cache
|
||||
await this.options.remoteCache.retrieve(
|
||||
await this.hasher.hash(task),
|
||||
this.cachePath
|
||||
);
|
||||
// try again from local cache
|
||||
return this.getFromLocalDir(task);
|
||||
} else {
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
||||
async put(task: Task, terminalOutput: string, folders: string[]) {
|
||||
if (!this.cacheable(task)) return;
|
||||
const hash = await this.hasher.hash(task);
|
||||
const td = join(this.cachePath, hash);
|
||||
const tdCommit = join(this.cachePath, `${hash}.commit`);
|
||||
|
||||
// might be leftovers from partially-completed cache invocations
|
||||
if (existsSync(td)) {
|
||||
fsExtra.removeSync(td);
|
||||
}
|
||||
if (existsSync(tdCommit)) {
|
||||
fsExtra.removeSync(tdCommit);
|
||||
}
|
||||
|
||||
mkdirSync(td);
|
||||
writeFileSync(join(td, 'terminalOutput'), terminalOutput);
|
||||
|
||||
mkdirSync(join(td, 'outputs'));
|
||||
folders.forEach(f => {
|
||||
const srcDir = join(this.root, f);
|
||||
if (existsSync(srcDir)) {
|
||||
const cachedDir = join(td, 'outputs', f);
|
||||
mkdirSync(cachedDir, { recursive: true });
|
||||
fsExtra.copySync(srcDir, cachedDir);
|
||||
}
|
||||
});
|
||||
// we need this file to account for partial writes to the cache folder.
|
||||
// creating this file is atomic, whereas creating a folder is not.
|
||||
// so if the process gets terminated while we are copying stuff into cache,
|
||||
// the cache entry won't be used.
|
||||
writeFileSync(tdCommit, 'true');
|
||||
|
||||
if (this.options.remoteCache) {
|
||||
await this.options.remoteCache.store(
|
||||
await this.hasher.hash(task),
|
||||
this.cachePath
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
copyFilesFromCache(cachedResult: CachedResult, outputs: string[]) {
|
||||
outputs.forEach(f => {
|
||||
const cachedDir = join(cachedResult.outputsPath, f);
|
||||
if (existsSync(cachedDir)) {
|
||||
const srcDir = join(this.root, f);
|
||||
if (existsSync(srcDir)) {
|
||||
fsExtra.removeSync(srcDir);
|
||||
}
|
||||
mkdirSync(srcDir, { recursive: true });
|
||||
fsExtra.copySync(cachedDir, srcDir);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private async getFromLocalDir(task: Task) {
|
||||
const hash = await this.hasher.hash(task);
|
||||
const tdCommit = join(this.cachePath, `${hash}.commit`);
|
||||
const td = join(this.cachePath, hash);
|
||||
|
||||
if (existsSync(tdCommit)) {
|
||||
return {
|
||||
terminalOutput: readFileSync(join(td, 'terminalOutput')).toString(),
|
||||
outputsPath: join(td, 'outputs')
|
||||
};
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private cacheable(task: Task) {
|
||||
return (
|
||||
this.options.cacheableOperations &&
|
||||
this.options.cacheableOperations.indexOf(task.target.target) > -1
|
||||
);
|
||||
}
|
||||
|
||||
private createCacheDir() {
|
||||
let dir;
|
||||
if (this.options.cacheDirectory) {
|
||||
if (this.options.cacheDirectory.startsWith('./')) {
|
||||
dir = join(this.root, this.options.cacheDirectory);
|
||||
} else {
|
||||
dir = this.options.cacheDirectory;
|
||||
}
|
||||
} else {
|
||||
dir = join(this.root, 'node_modules', '.cache', 'nx');
|
||||
}
|
||||
if (!existsSync(dir)) {
|
||||
mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
return dir;
|
||||
}
|
||||
}
|
||||
@ -45,7 +45,8 @@ export class DefaultReporter {
|
||||
printResults(
|
||||
affectedArgs: ReporterArgs,
|
||||
failedProjectNames: string[],
|
||||
startedWithFailedProjects: boolean
|
||||
startedWithFailedProjects: boolean,
|
||||
cachedProjectNames: string[]
|
||||
) {
|
||||
output.addNewline();
|
||||
output.addVerticalSeparator();
|
||||
@ -67,27 +68,36 @@ export class DefaultReporter {
|
||||
]
|
||||
});
|
||||
}
|
||||
return;
|
||||
} else {
|
||||
const bodyLines = [
|
||||
output.colors.gray('Failed projects:'),
|
||||
'',
|
||||
...failedProjectNames.map(
|
||||
project => `${output.colors.gray('-')} ${project}`
|
||||
)
|
||||
];
|
||||
if (!affectedArgs.onlyFailed && !startedWithFailedProjects) {
|
||||
bodyLines.push('');
|
||||
bodyLines.push(
|
||||
`${output.colors.gray(
|
||||
'You can isolate the above projects by passing:'
|
||||
)} ${output.bold('--only-failed')}`
|
||||
);
|
||||
}
|
||||
output.error({
|
||||
title: `Running target "${affectedArgs.target}" failed`,
|
||||
bodyLines
|
||||
});
|
||||
}
|
||||
|
||||
const bodyLines = [
|
||||
output.colors.gray('Failed projects:'),
|
||||
'',
|
||||
...failedProjectNames.map(
|
||||
if (cachedProjectNames.length > 0) {
|
||||
const bodyLines = cachedProjectNames.map(
|
||||
project => `${output.colors.gray('-')} ${project}`
|
||||
)
|
||||
];
|
||||
if (!affectedArgs.onlyFailed && !startedWithFailedProjects) {
|
||||
bodyLines.push('');
|
||||
bodyLines.push(
|
||||
`${output.colors.gray(
|
||||
'You can isolate the above projects by passing:'
|
||||
)} ${output.bold('--only-failed')}`
|
||||
);
|
||||
output.note({
|
||||
title: `Nx read the output from cache instead of running the command for the following projects:`,
|
||||
bodyLines
|
||||
});
|
||||
}
|
||||
output.error({
|
||||
title: `Running target "${affectedArgs.target}" failed`,
|
||||
bodyLines
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,214 +0,0 @@
|
||||
import defaultTaskRunner, {
|
||||
splitTasksIntoStages
|
||||
} from './default-tasks-runner';
|
||||
import { AffectedEventType } from './tasks-runner';
|
||||
import * as runAll from 'npm-run-all';
|
||||
import { DependencyType } from '@nrwl/workspace/src/core/project-graph';
|
||||
|
||||
jest.mock('npm-run-all', () => jest.fn());
|
||||
jest.mock('../core/file-utils', () => ({
|
||||
cliCommand: () => 'nx'
|
||||
}));
|
||||
jest.mock('../utils/fileutils', () => ({
|
||||
readJsonFile: () => ({
|
||||
scripts: {
|
||||
nx: 'nx'
|
||||
}
|
||||
})
|
||||
}));
|
||||
|
||||
describe('defaultTasksRunner', () => {
|
||||
const tasks = [
|
||||
{
|
||||
id: 'task-1',
|
||||
target: {
|
||||
project: 'app-1',
|
||||
target: 'target'
|
||||
},
|
||||
overrides: {}
|
||||
},
|
||||
{
|
||||
id: 'task-2',
|
||||
target: {
|
||||
project: 'app-2',
|
||||
target: 'target'
|
||||
},
|
||||
overrides: {}
|
||||
}
|
||||
];
|
||||
|
||||
const context = {
|
||||
dependencyGraph: {
|
||||
projects: {
|
||||
'app-1': { architect: { target: {} } },
|
||||
'app-2': { architect: { target: {} } }
|
||||
},
|
||||
dependencies: {
|
||||
'app-1': [],
|
||||
'app-2': []
|
||||
},
|
||||
roots: ['app-1', 'app-2']
|
||||
},
|
||||
tasksMap: {
|
||||
'app-1': {
|
||||
target: tasks[0]
|
||||
},
|
||||
'app-2': {
|
||||
target: tasks[1]
|
||||
}
|
||||
}
|
||||
} as any;
|
||||
|
||||
it('should pass the right options when options are passed', done => {
|
||||
runAll.mockImplementation(() => Promise.resolve());
|
||||
defaultTaskRunner(
|
||||
tasks,
|
||||
{
|
||||
parallel: true,
|
||||
maxParallel: 5
|
||||
},
|
||||
context
|
||||
).subscribe({
|
||||
complete: () => {
|
||||
expect(runAll).toHaveBeenCalledWith(
|
||||
jasmine.any(Array),
|
||||
jasmine.objectContaining({
|
||||
parallel: true,
|
||||
maxParallel: 5
|
||||
})
|
||||
);
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('should run emit task complete events when "run-all-prerender" resolves', done => {
|
||||
runAll.mockImplementation(() => Promise.resolve());
|
||||
let i = 0;
|
||||
const expected = [
|
||||
{
|
||||
task: tasks[0],
|
||||
type: AffectedEventType.TaskComplete,
|
||||
success: true
|
||||
},
|
||||
{
|
||||
task: tasks[1],
|
||||
type: AffectedEventType.TaskComplete,
|
||||
success: true
|
||||
}
|
||||
];
|
||||
defaultTaskRunner(tasks, {}, context).subscribe({
|
||||
next: event => {
|
||||
expect(event).toEqual(expected[i++]);
|
||||
},
|
||||
complete: done
|
||||
});
|
||||
});
|
||||
|
||||
it('should run emit task complete events when "run-all-prerender" rejects', done => {
|
||||
runAll.mockImplementation(() =>
|
||||
Promise.reject({
|
||||
results: [
|
||||
{
|
||||
code: 0
|
||||
},
|
||||
{
|
||||
code: 1
|
||||
}
|
||||
]
|
||||
})
|
||||
);
|
||||
let i = 0;
|
||||
const expected = [
|
||||
{
|
||||
task: tasks[0],
|
||||
type: AffectedEventType.TaskComplete,
|
||||
success: true
|
||||
},
|
||||
{
|
||||
task: tasks[1],
|
||||
type: AffectedEventType.TaskComplete,
|
||||
success: false
|
||||
}
|
||||
];
|
||||
defaultTaskRunner(tasks, {}, context).subscribe({
|
||||
next: event => {
|
||||
expect(event).toEqual(expected[i++]);
|
||||
},
|
||||
complete: done
|
||||
});
|
||||
});
|
||||
|
||||
describe('splitTasksIntoStages', () => {
|
||||
it('should return empty for an empty array', () => {
|
||||
const stages = splitTasksIntoStages([], { nodes: {}, dependencies: {} });
|
||||
expect(stages).toEqual([]);
|
||||
});
|
||||
|
||||
it('should split tasks into stages based on their dependencies', () => {
|
||||
const stages = splitTasksIntoStages(
|
||||
[
|
||||
{
|
||||
target: { project: 'parent' }
|
||||
},
|
||||
{
|
||||
target: { project: 'child1' }
|
||||
},
|
||||
{
|
||||
target: { project: 'child2' }
|
||||
},
|
||||
{
|
||||
target: { project: 'grandparent' }
|
||||
}
|
||||
] as any,
|
||||
{
|
||||
nodes: {},
|
||||
dependencies: {
|
||||
child1: [],
|
||||
child2: [],
|
||||
parent: [
|
||||
{
|
||||
source: 'parent',
|
||||
target: 'child1',
|
||||
type: DependencyType.static
|
||||
},
|
||||
{
|
||||
source: 'parent',
|
||||
target: 'child2',
|
||||
type: DependencyType.static
|
||||
}
|
||||
],
|
||||
grandparent: [
|
||||
{
|
||||
source: 'grandparent',
|
||||
target: 'parent',
|
||||
type: DependencyType.static
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
expect(stages).toEqual([
|
||||
[
|
||||
{
|
||||
target: { project: 'child1' }
|
||||
},
|
||||
{
|
||||
target: { project: 'child2' }
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
target: { project: 'parent' }
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
target: { project: 'grandparent' }
|
||||
}
|
||||
]
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -9,9 +9,10 @@ import {
|
||||
} from './tasks-runner';
|
||||
import { output } from '../utils/output';
|
||||
import { readJsonFile } from '../utils/fileutils';
|
||||
import { getCommand } from './utils';
|
||||
import { getCommand, getCommandAsString } from './utils';
|
||||
import { cliCommand } from '../core/file-utils';
|
||||
import { ProjectGraph } from '../core/project-graph';
|
||||
import { NxJson } from '@nrwl/workspace/src/core/shared-interfaces';
|
||||
|
||||
export interface DefaultTasksRunnerOptions {
|
||||
parallel?: boolean;
|
||||
@ -75,7 +76,7 @@ export function splitTasksIntoStages(
|
||||
export const defaultTasksRunner: TasksRunner<DefaultTasksRunnerOptions> = (
|
||||
tasks: Task[],
|
||||
options: DefaultTasksRunnerOptions,
|
||||
context: { target: string; projectGraph: ProjectGraph }
|
||||
context: { target: string; projectGraph: ProjectGraph; nxJson: NxJson }
|
||||
): Observable<TaskCompleteEvent> => {
|
||||
return new Observable(subscriber => {
|
||||
runTasks(tasks, options, context)
|
||||
@ -110,7 +111,9 @@ async function runTasks(
|
||||
for (let i = 0; i < stages.length; ++i) {
|
||||
const tasksInStage = stages[i];
|
||||
try {
|
||||
const commands = tasksInStage.map(t => getCommand(cli, isYarn, t));
|
||||
const commands = tasksInStage.map(t =>
|
||||
getCommandAsString(cli, isYarn, t)
|
||||
);
|
||||
await runAll(commands, {
|
||||
parallel: options.parallel || false,
|
||||
maxParallel: options.maxParallel || 3,
|
||||
|
||||
93
packages/workspace/src/tasks-runner/hasher.ts
Normal file
@ -0,0 +1,93 @@
|
||||
import { ProjectGraph, ProjectGraphNode } from '../core/project-graph';
|
||||
import { NxJson } from '../core/shared-interfaces';
|
||||
import { Task } from './tasks-runner';
|
||||
import { statSync } from 'fs';
|
||||
const hasha = require('hasha');
|
||||
|
||||
export class Hasher {
|
||||
static version = '1.0';
|
||||
implicitDependencies: string;
|
||||
hashes: { [k: string]: string } = {};
|
||||
|
||||
constructor(
|
||||
private readonly projectGraph: ProjectGraph,
|
||||
private readonly nxJson: NxJson
|
||||
) {}
|
||||
|
||||
async hash(task: Task): Promise<string> {
|
||||
const ps = await Promise.all(
|
||||
this.traverseInDepthFirstOrder(task).map(p => this.hashProjectNode(p))
|
||||
);
|
||||
const implicits = await this.implicitDepsHash();
|
||||
return hasha(
|
||||
[
|
||||
Hasher.version,
|
||||
task.target.project || '',
|
||||
task.target.target || '',
|
||||
task.target.configuration || '',
|
||||
JSON.stringify(task.overrides),
|
||||
implicits,
|
||||
...ps
|
||||
],
|
||||
{ algorithm: 'sha256' }
|
||||
);
|
||||
}
|
||||
|
||||
private traverseInDepthFirstOrder(task: Task): ProjectGraphNode[] {
|
||||
const r = [];
|
||||
this.traverseNode(task.target.project, r);
|
||||
return r.map(rr => this.projectGraph.nodes[rr]);
|
||||
}
|
||||
|
||||
private traverseNode(project: string, acc: string[]): void {
|
||||
if (acc.indexOf(project) > -1) return;
|
||||
acc.push(project);
|
||||
(this.projectGraph.dependencies[project] || [])
|
||||
.map(t => t.target)
|
||||
.forEach(r => {
|
||||
this.traverseNode(r, acc);
|
||||
});
|
||||
}
|
||||
|
||||
private async hashProjectNode(p: ProjectGraphNode) {
|
||||
if (this.hashes[p.name]) {
|
||||
return this.hashes[p.name];
|
||||
} else {
|
||||
const values = await Promise.all(
|
||||
p.data.files.map(f => this.readFileContents(f.file))
|
||||
);
|
||||
const r = hasha(values, { algorithm: 'sha256' });
|
||||
this.hashes[p.name] = r;
|
||||
return r;
|
||||
}
|
||||
}
|
||||
|
||||
private async implicitDepsHash() {
|
||||
if (this.implicitDependencies) return this.implicitDependencies;
|
||||
|
||||
const values = await Promise.all([
|
||||
...Object.keys(this.nxJson.implicitDependencies).map(r =>
|
||||
this.readFileContents(r)
|
||||
),
|
||||
this.readFileContents('package-lock.json'),
|
||||
this.readFileContents('yarn.lock')
|
||||
]);
|
||||
this.implicitDependencies = hasha(values, { algorithm: 'sha256' });
|
||||
return this.implicitDependencies;
|
||||
}
|
||||
|
||||
private readFileContents(path: string): Promise<string> {
|
||||
try {
|
||||
const stats = statSync(path);
|
||||
const fileSizeInMegabytes = stats.size / 1000000;
|
||||
// large binary file, skip it
|
||||
if (fileSizeInMegabytes > 5) {
|
||||
return Promise.resolve(stats.size.toString());
|
||||
} else {
|
||||
return hasha.fromFile(path, { algorithm: 'sha256' });
|
||||
}
|
||||
} catch (e) {
|
||||
return Promise.resolve('');
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,5 +1,5 @@
|
||||
import { TasksRunner } from './tasks-runner';
|
||||
import defaultTasksRunner from './default-tasks-runner';
|
||||
import defaultTaskRunner from './default-tasks-runner';
|
||||
import { getRunner } from './run-command';
|
||||
import { NxJson } from '../core/shared-interfaces';
|
||||
|
||||
@ -24,7 +24,7 @@ describe('getRunner', () => {
|
||||
overrides
|
||||
);
|
||||
|
||||
expect(tasksRunner).toEqual(defaultTasksRunner);
|
||||
expect(tasksRunner).toEqual(defaultTaskRunner);
|
||||
expect(tasksOptions).toEqual(overrides);
|
||||
});
|
||||
|
||||
@ -35,7 +35,7 @@ describe('getRunner', () => {
|
||||
overrides
|
||||
);
|
||||
|
||||
expect(tasksRunner).toEqual(defaultTasksRunner);
|
||||
expect(tasksRunner).toEqual(defaultTaskRunner);
|
||||
expect(tasksOptions).toEqual(overrides);
|
||||
});
|
||||
|
||||
|
||||
@ -1,9 +1,4 @@
|
||||
import {
|
||||
AffectedEventType,
|
||||
Task,
|
||||
TaskCompleteEvent,
|
||||
TasksRunner
|
||||
} from './tasks-runner';
|
||||
import { AffectedEventType, Task, TasksRunner } from './tasks-runner';
|
||||
import { defaultTasksRunner } from './default-tasks-runner';
|
||||
import { isRelativePath } from '../utils/fileutils';
|
||||
import { join } from 'path';
|
||||
@ -39,14 +34,22 @@ export function runCommand<T extends RunArgs>(
|
||||
nxJson,
|
||||
overrides
|
||||
);
|
||||
const cached = [];
|
||||
tasksRunner(tasks, tasksOptions, {
|
||||
target: nxArgs.target,
|
||||
projectGraph
|
||||
projectGraph,
|
||||
nxJson
|
||||
}).subscribe({
|
||||
next: (event: TaskCompleteEvent) => {
|
||||
next: (event: any) => {
|
||||
switch (event.type) {
|
||||
case AffectedEventType.TaskComplete: {
|
||||
workspace.setResult(event.task.target.project, event.success);
|
||||
break;
|
||||
}
|
||||
case AffectedEventType.TaskCacheRead: {
|
||||
workspace.setResult(event.task.target.project, event.success);
|
||||
cached.push(event.task.target.project);
|
||||
break;
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -59,7 +62,8 @@ export function runCommand<T extends RunArgs>(
|
||||
reporter.printResults(
|
||||
nxArgs,
|
||||
workspace.failedProjects,
|
||||
workspace.startedWithFailedProjects
|
||||
workspace.startedWithFailedProjects,
|
||||
cached
|
||||
);
|
||||
|
||||
if (workspace.hasFailure) {
|
||||
@ -143,7 +147,6 @@ export function getRunner(
|
||||
let tasksRunner = require(modulePath);
|
||||
// to support both babel and ts formats
|
||||
if (tasksRunner.default) {
|
||||
throw new Error('boom');
|
||||
tasksRunner = tasksRunner.default;
|
||||
}
|
||||
|
||||
|
||||
137
packages/workspace/src/tasks-runner/task-orchestrator.ts
Normal file
@ -0,0 +1,137 @@
|
||||
import { Cache, TaskWithCachedResult } from './cache';
|
||||
import { cliCommand } from '../core/file-utils';
|
||||
import { NxJson } from '../core/shared-interfaces';
|
||||
import { ProjectGraph } from '../core/project-graph';
|
||||
import { AffectedEventType, Task } from './tasks-runner';
|
||||
import { getCommand, getOutputs } from './utils';
|
||||
import { basename } from 'path';
|
||||
import { spawn } from 'child_process';
|
||||
import { DefaultTasksRunnerOptions } from './tasks-runner-v2';
|
||||
import { output } from '../utils/output';
|
||||
|
||||
export class TaskOrchestrator {
|
||||
cache = new Cache(this.projectGraph, this.nxJson, this.options);
|
||||
cli = cliCommand();
|
||||
isYarn = basename(process.env.npm_execpath || 'npm').startsWith('yarn');
|
||||
|
||||
constructor(
|
||||
private readonly nxJson: NxJson,
|
||||
private readonly projectGraph: ProjectGraph,
|
||||
private readonly options: DefaultTasksRunnerOptions
|
||||
) {}
|
||||
|
||||
async run(tasksInStage: Task[]) {
|
||||
const { cached, rest } = await this.splitTasksIntoCachedAndNotCached(
|
||||
tasksInStage
|
||||
);
|
||||
|
||||
const r1 = await this.applyCachedResults(cached);
|
||||
const r2 = await this.runRest(rest);
|
||||
return [...r1, ...r2];
|
||||
}
|
||||
|
||||
private async runRest(tasks: Task[]) {
|
||||
const left = [...tasks];
|
||||
const res = [];
|
||||
|
||||
const that = this;
|
||||
|
||||
function takeFromQueue() {
|
||||
if (left.length > 0) {
|
||||
const task = left.pop();
|
||||
return that
|
||||
.spawnProcess(task)
|
||||
.then(code => {
|
||||
res.push({
|
||||
task,
|
||||
success: code === 0,
|
||||
type: AffectedEventType.TaskComplete
|
||||
});
|
||||
})
|
||||
.then(takeFromQueue)
|
||||
.catch(takeFromQueue);
|
||||
} else {
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
}
|
||||
|
||||
const wait = [];
|
||||
// initial seeding
|
||||
const maxParallel = this.options.parallel
|
||||
? this.options.maxParallel || 3
|
||||
: 1;
|
||||
for (let i = 0; i < maxParallel; ++i) {
|
||||
wait.push(takeFromQueue());
|
||||
}
|
||||
await Promise.all(wait);
|
||||
return res;
|
||||
}
|
||||
|
||||
private async splitTasksIntoCachedAndNotCached(
|
||||
tasks: Task[]
|
||||
): Promise<{ cached: TaskWithCachedResult[]; rest: Task[] }> {
|
||||
const cached: TaskWithCachedResult[] = [];
|
||||
const rest: Task[] = [];
|
||||
await Promise.all(
|
||||
tasks.map(async task => {
|
||||
const cachedResult = await this.cache.get(task);
|
||||
if (cachedResult) {
|
||||
cached.push({ task, cachedResult });
|
||||
} else {
|
||||
rest.push(task);
|
||||
}
|
||||
})
|
||||
);
|
||||
return { cached, rest };
|
||||
}
|
||||
|
||||
private applyCachedResults(tasks: TaskWithCachedResult[]) {
|
||||
tasks.forEach(t => {
|
||||
output.note({ title: `Cached Output:` });
|
||||
process.stdout.write(t.cachedResult.terminalOutput);
|
||||
const outputs = getOutputs(this.projectGraph.nodes, t.task);
|
||||
this.cache.copyFilesFromCache(t.cachedResult, outputs);
|
||||
});
|
||||
|
||||
return tasks.reduce((m, c) => {
|
||||
m.push({
|
||||
task: c.task,
|
||||
type: AffectedEventType.TaskCacheRead,
|
||||
success: true
|
||||
});
|
||||
return m;
|
||||
}, []);
|
||||
}
|
||||
|
||||
private spawnProcess(task: Task) {
|
||||
const taskOutputs = getOutputs(this.projectGraph.nodes, task);
|
||||
return new Promise(res => {
|
||||
const command = this.isYarn ? 'yarn' : 'npm';
|
||||
const commandArgs = this.isYarn
|
||||
? getCommand(this.cli, this.isYarn, task)
|
||||
: ['run', ...getCommand(this.cli, this.isYarn, task)];
|
||||
const p = spawn(command, commandArgs, {
|
||||
stdio: [process.stdin, 'pipe', 'pipe'],
|
||||
env: { ...process.env, FORCE_COLOR: 'true' }
|
||||
});
|
||||
|
||||
let out = [];
|
||||
|
||||
p.stdout.on('data', data => {
|
||||
out.push(data);
|
||||
process.stdout.write(data);
|
||||
});
|
||||
|
||||
p.stderr.on('data', data => {
|
||||
out.push(data);
|
||||
process.stderr.write(data);
|
||||
});
|
||||
|
||||
p.on('close', code => {
|
||||
this.cache.put(task, out.join(''), taskOutputs).then(() => {
|
||||
res(code);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
75
packages/workspace/src/tasks-runner/task-orderer.spec.ts
Normal file
@ -0,0 +1,75 @@
|
||||
import { TaskOrderer } from './task-orderer';
|
||||
import { DependencyType } from '../core/project-graph';
|
||||
|
||||
describe('TaskStages', () => {
|
||||
it('should return empty for an empty array', () => {
|
||||
const stages = new TaskOrderer('build', {
|
||||
nodes: {},
|
||||
dependencies: {}
|
||||
}).splitTasksIntoStages([]);
|
||||
expect(stages).toEqual([]);
|
||||
});
|
||||
|
||||
it('should split tasks into stages based on their dependencies', () => {
|
||||
const stages = new TaskOrderer('build', {
|
||||
nodes: {},
|
||||
dependencies: {
|
||||
child1: [],
|
||||
child2: [],
|
||||
parent: [
|
||||
{
|
||||
source: 'parent',
|
||||
target: 'child1',
|
||||
type: DependencyType.static
|
||||
},
|
||||
{
|
||||
source: 'parent',
|
||||
target: 'child2',
|
||||
type: DependencyType.static
|
||||
}
|
||||
],
|
||||
grandparent: [
|
||||
{
|
||||
source: 'grandparent',
|
||||
target: 'parent',
|
||||
type: DependencyType.static
|
||||
}
|
||||
]
|
||||
}
|
||||
}).splitTasksIntoStages([
|
||||
{
|
||||
target: { project: 'parent' }
|
||||
},
|
||||
{
|
||||
target: { project: 'child1' }
|
||||
},
|
||||
{
|
||||
target: { project: 'child2' }
|
||||
},
|
||||
{
|
||||
target: { project: 'grandparent' }
|
||||
}
|
||||
] as any);
|
||||
|
||||
expect(stages).toEqual([
|
||||
[
|
||||
{
|
||||
target: { project: 'child1' }
|
||||
},
|
||||
{
|
||||
target: { project: 'child2' }
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
target: { project: 'parent' }
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
target: { project: 'grandparent' }
|
||||
}
|
||||
]
|
||||
]);
|
||||
});
|
||||
});
|
||||
59
packages/workspace/src/tasks-runner/task-orderer.ts
Normal file
@ -0,0 +1,59 @@
|
||||
import { ProjectGraph } from '../core/project-graph';
|
||||
import { Task } from './tasks-runner';
|
||||
|
||||
export class TaskOrderer {
|
||||
constructor(
|
||||
private readonly target: string,
|
||||
private readonly projectGraph: ProjectGraph
|
||||
) {}
|
||||
|
||||
splitTasksIntoStages(tasks: Task[]) {
|
||||
if (this.target !== 'build') return [tasks];
|
||||
if (tasks.length === 0) return [];
|
||||
const res = [];
|
||||
this.topologicallySortTasks(tasks).forEach(t => {
|
||||
const stageWithNoDeps = res.find(
|
||||
tasksInStage => !this.taskDependsOnDeps(t, tasksInStage)
|
||||
);
|
||||
if (stageWithNoDeps) {
|
||||
stageWithNoDeps.push(t);
|
||||
} else {
|
||||
res.push([t]);
|
||||
}
|
||||
});
|
||||
return res;
|
||||
}
|
||||
|
||||
private taskDependsOnDeps(task: Task, deps: Task[]) {
|
||||
const g = this.projectGraph;
|
||||
|
||||
function hasDep(source: string, target: string, visitedProjects: string[]) {
|
||||
if (!g.dependencies[source]) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (g.dependencies[source].find(d => d.target === target)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return !!g.dependencies[source].find(r => {
|
||||
if (visitedProjects.indexOf(r.target) > -1) return null;
|
||||
return hasDep(r.target, target, [...visitedProjects, r.target]);
|
||||
});
|
||||
}
|
||||
|
||||
return !!deps.find(dep =>
|
||||
hasDep(task.target.project, dep.target.project, [])
|
||||
);
|
||||
}
|
||||
|
||||
private topologicallySortTasks(tasks: Task[]) {
|
||||
const sortedTasks = [...tasks];
|
||||
sortedTasks.sort((a, b) => {
|
||||
if (this.taskDependsOnDeps(a, [b])) return 1;
|
||||
if (this.taskDependsOnDeps(b, [a])) return -1;
|
||||
return 0;
|
||||
});
|
||||
return sortedTasks;
|
||||
}
|
||||
}
|
||||
115
packages/workspace/src/tasks-runner/tasks-runner-v2.ts
Normal file
@ -0,0 +1,115 @@
|
||||
import { Observable } from 'rxjs';
|
||||
import {
|
||||
AffectedEventType,
|
||||
Task,
|
||||
TaskCompleteEvent,
|
||||
TasksRunner
|
||||
} from './tasks-runner';
|
||||
import { output } from '../utils/output';
|
||||
import { readJsonFile } from '../utils/fileutils';
|
||||
import { cliCommand } from '../core/file-utils';
|
||||
import { ProjectGraph } from '../core/project-graph';
|
||||
import { NxJson } from '../core/shared-interfaces';
|
||||
import { TaskOrderer } from './task-orderer';
|
||||
import { TaskOrchestrator } from './task-orchestrator';
|
||||
|
||||
export interface RemoteCache {
|
||||
retrieve: (hash: string, cacheDirectory: string) => Promise<boolean>;
|
||||
store: (hash: string, cacheDirectory: string) => Promise<boolean>;
|
||||
}
|
||||
|
||||
export interface DefaultTasksRunnerOptions {
|
||||
parallel?: boolean;
|
||||
maxParallel?: number;
|
||||
cacheableOperations?: string[];
|
||||
cacheDirectory?: string;
|
||||
remoteCache?: RemoteCache;
|
||||
}
|
||||
|
||||
export const tasksRunnerV2: TasksRunner<DefaultTasksRunnerOptions> = (
|
||||
tasks: Task[],
|
||||
options: DefaultTasksRunnerOptions,
|
||||
context: { target: string; projectGraph: ProjectGraph; nxJson: NxJson }
|
||||
): Observable<TaskCompleteEvent> => {
|
||||
return new Observable(subscriber => {
|
||||
runAllTasks(tasks, options, context)
|
||||
.then(data => data.forEach(d => subscriber.next(d)))
|
||||
.catch(e => {
|
||||
console.error('Unexpected error:');
|
||||
console.error(e);
|
||||
process.exit(1);
|
||||
})
|
||||
.finally(() => {
|
||||
subscriber.complete();
|
||||
// fix for https://github.com/nrwl/nx/issues/1666
|
||||
if (process.stdin['unref']) (process.stdin as any).unref();
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
async function runAllTasks(
|
||||
tasks: Task[],
|
||||
options: DefaultTasksRunnerOptions,
|
||||
context: { target: string; projectGraph: ProjectGraph; nxJson: NxJson }
|
||||
): Promise<Array<{ task: Task; type: any; success: boolean }>> {
|
||||
assertPackageJsonScriptExists();
|
||||
const stages = new TaskOrderer(
|
||||
context.target,
|
||||
context.projectGraph
|
||||
).splitTasksIntoStages(tasks);
|
||||
|
||||
const orchestrator = new TaskOrchestrator(
|
||||
context.nxJson,
|
||||
context.projectGraph,
|
||||
options
|
||||
);
|
||||
|
||||
const res = [];
|
||||
for (let i = 0; i < stages.length; ++i) {
|
||||
const tasksInStage = stages[i];
|
||||
const statuses = await orchestrator.run(tasksInStage);
|
||||
res.push(...statuses);
|
||||
|
||||
// any task failed, we need to skip further stages
|
||||
if (statuses.find(s => !s.success)) {
|
||||
res.push(...markStagesAsNotSuccessful(stages.splice(i + 1)));
|
||||
return res;
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
function markStagesAsNotSuccessful(stages: Task[][]) {
|
||||
return stages.reduce((m, c) => [...m, ...tasksToStatuses(c, false)], []);
|
||||
}
|
||||
|
||||
function tasksToStatuses(tasks: Task[], success: boolean) {
|
||||
return tasks.map(task => ({
|
||||
task,
|
||||
type: AffectedEventType.TaskComplete,
|
||||
success
|
||||
}));
|
||||
}
|
||||
|
||||
function assertPackageJsonScriptExists() {
|
||||
const cli = cliCommand();
|
||||
// Make sure the `package.json` has the `"nx": "nx"` script entry
|
||||
const packageJson = readJsonFile('./package.json');
|
||||
if (!packageJson.scripts || !packageJson.scripts[cli]) {
|
||||
output.error({
|
||||
title: `The "scripts" section of your 'package.json' must contain "${cli}": "${cli}"`,
|
||||
bodyLines: [
|
||||
output.colors.gray('...'),
|
||||
' "scripts": {',
|
||||
output.colors.gray(' ...'),
|
||||
` "${cli}": "${cli}"`,
|
||||
output.colors.gray(' ...'),
|
||||
' }',
|
||||
output.colors.gray('...')
|
||||
]
|
||||
});
|
||||
return process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
export default tasksRunnerV2;
|
||||
@ -2,6 +2,7 @@ import { Observable } from 'rxjs';
|
||||
import { Target } from '@angular-devkit/architect';
|
||||
|
||||
import { ProjectGraph } from '../core/project-graph';
|
||||
import { NxJson } from '../core/shared-interfaces';
|
||||
|
||||
export interface Task {
|
||||
id: string;
|
||||
@ -10,7 +11,8 @@ export interface Task {
|
||||
}
|
||||
|
||||
export enum AffectedEventType {
|
||||
TaskComplete = '[Task] Complete'
|
||||
TaskComplete = '[Task] Complete',
|
||||
TaskCacheRead = '[Task] CacheRead'
|
||||
}
|
||||
|
||||
export interface AffectedEvent {
|
||||
@ -29,5 +31,6 @@ export type TasksRunner<T = unknown> = (
|
||||
context?: {
|
||||
target?: string;
|
||||
projectGraph: ProjectGraph;
|
||||
nxJson: NxJson;
|
||||
}
|
||||
) => Observable<AffectedEvent>;
|
||||
|
||||
@ -3,25 +3,46 @@ import { ProjectGraphNode } from '../core/project-graph';
|
||||
|
||||
const commonCommands = ['build', 'test', 'lint', 'e2e', 'deploy'];
|
||||
|
||||
export function getCommandAsString(
|
||||
cliCommand: string,
|
||||
isYarn: boolean,
|
||||
task: Task
|
||||
) {
|
||||
return getCommand(cliCommand, isYarn, task)
|
||||
.join(' ')
|
||||
.trim();
|
||||
}
|
||||
|
||||
export function getCommand(cliCommand: string, isYarn: boolean, task: Task) {
|
||||
const args = Object.entries(task.overrides || {})
|
||||
.map(([prop, value]) => `--${prop}=${value}`)
|
||||
.join(' ');
|
||||
const args = Object.entries(task.overrides || {}).map(
|
||||
([prop, value]) => `--${prop}=${value}`
|
||||
);
|
||||
|
||||
if (commonCommands.includes(task.target.target)) {
|
||||
const config = task.target.configuration
|
||||
? `--configuration ${task.target.configuration} `
|
||||
: '';
|
||||
return `${cliCommand}${isYarn ? '' : ' --'} ${task.target.target} ${
|
||||
task.target.project
|
||||
} ${config} ${args}`.trim();
|
||||
? [`--configuration`, task.target.configuration]
|
||||
: [];
|
||||
|
||||
return [
|
||||
cliCommand,
|
||||
...(isYarn ? [] : ['--']),
|
||||
task.target.target,
|
||||
task.target.project,
|
||||
...config,
|
||||
...args
|
||||
];
|
||||
} else {
|
||||
const config = task.target.configuration
|
||||
? `:${task.target.configuration} `
|
||||
: '';
|
||||
return `${cliCommand}${isYarn ? '' : ' --'} run ${task.target.project}:${
|
||||
task.target.target
|
||||
}${config} ${args}`.trim();
|
||||
|
||||
return [
|
||||
cliCommand,
|
||||
...(isYarn ? [] : ['--']),
|
||||
'run',
|
||||
`${task.target.project}:${task.target.target}${config}`,
|
||||
...args
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -81,7 +81,7 @@ function createRunUpdateTask(): TaskExecutorFactory<UpdateTaskOptions> {
|
||||
'--allow-dirty'
|
||||
].filter(e => !!e);
|
||||
return new Observable(obs => {
|
||||
fork(ng, args, forkOptions).on('close', (code: number) => {
|
||||
fork(ng, args, forkOptions as any).on('close', (code: number) => {
|
||||
if (code === 0) {
|
||||
obs.next();
|
||||
obs.complete();
|
||||
|
||||
@ -18,4 +18,5 @@ jest --maxWorkers=1 ./build/e2e/report.test.js &&
|
||||
jest --maxWorkers=1 ./build/e2e/run-many.test.js &&
|
||||
jest --maxWorkers=1 ./build/e2e/storybook.test.js &&
|
||||
jest --maxWorkers=1 ./build/e2e/upgrade-module.test.js &&
|
||||
jest --maxWorkers=1 ./build/e2e/web.test.js
|
||||
jest --maxWorkers=1 ./build/e2e/web.test.js &&
|
||||
jest --maxWorkers=1 ./build/e2e/cache.test.js
|
||||
|
||||
52
yarn.lock
@ -468,7 +468,7 @@
|
||||
"@babel/helper-regex" "^7.4.4"
|
||||
regexpu-core "^4.6.0"
|
||||
|
||||
"@babel/helper-define-map@^7.7.4":
|
||||
"@babel/helper-define-map@^7.5.5", "@babel/helper-define-map@^7.7.4":
|
||||
version "7.7.4"
|
||||
resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.7.4.tgz#2841bf92eb8bd9c906851546fe6b9d45e162f176"
|
||||
integrity sha512-v5LorqOa0nVQUvAUTUF3KPastvUt/HzByXNamKQ6RdJRTV7j8rLL+WB5C/MzzWAwOomxDhYFb1wLLxHqox86lg==
|
||||
@ -3198,16 +3198,16 @@
|
||||
resolved "https://registry.yarnpkg.com/@types/node/-/node-12.7.5.tgz#e19436e7f8e9b4601005d73673b6dc4784ffcc2f"
|
||||
integrity sha512-9fq4jZVhPNW8r+UYKnxF1e2HkDWOWKM5bC2/7c9wPV835I0aOrVbS/Hw/pWPk2uKrNXQqg9Z959Kz+IYDd5p3w==
|
||||
|
||||
"@types/node@10.17.13":
|
||||
version "10.17.13"
|
||||
resolved "https://registry.yarnpkg.com/@types/node/-/node-10.17.13.tgz#ccebcdb990bd6139cd16e84c39dc2fb1023ca90c"
|
||||
integrity sha512-pMCcqU2zT4TjqYFrWtYHKal7Sl30Ims6ulZ4UFXxI4xbtQqK/qqKwkDoBFCfooRqqmRu9vY3xaJRwxSh673aYg==
|
||||
|
||||
"@types/node@^12.7.2":
|
||||
version "12.7.8"
|
||||
resolved "https://registry.yarnpkg.com/@types/node/-/node-12.7.8.tgz#cb1bf6800238898bc2ff6ffa5702c3cadd350708"
|
||||
integrity sha512-FMdVn84tJJdV+xe+53sYiZS4R5yn1mAIxfj+DVoNiQjTYz1+OYmjwEZr1ev9nU0axXwda0QDbYl06QHanRVH3A==
|
||||
|
||||
"@types/node@~8.9.4":
|
||||
version "8.9.5"
|
||||
resolved "https://registry.yarnpkg.com/@types/node/-/node-8.9.5.tgz#162b864bc70be077e6db212b322754917929e976"
|
||||
integrity sha512-jRHfWsvyMtXdbhnz5CVHxaBgnV6duZnPlQuRSo/dm/GnmikNcmZhxIES4E9OZjUmQ8C+HCl4KJux+cXN/ErGDQ==
|
||||
|
||||
"@types/normalize-package-data@^2.4.0":
|
||||
version "2.4.0"
|
||||
resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e"
|
||||
@ -6037,7 +6037,7 @@ chownr@^1.1.1:
|
||||
resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.2.tgz#a18f1e0b269c8a6a5d3c86eb298beb14c3dd7bf6"
|
||||
integrity sha512-GkfeAQh+QNy3wquu9oIZr6SS5x7wGdSgNQvD10X3r+AZr1Oys22HW8kAmDMvNg2+Dm0TeGaEuO8gFwdBXxwO8A==
|
||||
|
||||
chownr@^1.1.2:
|
||||
chownr@^1.1.2, chownr@^1.1.3:
|
||||
version "1.1.3"
|
||||
resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.3.tgz#42d837d5239688d55f303003a508230fa6727142"
|
||||
integrity sha512-i70fVHhmV3DtTl6nqvZOnIjbY0Pe4kAUjwHj8z0zAdgBtYrJyYwLKCCuRBQ5ppkyL0AkN7HKRnETdmdp1zqNXw==
|
||||
@ -10167,6 +10167,14 @@ hash.js@^1.0.0, hash.js@^1.0.3:
|
||||
inherits "^2.0.3"
|
||||
minimalistic-assert "^1.0.1"
|
||||
|
||||
hasha@5.1.0:
|
||||
version "5.1.0"
|
||||
resolved "https://registry.yarnpkg.com/hasha/-/hasha-5.1.0.tgz#dd05ccdfcfe7dab626247ce2a58efe461922f4ca"
|
||||
integrity sha512-OFPDWmzPN1l7atOV1TgBVmNtBxaIysToK6Ve9DK+vT6pYuklw/nPNT+HJbZi0KDcI6vWB+9tgvZ5YD7fA3CXcA==
|
||||
dependencies:
|
||||
is-stream "^2.0.0"
|
||||
type-fest "^0.8.0"
|
||||
|
||||
hast-util-parse-selector@^2.2.0:
|
||||
version "2.2.2"
|
||||
resolved "https://registry.yarnpkg.com/hast-util-parse-selector/-/hast-util-parse-selector-2.2.2.tgz#66aabccb252c47d94975f50a281446955160380b"
|
||||
@ -11255,6 +11263,11 @@ is-stream@^1.0.0, is-stream@^1.0.1, is-stream@^1.1.0:
|
||||
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
|
||||
integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ=
|
||||
|
||||
is-stream@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3"
|
||||
integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==
|
||||
|
||||
is-subset@^0.1.1:
|
||||
version "0.1.1"
|
||||
resolved "https://registry.yarnpkg.com/is-subset/-/is-subset-0.1.1.tgz#8a59117d932de1de00f245fcdd39ce43f1e939a6"
|
||||
@ -13295,6 +13308,14 @@ minizlib@^1.2.1:
|
||||
dependencies:
|
||||
minipass "^2.2.1"
|
||||
|
||||
minizlib@^2.1.0:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.0.tgz#fd52c645301ef09a63a2c209697c294c6ce02cf3"
|
||||
integrity sha512-EzTZN/fjSvifSX0SlqUERCN39o6T40AMarPbv0MrarSFtIITCBh7bi+dU8nxGFHuqs9jdIAeoYoKuQAAASsPPA==
|
||||
dependencies:
|
||||
minipass "^3.0.0"
|
||||
yallist "^4.0.0"
|
||||
|
||||
mississippi@^3.0.0:
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022"
|
||||
@ -18461,6 +18482,18 @@ tapable@^1.0.0, tapable@^1.1.3:
|
||||
resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2"
|
||||
integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==
|
||||
|
||||
tar@5.0.5:
|
||||
version "5.0.5"
|
||||
resolved "https://registry.yarnpkg.com/tar/-/tar-5.0.5.tgz#03fcdb7105bc8ea3ce6c86642b9c942495b04f93"
|
||||
integrity sha512-MNIgJddrV2TkuwChwcSNds/5E9VijOiw7kAc1y5hTNJoLDSuIyid2QtLYiCYNnICebpuvjhPQZsXwUL0O3l7OQ==
|
||||
dependencies:
|
||||
chownr "^1.1.3"
|
||||
fs-minipass "^2.0.0"
|
||||
minipass "^3.0.0"
|
||||
minizlib "^2.1.0"
|
||||
mkdirp "^0.5.0"
|
||||
yallist "^4.0.0"
|
||||
|
||||
tar@^4, tar@^4.4.8:
|
||||
version "4.4.10"
|
||||
resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.10.tgz#946b2810b9a5e0b26140cf78bea6b0b0d689eba1"
|
||||
@ -19014,6 +19047,11 @@ type-fest@^0.6.0:
|
||||
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b"
|
||||
integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==
|
||||
|
||||
type-fest@^0.8.0:
|
||||
version "0.8.1"
|
||||
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
|
||||
integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
|
||||
|
||||
type-is@^1.6.4, type-is@~1.6.17, type-is@~1.6.18:
|
||||
version "1.6.18"
|
||||
resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
|
||||
|
||||