feat(core): redesign workspace file hashing

This commit is contained in:
Victor Savkin 2020-07-14 11:12:05 -04:00 committed by Victor Savkin
parent 840b7deb84
commit 67dda10d55
31 changed files with 674 additions and 542 deletions

View File

@ -165,7 +165,7 @@ forEachCli((cliName) => {
expect(failedTests).toContain(`- ${myapp}`); expect(failedTests).toContain(`- ${myapp}`);
expect(failedTests).toContain(`- ${myapp2}`); expect(failedTests).toContain(`- ${myapp2}`);
expect(failedTests).toContain(`Failed projects:`); expect(failedTests).toContain(`Failed projects:`);
expect(readJson('dist/.nx-results')).toEqual({ expect(readJson('node_modules/.cache/nx/results.json')).toEqual({
command: 'test', command: 'test',
results: { results: {
[myapp]: false, [myapp]: false,
@ -318,7 +318,7 @@ forEachCli((cliName) => {
expect(failedTests).toContain( expect(failedTests).toContain(
'You can isolate the above projects by passing: --only-failed' 'You can isolate the above projects by passing: --only-failed'
); );
expect(readJson('dist/.nx-results')).toEqual({ expect(readJson('node_modules/.cache/nx/results.json')).toEqual({
command: 'test', command: 'test',
results: { results: {
[myapp]: false, [myapp]: false,

View File

@ -140,7 +140,6 @@
"fork-ts-checker-webpack-plugin": "^3.1.1", "fork-ts-checker-webpack-plugin": "^3.1.1",
"fs-extra": "7.0.1", "fs-extra": "7.0.1",
"glob": "7.1.4", "glob": "7.1.4",
"hasha": "5.1.0",
"html-webpack-plugin": "^3.2.0", "html-webpack-plugin": "^3.2.0",
"husky": "^3.0.3", "husky": "^3.0.3",
"identity-obj-proxy": "3.0.0", "identity-obj-proxy": "3.0.0",

View File

@ -993,7 +993,7 @@ linter.defineParser('@typescript-eslint/parser', parser);
linter.defineRule(enforceModuleBoundariesRuleName, enforceModuleBoundaries); linter.defineRule(enforceModuleBoundariesRuleName, enforceModuleBoundaries);
function createFile(f) { function createFile(f) {
return { file: f, ext: extname(f), mtime: 1 }; return { file: f, ext: extname(f), hash: '' };
} }
function runRule( function runRule(

View File

@ -1,5 +1,5 @@
module.exports = { module.exports = {
name: 'tao', name: 'workspace',
preset: '../../jest.config.js', preset: '../../jest.config.js',
transform: { transform: {
'^.+\\.[tj]sx?$': 'ts-jest', '^.+\\.[tj]sx?$': 'ts-jest',

View File

@ -59,7 +59,6 @@
"dotenv": "8.2.0", "dotenv": "8.2.0",
"ignore": "5.0.4", "ignore": "5.0.4",
"npm-run-all": "4.1.5", "npm-run-all": "4.1.5",
"hasha": "5.1.0",
"opn": "^5.3.0", "opn": "^5.3.0",
"rxjs": "^6.5.4", "rxjs": "^6.5.4",
"semver": "5.4.1", "semver": "5.4.1",

View File

@ -106,5 +106,5 @@ describe('WorkspaceIntegrityChecks', () => {
}); });
function createFile(f) { function createFile(f) {
return { file: f, ext: extname(f), mtime: 1 }; return { file: f, ext: extname(f), hash: '' };
} }

View File

@ -2,7 +2,7 @@ import * as fs from 'fs';
import { WorkspaceResults } from './workspace-results'; import { WorkspaceResults } from './workspace-results';
import { serializeJson } from '../utils/fileutils'; import { serializeJson } from '../utils/fileutils';
import { ProjectType } from '..//core/project-graph'; import { ProjectType } from '../core/project-graph';
describe('WorkspacesResults', () => { describe('WorkspacesResults', () => {
let results: WorkspaceResults; let results: WorkspaceResults;
@ -43,7 +43,7 @@ describe('WorkspacesResults', () => {
results.saveResults(); results.saveResults();
expect(fs.writeSync).not.toHaveBeenCalled(); expect(fs.writeSync).not.toHaveBeenCalled();
expect(fs.unlinkSync).toHaveBeenCalledWith('dist/.nx-results'); expect(fs.unlinkSync).toHaveBeenCalled();
}); });
}); });
@ -53,23 +53,6 @@ describe('WorkspacesResults', () => {
expect(results.getResult('proj')).toBe(false); expect(results.getResult('proj')).toBe(false);
}); });
it('should save results to file system', () => {
spyOn(fs, 'writeFileSync');
results.setResult('proj', false);
results.saveResults();
expect(fs.writeFileSync).toHaveBeenCalledWith(
'dist/.nx-results',
serializeJson({
command: 'test',
results: {
proj: false,
},
})
);
});
}); });
describe('when results already exist', () => { describe('when results already exist', () => {
@ -97,7 +80,6 @@ describe('WorkspacesResults', () => {
}, },
}); });
expect(fs.readFileSync).toHaveBeenCalledWith('dist/.nx-results', 'utf-8');
expect(results.getResult('proj')).toBe(false); expect(results.getResult('proj')).toBe(false);
}); });

View File

@ -1,9 +1,17 @@
import * as fs from 'fs'; import * as fs from 'fs';
import { readJsonFile, writeJsonFile } from '../utils/fileutils'; import {
import { unlinkSync } from 'fs'; directoryExists,
readJsonFile,
writeJsonFile,
} from '../utils/fileutils';
import { existsSync, unlinkSync } from 'fs';
import { ProjectGraphNode } from '../core/project-graph'; import { ProjectGraphNode } from '../core/project-graph';
import { join } from 'path';
import { appRootPath } from '@nrwl/workspace/src/utils/app-root';
import * as fsExtra from 'fs-extra';
const RESULTS_FILE = 'dist/.nx-results'; const resultsDir = join(appRootPath, 'node_modules', '.cache', 'nx');
const resultsFile = join(resultsDir, 'results.json');
interface NxResults { interface NxResults {
command: string; command: string;
@ -31,11 +39,11 @@ export class WorkspaceResults {
private command: string, private command: string,
private projects: Record<string, ProjectGraphNode> private projects: Record<string, ProjectGraphNode>
) { ) {
const resultsExists = fs.existsSync(RESULTS_FILE); const resultsExists = fs.existsSync(resultsFile);
this.startedWithFailedProjects = false; this.startedWithFailedProjects = false;
if (resultsExists) { if (resultsExists) {
try { try {
const commandResults = readJsonFile(RESULTS_FILE); const commandResults = readJsonFile(resultsFile);
this.startedWithFailedProjects = commandResults.command === command; this.startedWithFailedProjects = commandResults.command === command;
if (this.startedWithFailedProjects) { if (this.startedWithFailedProjects) {
this.commandResults = commandResults; this.commandResults = commandResults;
@ -56,10 +64,19 @@ export class WorkspaceResults {
} }
saveResults() { saveResults() {
try {
if (!existsSync(resultsDir)) {
fsExtra.ensureDirSync(resultsDir);
}
} catch (e) {
if (!directoryExists(resultsDir)) {
throw new Error(`Failed to create directory: ${resultsDir}`);
}
}
if (Object.values<boolean>(this.commandResults.results).includes(false)) { if (Object.values<boolean>(this.commandResults.results).includes(false)) {
writeJsonFile(RESULTS_FILE, this.commandResults); writeJsonFile(resultsFile, this.commandResults);
} else if (fs.existsSync(RESULTS_FILE)) { } else if (fs.existsSync(resultsFile)) {
unlinkSync(RESULTS_FILE); unlinkSync(resultsFile);
} }
} }

View File

@ -116,7 +116,7 @@ describe('project graph', () => {
files = Object.keys(filesJson).map((f) => ({ files = Object.keys(filesJson).map((f) => ({
file: f, file: f,
ext: extname(f), ext: extname(f),
mtime: 1, hash: 'some-hash',
})); }));
readFileAtRevision = (p, r) => { readFileAtRevision = (p, r) => {
const fromFs = filesJson[`./${p}`]; const fromFs = filesJson[`./${p}`];
@ -139,13 +139,13 @@ describe('project graph', () => {
{ {
file: 'something-for-api.txt', file: 'something-for-api.txt',
ext: '.txt', ext: '.txt',
mtime: 1, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
{ {
file: 'libs/ui/src/index.ts', file: 'libs/ui/src/index.ts',
ext: '.ts', ext: '.ts',
mtime: 1, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
]); ]);
@ -211,7 +211,7 @@ describe('project graph', () => {
{ {
file: 'package.json', file: 'package.json',
ext: '.json', ext: '.json',
mtime: 1, hash: 'some-hash',
getChanges: () => jsonDiff(packageJson, updatedPackageJson), getChanges: () => jsonDiff(packageJson, updatedPackageJson),
}, },
]); ]);
@ -279,7 +279,7 @@ describe('project graph', () => {
{ {
file: 'package.json', file: 'package.json',
ext: '.json', ext: '.json',
mtime: 1, hash: 'some-hash',
getChanges: () => jsonDiff(packageJson, updatedPackageJson), getChanges: () => jsonDiff(packageJson, updatedPackageJson),
}, },
]); ]);
@ -300,7 +300,7 @@ describe('project graph', () => {
{ {
file: 'package.json', file: 'package.json',
ext: '.json', ext: '.json',
mtime: 1, hash: 'some-hash',
getChanges: () => jsonDiff(packageJson, updatedPackageJson), getChanges: () => jsonDiff(packageJson, updatedPackageJson),
}, },
]); ]);

View File

@ -35,7 +35,7 @@ describe('getImplicitlyTouchedProjectsByJsonChanges', () => {
[ [
{ {
file: 'package.json', file: 'package.json',
mtime: 0, hash: 'some-hash',
ext: '.json', ext: '.json',
getChanges: () => [ getChanges: () => [
{ {
@ -60,7 +60,7 @@ describe('getImplicitlyTouchedProjectsByJsonChanges', () => {
[ [
{ {
file: 'package.json', file: 'package.json',
mtime: 0, hash: 'some-hash',
ext: '.json', ext: '.json',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },

View File

@ -67,7 +67,7 @@ describe('getTouchedNpmPackages', () => {
[ [
{ {
file: 'package.json', file: 'package.json',
mtime: 0, hash: 'some-hash',
ext: '.json', ext: '.json',
getChanges: () => [ getChanges: () => [
{ {
@ -98,7 +98,7 @@ describe('getTouchedNpmPackages', () => {
[ [
{ {
file: 'package.json', file: 'package.json',
mtime: 0, hash: 'some-hash',
ext: '.json', ext: '.json',
getChanges: () => [ getChanges: () => [
{ {
@ -137,7 +137,7 @@ describe('getTouchedNpmPackages', () => {
[ [
{ {
file: 'package.json', file: 'package.json',
mtime: 0, hash: 'some-hash',
ext: '.json', ext: '.json',
getChanges: () => [ getChanges: () => [
{ {
@ -177,7 +177,7 @@ describe('getTouchedNpmPackages', () => {
[ [
{ {
file: 'package.json', file: 'package.json',
mtime: 0, hash: 'some-hash',
ext: '.json', ext: '.json',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },

View File

@ -9,7 +9,7 @@ describe('getTouchedProjectsInNxJson', () => {
{ {
file: 'source.ts', file: 'source.ts',
ext: '.ts', ext: '.ts',
mtime: 0, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
], ],
@ -32,7 +32,7 @@ describe('getTouchedProjectsInNxJson', () => {
{ {
file: 'nx.json', file: 'nx.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
], ],
@ -58,7 +58,7 @@ describe('getTouchedProjectsInNxJson', () => {
{ {
file: 'nx.json', file: 'nx.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => [ getChanges: () => [
{ {
type: DiffType.Modified, type: DiffType.Modified,
@ -93,7 +93,7 @@ describe('getTouchedProjectsInNxJson', () => {
{ {
file: 'nx.json', file: 'nx.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => [ getChanges: () => [
{ {
type: DiffType.Added, type: DiffType.Added,
@ -138,7 +138,7 @@ describe('getTouchedProjectsInNxJson', () => {
{ {
file: 'nx.json', file: 'nx.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => [ getChanges: () => [
{ {
type: DiffType.Deleted, type: DiffType.Deleted,
@ -175,7 +175,7 @@ describe('getTouchedProjectsInNxJson', () => {
{ {
file: 'nx.json', file: 'nx.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => [ getChanges: () => [
{ {
type: DiffType.Modified, type: DiffType.Modified,

View File

@ -43,7 +43,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
{ {
file: 'source.ts', file: 'source.ts',
ext: '.ts', ext: '.ts',
mtime: 0, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
], ],
@ -67,7 +67,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
{ {
file: 'tsconfig.json', file: 'tsconfig.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
], ],
@ -87,7 +87,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
{ {
file: 'tsconfig.json', file: 'tsconfig.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => getChanges: () =>
jsonDiff( jsonDiff(
{ {
@ -119,7 +119,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
{ {
file: 'tsconfig.json', file: 'tsconfig.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => getChanges: () =>
jsonDiff( jsonDiff(
{ {
@ -151,7 +151,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
{ {
file: 'tsconfig.json', file: 'tsconfig.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => getChanges: () =>
jsonDiff( jsonDiff(
{ {
@ -185,7 +185,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
{ {
file: 'tsconfig.json', file: 'tsconfig.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => getChanges: () =>
jsonDiff( jsonDiff(
{ {
@ -217,7 +217,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
{ {
file: 'tsconfig.json', file: 'tsconfig.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => getChanges: () =>
jsonDiff( jsonDiff(
{ {
@ -254,7 +254,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
{ {
file: 'tsconfig.json', file: 'tsconfig.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => getChanges: () =>
jsonDiff( jsonDiff(
{ {
@ -289,7 +289,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
{ {
file: 'tsconfig.json', file: 'tsconfig.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => getChanges: () =>
jsonDiff( jsonDiff(
{ {

View File

@ -9,7 +9,7 @@ describe('getTouchedProjectsInWorkspaceJson', () => {
{ {
file: 'source.ts', file: 'source.ts',
ext: '.ts', ext: '.ts',
mtime: 0, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
], ],
@ -32,7 +32,7 @@ describe('getTouchedProjectsInWorkspaceJson', () => {
{ {
file: 'workspace.json', file: 'workspace.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
], ],
@ -57,7 +57,7 @@ describe('getTouchedProjectsInWorkspaceJson', () => {
{ {
file: 'workspace.json', file: 'workspace.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => [ getChanges: () => [
{ {
type: DiffType.Modified, type: DiffType.Modified,
@ -91,7 +91,7 @@ describe('getTouchedProjectsInWorkspaceJson', () => {
{ {
file: 'workspace.json', file: 'workspace.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => [ getChanges: () => [
{ {
type: DiffType.Added, type: DiffType.Added,
@ -132,7 +132,7 @@ describe('getTouchedProjectsInWorkspaceJson', () => {
{ {
file: 'workspace.json', file: 'workspace.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => [ getChanges: () => [
{ {
type: DiffType.Deleted, type: DiffType.Deleted,
@ -167,7 +167,7 @@ describe('getTouchedProjectsInWorkspaceJson', () => {
{ {
file: 'workspace.json', file: 'workspace.json',
ext: '.json', ext: '.json',
mtime: 0, hash: 'some-hash',
getChanges: () => [ getChanges: () => [
{ {
type: DiffType.Modified, type: DiffType.Modified,

View File

@ -7,13 +7,13 @@ describe('getTouchedProjects', () => {
{ {
file: 'libs/a/index.ts', file: 'libs/a/index.ts',
ext: '.ts', ext: '.ts',
mtime: 0, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
{ {
file: 'libs/b/index.ts', file: 'libs/b/index.ts',
ext: '.ts', ext: '.ts',
mtime: 0, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
]; ];
@ -30,7 +30,7 @@ describe('getTouchedProjects', () => {
{ {
file: 'libs/a-b/index.ts', file: 'libs/a-b/index.ts',
ext: '.ts', ext: '.ts',
mtime: 0, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
]; ];
@ -47,7 +47,7 @@ describe('getTouchedProjects', () => {
{ {
file: 'libs/a-b/index.ts', file: 'libs/a-b/index.ts',
ext: '.ts', ext: '.ts',
mtime: 0, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
]; ];
@ -64,7 +64,7 @@ describe('getTouchedProjects', () => {
{ {
file: 'libs/a/b/index.ts', file: 'libs/a/b/index.ts',
ext: '.ts', ext: '.ts',
mtime: 0, hash: 'some-hash',
getChanges: () => [new WholeFileChange()], getChanges: () => [new WholeFileChange()],
}, },
]; ];

View File

@ -22,17 +22,19 @@ describe('createFileMap', () => {
}, },
}; };
const files = [ const files = [
{ file: 'apps/demo/src/main.ts', mtime: 1, ext: '.ts' }, { file: 'apps/demo/src/main.ts', hash: 'some-hash', ext: '.ts' },
{ file: 'apps/demo-e2e/src/main.ts', mtime: 1, ext: '.ts' }, { file: 'apps/demo-e2e/src/main.ts', hash: 'some-hash', ext: '.ts' },
{ file: 'libs/ui/src/index.ts', mtime: 1, ext: '.ts' }, { file: 'libs/ui/src/index.ts', hash: 'some-hash', ext: '.ts' },
]; ];
const result = createFileMap(workspaceJson, files); const result = createFileMap(workspaceJson, files);
expect(result).toEqual({ expect(result).toEqual({
demo: [{ file: 'apps/demo/src/main.ts', mtime: 1, ext: '.ts' }], demo: [{ file: 'apps/demo/src/main.ts', hash: 'some-hash', ext: '.ts' }],
'demo-e2e': [{ file: 'apps/demo-e2e/src/main.ts', mtime: 1, ext: '.ts' }], 'demo-e2e': [
ui: [{ file: 'libs/ui/src/index.ts', mtime: 1, ext: '.ts' }], { file: 'apps/demo-e2e/src/main.ts', hash: 'some-hash', ext: '.ts' },
],
ui: [{ file: 'libs/ui/src/index.ts', hash: 'some-hash', ext: '.ts' }],
}); });
}); });
}); });

View File

@ -6,16 +6,17 @@ import { extname } from 'path';
import { NxArgs } from '../command-line/utils'; import { NxArgs } from '../command-line/utils';
import { WorkspaceResults } from '../command-line/workspace-results'; import { WorkspaceResults } from '../command-line/workspace-results';
import { appRootPath } from '../utils/app-root'; import { appRootPath } from '../utils/app-root';
import { readJsonFile, fileExists } from '../utils/fileutils'; import { fileExists, readJsonFile } from '../utils/fileutils';
import { jsonDiff } from '../utils/json-diff'; import { jsonDiff } from '../utils/json-diff';
import { ProjectGraphNode } from './project-graph'; import { ProjectGraphNode } from './project-graph';
import { Environment, NxJson } from './shared-interfaces'; import { Environment, NxJson } from './shared-interfaces';
import { defaultFileHasher } from './hasher/file-hasher';
const ignore = require('ignore'); const ignore = require('ignore');
export interface FileData { export interface FileData {
file: string; file: string;
mtime: number; hash: string;
ext: string; ext: string;
} }
@ -47,15 +48,15 @@ export function calculateFileChanges(
if (ignore) { if (ignore) {
files = files.filter((f) => !ignore.ignores(f)); files = files.filter((f) => !ignore.ignores(f));
} }
return files.map((f) => { return files.map((f) => {
const ext = extname(f); const ext = extname(f);
const _mtime = mtime(`${appRootPath}/${f}`); const hash = defaultFileHasher.hashFile(f);
// Memoize results so we don't recalculate on successive invocation.
return { return {
file: f, file: f,
ext, ext,
mtime: _mtime, hash,
getChanges: (): Change[] => { getChanges: (): Change[] => {
if (!nxArgs) { if (!nxArgs) {
return [new WholeFileChange()]; return [new WholeFileChange()];
@ -110,11 +111,11 @@ function defaultReadFileAtRevision(
} }
function getFileData(filePath: string): FileData { function getFileData(filePath: string): FileData {
const stat = fs.statSync(filePath); const file = path.relative(appRootPath, filePath).split(path.sep).join('/');
return { return {
file: path.relative(appRootPath, filePath).split(path.sep).join('/'), file: file,
hash: defaultFileHasher.hashFile(filePath),
ext: path.extname(filePath), ext: path.extname(filePath),
mtime: stat.mtimeMs,
}; };
} }
@ -197,25 +198,35 @@ export function rootWorkspaceFileNames(): string[] {
return [`package.json`, workspaceFileName(), `nx.json`, `tsconfig.base.json`]; return [`package.json`, workspaceFileName(), `nx.json`, `tsconfig.base.json`];
} }
/**
 * Collect FileData for the well-known root workspace files
 * (package.json, workspace/angular.json, nx.json, tsconfig.base.json).
 */
export function rootWorkspaceFileData(): FileData[] {
  const rootFiles: FileData[] = [];
  for (const f of rootWorkspaceFileNames()) {
    rootFiles.push(getFileData(`${appRootPath}/${f}`));
  }
  return rootFiles;
}
export function readWorkspaceFiles(): FileData[] { export function readWorkspaceFiles(): FileData[] {
const workspaceJson = readWorkspaceJson(); const workspaceJson = readWorkspaceJson();
const files = [];
files.push( if (defaultFileHasher.usesGitForHashing) {
...rootWorkspaceFileNames().map((f) => getFileData(`${appRootPath}/${f}`)) return defaultFileHasher
); .allFiles()
.map((f) => getFileData(`${appRootPath}/${f}`));
} else {
const files = [];
files.push(...rootWorkspaceFileData());
// Add known workspace files and directories // Add known workspace files and directories
files.push(...allFilesInDir(appRootPath, false)); files.push(...allFilesInDir(appRootPath, false));
files.push(...allFilesInDir(`${appRootPath}/tools`)); files.push(...allFilesInDir(`${appRootPath}/tools`));
// Add files for workspace projects // Add files for workspace projects
Object.keys(workspaceJson.projects).forEach((projectName) => { Object.keys(workspaceJson.projects).forEach((projectName) => {
const project = workspaceJson.projects[projectName]; const project = workspaceJson.projects[projectName];
files.push(...allFilesInDir(`${appRootPath}/${project.root}`)); files.push(...allFilesInDir(`${appRootPath}/${project.root}`));
}); });
return files; return files;
}
} }
export function readEnvironment( export function readEnvironment(
@ -229,17 +240,6 @@ export function readEnvironment(
return { nxJson, workspaceJson, workspaceResults }; return { nxJson, workspaceJson, workspaceResults };
} }
/**
* Returns the time when file was last modified
* Returns -Infinity for a non-existent file
*/
export function mtime(filePath: string): number {
if (!fs.existsSync(filePath)) {
return -Infinity;
}
return fs.statSync(filePath).mtimeMs;
}
export function normalizedProjectRoot(p: ProjectGraphNode): string { export function normalizedProjectRoot(p: ProjectGraphNode): string {
if (p.data && p.data.root) { if (p.data && p.data.root) {
const path = p.data.root.split('/').filter((v) => !!v); const path = p.data.root.split('/').filter((v) => !!v);
@ -252,3 +252,15 @@ export function normalizedProjectRoot(p: ProjectGraphNode): string {
return ''; return '';
} }
} }
/**
 * Compare two workspace file snapshots and report whether anything differs.
 *
 * Two snapshots are considered unchanged only when they contain exactly the
 * same files (by path) with identical content hashes.
 *
 * @param a first snapshot of file data
 * @param b second snapshot of file data
 * @returns true when the snapshots differ in any file path or hash
 */
export function filesChanged(a: FileData[], b: FileData[]) {
  if (a.length !== b.length) return true;
  // Array.prototype.sort mutates in place — sort copies so the callers'
  // array order is preserved (the original sorted the inputs themselves).
  const sortedA = [...a].sort((x, y) => x.file.localeCompare(y.file));
  const sortedB = [...b].sort((x, y) => x.file.localeCompare(y.file));
  for (let i = 0; i < sortedA.length; ++i) {
    if (sortedA[i].file !== sortedB[i].file) return true;
    if (sortedA[i].hash !== sortedB[i].hash) return true;
  }
  return false;
}

View File

@ -0,0 +1,71 @@
import { getFileHashes } from './git-hasher';
import { readFileSync } from 'fs';
import { defaultHashing, HashingImp } from './hashing-impl';
import { appRootPath } from '../../utils/app-root';
type PathAndTransformer = {
path: string;
transformer: (x: string) => string | null;
};
/**
 * Extract the `name` and `version` fields from a package.json-style string
 * and return them concatenated (`<name><version>`).
 * Returns '' when the content is not valid JSON.
 */
export function extractNameAndVersion(content: string): string {
  let parsed;
  try {
    parsed = JSON.parse(content);
  } catch (e) {
    return '';
  }
  return `${parsed.name}${parsed.version}`;
}
/**
 * Computes and caches content hashes for workspace files.
 *
 * On construction it tries to seed the cache from git (fast path); when git
 * yields nothing, individual files are hashed lazily on demand.
 */
export class FileHasher {
  // Workspace-relative file path -> content hash.
  fileHashes: { [path: string]: string } = {};
  // True when the cache was successfully seeded from git.
  usesGitForHashing = false;

  constructor(private readonly hashing: HashingImp) {
    this.init();
  }

  /** Reset the cache and re-seed it from git. */
  init() {
    this.fileHashes = {};
    this.seedFromGit();
    this.usesGitForHashing = Object.keys(this.fileHashes).length > 0;
  }

  /**
   * Return the hash for `path`, computing and caching it on first access.
   * An optional transformer rewrites the file content before hashing.
   */
  hashFile(path: string, transformer: (x: string) => string | null = null) {
    const relativePath = path.startsWith(appRootPath)
      ? path.substr(appRootPath.length + 1)
      : path;
    if (!this.fileHashes[relativePath]) {
      this.fileHashes[relativePath] = this.computeHash(path, transformer);
    }
    return this.fileHashes[relativePath];
  }

  /** All workspace-relative paths currently known to the cache. */
  allFiles() {
    return Object.keys(this.fileHashes);
  }

  // Populate the cache from git, stripping the workspace root prefix.
  private seedFromGit() {
    const sliceIndex = appRootPath.length + 1;
    getFileHashes(appRootPath).forEach((hash, filename) => {
      this.fileHashes[filename.substr(sliceIndex)] = hash;
    });
  }

  // Hash a single file, optionally transforming its content first.
  // Any read/hash failure yields '' (best-effort, mirrors the git path).
  private computeHash(
    path: string,
    transformer: (x: string) => string | null
  ): string {
    try {
      if (transformer) {
        const transformed = transformer(readFileSync(path).toString());
        return this.hashing.hashArray([transformed]);
      }
      return this.hashing.hashFile(path);
    } catch (e) {
      return '';
    }
  }
}

export const defaultFileHasher = new FileHasher(defaultHashing);

View File

@ -0,0 +1,115 @@
import { spawnSync } from 'child_process';
/**
 * Parse `git ls-tree HEAD -r` output into a map of filename -> blob hash.
 * Throws when a non-empty line does not match the expected format.
 */
function parseGitLsTree(output: string): Map<string, string> {
  const changes = new Map<string, string>();
  if (!output) {
    return changes;
  }
  // <mode> SP <blob|commit> SP <40-hex-hash> <whitespace> <filename>
  const lineFormat = /([0-9]{6})\s(blob|commit)\s([a-f0-9]{40})\s*(.*)/;
  for (const line of output.split('\n')) {
    if (!line) {
      continue;
    }
    const matches = line.match(lineFormat);
    if (!matches || !matches[3] || !matches[4]) {
      throw new Error(`Cannot parse git ls-tree input: "${line}"`);
    }
    changes.set(matches[4], matches[3]);
  }
  return changes;
}
/**
 * Parse `git status -s` porcelain output into a map of
 * file path -> change type (e.g. 'M', 'A', 'D', '??').
 */
function parseGitStatus(output: string): Map<string, string> {
  const changes = new Map<string, string>();
  if (!output) {
    return changes;
  }
  for (const line of output.trim().split('\n')) {
    // First token is the change type, last token is the file path
    // (rename lines look like "R old -> new"; we keep the new path).
    const parts = line
      .trim()
      .split(' ')
      .filter((part) => !!part);
    const [changeType, ...filenames] = parts;
    if (changeType && filenames.length > 0) {
      changes.set(filenames[filenames.length - 1], changeType);
    }
  }
  return changes;
}
/**
 * Run `command args...` synchronously in `cwd` and return its trimmed stdout.
 * Throws when the process exits with a non-zero status.
 */
function spawnProcess(command: string, args: string[], cwd: string): string {
  const result = spawnSync(command, args, { cwd });
  if (result.status !== 0) {
    throw new Error(`Failed to run ${command} ${args.join(' ')}`);
  }
  return result.stdout.toString().trim();
}
/**
 * Hash on-disk files via `git hash-object` and return a map of
 * file path -> blob hash. An empty input yields an empty map without
 * spawning git.
 */
function getGitHashForFiles(
  filesToHash: string[],
  path: string
): Map<string, string> {
  const result = new Map<string, string>();
  if (!filesToHash.length) {
    return result;
  }
  const stdout = spawnProcess('git', ['hash-object', ...filesToHash], path);
  const hashes = stdout.split('\n');
  // git emits one hash per input path, in order; anything else is fatal.
  if (hashes.length !== filesToHash.length) {
    throw new Error(
      `Passed ${filesToHash.length} file paths to Git to hash, but received ${hashes.length} hashes.`
    );
  }
  filesToHash.forEach((filePath, i) => {
    result.set(filePath, hashes[i]);
  });
  return result;
}
// Map of committed filename -> blob hash for everything tracked at HEAD.
function gitLsTree(path: string): Map<string, string> {
  const treeOutput = spawnProcess('git', ['ls-tree', 'HEAD', '-r'], path);
  return parseGitLsTree(treeOutput);
}
/**
 * Hash every modified/untracked (non-deleted) file reported by
 * `git status -s -u .` under `path`.
 */
function gitStatus(path: string): Map<string, string> {
  const statusOutput = spawnProcess('git', ['status', '-s', '-u', '.'], path);
  const filesToHash: string[] = [];
  for (const [filename, changeType] of parseGitStatus(statusOutput)) {
    // Deleted files have no content to hash.
    if (changeType !== 'D') {
      filesToHash.push(filename);
    }
  }
  return getGitHashForFiles(filesToHash, path);
}
/**
 * Build a map of absolute file path -> content hash for every file git
 * knows about under `path`.
 *
 * Committed files come from `git ls-tree HEAD -r`; uncommitted (modified or
 * untracked, non-deleted) files are then re-hashed via `git hash-object`,
 * overriding the stale HEAD entries.
 *
 * Returns an empty map on any git failure: this strategy only exists to
 * speed hashing up, so callers fall back to hashing files themselves.
 */
export function getFileHashes(path: string): Map<string, string> {
  const res = new Map<string, string>();
  try {
    const committed = gitLsTree(path);
    committed.forEach((hash: string, filename: string) => {
      // BUG FIX: was `${path}/$(unknown)` — `$(...)` is not template
      // interpolation, so every entry overwrote one literal key and the
      // filename was discarded. Key by the actual file path instead.
      res.set(`${path}/${filename}`, hash);
    });
    const uncommitted = gitStatus(path);
    uncommitted.forEach((hash: string, filename: string) => {
      res.set(`${path}/${filename}`, hash);
    });
    return res;
  } catch (e) {
    // this strategy is only used for speeding things up.
    // ignoring all the errors
    if (process.env.NX_GIT_HASHER_LOGGING) {
      console.error(`Internal error:`);
      console.error(e);
    }
    return new Map<string, string>();
  }
}

View File

@ -1,8 +1,7 @@
import { Hasher, extractNameAndVersion } from './hasher'; import { Hasher } from './hasher';
import { extractNameAndVersion } from '@nrwl/workspace/src/core/hasher/file-hasher';
const hasha = require('hasha');
const fs = require('fs'); const fs = require('fs');
jest.mock('hasha');
jest.mock('fs'); jest.mock('fs');
describe('Hasher', () => { describe('Hasher', () => {
@ -14,14 +13,13 @@ describe('Hasher', () => {
'tsconfig.base.json': 'tsconfig.base.json.hash', 'tsconfig.base.json': 'tsconfig.base.json.hash',
'workspace.json': 'workspace.json.hash', 'workspace.json': 'workspace.json.hash',
}; };
beforeEach(() => {
hasha.mockImplementation((values) => values.join('|')); function createHashing(): any {
hasha.fromFile.mockImplementation((path) => Promise.resolve(hashes[path])); return {
fs.statSync.mockReturnValue({ size: 100 }); hashArray: (values: string[]) => values.join('|'),
fs.readFileSync.mockImplementation(() => hashFile: (path: string) => hashes[path],
JSON.stringify({ dependencies: {}, devDependencies: {} }) };
); }
});
it('should create project hash', async (done) => { it('should create project hash', async (done) => {
hashes['/file'] = 'file.hash'; hashes['/file'] = 'file.hash';
@ -31,7 +29,7 @@ describe('Hasher', () => {
proj: { proj: {
name: 'proj', name: 'proj',
type: 'lib', type: 'lib',
data: { files: [{ file: '/file', ext: '.ts', mtime: 1 }] }, data: { files: [{ file: '/file', ext: '.ts', hash: 'some-hash' }] },
}, },
}, },
dependencies: { dependencies: {
@ -41,14 +39,19 @@ describe('Hasher', () => {
{} as any, {} as any,
{ {
runtimeCacheInputs: ['echo runtime123', 'echo runtime456'], runtimeCacheInputs: ['echo runtime123', 'echo runtime456'],
} },
createHashing()
); );
const hash = await hasher.hash({ const hash = (
target: { project: 'proj', target: 'build' }, await hasher.hashTasks([
id: 'proj-build', {
overrides: { prop: 'prop-value' }, target: { project: 'proj', target: 'build' },
}); id: 'proj-build',
overrides: { prop: 'prop-value' },
},
])
)[0];
expect(hash.value).toContain('yarn.lock.hash'); //implicits expect(hash.value).toContain('yarn.lock.hash'); //implicits
expect(hash.value).toContain('file.hash'); //project files expect(hash.value).toContain('file.hash'); //project files
@ -87,15 +90,18 @@ describe('Hasher', () => {
{} as any, {} as any,
{ {
runtimeCacheInputs: ['boom'], runtimeCacheInputs: ['boom'],
} },
createHashing()
); );
try { try {
await hasher.hash({ await hasher.hashTasks([
target: { project: 'proj', target: 'build' }, {
id: 'proj-build', target: { project: 'proj', target: 'build' },
overrides: {}, id: 'proj-build',
}); overrides: {},
},
]);
fail('Should not be here'); fail('Should not be here');
} catch (e) { } catch (e) {
expect(e.message).toContain( expect(e.message).toContain(
@ -115,12 +121,16 @@ describe('Hasher', () => {
parent: { parent: {
name: 'parent', name: 'parent',
type: 'lib', type: 'lib',
data: { files: [{ file: '/filea', ext: '.ts', mtime: 1 }] }, data: {
files: [{ file: '/filea', ext: '.ts', hash: 'some-hash' }],
},
}, },
child: { child: {
name: 'child', name: 'child',
type: 'lib', type: 'lib',
data: { files: [{ file: '/fileb', ext: '.ts', mtime: 1 }] }, data: {
files: [{ file: '/fileb', ext: '.ts', hash: 'some-hash' }],
},
}, },
}, },
dependencies: { dependencies: {
@ -128,17 +138,22 @@ describe('Hasher', () => {
}, },
}, },
{} as any, {} as any,
{} {},
createHashing()
); );
const hasha = await hasher.hash({ const hash = (
target: { project: 'parent', target: 'build' }, await hasher.hashTasks([
id: 'parent-build', {
overrides: { prop: 'prop-value' }, target: { project: 'parent', target: 'build' },
}); id: 'parent-build',
overrides: { prop: 'prop-value' },
},
])
)[0];
// note that the parent hash is based on parent source files only! // note that the parent hash is based on parent source files only!
expect(hasha.details.sources).toEqual({ expect(hash.details.sources).toEqual({
parent: 'a.hash', parent: 'a.hash',
child: 'b.hash', child: 'b.hash',
}); });
@ -155,12 +170,16 @@ describe('Hasher', () => {
proja: { proja: {
name: 'proja', name: 'proja',
type: 'lib', type: 'lib',
data: { files: [{ file: '/filea', ext: '.ts', mtime: 1 }] }, data: {
files: [{ file: '/filea', ext: '.ts', hash: 'some-hash' }],
},
}, },
projb: { projb: {
name: 'projb', name: 'projb',
type: 'lib', type: 'lib',
data: { files: [{ file: '/fileb', ext: '.ts', mtime: 1 }] }, data: {
files: [{ file: '/fileb', ext: '.ts', hash: 'some-hash' }],
},
}, },
}, },
dependencies: { dependencies: {
@ -169,14 +188,19 @@ describe('Hasher', () => {
}, },
}, },
{} as any, {} as any,
{} {},
createHashing()
); );
const hasha = await hasher.hash({ const hasha = (
target: { project: 'proja', target: 'build' }, await hasher.hashTasks([
id: 'proja-build', {
overrides: { prop: 'prop-value' }, target: { project: 'proja', target: 'build' },
}); id: 'proja-build',
overrides: { prop: 'prop-value' },
},
])
)[0];
expect(hasha.value).toContain('yarn.lock.hash'); //implicits expect(hasha.value).toContain('yarn.lock.hash'); //implicits
expect(hasha.value).toContain('a.hash'); //project files expect(hasha.value).toContain('a.hash'); //project files
@ -186,11 +210,15 @@ describe('Hasher', () => {
expect(hasha.value).toContain('build'); //target expect(hasha.value).toContain('build'); //target
expect(hasha.details.sources).toEqual({ proja: 'a.hash', projb: 'b.hash' }); expect(hasha.details.sources).toEqual({ proja: 'a.hash', projb: 'b.hash' });
const hashb = await hasher.hash({ const hashb = (
target: { project: 'projb', target: 'build' }, await hasher.hashTasks([
id: 'projb-build', {
overrides: { prop: 'prop-value' }, target: { project: 'projb', target: 'build' },
}); id: 'projb-build',
overrides: { prop: 'prop-value' },
},
])
)[0];
expect(hashb.value).toContain('yarn.lock.hash'); //implicits expect(hashb.value).toContain('yarn.lock.hash'); //implicits
expect(hashb.value).toContain('a.hash'); //project files expect(hashb.value).toContain('a.hash'); //project files
@ -203,44 +231,6 @@ describe('Hasher', () => {
done(); done();
}); });
it('should handle large binary files in a special way', async (done) => {
fs.statSync.mockImplementation((f) => {
if (f === '/file') return { size: 1000000 * 5 + 1 };
return { size: 100 };
});
hashes['/file'] = 'file.hash';
const hasher = new Hasher(
{
nodes: {
proja: {
name: 'proj',
type: 'lib',
data: { files: [{ file: '/file', ext: '.ts', mtime: 1 }] },
},
},
dependencies: {},
},
{} as any,
{}
);
const hash = (
await hasher.hash({
target: { project: 'proja', target: 'build' },
id: 'proja-build',
overrides: { prop: 'prop-value' },
})
).value;
expect(hash).toContain('yarn.lock.hash'); //implicits
expect(hash).toContain('5000001'); //project files
expect(hash).toContain('prop-value'); //overrides
expect(hash).toContain('proj'); //project
expect(hash).toContain('build'); //target
done();
});
describe('extractNameAndVersion', () => { describe('extractNameAndVersion', () => {
it('should work', () => { it('should work', () => {
const nameAndVersion = extractNameAndVersion(` const nameAndVersion = extractNameAndVersion(`

View File

@ -1,11 +1,17 @@
import { ProjectGraph } from '../core/project-graph'; import { ProjectGraph } from '../project-graph';
import { NxJson } from '../core/shared-interfaces'; import { NxJson } from '../shared-interfaces';
import { Task } from './tasks-runner'; import { Task } from '../../tasks-runner/tasks-runner';
import { statSync, readFileSync } from 'fs'; import { readFileSync } from 'fs';
import { rootWorkspaceFileNames } from '../core/file-utils'; import { rootWorkspaceFileNames } from '../file-utils';
import { execSync } from 'child_process'; import { execSync } from 'child_process';
import {
defaultFileHasher,
extractNameAndVersion,
FileHasher,
} from './file-hasher';
import { defaultHashing, HashingImp } from './hashing-impl';
const resolve = require('resolve'); const resolve = require('resolve');
const hasha = require('hasha');
export interface Hash { export interface Hash {
value: string; value: string;
@ -38,28 +44,44 @@ interface NodeModulesResult {
export class Hasher { export class Hasher {
static version = '1.0'; static version = '1.0';
implicitDependencies: Promise<ImplicitHashResult>; private implicitDependencies: Promise<ImplicitHashResult>;
nodeModules: Promise<NodeModulesResult>; private nodeModules: Promise<NodeModulesResult>;
runtimeInputs: Promise<RuntimeHashResult>; private runtimeInputs: Promise<RuntimeHashResult>;
fileHashes = new FileHashes(); private fileHasher: FileHasher;
projectHashes = new ProjectHashes(this.projectGraph, this.fileHashes); private projectHashes: ProjectHasher;
private hashing: HashingImp;
constructor( constructor(
private readonly projectGraph: ProjectGraph, private readonly projectGraph: ProjectGraph,
private readonly nxJson: NxJson, private readonly nxJson: NxJson,
private readonly options: any private readonly options: any,
) {} hashing: HashingImp = undefined
) {
async hash(task: Task): Promise<Hash> { if (!hashing) {
const command = hasha( this.hashing = defaultHashing;
[ this.fileHasher = defaultFileHasher;
task.target.project || '', } else {
task.target.target || '', this.hashing = hashing;
task.target.configuration || '', this.fileHasher = new FileHasher(hashing);
JSON.stringify(task.overrides), }
], this.projectHashes = new ProjectHasher(
{ algorithm: 'sha256' } this.projectGraph,
this.fileHasher,
this.hashing
); );
}
async hashTasks(tasks: Task[]): Promise<Hash[]> {
return Promise.all(tasks.map((t) => this.hash(t)));
}
private async hash(task: Task): Promise<Hash> {
const command = this.hashing.hashArray([
task.target.project || '',
task.target.target || '',
task.target.configuration || '',
JSON.stringify(task.overrides),
]);
const values = (await Promise.all([ const values = (await Promise.all([
this.projectHashes.hashProject(task.target.project, [ this.projectHashes.hashProject(task.target.project, [
@ -75,12 +97,11 @@ export class Hasher {
NodeModulesResult NodeModulesResult
]; ];
const value = hasha( const value = this.hashing.hashArray([
[Hasher.version, command, ...values.map((v) => v.value)], Hasher.version,
{ command,
algorithm: 'sha256', ...values.map((v) => v.value),
} ]);
);
return { return {
value, value,
@ -109,12 +130,7 @@ export class Hasher {
}) })
)) as any; )) as any;
const value = await hasha( const value = this.hashing.hashArray(values.map((v) => v.value));
values.map((v) => v.value),
{
algorithm: 'sha256',
}
);
const runtime = values.reduce( const runtime = values.reduce(
(m, c) => ((m[c.input] = c.value), m), (m, c) => ((m[c.input] = c.value), m),
{} {}
@ -143,18 +159,12 @@ export class Hasher {
]; ];
this.implicitDependencies = Promise.resolve().then(async () => { this.implicitDependencies = Promise.resolve().then(async () => {
const fileHashes = await Promise.all( const fileHashes = fileNames.map((file) => {
fileNames.map(async (file) => { const hash = this.fileHasher.hashFile(file);
const hash = await this.fileHashes.hashFile(file); return { file, hash };
return { file, hash }; });
}) const combinedHash = this.hashing.hashArray(
); fileHashes.map((v) => v.hash)
const combinedHash = await hasha(
fileHashes.map((v) => v.hash),
{
algorithm: 'sha256',
}
); );
return { return {
value: combinedHash, value: combinedHash,
@ -174,21 +184,17 @@ export class Hasher {
...Object.keys(j.dependencies), ...Object.keys(j.dependencies),
...Object.keys(j.devDependencies), ...Object.keys(j.devDependencies),
]; ];
const packageJsonHashes = await Promise.all( const packageJsonHashes = allPackages.map((d) => {
allPackages.map((d) => { try {
try { const path = resolve.sync(`${d}/package.json`, {
const path = resolve.sync(`${d}/package.json`, { basedir: process.cwd(),
basedir: process.cwd(), });
}); return this.fileHasher.hashFile(path, extractNameAndVersion);
return this.fileHashes } catch (e) {
.hashFile(path, extractNameAndVersion) return '';
.catch(() => ''); }
} catch (e) { });
return ''; return { value: this.hashing.hashArray(packageJsonHashes) };
}
})
);
return { value: await hasha(packageJsonHashes) };
} catch (e) { } catch (e) {
return { value: '' }; return { value: '' };
} }
@ -198,12 +204,13 @@ export class Hasher {
} }
} }
export class ProjectHashes { class ProjectHasher {
private sourceHashes: { [projectName: string]: Promise<string> } = {}; private sourceHashes: { [projectName: string]: Promise<string> } = {};
constructor( constructor(
private readonly projectGraph: ProjectGraph, private readonly projectGraph: ProjectGraph,
private readonly fileHashes: FileHashes private readonly fileHasher: FileHasher,
private readonly hashing: HashingImp
) {} ) {}
async hashProject( async hashProject(
@ -231,7 +238,7 @@ export class ProjectHashes {
}, },
{ [projectName]: projectHash } { [projectName]: projectHash }
); );
const value = await hasha([ const value = this.hashing.hashArray([
...depHashes.map((d) => d.value), ...depHashes.map((d) => d.value),
projectHash, projectHash,
]); ]);
@ -244,88 +251,11 @@ export class ProjectHashes {
this.sourceHashes[projectName] = new Promise(async (res) => { this.sourceHashes[projectName] = new Promise(async (res) => {
const p = this.projectGraph.nodes[projectName]; const p = this.projectGraph.nodes[projectName];
const values = await Promise.all( const values = await Promise.all(
p.data.files.map((f) => this.fileHashes.hashFile(f.file)) p.data.files.map((f) => this.fileHasher.hashFile(f.file))
); );
res(hasha(values, { algorithm: 'sha256' })); res(this.hashing.hashArray(values));
}); });
} }
return this.sourceHashes[projectName]; return this.sourceHashes[projectName];
} }
} }
export function extractNameAndVersion(content: string): string {
try {
const c = JSON.parse(content);
return `${c.name}${c.version}`;
} catch (e) {
return '';
}
}
type PathAndTransformer = {
path: string;
transformer: (x: string) => string | null;
};
export class FileHashes {
private queue = [] as PathAndTransformer[];
private numberOfConcurrentReads = 0;
private fileHashes: { [path: string]: Promise<string> } = {};
private resolvers: { [path: string]: Function } = {};
async hashFile(
path: string,
transformer: (x: string) => string | null = null
) {
if (!this.fileHashes[path]) {
this.fileHashes[path] = new Promise((res) => {
this.resolvers[path] = res;
this.pushFileIntoQueue({ path, transformer });
});
}
return this.fileHashes[path];
}
private pushFileIntoQueue(pathAndTransformer: PathAndTransformer) {
this.queue.push(pathAndTransformer);
if (this.numberOfConcurrentReads < 2000) {
this.numberOfConcurrentReads++;
this.takeFromQueue();
}
}
private takeFromQueue() {
if (this.queue.length > 0) {
const pathAndTransformer = this.queue.pop();
this.processPath(pathAndTransformer)
.then((value) => {
this.resolvers[pathAndTransformer.path](value);
})
.then(() => this.takeFromQueue());
} else {
this.numberOfConcurrentReads--;
}
}
private processPath(pathAndTransformer: PathAndTransformer) {
try {
const stats = statSync(pathAndTransformer.path);
const fileSizeInMegabytes = stats.size / 1000000;
// large binary file, skip it
if (fileSizeInMegabytes > 5) {
return Promise.resolve(stats.size.toString());
} else if (pathAndTransformer.transformer) {
const transformedFile = pathAndTransformer.transformer(
readFileSync(pathAndTransformer.path).toString()
);
return Promise.resolve('').then(() =>
hasha([transformedFile], { algorithm: 'sha256' })
);
} else {
return hasha.fromFile(pathAndTransformer.path, { algorithm: 'sha256' });
}
} catch (e) {
return Promise.resolve('');
}
}
}

View File

@ -0,0 +1,23 @@
import * as crypto from 'crypto';
import { readFileSync } from 'fs';
export class HashingImp {
hashArray(input: string[]): string {
const hasher = crypto.createHash('sha256');
for (const part of input) {
hasher.update(part);
}
const hash = hasher.digest().buffer;
return Buffer.from(hash).toString('hex');
}
hashFile(path: string): string {
const hasher = crypto.createHash('sha256');
const file = readFileSync(path);
hasher.update(file);
const hash = hasher.digest().buffer;
return Buffer.from(hash).toString('hex');
}
}
export const defaultHashing = new HashingImp();

View File

@ -0,0 +1,113 @@
import { FileData, filesChanged } from '../file-utils';
import {
ProjectGraph,
ProjectGraphDependency,
ProjectGraphNode,
} from '../project-graph';
import { join } from 'path';
import { appRootPath } from '../../utils/app-root';
import { existsSync } from 'fs';
import * as fsExtra from 'fs-extra';
import {
directoryExists,
fileExists,
readJsonFile,
writeJsonFile,
} from '../../utils/fileutils';
import { FileMap } from '@nrwl/workspace/src/core/file-graph';
export interface ProjectGraphCache {
version: string;
rootFiles: FileData[];
nodes: Record<string, ProjectGraphNode>;
dependencies: Record<string, ProjectGraphDependency[]>;
}
const nxDepsDir = join(appRootPath, 'node_modules', '.cache', 'nx');
const nxDepsPath = join(nxDepsDir, 'nxdeps.json');
export function readCache(): false | ProjectGraphCache {
try {
if (!existsSync(nxDepsDir)) {
fsExtra.ensureDirSync(nxDepsDir);
}
} catch (e) {
/*
* @jeffbcross: Node JS docs recommend against checking for existence of directory immediately before creating it.
* Instead, just try to create the directory and handle the error.
*
* We ran into race conditions when running scripts concurrently, where multiple scripts were
* arriving here simultaneously, checking for directory existence, then trying to create the directory simultaneously.
*
* In this case, we're creating the directory. If the operation failed, we ensure that the directory
* exists before continuing (or raise an exception).
*/
if (!directoryExists(nxDepsDir)) {
throw new Error(`Failed to create directory: ${nxDepsDir}`);
}
}
const data = fileExists(nxDepsPath) ? readJsonFile(nxDepsPath) : null;
return data ? data : false;
}
export function writeCache(
rootFiles: FileData[],
projectGraph: ProjectGraph
): void {
writeJsonFile(nxDepsPath, {
version: '2.0',
rootFiles,
nodes: projectGraph.nodes,
dependencies: projectGraph.dependencies,
});
}
export function differentFromCache(
fileMap: FileMap,
c: ProjectGraphCache
): {
noDifference: boolean;
filesDifferentFromCache: FileMap;
partiallyConstructedProjectGraph?: ProjectGraph;
} {
const currentProjects = Object.keys(fileMap).sort();
const previousProjects = Object.keys(c.nodes)
.sort()
.filter((name) => c.nodes[name].data.files.length > 0);
// Projects changed -> compute entire graph
if (
currentProjects.length !== previousProjects.length ||
currentProjects.some((val, idx) => val !== previousProjects[idx])
) {
return {
filesDifferentFromCache: fileMap,
partiallyConstructedProjectGraph: null,
noDifference: false,
};
}
// Projects are same -> compute projects with file changes
const filesDifferentFromCache: FileMap = {};
currentProjects.forEach((p) => {
if (filesChanged(c.nodes[p].data.files, fileMap[p])) {
filesDifferentFromCache[p] = fileMap[p];
}
});
// Re-compute nodes and dependencies for each project in file map.
Object.keys(filesDifferentFromCache).forEach((key) => {
delete c.dependencies[key];
});
const partiallyConstructedProjectGraph = {
nodes: c.nodes,
dependencies: c.dependencies,
};
return {
filesDifferentFromCache: filesDifferentFromCache,
partiallyConstructedProjectGraph,
noDifference: Object.keys(filesDifferentFromCache).length === 0,
};
}

View File

@ -10,6 +10,7 @@ export function buildWorkspaceProjectNodes(
Object.keys(ctx.fileMap).forEach((key) => { Object.keys(ctx.fileMap).forEach((key) => {
const p = ctx.workspaceJson.projects[key]; const p = ctx.workspaceJson.projects[key];
// TODO, types and projectType should allign
const projectType = const projectType =
p.projectType === 'application' p.projectType === 'application'
? key.endsWith('-e2e') ? key.endsWith('-e2e')

View File

@ -1,11 +1,12 @@
import { vol, fs } from 'memfs'; import { vol, fs } from 'memfs';
jest.mock('fs', () => require('memfs').fs);
jest.mock('../../utils/app-root', () => ({ appRootPath: '/root' }));
import { stripIndents } from '@angular-devkit/core/src/utils/literals'; import { stripIndents } from '@angular-devkit/core/src/utils/literals';
import { createProjectGraph } from './project-graph'; import { createProjectGraph } from './project-graph';
import { DependencyType } from './project-graph-models'; import { DependencyType } from './project-graph-models';
import { NxJson } from '../shared-interfaces'; import { NxJson } from '../shared-interfaces';
import { defaultFileHasher } from '@nrwl/workspace/src/core/hasher/file-hasher';
jest.mock('fs', () => require('memfs').fs);
jest.mock('../../utils/app-root', () => ({ appRootPath: '/root' }));
describe('project graph', () => { describe('project graph', () => {
let packageJson: any; let packageJson: any;
@ -198,6 +199,9 @@ describe('project graph', () => {
//wait a tick to ensure the modified time of workspace.json will be after the creation of the project graph file //wait a tick to ensure the modified time of workspace.json will be after the creation of the project graph file
await new Promise((resolve) => setTimeout(resolve, 1)); await new Promise((resolve) => setTimeout(resolve, 1));
fs.writeFileSync('/root/workspace.json', JSON.stringify(workspaceJson)); fs.writeFileSync('/root/workspace.json', JSON.stringify(workspaceJson));
defaultFileHasher.init();
graph = createProjectGraph(); graph = createProjectGraph();
expect(graph.nodes).toMatchObject({ expect(graph.nodes).toMatchObject({
demo: { name: 'demo', type: 'lib' }, demo: { name: 'demo', type: 'lib' },

View File

@ -1,20 +1,12 @@
import { mkdirSync } from 'fs';
import { appRootPath } from '../../utils/app-root';
import {
directoryExists,
fileExists,
readJsonFile,
writeJsonFile,
} from '../../utils/fileutils';
import { assertWorkspaceValidity } from '../assert-workspace-validity'; import { assertWorkspaceValidity } from '../assert-workspace-validity';
import { createFileMap, FileMap } from '../file-graph'; import { createFileMap, FileMap } from '../file-graph';
import { import {
defaultFileRead, defaultFileRead,
FileData, filesChanged,
mtime,
readNxJson, readNxJson,
readWorkspaceFiles, readWorkspaceFiles,
readWorkspaceJson, readWorkspaceJson,
rootWorkspaceFileData,
} from '../file-utils'; } from '../file-utils';
import { normalizeNxJson } from '../normalize-nx-json'; import { normalizeNxJson } from '../normalize-nx-json';
import { import {
@ -30,188 +22,80 @@ import {
} from './build-nodes'; } from './build-nodes';
import { ProjectGraphBuilder } from './project-graph-builder'; import { ProjectGraphBuilder } from './project-graph-builder';
import { ProjectGraph } from './project-graph-models'; import { ProjectGraph } from './project-graph-models';
import {
/** differentFromCache,
* This version is stored in the project graph cache to determine if it can be reused. ProjectGraphCache,
*/ readCache,
const projectGraphCacheVersion = '1'; writeCache,
} from '../nx-deps/nx-deps-cache';
import { NxJson } from '../shared-interfaces';
export function createProjectGraph( export function createProjectGraph(
workspaceJson = readWorkspaceJson(), workspaceJson = readWorkspaceJson(),
nxJson = readNxJson(), nxJson = readNxJson(),
workspaceFiles = readWorkspaceFiles(), workspaceFiles = readWorkspaceFiles(),
fileRead: (s: string) => string = defaultFileRead, fileRead: (s: string) => string = defaultFileRead,
cache: false | { data: ProjectGraphCache; mtime: number } = readCache(), cache: false | ProjectGraphCache = readCache(),
shouldCache: boolean = true shouldCache: boolean = true
): ProjectGraph { ): ProjectGraph {
assertWorkspaceValidity(workspaceJson, nxJson); assertWorkspaceValidity(workspaceJson, nxJson);
const normalizedNxJson = normalizeNxJson(nxJson); const normalizedNxJson = normalizeNxJson(nxJson);
if (cache && maxMTime(rootWorkspaceFileData(workspaceFiles)) > cache.mtime) {
cache = false;
}
if (!cache || maxMTime(workspaceFiles) > cache.mtime) { const rootFiles = rootWorkspaceFileData();
const fileMap = createFileMap(workspaceJson, workspaceFiles); const fileMap = createFileMap(workspaceJson, workspaceFiles);
const incremental = modifiedSinceCache(fileMap, cache);
if (cache && !filesChanged(rootFiles, cache.rootFiles)) {
const diff = differentFromCache(fileMap, cache);
if (diff.noDifference) {
return diff.partiallyConstructedProjectGraph;
}
const ctx = { const ctx = {
workspaceJson, workspaceJson,
nxJson: normalizedNxJson, nxJson: normalizedNxJson,
fileMap: incremental.fileMap, fileMap: diff.filesDifferentFromCache,
}; };
const builder = new ProjectGraphBuilder(incremental.projectGraph); const projectGraph = buildProjectGraph(
const buildNodesFns: BuildNodes[] = [ ctx,
buildWorkspaceProjectNodes, fileRead,
buildNpmPackageNodes, diff.partiallyConstructedProjectGraph
];
const buildDependenciesFns: BuildDependencies[] = [
buildExplicitTypeScriptDependencies,
buildImplicitProjectDependencies,
buildExplicitNpmDependencies,
];
buildNodesFns.forEach((f) =>
f(ctx, builder.addNode.bind(builder), fileRead)
); );
buildDependenciesFns.forEach((f) =>
f(ctx, builder.nodes, builder.addDependency.bind(builder), fileRead)
);
const projectGraph = builder.build();
if (shouldCache) { if (shouldCache) {
writeCache({ writeCache(rootFiles, projectGraph);
version: projectGraphCacheVersion,
projectGraph,
fileMap,
});
} }
return projectGraph; return projectGraph;
} else { } else {
// Cache file was modified _after_ all workspace files. const ctx = {
// Safe to return the cached graph. workspaceJson,
return cache.data.projectGraph; nxJson: normalizedNxJson,
} fileMap: fileMap,
} };
const projectGraph = buildProjectGraph(ctx, fileRead, null);
// ----------------------------------------------------------------------------- if (shouldCache) {
writeCache(rootFiles, projectGraph);
interface ProjectGraphCache {
version: string;
projectGraph: ProjectGraph;
fileMap: FileMap;
}
const distPath = `${appRootPath}/dist`;
const nxDepsPath = `${distPath}/nxdeps.json`;
function readCache(): false | { data: ProjectGraphCache; mtime: number } {
try {
mkdirSync(distPath);
} catch (e) {
/*
* @jeffbcross: Node JS docs recommend against checking for existence of directory immediately before creating it.
* Instead, just try to create the directory and handle the error.
*
* We ran into race conditions when running scripts concurrently, where multiple scripts were
* arriving here simultaneously, checking for directory existence, then trying to create the directory simultaneously.
*
* In this case, we're creating the directory. If the operation failed, we ensure that the directory
* exists before continuing (or raise an exception).
*/
if (!directoryExists(distPath)) {
throw new Error(`Failed to create directory: ${distPath}`);
} }
return projectGraph;
} }
}
const data = getValidCache( function buildProjectGraph(
fileExists(nxDepsPath) ? readJsonFile(nxDepsPath) : null ctx: { nxJson: NxJson<string[]>; workspaceJson: any; fileMap: FileMap },
fileRead: (s: string) => string,
projectGraph: ProjectGraph
) {
const builder = new ProjectGraphBuilder(projectGraph);
const buildNodesFns: BuildNodes[] = [
buildWorkspaceProjectNodes,
buildNpmPackageNodes,
];
const buildDependenciesFns: BuildDependencies[] = [
buildExplicitTypeScriptDependencies,
buildImplicitProjectDependencies,
buildExplicitNpmDependencies,
];
buildNodesFns.forEach((f) => f(ctx, builder.addNode.bind(builder), fileRead));
buildDependenciesFns.forEach((f) =>
f(ctx, builder.nodes, builder.addDependency.bind(builder), fileRead)
); );
return builder.build();
return data ? { data, mtime: mtime(nxDepsPath) } : false;
}
function getValidCache(cache: ProjectGraphCache | null) {
if (!cache) {
return null;
}
if (
cache.projectGraph &&
cache.fileMap &&
cache.version &&
cache.version === projectGraphCacheVersion
) {
return cache;
} else {
return null;
}
}
function writeCache(cache: ProjectGraphCache): void {
writeJsonFile(nxDepsPath, cache);
}
function maxMTime(files: FileData[]) {
return Math.max(...files.map((f) => f.mtime));
}
function rootWorkspaceFileData(workspaceFiles: FileData[]): FileData[] {
return [
`package.json`,
'workspace.json',
'angular.json',
`nx.json`,
`tsconfig.base.json`,
].reduce((acc: FileData[], curr: string) => {
const fileData = workspaceFiles.find((x) => x.file === curr);
if (fileData) {
acc.push(fileData);
}
return acc;
}, []);
}
function modifiedSinceCache(
fileMap: FileMap,
c: false | { data: ProjectGraphCache; mtime: number }
): { fileMap: FileMap; projectGraph?: ProjectGraph } {
// No cache -> compute entire graph
if (!c) {
return { fileMap };
}
const cachedFileMap = c.data.fileMap;
const currentProjects = Object.keys(fileMap).sort();
const previousProjects = Object.keys(cachedFileMap).sort();
// Projects changed -> compute entire graph
if (
currentProjects.length !== previousProjects.length ||
currentProjects.some((val, idx) => val !== previousProjects[idx])
) {
return { fileMap };
}
// Projects are same -> compute projects with file changes
const modifiedSince: FileMap = {};
currentProjects.forEach((p) => {
let projectFilesChanged = false;
for (const f of fileMap[p]) {
const fromCache = cachedFileMap[p].find((x) => x.file === f.file);
if (!fromCache || f.mtime > fromCache.mtime) {
projectFilesChanged = true;
break;
}
}
if (projectFilesChanged) {
modifiedSince[p] = fileMap[p];
}
});
// Re-compute nodes and dependencies for each project in file map.
Object.keys(modifiedSince).forEach((key) => {
delete c.data.projectGraph.dependencies[key];
});
return { fileMap: modifiedSince, projectGraph: c.data.projectGraph };
} }

View File

@ -65,49 +65,49 @@ describe('findTargetProjectWithImport', () => {
proj: [ proj: [
{ {
file: 'libs/proj/index.ts', file: 'libs/proj/index.ts',
mtime: 0, hash: 'some-hash',
ext: '.ts', ext: '.ts',
}, },
], ],
proj2: [ proj2: [
{ {
file: 'libs/proj2/index.ts', file: 'libs/proj2/index.ts',
mtime: 0, hash: 'some-hash',
ext: '.ts', ext: '.ts',
}, },
], ],
proj3a: [ proj3a: [
{ {
file: 'libs/proj3a/index.ts', file: 'libs/proj3a/index.ts',
mtime: 0, hash: 'some-hash',
ext: '.ts', ext: '.ts',
}, },
], ],
proj4ab: [ proj4ab: [
{ {
file: 'libs/proj4ab/index.ts', file: 'libs/proj4ab/index.ts',
mtime: 0, hash: 'some-hash',
ext: '.ts', ext: '.ts',
}, },
], ],
proj123: [ proj123: [
{ {
file: 'libs/proj123/index.ts', file: 'libs/proj123/index.ts',
mtime: 0, hash: 'some-hash',
ext: '.ts', ext: '.ts',
}, },
], ],
proj1234: [ proj1234: [
{ {
file: 'libs/proj1234/index.ts', file: 'libs/proj1234/index.ts',
mtime: 0, hash: 'some-hash',
ext: '.ts', ext: '.ts',
}, },
], ],
'proj1234-child': [ 'proj1234-child': [
{ {
file: 'libs/proj1234-child/index.ts', file: 'libs/proj1234-child/index.ts',
mtime: 0, hash: 'some-hash',
ext: '.ts', ext: '.ts',
}, },
], ],

View File

@ -33,7 +33,6 @@ export function checkDependencies(schema: Schema): Rule {
ig = ig.add(tree.read('.gitignore').toString()); ig = ig.add(tree.read('.gitignore').toString());
} }
const files: FileData[] = []; const files: FileData[] = [];
const mtime = Date.now(); //can't get mtime data from the tree :(
const workspaceDir = path.dirname(getWorkspacePath(tree)); const workspaceDir = path.dirname(getWorkspacePath(tree));
for (const dir of tree.getDir('/').subdirs) { for (const dir of tree.getDir('/').subdirs) {
@ -45,7 +44,7 @@ export function checkDependencies(schema: Schema): Rule {
files.push({ files.push({
file: path.relative(workspaceDir, file), file: path.relative(workspaceDir, file),
ext: path.extname(file), ext: path.extname(file),
mtime, hash: '',
}); });
}); });
} }

View File

@ -7,7 +7,7 @@ import { ProjectGraph, ProjectGraphNode } from '../core/project-graph';
import { Environment, NxJson } from '../core/shared-interfaces'; import { Environment, NxJson } from '../core/shared-interfaces';
import { NxArgs } from '@nrwl/workspace/src/command-line/utils'; import { NxArgs } from '@nrwl/workspace/src/command-line/utils';
import { isRelativePath } from '../utils/fileutils'; import { isRelativePath } from '../utils/fileutils';
import { Hasher } from './hasher'; import { Hasher } from '../core/hasher/hasher';
import { projectHasTargetAndConfiguration } from '../utils/project-graph-utils'; import { projectHasTargetAndConfiguration } from '../utils/project-graph-utils';
type RunArgs = yargs.Arguments & ReporterArgs; type RunArgs = yargs.Arguments & ReporterArgs;
@ -42,14 +42,11 @@ export async function runCommand<T extends RunArgs>(
}); });
const hasher = new Hasher(projectGraph, nxJson, tasksOptions); const hasher = new Hasher(projectGraph, nxJson, tasksOptions);
await Promise.all( const res = await hasher.hashTasks(tasks);
tasks.map(async (t) => { for (let i = 0; i < res.length; ++i) {
const hash = await hasher.hash(t); tasks[i].hash = res[i].value;
t.hash = hash.value; tasks[i].hashDetails = res[i].details;
t.hashDetails = hash.details; }
})
);
const cached = []; const cached = [];
tasksRunner(tasks, tasksOptions, { tasksRunner(tasks, tasksOptions, {
initiatingProject: initiatingProject, initiatingProject: initiatingProject,

View File

@ -976,7 +976,7 @@ describe('Enforce Module Boundaries', () => {
}); });
function createFile(f) { function createFile(f) {
return { file: f, ext: extname(f), mtime: 1 }; return { file: f, ext: extname(f), hash: '' };
} }
function runRule( function runRule(

View File

@ -409,16 +409,14 @@ export function getFullProjectGraphFromHost(host: Tree): ProjectGraph {
const workspaceFiles: FileData[] = []; const workspaceFiles: FileData[] = [];
const mtime = +Date.now();
workspaceFiles.push( workspaceFiles.push(
...allFilesInDirInHost(host, normalize(''), { recursive: false }).map((f) => ...allFilesInDirInHost(host, normalize(''), { recursive: false }).map((f) =>
getFileDataInHost(host, f, mtime) getFileDataInHost(host, f)
) )
); );
workspaceFiles.push( workspaceFiles.push(
...allFilesInDirInHost(host, normalize('tools')).map((f) => ...allFilesInDirInHost(host, normalize('tools')).map((f) =>
getFileDataInHost(host, f, mtime) getFileDataInHost(host, f)
) )
); );
@ -427,7 +425,7 @@ export function getFullProjectGraphFromHost(host: Tree): ProjectGraph {
const project = workspaceJson.projects[projectName]; const project = workspaceJson.projects[projectName];
workspaceFiles.push( workspaceFiles.push(
...allFilesInDirInHost(host, normalize(project.root)).map((f) => ...allFilesInDirInHost(host, normalize(project.root)).map((f) =>
getFileDataInHost(host, f, mtime) getFileDataInHost(host, f)
) )
); );
}); });
@ -441,15 +439,11 @@ export function getFullProjectGraphFromHost(host: Tree): ProjectGraph {
); );
} }
export function getFileDataInHost( export function getFileDataInHost(host: Tree, path: Path): FileData {
host: Tree,
path: Path,
mtime: number
): FileData {
return { return {
file: path, file: path,
ext: extname(normalize(path)), ext: extname(normalize(path)),
mtime, hash: '',
}; };
} }