feat(core): add WorkspaceContext class (#18999)

Jonathan Cammisuli 2023-09-13 09:56:36 -04:00 committed by GitHub
parent ae154e777e
commit 537d7eb8be
36 changed files with 555 additions and 387 deletions

.gitignore

@@ -12,6 +12,7 @@ tmp
 jest.debug.config.js
 .tool-versions
 /.nx-cache
+/.nx
 /.verdaccio/build/local-registry
 /graph/client/src/assets/environment.js
 /graph/client/src/assets/dev/environment.js

Cargo.lock

@@ -1354,6 +1354,7 @@ dependencies = [
 "napi-derive",
 "once_cell",
 "os_type",
+"parking_lot",
 "rayon",
 "regex",
 "swc_common",

@@ -31,6 +31,10 @@ describe('Nx Commands', () => {
     runCLI(`generate @nx/js:lib ${proj3}`);
   });

+  afterEach(() => {
+    runCLI('reset');
+  });
+
   afterAll(() => cleanupProject());

   it('should watch for project changes', async () => {

@@ -31,6 +31,7 @@ import {
 import { NxJsonConfiguration, output } from '@nx/devkit';
 import { readFileSync } from 'fs';
 import { join } from 'path';
+import { resetWorkspaceContext } from 'nx/src/utils/workspace-context';

 let projName: string;
@@ -566,4 +567,5 @@ export function cleanupProject({
       removeSync(tmpProjPath());
     } catch {}
   }
+  resetWorkspaceContext();
 }

@@ -15,6 +15,7 @@ ignore-files = "1.3.0"
 itertools = "0.10.5"
 once_cell = "1.18.0"
 os_type = "2.6.0"
+parking_lot = { version = "0.12.1", features = ["send_guard"] }
 napi = { version = '2.12.6', default-features = false, features = ['anyhow', 'napi4', 'tokio_rt'] }
 napi-derive = '2.9.3'
 regex = "1.9.1"

@@ -19,6 +19,8 @@ import { execSync } from 'child_process';
 import { join } from 'path';
 import { assertSupportedPlatform } from '../src/native/assert-supported-platform';
 import { performance } from 'perf_hooks';
+import { setupWorkspaceContext } from '../src/utils/workspace-context';
+import { daemonClient } from '../src/daemon/client/client';

 function main() {
   if (
@@ -64,6 +66,11 @@ function main() {
   ) {
     require('v8-compile-cache');
   }
+
+  if (!daemonClient.enabled() && workspace !== null) {
+    setupWorkspaceContext(workspace.dir);
+  }
+
   // polyfill rxjs observable to avoid issues with multiple version of Observable installed in node_modules
   // https://twitter.com/BenLesh/status/1192478226385428483?s=20
   if (!(Symbol as any).observable)

@@ -13,7 +13,6 @@ import { NxJsonConfiguration } from '../../config/nx-json';
 import { InProcessTaskHasher } from '../../hasher/task-hasher';
 import { hashTask } from '../../hasher/hash-task';
 import { getPackageManagerCommand } from '../../utils/package-manager';
-import { fileHasher } from '../../hasher/file-hasher';
 import { printAffectedDeprecationMessage } from './command-object';
 import { logger, NX_PREFIX } from '../../utils/logger';
@@ -72,14 +71,7 @@ async function createTasks(
     nxArgs.configuration,
     overrides
   );
-  const hasher = new InProcessTaskHasher(
-    {},
-    [],
-    projectGraph,
-    nxJson,
-    {},
-    fileHasher
-  );
+  const hasher = new InProcessTaskHasher({}, [], projectGraph, nxJson, {});
   const execCommand = getPackageManagerCommand().exec;
   const tasks = Object.values(taskGraph.tasks);

@@ -22,12 +22,10 @@ import {
   createTaskGraph,
   mapTargetDefaultsToDependencies,
 } from '../../tasks-runner/create-task-graph';
-import { TargetDefaults, TargetDependencies } from '../../config/nx-json';
 import { TaskGraph } from '../../config/task-graph';
 import { daemonClient } from '../../daemon/client/client';
 import { Server } from 'net';
 import { readProjectFileMapCache } from '../../project-graph/nx-deps-cache';
-import { fileHasher } from '../../hasher/file-hasher';
 import { getAffectedGraphNodes } from '../affected/affected';
 import { splitArgsIntoNxArgsAndOverrides } from '../../utils/command-line-utils';
@@ -574,7 +572,6 @@ async function createDepGraphClientResponse(
   affected: string[] = []
 ): Promise<ProjectGraphClientResponse> {
   performance.mark('project graph watch calculation:start');
-  await fileHasher.init();

   let graph = pruneExternalNodes(
     await createProjectGraphAsync({ exitOnError: true })

@@ -2,7 +2,6 @@ import { Task, TaskGraph } from '../../config/task-graph';
 import { getCachedSerializedProjectGraphPromise } from './project-graph-incremental-recomputation';
 import { InProcessTaskHasher } from '../../hasher/task-hasher';
 import { readNxJson } from '../../config/configuration';
-import { fileHasher } from '../../hasher/file-hasher';
 import { setHashEnv } from '../../hasher/set-hash-env';

 /**
@@ -31,8 +30,7 @@ export async function handleHashTasks(payload: {
       allWorkspaceFiles,
       projectGraph,
       nxJson,
-      payload.runnerOptions,
-      fileHasher
+      payload.runnerOptions
     );
   }
   const response = JSON.stringify(

@@ -1,7 +1,8 @@
-import { fileHasher } from '../../hasher/file-hasher';
+import { getAllFileDataInContext } from '../../utils/workspace-context';
+import { workspaceRoot } from '../../utils/workspace-root';

 export async function handleRequestFileData() {
-  const response = JSON.stringify(fileHasher.allFileData());
+  const response = JSON.stringify(getAllFileDataInContext(workspaceRoot));
   return {
     response,
     description: 'handleRequestFileData',

@@ -17,7 +17,7 @@ import { notifyFileWatcherSockets } from './file-watching/file-watcher-sockets';
 import { serverLogger } from './logger';
 import { workspaceRoot } from '../../utils/workspace-root';
 import { execSync } from 'child_process';
-import { fileHasher, hashArray } from '../../hasher/file-hasher';
+import { hashArray } from '../../hasher/file-hasher';
 import {
   retrieveWorkspaceFiles,
   retrieveProjectConfigurations,
@@ -27,6 +27,10 @@ import {
   ProjectsConfigurations,
 } from '../../config/workspace-json-project-json';
 import { readNxJson } from '../../config/nx-json';
+import {
+  resetWorkspaceContext,
+  updateFilesInContext,
+} from '../../utils/workspace-context';

 let cachedSerializedProjectGraphPromise: Promise<{
   error: Error | null;
@@ -163,17 +167,17 @@ function filterUpdatedFiles(files: string[]) {
 async function processCollectedUpdatedAndDeletedFiles() {
   try {
     performance.mark('hash-watched-changes-start');
-    const updatedFiles = await fileHasher.hashFiles(
-      filterUpdatedFiles([...collectedUpdatedFiles.values()])
-    );
+    const updatedFiles = filterUpdatedFiles([
+      ...collectedUpdatedFiles.values(),
+    ]);
     const deletedFiles = [...collectedDeletedFiles.values()];
+    let updatedFileHashes = updateFilesInContext(updatedFiles, deletedFiles);
     performance.mark('hash-watched-changes-end');
     performance.measure(
       'hash changed files from watcher',
       'hash-watched-changes-start',
       'hash-watched-changes-end'
     );
-    fileHasher.incrementalUpdate(updatedFiles, deletedFiles);
     const nxJson = readNxJson(workspaceRoot);
@@ -201,7 +205,7 @@ async function processCollectedUpdatedAndDeletedFiles() {
       projectNodes,
       projectFileMapWithFiles.projectFileMap,
       projectFileMapWithFiles.allWorkspaceFiles,
-      updatedFiles,
+      new Map(Object.entries(updatedFileHashes)),
       deletedFiles
     );
   } else {
@@ -330,8 +334,7 @@ async function resetInternalState() {
   currentProjectGraph = undefined;
   collectedUpdatedFiles.clear();
   collectedDeletedFiles.clear();
-  fileHasher.clear();
-  await fileHasher.ensureInitialized();
+  resetWorkspaceContext();
   waitPeriod = 100;
 }
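A note on the `new Map(Object.entries(updatedFileHashes))` conversion above: the native `incrementalUpdate` hands back a plain `Record<string, string>` across the N-API boundary, while the incremental project-graph update still consumes a `Map`. A minimal standalone sketch of that shape conversion (values illustrative only):

// Illustrative only: the native layer returns a plain object of path -> hash.
const updatedFileHashes: Record<string, string> = {
  'libs/a/src/index.ts': '123456789',
  'libs/a/project.json': '987654321',
};

// The project-graph update APIs still expect a Map, so the record is converted:
const updatedFiles = new Map(Object.entries(updatedFileHashes));
console.log(updatedFiles.get('libs/a/project.json')); // '987654321'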

@@ -56,8 +56,10 @@ import { readJsonFile } from '../../utils/fileutils';
 import { PackageJson } from '../../utils/package-json';
 import { getDaemonProcessIdSync, writeDaemonJsonProcessCache } from '../cache';
 import { handleHashTasks } from './handle-hash-tasks';
-import { fileHasher, hashArray } from '../../hasher/file-hasher';
+import { hashArray } from '../../hasher/file-hasher';
 import { handleRequestFileData } from './handle-request-file-data';
+import { setupWorkspaceContext } from '../../utils/workspace-context';
+import { hashFile } from '../../native';

 let performanceObserver: PerformanceObserver | undefined;
 let workspaceWatcherError: Error | undefined;
@@ -283,7 +285,7 @@ function lockFileHashChanged(): boolean {
     join(workspaceRoot, 'pnpm-lock.yaml'),
   ]
     .filter((file) => existsSync(file))
-    .map((file) => fileHasher.hashFile(file));
+    .map((file) => hashFile(file));
   const newHash = hashArray(lockHashes);
   if (existingLockHash && newHash != existingLockHash) {
     existingLockHash = newHash;
@@ -397,6 +399,8 @@ const handleOutputsChanges: FileWatcherCallback = async (err, changeEvents) => {
 };

 export async function startServer(): Promise<Server> {
+  setupWorkspaceContext(workspaceRoot);
+
   // Persist metadata about the background process so that it can be cleaned up later if needed
   await writeDaemonJsonProcessCache({
     processId: process.pid,
@@ -409,7 +413,6 @@ export async function startServer(): Promise<Server> {
   return new Promise(async (resolve, reject) => {
     try {
-      await fileHasher.ensureInitialized();
       server.listen(FULL_OS_SOCKET_PATH, async () => {
         try {
           serverLogger.log(`Started listening on: ${FULL_OS_SOCKET_PATH}`);

@@ -1,89 +1,3 @@
-import { performance } from 'perf_hooks';
-import { workspaceRoot } from '../utils/workspace-root';
-import { FileData } from '../config/project-graph';
-
-export class FileHasher {
-  private fileHashes: Map<string, string>;
-  private isInitialized = false;
-
-  async init(): Promise<void> {
-    performance.mark('init hashing:start');
-    // Import as needed. There is also an issue running unit tests in Nx repo if this is a top-level import.
-    const { hashFiles } = require('../native');
-    this.clear();
-    const filesObject = hashFiles(workspaceRoot);
-    this.fileHashes = new Map(Object.entries(filesObject));
-    performance.mark('init hashing:end');
-    performance.measure(
-      'init hashing',
-      'init hashing:start',
-      'init hashing:end'
-    );
-  }
-
-  hashFile(path: string): string {
-    // Import as needed. There is also an issue running unit tests in Nx repo if this is a top-level import.
-    const { hashFile } = require('../native');
-    return hashFile(path).hash;
-  }
-
-  clear(): void {
-    this.fileHashes = new Map<string, string>();
-    this.isInitialized = false;
-  }
-
-  async ensureInitialized() {
-    if (!this.isInitialized) {
-      await this.init();
-    }
-  }
-
-  async hashFiles(files: string[]): Promise<Map<string, string>> {
-    const r = new Map<string, string>();
-    for (let f of files) {
-      r.set(f, this.hashFile(f));
-    }
-    return r;
-  }
-
-  allFileData(): FileData[] {
-    const res = [];
-    this.fileHashes.forEach((hash, file) => {
-      res.push({
-        file,
-        hash,
-      });
-    });
-    res.sort((x, y) => x.file.localeCompare(y.file));
-    return res;
-  }
-
-  incrementalUpdate(
-    updatedFiles: Map<string, string>,
-    deletedFiles: string[] = []
-  ): void {
-    performance.mark('incremental hashing:start');
-    updatedFiles.forEach((hash, filename) => {
-      this.fileHashes.set(filename, hash);
-    });
-    for (const deletedFile of deletedFiles) {
-      this.fileHashes.delete(deletedFile);
-    }
-    performance.mark('incremental hashing:end');
-    performance.measure(
-      'incremental hashing',
-      'incremental hashing:start',
-      'incremental hashing:end'
-    );
-  }
-}
-
-export const fileHasher = new FileHasher();
-
 export function hashArray(content: string[]): string {
   // Import as needed. There is also an issue running unit tests in Nx repo if this is a top-level import.
   const { hashArray } = require('../native');

@@ -8,7 +8,6 @@ import {
   Hash,
   InProcessTaskHasher,
 } from './task-hasher';
-import { fileHasher } from './file-hasher';
 import { withEnvironmentVariables } from '../../internal-testing-utils/with-environment';

 jest.mock('../utils/workspace-root', () => {
@@ -114,8 +113,7 @@ describe('TaskHasher', () => {
       {} as any,
       {
         runtimeCacheInputs: ['echo runtime456'],
-      },
-      fileHasher
+      }
     );
     const hash = await hasher.hashTask(
@@ -178,8 +176,7 @@ describe('TaskHasher', () => {
         },
       },
       {} as any,
-      {},
-      fileHasher
+      {}
     );
     const hash = await hasher.hashTask(
@@ -261,8 +258,7 @@ describe('TaskHasher', () => {
          prod: ['!{projectRoot}/**/*.spec.ts'],
        },
      } as any,
-      {},
-      fileHasher
+      {}
     );
     const hash = await hasher.hashTask(
@@ -334,8 +330,7 @@ describe('TaskHasher', () => {
          prod: ['!{projectRoot}/**/*.spec.ts'],
        },
      } as any,
-      {},
-      fileHasher
+      {}
     );
     const taskGraph = {
@@ -437,8 +432,7 @@ describe('TaskHasher', () => {
          prod: ['!{projectRoot}/**/*.spec.ts'],
        },
      } as any,
-      {},
-      fileHasher
+      {}
     );
     const taskGraph = {
@@ -535,8 +529,7 @@ describe('TaskHasher', () => {
         },
       },
     } as any,
-      {},
-      fileHasher
+      {}
     );
     const hash = await hasher.hashTask(
@@ -594,8 +587,7 @@ describe('TaskHasher', () => {
      {
        runtimeCacheInputs: ['echo runtime123', 'echo runtime456'],
        selectivelyHashTsConfig: true,
-      },
-      fileHasher
+      }
     );
     const hash = await hasher.hashTask(
@@ -654,8 +646,7 @@ describe('TaskHasher', () => {
      },
      {} as any,
-      {},
-      fileHasher
+      {}
     );
     const taskGraph = {
@@ -725,8 +716,7 @@ describe('TaskHasher', () => {
      {} as any,
      {
        runtimeCacheInputs: ['boom'],
-      },
-      fileHasher
+      }
     );
     try {
@@ -794,8 +784,7 @@ describe('TaskHasher', () => {
      },
      {} as any,
-      {},
-      fileHasher
+      {}
     );
     const hash = await hasher.hashTask(
@@ -859,8 +848,7 @@ describe('TaskHasher', () => {
      },
      {} as any,
-      {},
-      fileHasher
+      {}
     );
     const hash = await hasher.hashTask(
@@ -915,8 +903,7 @@ describe('TaskHasher', () => {
        dependencies: {},
      },
      {} as any,
-      {},
-      fileHasher
+      {}
     );
     const hash = await hasher.hashTask(
@@ -999,8 +986,7 @@ describe('TaskHasher', () => {
      },
      {} as any,
-      {},
-      fileHasher
+      {}
     );
   }
@@ -1142,8 +1128,7 @@ describe('TaskHasher', () => {
        },
      },
      {} as any,
-      {},
-      fileHasher
+      {}
     );
     const hash = await hasher.hashTask(
@@ -1285,8 +1270,7 @@ describe('TaskHasher', () => {
      },
      {} as any,
-      {},
-      fileHasher
+      {}
     );
     const computeTaskHash = async (hasher, appName) => {
@@ -1356,8 +1340,7 @@ describe('TaskHasher', () => {
      },
      {} as any,
-      {},
-      fileHasher
+      {}
     );
     const hash = await hasher.hashTask(
@@ -1437,8 +1420,7 @@ describe('TaskHasher', () => {
      },
      {} as any,
-      {},
-      fileHasher
+      {}
     );
     const hash = await hasher.hashTask(
@@ -1516,8 +1498,7 @@ describe('TaskHasher', () => {
      },
      {} as any,
-      {},
-      fileHasher
+      {}
     );
     const hash = await hasher.hashTask(
@@ -1634,8 +1615,7 @@ describe('TaskHasher', () => {
        },
      },
    } as any,
-      {},
-      fileHasher
+      {}
     );
     await tempFs.createFiles({
@@ -1769,8 +1749,7 @@ describe('TaskHasher', () => {
        },
      },
    } as any,
-      {},
-      fileHasher
+      {}
     );
     await tempFs.createFiles({

@@ -14,13 +14,14 @@ import { hashTsConfig } from '../plugins/js/hasher/hasher';
 import { DaemonClient } from '../daemon/client/client';
 import { createProjectRootMappings } from '../project-graph/utils/find-project-for-path';
 import { findMatchingProjects } from '../utils/find-matching-projects';
-import { FileHasher, hashArray, hashObject } from './file-hasher';
+import { hashArray, hashObject } from './file-hasher';
 import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils';
 import { getHashEnv } from './set-hash-env';
 import { workspaceRoot } from '../utils/workspace-root';
 import { join, relative } from 'path';
 import { normalizePath } from '../utils/path';
 import { findAllProjectNodeDependencies } from '../utils/project-graph-utils';
+import { hashFile } from '../native';

 type ExpandedSelfInput =
   | { fileset: string }
@@ -102,8 +103,7 @@ export class InProcessTaskHasher implements TaskHasher {
     private readonly allWorkspaceFiles: FileData[],
     private readonly projectGraph: ProjectGraph,
     private readonly nxJson: NxJsonConfiguration,
-    private readonly options: any,
-    private readonly fileHasher: FileHasher
+    private readonly options: any
   ) {
     const legacyRuntimeInputs = (
       this.options && this.options.runtimeCacheInputs
@@ -130,7 +130,6 @@ export class InProcessTaskHasher implements TaskHasher {
       this.projectFileMap,
       this.allWorkspaceFiles,
       this.projectGraph,
-      this.fileHasher,
       { selectivelyHashTsConfig: this.options.selectivelyHashTsConfig ?? false }
     );
   }
@@ -205,7 +204,6 @@ class TaskHasherImpl {
     private readonly projectFileMap: ProjectFileMap,
     private readonly allWorkspaceFiles: FileData[],
     private readonly projectGraph: ProjectGraph,
-    private readonly fileHasher: FileHasher,
     private readonly options: { selectivelyHashTsConfig: boolean }
   ) {
     // External Dependencies are all calculated up front in a deterministic order
@@ -412,7 +410,7 @@ class TaskHasherImpl {
     );
     const hashDetails = {};
     const hashes: string[] = [];
-    for (const [file, hash] of await this.fileHasher.hashFiles(
+    for (const [file, hash] of this.hashFiles(
       filteredFiles.map((p) => join(workspaceRoot, p))
     )) {
       hashes.push(hash);
@@ -437,6 +435,14 @@ class TaskHasherImpl {
     return partialHashes;
   }

+  private hashFiles(files: string[]): Map<string, string> {
+    const r = new Map<string, string>();
+    for (let f of files) {
+      r.set(f, hashFile(f));
+    }
+    return r;
+  }
+
   private getExternalDependencyHash(externalNodeName: string) {
     const combinedHash = this.combinePartialHashes(
       this.externalDependencyHashes.get(externalNodeName)
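With `fileHasher` removed from the constructor, callers now pass five arguments and per-file hashing happens inside the hasher via the native `hashFile` binding. A hedged sketch of the new call shape (the deep import path and the `declare`d values are assumptions for illustration; they mirror the updated call sites in this commit):

import { InProcessTaskHasher } from 'nx/src/hasher/task-hasher';

declare const projectGraph: any; // assumed: a ProjectGraph built elsewhere
declare const nxJson: any; // assumed: parsed nx.json

// Five arguments now; the trailing fileHasher parameter is gone.
const hasher = new InProcessTaskHasher({}, [], projectGraph, nxJson, {});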

@@ -1,4 +1,3 @@
-use crate::native::types::FileData;
 use crate::native::utils::path::Normalize;
 use crate::native::walker::nx_walker;
 use std::collections::HashMap;
@@ -12,14 +11,12 @@ pub fn hash_array(input: Vec<String>) -> String {
 }

 #[napi]
-pub fn hash_file(file: String) -> Option<FileData> {
-    let Ok(content) = std::fs::read(&file) else {
+pub fn hash_file(file: String) -> Option<String> {
+    let Ok(content) = std::fs::read(file) else {
         return None;
     };

-    let hash = xxh3::xxh3_64(&content).to_string();
-
-    Some(FileData { hash, file })
+    Some(xxh3::xxh3_64(&content).to_string())
 }

 #[napi]
@@ -72,6 +69,6 @@ mod tests {
     let test_file_path = temp_dir.display().to_string() + "/test.txt";

     let content = hash_file(test_file_path);
-    assert_eq!(content.unwrap().hash, "6193209363630369380");
+    assert_eq!(content.unwrap(), "6193209363630369380");
 }
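The native `hash_file` now returns the bare hash string rather than a `FileData` struct, so JS callers drop the `.hash` access. A small sketch of the new surface (assuming the built `nx/src/native` binding; the file path is illustrative):

import { hashFile } from 'nx/src/native';

// Previously hashFile(path) returned FileData | null and callers read .hash.
// Now the xxh3 hash string (or null for an unreadable file) comes back directly.
const hash: string | null = hashFile('/tmp/some-file.txt');
if (hash !== null) {
  console.log(`xxh3: ${hash}`);
}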

@@ -16,7 +16,7 @@ export function getFilesForOutputs(directory: string, entries: Array<string>): A
 export function remove(src: string): void
 export function copy(src: string, dest: string): void
 export function hashArray(input: Array<string>): string
-export function hashFile(file: string): FileData | null
+export function hashFile(file: string): string | null
 export function hashFiles(workspaceRoot: string): Record<string, string>
 export function findImports(projectFileMap: Record<string, Array<string>>): Array<ImportResult>
 export interface ExternalNodeData {
@@ -69,10 +69,6 @@ export interface FileData {
   file: string
   hash: string
 }
-/**
- * Newly created files will have the `update` EventType as well.
- * This simplifies logic between OS's, IDEs and git operations
- */
 export const enum EventType {
   delete = 'delete',
   update = 'update',
@@ -87,21 +83,16 @@ export const enum WorkspaceErrors {
   ParseError = 'ParseError',
   Generic = 'Generic'
 }
-/** Get workspace config files based on provided globs */
-export function getProjectConfigurationFiles(workspaceRoot: string, globs: Array<string>): Array<string>
-/** Get workspace config files based on provided globs */
-export function getProjectConfigurations(workspaceRoot: string, globs: Array<string>, parseConfigurations: (arg0: Array<string>) => ConfigurationParserResult): ConfigurationParserResult
+export interface ConfigurationParserResult {
+  projectNodes: Record<string, object>
+  externalNodes: Record<string, object>
+}
 export interface NxWorkspaceFiles {
   projectFileMap: Record<string, Array<FileData>>
   globalFiles: Array<FileData>
   projectConfigurations: Record<string, object>
   externalNodes: Record<string, object>
 }
-export function getWorkspaceFilesNative(workspaceRoot: string, globs: Array<string>, parseConfigurations: (arg0: Array<string>) => ConfigurationParserResult): NxWorkspaceFiles
-export interface ConfigurationParserResult {
-  projectNodes: Record<string, object>
-  externalNodes: Record<string, object>
-}
 export class ImportResult {
   file: string
   sourceProject: string
@@ -118,3 +109,12 @@ export class Watcher {
   watch(callback: (err: string | null, events: WatchEvent[]) => void): void
   stop(): Promise<void>
 }
+export class WorkspaceContext {
+  workspaceRoot: string
+  constructor(workspaceRoot: string)
+  getWorkspaceFiles(globs: Array<string>, parseConfigurations: (arg0: Array<string>) => ConfigurationParserResult): NxWorkspaceFiles
+  getProjectConfigurationFiles(globs: Array<string>): Array<string>
+  getProjectConfigurations(globs: Array<string>, parseConfigurations: (arg0: Array<string>) => ConfigurationParserResult): ConfigurationParserResult
+  incrementalUpdate(updatedFiles: Array<string>, deletedFiles: Array<string>): Record<string, string>
+  allFileData(): Array<FileData>
+}
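Taken together, the declaration above replaces the three free functions (`getProjectConfigurationFiles`, `getProjectConfigurations`, `getWorkspaceFilesNative`) with methods on a long-lived context. A hedged usage sketch against this declared surface (the workspace path is illustrative):

import { WorkspaceContext } from 'nx/src/native';

const ctx = new WorkspaceContext('/path/to/workspace');

// Project config discovery now hangs off the context instead of free functions.
const configFiles = ctx.getProjectConfigurationFiles([
  'project.json',
  '**/project.json',
]);

// Watcher events feed back in; the result is a path -> hash record
// covering only the files that changed.
const updatedHashes = ctx.incrementalUpdate(['libs/a/src/index.ts'], []);

// Sorted FileData for the entire workspace.
const allFiles = ctx.allFileData();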

@@ -246,7 +246,7 @@ if (!nativeBinding) {
   throw new Error(`Failed to load native binding`)
 }

-const { expandOutputs, getFilesForOutputs, remove, copy, hashArray, hashFile, hashFiles, ImportResult, findImports, EventType, Watcher, WorkspaceErrors, getProjectConfigurationFiles, getProjectConfigurations, getWorkspaceFilesNative } = nativeBinding
+const { expandOutputs, getFilesForOutputs, remove, copy, hashArray, hashFile, hashFiles, ImportResult, findImports, EventType, Watcher, WorkspaceContext, WorkspaceErrors } = nativeBinding

 module.exports.expandOutputs = expandOutputs
 module.exports.getFilesForOutputs = getFilesForOutputs
@@ -259,7 +259,5 @@ module.exports.ImportResult = ImportResult
 module.exports.findImports = findImports
 module.exports.EventType = EventType
 module.exports.Watcher = Watcher
+module.exports.WorkspaceContext = WorkspaceContext
 module.exports.WorkspaceErrors = WorkspaceErrors
-module.exports.getProjectConfigurationFiles = getProjectConfigurationFiles
-module.exports.getProjectConfigurations = getProjectConfigurations
-module.exports.getWorkspaceFilesNative = getWorkspaceFilesNative

@@ -18,18 +18,24 @@ where
 ) -> std::fmt::Result {
     // Format values from the event's's metadata:
     let metadata = event.metadata();
+    let level = *metadata.level();

-    if metadata.level() != &Level::WARN && metadata.level() != &Level::TRACE {
-        write!(&mut writer, "\n{} {} ", ">".cyan(), "NX".bold().cyan())?;
-    }
-
-    if metadata.level() == &Level::TRACE {
-        write!(
-            &mut writer,
-            "{}: ",
-            format!("{}", metadata.level()).bold().red()
-        )?;
-    }
+    match level {
+        Level::TRACE | Level::DEBUG => {
+            write!(
+                &mut writer,
+                "{} {}: ",
+                format!("{}", metadata.level()).bold().red(),
+                metadata.target()
+            )?;
+        }
+        Level::WARN => {
+            write!(&mut writer, "\n{} {} ", ">".yellow(), "NX".bold().yellow())?;
+        }
+        _ => {
+            write!(&mut writer, "\n{} {} ", ">".cyan(), "NX".bold().cyan())?;
+        }
+    }

     // Format all the spans in the event's span context.
     if let Some(scope) = ctx.event_scope() {
@@ -57,6 +63,10 @@ where
     // Write fields on the event
     ctx.field_format().format_fields(writer.by_ref(), event)?;

+    if !(matches!(level, Level::TRACE)) && !(matches!(level, Level::DEBUG)) {
+        writeln!(&mut writer)?;
+    }
+
     writeln!(writer)
 }

@@ -12,7 +12,7 @@ describe('hasher', () => {
     const tempFilePath = join(tempDirPath, 'temp.txt');
     await writeFile(tempFilePath, 'content');

-    expect(hashFile(tempFilePath).hash).toBe('6193209363630369380');
+    expect(hashFile(tempFilePath)).toBe('6193209363630369380');
   });

   it('should hash content', async () => {

@@ -1,4 +1,4 @@
-import { getProjectConfigurations, getWorkspaceFilesNative } from '../index';
+import { WorkspaceContext } from '../index';
 import { TempFs } from '../../utils/testing/temp-fs';
 import { NxJsonConfiguration } from '../../config/nx-json';
 import { dirname, join } from 'path';
@@ -17,7 +17,7 @@ describe('workspace files', () => {
     }
     return {
       projectNodes: res,
-      externalNodes: {}
+      externalNodes: {},
     };
   };
 }
@@ -54,11 +54,11 @@ describe('workspace files', () => {
       './libs/package-project/index.js': '',
       './nested/non-project/file.txt': '',
     });

     let globs = ['project.json', '**/project.json', 'libs/*/package.json'];
+    const context = new WorkspaceContext(fs.tempDir);
     let { projectFileMap, projectConfigurations, globalFiles } =
-      getWorkspaceFilesNative(
-        fs.tempDir,
+      context.getWorkspaceFiles(
         globs,
         createParseConfigurationsFunction(fs.tempDir)
       );
@@ -179,9 +179,11 @@ describe('workspace files', () => {
       './src/index.js': '',
       './jest.config.js': '',
     });

+    const context = new WorkspaceContext(fs.tempDir);
     const globs = ['project.json', '**/project.json', '**/package.json'];
-    const { globalFiles, projectFileMap } = getWorkspaceFilesNative(
-      fs.tempDir,
+    const { globalFiles, projectFileMap } = context.getWorkspaceFiles(
       globs,
       createParseConfigurationsFunction(fs.tempDir)
     );
@@ -235,12 +237,10 @@ describe('workspace files', () => {
       './libs/project1/index.js': '',
     });

+    const context = new WorkspaceContext(fs.tempDir);
     let globs = ['project.json', '**/project.json', '**/package.json'];
-    let nodes = getProjectConfigurations(
-      fs.tempDir,
-      globs,
-      (filenames) => {
+    let nodes = context.getProjectConfigurations(globs, (filenames) => {
       const res = {};
       for (const filename of filenames) {
         const json = readJsonFile(join(fs.tempDir, filename));
@@ -250,18 +250,18 @@ describe('workspace files', () => {
         };
       }
       return {
-        externalNodes: {}, projectNodes: res
+        externalNodes: {},
+        projectNodes: res,
       };
-    }
-    );
+    });

     expect(nodes.projectNodes).toEqual({
-      "project1": {
-        "name": "project1",
-        "root": "libs/project1",
+      project1: {
+        name: 'project1',
+        root: 'libs/project1',
       },
-      "repo-name": expect.objectContaining({
-        "name": "repo-name",
-        "root": ".",
+      'repo-name': expect.objectContaining({
+        name: 'repo-name',
+        root: '.',
       }),
     });

@@ -8,8 +8,6 @@ use crate::native::watch::utils::transform_event;

 #[napi(string_enum)]
 #[derive(Debug)]
-/// Newly created files will have the `update` EventType as well.
-/// This simplifies logic between OS's, IDEs and git operations
 pub enum EventType {
     #[allow(non_camel_case_types)]
     delete,

@@ -0,0 +1,40 @@
+use crate::native::utils::glob::build_glob_set;
+use crate::native::utils::path::Normalize;
+use crate::native::workspace::types::ConfigurationParserResult;
+use crate::native::workspace::errors::{InternalWorkspaceErrors, WorkspaceErrors};
+use rayon::prelude::*;
+use std::path::PathBuf;
+
+/// Get workspace config files based on provided globs
+pub(super) fn get_project_configuration_files(
+    globs: Vec<String>,
+    files: Option<&[(PathBuf, String)]>,
+) -> napi::Result<Vec<String>, WorkspaceErrors> {
+    let Some(files) = files else {
+        return Ok(Default::default());
+    };
+
+    let globs =
+        build_glob_set(&globs).map_err(|err| InternalWorkspaceErrors::Generic(err.to_string()))?;
+
+    Ok(files
+        .par_iter()
+        .map(|file| file.0.to_normalized_string())
+        .filter(|path| globs.is_match(path))
+        .collect())
+}
+
+/// Get workspace config files based on provided globs
+pub(super) fn get_project_configurations<ConfigurationParser>(
+    globs: Vec<String>,
+    files: Option<&[(PathBuf, String)]>,
+    parse_configurations: ConfigurationParser,
+) -> napi::Result<ConfigurationParserResult>
+where
+    ConfigurationParser: Fn(Vec<String>) -> napi::Result<ConfigurationParserResult>,
+{
+    let config_paths =
+        get_project_configuration_files(globs, files).map_err(anyhow::Error::from)?;
+
+    parse_configurations(config_paths)
+}

@@ -0,0 +1,225 @@
+use crate::native::logger::enable_logger;
+use std::collections::HashMap;
+
+use crate::native::types::FileData;
+use crate::native::utils::path::Normalize;
+use parking_lot::lock_api::MutexGuard;
+use parking_lot::{Condvar, Mutex, RawMutex};
+use rayon::prelude::*;
+use std::ops::Deref;
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+use std::thread;
+use tracing::{trace, warn};
+use xxhash_rust::xxh3;
+
+use crate::native::walker::nx_walker;
+use crate::native::workspace::errors::WorkspaceErrors;
+use crate::native::workspace::workspace_files::NxWorkspaceFiles;
+use crate::native::workspace::{config_files, workspace_files};
+use crate::native::workspace::types::ConfigurationParserResult;
+
+#[napi]
+pub struct WorkspaceContext {
+    pub workspace_root: String,
+    workspace_root_path: PathBuf,
+    files_worker: FilesWorker,
+}
+
+type Files = Vec<(PathBuf, String)>;
+struct FilesWorker(Option<Arc<(Mutex<Files>, Condvar)>>);
+
+impl FilesWorker {
+    fn gather_files(workspace_root: &Path) -> Self {
+        if !workspace_root.exists() {
+            warn!(
+                "workspace root does not exist: {}",
+                workspace_root.display()
+            );
+            return FilesWorker(None);
+        }
+
+        let files_lock = Arc::new((Mutex::new(Vec::new()), Condvar::new()));
+        let files_lock_clone = Arc::clone(&files_lock);
+        let workspace_root = workspace_root.to_owned();
+
+        thread::spawn(move || {
+            trace!("locking files");
+            let (lock, cvar) = &*files_lock_clone;
+            let mut workspace_files = lock.lock();
+            let files = nx_walker(workspace_root, |rec| {
+                let mut file_hashes: Vec<(PathBuf, String)> = vec![];
+                for (path, content) in rec {
+                    file_hashes.push((path, xxh3::xxh3_64(&content).to_string()));
+                }
+                file_hashes
+            });
+
+            workspace_files.extend(files);
+            workspace_files.par_sort();
+
+            let files_len = workspace_files.len();
+            trace!(?files_len, "files retrieved");
+
+            cvar.notify_all();
+        });
+
+        FilesWorker(Some(files_lock))
+    }
+
+    pub fn get_files(&self) -> Option<MutexGuard<'_, RawMutex, Files>> {
+        let Some(files_sync) = &self.0 else {
+            trace!("there were no files because the workspace root did not exist");
+            return None;
+        };
+
+        let (files_lock, cvar) = &files_sync.deref();
+        let mut files = files_lock.lock();
+        let files_len = files.len();
+        if files_len == 0 {
+            trace!("waiting for files");
+            cvar.wait(&mut files);
+        }
+        trace!("files are available");
+        Some(files)
+    }
+
+    pub fn update_files(
+        &self,
+        workspace_root_path: &Path,
+        updated_files: Vec<&str>,
+        deleted_files: Vec<&str>,
+    ) -> HashMap<String, String> {
+        let Some(files_sync) = &self.0 else {
+            trace!("there were no files because the workspace root did not exist");
+            return HashMap::new();
+        };
+
+        let (files_lock, _) = &files_sync.deref();
+        let mut files = files_lock.lock();
+        let mut map: HashMap<PathBuf, String> = files.drain(..).collect();
+
+        for deleted_file in deleted_files {
+            map.remove(&PathBuf::from(deleted_file));
+        }
+
+        let updated_files_hashes: HashMap<String, String> = updated_files
+            .par_iter()
+            .filter_map(|path| {
+                let full_path = workspace_root_path.join(path);
+                let Ok(content) = std::fs::read(&full_path) else {
+                    trace!("could not read file: {full_path:?}");
+                    return None;
+                };
+                Some((path.to_string(), xxh3::xxh3_64(&content).to_string()))
+            })
+            .collect();
+
+        for (file, hash) in &updated_files_hashes {
+            map.entry(file.into())
+                .and_modify(|e| *e = hash.clone())
+                .or_insert(hash.clone());
+        }
+
+        *files = map.into_iter().collect();
+        files.par_sort();
+
+        updated_files_hashes
+    }
+}
+
+#[napi]
+impl WorkspaceContext {
+    #[napi(constructor)]
+    pub fn new(workspace_root: String) -> Self {
+        enable_logger();
+
+        trace!(?workspace_root);
+
+        let workspace_root_path = PathBuf::from(&workspace_root);
+
+        WorkspaceContext {
+            files_worker: FilesWorker::gather_files(&workspace_root_path),
+            workspace_root,
+            workspace_root_path,
+        }
+    }
+
+    #[napi]
+    pub fn get_workspace_files<ConfigurationParser>(
+        &self,
+        globs: Vec<String>,
+        parse_configurations: ConfigurationParser,
+    ) -> napi::Result<NxWorkspaceFiles, WorkspaceErrors>
+    where
+        ConfigurationParser: Fn(Vec<String>) -> napi::Result<ConfigurationParserResult>,
+    {
+        workspace_files::get_files(
+            globs,
+            parse_configurations,
+            self.files_worker
+                .get_files()
+                .as_deref()
+                .map(|files| files.as_slice()),
+        )
+    }
+
+    #[napi]
+    pub fn get_project_configuration_files(
+        &self,
+        globs: Vec<String>,
+    ) -> napi::Result<Vec<String>, WorkspaceErrors> {
+        config_files::get_project_configuration_files(
+            globs,
+            self.files_worker
+                .get_files()
+                .as_deref()
+                .map(|files| files.as_slice()),
+        )
+    }
+
+    #[napi]
+    pub fn get_project_configurations<ConfigurationParser>(
+        &self,
+        globs: Vec<String>,
+        parse_configurations: ConfigurationParser,
+    ) -> napi::Result<ConfigurationParserResult>
+    where
+        ConfigurationParser: Fn(Vec<String>) -> napi::Result<ConfigurationParserResult>,
+    {
+        config_files::get_project_configurations(
+            globs,
+            self.files_worker
+                .get_files()
+                .as_deref()
+                .map(|files| files.as_slice()),
+            parse_configurations,
+        )
+    }
+
+    #[napi]
+    pub fn incremental_update(
+        &self,
+        updated_files: Vec<&str>,
+        deleted_files: Vec<&str>,
+    ) -> HashMap<String, String> {
+        self.files_worker
+            .update_files(&self.workspace_root_path, updated_files, deleted_files)
+    }
+
+    #[napi]
+    pub fn all_file_data(&self) -> Vec<FileData> {
+        self.files_worker
+            .get_files()
+            .map_or_else(Vec::new, |files| {
+                files
+                    .iter()
+                    .map(|(path, content)| FileData {
+                        file: path.to_normalized_string(),
+                        hash: content.clone(),
+                    })
+                    .collect()
+            })
+    }
+}
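Worth noting about `FilesWorker`: the constructor returns immediately while a spawned thread walks and hashes the workspace, and the first call into `get_files` blocks on the `Condvar` until that scan completes (the `send_guard` feature enabled on `parking_lot` in Cargo.toml presumably lets the returned guard be held safely across threads). From the JS side that looks roughly like this sketch (timings illustrative, assuming the built `nx/src/native` binding):

import { WorkspaceContext } from 'nx/src/native';

const t0 = Date.now();
const ctx = new WorkspaceContext(process.cwd()); // returns at once; walker thread starts
console.log(`constructor returned after ${Date.now() - t0}ms`); // ~0ms

// The first query locks the files mutex and waits on the condition variable
// until the walk-and-hash pass completes, so it absorbs the full scan cost once.
const files = ctx.allFileData();
console.log(`first query after ${Date.now() - t0}ms (${files.length} files)`);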

@@ -1,5 +1,3 @@
-use std::path::PathBuf;
-
 use napi::bindgen_prelude::*;
 use thiserror::Error;
@@ -23,19 +21,18 @@ impl AsRef<str> for WorkspaceErrors {
 #[derive(Debug, Error)]
 #[non_exhaustive]
 pub enum InternalWorkspaceErrors {
-    #[error("{file}")]
-    ParseError { file: PathBuf },
-    #[error("{msg}")]
-    Generic { msg: String },
+    #[error("{0}")]
+    ParseError(String),
+    #[error("{0}")]
+    Generic(String),
 }

 impl From<InternalWorkspaceErrors> for napi::Error<WorkspaceErrors> {
     fn from(value: InternalWorkspaceErrors) -> Self {
+        let msg = value.to_string();
         match value {
-            InternalWorkspaceErrors::ParseError { file } => {
-                Error::new(WorkspaceErrors::ParseError, file.display().to_string())
-            }
-            InternalWorkspaceErrors::Generic { msg } => Error::new(WorkspaceErrors::Generic, msg),
+            InternalWorkspaceErrors::ParseError(_) => Error::new(WorkspaceErrors::ParseError, msg),
+            InternalWorkspaceErrors::Generic(_) => Error::new(WorkspaceErrors::Generic, msg),
         }
     }
 }

@@ -1,48 +0,0 @@
-use crate::native::utils::glob::build_glob_set;
-use crate::native::utils::path::Normalize;
-use crate::native::walker::nx_walker;
-use crate::native::workspace::types::ConfigurationParserResult;
-use std::path::PathBuf;
-
-#[napi]
-/// Get workspace config files based on provided globs
-pub fn get_project_configuration_files(
-    workspace_root: String,
-    globs: Vec<String>,
-) -> napi::Result<Vec<String>> {
-    let globs = build_glob_set(&globs)?;
-    let config_paths: Vec<String> = nx_walker(workspace_root, move |rec| {
-        let mut config_paths: Vec<PathBuf> = Vec::new();
-        for (path, _) in rec {
-            if globs.is_match(&path) {
-                config_paths.push(path);
-            }
-        }
-        config_paths
-            .into_iter()
-            .map(|p| p.to_normalized_string())
-            .collect()
-    });
-
-    Ok(config_paths)
-}
-
-#[napi]
-/// Get workspace config files based on provided globs
-pub fn get_project_configurations<ConfigurationParser>(
-    workspace_root: String,
-    globs: Vec<String>,
-    parse_configurations: ConfigurationParser,
-) -> napi::Result<ConfigurationParserResult>
-where
-    ConfigurationParser: Fn(Vec<String>) -> napi::Result<ConfigurationParserResult>,
-{
-    let config_paths: Vec<String> = get_project_configuration_files(workspace_root, globs).unwrap();
-    parse_configurations(config_paths)
-}
-
-#[cfg(test)]
-mod test {}

@@ -1,4 +1,5 @@
+pub mod config_files;
+pub mod context;
 mod errors;
-pub mod get_config_files;
-pub mod get_nx_workspace_files;
 mod types;
+pub mod workspace_files;

@@ -1,20 +1,18 @@
 use napi::JsObject;
-use std::collections::{HashMap, HashSet};
+use std::collections::HashMap;
 use std::path::{Path, PathBuf};

 use rayon::prelude::*;
 use tracing::trace;
-use xxhash_rust::xxh3;

-use crate::native::logger::enable_logger;
 use crate::native::types::FileData;
-use crate::native::utils::glob::build_glob_set;
 use crate::native::utils::path::Normalize;
-use crate::native::walker::nx_walker;
-use crate::native::workspace::errors::WorkspaceErrors;
+use crate::native::workspace::config_files;
+use crate::native::workspace::errors::{InternalWorkspaceErrors, WorkspaceErrors};
 use crate::native::workspace::types::{ConfigurationParserResult, FileLocation};

 #[napi(object)]
+#[derive(Default)]
 pub struct NxWorkspaceFiles {
     pub project_file_map: HashMap<String, Vec<FileData>>,
     pub global_files: Vec<FileData>,
@@ -22,44 +20,41 @@ pub struct NxWorkspaceFiles {
     pub external_nodes: HashMap<String, JsObject>,
 }

-#[napi]
-pub fn get_workspace_files_native<ConfigurationParser>(
-    workspace_root: String,
+pub(super) fn get_files<ConfigurationParser>(
     globs: Vec<String>,
     parse_configurations: ConfigurationParser,
+    file_data: Option<&[(PathBuf, String)]>,
 ) -> napi::Result<NxWorkspaceFiles, WorkspaceErrors>
 where
     ConfigurationParser: Fn(Vec<String>) -> napi::Result<ConfigurationParserResult>,
 {
-    enable_logger();
+    let Some(file_data) = file_data else {
+        return Ok(Default::default());
+    };

-    trace!("{workspace_root}, {globs:?}");
+    trace!("{globs:?}");

-    let (projects, mut file_data) = get_file_data(&workspace_root, globs)
-        .map_err(|err| napi::Error::new(WorkspaceErrors::Generic, err.to_string()))?;
-    let projects_vec: Vec<String> = projects.iter().map(|p| p.to_normalized_string()).collect();
-    let parsed_graph_nodes = parse_configurations(projects_vec)
-        .map_err(|e| napi::Error::new(WorkspaceErrors::ParseError, e.to_string()))?;
+    let parsed_graph_nodes =
+        config_files::get_project_configurations(globs, Some(file_data), parse_configurations)
+            .map_err(|e| InternalWorkspaceErrors::ParseError(e.to_string()))?;

     let root_map = create_root_map(&parsed_graph_nodes.project_nodes);

     trace!(?root_map);

-    // Files need to be sorted each time because when we do hashArray in the TaskHasher.js, the order of the files should be deterministic
-    file_data.par_sort();
-
     let file_locations = file_data
         .into_par_iter()
-        .map(|file_data| {
-            let file_path = Path::new(&file_data.file);
+        .map(|(file_path, hash)| {
             let mut parent = file_path.parent().unwrap_or_else(|| Path::new("."));

             while root_map.get(parent).is_none() && parent != Path::new(".") {
                 parent = parent.parent().unwrap_or_else(|| Path::new("."));
             }

+            let file_data = FileData {
+                file: file_path.to_normalized_string(),
+                hash: hash.clone(),
+            };
+
             match root_map.get(parent) {
                 Some(project_name) => (FileLocation::Project(project_name.into()), file_data),
                 None => (FileLocation::Global, file_data),
@@ -110,23 +105,3 @@ fn create_root_map(
         })
         .collect()
 }
-
-type WorkspaceData = (HashSet<PathBuf>, Vec<FileData>);
-fn get_file_data(workspace_root: &str, globs: Vec<String>) -> anyhow::Result<WorkspaceData> {
-    let globs = build_glob_set(&globs)?;
-    let (projects, file_data) = nx_walker(workspace_root, move |rec| {
-        let mut projects: HashSet<PathBuf> = HashSet::new();
-        let mut file_hashes: Vec<FileData> = vec![];
-        for (path, content) in rec {
-            file_hashes.push(FileData {
-                file: path.to_normalized_string(),
-                hash: xxh3::xxh3_64(&content).to_string(),
-            });
-            if globs.is_match(&path) {
-                projects.insert(path);
-            }
-        }
-        (projects, file_hashes)
-    });
-    Ok((projects, file_data))
-}

@@ -6,8 +6,8 @@ import { buildExplicitPackageJsonDependencies } from './explicit-package-json-de
 import { ProjectGraphProjectNode } from '../../../../config/project-graph';
 import { ProjectGraphBuilder } from '../../../../project-graph/project-graph-builder';
 import { createProjectFileMap } from '../../../../project-graph/file-map-utils';
-import { fileHasher } from '../../../../hasher/file-hasher';
 import { CreateDependenciesContext } from '../../../../utils/nx-plugin';
+import { getAllFileDataInContext } from '../../../../utils/workspace-context';

 describe('explicit package json dependencies', () => {
   let ctx: CreateDependenciesContext;
@@ -51,8 +51,6 @@ describe('explicit package json dependencies', () => {
     }),
   });

-  await fileHasher.init();
-
   projects = {
     proj: {
       name: 'proj',
@@ -75,7 +73,7 @@ describe('explicit package json dependencies', () => {
   const projectFileMap = createProjectFileMap(
     projectsConfigurations as any,
-    fileHasher.allFileData()
+    getAllFileDataInContext(tempFs.tempDir)
   ).projectFileMap;

   const builder = new ProjectGraphBuilder(undefined, projectFileMap);

@@ -5,6 +5,7 @@ import { ProjectGraphBuilder } from '../../../../project-graph/project-graph-bui
 import { buildExplicitTypeScriptDependencies } from './explicit-project-dependencies';
 import { retrieveWorkspaceFiles } from '../../../../project-graph/utils/retrieve-workspace-files';
 import { CreateDependenciesContext } from '../../../../utils/nx-plugin';
+import { setupWorkspaceContext } from '../../../../utils/workspace-context';

 // projectName => tsconfig import path
 const dependencyProjectNamesToImportPaths = {
@@ -559,6 +560,8 @@ async function createContext(
     ...projectsFs,
   });

+  setupWorkspaceContext(tempFs.tempDir);
+
   const { projectFileMap, projectConfigurations } =
     await retrieveWorkspaceFiles(tempFs.tempDir, nxJson);

@@ -13,7 +13,8 @@ import {
 } from '../config/workspace-json-project-json';
 import { daemonClient } from '../daemon/client/client';
 import { readProjectsConfigurationFromProjectGraph } from './project-graph';
-import { fileHasher } from '../hasher/file-hasher';
+import { getAllFileDataInContext } from '../utils/workspace-context';
+import { workspaceRoot } from '../utils/workspace-root';

 export async function createProjectFileMapUsingProjectGraph(
   graph: ProjectGraph
@@ -24,8 +25,7 @@ export async function createProjectFileMapUsingProjectGraph(
   if (daemonClient.enabled()) {
     files = await daemonClient.getAllFileData();
   } else {
-    await fileHasher.ensureInitialized();
-    files = fileHasher.allFileData();
+    files = getAllFileDataInContext(workspaceRoot);
   }

   return createProjectFileMap(configs, files).projectFileMap;

@@ -27,6 +27,11 @@ import {
   NxPluginV2,
 } from '../../utils/nx-plugin';
 import { CreateProjectJsonProjectsPlugin } from '../../plugins/project-json/build-nodes/project-json';
+import {
+  getProjectConfigurationFilesFromContext,
+  getProjectConfigurationsFromContext,
+  getNxWorkspaceFilesFromContext,
+} from '../../utils/workspace-context';

 /**
  * Walks the workspace directory to create the `projectFileMap`, `ProjectConfigurations` and `allWorkspaceFiles`
@@ -38,9 +43,6 @@ export async function retrieveWorkspaceFiles(
   workspaceRoot: string,
   nxJson: NxJsonConfiguration
 ) {
-  const { getWorkspaceFilesNative } =
-    require('../../native') as typeof import('../../native');
-
   performance.mark('native-file-deps:start');
   const plugins = await loadNxPlugins(
     nxJson?.plugins ?? [],
@@ -58,7 +60,10 @@ export async function retrieveWorkspaceFiles(
   performance.mark('get-workspace-files:start');

   const { projectConfigurations, projectFileMap, globalFiles, externalNodes } =
-    getWorkspaceFilesNative(workspaceRoot, globs, (configs: string[]) => {
+    getNxWorkspaceFilesFromContext(
+      workspaceRoot,
+      globs,
+      (configs: string[]) => {
         const projectConfigurations = createProjectConfigurations(
           workspaceRoot,
           nxJson,
@@ -70,7 +75,8 @@ export async function retrieveWorkspaceFiles(
           projectNodes: projectConfigurations.projects,
           externalNodes: projectConfigurations.externalNodes,
         };
-    }) as NxWorkspaceFiles;
+      }
+    ) as NxWorkspaceFiles;
   performance.mark('get-workspace-files:end');
   performance.measure(
     'get-workspace-files',
@@ -165,9 +171,10 @@ function _retrieveProjectConfigurations(
   externalNodes: Record<string, ProjectGraphExternalNode>;
   projectNodes: Record<string, ProjectConfiguration>;
 } {
-  const { getProjectConfigurations } =
-    require('../../native') as typeof import('../../native');
-  return getProjectConfigurations(workspaceRoot, globs, (configs: string[]) => {
+  return getProjectConfigurationsFromContext(
+    workspaceRoot,
+    globs,
+    (configs: string[]) => {
       const projectConfigurations = createProjectConfigurations(
         workspaceRoot,
         nxJson,
@@ -179,7 +186,8 @@ function _retrieveProjectConfigurations(
         projectNodes: projectConfigurations.projects,
         externalNodes: projectConfigurations.externalNodes,
       };
-  }) as {
+    }
+  ) as {
     externalNodes: Record<string, ProjectGraphExternalNode>;
     projectNodes: Record<string, ProjectConfiguration>;
   };
@@ -193,17 +201,13 @@ export async function retrieveProjectConfigurationPaths(
     root,
     await loadNxPlugins(nxJson?.plugins ?? [], getNxRequirePaths(root), root)
   );
-  const { getProjectConfigurationFiles } =
-    require('../../native') as typeof import('../../native');
-  return getProjectConfigurationFiles(root, projectGlobPatterns);
+  return getProjectConfigurationFilesFromContext(root, projectGlobPatterns);
 }

 export function retrieveProjectConfigurationPathsWithoutPluginInference(
   root: string
 ): string[] {
-  const { getProjectConfigurationFiles } =
-    require('../../native') as typeof import('../../native');
-  return getProjectConfigurationFiles(
+  return getProjectConfigurationFilesFromContext(
     root,
     configurationGlobsWithoutPlugins(root)
   );
@@ -226,9 +230,7 @@ export function retrieveProjectConfigurationsWithoutPluginInference(
     return projectsWithoutPluginCache.get(cacheKey);
   }

-  const { getProjectConfigurations } =
-    require('../../native') as typeof import('../../native');
-  const projectConfigurations = getProjectConfigurations(
+  const projectConfigurations = getProjectConfigurationsFromContext(
     root,
     projectGlobPatterns,
     (configs: string[]) => {
@@ -266,7 +268,7 @@ function buildAllWorkspaceFiles(
   return fileData;
 }

-function createProjectConfigurations(
+export function createProjectConfigurations(
   workspaceRoot: string,
   nxJson: NxJsonConfiguration,
   configFiles: string[],

@@ -32,7 +32,6 @@ import {
 import { hashTasksThatDoNotDependOnOutputsOfOtherTasks } from '../hasher/hash-task';
 import { daemonClient } from '../daemon/client/client';
 import { StoreRunInformationLifeCycle } from './life-cycles/store-run-information-life-cycle';
-import { fileHasher } from '../hasher/file-hasher';
 import { getProjectFileMap } from '../project-graph/build-project-graph';
 import { performance } from 'perf_hooks';
@@ -241,8 +240,7 @@ export async function invokeTasksRunner({
       allWorkspaceFiles,
       projectGraph,
       nxJson,
-      runnerOptions,
-      fileHasher
+      runnerOptions
     );
   }

@@ -1,12 +1,12 @@
 import { FileData } from '../config/project-graph';
 import { daemonClient } from '../daemon/client/client';
-import { fileHasher } from '../hasher/file-hasher';
+import { getAllFileDataInContext } from './workspace-context';
+import { workspaceRoot } from './workspace-root';

 export function allFileData(): Promise<FileData[]> {
   if (daemonClient.enabled()) {
     return daemonClient.getAllFileData();
   } else {
-    fileHasher.ensureInitialized();
-    return Promise.resolve(fileHasher.allFileData());
+    return Promise.resolve(getAllFileDataInContext(workspaceRoot));
   }
 }

@@ -0,0 +1,65 @@
+import type { ConfigurationParserResult, WorkspaceContext } from '../native';
+import { performance } from 'perf_hooks';
+
+let workspaceContext: WorkspaceContext | undefined;
+
+export function setupWorkspaceContext(workspaceRoot: string) {
+  const { WorkspaceContext } =
+    require('../native') as typeof import('../native');
+  performance.mark('workspace-context');
+  workspaceContext = new WorkspaceContext(workspaceRoot);
+  performance.mark('workspace-context:end');
+  performance.measure(
+    'workspace context init',
+    'workspace-context',
+    'workspace-context:end'
+  );
+}
+
+export function getNxWorkspaceFilesFromContext(
+  workspaceRoot: string,
+  globs: string[],
+  parseConfigurations: (files: string[]) => ConfigurationParserResult
+) {
+  ensureContextAvailable(workspaceRoot);
+  return workspaceContext.getWorkspaceFiles(globs, parseConfigurations);
+}
+
+export function getProjectConfigurationFilesFromContext(
+  workspaceRoot: string,
+  globs: string[]
+) {
+  ensureContextAvailable(workspaceRoot);
+  return workspaceContext.getProjectConfigurationFiles(globs);
+}
+
+export function getProjectConfigurationsFromContext(
+  workspaceRoot: string,
+  globs: string[],
+  parseConfigurations: (files: string[]) => ConfigurationParserResult
+) {
+  ensureContextAvailable(workspaceRoot);
+  return workspaceContext.getProjectConfigurations(globs, parseConfigurations);
+}
+
+export function updateFilesInContext(
+  updatedFiles: string[],
+  deletedFiles: string[]
+) {
+  return workspaceContext?.incrementalUpdate(updatedFiles, deletedFiles);
+}
+
+export function getAllFileDataInContext(workspaceRoot: string) {
+  ensureContextAvailable(workspaceRoot);
+  return workspaceContext.allFileData();
+}
+
+function ensureContextAvailable(workspaceRoot: string) {
+  if (!workspaceContext || workspaceContext?.workspaceRoot !== workspaceRoot) {
+    setupWorkspaceContext(workspaceRoot);
+  }
+}
+
+export function resetWorkspaceContext() {
+  workspaceContext = undefined;
+}
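This module keeps one mutable `workspaceContext` and lazily (re)creates it whenever a call arrives for a different root, so most callers never construct the native class themselves. A short usage sketch of the wrapper surface (the workspace path is illustrative; the import specifier matches the one used by the e2e utilities above):

import {
  getAllFileDataInContext,
  updateFilesInContext,
  resetWorkspaceContext,
} from 'nx/src/utils/workspace-context';

// First call for this root lazily constructs the native WorkspaceContext.
const files = getAllFileDataInContext('/path/to/workspace');

// Optional chaining makes this a safe no-op (undefined) before any setup.
const hashes = updateFilesInContext(['libs/a/src/index.ts'], []);

// Tests (e.g. the e2e cleanupProject change above) drop the cached context.
resetWorkspaceContext();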