feat(core): build project file map with rust (#17472)

Co-authored-by: FrozenPandaz <jasonjean1993@gmail.com>

parent 8d33ab67a7
commit 18f95a99ad

Cargo.lock (generated, 111 changed lines)
@@ -2,6 +2,17 @@
 # It is not intended for manual editing.
 version = 3

+[[package]]
+name = "ahash"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "version_check",
+]
+
 [[package]]
 name = "aho-corasick"
 version = "0.7.20"
@@ -11,6 +22,12 @@ dependencies = [
  "memchr",
 ]

+[[package]]
+name = "allocator-api2"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4f263788a35611fba42eb41ff811c5d0360c58b97402570312a350736e2542e"
+
 [[package]]
 name = "anyhow"
 version = "1.0.71"
@@ -68,6 +85,17 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a8ab6b55fe97976e46f91ddbed8d147d966475dc29b2032757ba47e02376fbc3"

+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi 0.1.19",
+ "libc",
+ "winapi",
+]
+
 [[package]]
 name = "autocfg"
 version = "1.1.0"
@@ -138,6 +166,17 @@ dependencies = [
  "winapi",
 ]

+[[package]]
+name = "colored"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd"
+dependencies = [
+ "atty",
+ "lazy_static",
+ "winapi",
+]
+
 [[package]]
 name = "command-group"
 version = "2.1.0"
@@ -659,6 +698,26 @@ dependencies = [
  "walkdir",
 ]

+[[package]]
+name = "hashbrown"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
+dependencies = [
+ "ahash",
+ "allocator-api2",
+ "rayon",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "hermit-abi"
 version = "0.2.6"
@@ -779,6 +838,15 @@ version = "1.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6"

+[[package]]
+name = "jsonc-parser"
+version = "0.21.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b56a20e76235284255a09fcd1f45cf55d3c524ea657ebd3854735925c57743d"
+dependencies = [
+ "serde_json",
+]
+
 [[package]]
 name = "kqueue"
 version = "1.0.7"
@@ -1089,15 +1157,21 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "assert_fs",
+ "colored",
  "crossbeam-channel",
  "globset",
+ "hashbrown",
  "ignore",
  "ignore-files",
  "itertools",
+ "jsonc-parser",
  "napi",
  "napi-build",
  "napi-derive",
+ "rayon",
+ "serde",
+ "serde_json",
  "thiserror",
  "tracing",
  "tracing-subscriber",
  "watchexec",
@@ -1364,6 +1438,12 @@ dependencies = [
  "windows-sys 0.45.0",
 ]

+[[package]]
+name = "ryu"
+version = "1.0.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041"
+
 [[package]]
 name = "same-file"
 version = "1.0.6"
@@ -1390,6 +1470,31 @@ name = "serde"
 version = "1.0.152"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.107",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.96"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]

 [[package]]
 name = "sha1_smol"
@@ -1686,6 +1791,12 @@ version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"

+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
 [[package]]
 name = "walkdir"
 version = "2.3.2"
@@ -20,8 +20,16 @@ watchexec-signals = "1.0.0"
 tracing = "0.1.37"
 tracing-subscriber = { version = "0.3.17", features = ["env-filter"]}
 anyhow = "1.0.71"
 thiserror = "1.0.40"
 itertools = "0.10.5"
+rayon = "1.7.0"
+hashbrown = {version = "0.14.0", features = ["rayon"]}
+
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+jsonc-parser = {version = "0.21.1", features = ["serde"] }
+
+colored = "2"

 [lib]
 crate-type = ['cdylib']
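The hashbrown dependency is added with its "rayon" feature, which is what makes the new file-map collections parallel-iterable. A minimal sketch of what that feature provides — the map contents here are made up for illustration, assuming the rayon and hashbrown versions pinned above:

use hashbrown::HashMap;
use rayon::prelude::*;

fn main() {
    let mut files: HashMap<String, String> = HashMap::new();
    files.insert("foo.txt".into(), "hash-1".into());
    files.insert("bar.txt".into(), "hash-2".into());

    // With the "rayon" feature enabled, hashbrown maps implement rayon's
    // parallel-iterator traits, so large file maps can be processed on
    // multiple threads without converting to a Vec first.
    let total_len: usize = files
        .par_iter()
        .map(|(file, hash)| file.len() + hash.len())
        .sum();
    assert_eq!(total_len, 26);
}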
@@ -46,8 +46,8 @@ function readAngularJson(angularCliWorkspaceRoot: string) {
   ).projects;
 }

-export function mergeAngularJsonAndGlobProjects(
-  globProjects: {
+export function mergeAngularJsonAndProjects(
+  projects: {
     [name: string]: ProjectConfiguration;
   },
   angularCliWorkspaceRoot: string
@@ -57,9 +57,9 @@ export function mergeAngularJsonAndGlobProjects(
   for (let k of Object.keys(res)) {
     folders.add(res[k].root);
   }
-  for (let k of Object.keys(globProjects)) {
-    if (!folders.has(globProjects[k].root)) {
-      res[k] = globProjects[k];
+  for (let k of Object.keys(projects)) {
+    if (!folders.has(projects[k].root)) {
+      res[k] = projects[k];
     }
   }
   return res;
@@ -33,7 +33,7 @@ import { PackageJson } from '../utils/package-json';
 import { output } from '../utils/output';
 import { joinPathFragments } from '../utils/path';
 import {
-  mergeAngularJsonAndGlobProjects,
+  mergeAngularJsonAndProjects,
   shouldMergeAngularProjects,
 } from '../adapter/angular-json';
 import { getNxRequirePaths } from '../utils/installation-directory';
@@ -95,7 +95,7 @@ export class Workspaces {
       return this.cachedProjectsConfig;
     }
     const nxJson = this.readNxJson();
-    const projectsConfigurations = buildProjectsConfigurationsFromGlobs(
+    const projectsConfigurations = buildProjectsConfigurationsFromProjectPaths(
       nxJson,
       globForProjectFiles(
         this.root,
@@ -116,7 +116,7 @@ export class Workspaces {
         opts?._includeProjectsFromAngularJson
       )
     ) {
-      projectsConfigurations.projects = mergeAngularJsonAndGlobProjects(
+      projectsConfigurations.projects = mergeAngularJsonAndProjects(
         projectsConfigurations.projects,
         this.root
       );
@@ -623,7 +623,9 @@ export function getGlobPatternsFromPackageManagerWorkspaces(
     // TODO(@AgentEnder): update logic after better way to determine root project inclusion
     // Include the root project
     return packageJson.nx ? patterns.concat('package.json') : patterns;
-  } catch {}
+  } catch {
+    return [];
+  }
 }

 function normalizePatterns(patterns: string[]): string[] {
@@ -801,7 +803,7 @@ export function inferProjectFromNonStandardFile(
   };
 }

-export function buildProjectsConfigurationsFromGlobs(
+export function buildProjectsConfigurationsFromProjectPaths(
   nxJson: NxJsonConfiguration,
   projectFiles: string[], // making this parameter allows devkit to pick up newly created projects
   readJson: <T extends Object>(string) => T = <T extends Object>(string) =>
@@ -858,10 +860,9 @@ export function buildProjectsConfigurationsFromGlobs(
     if (!projects[name]) {
       projects[name] = config;
     } else {
-      logger.error(
+      logger.warn(
        `Skipping project inferred from ${file} since project ${name} already exists.`
       );
-      throw new Error();
     }
   }
 }
@@ -5,15 +5,11 @@ import {
   ProjectGraph,
 } from '../../config/project-graph';
 import { buildProjectGraphUsingProjectFileMap } from '../../project-graph/build-project-graph';
-import {
-  createProjectFileMap,
-  updateProjectFileMap,
-} from '../../project-graph/file-map-utils';
+import { updateProjectFileMap } from '../../project-graph/file-map-utils';
 import {
   nxProjectGraph,
   ProjectFileMapCache,
   readProjectFileMapCache,
   readProjectGraphCache,
 } from '../../project-graph/nx-deps-cache';
 import { fileExists } from '../../utils/fileutils';
 import { notifyFileWatcherSockets } from './file-watching/file-watcher-sockets';
@@ -22,6 +18,10 @@ import { Workspaces } from '../../config/workspaces';
 import { workspaceRoot } from '../../utils/workspace-root';
 import { execSync } from 'child_process';
 import { fileHasher, hashArray } from '../../hasher/file-hasher';
+import {
+  retrieveWorkspaceFiles,
+  retrieveProjectConfigurations,
+} from '../../project-graph/utils/retrieve-workspace-files';

 let cachedSerializedProjectGraphPromise: Promise<{
   error: Error | null;
@@ -122,6 +122,8 @@ function computeWorkspaceConfigHash(projectsConfigurations: any) {
 /**
  * Temporary work around to handle nested gitignores. The parcel file watcher doesn't handle them well,
  * so we need to filter them out here.
+ *
+ * TODO(Cammisuli): remove after 16.4 - Rust watcher handles nested gitignores
  */
 function filterUpdatedFiles(files: string[]) {
   try {
@@ -152,11 +154,16 @@ async function processCollectedUpdatedAndDeletedFiles() {
     'hash-watched-changes-end'
   );
   fileHasher.incrementalUpdate(updatedFiles, deletedFiles);
-  const projectsConfiguration = new Workspaces(
-    workspaceRoot
-  ).readProjectsConfigurations();
+
+  let nxJson = new Workspaces(workspaceRoot).readNxJson();
+
+  const projectConfigurations = await retrieveProjectConfigurations(
+    workspaceRoot,
+    nxJson
+  );
+
   const workspaceConfigHash = computeWorkspaceConfigHash(
-    projectsConfiguration
+    projectConfigurations
   );
   serverLogger.requestLog(
     `Updated file-hasher based on watched changes, recomputing project graph...`
@@ -167,20 +174,26 @@ async function processCollectedUpdatedAndDeletedFiles() {
   // when workspace config changes we cannot incrementally update project file map
   if (workspaceConfigHash !== storedWorkspaceConfigHash) {
     storedWorkspaceConfigHash = workspaceConfigHash;
-    projectFileMapWithFiles = createProjectFileMap(
-      projectsConfiguration,
-      fileHasher.allFileData()
+
+    projectFileMapWithFiles = await retrieveWorkspaceFiles(
+      workspaceRoot,
+      nxJson
     );
   } else {
-    projectFileMapWithFiles = projectFileMapWithFiles
-      ? updateProjectFileMap(
-          projectsConfiguration,
-          projectFileMapWithFiles.projectFileMap,
-          projectFileMapWithFiles.allWorkspaceFiles,
-          updatedFiles,
-          deletedFiles
-        )
-      : createProjectFileMap(projectsConfiguration, fileHasher.allFileData());
+    if (projectFileMapWithFiles) {
+      projectFileMapWithFiles = updateProjectFileMap(
+        projectConfigurations,
+        projectFileMapWithFiles.projectFileMap,
+        projectFileMapWithFiles.allWorkspaceFiles,
+        updatedFiles,
+        deletedFiles
+      );
+    } else {
+      projectFileMapWithFiles = await retrieveWorkspaceFiles(
+        workspaceRoot,
+        nxJson
+      );
+    }
   }

   collectedUpdatedFiles.clear();
@@ -406,9 +406,10 @@ export async function startServer(): Promise<Server> {
   if (!isWindows) {
     killSocketOrPath();
   }
-  await fileHasher.ensureInitialized();
-  return new Promise((resolve, reject) => {
+
+  return new Promise(async (resolve, reject) => {
     try {
+      await fileHasher.ensureInitialized();
       server.listen(FULL_OS_SOCKET_PATH, async () => {
         try {
           serverLogger.log(`Started listening on: ${FULL_OS_SOCKET_PATH}`);
@@ -4,7 +4,7 @@ import {
   ProjectsConfigurations,
 } from '../../config/workspace-json-project-json';
 import {
-  buildProjectsConfigurationsFromGlobs,
+  buildProjectsConfigurationsFromProjectPaths,
   deduplicateProjectFiles,
   getGlobPatternsFromPlugins,
   globForProjectFiles,
@@ -194,8 +194,10 @@ function readAndCombineAllProjectConfigurations(tree: Tree): {
     (r) => deletedFiles.indexOf(r) === -1
   );

-  return buildProjectsConfigurationsFromGlobs(nxJson, projectFiles, (file) =>
-    readJson(tree, file)
+  return buildProjectsConfigurationsFromProjectPaths(
+    nxJson,
+    projectFiles,
+    (file) => readJson(tree, file)
   ).projects;
 }
packages/nx/src/native/hasher.rs (new file, 137 lines)
@@ -0,0 +1,137 @@
#![allow(unused)]

use crate::native::parallel_walker::nx_walker;
use crate::native::types::FileData;
use crate::native::utils::glob::build_glob_set;
use anyhow::anyhow;
use crossbeam_channel::unbounded;
use globset::{Glob, GlobSetBuilder};
use ignore::WalkBuilder;
use itertools::Itertools;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::path::Path;
use std::thread::available_parallelism;
use xxhash_rust::xxh3;

type FileHashes = HashMap<String, String>;

#[napi]
fn hash_array(input: Vec<String>) -> String {
    let joined = input.join(",");
    let content = joined.as_bytes();
    xxh3::xxh3_64(content).to_string()
}

#[napi]
fn hash_file(file: String) -> Option<FileData> {
    let Ok(content) = std::fs::read(&file) else {
        return None;
    };

    let hash = xxh3::xxh3_64(&content).to_string();

    Some(FileData { hash, file })
}

#[napi]
fn hash_files(workspace_root: String) -> HashMap<String, String> {
    nx_walker(workspace_root, |rec| {
        let mut collection: HashMap<String, String> = HashMap::new();
        for (path, content) in rec {
            collection.insert(path, xxh3::xxh3_64(&content).to_string());
        }
        collection
    })
}

#[napi]
fn hash_files_matching_globs(
    directory: String,
    glob_patterns: Vec<String>,
) -> anyhow::Result<Option<String>> {
    let glob_set = build_glob_set(glob_patterns)?;

    let mut hashes = nx_walker(directory, move |receiver| {
        let mut collection: Vec<FileData> = Vec::new();
        for (path, content) in receiver {
            if glob_set.is_match(&path) {
                collection.push(FileData {
                    file: path,
                    hash: xxh3::xxh3_64(&content).to_string(),
                });
            }
        }
        collection
    });

    if hashes.is_empty() {
        return Ok(None);
    }

    // Sort the file data so that it's deterministically ordered by file path
    hashes.sort();

    let sorted_file_hashes: Vec<String> =
        hashes.into_iter().map(|file_data| file_data.hash).collect();
    Ok(Some(hash_array(sorted_file_hashes)))
}

#[cfg(test)]
mod tests {
    use super::*;
    use assert_fs::prelude::*;
    use assert_fs::TempDir;

    ///
    /// Setup a temporary directory to do testing in
    ///
    fn setup_fs() -> TempDir {
        let temp = TempDir::new().unwrap();
        temp.child("test.txt").write_str("content").unwrap();
        temp.child("foo.txt").write_str("content1").unwrap();
        temp.child("bar.txt").write_str("content2").unwrap();
        temp.child("baz")
            .child("qux.txt")
            .write_str("content@qux")
            .unwrap();
        temp.child("node_modules")
            .child("node-module-dep")
            .write_str("content")
            .unwrap();
        temp
    }

    #[test]
    fn it_hashes_a_file() {
        // handle non existent files
        let content = hash_file("".into());
        assert!(content.is_none());

        let temp_dir = setup_fs();

        let test_file_path = temp_dir.display().to_string() + "/test.txt";
        let content = hash_file(test_file_path);

        assert_eq!(content.unwrap().hash, "6193209363630369380");
    }

    #[test]
    fn it_hashes_files_matching_globs() -> anyhow::Result<()> {
        // handle empty workspaces
        let content =
            hash_files_matching_globs("/does/not/exist".into(), Vec::from([String::from("**/*")]))?;
        assert!(content.is_none());

        let temp_dir = setup_fs();

        let content = hash_files_matching_globs(
            temp_dir.display().to_string(),
            Vec::from([String::from("fo*.txt")]),
        )?;
        // println!("{:?}", content);
        assert_eq!(content.unwrap(), String::from("12742692716897613184"),);

        Ok(())
    }
}
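hash_array is the primitive the other hashers compose with: it joins its inputs with a comma and runs XXH3 over the joined bytes. A standalone check of that behavior (assuming the xxhash-rust crate with its xxh3 feature, as in this PR's dependency set); the expected value is the same one the JS spec later in this diff asserts for hashArray(['one', 'two']):

use xxhash_rust::xxh3;

fn main() {
    // hash_array(["one", "two"]) == xxh3_64("one,two"), rendered as a decimal string
    let joined = ["one", "two"].join(",");
    let hash = xxh3::xxh3_64(joined.as_bytes()).to_string();
    assert_eq!(hash, "10960201262927338690");

    // Note: because "," is the separator, ["a,b"] and ["a", "b"] produce the
    // same joined bytes and therefore the same hash -- a property of this
    // encoding worth keeping in mind when choosing inputs.
}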
packages/nx/src/native/index.d.ts (vendored, 22 changed lines)
@@ -3,14 +3,14 @@

 /* auto-generated by NAPI-RS */

-export interface FileData {
-  file: string
-  hash: string
-}
 export function hashArray(input: Array<string>): string
 export function hashFile(file: string): FileData | null
 export function hashFiles(workspaceRoot: string): Record<string, string>
 export function hashFilesMatchingGlobs(directory: string, globPatterns: Array<string>): string | null
+export interface FileData {
+  file: string
+  hash: string
+}
 /**
  * Newly created files will have the `update` EventType as well.
  * This simplifies logic between OS's, IDEs and git operations
@@ -23,6 +23,20 @@ export interface WatchEvent {
   path: string
   type: EventType
 }
+/** Public NAPI error codes that are for Node */
+export const enum WorkspaceErrors {
+  ParseError = 'ParseError',
+  Generic = 'Generic'
+}
+/** Get workspace config files based on provided globs */
+export function getConfigFiles(workspaceRoot: string, globs: Array<string>): Array<string>
+export interface NxWorkspaceFiles {
+  projectFileMap: Record<string, Array<FileData>>
+  globalFiles: Array<FileData>
+  configFiles: Array<string>
+}
+/** Throws exceptions */
+export function getWorkspaceFilesNative(workspaceRoot: string, globs: Array<string>): NxWorkspaceFiles
 export class Watcher {
   origin: string
   /**
@@ -246,7 +246,7 @@ if (!nativeBinding) {
   throw new Error(`Failed to load native binding`)
 }

-const { hashArray, hashFile, hashFiles, hashFilesMatchingGlobs, EventType, Watcher } = nativeBinding
+const { hashArray, hashFile, hashFiles, hashFilesMatchingGlobs, EventType, Watcher, WorkspaceErrors, getConfigFiles, getWorkspaceFilesNative } = nativeBinding

 module.exports.hashArray = hashArray
 module.exports.hashFile = hashFile
@@ -254,3 +254,6 @@ module.exports.hashFiles = hashFiles
 module.exports.hashFilesMatchingGlobs = hashFilesMatchingGlobs
 module.exports.EventType = EventType
 module.exports.Watcher = Watcher
+module.exports.WorkspaceErrors = WorkspaceErrors
+module.exports.getConfigFiles = getConfigFiles
+module.exports.getWorkspaceFilesNative = getWorkspaceFilesNative
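The index.d.ts and index.js changes above are generated by NAPI-RS rather than written by hand: napi-derive exposes snake_case Rust functions under camelCased JS names. A sketch of that convention only (demo_function is a made-up name; compiling this requires the cdylib/napi setup from this PR's Cargo.toml, not a plain binary):

use napi_derive::napi;

#[napi]
pub fn demo_function(input: Vec<String>) -> u32 {
    // NAPI-RS exposes this to Node as `demoFunction`, the same way
    // get_workspace_files_native surfaces as getWorkspaceFilesNative above.
    input.len() as u32
}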
packages/nx/src/native/logger/mod.rs (new file, 72 lines)
@@ -0,0 +1,72 @@
use colored::Colorize;
use tracing::{Event, Level, Subscriber};
use tracing_subscriber::fmt::{format, FmtContext, FormatEvent, FormatFields, FormattedFields};
use tracing_subscriber::registry::LookupSpan;
use tracing_subscriber::EnvFilter;

struct NxLogFormatter;
impl<S, N> FormatEvent<S, N> for NxLogFormatter
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    fn format_event(
        &self,
        ctx: &FmtContext<'_, S, N>,
        mut writer: format::Writer<'_>,
        event: &Event<'_>,
    ) -> std::fmt::Result {
        // Format values from the event's metadata:
        let metadata = event.metadata();

        if metadata.level() != &Level::WARN && metadata.level() != &Level::TRACE {
            write!(&mut writer, "\n{} {} ", ">".cyan(), "NX".bold().cyan())?;
        }

        if metadata.level() == &Level::TRACE {
            write!(
                &mut writer,
                "{}: ",
                format!("{}", metadata.level()).bold().red()
            )?;
        }

        // Format all the spans in the event's span context.
        if let Some(scope) = ctx.event_scope() {
            for span in scope.from_root() {
                write!(writer, "{}", span.name())?;

                // `FormattedFields` is a formatted representation of the span's
                // fields, which is stored in its extensions by the `fmt` layer's
                // `new_span` method. The fields will have been formatted
                // by the same field formatter that's provided to the event
                // formatter in the `FmtContext`.
                let ext = span.extensions();
                let fields = &ext
                    .get::<FormattedFields<N>>()
                    .expect("will never be `None`");

                // Skip formatting the fields if the span had no fields.
                if !fields.is_empty() {
                    write!(writer, "{{{}}}", fields.bold())?;
                }
                write!(writer, ": ")?;
            }
        }

        // Write fields on the event
        ctx.field_format().format_fields(writer.by_ref(), event)?;

        writeln!(writer)
    }
}

pub(crate) fn enable_logger() {
    let env_filter =
        EnvFilter::try_from_env("NX_NATIVE_LOGGING").unwrap_or_else(|_| EnvFilter::new("INFO"));
    _ = tracing_subscriber::fmt()
        .with_env_filter(env_filter)
        .event_format(NxLogFormatter)
        .try_init()
        .ok();
}
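enable_logger wires the custom formatter to an EnvFilter, so verbosity is driven by the NX_NATIVE_LOGGING environment variable with an INFO default. A minimal sketch of that filtering pattern in isolation, without the custom formatter (assumes tracing-subscriber with its env-filter feature, as in the Cargo.toml above):

use tracing::{info, trace};
use tracing_subscriber::EnvFilter;

fn main() {
    // Same fallback as enable_logger(): read the filter from an env var,
    // defaulting to INFO when NX_NATIVE_LOGGING is unset or unparsable.
    let filter = EnvFilter::try_from_env("NX_NATIVE_LOGGING")
        .unwrap_or_else(|_| EnvFilter::new("info"));
    tracing_subscriber::fmt().with_env_filter(filter).init();

    info!("visible at the default level");
    trace!("visible only when NX_NATIVE_LOGGING=trace");
}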
@@ -1,2 +1,7 @@
-pub mod native_hasher;
+pub mod hasher;
+mod logger;
+mod parallel_walker;
+mod types;
+mod utils;
 pub mod watch;
+pub mod workspace;
@@ -1,316 +0,0 @@ (file deleted)
#![allow(unused)]

use anyhow::anyhow;
use crossbeam_channel::unbounded;
use globset::{Glob, GlobSetBuilder};
use ignore::WalkBuilder;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::path::Path;
use std::thread::{self, available_parallelism};
use xxhash_rust::xxh3;

type FileHashes = HashMap<String, String>;

#[napi(object)]
pub struct FileData {
    pub file: String,
    pub hash: String,
}

impl Eq for FileData {}

impl PartialEq<Self> for FileData {
    fn eq(&self, other: &Self) -> bool {
        self.file.eq(&other.file)
    }
}

impl PartialOrd<Self> for FileData {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.file.partial_cmp(&other.file)
    }
}

impl Ord for FileData {
    fn cmp(&self, other: &Self) -> Ordering {
        self.file.cmp(&other.file)
    }
}

#[napi]
fn hash_array(input: Vec<String>) -> String {
    let joined = input.join(",");
    let content = joined.as_bytes();
    xxh3::xxh3_64(content).to_string()
}

#[napi]
fn hash_file(file: String) -> Option<FileData> {
    let Ok(content) = std::fs::read(&file) else {
        return None;
    };

    let hash = xxh3::xxh3_64(&content).to_string();

    Some(FileData { hash, file })
}

#[napi]
fn hash_files(workspace_root: String) -> HashMap<String, String> {
    let workspace_root = Path::new(&workspace_root);
    let nx_ignore = workspace_root.join(".nxignore");
    let git_folder = workspace_root.join(".git");
    let node_folder = workspace_root.join("node_modules");

    let mut walker = WalkBuilder::new(workspace_root);
    walker.hidden(false);
    walker.add_custom_ignore_filename(&nx_ignore);

    // We should make sure to always ignore node_modules and the .git folder
    walker.filter_entry(move |entry| {
        !(entry.path().starts_with(&git_folder) || entry.path().starts_with(&node_folder))
    });

    let (sender, receiver) = unbounded::<(String, Vec<u8>)>();

    let receiver_thread = thread::spawn(move || {
        let mut collection: HashMap<String, String> = HashMap::new();
        for (path, content) in receiver {
            collection.insert(path, xxh3::xxh3_64(&content).to_string());
        }
        collection
    });

    let cpus = available_parallelism().map_or(2, |n| n.get()) - 1;

    walker.threads(cpus).build_parallel().run(|| {
        let tx = sender.clone();
        Box::new(move |entry| {
            use ignore::WalkState::*;

            #[rustfmt::skip]
            let Ok(dir_entry) = entry else {
                return Continue;
            };

            let Ok(content) = std::fs::read(dir_entry.path()) else {
                return Continue;
            };

            let Ok(file_path) = dir_entry.path().strip_prefix(workspace_root) else {
                return Continue;
            };

            let Some(file_path) = file_path.to_str() else {
                return Continue;
            };

            // convert back-slashes in Windows paths, since the js expects only forward-slash path separators
            #[cfg(target_os = "windows")]
            let file_path = file_path.replace('\\', "/");

            tx.send((file_path.to_string(), content)).ok();

            Continue
        })
    });

    drop(sender);
    receiver_thread.join().unwrap()
}

#[napi]
fn hash_files_matching_globs(
    directory: String,
    glob_patterns: Vec<String>,
) -> anyhow::Result<Option<String>> {
    let mut globset_builder = GlobSetBuilder::new();

    for pattern in glob_patterns {
        globset_builder.add(Glob::new(&pattern).map_err(|_| anyhow!("Invalid Glob {pattern}"))?);
    }
    let globset = globset_builder
        .build()
        .map_err(|_| anyhow!("Error building globset builder"))?;

    let cpus = available_parallelism().map_or(2, |n| n.get()) - 1;

    let mut walker = WalkBuilder::new(&directory);
    walker.hidden(false);

    let (sender, receiver) = unbounded::<(String, Vec<u8>)>();

    let receiver_thread = thread::spawn(move || {
        let mut collection: Vec<FileData> = Vec::new();
        for (path, content) in receiver {
            if globset.is_match(&path) {
                collection.push(FileData {
                    file: path,
                    hash: xxh3::xxh3_64(&content).to_string(),
                });
            }
        }
        collection
    });

    walker.threads(cpus).build_parallel().run(|| {
        let tx = sender.clone();
        let directory = directory.clone();
        Box::new(move |entry| {
            use ignore::WalkState::*;

            #[rustfmt::skip]
            let Ok(dir_entry) = entry else {
                return Continue;
            };

            let Ok(content) = std::fs::read(dir_entry.path()) else {
                return Continue;
            };

            let Ok(file_path) = dir_entry.path().strip_prefix(&directory) else {
                return Continue;
            };

            let Some(file_path) = file_path.to_str() else {
                return Continue;
            };

            // convert back-slashes in Windows paths, since the js expects only forward-slash path separators
            #[cfg(target_os = "windows")]
            let file_path = file_path.replace('\\', "/");

            tx.send((file_path.to_string(), content)).ok();

            Continue
        })
    });
    drop(sender);

    let mut hashes = receiver_thread.join().unwrap();
    if hashes.is_empty() {
        return Ok(None);
    }

    // Sort the file data so that its in deterministically ordered by file path
    hashes.sort();

    let sorted_file_hashes: Vec<String> =
        hashes.into_iter().map(|file_data| file_data.hash).collect();
    Ok(Some(hash_array(sorted_file_hashes)))
}

#[cfg(test)]
mod tests {
    use super::*;
    use assert_fs::prelude::*;
    use assert_fs::TempDir;

    ///
    /// Setup a temporary directory to do testing in
    ///
    fn setup_fs() -> TempDir {
        let temp = TempDir::new().unwrap();
        temp.child("test.txt").write_str("content").unwrap();
        temp.child("foo.txt").write_str("content1").unwrap();
        temp.child("bar.txt").write_str("content2").unwrap();
        temp.child("baz")
            .child("qux.txt")
            .write_str("content@qux")
            .unwrap();
        temp.child("node_modules")
            .child("node-module-dep")
            .write_str("content")
            .unwrap();
        temp
    }

    #[test]
    fn it_hashes_a_file() {
        // handle non existent files
        let content = hash_file("".into());
        assert!(content.is_none());

        let temp_dir = setup_fs();

        let test_file_path = temp_dir.display().to_string() + "/test.txt";
        let content = hash_file(test_file_path);

        assert_eq!(content.unwrap().hash, "6193209363630369380");
    }

    #[test]
    fn it_hashes_a_directory() {
        // handle empty workspaces
        let content = hash_files("/does/not/exist".into());
        assert!(content.is_empty());

        let temp_dir = setup_fs();

        let content = hash_files(temp_dir.display().to_string());
        // println!("{:?}", content);
        assert_eq!(
            content,
            HashMap::from([
                ("baz/qux.txt".into(), "8039819779822413286".into()),
                ("foo.txt".into(), "8455857314690418558".into()),
                ("test.txt".into(), "6193209363630369380".into()),
                ("bar.txt".into(), "1707056588989152788".into()),
            ])
        );
    }

    #[test]
    fn it_hashes_files_matching_globs() -> anyhow::Result<()> {
        // handle empty workspaces
        let content =
            hash_files_matching_globs("/does/not/exist".into(), Vec::from([String::from("**/*")]))?;
        assert!(content.is_none());

        let temp_dir = setup_fs();

        let content = hash_files_matching_globs(
            temp_dir.display().to_string(),
            Vec::from([String::from("fo*.txt")]),
        )?;
        // println!("{:?}", content);
        assert_eq!(content.unwrap(), String::from("12742692716897613184"),);

        Ok(())
    }

    #[test]
    fn handles_nx_ignore() {
        let temp_dir = setup_fs();

        temp_dir
            .child("nested")
            .child("child.txt")
            .write_str("data");
        temp_dir
            .child("nested")
            .child("child-two")
            .child("grand_child.txt")
            .write_str("data");

        // add nxignore file with baz/
        temp_dir
            .child(".nxignore")
            .write_str(
                r"baz/
nested/child.txt
nested/child-two/
",
            )
            .unwrap();

        let content = hash_files(temp_dir.display().to_string());
        let mut file_names = content.iter().map(|c| c.0).collect::<Vec<_>>();
        file_names.sort();
        assert_eq!(
            file_names,
            vec!(".nxignore", "bar.txt", "foo.txt", "test.txt")
        );
    }
}
packages/nx/src/native/parallel_walker.rs (new file, 170 lines)
@@ -0,0 +1,170 @@
use std::path::Path;
use std::thread;
use std::thread::available_parallelism;

use crossbeam_channel::{unbounded, Receiver};
use ignore::WalkBuilder;

pub fn nx_walker<P, Fn, Re>(directory: P, f: Fn) -> Re
where
    P: AsRef<Path>,
    Fn: FnOnce(Receiver<(String, Vec<u8>)>) -> Re + Send + 'static,
    Re: Send + 'static,
{
    let directory = directory.as_ref();
    let nx_ignore = directory.join(".nxignore");
    let git_folder = directory.join(".git");
    let node_folder = directory.join("node_modules");

    let mut walker = WalkBuilder::new(directory);
    walker.hidden(false);
    walker.add_custom_ignore_filename(&nx_ignore);

    // We should make sure to always ignore node_modules and the .git folder
    walker.filter_entry(move |entry| {
        !(entry.path().starts_with(&git_folder) || entry.path().starts_with(&node_folder))
    });

    let cpus = available_parallelism().map_or(2, |n| n.get()) - 1;

    let (sender, receiver) = unbounded::<(String, Vec<u8>)>();

    let receiver_thread = thread::spawn(|| f(receiver));

    walker.threads(cpus).build_parallel().run(|| {
        let tx = sender.clone();
        Box::new(move |entry| {
            use ignore::WalkState::*;

            #[rustfmt::skip]
            let Ok(dir_entry) = entry else {
                return Continue;
            };

            let Ok(content) = std::fs::read(dir_entry.path()) else {
                return Continue;
            };

            let Ok(file_path) = dir_entry.path().strip_prefix(directory) else {
                return Continue;
            };

            let Some(file_path) = file_path.to_str() else {
                return Continue;
            };

            // convert back-slashes in Windows paths, since the js expects only forward-slash path separators
            #[cfg(target_os = "windows")]
            let file_path = file_path.replace('\\', "/");

            tx.send((file_path.to_string(), content)).ok();

            Continue
        })
    });

    drop(sender);
    receiver_thread.join().unwrap()
}

#[cfg(test)]
mod test {
    use super::*;
    use assert_fs::prelude::*;
    use assert_fs::TempDir;
    use std::collections::HashMap;

    ///
    /// Setup a temporary directory to do testing in
    ///
    fn setup_fs() -> TempDir {
        let temp = TempDir::new().unwrap();
        temp.child("test.txt").write_str("content").unwrap();
        temp.child("foo.txt").write_str("content1").unwrap();
        temp.child("bar.txt").write_str("content2").unwrap();
        temp.child("baz")
            .child("qux.txt")
            .write_str("content@qux")
            .unwrap();
        temp.child("node_modules")
            .child("node-module-dep")
            .write_str("content")
            .unwrap();
        temp
    }

    #[test]
    fn it_walks_a_directory() {
        // handle empty workspaces
        let content = nx_walker("/does/not/exist", |rec| {
            let mut paths = vec![];
            for (path, _) in rec {
                paths.push(path);
            }
            paths
        });
        assert!(content.is_empty());

        let temp_dir = setup_fs();

        let content = nx_walker(temp_dir, |rec| {
            let mut paths = HashMap::new();
            for (path, content) in rec {
                paths.insert(path, content);
            }
            paths
        });
        assert_eq!(
            content,
            HashMap::from([
                ("baz/qux.txt".into(), "content@qux".into()),
                ("foo.txt".into(), "content1".into()),
                ("test.txt".into(), "content".into()),
                ("bar.txt".into(), "content2".into()),
            ])
        );
    }

    #[test]
    fn handles_nx_ignore() {
        let temp_dir = setup_fs();

        temp_dir
            .child("nested")
            .child("child.txt")
            .write_str("data")
            .unwrap();
        temp_dir
            .child("nested")
            .child("child-two")
            .child("grand_child.txt")
            .write_str("data")
            .unwrap();

        // add nxignore file
        temp_dir
            .child(".nxignore")
            .write_str(
                r"baz/
nested/child.txt
nested/child-two/
",
            )
            .unwrap();

        let mut file_names = nx_walker(temp_dir, |rec| {
            let mut file_names = vec![];
            for (path, _) in rec {
                file_names.push(path);
            }
            file_names
        });

        file_names.sort();

        assert_eq!(
            file_names,
            vec!(".nxignore", "bar.txt", "foo.txt", "test.txt")
        );
    }
}
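nx_walker's shape — many walker threads producing into an unbounded crossbeam channel, one consumer thread folding the results, and an explicit drop(sender) so the consumer can ever finish — is the core concurrency pattern of this PR. A stripped-down sketch of just that pattern:

use crossbeam_channel::unbounded;
use std::thread;

fn main() {
    let (tx, rx) = unbounded::<String>();

    // The consumer drains the channel until it disconnects, which happens
    // only once every Sender has been dropped.
    let consumer = thread::spawn(move || rx.iter().count());

    for i in 0..4 {
        let tx = tx.clone();
        thread::spawn(move || {
            tx.send(format!("file-{i}")).unwrap();
            // this clone of the Sender is dropped when the producer thread ends
        });
    }

    // Without this drop, the original Sender would keep the channel open and
    // consumer.join() would block forever -- the same reason nx_walker drops
    // its sender after build_parallel().run() returns.
    drop(tx);

    assert_eq!(consumer.join().unwrap(), 4);
}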
packages/nx/src/native/tests/hasher.spec.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
import { hashArray, hashFile } from '../index';

import { tmpdir } from 'os';
import { mkdtemp, writeFile } from 'fs-extra';
import { join } from 'path';

describe('hasher', () => {
  it('should hash files', async () => {
    expect(hashFile).toBeDefined();

    const tempDirPath = await mkdtemp(join(tmpdir(), 'native-test'));
    const tempFilePath = join(tempDirPath, 'temp.txt');
    await writeFile(tempFilePath, 'content');

    expect(hashFile(tempFilePath).hash).toBe('6193209363630369380');
  });

  it('should hash content', async () => {
    expect(hashArray).toBeDefined();

    expect(hashArray(['one', 'two'])).toEqual('10960201262927338690');
  });
});
@@ -1,32 +1,6 @@
-import { hashArray, hashFile, Watcher } from '../index';
-
-import { tmpdir } from 'os';
-import { mkdtemp, realpathSync, writeFile } from 'fs-extra';
-import { join } from 'path';
+import { Watcher } from '../index';
+import { realpathSync } from 'fs-extra';
 import { TempFs } from '../../utils/testing/temp-fs';

-describe('native', () => {
-  it('should hash files', async () => {
-    expect(hashFile).toBeDefined();
-
-    const tempDirPath = await mkdtemp(join(tmpdir(), 'native-test'));
-    const tempFilePath = join(tempDirPath, 'temp.txt');
-    await writeFile(tempFilePath, 'content');
-
-    expect(hashFile(tempFilePath).hash).toBe('6193209363630369380');
-  });
-
-  it('should hash content', async () => {
-    expect(hashArray).toBeDefined();
-
-    expect(hashArray(['one', 'two'])).toEqual('10960201262927338690');
-  });
-
-  it('should create an instance of NativeHasher', () => {
-    // const nativeHasher = new NativeFileHasher('/root');
-    // expect(nativeHasher instanceof NativeFileHasher).toBe(true);
-  });
-});
+describe('watcher', () => {
   let temp: TempFs;
packages/nx/src/native/tests/workspace_files.spec.ts (new file, 244 lines)
@@ -0,0 +1,244 @@
import { getWorkspaceFilesNative, WorkspaceErrors } from '../index';
import { TempFs } from '../../utils/testing/temp-fs';
import { NxJsonConfiguration } from '../../config/nx-json';

describe('workspace files', () => {
  it('should gather workspace file information', async () => {
    const fs = new TempFs('workspace-files');
    const nxJson: NxJsonConfiguration = {};
    await fs.createFiles({
      './nx.json': JSON.stringify(nxJson),
      './package.json': JSON.stringify({
        name: 'repo-name',
        version: '0.0.0',
        dependencies: {},
      }),
      './libs/project1/project.json': JSON.stringify({
        name: 'project1',
      }),
      './libs/project1/index.js': '',
      './libs/project2/project.json': JSON.stringify({
        name: 'project2',
      }),
      './libs/project2/index.js': '',
      './libs/project3/project.json': JSON.stringify({
        name: 'project3',
      }),
      './libs/project3/index.js': '',
      './libs/nested/project/project.json': JSON.stringify({
        name: 'nested-project',
      }),
      './libs/nested/project/index.js': '',
      './libs/package-project/package.json': JSON.stringify({
        name: 'package-project',
      }),
      './libs/package-project/index.js': '',
      './nested/non-project/file.txt': '',
    });

    let globs = ['project.json', '**/project.json', 'libs/*/package.json'];
    let { projectFileMap, configFiles, globalFiles } = getWorkspaceFilesNative(
      fs.tempDir,
      globs
    );

    let sortedConfigs = configFiles.sort();

    expect(projectFileMap).toMatchInlineSnapshot(`
      {
        "nested-project": [
          {
            "file": "libs/nested/project/index.js",
            "hash": "3244421341483603138",
          },
          {
            "file": "libs/nested/project/project.json",
            "hash": "2709826705451517790",
          },
        ],
        "package-project": [
          {
            "file": "libs/package-project/index.js",
            "hash": "3244421341483603138",
          },
          {
            "file": "libs/package-project/package.json",
            "hash": "1637510190365604632",
          },
        ],
        "project1": [
          {
            "file": "libs/project1/index.js",
            "hash": "3244421341483603138",
          },
          {
            "file": "libs/project1/project.json",
            "hash": "13466615737813422520",
          },
        ],
        "project2": [
          {
            "file": "libs/project2/index.js",
            "hash": "3244421341483603138",
          },
          {
            "file": "libs/project2/project.json",
            "hash": "1088730393343835373",
          },
        ],
        "project3": [
          {
            "file": "libs/project3/index.js",
            "hash": "3244421341483603138",
          },
          {
            "file": "libs/project3/project.json",
            "hash": "4575237344652189098",
          },
        ],
      }
    `);
    expect(sortedConfigs).toMatchInlineSnapshot(`
      [
        "libs/nested/project/project.json",
        "libs/package-project/package.json",
        "libs/project1/project.json",
        "libs/project2/project.json",
        "libs/project3/project.json",
      ]
    `);
    expect(globalFiles).toMatchInlineSnapshot(`
      [
        {
          "file": "nested/non-project/file.txt",
          "hash": "3244421341483603138",
        },
        {
          "file": "nx.json",
          "hash": "1389868326933519382",
        },
        {
          "file": "package.json",
          "hash": "14409636362330144230",
        },
      ]
    `);
  });

  describe('errors', () => {
    it('should infer names of configuration files without a name', async () => {
      const fs = new TempFs('workspace-files');
      const nxJson: NxJsonConfiguration = {};
      await fs.createFiles({
        './nx.json': JSON.stringify(nxJson),
        './package.json': JSON.stringify({
          name: 'repo-name',
          version: '0.0.0',
          dependencies: {},
        }),
        './libs/project1/project.json': JSON.stringify({
          name: 'project1',
        }),
        './libs/project1/index.js': '',
        './libs/project2/project.json': JSON.stringify({}),
      });

      let globs = ['project.json', '**/project.json', 'libs/*/package.json'];
      expect(getWorkspaceFilesNative(fs.tempDir, globs).projectFileMap)
        .toMatchInlineSnapshot(`
        {
          "project1": [
            {
              "file": "libs/project1/index.js",
              "hash": "3244421341483603138",
            },
            {
              "file": "libs/project1/project.json",
              "hash": "13466615737813422520",
            },
          ],
          "project2": [
            {
              "file": "libs/project2/project.json",
              "hash": "1389868326933519382",
            },
          ],
        }
      `);
    });

    it('handles comments', async () => {
      const fs = new TempFs('workspace-files');
      const nxJson: NxJsonConfiguration = {};
      await fs.createFiles({
        './nx.json': JSON.stringify(nxJson),
        './package.json': JSON.stringify({
          name: 'repo-name',
          version: '0.0.0',
          dependencies: {},
        }),
        './libs/project1/project.json': `{
          "name": "temp"
          // this should not fail
        }`,
        './libs/project1/index.js': '',
      });

      let globs = ['project.json', '**/project.json', 'libs/*/package.json'];
      expect(() => getWorkspaceFilesNative(fs.tempDir, globs)).not.toThrow();
    });

    it('handles extra comma', async () => {
      const fs = new TempFs('workspace-files');
      const nxJson: NxJsonConfiguration = {};
      await fs.createFiles({
        './nx.json': JSON.stringify(nxJson),
        './package.json': JSON.stringify({
          name: 'repo-name',
          version: '0.0.0',
          dependencies: {},
        }),
        './libs/project1/project.json': `{
          "name": "temp",
        }`,
        './libs/project1/index.js': '',
      });

      let globs = ['**/project.json'];
      expect(() => getWorkspaceFilesNative(fs.tempDir, globs)).not.toThrow();
    });

    it('throws parsing errors: missing brackets', async () => {
      const fs = new TempFs('workspace-files');
      const nxJson: NxJsonConfiguration = {};
      await fs.createFiles({
        './nx.json': JSON.stringify(nxJson),
        './package.json': JSON.stringify({
          name: 'repo-name',
          version: '0.0.0',
          dependencies: {},
        }),
        './libs/project1/project.json': `{
          "name": "temp", "property": "child": 2 }
        }`,
        './libs/project1/index.js': '',
      });

      let globs = ['**/project.json'];

      const error = getError(() => getWorkspaceFilesNative(fs.tempDir, globs));
      expect(error.message).toMatchInlineSnapshot(
        `"libs/project1/project.json"`
      );
      expect(error).toHaveProperty('code', WorkspaceErrors.ParseError);
    });
  });
});

const getError = (fn: () => unknown): Error => {
  try {
    fn();
  } catch (error: unknown) {
    return error as Error;
  }
};
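The 'handles comments' and 'handles extra comma' cases above pass because the Rust side does not parse project files with strict JSON alone. A minimal sketch of the same two-step parse used by read_project_configuration in get_nx_workspace_files.rs later in this diff (assumes serde/serde_json with the derive feature, and jsonc-parser with its serde feature, per the Cargo.toml):

use jsonc_parser::ParseOptions;
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ProjectConfiguration {
    name: Option<String>,
}

fn main() -> anyhow::Result<()> {
    // Comment plus trailing comma: invalid strict JSON, valid JSONC.
    let content = br#"{
        "name": "temp", // comments are fine
    }"#;

    // Same fallback order as the Rust source: strict serde_json first,
    // jsonc-parser for comments/trailing commas second.
    let parsed: ProjectConfiguration = serde_json::from_slice(content).or_else(|_| {
        let value = jsonc_parser::parse_to_serde_value(
            std::str::from_utf8(content)?,
            &ParseOptions::default(),
        )?;
        serde_json::from_value(value.into()).map_err(anyhow::Error::from)
    })?;

    assert_eq!(parsed.name.as_deref(), Some("temp"));
    Ok(())
}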
packages/nx/src/native/types.rs (new file, 28 lines)
@@ -0,0 +1,28 @@
use std::cmp::Ordering;

#[napi(object)]
#[derive(Clone)]
pub struct FileData {
    pub file: String,
    pub hash: String,
}

impl Eq for FileData {}

impl PartialEq<Self> for FileData {
    fn eq(&self, other: &Self) -> bool {
        self.file.eq(&other.file)
    }
}

impl PartialOrd<Self> for FileData {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.file.partial_cmp(&other.file)
    }
}

impl Ord for FileData {
    fn cmp(&self, other: &Self) -> Ordering {
        self.file.cmp(&other.file)
    }
}
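FileData implements its ordering on the file path alone, ignoring the hash; that is what lets callers sort file lists and get a stable, path-ordered sequence to feed into hash_array. A self-contained copy of the ordering to show the effect:

use std::cmp::Ordering;

#[derive(Debug)]
struct FileData {
    file: String,
    hash: String,
}

// Ordering mirrors types.rs: equality and comparison look only at `file`.
impl PartialEq for FileData {
    fn eq(&self, other: &Self) -> bool {
        self.file == other.file
    }
}
impl Eq for FileData {}
impl PartialOrd for FileData {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for FileData {
    fn cmp(&self, other: &Self) -> Ordering {
        self.file.cmp(&other.file)
    }
}

fn main() {
    let mut files = vec![
        FileData { file: "b.txt".into(), hash: "2".into() },
        FileData { file: "a.txt".into(), hash: "1".into() },
    ];
    // sort() uses the Ord impl above, so the result is ordered by path,
    // which keeps downstream hashing deterministic.
    files.sort();
    assert_eq!(files[0].file, "a.txt");
}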
packages/nx/src/native/utils/glob.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
use globset::{Glob, GlobSet, GlobSetBuilder};

pub(crate) fn build_glob_set(globs: Vec<String>) -> anyhow::Result<GlobSet> {
    let mut glob_builder = GlobSetBuilder::new();
    for glob in globs {
        glob_builder.add(Glob::new(&glob).map_err(anyhow::Error::from)?);
    }

    glob_builder.build().map_err(anyhow::Error::from)
}
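build_glob_set compiles every pattern into a single globset::GlobSet so that each walked path is tested once against all patterns at once. A small usage sketch (patterns and paths taken from the specs in this diff):

use globset::{Glob, GlobSetBuilder};

fn main() -> anyhow::Result<()> {
    let mut builder = GlobSetBuilder::new();
    for pattern in ["**/project.json", "libs/*/package.json"] {
        builder.add(Glob::new(pattern)?);
    }
    let set = builder.build()?;

    // One compiled set answers membership for all patterns in a single call.
    assert!(set.is_match("libs/project1/project.json"));
    assert!(set.is_match("libs/package-project/package.json"));
    assert!(!set.is_match("nested/non-project/file.txt"));
    Ok(())
}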
packages/nx/src/native/utils/mod.rs (new file, 1 line)
@@ -0,0 +1 @@
pub mod glob;
packages/nx/src/native/workspace/errors.rs (new file, 41 lines)
@@ -0,0 +1,41 @@
use std::path::PathBuf;

use napi::bindgen_prelude::*;
use thiserror::Error;

/// Public NAPI error codes that are for Node
#[napi(string_enum)]
#[derive(Debug)]
pub enum WorkspaceErrors {
    ParseError,
    Generic,
}

impl AsRef<str> for WorkspaceErrors {
    fn as_ref(&self) -> &str {
        match self {
            WorkspaceErrors::ParseError => "ParseError",
            WorkspaceErrors::Generic => "Generic",
        }
    }
}

#[derive(Debug, Error)]
#[non_exhaustive]
pub enum InternalWorkspaceErrors {
    #[error("{file}")]
    ParseError { file: PathBuf },
    #[error("{msg}")]
    Generic { msg: String },
}

impl From<InternalWorkspaceErrors> for napi::Error<WorkspaceErrors> {
    fn from(value: InternalWorkspaceErrors) -> Self {
        match value {
            InternalWorkspaceErrors::ParseError { file } => {
                Error::new(WorkspaceErrors::ParseError, file.display().to_string())
            }
            InternalWorkspaceErrors::Generic { msg } => Error::new(WorkspaceErrors::Generic, msg),
        }
    }
}
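The #[error("{file}")] attribute means a ParseError's Display output is nothing but the offending path, which is exactly what the workspace_files spec asserts as error.message. A minimal reproduction of that thiserror behavior (DemoError is a hypothetical local enum, not part of the PR):

use std::path::PathBuf;
use thiserror::Error;

#[derive(Debug, Error)]
enum DemoError {
    // thiserror formats PathBuf fields via Path::display()
    #[error("{file}")]
    ParseError { file: PathBuf },
    #[error("{msg}")]
    Generic { msg: String },
}

fn main() {
    let err = DemoError::ParseError {
        file: PathBuf::from("libs/project1/project.json"),
    };
    // The message is just the file path -- matching the inline snapshot
    // `"libs/project1/project.json"` in workspace_files.spec.ts.
    assert_eq!(err.to_string(), "libs/project1/project.json");

    let generic = DemoError::Generic { msg: "boom".into() };
    assert_eq!(generic.to_string(), "boom");
}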
packages/nx/src/native/workspace/get_config_files.rs (new file, 17 lines)
@@ -0,0 +1,17 @@
use crate::native::parallel_walker::nx_walker;
use crate::native::utils::glob::build_glob_set;

#[napi]
/// Get workspace config files based on provided globs
pub fn get_config_files(workspace_root: String, globs: Vec<String>) -> anyhow::Result<Vec<String>> {
    let globs = build_glob_set(globs)?;
    Ok(nx_walker(workspace_root, move |rec| {
        let mut config_paths: Vec<String> = vec![];
        for (path, _) in rec {
            if globs.is_match(&path) {
                config_paths.push(path.to_owned());
            }
        }
        config_paths
    }))
}
190
packages/nx/src/native/workspace/get_nx_workspace_files.rs
Normal file
190
packages/nx/src/native/workspace/get_nx_workspace_files.rs
Normal file
@ -0,0 +1,190 @@
|
||||
use jsonc_parser::ParseOptions;
|
||||
use std::collections::HashMap;
|
||||
use std::ffi::OsStr;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use rayon::prelude::*;
|
||||
use tracing::trace;
|
||||
use xxhash_rust::xxh3;
|
||||
|
||||
use crate::native::logger::enable_logger;
|
||||
use crate::native::parallel_walker::nx_walker;
|
||||
use crate::native::types::FileData;
|
||||
use crate::native::utils::glob::build_glob_set;
|
||||
use crate::native::workspace::errors::{InternalWorkspaceErrors, WorkspaceErrors};
|
||||
use crate::native::workspace::types::{FileLocation, ProjectConfiguration};
|
||||
|
||||
#[napi(object)]
|
||||
pub struct NxWorkspaceFiles {
|
||||
pub project_file_map: HashMap<String, Vec<FileData>>,
|
||||
pub global_files: Vec<FileData>,
|
||||
pub config_files: Vec<String>,
|
||||
}
|
||||
|
||||
#[napi]
|
||||
/// Throws exceptions
|
||||
pub fn get_workspace_files_native(
|
||||
workspace_root: String,
|
||||
globs: Vec<String>,
|
||||
) -> napi::Result<NxWorkspaceFiles, WorkspaceErrors> {
|
||||
enable_logger();
|
||||
|
||||
trace!("{workspace_root}, {globs:?}");
|
||||
|
||||
let (projects, mut file_data) = get_file_data(&workspace_root, globs)
|
||||
.map_err(|err| napi::Error::new(WorkspaceErrors::Generic, err.to_string()))?;
|
||||
|
||||
let root_map = create_root_map(&projects)?;
|
||||
|
||||
trace!(?root_map);
|
||||
|
||||
// Files need to be sorted each time because when we do hashArray in the TaskHasher.js, the order of the files should be deterministic
|
||||
file_data.sort();
|
||||
|
||||
let file_locations = file_data
|
||||
.into_par_iter()
|
||||
.map(|file_data| {
|
||||
let file_path = Path::new(&file_data.file);
|
||||
trace!(?file_path);
|
||||
let mut parent = file_path.parent().unwrap_or_else(|| Path::new(""));
|
||||
trace!(?parent);
|
||||
while root_map.get(parent).is_none() {
|
||||
parent = parent.parent().unwrap_or_else(|| Path::new(""));
|
||||
|
||||
if parent == Path::new("") {
|
||||
return (FileLocation::Global, file_data);
|
||||
}
|
||||
}
|
||||
|
||||
let project_name = root_map.get(parent).unwrap();
|
||||
|
||||
(FileLocation::Project(project_name.clone()), file_data)
|
||||
})
|
||||
.collect::<Vec<(FileLocation, FileData)>>();
|
||||
|
||||
let mut project_file_map: HashMap<String, Vec<FileData>> = HashMap::with_capacity(
|
||||
file_locations
|
||||
.iter()
|
||||
.filter(|&f| f.0 != FileLocation::Global)
|
||||
.count(),
|
||||
);
|
||||
let mut global_files: Vec<FileData> = Vec::with_capacity(
|
||||
file_locations
|
||||
.iter()
|
||||
.filter(|&f| f.0 == FileLocation::Global)
|
||||
.count(),
|
||||
);
|
||||
for (file_location, file_data) in file_locations {
|
||||
match file_location {
|
||||
FileLocation::Global => global_files.push(file_data),
|
||||
FileLocation::Project(project_name) => match project_file_map.get_mut(&project_name) {
|
||||
None => {
|
||||
project_file_map.insert(project_name, vec![file_data]);
|
||||
}
|
||||
Some(project_files) => project_files.push(file_data),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
Ok(NxWorkspaceFiles {
|
||||
project_file_map,
|
||||
global_files,
|
||||
config_files: projects.iter().map(|(path, _)| path.clone()).collect(),
|
||||
})
|
||||
}
|
||||
|
||||
fn create_root_map(
|
||||
projects: &Vec<(String, Vec<u8>)>,
|
||||
) -> Result<hashbrown::HashMap<&Path, String>, InternalWorkspaceErrors> {
|
||||
projects
|
||||
.par_iter()
|
||||
.map(|(path, content)| {
|
||||
let path = Path::new(path);
|
||||
let file_name = path
|
||||
.file_name()
|
||||
.expect("path should always have a filename");
|
||||
return if file_name == "project.json" || file_name == "package.json" {
|
||||
// use serde_json to do the initial parse, if that fails fall back to jsonc_parser.
|
||||
// If all those fail, expose the error from jsonc_parser
|
||||
let project_configuration: ProjectConfiguration =
|
||||
read_project_configuration(content, path)?;
|
||||
|
||||
let Some(parent_path) = path.parent() else {
|
||||
return Err(InternalWorkspaceErrors::Generic {
|
||||
msg: format!("{path:?} has no parent"),
|
||||
})
|
||||
};
|
||||
|
||||
let name: String = if let Some(name) = project_configuration.name {
|
||||
Ok(name)
|
||||
} else {
|
||||
parent_path
|
||||
.file_name()
|
||||
.unwrap_or_default()
|
||||
.to_os_string()
|
||||
.into_string()
|
||||
.map_err(|os_string| InternalWorkspaceErrors::Generic {
|
||||
msg: format!("Cannot turn {os_string:?} into String"),
|
||||
})
|
||||
}?;
|
||||
Ok((parent_path, name))
|
||||
} else {
|
||||
if let Some(parent_path) = path.parent() {
|
||||
Ok((
|
||||
parent_path,
|
||||
parent_path
|
||||
.file_name()
|
||||
.unwrap_or_default()
|
||||
.to_os_string()
|
||||
.into_string()
|
||||
.map_err(|os_string| InternalWorkspaceErrors::Generic {
|
||||
msg: format!("Cannot turn {os_string:?} into String"),
|
||||
})?,
|
||||
))
|
||||
} else {
|
||||
Err(InternalWorkspaceErrors::Generic {
|
||||
msg: format!("{path:?} has no parent"),
|
||||
})
|
||||
}
|
||||
};
|
||||
})
|
||||
.collect()
|
||||
}

fn read_project_configuration(
    content: &Vec<u8>,
    path: &Path,
) -> Result<ProjectConfiguration, InternalWorkspaceErrors> {
    serde_json::from_slice(content).or_else(|_| {
        let content_str = std::str::from_utf8(content).expect("content should be valid utf8");
        let parser_value =
            jsonc_parser::parse_to_serde_value(content_str, &ParseOptions::default()).map_err(
                |_| InternalWorkspaceErrors::ParseError {
                    file: PathBuf::from(path),
                },
            )?;
        serde_json::from_value(parser_value.into()).map_err(|_| InternalWorkspaceErrors::Generic {
            msg: format!("Failed to parse {path:?}"),
        })
    })
}
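A quick illustration of why the two-stage parse exists: strict JSON parsing rejects the comments and trailing commas that are common in Nx config files, while jsonc_parser accepts them. This sketch only exercises the two crates already in Cargo.lock; the sample document is hypothetical:

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let content = r#"{
        // comments are allowed in Nx config files
        "name": "ui",
    }"#;

    // The strict parser rejects the comment and the trailing comma...
    assert!(serde_json::from_str::<serde_json::Value>(content).is_err());

    // ...so the fallback parses it as JSONC instead.
    let value =
        jsonc_parser::parse_to_serde_value(content, &jsonc_parser::ParseOptions::default())?
            .expect("document should not be empty");
    assert_eq!(value["name"], "ui");
    Ok(())
}

Trying serde_json first keeps the common case (well-formed JSON) on the fast path and pays the JSONC cost only for files that need it.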

type WorkspaceData = (Vec<(String, Vec<u8>)>, Vec<FileData>);
fn get_file_data(workspace_root: &str, globs: Vec<String>) -> anyhow::Result<WorkspaceData> {
    let globs = build_glob_set(globs)?;
    let (projects, file_data) = nx_walker(workspace_root, move |rec| {
        let mut projects: Vec<(String, Vec<u8>)> = vec![];
        let mut file_hashes: Vec<FileData> = vec![];
        for (path, content) in rec {
            file_hashes.push(FileData {
                file: path.clone(),
                hash: xxh3::xxh3_64(&content).to_string(),
            });
            if globs.is_match(&path) {
                projects.push((path, content));
            }
        }
        (projects, file_hashes)
    });
    Ok((projects, file_data))
}
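Inside nx_walker's callback, every file is hashed once with xxHash3 and config files are set aside in the same pass, so the tree is walked exactly once. A stand-alone sketch of that per-file work — it assumes the xxhash-rust crate is what backs the xxh3 import above, and is_config is a hypothetical stand-in for the compiled glob set:

use xxhash_rust::xxh3::xxh3_64;

// Stand-in for the compiled glob set produced by build_glob_set.
fn is_config(path: &str) -> bool {
    path.ends_with("project.json") || path.ends_with("package.json")
}

fn main() {
    let files: Vec<(String, Vec<u8>)> = vec![
        ("libs/ui/project.json".to_string(), br#"{"name":"ui"}"#.to_vec()),
        ("libs/ui/src/index.ts".to_string(), b"export {};".to_vec()),
    ];

    let mut configs: Vec<(String, Vec<u8>)> = vec![];
    let mut hashes: Vec<(String, String)> = vec![];
    for (path, content) in files {
        // Hash every file; only glob matches become project-config candidates.
        hashes.push((path.clone(), xxh3_64(&content).to_string()));
        if is_config(&path) {
            configs.push((path, content));
        }
    }
    assert_eq!(configs.len(), 1);
    assert_eq!(hashes.len(), 2);
}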
4
packages/nx/src/native/workspace/mod.rs
Normal file
@ -0,0 +1,4 @@
mod errors;
pub mod get_config_files;
pub mod get_nx_workspace_files;
mod types;
12
packages/nx/src/native/workspace/types.rs
Normal file
@ -0,0 +1,12 @@
use serde::Deserialize;

#[derive(Debug, Deserialize)]
pub(crate) struct ProjectConfiguration {
    pub name: Option<String>,
}

#[derive(Debug, Eq, PartialEq)]
pub enum FileLocation {
    Global,
    Project(String),
}
@ -1,10 +1,9 @@
import { TempFs } from '../../../../utils/testing/temp-fs';
const tempFs = new TempFs('explicit-project-deps');

import { createProjectFileMap } from '../../../../project-graph/file-map-utils';
import { ProjectGraphBuilder } from '../../../../project-graph/project-graph-builder';
import { buildExplicitTypeScriptDependencies } from './explicit-project-dependencies';
import { fileHasher } from '../../../../hasher/file-hasher';
import { retrieveWorkspaceFiles } from '../../../../project-graph/utils/retrieve-workspace-files';

// projectName => tsconfig import path
const dependencyProjectNamesToImportPaths = {
@ -604,13 +603,13 @@ async function createVirtualWorkspace(config: VirtualWorkspaceConfig) {
const nxJson = {
  npmScope: 'proj',
};
const projects = {
  projects: {
    [config.sourceProjectName]: {
      root: `libs/${config.sourceProjectName}`,
    },
  },
const projectsFs = {
  [`./libs/${config.sourceProjectName}/project.json`]: JSON.stringify({
    name: config.sourceProjectName,
    sourceRoot: `libs/${config.sourceProjectName}`,
  }),
};

const fsJson = {
  './package.json': `{
    "name": "test",
@ -663,9 +662,11 @@ async function createVirtualWorkspace(config: VirtualWorkspaceConfig) {
  dependencyProjectNamesToImportPaths
)) {
  fsJson[`libs/${projectName}/index.ts`] = ``;
  projects.projects[projectName] = {
    root: `libs/${projectName}`,
  };

  projectsFs[`./libs/${projectName}/project.json`] = JSON.stringify({
    name: projectName,
    sourceRoot: `libs/${projectName}`,
  });
  tsConfig.compilerOptions.paths[tsconfigPath] = [
    `libs/${projectName}/index.ts`,
  ];
@ -680,18 +681,19 @@ async function createVirtualWorkspace(config: VirtualWorkspaceConfig) {

fsJson['./tsconfig.base.json'] = JSON.stringify(tsConfig);

await tempFs.createFiles(fsJson);
await tempFs.createFiles({
  ...fsJson,
  ...projectsFs,
});

await fileHasher.init();
const { projectFileMap, projectConfigurations } =
  await retrieveWorkspaceFiles(tempFs.tempDir, nxJson);

return {
  ctx: {
    projectsConfigurations: projects,
    projectsConfigurations: projectConfigurations,
    nxJsonConfiguration: nxJson,
    filesToProcess: createProjectFileMap(
      projects as any,
      fileHasher.allFileData()
    ).projectFileMap,
    filesToProcess: projectFileMap,
  },
  builder,
};

@ -16,9 +16,15 @@ export async function createProjectFileMapUsingProjectGraph(
  graph: ProjectGraph
): Promise<ProjectFileMap> {
  const configs = readProjectsConfigurationFromProjectGraph(graph);
  const files = daemonClient.enabled()
    ? await daemonClient.getAllFileData()
    : fileHasher.allFileData();

  let files;
  if (daemonClient.enabled()) {
    files = await daemonClient.getAllFileData();
  } else {
    await fileHasher.ensureInitialized();
    files = fileHasher.allFileData();
  }

  return createProjectFileMap(configs, files).projectFileMap;
}

@ -6,12 +6,8 @@ import {
import * as fs from 'fs';
import { JsonDiffType } from '../utils/json-diff';
import ignore from 'ignore';
import { fileHasher } from '../hasher/file-hasher';

describe('calculateFileChanges', () => {
  beforeEach(() => {
    fileHasher.ensureInitialized();
  });
  it('should return a whole file change by default for files that exist', () => {
    jest.spyOn(fs, 'existsSync').mockReturnValue(true);
    const changes = calculateFileChanges(

@ -4,7 +4,6 @@ import {
} from './nx-deps-cache';
import { buildProjectGraphUsingProjectFileMap } from './build-project-graph';
import { output } from '../utils/output';
import { fileHasher } from '../hasher/file-hasher';
import { markDaemonAsDisabled, writeDaemonLogs } from '../daemon/tmp-dir';
import { ProjectGraph } from '../config/project-graph';
import { stripIndents } from '../utils/strip-indents';
@ -15,9 +14,9 @@ import {
import { daemonClient } from '../daemon/client/client';
import { fileExists } from '../utils/fileutils';
import { workspaceRoot } from '../utils/workspace-root';
import { Workspaces } from '../config/workspaces';
import { createProjectFileMap } from './file-map-utils';
import { performance } from 'perf_hooks';
import { retrieveWorkspaceFiles } from './utils/retrieve-workspace-files';
import { readNxJson } from './file-utils';

/**
 * Synchronously reads the latest cached copy of the workspace's ProjectGraph.
@ -70,16 +69,10 @@ export function readProjectsConfigurationFromProjectGraph(
}

export async function buildProjectGraphWithoutDaemon() {
  await fileHasher.ensureInitialized();
  const nxJson = readNxJson();

  const projectConfigurations = new Workspaces(
    workspaceRoot
  ).readProjectsConfigurations();

  const { projectFileMap, allWorkspaceFiles } = createProjectFileMap(
    projectConfigurations,
    fileHasher.allFileData()
  );
  const { allWorkspaceFiles, projectFileMap, projectConfigurations } =
    await retrieveWorkspaceFiles(workspaceRoot, nxJson);

  const cacheEnabled = process.env.NX_CACHE_PROJECT_GRAPH !== 'false';
  return (
184
packages/nx/src/project-graph/utils/retrieve-workspace-files.ts
Normal file
@ -0,0 +1,184 @@
import { performance } from 'perf_hooks';
import {
  buildProjectsConfigurationsFromProjectPaths,
  getGlobPatternsFromPackageManagerWorkspaces,
  getGlobPatternsFromPluginsAsync,
  mergeTargetConfigurations,
  readTargetDefaultsForTarget,
} from '../../config/workspaces';
import { getNxRequirePaths } from '../../utils/installation-directory';
import { readJsonFile } from '../../utils/fileutils';
import { join } from 'path';
import { ProjectsConfigurations } from '../../config/workspace-json-project-json';
import {
  mergeAngularJsonAndProjects,
  shouldMergeAngularProjects,
} from '../../adapter/angular-json';
import { NxJsonConfiguration } from '../../config/nx-json';
import { FileData, ProjectFileMap } from '../../config/project-graph';
import { NxWorkspaceFiles, WorkspaceErrors } from '../../native';

/**
 * Walks the workspace directory to create the `projectFileMap`, `ProjectConfigurations` and `allWorkspaceFiles`
 * @throws
 * @param workspaceRoot
 * @param nxJson
 */
export async function retrieveWorkspaceFiles(
  workspaceRoot: string,
  nxJson: NxJsonConfiguration
) {
  const { getWorkspaceFilesNative } = require('../../native');

  performance.mark('native-file-deps:start');
  let globs = await configurationGlobs(workspaceRoot, nxJson);
  performance.mark('native-file-deps:end');
  performance.measure(
    'native-file-deps',
    'native-file-deps:start',
    'native-file-deps:end'
  );

  performance.mark('get-workspace-files:start');
  let workspaceFiles: NxWorkspaceFiles;
  try {
    workspaceFiles = getWorkspaceFilesNative(workspaceRoot, globs);
  } catch (e) {
    // If the error is a parse error from Rust, re-read the offending file with the
    // JS readJsonFile so the user gets a pretty, descriptive error message
    if (e.code === WorkspaceErrors.ParseError) {
      readJsonFile(join(workspaceRoot, e.message));
      // readJsonFile should always throw here; if it somehow doesn't, rethrow the original error
      throw e;
    } else {
      throw e;
    }
  }
  performance.mark('get-workspace-files:end');
  performance.measure(
    'get-workspace-files',
    'get-workspace-files:start',
    'get-workspace-files:end'
  );

  return {
    allWorkspaceFiles: buildAllWorkspaceFiles(
      workspaceFiles.projectFileMap,
      workspaceFiles.globalFiles
    ),
    projectFileMap: workspaceFiles.projectFileMap,
    projectConfigurations: createProjectConfigurations(
      workspaceRoot,
      nxJson,
      workspaceFiles.configFiles
    ),
  };
}

/**
 * Walk through the workspace and return `ProjectConfigurations`. Only use this if the projectFileMap is not needed.
 *
 * @param workspaceRoot
 * @param nxJson
 */
export async function retrieveProjectConfigurations(
  workspaceRoot: string,
  nxJson: NxJsonConfiguration
) {
  const { getConfigFiles } = require('../../native');
  const globs = await configurationGlobs(workspaceRoot, nxJson);
  const configPaths = getConfigFiles(workspaceRoot, globs);
  return createProjectConfigurations(workspaceRoot, nxJson, configPaths);
}

function buildAllWorkspaceFiles(
  projectFileMap: ProjectFileMap,
  globalFiles: FileData[]
): FileData[] {
  performance.mark('get-all-workspace-files:start');
  let fileData = Object.values(projectFileMap).flat();

  fileData.push(...globalFiles);
  performance.mark('get-all-workspace-files:end');
  performance.measure(
    'get-all-workspace-files',
    'get-all-workspace-files:start',
    'get-all-workspace-files:end'
  );

  return fileData;
}

function createProjectConfigurations(
  workspaceRoot: string,
  nxJson: NxJsonConfiguration,
  configFiles: string[]
): ProjectsConfigurations {
  performance.mark('build-project-configs:start');

  let projectConfigurations = mergeTargetDefaultsIntoProjectDescriptions(
    buildProjectsConfigurationsFromProjectPaths(nxJson, configFiles, (path) =>
      readJsonFile(join(workspaceRoot, path))
    ),
    nxJson
  );

  if (shouldMergeAngularProjects(workspaceRoot, false)) {
    projectConfigurations.projects = mergeAngularJsonAndProjects(
      projectConfigurations.projects,
      workspaceRoot
    );
  }
  performance.mark('build-project-configs:end');
  performance.measure(
    'build-project-configs',
    'build-project-configs:start',
    'build-project-configs:end'
  );

  return projectConfigurations;
}

function mergeTargetDefaultsIntoProjectDescriptions(
  config: ProjectsConfigurations,
  nxJson: NxJsonConfiguration
) {
  for (const proj of Object.values(config.projects)) {
    if (proj.targets) {
      for (const targetName of Object.keys(proj.targets)) {
        const projectTargetDefinition = proj.targets[targetName];
        const defaults = readTargetDefaultsForTarget(
          targetName,
          nxJson.targetDefaults,
          projectTargetDefinition.executor
        );

        if (defaults) {
          proj.targets[targetName] = mergeTargetConfigurations(
            proj,
            targetName,
            defaults
          );
        }
      }
    }
  }
  return config;
}

async function configurationGlobs(
  workspaceRoot: string,
  nxJson: NxJsonConfiguration
): Promise<string[]> {
  let pluginGlobs = await getGlobPatternsFromPluginsAsync(
    nxJson,
    getNxRequirePaths(workspaceRoot),
    workspaceRoot
  );

  return [
    'project.json',
    '**/project.json',
    ...pluginGlobs,
    ...getGlobPatternsFromPackageManagerWorkspaces(workspaceRoot),
  ];
}
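These patterns cross the native boundary and are compiled into a single matcher (the build_glob_set call seen in get_file_data earlier). A minimal sketch of such a matcher in Rust using the globset crate — the crate choice and this helper's exact shape are assumptions on my part, since the diff only shows the call site:

use globset::{Glob, GlobSet, GlobSetBuilder};

// Compile a list of glob patterns into one set that can be matched in a single pass.
fn build_glob_set(globs: &[&str]) -> Result<GlobSet, globset::Error> {
    let mut builder = GlobSetBuilder::new();
    for glob in globs {
        builder.add(Glob::new(glob)?);
    }
    builder.build()
}

fn main() -> Result<(), globset::Error> {
    let set = build_glob_set(&["project.json", "**/project.json"])?;
    assert!(set.is_match("project.json"));
    assert!(set.is_match("libs/ui/project.json"));
    assert!(!set.is_match("libs/ui/src/index.ts"));
    Ok(())
}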