fix(core): optimize daemon output glob matching (#27775)
## Current Behavior

- Nonexistent outputs are treated as globs
- Glob outputs are walked from the workspace root, which is slow on large repos

```
[NX Daemon Server] - 2024-09-06T19:44:39.674Z - Done responding to the client outputsHashesMatch
[NX Daemon Server] - 2024-09-06T19:44:39.674Z - Handled OUTPUTS_HASHES_MATCH. Handling time: 94. Response time: 0.
[NX Daemon Server] - 2024-09-06T19:44:39.775Z - [REQUEST]: Responding to the client. recordOutputsHash
[NX Daemon Server] - 2024-09-06T19:44:39.775Z - Done responding to the client recordOutputsHash
[NX Daemon Server] - 2024-09-06T19:44:39.775Z - Handled RECORD_OUTPUTS_HASH. Handling time: 100. Response time: 0.
[NX Daemon Server] - 2024-09-06T19:44:39.818Z - [REQUEST]: Responding to the client. PROCESS_IN_BACKGROUND
[NX Daemon Server] - 2024-09-06T19:44:39.818Z - Done responding to the client PROCESS_IN_BACKGROUND
[NX Daemon Server] - 2024-09-06T19:44:39.818Z - Handled PROCESS_IN_BACKGROUND. Handling time: 14. Response time: 0.
```

## Expected Behavior

- Nonexistent outputs are treated as globs only when they actually contain glob patterns
- Globs are a bit faster

```
[NX Daemon Server] - 2024-09-06T19:43:36.899Z - Handled OUTPUTS_HASHES_MATCH. Handling time: 0. Response time: 0.
[NX Daemon Server] - 2024-09-06T19:43:36.900Z - [REQUEST]: Responding to the client. recordOutputsHash
[NX Daemon Server] - 2024-09-06T19:43:36.900Z - Done responding to the client recordOutputsHash
[NX Daemon Server] - 2024-09-06T19:43:36.900Z - Handled RECORD_OUTPUTS_HASH. Handling time: 0. Response time: 0.
[NX Daemon Server] - 2024-09-06T19:43:36.944Z - [REQUEST]: Responding to the client. PROCESS_IN_BACKGROUND
[NX Daemon Server] - 2024-09-06T19:43:36.944Z - Done responding to the client PROCESS_IN_BACKGROUND
[NX Daemon Server] - 2024-09-06T19:43:36.944Z - Handled PROCESS_IN_BACKGROUND. Handling time: 13. Response time: 0.
[NX Daemon Server] - 2024-09-06T19:43:36.949Z - Uploading file artifacts
[NX Daemon Server] - 2024-09-06T19:43:36.949Z - Done uploading file artifacts
```

> Note: timings are from the Nx repo and are close enough to be comparable. No real improvement was expected here; this mainly verifies that things didn't get worse.

## Related Issue(s)

Fixes #

---------

Co-authored-by: FrozenPandaz <jasonjean1993@gmail.com>
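The heart of the change, for reviewers skimming the diff below: an output entry is only treated as a glob when it actually contains glob metacharacters, and each glob is partitioned into its static leading directories plus the remaining pattern, so the file walk can start at that subdirectory instead of the workspace root. A minimal sketch of the idea — `partition_simple` is an invented name, and splitting on `/` at the first metacharacter segment is a simplification of what the real `partition_glob` (which parses the glob) does:

```rust
/// Crude metacharacter check, standing in for the real `contains_glob_pattern`.
fn contains_glob_pattern(s: &str) -> bool {
    s.chars().any(|c| "*?[]{}()|!".contains(c))
}

/// Split "dist/app/**/*.js" into ("dist/app", "**/*.js") so the file walk
/// can start at dist/app instead of the workspace root.
fn partition_simple(glob: &str) -> (String, String) {
    let mut leading: Vec<&str> = vec![];
    let mut rest: Vec<&str> = vec![];
    for segment in glob.split('/') {
        // Once a pattern segment appears, everything after it stays
        // on the pattern side, even literal segments.
        if !rest.is_empty() || contains_glob_pattern(segment) {
            rest.push(segment);
        } else {
            leading.push(segment);
        }
    }
    (leading.join("/"), rest.join("/"))
}

fn main() {
    assert_eq!(
        partition_simple("dist/app/**/*.js"),
        ("dist/app".to_string(), "**/*.js".to_string())
    );
    // Entries with no metacharacters are plain paths, not globs, and
    // should never trigger a directory walk at all.
    assert!(!contains_glob_pattern("dist/app/main.js"));
}
```

On a large repo, walking only `dist/app` avoids visiting every other project's files for that output.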
This commit is contained in: parent e71e2f3da8, commit ac9da418bd
```diff
@@ -28,11 +28,7 @@ export default async function (tree: Tree) {
       continue;
     }
-
-    try {
-      validateOutputs(target.outputs);
-    } catch (e) {
-      target.outputs = transformLegacyOutputs(project.root, e);
-    }
+    target.outputs = transformLegacyOutputs(project.root, target.outputs);
   }
   try {
     updateProjectConfiguration(tree, projectName, project);
@@ -44,11 +40,10 @@ export default async function (tree: Tree) {
       (json) => {
         for (const target of Object.values(json.nx?.targets ?? {})) {
           if (target.outputs) {
-            try {
-              validateOutputs(target.outputs);
-            } catch (e) {
-              target.outputs = transformLegacyOutputs(project.root, e);
-            }
+            target.outputs = transformLegacyOutputs(
+              project.root,
+              target.outputs
+            );
           }
         }

@@ -64,11 +59,7 @@ export default async function (tree: Tree) {
     if (!target.outputs) {
       continue;
     }
-    try {
-      validateOutputs(target.outputs);
-    } catch (e: any) {
-      target.outputs = transformLegacyOutputs('{projectRoot}', e);
-    }
+    target.outputs = transformLegacyOutputs('{projectRoot}', target.outputs);
   }

   updateNxJson(tree, nxJson);

```
**packages/nx/src/native/cache/expand_outputs.rs** (vendored, 79 changes)

```diff
@@ -1,9 +1,11 @@
+use hashbrown::HashMap;
 use std::path::{Path, PathBuf};
 use tracing::trace;

-use crate::native::glob::{build_glob_set, contains_glob_pattern};
+use crate::native::glob::{build_glob_set, contains_glob_pattern, glob_transform::partition_glob};
 use crate::native::logger::enable_logger;
 use crate::native::utils::Normalize;
-use crate::native::walker::nx_walker_sync;
+use crate::native::walker::{nx_walker, nx_walker_sync};

 #[napi]
 pub fn expand_outputs(directory: String, entries: Vec<String>) -> anyhow::Result<Vec<String>> {
@@ -70,6 +72,24 @@ where
     Ok(found_paths)
 }

+fn partition_globs_into_map(globs: Vec<String>) -> anyhow::Result<HashMap<String, Vec<String>>> {
+    globs
+        .iter()
+        .map(|glob| partition_glob(glob))
+        // Right now we have an iterator where each item is (root: String, patterns: String[]).
+        // We want a singular root, with the patterns mapped to it.
+        .fold(
+            Ok(HashMap::<String, Vec<String>>::new()),
+            |map_result, parsed_glob| {
+                let mut map = map_result?;
+                let (root, patterns) = parsed_glob?;
+                let entry = map.entry(root).or_insert(vec![]);
+                entry.extend(patterns);
+                Ok(map)
+            },
+        )
+}
+
 #[napi]
 /// Expands the given outputs into a list of existing files.
 /// This is used when hashing outputs
@@ -77,6 +97,8 @@ pub fn get_files_for_outputs(
     directory: String,
     entries: Vec<String>,
 ) -> anyhow::Result<Vec<String>> {
+    enable_logger();
+
     let directory: PathBuf = directory.into();

     let mut globs: Vec<String> = vec![];
@@ -86,7 +108,9 @@ pub fn get_files_for_outputs(
         let path = directory.join(&entry);

         if !path.exists() {
-            globs.push(entry);
+            if contains_glob_pattern(&entry) {
+                globs.push(entry);
+            }
         } else if path.is_dir() {
             directories.push(entry);
         } else {
@@ -95,28 +119,41 @@ pub fn get_files_for_outputs(
     }

     if !globs.is_empty() {
-        // todo(jcammisuli): optimize this as nx_walker_sync is very slow on the root directory. We need to change this to only search smaller directories
-        let glob_set = build_glob_set(&globs)?;
-        let found_paths = nx_walker_sync(&directory, None).filter_map(|path| {
-            if glob_set.is_match(&path) {
-                Some(path.to_normalized_string())
-            } else {
-                None
-            }
-        });
+        let partitioned_globs = partition_globs_into_map(globs)?;
+        for (root, patterns) in partitioned_globs {
+            let root_path = directory.join(&root);
+            let glob_set = build_glob_set(&patterns)?;
+            trace!("walking directory: {:?}", root_path);

-        files.extend(found_paths);
+            let found_paths: Vec<String> = nx_walker(&root_path, false)
+                .filter_map(|file| {
+                    if glob_set.is_match(&file.normalized_path) {
+                        Some(
+                            // root_path contains full directory,
+                            // root is only the leading dirs from glob
+                            PathBuf::from(&root)
+                                .join(&file.normalized_path)
+                                .to_normalized_string(),
+                        )
+                    } else {
+                        None
+                    }
+                })
+                .collect();
+
+            files.extend(found_paths);
+        }
     }

     if !directories.is_empty() {
         for dir in directories {
             let dir = PathBuf::from(dir);
             let dir_path = directory.join(&dir);
-            let files_in_dir = nx_walker_sync(&dir_path, None).filter_map(|e| {
-                let path = dir_path.join(&e);
+            let files_in_dir = nx_walker(&dir_path, false).filter_map(|e| {
+                let path = dir_path.join(&e.normalized_path);

                 if path.is_file() {
-                    Some(dir.join(e).to_normalized_string())
+                    Some(dir.join(e.normalized_path).to_normalized_string())
                 } else {
                     None
                 }
@@ -221,4 +258,24 @@ mod test {
         ]
         );
     }
+
+    #[test]
+    fn should_get_files_for_outputs_with_glob() {
+        let temp = setup_fs();
+        let entries = vec![
+            "packages/nx/src/native/*.node".to_string(),
+            "folder/nested-folder".to_string(),
+            "test.txt".to_string(),
+        ];
+        let mut result = get_files_for_outputs(temp.display().to_string(), entries).unwrap();
+        result.sort();
+        assert_eq!(
+            result,
+            vec![
+                "folder/nested-folder",
+                "packages/nx/src/native/nx.darwin-arm64.node",
+                "test.txt"
+            ]
+        );
+    }
 }
```
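Several output globs often share one leading directory, and `partition_globs_into_map` above folds them into a map keyed by root so each root is walked exactly once. A hedged sketch of that grouping with plain std types — `partition` here is a hard-coded stand-in for `partition_glob`, and error propagation is omitted:

```rust
use std::collections::HashMap;

/// Hypothetical stand-in for `partition_glob`: returns (root, patterns).
fn partition(glob: &str) -> (String, Vec<String>) {
    match glob {
        "dist/app/**/*.js" => ("dist/app".into(), vec!["**/*.js".into()]),
        "dist/app/**/*.map" => ("dist/app".into(), vec!["**/*.map".into()]),
        "coverage/**/lcov.info" => ("coverage".into(), vec!["**/lcov.info".into()]),
        _ => (String::new(), vec![glob.into()]),
    }
}

fn group_by_root(globs: &[&str]) -> HashMap<String, Vec<String>> {
    let mut map: HashMap<String, Vec<String>> = HashMap::new();
    for glob in globs {
        let (root, patterns) = partition(glob);
        // All patterns that share a root are matched in a single walk.
        map.entry(root).or_default().extend(patterns);
    }
    map
}

fn main() {
    let map = group_by_root(&[
        "dist/app/**/*.js",
        "dist/app/**/*.map",
        "coverage/**/lcov.info",
    ]);
    // `dist/app` is walked once for both patterns.
    assert_eq!(map["dist/app"], ["**/*.js", "**/*.map"]);
    assert_eq!(map["coverage"], ["**/lcov.info"]);
}
```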
**packages/nx/src/native/cache/mod.rs** (vendored, 3 changes)

```diff
@@ -1,3 +1,4 @@
+pub mod cache;
 pub mod expand_outputs;
 pub mod file_ops;
-pub mod cache;
+pub mod validate_outputs;
```
**packages/nx/src/native/cache/validate_outputs.rs** (vendored, new file, 77 lines)

```diff
@@ -0,0 +1,77 @@
+use itertools::Itertools;
+use regex::Regex;
+
+use crate::native::glob::{contains_glob_pattern, glob_transform::partition_glob};
+
+const ALLOWED_WORKSPACE_ROOT_OUTPUT_PREFIXES: [&str; 2] = ["!{workspaceRoot}", "{workspaceRoot}"];
+
+fn is_missing_prefix(output: &str) -> bool {
+    let re = Regex::new(r"^!?\{[\s\S]+\}").expect("Output pattern regex should compile");
+
+    !re.is_match(output)
+}
+
+#[napi]
+pub fn validate_outputs(outputs: Vec<String>) -> anyhow::Result<()> {
+    let outputs_len = outputs.len();
+    let mut missing_prefix = Vec::with_capacity(outputs_len);
+    let mut workspace_globs = Vec::with_capacity(outputs_len);
+
+    for output in outputs.iter() {
+        if is_missing_prefix(output) {
+            missing_prefix.push(output);
+        } else {
+            for prefix in ALLOWED_WORKSPACE_ROOT_OUTPUT_PREFIXES.iter() {
+                if let Some(trimmed) = output.strip_prefix(prefix) {
+                    if contains_glob_pattern(&trimmed) {
+                        let (root, _) = partition_glob(&trimmed)?;
+                        if root.is_empty() {
+                            workspace_globs.push(output);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    if workspace_globs.is_empty() && missing_prefix.is_empty() {
+        return Ok(());
+    }
+
+    let mut error_message = String::new();
+    if !missing_prefix.is_empty() {
+        error_message.push_str(&format!(
+            "The following outputs are invalid: \n - {}\n\nRun `nx repair` to fix this.",
+            missing_prefix.iter().join("\n - ")
+        ));
+    }
+    if !workspace_globs.is_empty() {
+        error_message.push_str(&format!(
+            "The following outputs are defined by a glob pattern from the workspace root: \n - {}\n\nThese can be slow, replace them with a more specific pattern.",
+            workspace_globs.iter().join("\n - ")
+        ));
+    }
+
+    Err(anyhow::anyhow!(error_message))
+}
+
+#[napi]
+pub fn get_transformable_outputs(outputs: Vec<String>) -> Vec<String> {
+    outputs
+        .into_iter()
+        .filter(|output| is_missing_prefix(output))
+        .collect()
+}
+
+#[cfg(test)]
+mod test {
+    use super::is_missing_prefix;
+
+    #[test]
+    fn test_is_missing_prefix() {
+        assert!(is_missing_prefix("dist"));
+        assert!(is_missing_prefix("!dist"));
+        assert!(!is_missing_prefix("{workspaceRoot}/dist"));
+        assert!(!is_missing_prefix("!{workspaceRoot}/dist"));
+    }
+}
```
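The new validator distinguishes two failure classes: outputs missing a `{workspaceRoot}`/`{projectRoot}`-style prefix (fixable via `nx repair`), and glob outputs whose static root is empty, which would force a walk from the workspace root. A simplified restatement of that decision table — `classify` and `Verdict` are illustrative names, and the prefix/metacharacter checks below are cruder than the real regex- and parser-based ones:

```rust
// A hedged restatement of the validator's decision table, not the real API.
#[derive(Debug, PartialEq)]
enum Verdict {
    Ok,
    MissingPrefix,     // e.g. "dist" — fixable by `nx repair`
    WorkspaceRootGlob, // e.g. "{workspaceRoot}/**/dist" — glob with no static root
}

fn classify(output: &str) -> Verdict {
    let body = output.strip_prefix('!').unwrap_or(output);
    if !body.starts_with('{') {
        return Verdict::MissingPrefix;
    }
    if let Some(rest) = body.strip_prefix("{workspaceRoot}/") {
        // Sketch assumption: if the very first segment contains a
        // metacharacter, the glob has an empty static root.
        let first = rest.split('/').next().unwrap_or("");
        if first.chars().any(|c| "*?[]{}()|!".contains(c)) {
            return Verdict::WorkspaceRootGlob;
        }
    }
    Verdict::Ok
}

fn main() {
    assert_eq!(classify("dist"), Verdict::MissingPrefix);
    assert_eq!(classify("{projectRoot}/*.js"), Verdict::Ok);
    assert_eq!(classify("{workspaceRoot}/**/dist/*.js"), Verdict::WorkspaceRootGlob);
    // A glob rooted in a subdirectory is fine: the walk starts at dist/.
    assert_eq!(classify("{workspaceRoot}/dist/**/*.js"), Verdict::Ok);
}
```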
```diff
@@ -1,6 +1,6 @@
 mod glob_group;
 mod glob_parser;
-mod glob_transform;
+pub mod glob_transform;

 use crate::native::glob::glob_transform::convert_glob;
 use globset::{GlobBuilder, GlobSet, GlobSetBuilder};
```
```diff
@@ -2,6 +2,8 @@ use crate::native::glob::glob_group::GlobGroup;
 use crate::native::glob::glob_parser::parse_glob;
 use itertools::Itertools;
 use std::collections::HashSet;
+use itertools::Either::{Left, Right};
+use super::contains_glob_pattern;

 #[derive(Debug)]
 enum GlobType {
@@ -9,8 +11,7 @@ enum GlobType {
     Positive(String),
 }

-pub fn convert_glob(glob: &str) -> anyhow::Result<Vec<String>> {
-    let (negated, parsed) = parse_glob(glob)?;
+fn convert_glob_segments(negated: bool, parsed: Vec<Vec<GlobGroup>>) -> Vec<String> {
     let mut built_segments: Vec<Vec<GlobType>> = Vec::new();
     for (index, glob_segment) in parsed.iter().enumerate() {
         let is_last = index == parsed.len() - 1;
@@ -58,7 +59,12 @@ pub fn convert_glob(glob: &str) -> anyhow::Result<Vec<String>> {
         .into_iter()
         .collect::<Vec<_>>();
     globs.sort();
-    Ok(globs)
+    globs
+}
+
+pub fn convert_glob(glob: &str) -> anyhow::Result<Vec<String>> {
+    let (negated, parsed) = parse_glob(glob)?;
+    Ok(convert_glob_segments(negated, parsed))
 }

 fn build_segment(
@@ -107,6 +113,31 @@ fn build_segment(
     }
 }

+pub fn partition_glob(glob: &str) -> anyhow::Result<(String, Vec<String>)> {
+    let (negated, groups) = parse_glob(glob)?;
+    // Partition glob into leading directories and patterns that should be matched
+    let mut has_patterns = false;
+    let (leading_dir_segments, pattern_segments): (Vec<String>, _) = groups
+        .into_iter()
+        .filter(|group| !group.is_empty())
+        .partition_map(|group| {
+            match &group[0] {
+                GlobGroup::NonSpecial(value) if !contains_glob_pattern(&value) && !has_patterns => {
+                    Left(value.to_string())
+                }
+                _ => {
+                    has_patterns = true;
+                    Right(group)
+                }
+            }
+        });
+
+    Ok((
+        leading_dir_segments.join("/"),
+        convert_glob_segments(negated, pattern_segments),
+    ))
+}
+
 #[cfg(test)]
 mod test {
     use super::convert_glob;
@@ -233,4 +264,32 @@ mod test {
         ]
         );
     }
+
+    #[test]
+    fn should_partition_glob_with_leading_dirs() {
+        let (leading_dirs, globs) =
+            super::partition_glob("dist/app/**/!(README|LICENSE).(js|ts)").unwrap();
+        assert_eq!(leading_dirs, "dist/app");
+        assert_eq!(globs, ["!**/{README,LICENSE}.{js,ts}", "**/*.{js,ts}",]);
+    }
+
+    #[test]
+    fn should_partition_glob_with_leading_dirs_and_simple_patterns() {
+        let (leading_dirs, globs) = super::partition_glob("dist/app/**/*.css").unwrap();
+        assert_eq!(leading_dirs, "dist/app");
+        assert_eq!(globs, ["**/*.css"]);
+    }
+
+    #[test]
+    fn should_partition_glob_with_leading_dirs_dirs_and_patterns() {
+        let (leading_dirs, globs) = super::partition_glob("dist/app/**/js/*.js").unwrap();
+        assert_eq!(leading_dirs, "dist/app");
+        assert_eq!(globs, ["**/js/*.js"]);
+    }
+
+    #[test]
+    fn should_partition_glob_with_leading_dirs_and_no_patterns() {
+        let (leading_dirs, globs) = super::partition_glob("dist/app/").unwrap();
+        assert_eq!(leading_dirs, "dist/app");
+        assert_eq!(globs, [] as [String; 0]);
+    }
 }
```
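`partition_glob` leans on itertools' `partition_map`, which routes each item to one of two collections in a single pass; the `has_patterns` flag makes the split positional, so a literal segment appearing after the first pattern stays on the pattern side. A small self-contained demonstration of the same mechanism, with a simplified metacharacter check:

```rust
use itertools::{Either, Itertools};

fn main() {
    // partition_map sends each item to Left or Right in one pass, which is
    // how partition_glob separates static leading segments (Left) from
    // pattern segments (Right).
    let segments = ["dist", "app", "**", "*.js"];
    let mut seen_pattern = false;
    let (leading, patterns): (Vec<&str>, Vec<&str>) =
        segments.iter().partition_map(|s| {
            if !seen_pattern && !s.contains(|c| c == '*' || c == '?') {
                Either::Left(*s)
            } else {
                // Everything from the first pattern onward is a pattern.
                seen_pattern = true;
                Either::Right(*s)
            }
        });
    assert_eq!(leading.join("/"), "dist/app");
    assert_eq!(patterns, ["**", "*.js"]);
}
```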
**packages/nx/src/native/index.d.ts** (vendored, 4 changes)

```diff
@@ -150,6 +150,8 @@ export declare function getBinaryTarget(): string
  */
 export declare function getFilesForOutputs(directory: string, entries: Array<string>): Array<string>

+export declare function getTransformableOutputs(outputs: Array<string>): Array<string>
+
 export declare function hashArray(input: Array<string>): string

 export interface HashDetails {
@@ -263,6 +265,8 @@ export interface UpdatedWorkspaceFiles {
   externalReferences: NxWorkspaceFilesExternals
 }

+export declare function validateOutputs(outputs: Array<string>): void
+
 export interface WatchEvent {
   path: string
   type: EventType
```
```diff
@@ -378,10 +378,12 @@ module.exports.expandOutputs = nativeBinding.expandOutputs
 module.exports.findImports = nativeBinding.findImports
 module.exports.getBinaryTarget = nativeBinding.getBinaryTarget
 module.exports.getFilesForOutputs = nativeBinding.getFilesForOutputs
+module.exports.getTransformableOutputs = nativeBinding.getTransformableOutputs
 module.exports.hashArray = nativeBinding.hashArray
 module.exports.hashFile = nativeBinding.hashFile
 module.exports.IS_WASM = nativeBinding.IS_WASM
 module.exports.remove = nativeBinding.remove
 module.exports.testOnlyTransferFileMap = nativeBinding.testOnlyTransferFileMap
 module.exports.transferProjectGraph = nativeBinding.transferProjectGraph
+module.exports.validateOutputs = nativeBinding.validateOutputs
 module.exports.WorkspaceErrors = nativeBinding.WorkspaceErrors
```
```diff
@@ -1480,7 +1480,7 @@ import(myTag`react@${version}`);
         let root = PathBuf::from(ancestors.next().unwrap());

         let glob = build_glob_set(&["**/*.[jt]s"]).unwrap();
-        let files = nx_walker(root.clone())
+        let files = nx_walker(root.clone(), true)
             .filter(|file| glob.is_match(&file.full_path))
             .map(|file| file.full_path)
             .collect::<Vec<_>>();
```
```diff
@@ -1,9 +1,10 @@
-use ignore::WalkBuilder;
 use std::fmt::Debug;
 use std::path::{Path, PathBuf};
+use ignore::{WalkBuilder};

 use crate::native::glob::build_glob_set;

+use crate::native::logger::enable_logger;
 use crate::native::utils::{get_mod_time, Normalize};
 use walkdir::WalkDir;

@@ -93,7 +94,7 @@ where

 /// Walk the directory and ignore files from .gitignore and .nxignore
 #[cfg(not(target_arch = "wasm32"))]
-pub fn nx_walker<P>(directory: P) -> impl Iterator<Item = NxFile>
+pub fn nx_walker<P>(directory: P, use_ignores: bool) -> impl Iterator<Item = NxFile>
 where
     P: AsRef<Path>,
 {
@@ -102,9 +103,10 @@ where

     use crossbeam_channel::unbounded;
     use tracing::trace;
+    enable_logger();

     let directory = directory.as_ref();
-    let mut walker = create_walker(directory);
+    let mut walker = create_walker(directory, use_ignores);

     let cpus = available_parallelism().map_or(2, |n| n.get()) - 1;

@@ -151,9 +153,9 @@ where
     receiver_thread.join().unwrap()
 }

-fn create_walker<P>(directory: P) -> WalkBuilder
+fn create_walker<P>(directory: P, use_ignores: bool) -> WalkBuilder
 where
-    P: AsRef<Path>
+    P: AsRef<Path>,
 {
     let directory: PathBuf = directory.as_ref().into();

@@ -169,7 +171,10 @@ where
     let mut walker = WalkBuilder::new(&directory);
     walker.require_git(false);
     walker.hidden(false);
-    walker.add_custom_ignore_filename(".nxignore");
+    walker.git_ignore(use_ignores);
+    if use_ignores {
+        walker.add_custom_ignore_filename(".nxignore");
+    }

     // We should make sure to always ignore node_modules and the .git folder
     walker.filter_entry(move |entry| {
@@ -210,12 +215,12 @@ mod test {
     #[test]
     fn it_walks_a_directory() {
         // handle empty workspaces
-        let content = nx_walker("/does/not/exist").collect::<Vec<_>>();
+        let content = nx_walker("/does/not/exist", true).collect::<Vec<_>>();
         assert!(content.is_empty());

         let temp_dir = setup_fs();

-        let mut content = nx_walker(&temp_dir).collect::<Vec<_>>();
+        let mut content = nx_walker(&temp_dir, true).collect::<Vec<_>>();
         content.sort();
         let content = content
             .into_iter()
@@ -282,7 +287,7 @@ nested/child-two/
     )
     .unwrap();

-    let mut file_names = nx_walker(temp_dir)
+    let mut file_names = nx_walker(temp_dir, true)
         .map(
             |NxFile {
                  normalized_path: relative_path,
```
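`nx_walker` now takes a `use_ignores` flag, and output expansion passes `false` — presumably because build outputs are usually gitignored, so an ignore-respecting walker would never see them. A hedged sketch of the toggle as configured in `create_walker`, using the `ignore` crate's `WalkBuilder` (the standalone function and paths are illustrative):

```rust
use ignore::WalkBuilder;

/// Sketch of `create_walker(directory, use_ignores)`. With
/// `use_ignores = false`, .gitignore and .nxignore are not consulted,
/// which matters when expanding build outputs that are typically gitignored.
fn create_walker(directory: &str, use_ignores: bool) -> WalkBuilder {
    let mut walker = WalkBuilder::new(directory);
    walker.require_git(false);          // apply .gitignore even outside a git repo
    walker.hidden(false);               // do not skip dotfiles
    walker.git_ignore(use_ignores);     // toggle .gitignore handling
    if use_ignores {
        walker.add_custom_ignore_filename(".nxignore");
    }
    walker
}

fn main() {
    // Ignore-aware walk (e.g. project graph): respects .gitignore/.nxignore.
    let _with_ignores = create_walker(".", true).build();
    // Raw walk (e.g. output expansion): sees gitignored dist/ folders too.
    let _without_ignores = create_walker(".", false).build();
}
```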
```diff
@@ -10,7 +10,7 @@ use crate::native::walker::{nx_walker, NxFile};
 use crate::native::workspace::files_archive::{NxFileHashed, NxFileHashes};

 pub fn full_files_hash(workspace_root: &Path) -> NxFileHashes {
-    let files = nx_walker(workspace_root).collect::<Vec<_>>();
+    let files = nx_walker(workspace_root, true).collect::<Vec<_>>();
     trace!("Found {} files", files.len());
     hash_files(files).into_iter().collect()
 }
@@ -19,7 +19,7 @@ pub fn selective_files_hash(
     workspace_root: &Path,
     mut archived_files: NxFileHashes,
 ) -> NxFileHashes {
-    let files = nx_walker(workspace_root).collect::<Vec<_>>();
+    let files = nx_walker(workspace_root, true).collect::<Vec<_>>();
     let mut archived = vec![];
     let mut not_archived = vec![];
     let now = std::time::Instant::now();
```
```diff
@@ -405,47 +405,28 @@ describe('utils', () => {
   describe('transformLegacyOutputs', () => {
     it('should prefix paths with {workspaceRoot}', () => {
       const outputs = ['dist'];
-      try {
-        validateOutputs(outputs);
-      } catch (e) {
-        const result = transformLegacyOutputs('myapp', e);
-        expect(result).toEqual(['{workspaceRoot}/dist']);
-      }
-      expect.assertions(1);
+      const result = transformLegacyOutputs('myapp', outputs);
+      expect(result).toEqual(['{workspaceRoot}/dist']);
     });

     it('should prefix unix-absolute paths with {workspaceRoot}', () => {
       const outputs = ['/dist'];
-      try {
-        validateOutputs(outputs);
-      } catch (e) {
-        const result = transformLegacyOutputs('myapp', e);
-        expect(result).toEqual(['{workspaceRoot}/dist']);
-      }
-      expect.assertions(1);
+      const result = transformLegacyOutputs('myapp', outputs);
+      expect(result).toEqual(['{workspaceRoot}/dist']);
     });
-  });

     it('should prefix relative paths with {projectRoot}', () => {
-      const outputs = ['/dist'];
-      try {
-        validateOutputs(outputs);
-      } catch (e) {
-        const result = transformLegacyOutputs('myapp', e);
-        expect(result).toEqual(['{workspaceRoot}/dist']);
-      }
-      expect.assertions(1);
+      const outputs = ['./dist'];
+      const result = transformLegacyOutputs('myapp', outputs);
+      expect(result).toEqual(['{projectRoot}/dist']);
     });

     it('should prefix paths within the project with {projectRoot}', () => {
       const outputs = ['myapp/dist'];
-      try {
-        validateOutputs(outputs);
-      } catch (e) {
-        const result = transformLegacyOutputs('myapp', e);
-        expect(result).toEqual(['{projectRoot}/dist']);
-      }
-      expect.assertions(1);
+
+      const result = transformLegacyOutputs('myapp', outputs);
+      expect(result).toEqual(['{projectRoot}/dist']);
     });
+  });

   describe('expandDependencyConfigSyntaxSugar', () => {
@@ -772,6 +753,36 @@ describe('utils', () => {
       "The 'outputs' field must contain only strings, but received types: [string, number, object, boolean, object, object]"
     );
   });
+
+  it('throws an error if the output is a glob pattern from the workspace root', () => {
+    expect(() => validateOutputs(['{workspaceRoot}/**/dist/*.js']))
+      .toThrowErrorMatchingInlineSnapshot(`
+      "The following outputs are defined by a glob pattern from the workspace root:
+       - {workspaceRoot}/**/dist/*.js
+
+      These can be slow, replace them with a more specific pattern."
+    `);
+  });
+
+  it("shouldn't throw an error if the output is a glob pattern from the project root", () => {
+    expect(() => validateOutputs(['{projectRoot}/*.js'])).not.toThrow();
+  });
+
+  it("shouldn't throw an error if the pattern is a glob based in a subdirectory of workspace root", () => {
+    expect(() =>
+      validateOutputs(['{workspaceRoot}/dist/**/*.js'])
+    ).not.toThrow();
+  });
+
+  it("throws an error if the output doesn't start with a prefix", () => {
+    expect(() => validateOutputs(['dist']))
+      .toThrowErrorMatchingInlineSnapshot(`
+      "The following outputs are invalid:
+       - dist
+
+      Run \`nx repair\` to fix this."
+    `);
+  });
 });
 });
```
```diff
@@ -18,6 +18,10 @@ import { readProjectsConfigurationFromProjectGraph } from '../project-graph/proj
 import { findMatchingProjects } from '../utils/find-matching-projects';
 import { minimatch } from 'minimatch';
 import { isGlobPattern } from '../utils/globs';
+import {
+  getTransformableOutputs,
+  validateOutputs as nativeValidateOutputs,
+} from '../native';

 export type NormalizedTargetDependencyConfig = TargetDependencyConfig & {
   projects: string[];
@@ -253,24 +257,16 @@ function assertOutputsAreValidType(outputs: unknown) {
 export function validateOutputs(outputs: string[]) {
   assertOutputsAreValidType(outputs);

-  const invalidOutputs = new Set<string>();
-
-  for (const output of outputs) {
-    if (!/^!?{[\s\S]+}/.test(output)) {
-      invalidOutputs.add(output);
-    }
-  }
-  if (invalidOutputs.size > 0) {
-    throw new InvalidOutputsError(outputs, invalidOutputs);
-  }
+  nativeValidateOutputs(outputs);
 }

-export function transformLegacyOutputs(
-  projectRoot: string,
-  error: InvalidOutputsError
-) {
-  return error.outputs.map((output) => {
-    if (!error.invalidOutputs.has(output)) {
+export function transformLegacyOutputs(projectRoot: string, outputs: string[]) {
+  const transformableOutputs = new Set(getTransformableOutputs(outputs));
+  if (transformableOutputs.size === 0) {
+    return outputs;
+  }
+  return outputs.map((output) => {
+    if (!transformableOutputs.has(output)) {
       return output;
     }
```
```diff
@@ -89,10 +89,10 @@ describe('collapseExpandedOutputs', () => {
   it('should collapse long lists of files in nested directories', async () => {
     const outputs = [];
     // Create dist/apps/app1/n/m.js + dist/apps/app1/n/m.d.ts
-    for (let i = 0; i < 6; i++) {
+    for (let i = 0; i < 6000; i++) {
       outputs.push(`dist/apps/app1/${i}.js`);
       outputs.push(`dist/apps/app1/${i}.d.ts`);
-      for (let j = 0; j < 6; j++) {
+      for (let j = 0; j < 600; j++) {
         outputs.push(`dist/apps/app1/${i}/${j}.js`);
         outputs.push(`dist/apps/app1/${i}/${j}.d.ts`);
       }
```
```diff
@@ -236,7 +236,7 @@ describe('Storybook - Convert To Inferred', () => {
       },
       "outputs": [
         "{projectRoot}/{options.output-dir}",
-        "{workspaceRoot}/{projectRoot}/storybook-static",
+        "{projectRoot}/storybook-static",
         "{options.output-dir}",
         "{options.outputDir}",
         "{options.o}",
@@ -312,7 +312,7 @@ describe('Storybook - Convert To Inferred', () => {
       },
       "outputs": [
         "{projectRoot}/{options.output-dir}",
-        "{workspaceRoot}/{projectRoot}/storybook-static",
+        "{projectRoot}/storybook-static",
         "{options.output-dir}",
         "{options.outputDir}",
         "{options.o}",
@@ -387,7 +387,7 @@ describe('Storybook - Convert To Inferred', () => {
       },
       "outputs": [
         "{projectRoot}/{options.output-dir}",
-        "{workspaceRoot}/{projectRoot}/storybook-static",
+        "{projectRoot}/storybook-static",
         "{options.output-dir}",
         "{options.outputDir}",
         "{options.o}",
@@ -446,7 +446,7 @@ describe('Storybook - Convert To Inferred', () => {
       },
       "outputs": [
         "{projectRoot}/{options.output-dir}",
-        "{workspaceRoot}/{projectRoot}/storybook-static",
+        "{projectRoot}/storybook-static",
        "{options.output-dir}",
         "{options.outputDir}",
         "{options.o}",
@@ -565,7 +565,7 @@ describe('Storybook - Convert To Inferred', () => {
       },
       "outputs": [
         "{projectRoot}/{options.output-dir}",
-        "{workspaceRoot}/{projectRoot}/storybook-static",
+        "{projectRoot}/storybook-static",
         "{options.output-dir}",
         "{options.outputDir}",
         "{options.o}",
@@ -597,7 +597,7 @@ describe('Storybook - Convert To Inferred', () => {
       },
       "outputs": [
         "{projectRoot}/{options.output-dir}",
-        "{workspaceRoot}/{projectRoot}/storybook-static",
+        "{projectRoot}/storybook-static",
         "{options.output-dir}",
         "{options.outputDir}",
         "{options.o}",
```
```diff
@@ -74,7 +74,7 @@ describe('@nx/storybook/plugin', () => {
       },
       cache: true,
       outputs: [
-        '{workspaceRoot}/{projectRoot}/storybook-static',
+        '{projectRoot}/storybook-static',
         '{options.output-dir}',
         '{options.outputDir}',
         '{options.o}',
@@ -127,7 +127,7 @@ describe('@nx/storybook/plugin', () => {
       },
       cache: true,
       outputs: [
-        '{workspaceRoot}/{projectRoot}/storybook-static',
+        '{projectRoot}/storybook-static',
         '{options.output-dir}',
         '{options.outputDir}',
         '{options.o}',
@@ -192,7 +192,7 @@ describe('@nx/storybook/plugin', () => {
       },
       cache: true,
       outputs: [
-        '{workspaceRoot}/{projectRoot}/storybook-static',
+        '{projectRoot}/storybook-static',
         '{options.output-dir}',
         '{options.outputDir}',
         '{options.o}',
```
```diff
@@ -2,16 +2,16 @@ import {
   CreateDependencies,
   CreateNodes,
   CreateNodesContext,
-  TargetConfiguration,
   detectPackageManager,
   joinPathFragments,
   parseJson,
   readJsonFile,
+  TargetConfiguration,
   writeJsonFile,
 } from '@nx/devkit';
 import { dirname, join } from 'path';
 import { getNamedInputs } from '@nx/devkit/src/utils/get-named-inputs';
-import { existsSync, readFileSync, readdirSync } from 'fs';
+import { existsSync, readdirSync, readFileSync } from 'fs';
 import { calculateHashForCreateNodes } from '@nx/devkit/src/utils/calculate-hash-for-create-nodes';
 import { workspaceDataDirectory } from 'nx/src/utils/cache-directory';
 import { getLockFileName } from '@nx/js';
@@ -109,7 +109,7 @@ async function buildStorybookTargets(
   context: CreateNodesContext,
   projectName: string
 ) {
-  const buildOutputs = getOutputs(projectRoot);
+  const buildOutputs = getOutputs();

   const namedInputs = getNamedInputs(projectRoot, context);

@@ -274,11 +274,9 @@ async function getStorybookFramework(
   return typeof framework === 'string' ? framework : framework.name;
 }

-function getOutputs(projectRoot: string): string[] {
-  const normalizedOutputPath = normalizeOutputPath(projectRoot);
-
+function getOutputs(): string[] {
   const outputs = [
-    normalizedOutputPath,
     `{projectRoot}/storybook-static`,
     `{options.output-dir}`,
     `{options.outputDir}`,
     `{options.o}`,
@@ -287,14 +285,6 @@ function getOutputs(projectRoot: string): string[] {
   return outputs;
 }

-function normalizeOutputPath(projectRoot: string): string | undefined {
-  if (projectRoot === '.') {
-    return `{projectRoot}/storybook-static`;
-  } else {
-    return `{workspaceRoot}/{projectRoot}/storybook-static`;
-  }
-}
-
 function normalizeOptions(
   options: StorybookPluginOptions
 ): StorybookPluginOptions {
```