fix(core): handle complex glob patterns within rust (#18242)

This commit is contained in:
Jonathan Cammisuli 2023-07-25 11:11:22 -04:00 committed by GitHub
parent e6c67e41ec
commit b6db266c9e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 322 additions and 192 deletions

45
Cargo.lock generated
View File

@ -33,6 +33,15 @@ dependencies = [
"memchr",
]
[[package]]
name = "aho-corasick"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
dependencies = [
"memchr",
]
[[package]]
name = "allocator-api2"
version = "0.2.14"
@ -155,7 +164,7 @@ checksum = "c3d4260bcc2e8fc9df1eac4919a720effeb63a3f0952f5bf4944adfa18897f09"
dependencies = [
"memchr",
"once_cell",
"regex-automata",
"regex-automata 0.1.10",
"serde",
]
@ -740,7 +749,7 @@ version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc"
dependencies = [
"aho-corasick",
"aho-corasick 0.7.20",
"bstr",
"fnv",
"log",
@ -1067,7 +1076,7 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
dependencies = [
"regex-automata",
"regex-automata 0.1.10",
]
[[package]]
@ -1343,7 +1352,9 @@ dependencies = [
"napi",
"napi-build",
"napi-derive",
"once_cell",
"rayon",
"regex",
"swc_common",
"swc_ecma_ast",
"swc_ecma_dep_graph",
@ -1652,13 +1663,14 @@ dependencies = [
[[package]]
name = "regex"
version = "1.7.0"
version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575"
dependencies = [
"aho-corasick",
"aho-corasick 1.0.2",
"memchr",
"regex-syntax",
"regex-automata 0.3.3",
"regex-syntax 0.7.4",
]
[[package]]
@ -1667,7 +1679,18 @@ version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
dependencies = [
"regex-syntax",
"regex-syntax 0.6.28",
]
[[package]]
name = "regex-automata"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310"
dependencies = [
"aho-corasick 1.0.2",
"memchr",
"regex-syntax 0.7.4",
]
[[package]]
@ -1676,6 +1699,12 @@ version = "0.6.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
[[package]]
name = "regex-syntax"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2"
[[package]]
name = "rustc-hash"
version = "1.1.0"

View File

@ -328,6 +328,21 @@ But if the above is not possible, globs (parsed by the [GlobSet](https://docs.rs
}
```
More advanced patterns can be used to exclude files and folders in a single line:
```json
{
"targets": {
"build-js": {
"outputs": ["{workspaceRoot}/dist/libs/!(cache|.next)/**/*.{js,map}"]
},
"build-css": {
"outputs": ["{workspaceRoot}/dist/libs/mylib/**/!(secondary).css"]
}
}
}
```
### dependsOn
Targets can depend on other targets. This is the relevant portion of the configuration file:

View File

@ -1,10 +1,12 @@
import {
cleanupProject,
directoryExists,
listFiles,
newProject,
readFile,
rmDist,
runCLI,
tmpProjPath,
uniq,
updateFile,
updateJson,
@ -157,18 +159,23 @@ describe('cache', () => {
updateProjectConfig(mylib, (c) => {
c.targets.build = {
executor: 'nx:run-commands',
outputs: ['{workspaceRoot}/dist/*.{txt,md}'],
outputs: ['{workspaceRoot}/dist/!(.next)/**/!(z|x).(txt|md)'],
options: {
commands: [
'rm -rf dist',
'mkdir dist',
'echo a > dist/a.txt',
'echo b > dist/b.txt',
'echo c > dist/c.txt',
'echo d > dist/d.txt',
'echo e > dist/e.txt',
'echo f > dist/f.md',
'echo g > dist/g.html',
'mkdir dist/apps',
'mkdir dist/.next',
'echo a > dist/apps/a.txt',
'echo b > dist/apps/b.txt',
'echo c > dist/apps/c.txt',
'echo d > dist/apps/d.txt',
'echo e > dist/apps/e.txt',
'echo f > dist/apps/f.md',
'echo g > dist/apps/g.html',
'echo h > dist/.next/h.txt',
'echo x > dist/apps/x.txt',
'echo z > dist/apps/z.md',
],
parallel: false,
},
@ -183,7 +190,10 @@ describe('cache', () => {
// Rerun without touching anything
const rerunWithUntouchedOutputs = runCLI(`build ${mylib}`);
expect(rerunWithUntouchedOutputs).toContain('local cache');
const outputsWithUntouchedOutputs = listFiles('dist');
const outputsWithUntouchedOutputs = [
...listFiles('dist/apps'),
...listFiles('dist/.next').map((f) => `.next/${f}`),
];
expect(outputsWithUntouchedOutputs).toContain('a.txt');
expect(outputsWithUntouchedOutputs).toContain('b.txt');
expect(outputsWithUntouchedOutputs).toContain('c.txt');
@ -191,14 +201,20 @@ describe('cache', () => {
expect(outputsWithUntouchedOutputs).toContain('e.txt');
expect(outputsWithUntouchedOutputs).toContain('f.md');
expect(outputsWithUntouchedOutputs).toContain('g.html');
expect(outputsWithUntouchedOutputs).toContain('.next/h.txt');
expect(outputsWithUntouchedOutputs).toContain('x.txt');
expect(outputsWithUntouchedOutputs).toContain('z.md');
// Create a file in the dist that does not match output glob
updateFile('dist/c.ts', '');
updateFile('dist/apps/c.ts', '');
// Rerun
const rerunWithNewUnrelatedFile = runCLI(`build ${mylib}`);
expect(rerunWithNewUnrelatedFile).toContain('local cache');
const outputsAfterAddingUntouchedFileAndRerunning = listFiles('dist');
const outputsAfterAddingUntouchedFileAndRerunning = [
...listFiles('dist/apps'),
...listFiles('dist/.next').map((f) => `.next/${f}`),
];
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('a.txt');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('b.txt');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('c.txt');
@ -206,6 +222,11 @@ describe('cache', () => {
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('e.txt');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('f.md');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('g.html');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain(
'.next/h.txt'
);
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('x.txt');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('z.md');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('c.ts');
// Clear Dist
@ -214,7 +235,8 @@ describe('cache', () => {
// Rerun
const rerunWithoutOutputs = runCLI(`build ${mylib}`);
expect(rerunWithoutOutputs).toContain('read the output from the cache');
const outputsWithoutOutputs = listFiles('dist');
const outputsWithoutOutputs = listFiles('dist/apps');
expect(directoryExists(`${tmpProjPath()}/dist/.next`)).toBe(false);
expect(outputsWithoutOutputs).toContain('a.txt');
expect(outputsWithoutOutputs).toContain('b.txt');
expect(outputsWithoutOutputs).toContain('c.txt');
@ -223,6 +245,8 @@ describe('cache', () => {
expect(outputsWithoutOutputs).toContain('f.md');
expect(outputsWithoutOutputs).not.toContain('c.ts');
expect(outputsWithoutOutputs).not.toContain('g.html');
expect(outputsWithoutOutputs).not.toContain('x.txt');
expect(outputsWithoutOutputs).not.toContain('z.md');
});
it('should use consider filesets when hashing', async () => {

View File

@ -13,8 +13,10 @@ hashbrown = { version = "0.14.0", features = ["rayon"] }
ignore = '0.4'
ignore-files = "1.3.0"
itertools = "0.10.5"
once_cell = "1.18.0"
napi = { version = '2.12.6', default-features = false, features = ['anyhow', 'napi4', 'tokio_rt'] }
napi-derive = '2.9.3'
regex = "1.9.1"
rayon = "1.7.0"
thiserror = "1.0.40"
tokio = { version = "1.28.2", features = ["fs"] }

View File

@ -77,12 +77,6 @@
"version": "16.2.0-beta.0",
"description": "Remove outputPath from run commands",
"implementation": "./src/migrations/update-16-2-0/remove-run-commands-output-path"
},
"16.5.4-update-output-globs": {
"cli": "nx",
"version": "16.5.4-beta.0",
"description": "Update outdated non-standard globs to unix standard",
"implementation": "./src/migrations/update-16-5-4/update-output-globs"
}
}
}

View File

@ -1,77 +0,0 @@
import { createTreeWithEmptyWorkspace } from '../../generators/testing-utils/create-tree-with-empty-workspace';
import { TargetConfiguration } from '../../config/workspace-json-project-json';
import {
addProjectConfiguration,
readProjectConfiguration,
} from '../../generators/utils/project-configuration';
import updateOutputsGlobs from './update-output-globs';
import { readJson, updateJson } from '../../generators/utils/json';
import { NxJsonConfiguration } from '../../config/nx-json';
describe('update output globs', () => {
it('should update output globs', () => {
const tree = createTreeWithEmptyWorkspace();
const targets: Record<string, TargetConfiguration> = {
build: {
outputs: ['{options.outputPath}', 'dist/apps/my-app/*.(js|map|ts)'],
},
lint: {},
test: {
outputs: ['dist/apps/my-app/main.(js|map|ts)'],
},
run: {
outputs: ['dist/apps/my-app'],
},
};
addProjectConfiguration(tree, 'my-app', {
root: 'apps/my-app',
targets,
});
updateJson<NxJsonConfiguration>(tree, 'nx.json', (json) => {
json.targetDefaults = {
lint: {
outputs: ['dist/apps/my-app', '*.(js|map|ts)'],
},
};
return json;
});
updateOutputsGlobs(tree);
const migratedTargets = readProjectConfiguration(tree, 'my-app').targets;
expect(migratedTargets).toMatchInlineSnapshot(`
{
"build": {
"outputs": [
"{options.outputPath}",
"dist/apps/my-app/*.{js,map,ts}",
],
},
"lint": {},
"run": {
"outputs": [
"dist/apps/my-app",
],
},
"test": {
"outputs": [
"dist/apps/my-app/main.{js,map,ts}",
],
},
}
`);
const nxJson = readJson<NxJsonConfiguration>(tree, 'nx.json');
expect(nxJson.targetDefaults).toMatchInlineSnapshot(`
{
"lint": {
"outputs": [
"dist/apps/my-app",
"*.{js,map,ts}",
],
},
}
`);
});
});

View File

@ -1,54 +0,0 @@
import { Tree } from '../../generators/tree';
import {
getProjects,
updateProjectConfiguration,
} from '../../generators/utils/project-configuration';
import { formatChangedFilesWithPrettierIfAvailable } from '../../generators/internal-utils/format-changed-files-with-prettier-if-available';
import { TargetConfiguration } from '../../config/workspace-json-project-json';
import { updateJson } from '../../generators/utils/json';
import { NxJsonConfiguration } from '../../config/nx-json';
function replaceOutput(output: string) {
// replace {projectRoot}/folder/*.(js|map|ts) to {projectRoot}/folder/*.{js,map,ts}
const regex = /\(([^)]+)\)/g;
return output.replace(regex, (match, group1) => {
let replacements = group1.split('|').join(',');
return `{${replacements}}`;
});
}
export default async function updateOutputsGlobs(tree: Tree) {
for (const [projectName, projectConfiguration] of getProjects(
tree
).entries()) {
for (const [targetName, targetConfiguration] of Object.entries(
projectConfiguration.targets ?? {}
)) {
if (!Array.isArray(targetConfiguration.outputs)) {
continue;
}
targetConfiguration.outputs =
targetConfiguration.outputs.map(replaceOutput);
}
updateProjectConfiguration(tree, projectName, projectConfiguration);
}
if (tree.exists('nx.json')) {
updateJson<NxJsonConfiguration>(tree, 'nx.json', (json) => {
for (const [, targetConfiguration] of Object.entries(
json.targetDefaults ?? {}
)) {
if (!Array.isArray(targetConfiguration.outputs)) {
continue;
}
targetConfiguration.outputs =
targetConfiguration.outputs.map(replaceOutput);
}
return json;
});
}
await formatChangedFilesWithPrettierIfAvailable(tree);
}

View File

@ -19,7 +19,7 @@ pub fn expand_outputs(directory: String, entries: Vec<String>) -> anyhow::Result
return Ok(existing_paths);
}
let glob_set = build_glob_set(not_found)?;
let glob_set = build_glob_set(&not_found)?;
let found_paths = nx_walker_sync(directory)
.filter_map(|path| {
if glob_set.is_match(&path) {

View File

@ -43,7 +43,7 @@ fn hash_files_matching_globs(
directory: String,
glob_patterns: Vec<String>,
) -> anyhow::Result<Option<String>> {
let glob_set = build_glob_set(glob_patterns)?;
let glob_set = build_glob_set(&glob_patterns)?;
let mut hashes = nx_walker(directory, move |receiver| {
let mut collection: Vec<FileData> = Vec::new();

View File

@ -1,56 +1,260 @@
use globset::{GlobBuilder, GlobSet, GlobSetBuilder};
use once_cell::sync::Lazy;
use regex::Regex;
use std::fmt::Debug;
use std::path::Path;
use tracing::trace;
pub(crate) fn build_glob_set(globs: Vec<String>) -> anyhow::Result<GlobSet> {
let mut glob_set_builder = GlobSetBuilder::new();
/// Builder for an [`NxGlobSet`]: accumulates globs into two separate sets,
/// one for inclusions and one for exclusions (a glob with a leading `!` is
/// routed to the exclusion set by `add`).
pub struct NxGlobSetBuilder {
    included_globs: GlobSetBuilder,
    excluded_globs: GlobSetBuilder,
}
impl NxGlobSetBuilder {
    /// Creates a builder pre-populated with `globs`.
    ///
    /// Globs are sorted first so the resulting set does not depend on the
    /// caller's input order.
    pub fn new<S: AsRef<str>>(globs: &[S]) -> anyhow::Result<Self> {
        let mut glob_set_builder = NxGlobSetBuilder {
            included_globs: GlobSetBuilder::new(),
            excluded_globs: GlobSetBuilder::new(),
        };
        let mut globs: Vec<&str> = globs.iter().map(|s| s.as_ref()).collect();
        globs.sort();
        for glob in globs {
            glob_set_builder.add(glob)?;
        }
        Ok(glob_set_builder)
    }

    /// Adds a single glob to the set.
    ///
    /// A leading `!` marks the glob as an exclusion; a trailing `/` is
    /// expanded to `/**` so a directory glob also matches its contents.
    /// Returns an error if the glob fails to parse.
    pub fn add(&mut self, glob: &str) -> anyhow::Result<&mut NxGlobSetBuilder> {
        let negated = glob.starts_with('!');
        let glob_string = glob.strip_prefix('!').unwrap_or(glob).to_string();
        let glob_string = if glob_string.ends_with('/') {
            format!("{}**", glob_string)
        } else {
            glob_string
        };
        let glob = GlobBuilder::new(&glob_string)
            // `*` must not cross path separators; only `**` may.
            .literal_separator(true)
            .build()
            .map_err(anyhow::Error::from)?;
        if negated {
            self.excluded_globs.add(glob);
        } else {
            self.included_globs.add(glob);
        }
        Ok(self)
    }

    /// Builds the final [`NxGlobSet`] from the accumulated globs.
    pub fn build(&self) -> anyhow::Result<NxGlobSet> {
        Ok(NxGlobSet {
            excluded_globs: self.excluded_globs.build()?,
            included_globs: self.included_globs.build()?,
        })
    }
}
/// A compiled glob set with negation support: a path matches only when it is
/// accepted by at least one included glob and rejected by no excluded glob.
pub struct NxGlobSet {
    included_globs: GlobSet,
    excluded_globs: GlobSet,
}
impl NxGlobSet {
    /// Returns `true` when `path` matches the included globs and does not
    /// match any of the excluded globs.
    pub fn is_match<P: AsRef<Path>>(&self, path: P) -> bool {
        let path = path.as_ref();
        !self.excluded_globs.is_match(path) && self.included_globs.is_match(path)
    }
}
/// Compiles a list of glob patterns into an [`NxGlobSet`].
///
/// Each input glob is first expanded by [`convert_glob`] (one extglob-style
/// pattern may turn into several plain include/exclude globs), and the
/// expanded list is fed to [`NxGlobSetBuilder`].
pub(crate) fn build_glob_set<S: AsRef<str> + Debug>(globs: &[S]) -> anyhow::Result<NxGlobSet> {
    // NOTE: `result` is also the field name emitted by the trace event below.
    let mut result: Vec<String> = Vec::new();
    for glob in globs {
        result.extend(convert_glob(glob.as_ref())?);
    }
    trace!(?globs, ?result, "converted globs to result");
    NxGlobSetBuilder::new(&result)?.build()
}
// Matches a brace-converted negated segment such as the `!{cache}` in
// `path/!{cache}/**` (by the time this regex runs, `!(cache)` has already
// been rewritten to `!{cache}` by MULTI_PATTERNS_REGEX).
static NEGATIVE_DIR_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"!\{(.*?)}").unwrap());
// Matches a parenthesized alternation, e.g. `(subdir1|subdir2)` or `(js|ts)`
// in `path/**/(subdir1|subdir2)/*.(js|ts)`.
static MULTI_PATTERNS_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"\((.*?)\)").unwrap());
/// Expands one extglob-style pattern into a list of plain globs.
///
/// `(a|b)` alternations become brace sets `{a,b}`, and every `!(...)` segment
/// is split out into an extra negated glob, e.g.
/// `path/!(cache)/**` -> `["path/*/**", "!path/{cache}/**"]`.
fn convert_glob(glob: &str) -> anyhow::Result<Vec<String>> {
    // Fast path: nothing to expand, so the glob passes through untouched.
    if !(glob.contains('!') || glob.contains('|') || glob.contains('(')) {
        return Ok(vec![glob.to_string()]);
    }

    // Rewrite `(a|b)` -> `{a,b}` so globset understands the alternation.
    let glob = MULTI_PATTERNS_REGEX.replace_all(glob, |caps: &regex::Captures| {
        format!("{{{}}}", &caps[1].replace('|', ","))
    });

    // The inclusive glob wildcards every `!{...}` segment away.
    let mut globs: Vec<String> = vec![NEGATIVE_DIR_REGEX.replace_all(&glob, "*").into()];

    // One negated glob per `!{...}` occurrence.
    let matches: Vec<_> = NEGATIVE_DIR_REGEX.find_iter(&glob).collect();
    if matches.len() == 1 {
        // A single occurrence keeps its contents; just strip the `!` and
        // negate the whole pattern.
        globs.push(format!("!{}", glob.replace('!', "")));
    } else {
        for matched in matches {
            // Wildcard out this occurrence; the other occurrences keep their
            // contents in this negated variant.
            let expanded = glob.replace(matched.as_str(), "*");
            globs.push(format!("!{}", expanded.replace('!', "")));
        }
    }
    Ok(globs)
}
#[cfg(test)]
mod test {
    use super::*;
    use std::assert_eq;

    #[test]
    fn should_convert_globs() {
        let full_convert =
            convert_glob("dist/!(cache|cache2)/**/!(README|LICENSE).(js|ts)").unwrap();
        assert_eq!(
            full_convert,
            [
                "dist/*/**/*.{js,ts}",
                "!dist/*/**/{README,LICENSE}.{js,ts}",
                "!dist/{cache,cache2}/**/*.{js,ts}",
            ]
        );

        let no_dirs = convert_glob("dist/**/!(README|LICENSE).(js|ts)").unwrap();
        assert_eq!(
            no_dirs,
            ["dist/**/*.{js,ts}", "!dist/**/{README,LICENSE}.{js,ts}"]
        );

        let no_files = convert_glob("dist/!(cache|cache2)/**/*.(js|ts)").unwrap();
        assert_eq!(
            no_files,
            ["dist/*/**/*.{js,ts}", "!dist/{cache,cache2}/**/*.{js,ts}"]
        );

        let no_extensions = convert_glob("dist/!(cache|cache2)/**/*.js").unwrap();
        assert_eq!(
            no_extensions,
            ["dist/*/**/*.js", "!dist/{cache,cache2}/**/*.js"]
        );

        let no_patterns = convert_glob("dist/**/*.js").unwrap();
        assert_eq!(no_patterns, ["dist/**/*.js",]);
    }

    #[test]
    fn should_detect_package_json() {
        let glob_set = build_glob_set(&["packages/*/package.json"]).unwrap();
        assert!(glob_set.is_match("packages/nx/package.json"))
    }

    #[test]
    fn should_not_detect_deeply_nested_package_json() {
        let glob_set = build_glob_set(&["packages/*/package.json"]).unwrap();
        assert!(!glob_set.is_match("packages/nx/test-files/package.json"))
    }

    #[test]
    fn should_detect_deeply_nested_package_json() {
        let glob_set = build_glob_set(&["packages/**/package.json"]).unwrap();
        assert!(glob_set.is_match("packages/nx/test-files/package.json"))
    }

    #[test]
    fn should_detect_node_modules() {
        let glob_set = build_glob_set(&["**/node_modules"]).unwrap();
        assert!(glob_set.is_match("node_modules"));
        assert!(glob_set.is_match("packages/nx/node_modules"));
    }

    #[test]
    fn should_not_detect_root_plugin_configs() {
        let glob_set = build_glob_set(&["*/**/Cargo.toml"]).unwrap();
        assert!(glob_set.is_match("packages/a/Cargo.toml"));
        assert!(glob_set.is_match("a/Cargo.toml"));
        assert!(!glob_set.is_match("Cargo.toml"))
    }

    #[test]
    fn should_handle_negated_globs() {
        let glob_set = build_glob_set(&["!nested/ignore/", "nested/"]).unwrap();
        assert!(!glob_set.is_match("file.map"));
        assert!(!glob_set.is_match("nested/ignore/file.js"));
        assert!(!glob_set.is_match("another-nested/nested/file.ts"));
        assert!(glob_set.is_match("nested/file.js"));
        assert!(glob_set.is_match("nested/nested/file.ts"));

        let glob_set = build_glob_set(&["nested/", "!nested/*.{css,map}"]).unwrap();
        assert!(glob_set.is_match("nested/file.js"));
        assert!(glob_set.is_match("nested/file.ts"));
        assert!(!glob_set.is_match("nested/file.css"));
        assert!(!glob_set.is_match("nested/file.map"));

        let glob_set = build_glob_set(&["!nested/**/ignore/", "nested/**"]).unwrap();
        assert!(glob_set.is_match("nested/nested/file.js"));
        assert!(!glob_set.is_match("nested/ignore/file.ts"));
        assert!(!glob_set.is_match("nested/nested/ignore/file.ts"));
    }

    #[test]
    fn should_handle_multiple_globs() {
        let glob_set = build_glob_set(&["nested/", "doesnt-exist/"]).unwrap();
        assert!(glob_set.is_match("nested/file.js"));
        assert!(!glob_set.is_match("file.js"));
    }

    #[test]
    fn should_handle_complex_patterns() {
        // NOTE(review): a pattern with negated groups in BOTH a directory and
        // a file position is not supported yet; the cases below document the
        // intended behavior once it is:
        // let glob_set =
        //     build_glob_set(&["dist/!(cache|cache2)/**/!(README|LICENSE).(txt|md)"]).unwrap();
        //
        // // matches
        // assert!(glob_set.is_match("dist/nested/file.txt"));
        // assert!(glob_set.is_match("dist/nested/file.md"));
        // // no matches
        // assert!(!glob_set.is_match("dist/file.txt"));
        // assert!(!glob_set.is_match("dist/cache/nested/README.txt"));
        // assert!(!glob_set.is_match("dist/nested/LICENSE.md"));
        // assert!(!glob_set.is_match("dist/cache/file.txt"));
        // assert!(!glob_set.is_match("dist/cache2/file.txt"));
        // assert!(!glob_set.is_match("dist/cache2/README.txt"));
        // assert!(!glob_set.is_match("dist/LICENSE.md"));
        // assert!(!glob_set.is_match("dist/README.txt"));

        let glob_set = build_glob_set(&["dist/*.(js|ts)"]).unwrap();
        // matches
        assert!(glob_set.is_match("dist/file.js"));
        assert!(glob_set.is_match("dist/file.ts"));
        //no matches
        assert!(!glob_set.is_match("dist/file.txt"));
        assert!(!glob_set.is_match("dist/nested/file.js"));

        let glob_set = build_glob_set(&["dist/**/!(main).(js|ts)"]).unwrap();
        // matches
        assert!(glob_set.is_match("dist/file.js"));
        //no matches
        assert!(!glob_set.is_match("dist/main.js"));

        let glob_set = build_glob_set(&["dist/!(main|cache)/"]).unwrap();
        // matches
        assert!(glob_set.is_match("dist/nested/"));
        // no matches
        assert!(!glob_set.is_match("dist/main.js"));
        assert!(!glob_set.is_match("dist/file.js"));
        assert!(!glob_set.is_match("dist/cache/"));
        assert!(!glob_set.is_match("dist/main/"));
    }
}

View File

@ -17,11 +17,8 @@ where
{
let base_dir: PathBuf = directory.as_ref().into();
let ignore_glob_set = build_glob_set(vec![
String::from("**/node_modules"),
String::from("**/.git"),
])
.expect("These static ignores always build");
let ignore_glob_set =
build_glob_set(&["**/node_modules", "**/.git"]).expect("These static ignores always build");
// Use WalkDir instead of ignore::WalkBuilder because it's faster
WalkDir::new(&base_dir)
@ -47,11 +44,8 @@ where
let directory = directory.as_ref();
let nx_ignore = directory.join(".nxignore");
let ignore_glob_set = build_glob_set(vec![
String::from("**/node_modules"),
String::from("**/.git"),
])
.expect("These static ignores always build");
let ignore_glob_set =
build_glob_set(&["**/node_modules", "**/.git"]).expect("These static ignores always build");
let mut walker = WalkBuilder::new(directory);
walker.hidden(false);

View File

@ -2,8 +2,6 @@ use napi::bindgen_prelude::*;
use std::path::PathBuf;
use tracing::trace;
use watchexec_events::filekind::ModifyKind::Name;
use watchexec_events::filekind::RenameMode;
use watchexec_events::{Event, Tag};
#[napi(string_enum)]
@ -96,6 +94,8 @@ impl From<&Event> for WatchEventInternal {
#[cfg(not(target_os = "macos"))]
{
use watchexec_events::filekind::FileEventKind;
use watchexec_events::filekind::ModifyKind::Name;
use watchexec_events::filekind::RenameMode;
match event_kind {
FileEventKind::Create(_) => EventType::create,

View File

@ -1,7 +1,6 @@
use crate::native::utils::glob::build_glob_set;
use crate::native::utils::glob::{build_glob_set, NxGlobSet};
use crate::native::utils::path::Normalize;
use crate::native::walker::nx_walker;
use globset::GlobSet;
use napi::JsObject;
use std::collections::hash_map::Entry;
@ -19,7 +18,7 @@ pub fn get_project_configurations<ConfigurationParser>(
where
ConfigurationParser: Fn(Vec<String>) -> napi::Result<HashMap<String, JsObject>>,
{
let globs = build_glob_set(globs)?;
let globs = build_glob_set(&globs)?;
let config_paths: Vec<String> = nx_walker(workspace_root, move |rec| {
let mut config_paths: HashMap<PathBuf, PathBuf> = HashMap::new();
for (path, _) in rec {
@ -38,7 +37,7 @@ where
pub fn insert_config_file_into_map(
path: PathBuf,
config_paths: &mut HashMap<PathBuf, PathBuf>,
globs: &GlobSet,
globs: &NxGlobSet,
) {
if globs.is_match(&path) {
let parent = path.parent().unwrap_or_else(|| Path::new("")).to_path_buf();
@ -78,7 +77,7 @@ mod test {
#[test]
fn should_insert_config_files_properly() {
let mut config_paths: HashMap<PathBuf, PathBuf> = HashMap::new();
let globs = build_glob_set(vec!["**/*".into()]).unwrap();
let globs = build_glob_set(&["**/*"]).unwrap();
insert_config_file_into_map(PathBuf::from("project.json"), &mut config_paths, &globs);
insert_config_file_into_map(PathBuf::from("package.json"), &mut config_paths, &globs);

View File

@ -112,7 +112,7 @@ fn create_root_map(
type WorkspaceData = (HashSet<PathBuf>, Vec<FileData>);
fn get_file_data(workspace_root: &str, globs: Vec<String>) -> anyhow::Result<WorkspaceData> {
let globs = build_glob_set(globs)?;
let globs = build_glob_set(&globs)?;
let (projects, file_data) = nx_walker(workspace_root, move |rec| {
let mut projects: HashMap<PathBuf, PathBuf> = HashMap::new();
let mut file_hashes: Vec<FileData> = vec![];