feat(core): add sqlite db for nx (#26891)
## Current Behavior

Nx has some persistent storage managed as separate files on disk. For example, the local cache queries the file system for existing directories.

## Expected Behavior

Nx has new, more performant persistent storage via a SQLite database. The db is now used for the following purposes:

1. Storing task details for different hashes (this serves as a reference for other tables to look up more information about a hash)
2. Storing a record of cached artifacts
3. Storing a history of tasks which have run

The cache in particular has the following benefits:

* It's faster. YMMV, but it's definitely faster because it writes and reads less from disk.
* It's able to track access of different cached artifacts.
* It purges cached artifacts more intelligently, by looking at when artifacts were last ACCESSED rather than when they were CREATED. This also eliminates cache misses caused by artifacts being purged simply because they were created long ago, even if they are still accessed frequently.

## Related Issue(s)

Fixes #
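For orientation, the three storage purposes above map onto three new native classes declared in `packages/nx/src/native/index.d.ts` further down in this diff, all sharing one connection from `connectToNxDb`. A minimal consumer-side sketch (directory, version, and variable names here are illustrative, not from this PR):

```ts
import { connectToNxDb, NxCache, NxTaskHistory, TaskDetails } from './native';

// One SQLite connection per machine + Nx version, shared by every store.
const db = connectToNxDb('/path/to/cacheDir', '19.6.0'); // assumed arguments

const taskDetails = new TaskDetails(db); // 1. hash -> project/target/configuration
const cache = new NxCache('/path/to/workspace', '/path/to/cacheDir', db); // 2. cached artifacts
const taskHistory = new NxTaskHistory(db); // 3. history of task runs
```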
This commit is contained in:
parent 9269de7763
commit cade5bc671

.github/workflows/publish.yml (vendored, 5 changes)
@@ -397,7 +397,10 @@ jobs:
        run: ls -R artifacts
        shell: bash
      - name: Build Wasm
        run: pnpm build:wasm
        run: |
          wget https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-23/wasi-sdk-23.0-x86_64-linux.tar.gz
          tar -xvf wasi-sdk-23.0-x86_64-linux.tar.gz
          pnpm build:wasm
      - name: Publish
        env:
          VERSION: ${{ needs.resolve-required-data.outputs.version }}
.gitignore (vendored, 2 changes)
@@ -22,6 +22,7 @@ jest.debug.config.js
/graph/client/src/assets/generated-source-maps
/nx-dev/nx-dev/public/documentation
/nx-dev/nx-dev/public/images/open-graph
**/tests/temp-db

# Issues scraper creates these files, stored by github's cache
/scripts/issues-scraper/cached
@@ -58,3 +59,4 @@ out
.rustup/
target
*.wasm
/wasi-sdk*
Cargo.lock (generated, 173 changes)
@@ -171,6 +171,29 @@ dependencies = [
 "scoped-tls",
]

[[package]]
name = "bindgen"
version = "0.69.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0"
dependencies = [
 "bitflags 2.5.0",
 "cexpr",
 "clang-sys",
 "itertools",
 "lazy_static",
 "lazycell",
 "log",
 "prettyplease",
 "proc-macro2",
 "quote",
 "regex",
 "rustc-hash",
 "shlex",
 "syn 2.0.53",
 "which",
]

[[package]]
name = "bitflags"
version = "1.3.2"
@@ -249,12 +272,32 @@ version = "1.0.90"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5"

[[package]]
name = "cexpr"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
dependencies = [
 "nom",
]

[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

[[package]]
name = "clang-sys"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4"
dependencies = [
 "glob",
 "libc",
 "libloading",
]

[[package]]
name = "colored"
version = "2.1.0"
@@ -430,6 +473,18 @@ version = "2.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"

[[package]]
name = "fallible-iterator"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"

[[package]]
name = "fallible-streaming-iterator"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"

[[package]]
name = "faster-hex"
version = "0.9.0"
@@ -834,6 +889,12 @@ dependencies = [
 "thiserror",
]

[[package]]
name = "glob"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"

[[package]]
name = "globset"
version = "0.4.14"
@@ -879,6 +940,15 @@ dependencies = [
 "rkyv",
]

[[package]]
name = "hashlink"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
dependencies = [
 "hashbrown 0.14.5",
]

[[package]]
name = "hermit-abi"
version = "0.3.9"
@@ -1040,10 +1110,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"

[[package]]
name = "libc"
version = "0.2.153"
name = "lazycell"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"

[[package]]
name = "libc"
version = "0.2.155"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"

[[package]]
name = "libloading"
@@ -1055,6 +1131,17 @@ dependencies = [
 "windows-targets 0.52.6",
]

[[package]]
name = "libsqlite3-sys"
version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326"
dependencies = [
 "cc",
 "pkg-config",
 "vcpkg",
]

[[package]]
name = "linux-raw-sys"
version = "0.4.13"
@@ -1077,6 +1164,18 @@ version = "0.4.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"

[[package]]
name = "machine-uid"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a842fb2a43d7a19fe4a6eab96456c417aea97a166f2de0e82410df5bfd7e0cbc"
dependencies = [
 "bindgen",
 "cc",
 "libc",
 "winreg 0.52.0",
]

[[package]]
name = "matchers"
version = "0.1.0"
@@ -1376,6 +1475,7 @@ dependencies = [
 "ignore",
 "ignore-files 2.1.0",
 "itertools",
 "machine-uid",
 "mio",
 "napi",
 "napi-build",
@@ -1387,6 +1487,7 @@ dependencies = [
 "rayon",
 "regex",
 "rkyv",
 "rusqlite",
 "swc_common",
 "swc_ecma_ast",
 "swc_ecma_dep_graph",
@@ -1485,6 +1586,12 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"

[[package]]
name = "pkg-config"
version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec"

[[package]]
name = "portable-pty"
version = "0.8.1"
@@ -1502,7 +1609,7 @@ dependencies = [
 "shared_library",
 "shell-words",
 "winapi",
 "winreg",
 "winreg 0.10.1",
]

[[package]]
@@ -1550,6 +1657,16 @@ dependencies = [
 "termtree",
]

[[package]]
name = "prettyplease"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d3928fb5db768cb86f891ff014f0144589297e3c6a1aba6ed7cecfdace270c7"
dependencies = [
 "proc-macro2",
 "syn 2.0.53",
]

[[package]]
name = "proc-macro2"
version = "1.0.79"
@@ -1771,6 +1888,20 @@ dependencies = [
 "syn 1.0.109",
]

[[package]]
name = "rusqlite"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "549b9d036d571d42e6e85d1c1425e2ac83491075078ca9a15be021c56b1641f2"
dependencies = [
 "bitflags 2.5.0",
 "fallible-iterator",
 "fallible-streaming-iterator",
 "hashlink",
 "libsqlite3-sys",
 "smallvec",
]

[[package]]
name = "rustc-demangle"
version = "0.1.23"
@@ -1922,6 +2053,12 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"

[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"

[[package]]
name = "signal-hook"
version = "0.3.17"
@@ -2518,6 +2655,12 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"

[[package]]
name = "vcpkg"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"

[[package]]
name = "version_check"
version = "0.9.4"
@@ -2619,6 +2762,18 @@ dependencies = [
 "watchexec-signals",
]

[[package]]
name = "which"
version = "4.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7"
dependencies = [
 "either",
 "home",
 "once_cell",
 "rustix",
]

[[package]]
name = "winapi"
version = "0.3.9"
@@ -2807,6 +2962,16 @@ dependencies = [
 "winapi",
]

[[package]]
name = "winreg"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5"
dependencies = [
 "cfg-if",
 "windows-sys 0.48.0",
]

[[package]]
name = "wyz"
version = "0.5.1"
@@ -22,7 +22,7 @@
    "preinstall": "node ./scripts/preinstall.js",
    "test": "nx run-many -t test",
    "e2e": "nx run-many -t e2e --projects ./e2e/*",
    "build:wasm": "rustup override set nightly-2024-07-04 && rustup target add wasm32-wasip1-threads && pnpm exec nx run-many -t build-native-wasm && rustup override unset"
    "build:wasm": "rustup override set nightly-2024-07-19 && rustup target add wasm32-wasip1-threads && WASI_SDK_PATH=\"$(pwd)/wasi-sdk-23.0-x86_64-linux\" CMAKE_BUILD_PARALLEL_LEVEL=2 LIBSQLITE3_FLAGS=\"-DLONGDOUBLE_TYPE=double\" pnpm exec nx run-many -t build-native-wasm && rustup override unset"
  },
  "devDependencies": {
    "@actions/core": "^1.10.0",
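A note on the new `build:wasm` environment variables (my reading; the PR does not spell this out): `wasm32-wasip1-threads` has no system SQLite, so rusqlite's `bundled` feature compiles SQLite's C sources using the wasi-sdk clang toolchain pointed to by `WASI_SDK_PATH`; `LIBSQLITE3_FLAGS="-DLONGDOUBLE_TYPE=double"` presumably sidesteps wasm's lack of a native `long double`, and `CMAKE_BUILD_PARALLEL_LEVEL=2` caps build parallelism on CI runners.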
@@ -34,6 +34,7 @@ nom = '7.1.3'
regex = "1.9.1"
rayon = "1.7.0"
rkyv = { version = "0.7", features = ["validation"] }
rusqlite = { version = "0.29.0", features = ["bundled", "array", "vtab", "wasm32-wasi-vfs"] }
thiserror = "1.0.40"
tracing = "0.1.37"
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
@@ -58,6 +59,7 @@ watchexec = "3.0.1"
watchexec-events = "2.0.1"
watchexec-filterer-ignore = "3.0.0"
watchexec-signals = "2.1.0"
machine-uid = "0.5.2"

[lib]
crate-type = ['cdylib']
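On the rusqlite feature set (inferred from how the features are used later in this diff): `bundled` compiles SQLite from vendored C sources instead of linking a system library; `array` and `vtab` provide the virtual-table support that `task_history.rs` loads via `array::load_module`; and `wasm32-wasi-vfs` is there for the wasm build.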
@@ -28,7 +28,7 @@ import {
  DaemonProjectGraphError,
  ProjectGraphError,
} from '../../project-graph/error-types';
import { IS_WASM, NxWorkspaceFiles } from '../../native';
import { IS_WASM, NxWorkspaceFiles, TaskRun } from '../../native';
import { HandleGlobMessage } from '../message-types/glob';
import {
  GET_NX_WORKSPACE_FILES,
@@ -43,10 +43,11 @@ import {
  HandleGetFilesInDirectoryMessage,
} from '../message-types/get-files-in-directory';
import { HASH_GLOB, HandleHashGlobMessage } from '../message-types/hash-glob';
import { TaskRun } from '../../utils/task-history';
import {
  HandleGetTaskHistoryForHashesMessage,
  HandleWriteTaskRunsToHistoryMessage,
  GET_FLAKY_TASKS,
  HandleGetFlakyTasks,
  HandleRecordTaskRunsMessage,
  RECORD_TASK_RUNS,
} from '../message-types/task-history';
import { FORCE_SHUTDOWN } from '../message-types/force-shutdown';
import {
@@ -341,20 +342,18 @@ export class DaemonClient {
    return this.sendToDaemonViaQueue(message);
  }

  getTaskHistoryForHashes(hashes: string[]): Promise<{
    [hash: string]: TaskRun[];
  }> {
    const message: HandleGetTaskHistoryForHashesMessage = {
      type: 'GET_TASK_HISTORY_FOR_HASHES',
  getFlakyTasks(hashes: string[]): Promise<string[]> {
    const message: HandleGetFlakyTasks = {
      type: GET_FLAKY_TASKS,
      hashes,
    };

    return this.sendToDaemonViaQueue(message);
  }

  writeTaskRunsToHistory(taskRuns: TaskRun[]): Promise<void> {
    const message: HandleWriteTaskRunsToHistoryMessage = {
      type: 'WRITE_TASK_RUNS_TO_HISTORY',
  recordTaskRuns(taskRuns: TaskRun[]): Promise<void> {
    const message: HandleRecordTaskRunsMessage = {
      type: RECORD_TASK_RUNS,
      taskRuns,
    };
    return this.sendMessageToDaemon(message);
@@ -1,38 +1,37 @@
import { TaskRun } from '../../utils/task-history';
import type { TaskRun } from '../../native';

export const GET_TASK_HISTORY_FOR_HASHES =
  'GET_TASK_HISTORY_FOR_HASHES' as const;
export const GET_FLAKY_TASKS = 'GET_FLAKY_TASKS' as const;

export type HandleGetTaskHistoryForHashesMessage = {
  type: typeof GET_TASK_HISTORY_FOR_HASHES;
export type HandleGetFlakyTasks = {
  type: typeof GET_FLAKY_TASKS;
  hashes: string[];
};

export function isHandleGetTaskHistoryForHashesMessage(
export function isHandleGetFlakyTasksMessage(
  message: unknown
): message is HandleGetTaskHistoryForHashesMessage {
): message is HandleGetFlakyTasks {
  return (
    typeof message === 'object' &&
    message !== null &&
    'type' in message &&
    message['type'] === GET_TASK_HISTORY_FOR_HASHES
    message['type'] === GET_FLAKY_TASKS
  );
}

export const WRITE_TASK_RUNS_TO_HISTORY = 'WRITE_TASK_RUNS_TO_HISTORY' as const;
export const RECORD_TASK_RUNS = 'RECORD_TASK_RUNS' as const;

export type HandleWriteTaskRunsToHistoryMessage = {
  type: typeof WRITE_TASK_RUNS_TO_HISTORY;
export type HandleRecordTaskRunsMessage = {
  type: typeof RECORD_TASK_RUNS;
  taskRuns: TaskRun[];
};

export function isHandleWriteTaskRunsToHistoryMessage(
  message: unknown
): message is HandleWriteTaskRunsToHistoryMessage {
): message is HandleRecordTaskRunsMessage {
  return (
    typeof message === 'object' &&
    message !== null &&
    'type' in message &&
    message['type'] === WRITE_TASK_RUNS_TO_HISTORY
    message['type'] === RECORD_TASK_RUNS
  );
}
@@ -1,9 +0,0 @@
import { getHistoryForHashes } from '../../utils/task-history';

export async function handleGetTaskHistoryForHashes(hashes: string[]) {
  const history = await getHistoryForHashes(hashes);
  return {
    response: JSON.stringify(history),
    description: 'handleGetTaskHistoryForHashes',
  };
}
packages/nx/src/daemon/server/handle-task-history.ts (new file, 29 changes)
@@ -0,0 +1,29 @@
import { TaskRun } from '../../native';
import { TaskHistory } from '../../utils/task-history';

let taskHistory: TaskHistory;

function getTaskHistory() {
  if (!taskHistory) {
    taskHistory = new TaskHistory();
  }
  return taskHistory;
}

export async function handleRecordTaskRuns(taskRuns: TaskRun[]) {
  const taskHistory = getTaskHistory();
  await taskHistory.recordTaskRuns(taskRuns);
  return {
    response: 'true',
    description: 'handleRecordTaskRuns',
  };
}

export async function handleGetFlakyTasks(hashes: string[]) {
  const taskHistory = getTaskHistory();
  const history = await taskHistory.getFlakyTasks(hashes);
  return {
    response: JSON.stringify(history),
    description: 'handleGetFlakyTasks',
  };
}
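The handler above returns the flaky hashes as a JSON string, which `DaemonClient.getFlakyTasks` surfaces as `Promise<string[]>`. A small consumer-side sketch of how that might be used; the partitioning helper is illustrative, not part of this PR, and "flaky" presumably means that runs with the same hash have produced different results:

```ts
import { daemonClient } from '../client/daemon-client'; // path assumed

// Split hashes into flaky and stable sets, e.g. to decide which tasks
// are worth retrying rather than trusting a single failed result.
async function partitionByFlakiness(hashes: string[]) {
  const flaky = new Set(await daemonClient.getFlakyTasks(hashes));
  return {
    flaky: hashes.filter((h) => flaky.has(h)),
    stable: hashes.filter((h) => !flaky.has(h)),
  };
}
```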
@@ -1,9 +0,0 @@
import { TaskRun, writeTaskRunsToHistory } from '../../utils/task-history';

export async function handleWriteTaskRunsToHistory(taskRuns: TaskRun[]) {
  await writeTaskRunsToHistory(taskRuns);
  return {
    response: 'true',
    description: 'handleWriteTaskRunsToHistory',
  };
}
@@ -74,11 +74,13 @@ import { handleGetFilesInDirectory } from './handle-get-files-in-directory';
import { HASH_GLOB, isHandleHashGlobMessage } from '../message-types/hash-glob';
import { handleHashGlob } from './handle-hash-glob';
import {
  isHandleGetTaskHistoryForHashesMessage,
  isHandleGetFlakyTasksMessage,
  isHandleWriteTaskRunsToHistoryMessage,
} from '../message-types/task-history';
import { handleGetTaskHistoryForHashes } from './handle-get-task-history';
import { handleWriteTaskRunsToHistory } from './handle-write-task-runs-to-history';
import {
  handleRecordTaskRuns,
  handleGetFlakyTasks,
} from './handle-task-history';
import { isHandleForceShutdownMessage } from '../message-types/force-shutdown';
import { handleForceShutdown } from './handle-force-shutdown';
import {
@@ -237,13 +239,13 @@ async function handleMessage(socket, data: string) {
    await handleResult(socket, HASH_GLOB, () =>
      handleHashGlob(payload.globs, payload.exclude)
    );
  } else if (isHandleGetTaskHistoryForHashesMessage(payload)) {
  } else if (isHandleGetFlakyTasksMessage(payload)) {
    await handleResult(socket, 'GET_TASK_HISTORY_FOR_HASHES', () =>
      handleGetTaskHistoryForHashes(payload.hashes)
      handleGetFlakyTasks(payload.hashes)
    );
  } else if (isHandleWriteTaskRunsToHistoryMessage(payload)) {
    await handleResult(socket, 'WRITE_TASK_RUNS_TO_HISTORY', () =>
      handleWriteTaskRunsToHistory(payload.taskRuns)
      handleRecordTaskRuns(payload.taskRuns)
    );
  } else if (isHandleForceShutdownMessage(payload)) {
    await handleResult(socket, 'FORCE_SHUTDOWN', () =>
@@ -5,6 +5,21 @@ import { getInputs, TaskHasher } from './task-hasher';
import { ProjectGraph } from '../config/project-graph';
import { NxJsonConfiguration } from '../config/nx-json';
import { readNxJson } from '../config/nx-json';
import { IS_WASM, TaskDetails } from '../native';
import { getDbConnection } from '../utils/db-connection';

let taskDetails: TaskDetails;

function getTaskDetails() {
  // TODO: Remove when wasm supports sqlite
  if (IS_WASM) {
    return null;
  }
  if (!taskDetails) {
    taskDetails = new TaskDetails(getDbConnection());
  }
  return taskDetails;
}

export async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(
  hasher: TaskHasher,
@@ -13,6 +28,9 @@ export async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(
  nxJson: NxJsonConfiguration
) {
  performance.mark('hashMultipleTasks:start');

  const taskDetails = getTaskDetails();

  const tasks = Object.values(taskGraph.tasks);
  const tasksWithHashers = await Promise.all(
    tasks.map(async (task) => {
@@ -35,11 +53,23 @@
    })
    .map((t) => t.task);

  const hashes = await hasher.hashTasks(tasksToHash, taskGraph);
  const hashes = await hasher.hashTasks(tasksToHash, taskGraph, process.env);
  for (let i = 0; i < tasksToHash.length; i++) {
    tasksToHash[i].hash = hashes[i].value;
    tasksToHash[i].hashDetails = hashes[i].details;
  }
  // TODO: Remove if when wasm supports sqlite
  if (taskDetails) {
    taskDetails.recordTaskDetails(
      tasksToHash.map((task) => ({
        hash: task.hash,
        project: task.target.project,
        target: task.target.target,
        configuration: task.target.configuration,
      }))
    );
  }

  performance.mark('hashMultipleTasks:end');
  performance.measure(
    'hashMultipleTasks',
@@ -56,9 +86,12 @@ export async function hashTask(
  env: NodeJS.ProcessEnv
) {
  performance.mark('hashSingleTask:start');

  const taskDetails = getTaskDetails();
  const customHasher = getCustomHasher(task, projectGraph);
  const projectsConfigurations =
    readProjectsConfigurationFromProjectGraph(projectGraph);

  const { value, details } = await (customHasher
    ? customHasher(task, {
        hasher,
@@ -72,6 +105,19 @@
    : hasher.hashTask(task, taskGraph, env));
  task.hash = value;
  task.hashDetails = details;

  // TODO: Remove if when wasm supports sqlite
  if (taskDetails) {
    taskDetails.recordTaskDetails([
      {
        hash: task.hash,
        project: task.target.project,
        target: task.target.target,
        configuration: task.target.configuration,
      },
    ]);
  }

  performance.mark('hashSingleTask:end');
  performance.measure(
    'hashSingleTask',
packages/nx/src/native/cache/cache.rs (vendored, new file, 227 changes)
@@ -0,0 +1,227 @@
use std::fs::{create_dir_all, read_to_string, write};
use std::path::{Path, PathBuf};
use std::time::Instant;

use fs_extra::remove_items;
use napi::bindgen_prelude::*;
use rusqlite::{params, Connection, OptionalExtension};
use tracing::trace;

use crate::native::cache::expand_outputs::_expand_outputs;
use crate::native::cache::file_ops::_copy;
use crate::native::machine_id::get_machine_id;
use crate::native::utils::Normalize;

#[napi(object)]
#[derive(Default, Clone, Debug)]
pub struct CachedResult {
    pub code: i16,
    pub terminal_output: String,
    pub outputs_path: String,
}

#[napi]
pub struct NxCache {
    pub cache_directory: String,
    workspace_root: PathBuf,
    cache_path: PathBuf,
    db: External<Connection>,
}

#[napi]
impl NxCache {
    #[napi(constructor)]
    pub fn new(
        workspace_root: String,
        cache_path: String,
        db_connection: External<Connection>,
    ) -> anyhow::Result<Self> {
        let machine_id = get_machine_id();
        let cache_path = PathBuf::from(&cache_path).join(machine_id);

        create_dir_all(&cache_path)?;
        create_dir_all(cache_path.join("terminalOutputs"))?;

        let r = Self {
            db: db_connection,
            workspace_root: PathBuf::from(workspace_root),
            cache_directory: cache_path.to_normalized_string(),
            cache_path,
        };

        r.setup()?;

        Ok(r)
    }

    fn setup(&self) -> anyhow::Result<()> {
        self.db
            .execute_batch(
                "BEGIN;
                CREATE TABLE IF NOT EXISTS cache_outputs (
                    hash TEXT PRIMARY KEY NOT NULL,
                    code INTEGER NOT NULL,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    accessed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (hash) REFERENCES task_details (hash)
                );
                COMMIT;
                ",
            )
            .map_err(anyhow::Error::from)
    }

    #[napi]
    pub fn get(&mut self, hash: String) -> anyhow::Result<Option<CachedResult>> {
        let start = Instant::now();
        trace!("GET {}", &hash);
        let task_dir = self.cache_path.join(&hash);

        let terminal_output_path = self.get_task_outputs_path_internal(&hash);

        let r = self
            .db
            .query_row(
                "UPDATE cache_outputs
                SET accessed_at = CURRENT_TIMESTAMP
                WHERE hash = ?1
                RETURNING code",
                params![hash],
                |row| {
                    let code: i16 = row.get(0)?;

                    let start = Instant::now();
                    let terminal_output =
                        read_to_string(terminal_output_path).unwrap_or(String::from(""));
                    trace!("TIME reading terminal outputs {:?}", start.elapsed());

                    Ok(CachedResult {
                        code,
                        terminal_output,
                        outputs_path: task_dir.to_normalized_string(),
                    })
                },
            )
            .optional()
            .map_err(anyhow::Error::new)?;
        trace!("GET {} {:?}", &hash, start.elapsed());
        Ok(r)
    }

    #[napi]
    pub fn put(
        &mut self,
        hash: String,
        terminal_output: String,
        outputs: Vec<String>,
        code: i16,
    ) -> anyhow::Result<()> {
        let task_dir = self.cache_path.join(&hash);

        // Remove the task directory
        remove_items(&[&task_dir])?;
        // Create the task directory again
        create_dir_all(&task_dir)?;

        // Write the terminal outputs into a file
        write(self.get_task_outputs_path_internal(&hash), terminal_output)?;

        // Expand the outputs
        let expanded_outputs = _expand_outputs(&self.workspace_root, outputs)?;

        // Copy the outputs to the cache
        for expanded_output in expanded_outputs.iter() {
            let p = self.workspace_root.join(expanded_output);
            if p.exists() {
                let cached_outputs_dir = task_dir.join(expanded_output);
                _copy(p, cached_outputs_dir)?;
            }
        }

        self.record_to_cache(hash, code)?;
        Ok(())
    }

    #[napi]
    pub fn apply_remote_cache_results(&self, hash: String, result: CachedResult) -> anyhow::Result<()> {
        let terminal_output = result.terminal_output;
        write(self.get_task_outputs_path(hash.clone()), terminal_output)?;

        let code: i16 = result.code;
        self.record_to_cache(hash, code)?;
        Ok(())
    }

    fn get_task_outputs_path_internal(&self, hash: &str) -> PathBuf {
        self.cache_path
            .join("terminalOutputs")
            .join(hash)
    }

    #[napi]
    pub fn get_task_outputs_path(&self, hash: String) -> String {
        self.get_task_outputs_path_internal(&hash).to_normalized_string()
    }

    fn record_to_cache(&self, hash: String, code: i16) -> anyhow::Result<()> {
        self.db.execute(
            "INSERT INTO cache_outputs
                (hash, code)
            VALUES (?1, ?2)",
            params![hash, code],
        )?;
        Ok(())
    }

    #[napi]
    pub fn copy_files_from_cache(
        &self,
        cached_result: CachedResult,
        outputs: Vec<String>,
    ) -> anyhow::Result<()> {
        let outputs_path = Path::new(&cached_result.outputs_path);

        let expanded_outputs = _expand_outputs(outputs_path, outputs)?;

        trace!("Removing expanded outputs: {:?}", &expanded_outputs);
        remove_items(
            expanded_outputs
                .iter()
                .map(|p| self.workspace_root.join(p))
                .collect::<Vec<_>>()
                .as_slice(),
        )?;

        trace!("Copying Files from Cache {:?} -> {:?}", &outputs_path, &self.workspace_root);
        _copy(
            outputs_path,
            &self.workspace_root,
        )?;

        Ok(())
    }

    #[napi]
    pub fn remove_old_cache_records(&self) -> anyhow::Result<()> {
        let outdated_cache = self
            .db
            .prepare(
                "DELETE FROM cache_outputs WHERE accessed_at < datetime('now', '-7 days') RETURNING hash",
            )?
            .query_map(params![], |row| {
                let hash: String = row.get(0)?;

                Ok(vec![
                    self.cache_path.join(&hash),
                    self.get_task_outputs_path_internal(&hash).into(),
                ])
            })?
            .filter_map(anyhow::Result::ok)
            .flatten()
            .collect::<Vec<_>>();

        remove_items(&outdated_cache)?;

        Ok(())
    }
}
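Taken together: `get` doubles as the access-time bump (`UPDATE ... SET accessed_at ... RETURNING code`), and `remove_old_cache_records` is the purge. A minimal sketch of the lifecycle from the JS side, using the `NxCache` declarations that appear in `index.d.ts` below; the hash, outputs, and exit code are illustrative, and `cache` is the instance from the construction sketch near the top of this PR:

```ts
const taskHash = '16934725346785'; // illustrative task hash
const cached = cache.get(taskHash);
if (cached) {
  // Hit: accessed_at was just bumped, so this artifact stays out of the
  // purge window for another 7 days.
  cache.copyFilesFromCache(cached, ['dist/apps/app1']);
} else {
  // Miss: run the task, then record terminal output, outputs and exit code.
  cache.put(taskHash, terminalOutput, ['dist/apps/app1'], 0);
}
// Delete records (and their on-disk artifacts) not accessed for 7 days.
cache.removeOldCacheRecords();
```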
packages/nx/src/native/cache/expand_outputs.rs (vendored, 13 changes)
@@ -1,4 +1,4 @@
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use tracing::trace;

use crate::native::glob::{build_glob_set, contains_glob_pattern};
@@ -6,10 +6,17 @@ use crate::native::utils::Normalize;
use crate::native::walker::nx_walker_sync;

#[napi]
pub fn expand_outputs(directory: String, entries: Vec<String>) -> anyhow::Result<Vec<String>> {
    _expand_outputs(directory, entries)
}

/// Expands the given entries into a list of existing directories and files.
/// This is used for copying outputs to and from the cache
pub fn expand_outputs(directory: String, entries: Vec<String>) -> anyhow::Result<Vec<String>> {
    let directory: PathBuf = directory.into();
pub fn _expand_outputs<P>(directory: P, entries: Vec<String>) -> anyhow::Result<Vec<String>>
where
    P: AsRef<Path>,
{
    let directory: PathBuf = directory.as_ref().into();

    let has_glob_pattern = entries.iter().any(|entry| contains_glob_pattern(entry));
packages/nx/src/native/cache/file_ops.rs (vendored, 8 changes)
@@ -13,10 +13,14 @@ pub fn remove(src: String) -> anyhow::Result<()> {

#[napi]
pub fn copy(src: String, dest: String) -> anyhow::Result<()> {
    let dest: PathBuf = dest.into();
    _copy(src, dest)
}

pub fn _copy<P>(src: P, dest: P) -> anyhow::Result<()> where P: AsRef<Path> {
    let dest: PathBuf = dest.as_ref().into();
    let dest_parent = dest.parent().unwrap_or(&dest);

    let src: PathBuf = src.into();
    let src: PathBuf = src.as_ref().into();

    if !dest_parent.exists() {
        fs::create_dir_all(dest_parent)?;
packages/nx/src/native/cache/mod.rs (vendored, 1 change)
@@ -1,2 +1,3 @@
pub mod expand_outputs;
pub mod file_ops;
pub mod cache;
packages/nx/src/native/db/mod.rs (new file, 66 changes)
@@ -0,0 +1,66 @@
use std::fs::{create_dir_all, remove_file};
use std::path::PathBuf;

use napi::bindgen_prelude::External;
use rusqlite::Connection;

use crate::native::machine_id::get_machine_id;

#[napi]
pub fn connect_to_nx_db(
    cache_dir: String,
    nx_version: String,
) -> anyhow::Result<External<Connection>> {
    let machine_id = get_machine_id();
    let cache_dir_buf = PathBuf::from(cache_dir);
    let db_path = cache_dir_buf.join(format!("{}.db", machine_id));
    create_dir_all(cache_dir_buf)?;

    let c = create_connection(&db_path)?;

    let db_version = c.query_row(
        "SELECT value FROM metadata WHERE key='NX_VERSION'",
        [],
        |row| {
            let r: String = row.get(0)?;
            Ok(r)
        },
    );

    let c = match db_version {
        Ok(version) if version == nx_version => c,
        _ => {
            c.close().map_err(|(_, error)| anyhow::Error::from(error))?;
            remove_file(&db_path)?;

            create_connection(&db_path)?
        }
    };

    c.execute(
        "CREATE TABLE IF NOT EXISTS metadata (
            key TEXT NOT NULL PRIMARY KEY,
            value TEXT NOT NULL
        )",
        [],
    )?;

    c.execute(
        "INSERT OR REPLACE INTO metadata (key, value) VALUES ('NX_VERSION', ?)",
        [nx_version],
    )?;

    Ok(External::new(c))
}

fn create_connection(db_path: &PathBuf) -> anyhow::Result<Connection> {
    let c = Connection::open(db_path).map_err(anyhow::Error::from)?;

    // This allows writes at the same time as reads
    c.pragma_update(None, "journal_mode", "WAL")?;

    // This makes things less synchronous than default
    c.pragma_update(None, "synchronous", "NORMAL")?;

    Ok(c)
}
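`connect_to_nx_db` is exposed to TypeScript as `connectToNxDb` (see `index.d.ts` below). The hashing changes earlier in this diff import `getDbConnection` from `../utils/db-connection`, whose body is not shown here; given the declaration, it is presumably a per-process memoized wrapper along these lines (a sketch only; both helper imports are assumptions):

```ts
import { connectToNxDb } from '../native';
import { cacheDir } from './cache-directory';       // assumed helper
import { nxVersion } from '../utils/versions';      // assumed helper

let db: ReturnType<typeof connectToNxDb>;

// One connection per process; connectToNxDb itself recreates the db file
// whenever the stored NX_VERSION does not match the current version.
export function getDbConnection() {
  db ??= connectToNxDb(cacheDir, nxVersion);
  return db;
}
```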
packages/nx/src/native/index.d.ts (vendored, 49 changes)
@@ -26,6 +26,23 @@ export declare class ImportResult {
  staticImportExpressions: Array<string>
}

export declare class NxCache {
  cacheDirectory: string
  constructor(workspaceRoot: string, cachePath: string, dbConnection: ExternalObject<Connection>)
  get(hash: string): CachedResult | null
  put(hash: string, terminalOutput: string, outputs: Array<string>, code: number): void
  applyRemoteCacheResults(hash: string, result: CachedResult): void
  getTaskOutputsPath(hash: string): string
  copyFilesFromCache(cachedResult: CachedResult, outputs: Array<string>): void
  removeOldCacheRecords(): void
}

export declare class NxTaskHistory {
  constructor(db: ExternalObject<Connection>)
  recordTaskRuns(taskRuns: Array<TaskRun>): void
  getFlakyTasks(hashes: Array<string>): Array<string>
}

export declare class RustPseudoTerminal {
  constructor()
  runCommand(command: string, commandDir?: string | undefined | null, jsEnv?: Record<string, string> | undefined | null, execArgv?: Array<string> | undefined | null, quiet?: boolean | undefined | null, tty?: boolean | undefined | null): ChildProcess
@@ -36,6 +53,11 @@ export declare class RustPseudoTerminal {
  fork(id: string, forkScript: string, pseudoIpcPath: string, commandDir: string | undefined | null, jsEnv: Record<string, string> | undefined | null, execArgv: Array<string> | undefined | null, quiet: boolean): ChildProcess
}

export declare class TaskDetails {
  constructor(db: ExternalObject<Connection>)
  recordTaskDetails(tasks: Array<HashedTask>): void
}

export declare class TaskHasher {
  constructor(workspaceRoot: string, projectGraph: ExternalObject<ProjectGraph>, projectFileMap: ExternalObject<ProjectFiles>, allWorkspaceFiles: ExternalObject<Array<FileData>>, tsConfig: Buffer, tsConfigPaths: Record<string, Array<string>>, options?: HasherOptions | undefined | null)
  hashPlans(hashPlans: ExternalObject<Record<string, Array<HashInstruction>>>, jsEnv: Record<string, string>): NapiDashMap
@@ -67,6 +89,14 @@ export declare class WorkspaceContext {
  getFilesInDirectory(directory: string): Array<string>
}

export interface CachedResult {
  code: number
  terminalOutput: string
  outputsPath: string
}

export declare function connectToNxDb(cacheDir: string, nxVersion: string): ExternalObject<Connection>

export declare function copy(src: string, dest: string): void

export interface DepsOutputsInput {
@@ -84,10 +114,6 @@ export declare const enum EventType {
  create = 'create'
}

/**
 * Expands the given entries into a list of existing directories and files.
 * This is used for copying outputs to and from the cache
 */
export declare function expandOutputs(directory: string, entries: Array<string>): Array<string>

export interface ExternalDependenciesInput {
@@ -131,6 +157,13 @@ export interface HashDetails {
  details: Record<string, string>
}

export interface HashedTask {
  hash: string
  project: string
  target: string
  configuration?: string
}

export interface HasherOptions {
  selectivelyHashTsConfig: boolean
}
@@ -203,6 +236,14 @@ export interface TaskGraph {
  dependencies: Record<string, Array<string>>
}

export interface TaskRun {
  hash: string
  status: string
  code: number
  start: number
  end: number
}

export interface TaskTarget {
  project: string
  target: string
packages/nx/src/native/machine_id/mod.rs (new file, 32 changes)
@@ -0,0 +1,32 @@
pub fn get_machine_id() -> String {
    #[cfg(not(target_arch = "wasm32"))]
    return machine_uid::get().unwrap_or(String::from("machine"));

    #[cfg(target_arch = "wasm32")]
    {
        use crate::native::hasher::hash;
        use crate::native::tasks::hashers::create_command_builder;
        use std::fs::read_to_string;

        hash(
            read_to_string("/var/lib/dbus/machine-id")
                .or_else(|_| read_to_string("/etc/machine-id"))
                .or_else(|_| {
                    let mut command_builder = create_command_builder();

                    command_builder.arg("hostname");

                    std::str::from_utf8(
                        &command_builder
                            .output()
                            .map_err(|_| anyhow::anyhow!("Failed to get hostname"))?
                            .stdout,
                    )
                    .map_err(anyhow::Error::from)
                    .map(|s| s.trim().to_string())
                })
                .unwrap_or(String::from("machine"))
                .as_bytes(),
        )
    }
}
@@ -10,8 +10,11 @@ mod types;
mod utils;
mod walker;
pub mod workspace;
mod machine_id;

#[cfg(not(target_arch = "wasm32"))]
pub mod pseudo_terminal;
#[cfg(not(target_arch = "wasm32"))]
pub mod watch;
#[cfg(not(target_arch = "wasm32"))]
pub mod db;
@@ -364,10 +364,14 @@ if (!nativeBinding) {
module.exports.ChildProcess = nativeBinding.ChildProcess
module.exports.HashPlanner = nativeBinding.HashPlanner
module.exports.ImportResult = nativeBinding.ImportResult
module.exports.NxCache = nativeBinding.NxCache
module.exports.NxTaskHistory = nativeBinding.NxTaskHistory
module.exports.RustPseudoTerminal = nativeBinding.RustPseudoTerminal
module.exports.TaskDetails = nativeBinding.TaskDetails
module.exports.TaskHasher = nativeBinding.TaskHasher
module.exports.Watcher = nativeBinding.Watcher
module.exports.WorkspaceContext = nativeBinding.WorkspaceContext
module.exports.connectToNxDb = nativeBinding.connectToNxDb
module.exports.copy = nativeBinding.copy
module.exports.EventType = nativeBinding.EventType
module.exports.expandOutputs = nativeBinding.expandOutputs
@@ -14,7 +14,7 @@ const __wasi = new __WASI({
const __emnapiContext = __emnapiGetDefaultContext()

const __sharedMemory = new WebAssembly.Memory({
  initial: 16384,
  initial: 1024,
  maximum: 32768,
  shared: true,
})
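The shared-memory change here (and in the node wasm binding below) is easy to misread: `initial` and `maximum` are measured in 64 KiB wasm pages, so the up-front allocation drops from 16384 pages (1 GiB) to 1024 pages (64 MiB), while the 32768-page (2 GiB) ceiling is unchanged.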
@@ -55,49 +55,63 @@ function __napi_rs_initialize_modules(__napiInstance) {
  __napiInstance.exports['__napi_register__get_files_for_outputs_1']?.()
  __napiInstance.exports['__napi_register__remove_2']?.()
  __napiInstance.exports['__napi_register__copy_3']?.()
  __napiInstance.exports['__napi_register__hash_array_4']?.()
  __napiInstance.exports['__napi_register__hash_file_5']?.()
  __napiInstance.exports['__napi_register__ImportResult_struct_6']?.()
  __napiInstance.exports['__napi_register__find_imports_7']?.()
  __napiInstance.exports['__napi_register__transfer_project_graph_8']?.()
  __napiInstance.exports['__napi_register__ExternalNode_struct_9']?.()
  __napiInstance.exports['__napi_register__Target_struct_10']?.()
  __napiInstance.exports['__napi_register__Project_struct_11']?.()
  __napiInstance.exports['__napi_register__ProjectGraph_struct_12']?.()
  __napiInstance.exports['__napi_register__HashPlanner_struct_13']?.()
  __napiInstance.exports['__napi_register__HashPlanner_impl_17']?.()
  __napiInstance.exports['__napi_register__HashDetails_struct_18']?.()
  __napiInstance.exports['__napi_register__HasherOptions_struct_19']?.()
  __napiInstance.exports['__napi_register__TaskHasher_struct_20']?.()
  __napiInstance.exports['__napi_register__TaskHasher_impl_23']?.()
  __napiInstance.exports['__napi_register__Task_struct_24']?.()
  __napiInstance.exports['__napi_register__TaskTarget_struct_25']?.()
  __napiInstance.exports['__napi_register__TaskGraph_struct_26']?.()
  __napiInstance.exports['__napi_register__FileData_struct_27']?.()
  __napiInstance.exports['__napi_register__InputsInput_struct_28']?.()
  __napiInstance.exports['__napi_register__FileSetInput_struct_29']?.()
  __napiInstance.exports['__napi_register__RuntimeInput_struct_30']?.()
  __napiInstance.exports['__napi_register__EnvironmentInput_struct_31']?.()
  __napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_32']?.()
  __napiInstance.exports['__napi_register__DepsOutputsInput_struct_33']?.()
  __napiInstance.exports['__napi_register__NxJson_struct_34']?.()
  __napiInstance.exports['__napi_register__WorkspaceContext_struct_35']?.()
  __napiInstance.exports['__napi_register__WorkspaceContext_impl_44']?.()
  __napiInstance.exports['__napi_register__WorkspaceErrors_45']?.()
  __napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_46']?.()
  __napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_47']?.()
  __napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_48']?.()
  __napiInstance.exports['__napi_register__FileMap_struct_49']?.()
  __napiInstance.exports['__napi_register____test_only_transfer_file_map_50']?.()
  __napiInstance.exports['__napi_register__IS_WASM_51']?.()
  __napiInstance.exports['__napi_register__CachedResult_struct_4']?.()
  __napiInstance.exports['__napi_register__NxCache_struct_5']?.()
  __napiInstance.exports['__napi_register__NxCache_impl_13']?.()
  __napiInstance.exports['__napi_register__hash_array_14']?.()
  __napiInstance.exports['__napi_register__hash_file_15']?.()
  __napiInstance.exports['__napi_register__IS_WASM_16']?.()
  __napiInstance.exports['__napi_register__get_binary_target_17']?.()
  __napiInstance.exports['__napi_register__ImportResult_struct_18']?.()
  __napiInstance.exports['__napi_register__find_imports_19']?.()
  __napiInstance.exports['__napi_register__transfer_project_graph_20']?.()
  __napiInstance.exports['__napi_register__ExternalNode_struct_21']?.()
  __napiInstance.exports['__napi_register__Target_struct_22']?.()
  __napiInstance.exports['__napi_register__Project_struct_23']?.()
  __napiInstance.exports['__napi_register__ProjectGraph_struct_24']?.()
  __napiInstance.exports['__napi_register__HashedTask_struct_25']?.()
  __napiInstance.exports['__napi_register__TaskDetails_struct_26']?.()
  __napiInstance.exports['__napi_register__TaskDetails_impl_29']?.()
  __napiInstance.exports['__napi_register__HashPlanner_struct_30']?.()
  __napiInstance.exports['__napi_register__HashPlanner_impl_34']?.()
  __napiInstance.exports['__napi_register__HashDetails_struct_35']?.()
  __napiInstance.exports['__napi_register__HasherOptions_struct_36']?.()
  __napiInstance.exports['__napi_register__TaskHasher_struct_37']?.()
  __napiInstance.exports['__napi_register__TaskHasher_impl_40']?.()
  __napiInstance.exports['__napi_register__TaskRun_struct_41']?.()
  __napiInstance.exports['__napi_register__NxTaskHistory_struct_42']?.()
  __napiInstance.exports['__napi_register__NxTaskHistory_impl_46']?.()
  __napiInstance.exports['__napi_register__Task_struct_47']?.()
  __napiInstance.exports['__napi_register__TaskTarget_struct_48']?.()
  __napiInstance.exports['__napi_register__TaskGraph_struct_49']?.()
  __napiInstance.exports['__napi_register__FileData_struct_50']?.()
  __napiInstance.exports['__napi_register__InputsInput_struct_51']?.()
  __napiInstance.exports['__napi_register__FileSetInput_struct_52']?.()
  __napiInstance.exports['__napi_register__RuntimeInput_struct_53']?.()
  __napiInstance.exports['__napi_register__EnvironmentInput_struct_54']?.()
  __napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_55']?.()
  __napiInstance.exports['__napi_register__DepsOutputsInput_struct_56']?.()
  __napiInstance.exports['__napi_register__NxJson_struct_57']?.()
  __napiInstance.exports['__napi_register__WorkspaceContext_struct_58']?.()
  __napiInstance.exports['__napi_register__WorkspaceContext_impl_67']?.()
  __napiInstance.exports['__napi_register__WorkspaceErrors_68']?.()
  __napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_69']?.()
  __napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_70']?.()
  __napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_71']?.()
  __napiInstance.exports['__napi_register__FileMap_struct_72']?.()
  __napiInstance.exports['__napi_register____test_only_transfer_file_map_73']?.()
}
export const HashPlanner = __napiModule.exports.HashPlanner
export const ImportResult = __napiModule.exports.ImportResult
export const NxCache = __napiModule.exports.NxCache
export const NxTaskHistory = __napiModule.exports.NxTaskHistory
export const TaskDetails = __napiModule.exports.TaskDetails
export const TaskHasher = __napiModule.exports.TaskHasher
export const WorkspaceContext = __napiModule.exports.WorkspaceContext
export const copy = __napiModule.exports.copy
export const expandOutputs = __napiModule.exports.expandOutputs
export const findImports = __napiModule.exports.findImports
export const getBinaryTarget = __napiModule.exports.getBinaryTarget
export const getFilesForOutputs = __napiModule.exports.getFilesForOutputs
export const hashArray = __napiModule.exports.hashArray
export const hashFile = __napiModule.exports.hashFile
@@ -27,7 +27,7 @@ const __wasi = new __nodeWASI({
const __emnapiContext = __emnapiGetDefaultContext()

const __sharedMemory = new WebAssembly.Memory({
  initial: 16384,
  initial: 1024,
  maximum: 32768,
  shared: true,
})
@@ -86,49 +86,63 @@ function __napi_rs_initialize_modules(__napiInstance) {
  __napiInstance.exports['__napi_register__get_files_for_outputs_1']?.()
  __napiInstance.exports['__napi_register__remove_2']?.()
  __napiInstance.exports['__napi_register__copy_3']?.()
  __napiInstance.exports['__napi_register__hash_array_4']?.()
  __napiInstance.exports['__napi_register__hash_file_5']?.()
  __napiInstance.exports['__napi_register__ImportResult_struct_6']?.()
  __napiInstance.exports['__napi_register__find_imports_7']?.()
  __napiInstance.exports['__napi_register__transfer_project_graph_8']?.()
  __napiInstance.exports['__napi_register__ExternalNode_struct_9']?.()
  __napiInstance.exports['__napi_register__Target_struct_10']?.()
  __napiInstance.exports['__napi_register__Project_struct_11']?.()
  __napiInstance.exports['__napi_register__ProjectGraph_struct_12']?.()
  __napiInstance.exports['__napi_register__HashPlanner_struct_13']?.()
  __napiInstance.exports['__napi_register__HashPlanner_impl_17']?.()
  __napiInstance.exports['__napi_register__HashDetails_struct_18']?.()
  __napiInstance.exports['__napi_register__HasherOptions_struct_19']?.()
  __napiInstance.exports['__napi_register__TaskHasher_struct_20']?.()
  __napiInstance.exports['__napi_register__TaskHasher_impl_23']?.()
  __napiInstance.exports['__napi_register__Task_struct_24']?.()
  __napiInstance.exports['__napi_register__TaskTarget_struct_25']?.()
  __napiInstance.exports['__napi_register__TaskGraph_struct_26']?.()
  __napiInstance.exports['__napi_register__FileData_struct_27']?.()
  __napiInstance.exports['__napi_register__InputsInput_struct_28']?.()
  __napiInstance.exports['__napi_register__FileSetInput_struct_29']?.()
  __napiInstance.exports['__napi_register__RuntimeInput_struct_30']?.()
  __napiInstance.exports['__napi_register__EnvironmentInput_struct_31']?.()
  __napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_32']?.()
  __napiInstance.exports['__napi_register__DepsOutputsInput_struct_33']?.()
  __napiInstance.exports['__napi_register__NxJson_struct_34']?.()
  __napiInstance.exports['__napi_register__WorkspaceContext_struct_35']?.()
  __napiInstance.exports['__napi_register__WorkspaceContext_impl_44']?.()
  __napiInstance.exports['__napi_register__WorkspaceErrors_45']?.()
  __napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_46']?.()
  __napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_47']?.()
  __napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_48']?.()
  __napiInstance.exports['__napi_register__FileMap_struct_49']?.()
  __napiInstance.exports['__napi_register____test_only_transfer_file_map_50']?.()
  __napiInstance.exports['__napi_register__IS_WASM_51']?.()
  __napiInstance.exports['__napi_register__CachedResult_struct_4']?.()
  __napiInstance.exports['__napi_register__NxCache_struct_5']?.()
  __napiInstance.exports['__napi_register__NxCache_impl_13']?.()
  __napiInstance.exports['__napi_register__hash_array_14']?.()
  __napiInstance.exports['__napi_register__hash_file_15']?.()
  __napiInstance.exports['__napi_register__IS_WASM_16']?.()
  __napiInstance.exports['__napi_register__get_binary_target_17']?.()
  __napiInstance.exports['__napi_register__ImportResult_struct_18']?.()
  __napiInstance.exports['__napi_register__find_imports_19']?.()
  __napiInstance.exports['__napi_register__transfer_project_graph_20']?.()
  __napiInstance.exports['__napi_register__ExternalNode_struct_21']?.()
  __napiInstance.exports['__napi_register__Target_struct_22']?.()
  __napiInstance.exports['__napi_register__Project_struct_23']?.()
  __napiInstance.exports['__napi_register__ProjectGraph_struct_24']?.()
  __napiInstance.exports['__napi_register__HashedTask_struct_25']?.()
  __napiInstance.exports['__napi_register__TaskDetails_struct_26']?.()
  __napiInstance.exports['__napi_register__TaskDetails_impl_29']?.()
  __napiInstance.exports['__napi_register__HashPlanner_struct_30']?.()
  __napiInstance.exports['__napi_register__HashPlanner_impl_34']?.()
  __napiInstance.exports['__napi_register__HashDetails_struct_35']?.()
  __napiInstance.exports['__napi_register__HasherOptions_struct_36']?.()
  __napiInstance.exports['__napi_register__TaskHasher_struct_37']?.()
  __napiInstance.exports['__napi_register__TaskHasher_impl_40']?.()
  __napiInstance.exports['__napi_register__TaskRun_struct_41']?.()
  __napiInstance.exports['__napi_register__NxTaskHistory_struct_42']?.()
  __napiInstance.exports['__napi_register__NxTaskHistory_impl_46']?.()
  __napiInstance.exports['__napi_register__Task_struct_47']?.()
  __napiInstance.exports['__napi_register__TaskTarget_struct_48']?.()
  __napiInstance.exports['__napi_register__TaskGraph_struct_49']?.()
  __napiInstance.exports['__napi_register__FileData_struct_50']?.()
  __napiInstance.exports['__napi_register__InputsInput_struct_51']?.()
  __napiInstance.exports['__napi_register__FileSetInput_struct_52']?.()
  __napiInstance.exports['__napi_register__RuntimeInput_struct_53']?.()
  __napiInstance.exports['__napi_register__EnvironmentInput_struct_54']?.()
  __napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_55']?.()
  __napiInstance.exports['__napi_register__DepsOutputsInput_struct_56']?.()
  __napiInstance.exports['__napi_register__NxJson_struct_57']?.()
  __napiInstance.exports['__napi_register__WorkspaceContext_struct_58']?.()
  __napiInstance.exports['__napi_register__WorkspaceContext_impl_67']?.()
  __napiInstance.exports['__napi_register__WorkspaceErrors_68']?.()
  __napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_69']?.()
  __napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_70']?.()
  __napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_71']?.()
  __napiInstance.exports['__napi_register__FileMap_struct_72']?.()
  __napiInstance.exports['__napi_register____test_only_transfer_file_map_73']?.()
}
module.exports.HashPlanner = __napiModule.exports.HashPlanner
module.exports.ImportResult = __napiModule.exports.ImportResult
module.exports.NxCache = __napiModule.exports.NxCache
module.exports.NxTaskHistory = __napiModule.exports.NxTaskHistory
module.exports.TaskDetails = __napiModule.exports.TaskDetails
module.exports.TaskHasher = __napiModule.exports.TaskHasher
module.exports.WorkspaceContext = __napiModule.exports.WorkspaceContext
module.exports.copy = __napiModule.exports.copy
module.exports.expandOutputs = __napiModule.exports.expandOutputs
module.exports.findImports = __napiModule.exports.findImports
module.exports.getBinaryTarget = __napiModule.exports.getBinaryTarget
module.exports.getFilesForOutputs = __napiModule.exports.getFilesForOutputs
module.exports.hashArray = __napiModule.exports.hashArray
module.exports.hashFile = __napiModule.exports.hashFile
packages/nx/src/native/tasks/details.rs (new file, 56 changes)
@ -0,0 +1,56 @@
|
||||
use napi::bindgen_prelude::*;
use rusqlite::{params, Connection};

#[napi(object)]
#[derive(Default, Clone)]
pub struct HashedTask {
    pub hash: String,
    pub project: String,
    pub target: String,
    pub configuration: Option<String>,
}

#[napi]
struct TaskDetails {
    db: External<Connection>,
}

#[napi]
impl TaskDetails {
    #[napi(constructor)]
    pub fn new(db: External<Connection>) -> anyhow::Result<Self> {
        let r = Self { db };

        r.setup()?;

        Ok(r)
    }

    fn setup(&self) -> anyhow::Result<()> {
        self.db.execute(
            "
            CREATE TABLE IF NOT EXISTS task_details (
                hash    TEXT PRIMARY KEY NOT NULL,
                project TEXT NOT NULL,
                target  TEXT NOT NULL,
                configuration TEXT
            );",
            params![],
        )?;

        Ok(())
    }

    #[napi]
    pub fn record_task_details(&self, tasks: Vec<HashedTask>) -> anyhow::Result<()> {
        for task in tasks.iter() {
            self.db.execute(
                "INSERT OR REPLACE INTO task_details (hash, project, target, configuration)
                VALUES (?1, ?2, ?3, ?4)",
                params![task.hash, task.project, task.target, task.configuration],
            )?;
        }

        Ok(())
    }
}
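The `TaskDetails` binding above is what seeds the `task_details` table from JS; the other tables reference it by hash. A minimal usage sketch, assuming the generated napi exports and the `getDbConnection` helper added later in this diff (hash values are illustrative, mirroring the specs below):

import { TaskDetails } from '../index';
import { getDbConnection } from '../../utils/db-connection';

// One shared SQLite connection per process backs all of the tables.
const taskDetails = new TaskDetails(getDbConnection());

// INSERT OR REPLACE keeps a single row per hash, so re-recording is idempotent.
taskDetails.recordTaskDetails([
  { hash: '123', project: 'proj', target: 'build', configuration: 'production' },
]);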
@ -48,7 +48,7 @@ pub fn hash_runtime(
}

#[cfg(target_os = "windows")]
fn create_command_builder() -> Command {
pub fn create_command_builder() -> Command {
    let comspec = std::env::var("COMSPEC");
    let shell = comspec
        .as_ref()
@ -61,7 +61,7 @@ fn create_command_builder() -> Command {
}

#[cfg(not(target_os = "windows"))]
fn create_command_builder() -> Command {
pub fn create_command_builder() -> Command {
    let mut command = Command::new("sh");
    command.arg("-c");
    command
@ -1,7 +1,9 @@
mod dep_outputs;
pub mod details;
mod hash_planner;
mod hashers;
pub mod hashers;
mod inputs;
pub mod task_hasher;
mod types;
pub mod task_history;
pub mod types;
mod utils;
97
packages/nx/src/native/tasks/task_history.rs
Normal file
@ -0,0 +1,97 @@
use std::rc::Rc;

use napi::bindgen_prelude::*;
use rusqlite::vtab::array;
use rusqlite::{params, types::Value, Connection};

#[napi(object)]
pub struct TaskRun {
    pub hash: String,
    pub status: String,
    pub code: i16,
    pub start: i64,
    pub end: i64,
}

#[napi]
pub struct NxTaskHistory {
    db: External<Connection>,
}

#[napi]
impl NxTaskHistory {
    #[napi(constructor)]
    pub fn new(db: External<Connection>) -> anyhow::Result<Self> {
        let s = Self { db };

        s.setup()?;

        Ok(s)
    }

    fn setup(&self) -> anyhow::Result<()> {
        array::load_module(&self.db)?;
        self.db
            .execute_batch(
                "
            BEGIN;
            CREATE TABLE IF NOT EXISTS task_history (
                id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
                hash TEXT NOT NULL,
                status TEXT NOT NULL,
                code INTEGER NOT NULL,
                start TIMESTAMP NOT NULL,
                end TIMESTAMP NOT NULL,
                FOREIGN KEY (hash) REFERENCES task_details (hash)
            );
            CREATE INDEX IF NOT EXISTS hash_idx ON task_history (hash);
            COMMIT;
            ",
            )
            .map_err(anyhow::Error::from)
    }

    #[napi]
    pub fn record_task_runs(&self, task_runs: Vec<TaskRun>) -> anyhow::Result<()> {
        for task_run in task_runs.iter() {
            self.db
                .execute(
                    "
            INSERT INTO task_history
                (hash, status, code, start, end)
            VALUES (?1, ?2, ?3, ?4, ?5)",
                    params![
                        task_run.hash,
                        task_run.status,
                        task_run.code,
                        task_run.start,
                        task_run.end
                    ],
                )
                .map_err(anyhow::Error::from)?;
        }
        Ok(())
    }

    #[napi]
    pub fn get_flaky_tasks(&self, hashes: Vec<String>) -> anyhow::Result<Vec<String>> {
        let values = Rc::new(
            hashes
                .iter()
                .map(|s| Value::from(s.clone()))
                .collect::<Vec<Value>>(),
        );

        self.db
            .prepare(
                "SELECT hash from task_history
                WHERE hash IN rarray(?1)
                GROUP BY hash
                HAVING COUNT(DISTINCT code) > 1
                ",
            )?
            .query_map([values], |row| row.get(0))?
            .map(|r| r.map_err(anyhow::Error::from))
            .collect()
    }
}
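`get_flaky_tasks` loads rusqlite's `array` virtual-table module in `setup()` so a whole JS string array can be bound as the single `rarray(?1)` parameter; a hash is reported as flaky when its recorded runs span more than one distinct exit code. A sketch of the expected behavior from the JS side (illustrative hashes; the spec below exercises the same cases):

import { NxTaskHistory } from '../index';
import { getDbConnection } from '../../utils/db-connection';

const history = new NxTaskHistory(getDbConnection());
history.recordTaskRuns([
  { hash: '123', status: 'failure', code: 1, start: 0, end: 1 },
  { hash: '123', status: 'success', code: 0, start: 2, end: 3 },
  { hash: '234', status: 'success', code: 0, start: 0, end: 1 },
]);

// '123' ran with two distinct codes (1, then 0) => flaky; '234' did not.
history.getFlakyTasks(['123', '234']); // => ['123']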
60
packages/nx/src/native/tests/cache.spec.ts
Normal file
@ -0,0 +1,60 @@
import { TaskDetails, NxCache } from '../index';
import { join } from 'path';
import { TempFs } from '../../internal-testing-utils/temp-fs';
import { rmSync } from 'fs';
import { getDbConnection } from '../../utils/db-connection';

describe('Cache', () => {
  let cache: NxCache;
  let tempFs: TempFs;
  let taskDetails: TaskDetails;

  beforeEach(() => {
    tempFs = new TempFs('cache');
    rmSync(join(__dirname, 'temp-db'), {
      recursive: true,
      force: true,
    });

    const dbConnection = getDbConnection(join(__dirname, 'temp-db'));

    taskDetails = new TaskDetails(dbConnection);

    cache = new NxCache(
      tempFs.tempDir,
      join(tempFs.tempDir, '.cache'),
      dbConnection
    );

    taskDetails.recordTaskDetails([
      {
        hash: '123',
        project: 'proj',
        target: 'test',
        configuration: 'production',
      },
    ]);
  });

  it('should store results into cache', async () => {
    const result = cache.get('123');

    expect(result).toBeNull();

    tempFs.createFileSync('dist/output.txt', 'output contents 123');

    cache.put('123', 'output 123', ['dist'], 0);

    tempFs.removeFileSync('dist/output.txt');

    const result2 = cache.get('123');
    cache.copyFilesFromCache(result2, ['dist']);

    expect(result2.code).toEqual(0);
    expect(result2.terminalOutput).toEqual('output 123');

    expect(await tempFs.readFile('dist/output.txt')).toEqual(
      'output contents 123'
    );
  });
});
85
packages/nx/src/native/tests/task_history.spec.ts
Normal file
@ -0,0 +1,85 @@
import { TaskDetails, NxTaskHistory } from '../index';
import { join } from 'path';
import { TempFs } from '../../internal-testing-utils/temp-fs';
import { rmSync } from 'fs';
import { getDbConnection } from '../../utils/db-connection';

describe('NxTaskHistory', () => {
  let taskHistory: NxTaskHistory;
  let tempFs: TempFs;
  let taskDetails: TaskDetails;

  beforeEach(() => {
    tempFs = new TempFs('task-history');

    rmSync(join(__dirname, 'temp-db'), {
      recursive: true,
      force: true,
    });

    const dbConnection = getDbConnection(join(__dirname, 'temp-db'));
    taskHistory = new NxTaskHistory(dbConnection);
    taskDetails = new TaskDetails(dbConnection);

    // Cache sets up the task details
    taskDetails.recordTaskDetails([
      {
        hash: '123',
        project: 'proj',
        target: 'build',
        configuration: 'production',
      },
      {
        hash: '234',
        project: 'proj',
        target: 'build',
        configuration: 'production',
      },
    ]);
  });

  it('should record task history', () => {
    taskHistory.recordTaskRuns([
      {
        hash: '123',
        code: 0,
        status: 'success',
        start: Date.now() - 1000 * 60 * 60,
        end: Date.now(),
      },
    ]);
  });

  it('should query flaky tasks', () => {
    taskHistory.recordTaskRuns([
      {
        hash: '123',
        code: 1,
        status: 'failure',
        start: Date.now() - 1000 * 60 * 60,
        end: Date.now(),
      },
      {
        hash: '123',
        code: 0,
        status: 'success',
        start: Date.now() - 1000 * 60 * 30,
        end: Date.now(),
      },
      {
        hash: '234',
        code: 0,
        status: 'success',
        start: Date.now() - 1000 * 60 * 60,
        end: Date.now(),
      },
    ]);
    const r = taskHistory.getFlakyTasks(['123', '234']);
    expect(r).toContain('123');
    expect(r).not.toContain('234');

    const r2 = taskHistory.getFlakyTasks([]);
    expect(r2).not.toContain('123');
    expect(r2).not.toContain('234');
  });
});
@ -1,19 +1,17 @@
use napi::bindgen_prelude::External;
use std::collections::HashMap;

use crate::native::hasher::hash;
use crate::native::utils::{path::get_child_files, Normalize, NxMutex, NxCondvar};
use rayon::prelude::*;
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::thread;

use napi::bindgen_prelude::External;
use rayon::prelude::*;
use tracing::{trace, warn};

use crate::native::hasher::hash;
use crate::native::logger::enable_logger;
use crate::native::project_graph::utils::{find_project_for_path, ProjectRootMappings};
use crate::native::types::FileData;
use tracing::{trace, warn};

use crate::native::utils::{path::get_child_files, Normalize, NxCondvar, NxMutex};
use crate::native::workspace::files_archive::{read_files_archive, write_files_archive};
use crate::native::workspace::files_hashing::{full_files_hash, selective_files_hash};
use crate::native::workspace::types::{
@ -36,9 +34,9 @@ fn gather_and_hash_files(workspace_root: &Path, cache_dir: String) -> Vec<(PathB
    trace!("Gathering files in {}", workspace_root.display());
    let now = std::time::Instant::now();
    let file_hashes = if let Some(archived_files) = archived_files {
        selective_files_hash(&workspace_root, archived_files)
        selective_files_hash(workspace_root, archived_files)
    } else {
        full_files_hash(&workspace_root)
        full_files_hash(workspace_root)
    };

    let mut files = file_hashes
@ -69,7 +67,7 @@ impl FilesWorker {
        let files_lock_clone = Arc::clone(&files_lock);
        let workspace_root = workspace_root.to_owned();

        thread::spawn(move || {
        std::thread::spawn(move || {
            let (lock, cvar) = &*files_lock_clone;
            trace!("Initially locking files");
            let mut workspace_files = lock.lock().expect("Should be the first time locking files");
@ -116,10 +114,14 @@ impl FilesWorker {
        let files = files_lock.lock().expect("Should be able to lock files");

        #[cfg(target_arch = "wasm32")]
        let mut files = cvar.wait(files, |guard| guard.len() == 0).expect("Should be able to wait for files");
        let files = cvar
            .wait(files, |guard| guard.len() == 0)
            .expect("Should be able to wait for files");

        #[cfg(not(target_arch = "wasm32"))]
        let files = cvar.wait(files, |guard| guard.len() == 0).expect("Should be able to wait for files");
        let files = cvar
            .wait(files, |guard| guard.len() == 0)
            .expect("Should be able to wait for files");

        let file_data = files
            .iter()
@ -150,7 +152,9 @@ impl FilesWorker {
        };

        let (files_lock, _) = &files_sync.deref();
        let mut files = files_lock.lock().expect("Should always be able to update files");
        let mut files = files_lock
            .lock()
            .expect("Should always be able to update files");
        let mut map: HashMap<PathBuf, String> = files.drain(..).collect();

        for deleted_path in deleted_files_and_directories {
@ -162,7 +166,6 @@ impl FilesWorker {
            let owned_deleted_path = deleted_path.to_owned();
            !path.starts_with(owned_deleted_path + "/")
        });

        };
    }

@ -180,7 +183,7 @@ impl FilesWorker {

        for (file, hash) in &updated_files_hashes {
            map.entry(file.into())
                .and_modify(|e| *e = hash.clone())
                .and_modify(|e| e.clone_from(hash))
                .or_insert(hash.clone());
        }

@ -202,7 +205,7 @@ impl WorkspaceContext {
        let workspace_root_path = PathBuf::from(&workspace_root);

        WorkspaceContext {
            files_worker: FilesWorker::gather_files(&workspace_root_path, cache_dir),
            files_worker: FilesWorker::gather_files(&workspace_root_path, cache_dir.clone()),
            workspace_root,
            workspace_root_path,
        }
@ -293,7 +296,7 @@ impl WorkspaceContext {
        trace!("{file:?} was not found in any project, updating global files");
        global_files
            .entry(file)
            .and_modify(|e| *e = hash.clone())
            .and_modify(|e| e.clone_from(&hash))
            .or_insert(hash);
    }
}
@ -18,6 +18,7 @@ import * as tar from 'tar-stream';
import { cacheDir } from '../utils/cache-directory';
import { createHash } from 'crypto';
import { TasksRunner } from '../tasks-runner/tasks-runner';
import { RemoteCacheV2 } from '../tasks-runner/default-tasks-runner';

interface CloudBundleInstall {
  version: string;
@ -55,6 +56,7 @@ export interface NxCloudClient {
  configureLightClientRequire: () => (paths: string[]) => void;
  commands: Record<string, () => Promise<void>>;
  nxCloudTasksRunner: TasksRunner<CloudTaskRunnerOptions>;
  remoteCache: RemoteCacheV2;
}
export async function verifyOrUpdateNxCloudClient(
  options: CloudTaskRunnerOptions
@ -2,11 +2,21 @@ import { workspaceRoot } from '../utils/workspace-root';
import { mkdir, mkdirSync, pathExists, readFile, writeFile } from 'fs-extra';
import { join } from 'path';
import { performance } from 'perf_hooks';
import { DefaultTasksRunnerOptions } from './default-tasks-runner';
import {
  DefaultTasksRunnerOptions,
  RemoteCache,
  RemoteCacheV2,
} from './default-tasks-runner';
import { spawn } from 'child_process';
import { cacheDir } from '../utils/cache-directory';
import { Task } from '../config/task-graph';
import { machineId } from 'node-machine-id';
import { NxCache } from '../native';
import { getDbConnection } from '../utils/db-connection';
import { isNxCloudUsed } from '../utils/nx-cloud-utils';
import { readNxJson } from '../config/nx-json';
import { verifyOrUpdateNxCloudClient } from '../nx-cloud/update-manager';
import { getCloudOptions } from '../nx-cloud/utilities/get-cloud-options';

export type CachedResult = {
  terminalOutput: string;
@ -16,6 +26,114 @@ export type CachedResult = {
};
export type TaskWithCachedResult = { task: Task; cachedResult: CachedResult };

export class DbCache {
  private cache = new NxCache(workspaceRoot, cacheDir, getDbConnection());
  private remoteCache: RemoteCacheV2 | null;
  private remoteCachePromise: Promise<RemoteCacheV2>;

  async setup() {
    this.remoteCache = await this.getRemoteCache();
  }

  constructor(private readonly options: { nxCloudRemoteCache: RemoteCache }) {}

  async get(task: Task): Promise<CachedResult | null> {
    const res = this.cache.get(task.hash);

    if (res) {
      return {
        ...res,
        remote: false,
      };
    }
    await this.setup();
    if (this.remoteCache) {
      // didn't find it locally but we have a remote cache
      // attempt remote cache
      const res = await this.remoteCache.retrieve(
        task.hash,
        this.cache.cacheDirectory
      );

      if (res) {
        this.cache.applyRemoteCacheResults(task.hash, res);

        return {
          ...res,
          remote: true,
        };
      } else {
        return null;
      }
    } else {
      return null;
    }
  }

  async put(
    task: Task,
    terminalOutput: string | null,
    outputs: string[],
    code: number
  ) {
    return tryAndRetry(async () => {
      this.cache.put(task.hash, terminalOutput, outputs, code);

      await this.setup();
      if (this.remoteCache) {
        await this.remoteCache.store(
          task.hash,
          this.cache.cacheDirectory,
          terminalOutput,
          code
        );
      }
    });
  }

  copyFilesFromCache(_: string, cachedResult: CachedResult, outputs: string[]) {
    return tryAndRetry(async () =>
      this.cache.copyFilesFromCache(cachedResult, outputs)
    );
  }

  removeOldCacheRecords() {
    return this.cache.removeOldCacheRecords();
  }

  temporaryOutputPath(task: Task) {
    return this.cache.getTaskOutputsPath(task.hash);
  }

  private async getRemoteCache(): Promise<RemoteCacheV2 | null> {
    if (this.remoteCachePromise) {
      return this.remoteCachePromise;
    }

    this.remoteCachePromise = this._getRemoteCache();
    return this.remoteCachePromise;
  }

  private async _getRemoteCache(): Promise<RemoteCacheV2 | null> {
    const nxJson = readNxJson();
    if (isNxCloudUsed(nxJson)) {
      const options = getCloudOptions();
      const { nxCloudClient } = await verifyOrUpdateNxCloudClient(options);
      if (nxCloudClient.remoteCache) {
        return nxCloudClient.remoteCache;
      } else {
        // old nx cloud instance
        return RemoteCacheV2.fromCacheV1(this.options.nxCloudRemoteCache);
      }
    } else {
      return null;
    }
  }
}

/**
 * @deprecated Use the {@link DbCache} class instead. This will be removed in Nx 21.
 */
export class Cache {
  root = workspaceRoot;
  cachePath = this.createCacheDir();
@ -85,7 +203,10 @@ export class Cache {
    outputs: string[],
    code: number
  ) {
    return this.tryAndRetry(async () => {
    return tryAndRetry(async () => {
      /**
       * This is the directory with the cached artifacts
       */
      const td = join(this.cachePath, task.hash);
      const tdCommit = join(this.cachePath, `${task.hash}.commit`);

@ -135,7 +256,7 @@ export class Cache {
    cachedResult: CachedResult,
    outputs: string[]
  ) {
    return this.tryAndRetry(async () => {
    return tryAndRetry(async () => {
      const expandedOutputs = await this.expandOutputsInCache(
        outputs,
        cachedResult
@ -273,10 +394,10 @@ export class Cache {
    mkdirSync(path, { recursive: true });
    return path;
  }
}

private tryAndRetry<T>(fn: () => Promise<T>): Promise<T> {
function tryAndRetry<T>(fn: () => Promise<T>): Promise<T> {
  let attempts = 0;
  const baseTimeout = 5;
  // Generate a random number between 2 and 4 to raise to the power of attempts
  const baseExponent = Math.random() * 2 + 2;
  const _try = async () => {
@ -295,4 +416,3 @@ export class Cache {
  };
  return _try();
}
}
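`DbCache` is a read-through composite over the native `NxCache`: `get` checks the local SQLite-backed store first, lazily resolves the remote cache on a miss, and on a remote hit writes the artifacts back locally via `applyRemoteCacheResults` so the next lookup stays local; the `remote` flag on the result records which path was taken. A rough usage sketch, with an illustrative task literal:

const cache = new DbCache({ nxCloudRemoteCache: null });

const task = { id: 'proj:build', hash: '123' } as Task; // illustrative
const hit = await cache.get(task);
if (hit) {
  // hit.remote is false for a local db hit, true when pulled from remote.
  await cache.copyFilesFromCache(task.hash, hit, ['dist']);
}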
@ -7,12 +7,53 @@ import { NxJsonConfiguration } from '../config/nx-json';
import { Task, TaskGraph } from '../config/task-graph';
import { NxArgs } from '../utils/command-line-utils';
import { DaemonClient } from '../daemon/client/client';
import { readFile, writeFile } from 'fs/promises';
import { join } from 'path';
import { CachedResult } from '../native';

export interface RemoteCache {
  retrieve: (hash: string, cacheDirectory: string) => Promise<boolean>;
  store: (hash: string, cacheDirectory: string) => Promise<boolean>;
}

export abstract class RemoteCacheV2 {
  static fromCacheV1(cache: RemoteCache): RemoteCacheV2 {
    return {
      retrieve: async (hash, cacheDirectory) => {
        const res = await cache.retrieve(hash, cacheDirectory);
        const [terminalOutput, code] = await Promise.all([
          readFile(join(cacheDirectory, hash, 'terminalOutputs'), 'utf-8'),
          readFile(join(cacheDirectory, hash, 'code'), 'utf-8').then((s) => +s),
        ]);
        if (res) {
          return {
            outputsPath: cacheDirectory,
            terminalOutput,
            code,
          };
        } else {
          return null;
        }
      },
      store: async (hash, cacheDirectory, __, code) => {
        await writeFile(join(cacheDirectory, hash, 'code'), code.toString());

        return cache.store(hash, cacheDirectory);
      },
    };
  }
  abstract retrieve(
    hash: string,
    cacheDirectory: string
  ): Promise<CachedResult | null>;
  abstract store(
    hash: string,
    cacheDirectory: string,
    terminalOutput: string,
    code: number
  ): Promise<boolean>;
}

export interface DefaultTasksRunnerOptions {
  parallel?: number;
  cacheableOperations?: string[];
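`RemoteCacheV2` passes the terminal output and exit code through the API itself, where the v1 interface smuggled them through files in the cache directory; that is exactly what the `fromCacheV1` adapter reconstructs from the `terminalOutputs` and `code` files. A minimal custom implementation might look like this sketch (the `fetchArtifacts`/`uploadArtifacts` helpers are hypothetical):

class MyRemoteCache extends RemoteCacheV2 {
  async retrieve(hash: string, cacheDirectory: string): Promise<CachedResult | null> {
    // hypothetical transport helper; resolves to null on a miss
    const res = await fetchArtifacts(hash, cacheDirectory);
    return res
      ? { outputsPath: cacheDirectory, terminalOutput: res.terminalOutput, code: res.code }
      : null;
  }

  async store(hash: string, cacheDirectory: string, terminalOutput: string, code: number): Promise<boolean> {
    // hypothetical transport helper
    return uploadArtifacts(hash, cacheDirectory, terminalOutput, code);
  }
}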
@ -0,0 +1,71 @@
import { serializeTarget } from '../../utils/serialize-target';
import { Task } from '../../config/task-graph';
import { output } from '../../utils/output';
import {
  getHistoryForHashes,
  TaskRun,
  writeTaskRunsToHistory as writeTaskRunsToHistory,
} from '../../utils/legacy-task-history';
import { LifeCycle, TaskResult } from '../life-cycle';

export class LegacyTaskHistoryLifeCycle implements LifeCycle {
  private startTimings: Record<string, number> = {};
  private taskRuns: TaskRun[] = [];

  startTasks(tasks: Task[]): void {
    for (let task of tasks) {
      this.startTimings[task.id] = new Date().getTime();
    }
  }

  async endTasks(taskResults: TaskResult[]) {
    const taskRuns: TaskRun[] = taskResults.map((taskResult) => ({
      project: taskResult.task.target.project,
      target: taskResult.task.target.target,
      configuration: taskResult.task.target.configuration,
      hash: taskResult.task.hash,
      code: taskResult.code.toString(),
      status: taskResult.status,
      start: (
        taskResult.task.startTime ?? this.startTimings[taskResult.task.id]
      ).toString(),
      end: (taskResult.task.endTime ?? new Date().getTime()).toString(),
    }));
    this.taskRuns.push(...taskRuns);
  }

  async endCommand() {
    await writeTaskRunsToHistory(this.taskRuns);
    const history = await getHistoryForHashes(this.taskRuns.map((t) => t.hash));
    const flakyTasks: string[] = [];

    // check if any hash has different exit codes => flaky
    for (let hash in history) {
      if (
        history[hash].length > 1 &&
        history[hash].some((run) => run.code !== history[hash][0].code)
      ) {
        flakyTasks.push(
          serializeTarget(
            history[hash][0].project,
            history[hash][0].target,
            history[hash][0].configuration
          )
        );
      }
    }
    if (flakyTasks.length > 0) {
      output.warn({
        title: `Nx detected ${
          flakyTasks.length === 1 ? 'a flaky task' : 'flaky tasks'
        }`,
        bodyLines: [
          ...flakyTasks.map((t) => `  ${t}`),
          '',
          `Flaky tasks can disrupt your CI pipeline. Automatically retry them with Nx Cloud. Learn more at https://nx.dev/ci/features/flaky-tasks`,
        ],
      });
    }
  }
}
@ -1,16 +1,18 @@
import { serializeTarget } from '../../utils/serialize-target';
import { Task } from '../../config/task-graph';
import { output } from '../../utils/output';
import {
  getHistoryForHashes,
  TaskRun,
  writeTaskRunsToHistory as writeTaskRunsToHistory,
} from '../../utils/task-history';
import { TaskHistory } from '../../utils/task-history';
import { LifeCycle, TaskResult } from '../life-cycle';
import type { TaskRun as NativeTaskRun } from '../../native';

interface TaskRun extends NativeTaskRun {
  target: Task['target'];
}

export class TaskHistoryLifeCycle implements LifeCycle {
  private startTimings: Record<string, number> = {};
  private taskRuns: TaskRun[] = [];
  private taskRuns = new Map<string, TaskRun>();
  private taskHistory = new TaskHistory();

  startTasks(tasks: Task[]): void {
    for (let task of tasks) {
@ -19,41 +21,27 @@ export class TaskHistoryLifeCycle implements LifeCycle {
  }

  async endTasks(taskResults: TaskResult[]) {
    const taskRuns: TaskRun[] = taskResults.map((taskResult) => ({
      project: taskResult.task.target.project,
      target: taskResult.task.target.target,
      configuration: taskResult.task.target.configuration,
    taskResults
      .map((taskResult) => ({
        hash: taskResult.task.hash,
        code: taskResult.code.toString(),
        target: taskResult.task.target,
        code: taskResult.code,
        status: taskResult.status,
        start: (
          taskResult.task.startTime ?? this.startTimings[taskResult.task.id]
        ).toString(),
        end: (taskResult.task.endTime ?? new Date().getTime()).toString(),
      }));
    this.taskRuns.push(...taskRuns);
        start:
          taskResult.task.startTime ?? this.startTimings[taskResult.task.id],
        end: taskResult.task.endTime ?? Date.now(),
      }))
      .forEach((taskRun) => {
        this.taskRuns.set(taskRun.hash, taskRun);
      });
  }

  async endCommand() {
    await writeTaskRunsToHistory(this.taskRuns);
    const history = await getHistoryForHashes(this.taskRuns.map((t) => t.hash));
    const flakyTasks: string[] = [];

    // check if any hash has different exit codes => flaky
    for (let hash in history) {
      if (
        history[hash].length > 1 &&
        history[hash].some((run) => run.code !== history[hash][0].code)
      ) {
        flakyTasks.push(
          serializeTarget(
            history[hash][0].project,
            history[hash][0].target,
            history[hash][0].configuration
          )
    const entries = Array.from(this.taskRuns);
    await this.taskHistory.recordTaskRuns(entries.map(([_, v]) => v));
    const flakyTasks = await this.taskHistory.getFlakyTasks(
      entries.map(([hash]) => hash)
    );
      }
    }
    if (flakyTasks.length > 0) {
      output.warn({
        title: `Nx detected ${
@ -61,7 +49,14 @@ export class TaskHistoryLifeCycle implements LifeCycle {
        }`,
        bodyLines: [
          ...flakyTasks.map((t) => `  ${t}`),
          ...flakyTasks.map((hash) => {
            const taskRun = this.taskRuns.get(hash);
            return `  ${serializeTarget(
              taskRun.target.project,
              taskRun.target.target,
              taskRun.target.configuration
            )}`;
          }),
          '',
          `Flaky tasks can disrupt your CI pipeline. Automatically retry them with Nx Cloud. Learn more at https://nx.dev/ci/features/flaky-tasks`,
        ],
@ -34,6 +34,7 @@ import { StaticRunManyTerminalOutputLifeCycle } from './life-cycles/static-run-m
import { StaticRunOneTerminalOutputLifeCycle } from './life-cycles/static-run-one-terminal-output-life-cycle';
import { StoreRunInformationLifeCycle } from './life-cycles/store-run-information-life-cycle';
import { TaskHistoryLifeCycle } from './life-cycles/task-history-life-cycle';
import { LegacyTaskHistoryLifeCycle } from './life-cycles/task-history-life-cycle-old';
import { TaskProfilingLifeCycle } from './life-cycles/task-profiling-life-cycle';
import { TaskTimingsLifeCycle } from './life-cycles/task-timings-life-cycle';
import {
@ -44,6 +45,7 @@ import {
import { TasksRunner, TaskStatus } from './tasks-runner';
import { shouldStreamOutput } from './utils';
import chalk = require('chalk');
import { IS_WASM } from '../native';

async function getTerminalOutputLifeCycle(
  initiatingProject: string,
@ -517,7 +519,9 @@ function constructLifeCycles(lifeCycle: LifeCycle) {
    lifeCycles.push(new TaskProfilingLifeCycle(process.env.NX_PROFILE));
  }
  if (!isNxCloudUsed(readNxJson())) {
    lifeCycles.push(new TaskHistoryLifeCycle());
    lifeCycles.push(
      !IS_WASM ? new TaskHistoryLifeCycle() : new LegacyTaskHistoryLifeCycle()
    );
  }
  return lifeCycles;
}
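Note the selection logic here: native builds get the new SQLite-backed `TaskHistoryLifeCycle`, while WASM builds (`IS_WASM`) fall back to `LegacyTaskHistoryLifeCycle`, which keeps the previous CSV-file implementation now living in `legacy-task-history.ts`.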
@ -5,7 +5,7 @@ import { writeFileSync } from 'fs';
import { TaskHasher } from '../hasher/task-hasher';
import runCommandsImpl from '../executors/run-commands/run-commands.impl';
import { ForkedProcessTaskRunner } from './forked-process-task-runner';
import { Cache } from './cache';
import { Cache, DbCache } from './cache';
import { DefaultTasksRunnerOptions } from './default-tasks-runner';
import { TaskStatus } from './tasks-runner';
import {
@ -31,9 +31,19 @@ import {
import { workspaceRoot } from '../utils/workspace-root';
import { output } from '../utils/output';
import { combineOptionsForExecutor } from '../utils/params';
import { isNxCloudUsed } from '../utils/nx-cloud-utils';
import { readNxJson } from '../config/nx-json';

export class TaskOrchestrator {
  private cache = new Cache(this.options);
  private cache =
    process.env.NX_DB_CACHE === 'true'
      ? new DbCache({
          // Remove this in Nx 21
          nxCloudRemoteCache: isNxCloudUsed(readNxJson())
            ? this.options.remoteCache
            : null,
        })
      : new Cache(this.options);
  private forkedProcessTaskRunner = new ForkedProcessTaskRunner(this.options);

  private tasksSchedule = new TasksSchedule(
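The db-backed cache is opt-in at this point: `TaskOrchestrator` only constructs a `DbCache` when `NX_DB_CACHE` is set to `'true'`, so existing workspaces keep the file-based `Cache` by default, and the `nxCloudRemoteCache` passthrough is explicitly marked for removal in Nx 21.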
@ -79,9 +79,14 @@ export function cacheDirectoryForWorkspace(workspaceRoot: string) {
  );
}

export const workspaceDataDirectory = absolutePath(
export const workspaceDataDirectory =
  workspaceDataDirectoryForWorkspace(workspaceRoot);

export function workspaceDataDirectoryForWorkspace(workspaceRoot: string) {
  return absolutePath(
    workspaceRoot,
    process.env.NX_WORKSPACE_DATA_DIRECTORY ??
      process.env.NX_PROJECT_GRAPH_CACHE_DIRECTORY ??
      defaultWorkspaceDataDirectory(workspaceRoot)
  );
}
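Extracting `workspaceDataDirectoryForWorkspace` lets callers that know a different workspace root (such as the daemon) resolve the same location, honoring `NX_WORKSPACE_DATA_DIRECTORY` and the older `NX_PROJECT_GRAPH_CACHE_DIRECTORY` override in that order.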
10
packages/nx/src/utils/db-connection.ts
Normal file
@ -0,0 +1,10 @@
import { connectToNxDb, ExternalObject } from '../native';
import { workspaceDataDirectory } from './cache-directory';
import { version as NX_VERSION } from '../../package.json';

let dbConnection: ExternalObject<any>;

export function getDbConnection(directory = workspaceDataDirectory) {
  dbConnection ??= connectToNxDb(directory, NX_VERSION);
  return dbConnection;
}
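`getDbConnection` memoizes a single connection per process via `??=`, so the first directory requested wins and the cache, task-details, and task-history tables all share one handle; the Nx version is passed along, presumably so the native side can discard a db created by a different version. The specs above use it like this, pointing it at a scratch directory:

const dbConnection = getDbConnection(join(__dirname, 'temp-db'));
const taskDetails = new TaskDetails(dbConnection);
const taskHistory = new NxTaskHistory(dbConnection);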
104
packages/nx/src/utils/legacy-task-history.ts
Normal file
@ -0,0 +1,104 @@
import { appendFileSync, existsSync, readFileSync, writeFileSync } from 'fs';
import { join } from 'path';
import { workspaceDataDirectory } from './cache-directory';

const taskRunKeys = [
  'project',
  'target',
  'configuration',
  'hash',
  'code',
  'status',
  'start',
  'end',
] as const;

export type TaskRun = Record<(typeof taskRunKeys)[number], string>;

let taskHistory: TaskRun[] | undefined = undefined;
let taskHashToIndicesMap: Map<string, number[]> = new Map();

export async function getHistoryForHashes(hashes: string[]): Promise<{
  [hash: string]: TaskRun[];
}> {
  if (taskHistory === undefined) {
    loadTaskHistoryFromDisk();
  }

  const result: { [hash: string]: TaskRun[] } = {};
  for (let hash of hashes) {
    const indices = taskHashToIndicesMap.get(hash);
    if (!indices) {
      result[hash] = [];
    } else {
      result[hash] = indices.map((index) => taskHistory[index]);
    }
  }

  return result;
}

export async function writeTaskRunsToHistory(
  taskRuns: TaskRun[]
): Promise<void> {
  if (taskHistory === undefined) {
    loadTaskHistoryFromDisk();
  }

  const serializedLines: string[] = [];
  for (let taskRun of taskRuns) {
    const serializedLine = taskRunKeys.map((key) => taskRun[key]).join(',');
    serializedLines.push(serializedLine);
    recordTaskRunInMemory(taskRun);
  }

  if (!existsSync(taskHistoryFile)) {
    writeFileSync(taskHistoryFile, `${taskRunKeys.join(',')}\n`);
  }
  appendFileSync(taskHistoryFile, serializedLines.join('\n') + '\n');
}

export const taskHistoryFile = join(workspaceDataDirectory, 'task-history.csv');

function loadTaskHistoryFromDisk() {
  taskHashToIndicesMap.clear();
  taskHistory = [];

  if (!existsSync(taskHistoryFile)) {
    return;
  }

  const fileContent = readFileSync(taskHistoryFile, 'utf8');
  if (!fileContent) {
    return;
  }
  const lines = fileContent.split('\n');

  // if there are no lines or just the header, return
  if (lines.length <= 1) {
    return;
  }

  const contentLines = lines.slice(1).filter((l) => l.trim() !== '');

  // read the values from csv format where each header is a key and the value is the value
  for (let line of contentLines) {
    const values = line.trim().split(',');

    const run: Partial<TaskRun> = {};
    taskRunKeys.forEach((header, index) => {
      run[header] = values[index];
    });

    recordTaskRunInMemory(run as TaskRun);
  }
}

function recordTaskRunInMemory(taskRun: TaskRun) {
  const index = taskHistory.push(taskRun) - 1;
  if (taskHashToIndicesMap.has(taskRun.hash)) {
    taskHashToIndicesMap.get(taskRun.hash).push(index);
  } else {
    taskHashToIndicesMap.set(taskRun.hash, [index]);
  }
}
@ -1,114 +1,22 @@
import { appendFileSync, existsSync, readFileSync, writeFileSync } from 'fs';
import { join } from 'path';
import { daemonClient } from '../daemon/client/client';
import { isOnDaemon } from '../daemon/is-on-daemon';
import { workspaceDataDirectory } from './cache-directory';
import { NxTaskHistory, TaskRun } from '../native';
import { getDbConnection } from './db-connection';

const taskRunKeys = [
  'project',
  'target',
  'configuration',
  'hash',
  'code',
  'status',
  'start',
  'end',
] as const;
export class TaskHistory {
  taskHistory = new NxTaskHistory(getDbConnection());

export type TaskRun = Record<(typeof taskRunKeys)[number], string>;

let taskHistory: TaskRun[] | undefined = undefined;
let taskHashToIndicesMap: Map<string, number[]> = new Map();

export async function getHistoryForHashes(hashes: string[]): Promise<{
  [hash: string]: TaskRun[];
}> {
  async getFlakyTasks(hashes: string[]) {
    if (isOnDaemon() || !daemonClient.enabled()) {
      if (taskHistory === undefined) {
        loadTaskHistoryFromDisk();
      return this.taskHistory.getFlakyTasks(hashes);
    }
    return await daemonClient.getFlakyTasks(hashes);
  }

  const result: { [hash: string]: TaskRun[] } = {};
  for (let hash of hashes) {
    const indices = taskHashToIndicesMap.get(hash);
    if (!indices) {
      result[hash] = [];
    } else {
      result[hash] = indices.map((index) => taskHistory[index]);
    }
  }

  return result;
}

  return await daemonClient.getTaskHistoryForHashes(hashes);
}

export async function writeTaskRunsToHistory(
  taskRuns: TaskRun[]
): Promise<void> {
  async recordTaskRuns(taskRuns: TaskRun[]) {
    if (isOnDaemon() || !daemonClient.enabled()) {
      if (taskHistory === undefined) {
        loadTaskHistoryFromDisk();
      return this.taskHistory.recordTaskRuns(taskRuns);
    }

  const serializedLines: string[] = [];
  for (let taskRun of taskRuns) {
    const serializedLine = taskRunKeys.map((key) => taskRun[key]).join(',');
    serializedLines.push(serializedLine);
    recordTaskRunInMemory(taskRun);
  }

  if (!existsSync(taskHistoryFile)) {
    writeFileSync(taskHistoryFile, `${taskRunKeys.join(',')}\n`);
  }
  appendFileSync(taskHistoryFile, serializedLines.join('\n') + '\n');
  } else {
    await daemonClient.writeTaskRunsToHistory(taskRuns);
  }
}

export const taskHistoryFile = join(workspaceDataDirectory, 'task-history.csv');

function loadTaskHistoryFromDisk() {
  taskHashToIndicesMap.clear();
  taskHistory = [];

  if (!existsSync(taskHistoryFile)) {
    return;
  }

  const fileContent = readFileSync(taskHistoryFile, 'utf8');
  if (!fileContent) {
    return;
  }
  const lines = fileContent.split('\n');

  // if there are no lines or just the header, return
  if (lines.length <= 1) {
    return;
  }

  const contentLines = lines.slice(1).filter((l) => l.trim() !== '');

  // read the values from csv format where each header is a key and the value is the value
  for (let line of contentLines) {
    const values = line.trim().split(',');

    const run: Partial<TaskRun> = {};
    taskRunKeys.forEach((header, index) => {
      run[header] = values[index];
    });

    recordTaskRunInMemory(run as TaskRun);
  }
}

function recordTaskRunInMemory(taskRun: TaskRun) {
  const index = taskHistory.push(taskRun) - 1;
  if (taskHashToIndicesMap.has(taskRun.hash)) {
    taskHashToIndicesMap.get(taskRun.hash).push(index);
  } else {
    taskHashToIndicesMap.set(taskRun.hash, [index]);
    return daemonClient.recordTaskRuns(taskRuns);
  }
}
@ -1,6 +1,6 @@
import type { NxWorkspaceFilesExternals, WorkspaceContext } from '../native';
import { performance } from 'perf_hooks';
import { cacheDirectoryForWorkspace } from './cache-directory';
import { workspaceDataDirectoryForWorkspace } from './cache-directory';
import { isOnDaemon } from '../daemon/is-on-daemon';
import { daemonClient } from '../daemon/client/client';

@ -12,7 +12,7 @@ export function setupWorkspaceContext(workspaceRoot: string) {
  performance.mark('workspace-context');
  workspaceContext = new WorkspaceContext(
    workspaceRoot,
    cacheDirectoryForWorkspace(workspaceRoot)
    workspaceDataDirectoryForWorkspace(workspaceRoot)
  );
  performance.mark('workspace-context:end');
  performance.measure(