feat(core): add daemon to the context provided to task runners
This commit is contained in: commit 1a31018ea2 (parent 34fdbc0de2)
@@ -942,6 +942,7 @@ stored in the daemon process. To reset both run: `nx reset`.
 | `tasks` | [`Task`](../../devkit/index#task)[] |
 | `options` | [`DefaultTasksRunnerOptions`](../../devkit/index#defaulttasksrunneroptions) |
 | `context?` | `Object` |
+| `context.daemon?` | `DaemonClient` |
 | `context.hasher?` | [`Hasher`](../../devkit/index#hasher) |
 | `context.initiatingProject?` | `string` |
 | `context.nxArgs` | `NxArgs` |
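With `context.daemon` documented above, a custom tasks runner can reach the daemon directly. Below is a minimal sketch of such a runner; the runner itself, its logging, and the deep `nx/src/...` import path are assumptions for illustration, while `daemon`, `enabled()`, and `getProjectGraph()` are the APIs this commit adds.

```ts
import type { TasksRunner, TaskStatus } from 'nx/src/tasks-runner/tasks-runner';

// Hypothetical pass-through runner that only inspects the new `context.daemon` field.
export const inspectDaemonRunner: TasksRunner = async (tasks, _options, context) => {
  const statuses: { [id: string]: TaskStatus } = {};

  const daemon = context?.daemon;
  if (daemon && daemon.enabled()) {
    // When the daemon is allowed to run, the project graph is served by it.
    const graph = await daemon.getProjectGraph();
    console.log(`daemon-backed graph has ${Object.keys(graph.nodes).length} projects`);
  }

  for (const task of tasks) {
    // A real runner would execute the task here; this sketch only records it.
    console.log(`would run ${task.id}`);
    statuses[task.id] = 'success';
  }
  return statuses;
};
```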
File diff suppressed because one or more lines are too long
@@ -63,7 +63,7 @@
     "@nrwl/eslint-plugin-nx": "14.6.0-beta.3",
     "@nrwl/jest": "14.6.0-beta.3",
     "@nrwl/next": "14.6.0-beta.3",
-    "@nrwl/nx-cloud": "14.4.1",
+    "@nrwl/nx-cloud": "14.4.2-beta.2",
     "@nrwl/react": "14.6.0-beta.3",
     "@nrwl/web": "14.6.0-beta.3",
     "@parcel/watcher": "2.0.4",
@@ -1,6 +1,6 @@
 import { workspaceRoot } from '../../utils/workspace-root';
 import { ChildProcess, spawn, spawnSync } from 'child_process';
-import { openSync, readFileSync } from 'fs';
+import { openSync, readFileSync, statSync } from 'fs';
 import { ensureDirSync, ensureFileSync } from 'fs-extra';
 import { connect } from 'net';
 import { join } from 'path';
@@ -14,8 +14,12 @@ import { FULL_OS_SOCKET_PATH, killSocketOrPath } from '../socket-utils';
 import {
   DAEMON_DIR_FOR_CURRENT_WORKSPACE,
   DAEMON_OUTPUT_LOG_FILE,
+  isDaemonDisabled,
 } from '../tmp-dir';
 import { ProjectGraph } from '../../config/project-graph';
+import { isCI } from '../../utils/is-ci';
+import { readNxJson } from '../../config/configuration';
+import { NxJsonConfiguration } from 'nx/src/config/nx-json';
 
 const DAEMON_ENV_SETTINGS = {
   ...process.env,
@@ -23,6 +27,64 @@ const DAEMON_ENV_SETTINGS = {
   NX_CACHE_WORKSPACE_CONFIG: 'false',
 };
 
+export class DaemonClient {
+  constructor(private readonly nxJson: NxJsonConfiguration) {}
+
+  enabled() {
+    const useDaemonProcessOption =
+      this.nxJson.tasksRunnerOptions?.['default']?.options?.useDaemonProcess;
+    const env = process.env.NX_DAEMON;
+
+    // env takes precedence
+    // option=true,env=false => no daemon
+    // option=false,env=undefined => no daemon
+    // option=false,env=false => no daemon
+
+    // option=undefined,env=undefined => daemon
+    // option=true,env=true => daemon
+    // option=false,env=true => daemon
+    if (
+      isCI() ||
+      isDocker() ||
+      isDaemonDisabled() ||
+      (useDaemonProcessOption === undefined && env === 'false') ||
+      (useDaemonProcessOption === true && env === 'false') ||
+      (useDaemonProcessOption === false && env === undefined) ||
+      (useDaemonProcessOption === false && env === 'false')
+    ) {
+      return false;
+    }
+    return true;
+  }
+
+  async getProjectGraph(): Promise<ProjectGraph> {
+    if (!(await isServerAvailable())) {
+      await startInBackground();
+    }
+    return sendMessageToDaemon({ type: 'REQUEST_PROJECT_GRAPH' });
+  }
+
+  async processInBackground(requirePath: string, data: any): Promise<any> {
+    if (!(await isServerAvailable())) {
+      await startInBackground();
+    }
+    return sendMessageToDaemon({
+      type: 'PROCESS_IN_BACKGROUND',
+      requirePath,
+      data,
+    });
+  }
+}
+
+function isDocker() {
+  try {
+    statSync('/.dockerenv');
+    return true;
+  } catch {
+    return false;
+  }
+}
+
 export async function startInBackground(): Promise<ChildProcess['pid']> {
   await safelyCleanUpExistingProcess();
   ensureDirSync(DAEMON_DIR_FOR_CURRENT_WORKSPACE);
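Taken together, the class above gives callers a small surface for talking to the daemon. A minimal sketch of driving it directly follows; the script and the deep import paths are assumptions, while `DaemonClient`, `enabled()`, and `getProjectGraph()` are the additions in this hunk.

```ts
// Hypothetical driver script; only DaemonClient and its methods come from this commit.
import { readNxJson } from 'nx/src/config/configuration';
import { DaemonClient } from 'nx/src/daemon/client/client';

async function main() {
  const daemon = new DaemonClient(readNxJson());

  // enabled() returns false on CI, inside Docker, when the daemon has been
  // marked as disabled, or when NX_DAEMON / useDaemonProcess opt out
  // (the environment variable wins over the nx.json option).
  if (!daemon.enabled()) {
    console.log('daemon is disabled in this environment');
    return;
  }

  // Starts the daemon in the background if it is not already running,
  // then requests the project graph over the daemon socket.
  const graph = await daemon.getProjectGraph();
  console.log(`graph has ${Object.keys(graph.nodes).length} projects`);
}

main();
```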
@@ -136,19 +198,13 @@ export async function isServerAvailable(): Promise<boolean> {
   });
 }
 
-/**
- * Establishes a client connection to the daemon server for use in project graph
- * creation utilities.
- *
- * All logs are performed by the devkit logger because this logic does not
- * run "on the server" per se and therefore does not write to its log output.
- *
- * TODO: Gracefully handle a server shutdown (for whatever reason) while a client
- * is connecting and querying it.
- */
-export async function getProjectGraphFromServer(): Promise<ProjectGraph> {
+async function sendMessageToDaemon(message: {
+  type: string;
+  requirePath?: string;
+  data?: any;
+}): Promise<any> {
   return new Promise((resolve, reject) => {
-    performance.mark('getProjectGraphFromServer-start');
+    performance.mark('sendMessageToDaemon-start');
     const socket = connect(FULL_OS_SOCKET_PATH);
 
     socket.on('error', (err) => {
@@ -156,7 +212,7 @@ export async function getProjectGraphFromServer(): Promise<ProjectGraph> {
         return reject(err);
       }
       if (err.message.startsWith('LOCK-FILES-CHANGED')) {
-        return getProjectGraphFromServer().then(resolve, reject);
+        return sendMessageToDaemon(message).then(resolve, reject);
       }
       let error: any;
       if (err.message.startsWith('connect ENOENT')) {
@@ -174,54 +230,47 @@
       return reject(error || err);
     });
 
-    /**
-     * Immediately after connecting to the server we send it the known project graph creation
-     * request payload. See the notes above createServer() for more context as to why we explicitly
-     * request the graph from the client like this.
-     */
     socket.on('connect', () => {
-      socket.write('REQUEST_PROJECT_GRAPH_PAYLOAD');
+      socket.write(JSON.stringify(message));
 
-      let serializedProjectGraphResult = '';
+      let serializedResult = '';
       socket.on('data', (data) => {
-        serializedProjectGraphResult += data.toString();
+        serializedResult += data.toString();
      });
 
      socket.on('end', () => {
        try {
          performance.mark('json-parse-start');
-          const projectGraphResult = JSON.parse(serializedProjectGraphResult);
+          const parsedResult = JSON.parse(serializedResult);
          performance.mark('json-parse-end');
          performance.measure(
-            'deserialize graph result on the client',
+            'deserialize daemon response',
            'json-parse-start',
            'json-parse-end'
          );
-          if (projectGraphResult.error) {
-            reject(projectGraphResult.error);
+          if (parsedResult.error) {
+            reject(parsedResult.error);
          } else {
            performance.measure(
-              'total for getProjectGraphFromServer()',
-              'getProjectGraphFromServer-start',
+              'total for sendMessageToDaemon()',
+              'sendMessageToDaemon-start',
              'json-parse-end'
            );
-            return resolve(projectGraphResult.projectGraph);
+            return resolve(parsedResult.projectGraph);
          }
        } catch (e) {
-          const endOfGraph =
-            serializedProjectGraphResult.length > 300
-              ? serializedProjectGraphResult.substring(
-                  serializedProjectGraphResult.length - 300
-                )
-              : serializedProjectGraphResult;
+          const endOfResponse =
+            serializedResult.length > 300
+              ? serializedResult.substring(serializedResult.length - 300)
+              : serializedResult;
          reject(
            daemonProcessException(
              [
-                'Could not deserialize project graph.',
+                'Could not deserialize response from Nx deamon.',
                `Message: ${e.message}`,
                '\n',
                `Received:`,
-                endOfGraph,
+                endOfResponse,
                '\n',
              ].join('\n')
            )
@@ -0,0 +1,27 @@
+import { respondWithErrorAndExit } from './shutdown-utils';
+
+export async function handleProcessInBackground(
+  socket,
+  payload: { type: string; requirePath: string; data: any }
+) {
+  let fn;
+  try {
+    fn = require(payload.requirePath);
+  } catch (e) {
+    await respondWithErrorAndExit(
+      socket,
+      `Unable to require ${payload.requirePath}`,
+      new Error(`Unable to require ${payload.requirePath}`)
+    );
+  }
+
+  try {
+    await fn(socket, payload.data);
+  } catch (e) {
+    await respondWithErrorAndExit(
+      socket,
+      `Error when processing ${payload.type}.`,
+      new Error(`Error when processing ${payload.type}. Message: ${e.message}`)
+    );
+  }
+}
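`handleProcessInBackground` calls `require(payload.requirePath)` and then invokes the export as `fn(socket, payload.data)`, so the module it loads has to export a single function. A hypothetical module that satisfies that contract (the file name and its body are illustrative only):

```ts
// record-task-stats.ts (hypothetical): a module the daemon could load via PROCESS_IN_BACKGROUND.
import { Socket } from 'net';

async function recordTaskStats(socket: Socket, data: unknown) {
  // Long-running work happens here in the daemon process, off the critical
  // path of the invoking Nx command.
  console.log(`daemon received: ${JSON.stringify(data)}`);

  // Reply and close the connection so the client knows the response is complete.
  socket.write(JSON.stringify({ response: 'done' }), () => socket.end());
}

// Compiled to CommonJS, `export =` makes require(requirePath) return the
// function itself, matching how the handler calls `await fn(socket, payload.data)`.
export = recordTaskStats;
```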
@@ -0,0 +1,63 @@
+import { performance } from 'perf_hooks';
+import { serializeResult } from '../socket-utils';
+import { serverLogger } from './logger';
+import { getCachedSerializedProjectGraphPromise } from './project-graph-incremental-recomputation';
+import { respondWithErrorAndExit } from './shutdown-utils';
+
+export async function handleRequestProjectGraph(socket) {
+  performance.mark('server-connection');
+  serverLogger.requestLog('Client Request for Project Graph Received');
+
+  const result = await getCachedSerializedProjectGraphPromise();
+  if (result.error) {
+    await respondWithErrorAndExit(
+      socket,
+      `Error when preparing serialized project graph.`,
+      result.error
+    );
+  }
+
+  const serializedResult = serializeResult(
+    result.error,
+    result.serializedProjectGraph
+  );
+  if (!serializedResult) {
+    await respondWithErrorAndExit(
+      socket,
+      `Error when serializing project graph result.`,
+      new Error(
+        'Critical error when serializing server result, check server logs'
+      )
+    );
+  }
+
+  performance.mark('serialized-project-graph-ready');
+  performance.measure(
+    'total for creating and serializing project graph',
+    'server-connection',
+    'serialized-project-graph-ready'
+  );
+
+  socket.write(serializedResult, () => {
+    performance.mark('serialized-project-graph-written-to-client');
+    performance.measure(
+      'write project graph to socket',
+      'serialized-project-graph-ready',
+      'serialized-project-graph-written-to-client'
+    );
+    // Close the connection once all data has been written so that the client knows when to read it.
+    socket.end();
+    performance.measure(
+      'total for server response',
+      'server-connection',
+      'serialized-project-graph-written-to-client'
+    );
+    const bytesWritten = Buffer.byteLength(
+      result.serializedProjectGraph,
+      'utf-8'
+    );
+    serverLogger.requestLog(
+      `Closed Connection to Client (${bytesWritten} bytes transferred)`
+    );
+  });
+}
@@ -1,7 +1,7 @@
 import { workspaceRoot } from '../../utils/workspace-root';
 import { createServer, Server, Socket } from 'net';
 import { join } from 'path';
-import { performance, PerformanceObserver } from 'perf_hooks';
+import { PerformanceObserver } from 'perf_hooks';
 import {
   FULL_OS_SOCKET_PATH,
   isWindows,
@@ -12,6 +12,7 @@ import { serverLogger } from './logger';
 import {
   handleServerProcessTermination,
   resetInactivityTimeout,
+  respondWithErrorAndExit,
   SERVER_INACTIVITY_TIMEOUT_MS,
 } from './shutdown-utils';
 import {
@@ -20,48 +21,17 @@
   SubscribeToWorkspaceChangesCallback,
   WatcherSubscription,
 } from './watcher';
-import {
-  addUpdatedAndDeletedFiles,
-  getCachedSerializedProjectGraphPromise,
-} from './project-graph-incremental-recomputation';
+import { addUpdatedAndDeletedFiles } from './project-graph-incremental-recomputation';
 import { existsSync, statSync } from 'fs';
 import { HashingImpl } from '../../hasher/hashing-impl';
 import { defaultFileHasher } from '../../hasher/file-hasher';
-
-function respondToClient(socket: Socket, message: string) {
-  return new Promise((res) => {
-    socket.write(message, () => {
-      // Close the connection once all data has been written so that the client knows when to read it.
-      socket.end();
-      serverLogger.log(`Closed Connection to Client`);
-      res(null);
-    });
-  });
-}
+import { handleRequestProjectGraph } from './handle-request-project-graph';
+import { handleProcessInBackground } from './handle-process-in-background';
 
 let watcherSubscription: WatcherSubscription | undefined;
 let performanceObserver: PerformanceObserver | undefined;
 let watcherError: Error | undefined;
 
-async function respondWithErrorAndExit(
-  socket: Socket,
-  description: string,
-  error: Error
-) {
-  // print some extra stuff in the error message
-  serverLogger.requestLog(
-    `Responding to the client with an error.`,
-    description,
-    error.message
-  );
-  console.error(error);
-
-  error.message = `${error.message}\n\nBecause of the error the Nx daemon process has exited. The next Nx command is going to restart the daemon process.\nIf the error persists, please run "nx reset".`;
-
-  await respondToClient(socket, serializeResult(error, null));
-  process.exit(1);
-}
-
 const server = createServer(async (socket) => {
   resetInactivityTimeout(handleInactivityTimeout);
   if (!performanceObserver) {
@@ -90,70 +60,33 @@ const server = createServer(async (socket) => {
 
     resetInactivityTimeout(handleInactivityTimeout);
 
-    const payload = data.toString();
-    if (payload !== 'REQUEST_PROJECT_GRAPH_PAYLOAD') {
+    const unparsedPayload = data.toString();
+    let payload;
+    try {
+      payload = JSON.parse(unparsedPayload);
+    } catch (e) {
       await respondWithErrorAndExit(
         socket,
         `Invalid payload from the client`,
-        new Error(`Unsupported payload sent to daemon server: ${payload}`)
+        new Error(
+          `Unsupported payload sent to daemon server: ${unparsedPayload}`
+        )
       );
     }
 
-    performance.mark('server-connection');
-    serverLogger.requestLog('Client Request for Project Graph Received');
-
-    const result = await getCachedSerializedProjectGraphPromise();
-    if (result.error) {
-      await respondWithErrorAndExit(
-        socket,
-        `Error when preparing serialized project graph.`,
-        result.error
-      );
-    }
-
-    const serializedResult = serializeResult(
-      result.error,
-      result.serializedProjectGraph
-    );
-    if (!serializedResult) {
-      await respondWithErrorAndExit(
-        socket,
-        `Error when serializing project graph result.`,
-        new Error(
-          'Critical error when serializing server result, check server logs'
-        )
-      );
-    }
-
-    performance.mark('serialized-project-graph-ready');
-    performance.measure(
-      'total for creating and serializing project graph',
-      'server-connection',
-      'serialized-project-graph-ready'
-    );
-    socket.write(serializedResult, () => {
-      performance.mark('serialized-project-graph-written-to-client');
-      performance.measure(
-        'write project graph to socket',
-        'serialized-project-graph-ready',
-        'serialized-project-graph-written-to-client'
-      );
-      // Close the connection once all data has been written so that the client knows when to read it.
-      socket.end();
-      performance.measure(
-        'total for server response',
-        'server-connection',
-        'serialized-project-graph-written-to-client'
-      );
-      const bytesWritten = Buffer.byteLength(
-        result.serializedProjectGraph,
-        'utf-8'
-      );
-      serverLogger.requestLog(
-        `Closed Connection to Client (${bytesWritten} bytes transferred)`
-      );
-    });
+    if (payload.type === 'REQUEST_PROJECT_GRAPH') {
+      await handleRequestProjectGraph(socket);
+    } else if (payload.type === 'PROCESS_IN_BACKGROUND') {
+      await handleProcessInBackground(socket, payload);
+    } else {
+      await respondWithErrorAndExit(
+        socket,
+        `Invalid payload from the client`,
+        new Error(
+          `Unsupported payload sent to daemon server: ${unparsedPayload}`
+        )
+      );
+    }
   });
 });
 
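With this dispatch in place, the client writes one JSON message per connection and the server branches on its `type`. For reference, the two request shapes look roughly like this (the values are placeholders, not from the commit):

```ts
// Shapes of the two requests the daemon now understands.
const requestProjectGraph = { type: 'REQUEST_PROJECT_GRAPH' };

const processInBackground = {
  type: 'PROCESS_IN_BACKGROUND',
  requirePath: '/tmp/example-background-module.js', // hypothetical path
  data: { anything: 'JSON-serializable' },
};

// The client serializes a message and writes it to the daemon socket, e.g.:
// socket.write(JSON.stringify(processInBackground));
```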
@@ -1,7 +1,8 @@
 import { workspaceRoot } from '../../utils/workspace-root';
-import type { Server } from 'net';
+import type { Server, Socket } from 'net';
 import { serverLogger } from './logger';
 import type { WatcherSubscription } from './watcher';
+import { serializeResult } from 'nx/src/daemon/socket-utils';
 
 export const SERVER_INACTIVITY_TIMEOUT_MS = 10800000 as const; // 10800000 ms = 3 hours
 
@@ -38,3 +39,33 @@ export function resetInactivityTimeout(cb: () => void): void {
   }
   serverInactivityTimerId = setTimeout(cb, SERVER_INACTIVITY_TIMEOUT_MS);
 }
+
+function respondToClient(socket: Socket, message: string) {
+  return new Promise((res) => {
+    socket.write(message, () => {
+      // Close the connection once all data has been written so that the client knows when to read it.
+      socket.end();
+      serverLogger.log(`Closed Connection to Client`);
+      res(null);
+    });
+  });
+}
+
+export async function respondWithErrorAndExit(
+  socket: Socket,
+  description: string,
+  error: Error
+) {
+  // print some extra stuff in the error message
+  serverLogger.requestLog(
+    `Responding to the client with an error.`,
+    description,
+    error.message
+  );
+  console.error(error);
+
+  error.message = `${error.message}\n\nBecause of the error the Nx daemon process has exited. The next Nx command is going to restart the daemon process.\nIf the error persists, please run "nx reset".`;
+
+  await respondToClient(socket, serializeResult(error, null));
+  process.exit(1);
+}
@@ -2,14 +2,8 @@ import { ProjectGraphCache, readCache } from './nx-deps-cache';
 import { buildProjectGraph } from './build-project-graph';
 import { workspaceFileName } from './file-utils';
 import { output } from '../utils/output';
-import { isCI } from '../utils/is-ci';
 import { defaultFileHasher } from '../hasher/file-hasher';
-import {
-  isDaemonDisabled,
-  markDaemonAsDisabled,
-  writeDaemonLogs,
-} from '../daemon/tmp-dir';
-import { statSync } from 'fs';
+import { markDaemonAsDisabled, writeDaemonLogs } from '../daemon/tmp-dir';
 import { ProjectGraph, ProjectGraphV4 } from '../config/project-graph';
 import { stripIndents } from '../utils/strip-indents';
 import { readNxJson } from '../config/configuration';
@@ -17,6 +11,7 @@
   ProjectConfiguration,
   ProjectsConfigurations,
 } from '../config/workspace-json-project-json';
+import { DaemonClient } from '../daemon/client/client';
 
 /**
  * Synchronously reads the latest cached copy of the workspace's ProjectGraph.
@@ -108,35 +103,12 @@ async function buildProjectGraphWithoutDaemon() {
  */
 export async function createProjectGraphAsync(): Promise<ProjectGraph> {
   const nxJson = readNxJson();
-  const useDaemonProcessOption =
-    nxJson.tasksRunnerOptions?.['default']?.options?.useDaemonProcess;
-  const env = process.env.NX_DAEMON;
-
-  // env takes precedence
-  // option=true,env=false => no daemon
-  // option=false,env=undefined => no daemon
-  // option=false,env=false => no daemon
-
-  // option=undefined,env=undefined => daemon
-  // option=true,env=true => daemon
-  // option=false,env=true => daemon
-  if (
-    isCI() ||
-    isDocker() ||
-    isDaemonDisabled() ||
-    (useDaemonProcessOption === undefined && env === 'false') ||
-    (useDaemonProcessOption === true && env === 'false') ||
-    (useDaemonProcessOption === false && env === undefined) ||
-    (useDaemonProcessOption === false && env === 'false')
-  ) {
+  const daemon = new DaemonClient(nxJson);
+  if (!daemon.enabled()) {
     return await buildProjectGraphWithoutDaemon();
   } else {
     try {
-      const daemonClient = require('../daemon/client/client');
-      if (!(await daemonClient.isServerAvailable())) {
-        await daemonClient.startInBackground();
-      }
-      return daemonClient.getProjectGraphFromServer();
+      return daemon.getProjectGraph();
     } catch (e) {
       if (e.message.indexOf('inotify_add_watch') > -1) {
         // common errors with the daemon due to OS settings (cannot watch all the files available)
@@ -164,23 +136,6 @@ export async function createProjectGraphAsync(): Promise<ProjectGraph> {
   }
 }
 
-function isDocker() {
-  try {
-    statSync('/.dockerenv');
-    return true;
-  } catch {
-    return false;
-  }
-}
-
-function printErrorMessage(e: any) {
-  const lines = e.message.split('\n');
-  output.error({
-    title: lines[0],
-    bodyLines: lines.slice(1),
-  });
-}
-
 /**
  * Backwards compatibility adapter for project graph
  * @param {string} sourceVersion
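Every caller of `createProjectGraphAsync()` now goes through `DaemonClient.enabled()` and, when the daemon is allowed, receives the graph from the daemon process. A minimal sketch of such a caller, assuming the deep `nx/src/...` import path resolves:

```ts
import { createProjectGraphAsync } from 'nx/src/project-graph/project-graph';

async function main() {
  // Falls back to building the graph in-process when the daemon is disabled
  // (CI, Docker, NX_DAEMON=false, or useDaemonProcess=false).
  const graph = await createProjectGraphAsync();
  console.log(`workspace has ${Object.keys(graph.nodes).length} projects`);
}

main();
```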
@@ -27,6 +27,7 @@ import { handleErrors } from '../utils/params';
 import { Workspaces } from 'nx/src/config/workspaces';
 import { Hasher } from 'nx/src/hasher/hasher';
 import { hashDependsOnOtherTasks, hashTask } from 'nx/src/hasher/hash-task';
+import { DaemonClient } from '../daemon/client/client';
 
 async function getTerminalOutputLifeCycle(
   initiatingProject: string,
@@ -192,6 +193,7 @@ export async function runCommand(
       nxArgs,
       taskGraph,
       hasher,
+      daemon: new DaemonClient(nxJson),
     }
   );
   let anyFailures;
@@ -3,6 +3,7 @@ import { ProjectGraph } from '../config/project-graph';
 import { Task, TaskGraph } from '../config/task-graph';
 import { NxArgs } from '../utils/command-line-utils';
 import { Hasher } from '../hasher/hasher';
+import { DaemonClient } from '../daemon/client/client';
 
 export type TaskStatus =
   | 'success'
@@ -27,5 +28,6 @@ export type TasksRunner<T = unknown> = (
     nxArgs: NxArgs;
     taskGraph?: TaskGraph;
     hasher?: Hasher;
+    daemon?: DaemonClient;
   }
 ) => any | Promise<{ [id: string]: TaskStatus }>;
@@ -3738,10 +3738,10 @@
     url-loader "^4.1.1"
     webpack-merge "^5.8.0"
 
-"@nrwl/nx-cloud@14.4.1":
-  version "14.4.1"
-  resolved "https://registry.yarnpkg.com/@nrwl/nx-cloud/-/nx-cloud-14.4.1.tgz#a4d8e9ce6e5bbb753916adadaf0e23ca24b54823"
-  integrity sha512-vlWpBmIGfYvB9XMAdDZWOihOTFPE2VV9CDeZzBbSMF32KxDqUkhfaLf3dg6puIeUPkPbj5k+V57xjAl7g9k+Xw==
+"@nrwl/nx-cloud@14.4.2-beta.2":
+  version "14.4.2-beta.2"
+  resolved "https://registry.yarnpkg.com/@nrwl/nx-cloud/-/nx-cloud-14.4.2-beta.2.tgz#9e913c9fc182827492aa6960bda5066ff50c700a"
+  integrity sha512-sJcxDFGqAcehLaE5DeWokuAKZmSAOI+b1k2w+9QgXNYVQD7dbS+rP0qo+32W+PYMElDWfwizOszKiaCNF7dHDw==
   dependencies:
     axios "^0.21.1"
     chalk "4.1.0"