fix(core): create new env variable for performance profile and add documentation (#8479)

parent 1d8db105d2
commit eabeebcb2d
@@ -1487,6 +1487,11 @@
       "id": "monorepo-nx-enterprise",
       "file": "shared/monorepo-nx-enterprise"
     },
+    {
+      "name": "Profiling Build Performance",
+      "id": "performance-profiling",
+      "file": "shared/guides/performance-profiling"
+    },
     {
       "name": "Using ESLint in Nx Workspaces",
       "id": "eslint",
BIN  docs/shared/guides/performance-profiling-devtools.png  Normal file
Binary file not shown. After Width: | Height: | Size: 75 KiB

29  docs/shared/guides/performance-profiling.md  Normal file
@@ -0,0 +1,29 @@
# Profiling Performance

When running an Nx command, many tasks run at different times and in different processes.
Visualizing where and when each task was executed can help you make the entire run faster.

## How to Profile the Performance of Running Tasks

1. Prepend `NX_PROFILE=profile.json` to the Nx command that runs your targets. For example:

```bash
# This builds the "cart" application and creates a profile.json file
NX_PROFILE=profile.json nx build cart

# This builds affected projects and creates a profile.json file
NX_PROFILE=profile.json nx affected --target build
```

2. Open the Performance tab in Chrome DevTools.
   
3. Click the upload button and open the `profile.json` file that was created, or drag the file into the window. (The sketch after this list shows roughly what the file contains.)
4. Expand each group to see the names of the tasks that were run.
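
If you are curious what DevTools is loading: the file written by `NX_PROFILE` is a JSON array of Chrome trace events, one `"ph": "X"` entry per task plus a few `"ph": "M"` thread-name entries, produced by the new `TaskProfilingLifeCycle`. A quick way to peek at it (the entry in the comment is illustrative, not exact output):

```bash
# Print the beginning of the generated profile; each task shows up roughly as
# { "name": "cart:build", "ph": "X", "ts": ..., "dur": ..., "pid": ..., "tid": ..., "args": { "target": ..., "status": ... } }
head -c 400 profile.json; echo
```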

## Optimizing the Performance of Running Tasks

Now that you have visualized how the tasks were run, you can try tweaking things to make the process faster. Generate a new profile after each tweak and compare the results.

1. Are there tasks that you did not expect to be necessary? Sometimes a command captures more tasks than you expect, and excluding them frees up workers for other tasks (see the command sketches after this list).
2. Try adding more workers. Did the new workers handle tasks while the other workers were busy? If so, the run will likely be faster.
3. Are many tasks waiting on a single task to complete? Splitting that project into smaller projects means fewer projects, and therefore fewer tasks, depend on a single project/task, which leaves more room to parallelize.
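
For example, here are two hypothetical re-runs to profile and compare against the original. Flag spellings vary between Nx versions, so treat these as sketches rather than exact commands:

```bash
# Re-profile while excluding a project you did not expect to be built
# ("my-app-e2e" is a placeholder project name)
NX_PROFILE=profile-excluded.json nx affected --target build --exclude my-app-e2e

# Re-profile with more parallel workers so independent tasks can run side by side
NX_PROFILE=profile-parallel.json nx affected --target build --parallel --maxParallel 5
```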
@@ -25,6 +25,7 @@ import { EmptyTerminalOutputLifeCycle } from './empty-terminal-output-life-cycle
 import { RunOneTerminalOutputLifeCycle } from './run-one-terminal-output-life-cycle';
 import { TaskTimingsLifeCycle } from './task-timings-life-cycle';
 import { createOutputRenderer } from './neo-output/render';
+import { TaskProfilingLifeCycle } from '@nrwl/workspace/src/tasks-runner/task-profiling-life-cycle';
 
 async function getTerminalOutputLifeCycle(
   initiatingProject: string,
@@ -117,7 +118,11 @@ export async function runCommand(
   const lifeCycles = [lifeCycle] as LifeCycle[];
 
   if (process.env.NX_PERF_LOGGING) {
-    lifeCycles.push(new TaskTimingsLifeCycle(process.env.NX_PERF_LOGGING));
+    lifeCycles.push(new TaskTimingsLifeCycle());
   }
+
+  if (process.env.NX_PROFILE) {
+    lifeCycles.push(new TaskProfilingLifeCycle(process.env.NX_PROFILE));
+  }
 
   const promiseOrObservable = tasksRunner(
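
The two environment variables are checked independently in `runCommand`, so console timings and the Chrome-trace profile can be captured in the same run. A usage sketch, assuming your Nx version accepts these `run-many` flags:

```bash
# Print per-task timings to the terminal and also write a Chrome-trace profile
NX_PERF_LOGGING=true NX_PROFILE=profile.json nx run-many --target build --all
```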
@@ -0,0 +1,87 @@
import { LifeCycle, TaskMetadata } from './life-cycle';
import { Task, writeJsonFile } from '@nrwl/devkit';
import { TaskStatus } from './tasks-runner';

import { performance } from 'perf_hooks';
import { join } from 'path';

export class TaskProfilingLifeCycle implements LifeCycle {
  private timings: {
    [target: string]: {
      perfStart: number;
      perfEnd?: number;
    };
  } = {};
  private profile = [];
  private readonly profileFile: string;
  private registeredGroups = new Set();

  constructor(_profileFile: string) {
    this.profileFile = join(process.cwd(), _profileFile);
  }

  startTasks(tasks: Task[], { groupId }: TaskMetadata): void {
    if (this.profileFile && !this.registeredGroups.has(groupId)) {
      this.registerGroup(groupId);
    }
    for (let t of tasks) {
      this.timings[`${t.target.project}:${t.target.target}`] = {
        perfStart: performance.now(),
      };
    }
  }

  endTasks(
    taskResults: Array<{ task: Task; status: TaskStatus; code: number }>,
    metadata: TaskMetadata
  ): void {
    for (let tr of taskResults) {
      this.timings[
        `${tr.task.target.project}:${tr.task.target.target}`
      ].perfEnd = performance.now();
    }
    this.recordTaskCompletions(taskResults, metadata);
  }

  endCommand(): void {
    writeJsonFile(this.profileFile, this.profile);
    console.log(`Performance Profile: ${this.profileFile}`);
  }

  private recordTaskCompletions(
    tasks: Array<{ task: Task; status: TaskStatus }>,
    { groupId }: TaskMetadata
  ) {
    for (const { task, status } of tasks) {
      const { perfStart, perfEnd } =
        this.timings[`${task.target.project}:${task.target.target}`];
      this.profile.push({
        name: task.id,
        cat: Object.values(task.target).join(','),
        ph: 'X',
        ts: perfStart * 1000,
        dur: (perfEnd - perfStart) * 1000,
        pid: process.pid,
        tid: groupId,
        args: {
          target: task.target,
          status,
        },
      });
    }
  }

  private registerGroup(groupId: number) {
    this.profile.push({
      name: 'thread_name',
      ph: 'M',
      pid: process.pid,
      tid: groupId,
      ts: 0,
      args: {
        name: 'Group #' + (groupId + 1),
      },
    });
    this.registeredGroups.add(groupId);
  }
}
@@ -1,59 +1,30 @@
-import { LifeCycle, TaskMetadata } from './life-cycle';
+import { LifeCycle } from './life-cycle';
 import { Task } from '@nrwl/devkit';
 import { TaskStatus } from './tasks-runner';
-
-import { writeFileSync } from 'fs';
-import { performance } from 'perf_hooks';
-import { join } from 'path';
 
 export class TaskTimingsLifeCycle implements LifeCycle {
   private timings: {
     [target: string]: {
       start: number;
       end: number;
-      perfStart: number;
-      perfEnd?: number;
     };
   } = {};
-  private profile = [];
-  private readonly profileFile: string;
-  private registeredGroups = new Set();
 
-  constructor(private perfLoggingEnvValue: string) {
-    if (
-      typeof perfLoggingEnvValue === 'string' &&
-      perfLoggingEnvValue !== 'true'
-    ) {
-      this.profileFile = join(process.cwd(), perfLoggingEnvValue);
-    }
-  }
-
-  startTasks(tasks: Task[], { groupId }: TaskMetadata): void {
-    if (this.profileFile && !this.registeredGroups.has(groupId)) {
-      this.registerGroup(groupId);
-    }
+  startTasks(tasks: Task[]): void {
     for (let t of tasks) {
       this.timings[`${t.target.project}:${t.target.target}`] = {
         start: new Date().getTime(),
         end: undefined,
-        perfStart: performance.now(),
       };
     }
   }
 
   endTasks(
-    taskResults: Array<{ task: Task; status: TaskStatus; code: number }>,
-    metadata: TaskMetadata
+    taskResults: Array<{ task: Task; status: TaskStatus; code: number }>
   ): void {
     for (let tr of taskResults) {
       this.timings[`${tr.task.target.project}:${tr.task.target.target}`].end =
         new Date().getTime();
-      this.timings[
-        `${tr.task.target.project}:${tr.task.target.target}`
-      ].perfEnd = performance.now();
     }
-    if (this.profileFile) {
-      this.recordTaskCompletions(taskResults, metadata);
-    }
   }
 
@@ -65,46 +36,5 @@ export class TaskTimingsLifeCycle {
       timings[p] = t.end ? t.end - t.start : null;
     });
     console.log(JSON.stringify(timings, null, 2));
-    if (this.profileFile) {
-      writeFileSync('profile.json', JSON.stringify(this.profile));
-      console.log(`Performance Profile: ${this.profileFile}`);
-    }
   }
-
-  private recordTaskCompletions(
-    tasks: Array<{ task: Task; status: TaskStatus }>,
-    { groupId }: TaskMetadata
-  ) {
-    for (const { task, status } of tasks) {
-      const { perfStart, perfEnd } =
-        this.timings[`${task.target.project}:${task.target.target}`];
-      this.profile.push({
-        name: task.id,
-        cat: Object.values(task.target).join(','),
-        ph: 'X',
-        ts: perfStart * 1000,
-        dur: (perfEnd - perfStart) * 1000,
-        pid: process.pid,
-        tid: groupId,
-        args: {
-          target: task.target,
-          status,
-        },
-      });
-    }
-  }
-
-  private registerGroup(groupId: number) {
-    this.profile.push({
-      name: 'thread_name',
-      ph: 'M',
-      pid: process.pid,
-      tid: groupId,
-      ts: 0,
-      args: {
-        name: 'Group #' + (groupId + 1),
-      },
-    });
-    this.registeredGroups.add(groupId);
-  }
 }