```js
// Create marks at specific points
performance.mark('request-start');

// Process request
await handleRequest();

performance.mark('request-end');

// Measure duration between marks
performance.measure('request-duration', 'request-start', 'request-end');

// Get the measurement
const measurements = performance.getEntriesByName('request-duration');
console.log(`Request took ${measurements[0].duration}ms`);
```
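If you record measures throughout a request's lifecycle, polling `getEntriesByName` gets unwieldy. A `PerformanceObserver` can report each measure as it lands; a minimal sketch:

```js
import { PerformanceObserver } from 'node:perf_hooks';

// Report each measure as it is recorded
const obs = new PerformanceObserver((list) => {
  for (const entry of list.getEntries()) {
    console.log(`${entry.name}: ${entry.duration.toFixed(1)}ms`);
  }
  // Drop processed entries so the buffer does not grow unbounded
  performance.clearMeasures();
});
obs.observe({ entryTypes: ['measure'] });
```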
```js
import { Session } from 'node:inspector/promises';
import fs from 'node:fs';

const session = new Session();
session.connect();

// Start profiling
await session.post('Profiler.enable');
await session.post('Profiler.start');

// Run code to profile
await performExpensiveOperation();

// Stop and save profile
const { profile } = await session.post('Profiler.stop');
fs.writeFileSync('./profile.cpuprofile', JSON.stringify(profile));

session.disconnect();
```
Open .cpuprofile files in Chrome DevTools (Performance tab → Load Profile) for interactive analysis.
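For ad-hoc profiling without instrumenting code, the `--cpu-prof` flag (`node --cpu-prof app.js`) writes a `.cpuprofile` file to the working directory when the process exits.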
```js
import v8 from 'node:v8';

// Take snapshot at any point
const filename = v8.writeHeapSnapshot();
console.log(`Heap snapshot written to ${filename}`);

// With custom filename
v8.writeHeapSnapshot('./snapshots/heap-snapshot.heapsnapshot');
```
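For a long-running server, it often helps to trigger snapshots on demand rather than at fixed points in the code. A minimal sketch using a signal handler (the choice of SIGUSR2 is arbitrary):

```js
// Write a heap snapshot whenever the process receives SIGUSR2,
// so a live server can be inspected without a restart:
//   kill -USR2 <pid>
process.on('SIGUSR2', () => {
  const file = v8.writeHeapSnapshot();
  console.log(`Heap snapshot written to ${file}`);
});
```

Note that `writeHeapSnapshot` is synchronous, so expect the process to pause for a time proportional to heap size while the snapshot is written.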
```js
import fs from 'node:fs';

// Bad - blocks the event loop until the whole file is read
function processLargeFile() {
  const data = fs.readFileSync('large.txt');
  return parse(data);
}

// Good - non-blocking
async function processLargeFile() {
  const data = await fs.promises.readFile('large.txt');
  return parse(data);
}
```
Optimize JSON Operations
```js
// For large objects, consider streaming
import { pipeline } from 'node:stream/promises';
import JSONStream from 'JSONStream';
import fs from 'node:fs';

await pipeline(
  fs.createReadStream('large.json'),
  JSONStream.parse('items.*'),
  processItems
);
```
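`processItems` is not defined above. With `stream/promises` `pipeline`, the final stage can be an async function that receives the previous stage's output as an async iterable; a sketch under that assumption, where `saveItem` is a hypothetical per-item handler:

```js
// Assumed consumer: pipeline hands the parsed items over as an async iterable
async function processItems(source) {
  for await (const item of source) {
    await saveItem(item); // hypothetical per-item handler
  }
}
```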
Use Worker Threads for CPU-Intensive Tasks
```js
import { Worker } from 'node:worker_threads';

function runWorker(data) {
  return new Promise((resolve, reject) => {
    const worker = new Worker('./worker.js', { workerData: data });
    worker.on('message', resolve);
    worker.on('error', reject);
    // Reject if the worker exits before posting a result,
    // so the promise can never hang
    worker.on('exit', (code) => {
      if (code !== 0) reject(new Error(`Worker stopped with exit code ${code}`));
    });
  });
}

const result = await runWorker({ task: 'heavy-computation' });
```
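The snippet assumes a `./worker.js` file next to it. A minimal sketch of that file, where `heavyComputation` stands in for the actual CPU-bound work:

```js
// worker.js
import { parentPort, workerData } from 'node:worker_threads';

// Runs on the worker thread, so the main event loop stays responsive
const result = heavyComputation(workerData); // placeholder for the real CPU-bound work
parentPort.postMessage(result);
```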
Implement Caching
```js
const cache = new Map();

async function fetchData(key) {
  if (cache.has(key)) {
    return cache.get(key);
  }
  const data = await expensiveOperation(key);
  cache.set(key, data);
  return data;
}
```
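A bare `Map` grows without bound and never invalidates stale entries. A minimal sketch of time-based expiry (the TTL value and names are illustrative):

```js
const TTL_MS = 60_000; // illustrative: entries expire after one minute
const ttlCache = new Map(); // key -> { value, expires }

async function fetchDataWithTtl(key) {
  const hit = ttlCache.get(key);
  if (hit && hit.expires > Date.now()) {
    return hit.value;
  }
  const value = await expensiveOperation(key);
  ttlCache.set(key, { value, expires: Date.now() + TTL_MS });
  return value;
}
```

For bounded memory, an LRU eviction policy (e.g. the `lru-cache` package) is the usual next step.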
Use Streams for Large Data
```js
import { pipeline } from 'node:stream/promises';
import { createReadStream, createWriteStream } from 'node:fs';
import { createGzip } from 'node:zlib';

await pipeline(
  createReadStream('input.txt'),
  createGzip(),
  createWriteStream('input.txt.gz')
);
```
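Unlike manual `.pipe()` chains, `pipeline` destroys every stream in the chain on failure and rejects the returned promise, so a single `try/catch` covers the whole pipeline.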