Worker Threads & Child Processes
Node.js runs your JavaScript on a single thread, but that doesn’t mean you’re limited to one CPU core. Worker Threads and Child Processes let you leverage multi-core systems for CPU-intensive operations.
The Problem: Blocking the Event Loop
// ❌ This blocks ALL requests while computing
app.get('/compute', (req, res) => {
let result = 0;
for (let i = 0; i < 1e10; i++) {
result += Math.sqrt(i);
}
res.json({ result });
});
// Other requests wait until computation is done!
app.get('/health', (req, res) => {
res.json({ status: 'ok' }); // Blocked!
});
CPU-intensive operations block the event loop, making your server unresponsive to ALL requests until the work finishes. This is one of the most common performance killers in Node.js applications.
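If you suspect this is happening in production, the built-in perf_hooks module can measure event-loop delay directly. A minimal sketch (the threshold and logging interval are illustrative):
// monitorLoop.js — warn when the event loop is being starved
const { monitorEventLoopDelay } = require('perf_hooks');

const histogram = monitorEventLoopDelay({ resolution: 20 });
histogram.enable();

setInterval(() => {
  const meanMs = histogram.mean / 1e6; // the histogram reports nanoseconds
  if (meanMs > 100) {
    console.warn(`Event loop delay is high: ${meanMs.toFixed(1)}ms`);
  }
  histogram.reset();
}, 5000);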
Solutions Overview
| Method | Use Case | Communication | Overhead |
|---|---|---|---|
| Worker Threads | CPU-intensive JS | Shared memory, message passing | Low |
| Child Process | External programs, shell commands | stdio, IPC | Medium |
| Cluster | Multi-instance servers | None (separate processes) | High |
Worker Threads
Worker Threads run JavaScript in parallel threads, sharing memory when needed.
Basic Usage
// main.js
const { Worker, isMainThread, parentPort, workerData } = require('worker_threads');
if (isMainThread) {
// Main thread
const worker = new Worker(__filename, {
workerData: { value: 1000000 }
});
worker.on('message', (result) => {
console.log('Result:', result);
});
worker.on('error', (error) => {
console.error('Worker error:', error);
});
worker.on('exit', (code) => {
if (code !== 0) {
console.error(`Worker stopped with code ${code}`);
}
});
} else {
// Worker thread
const { value } = workerData;
let result = 0;
for (let i = 0; i < value; i++) {
result += Math.sqrt(i);
}
parentPort.postMessage(result);
}
Separate Worker File (Recommended)
// workers/compute.js
const { parentPort, workerData } = require('worker_threads');
const heavyComputation = (n) => {
let result = 0;
for (let i = 0; i < n; i++) {
result += Math.sqrt(i);
}
return result;
};
const result = heavyComputation(workerData.iterations);
parentPort.postMessage(result);
// main.js
const { Worker } = require('worker_threads');
const path = require('path');
const runWorker = (iterations) => {
return new Promise((resolve, reject) => {
const worker = new Worker(
path.join(__dirname, 'workers/compute.js'),
{ workerData: { iterations } }
);
worker.on('message', resolve);
worker.on('error', reject);
worker.on('exit', (code) => {
if (code !== 0) {
reject(new Error(`Worker stopped with code ${code}`));
}
});
});
};
// Usage in Express
app.get('/compute', async (req, res) => {
try {
const result = await runWorker(1e9);
res.json({ result });
} catch (error) {
res.status(500).json({ error: error.message });
}
});
Worker Pool
For better performance, reuse workers instead of creating new ones:
// workerPool.js
const { Worker } = require('worker_threads');
const path = require('path');
class WorkerPool {
constructor(workerPath, poolSize = 4) {
this.workerPath = workerPath;
this.poolSize = poolSize;
this.workers = [];
this.freeWorkers = [];
this.taskQueue = [];
this.init();
}
init() {
for (let i = 0; i < this.poolSize; i++) {
this.addWorker();
}
}
addWorker() {
const worker = new Worker(this.workerPath);
worker.on('message', (result) => {
const { resolve } = worker.currentTask;
worker.currentTask = null;
this.freeWorkers.push(worker);
resolve(result);
this.processQueue();
});
worker.on('error', (error) => {
if (worker.currentTask) {
worker.currentTask.reject(error);
}
// Replace dead worker
this.workers = this.workers.filter(w => w !== worker);
this.freeWorkers = this.freeWorkers.filter(w => w !== worker);
this.addWorker();
});
this.workers.push(worker);
this.freeWorkers.push(worker);
}
runTask(data) {
return new Promise((resolve, reject) => {
this.taskQueue.push({ data, resolve, reject });
this.processQueue();
});
}
processQueue() {
if (this.taskQueue.length === 0) return;
if (this.freeWorkers.length === 0) return;
const worker = this.freeWorkers.pop();
const task = this.taskQueue.shift();
worker.currentTask = task;
worker.postMessage(task.data);
}
destroy() {
for (const worker of this.workers) {
worker.terminate();
}
}
}
module.exports = WorkerPool;
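Note that the pool hands each task to a worker with worker.postMessage(), so pool workers must stay alive and listen for messages rather than computing once from workerData like compute.js above. A message-driven variant of the compute worker (the file name is illustrative):
// workers/computeTask.js — long-lived worker driven by messages
const { parentPort } = require('worker_threads');

parentPort.on('message', ({ iterations }) => {
  let result = 0;
  for (let i = 0; i < iterations; i++) {
    result += Math.sqrt(i);
  }
  parentPort.postMessage(result);
});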
// Usage (the worker must handle messages, as in computeTask.js above)
const WorkerPool = require('./workerPool');
const pool = new WorkerPool('./workers/computeTask.js', 4);
app.get('/compute', async (req, res) => {
const result = await pool.runTask({ iterations: 1e9 });
res.json({ result });
});
// Cleanup on shutdown
process.on('SIGTERM', () => pool.destroy());
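worker.terminate() returns a promise, so if the process should wait until every worker has actually stopped before exiting, a stricter variant of the shutdown handler looks like this (a sketch, assuming direct access to pool.workers):
process.on('SIGTERM', async () => {
  // Wait for every worker to finish terminating before the process exits
  await Promise.all(pool.workers.map((worker) => worker.terminate()));
  process.exit(0);
});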
SharedArrayBuffer for Shared Memory
// main.js
const { Worker } = require('worker_threads');
// Create shared memory
const sharedBuffer = new SharedArrayBuffer(4);
const sharedArray = new Int32Array(sharedBuffer);
sharedArray[0] = 0;
// Start multiple workers that increment the counter
const workers = [];
for (let i = 0; i < 4; i++) {
const worker = new Worker('./workers/counter.js', {
workerData: { sharedBuffer }
});
workers.push(worker);
}
// Wait for all workers
Promise.all(workers.map(w =>
new Promise(resolve => w.on('exit', resolve))
)).then(() => {
console.log('Final count:', sharedArray[0]);
});
// workers/counter.js
const { workerData } = require('worker_threads');
const sharedArray = new Int32Array(workerData.sharedBuffer);
for (let i = 0; i < 1000000; i++) {
Atomics.add(sharedArray, 0, 1); // Thread-safe increment
}
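The Atomics call is what makes this correct: a plain sharedArray[0]++ is a separate read and write, so concurrent workers would overwrite each other’s updates and the final count would fall short of 4,000,000. The main thread can use the same primitives for a consistent read:
// Read the counter with the same atomic guarantees
console.log('Current count:', Atomics.load(sharedArray, 0));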
Child Processes
Child Processes run separate Node.js instances or shell commands.
exec - Run Shell Commands
const { exec } = require('child_process');
const util = require('util');
const execPromise = util.promisify(exec);
// Simple command
exec('ls -la', (error, stdout, stderr) => {
if (error) {
console.error('Error:', error.message);
return;
}
console.log('Output:', stdout);
});
// Promise version
const runCommand = async (command) => {
try {
const { stdout, stderr } = await execPromise(command);
return stdout;
} catch (error) {
throw new Error(error.stderr || error.message);
}
};
// Usage
app.get('/git-log', async (req, res) => {
try {
const log = await runCommand('git log --oneline -10');
res.json({ log: log.split('\n') });
} catch (error) {
res.status(500).json({ error: error.message });
}
});
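Keep in mind that exec buffers the entire stdout/stderr in memory and fails once the output exceeds maxBuffer (roughly 1 MiB by default on current Node versions). For larger output, raise the limit or use spawn below (the size here is illustrative):
// Raise the buffer limit for commands that print a lot of output
const runBigCommand = async (command) => {
  const { stdout } = await execPromise(command, { maxBuffer: 10 * 1024 * 1024 });
  return stdout;
};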
spawn - Stream Output
const { spawn } = require('child_process');
// Better for long-running processes or large output
const runProcess = (command, args) => {
return new Promise((resolve, reject) => {
const child = spawn(command, args);
let stdout = '';
let stderr = '';
child.stdout.on('data', (data) => {
stdout += data;
console.log('stdout:', data.toString());
});
child.stderr.on('data', (data) => {
stderr += data;
console.error('stderr:', data.toString());
});
child.on('close', (code) => {
if (code === 0) {
resolve(stdout);
} else {
reject(new Error(stderr || `Process exited with code ${code}`));
}
});
child.on('error', reject);
});
};
// Install npm packages
app.post('/npm-install', async (req, res) => {
try {
await runProcess('npm', ['install', req.body.package]);
res.json({ success: true });
} catch (error) {
res.status(500).json({ error: error.message });
}
});
fork - Run Node.js Scripts
// main.js
const { fork } = require('child_process');
const path = require('path');
const runNodeScript = (scriptPath, data) => {
return new Promise((resolve, reject) => {
const child = fork(scriptPath);
child.send(data);
child.on('message', (result) => {
resolve(result);
child.kill();
});
child.on('error', reject);
child.on('exit', (code) => {
// code is null when the child is killed after replying; only treat real failures as errors
if (code !== null && code !== 0) {
reject(new Error(`Process exited with code ${code}`));
}
});
});
};
// scripts/process-data.js
const processData = async (data) => {
// Placeholder for the actual heavy processing
return { processed: true, keys: Object.keys(data).length };
};
process.on('message', async (data) => {
const result = await processData(data);
process.send(result);
});
// Usage
app.post('/process', async (req, res) => {
const result = await runNodeScript(
path.join(__dirname, 'scripts/process-data.js'),
req.body
);
res.json(result);
});
Real-World Use Cases
Image Processing with Workers
// workers/imageProcessor.js
const { parentPort } = require('worker_threads');
const sharp = require('sharp');
const processImage = async ({ inputPath, outputPath, width, height }) => {
await sharp(inputPath)
.resize(width, height)
.webp({ quality: 80 })
.toFile(outputPath);
return { success: true, outputPath };
};
parentPort.on('message', async (task) => {
try {
const result = await processImage(task);
parentPort.postMessage({ success: true, ...result });
} catch (error) {
parentPort.postMessage({ success: false, error: error.message });
}
});
// imageService.js
const WorkerPool = require('./workerPool');
const pool = new WorkerPool('./workers/imageProcessor.js', 4);
const processImages = async (images) => {
const tasks = images.map(img =>
pool.runTask({
inputPath: img.path,
outputPath: img.outputPath,
width: 800,
height: 600
})
);
return Promise.all(tasks);
};
PDF Generation
// workers/pdfGenerator.js
const { parentPort } = require('worker_threads');
const PDFDocument = require('pdfkit');
const fs = require('fs');
parentPort.on('message', async ({ data, outputPath }) => {
try {
const doc = new PDFDocument();
const stream = fs.createWriteStream(outputPath);
doc.pipe(stream);
// Generate PDF content
doc.fontSize(25).text(data.title, 100, 100);
doc.fontSize(12).text(data.content, 100, 150);
doc.end();
stream.on('finish', () => {
parentPort.postMessage({ success: true, path: outputPath });
});
} catch (error) {
parentPort.postMessage({ success: false, error: error.message });
}
});
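Because the generator listens with parentPort.on('message', ...), it can be driven by the same WorkerPool class from earlier. A sketch of the calling side (file names and invoice fields are illustrative):
// pdfService.js
const path = require('path');
const WorkerPool = require('./workerPool');

const pdfPool = new WorkerPool('./workers/pdfGenerator.js', 2);

const generateInvoice = (invoice) => {
  return pdfPool.runTask({
    data: { title: `Invoice #${invoice.id}`, content: invoice.summary },
    outputPath: path.join(__dirname, 'invoices', `${invoice.id}.pdf`)
  });
};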
Video Transcoding with FFmpeg
const { spawn } = require('child_process');
const transcodeVideo = (input, output, options = {}) => {
return new Promise((resolve, reject) => {
const args = [
'-i', input,
'-c:v', options.codec || 'libx264',
'-preset', options.preset || 'medium',
'-crf', String(options.quality ?? 23),
'-c:a', 'aac',
'-y', // Overwrite output
output
];
const ffmpeg = spawn('ffmpeg', args);
let progress = '';
ffmpeg.stderr.on('data', (data) => {
progress = data.toString();
// Parse progress for UI updates
});
ffmpeg.on('close', (code) => {
if (code === 0) {
resolve({ success: true, output });
} else {
reject(new Error(`FFmpeg exited with code ${code}`));
}
});
ffmpeg.on('error', reject);
});
};
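A typical way to call it from a route, keeping the request open until FFmpeg finishes (paths and option values are illustrative):
app.post('/transcode', async (req, res) => {
  try {
    const result = await transcodeVideo(
      req.body.inputPath,
      req.body.outputPath,
      { preset: 'fast', quality: '20' }
    );
    res.json(result);
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});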
Best Practices
- Use Worker Pools - Don’t create new workers per request
- Set appropriate pool size - Usually the number of CPU cores (see the sketch after this list)
- Handle errors properly - Workers can crash
- Clean up on shutdown - Terminate workers gracefully
- Don’t overuse workers - Only for CPU-intensive tasks
- Consider message serialization - Large data transfers have overhead
- Use SharedArrayBuffer - For shared state between threads
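For pool sizing, the os module reports how much parallelism the host actually offers, and for large payloads an ArrayBuffer can be moved instead of copied by passing a transfer list to postMessage. A sketch of both (os.availableParallelism needs a recent Node release; the worker path is illustrative):
const os = require('os');
const { Worker } = require('worker_threads');

// Size the pool to the hardware; fall back to the core count on older Node versions
const poolSize = typeof os.availableParallelism === 'function'
  ? os.availableParallelism()
  : os.cpus().length;

// Move a large buffer to the worker instead of copying it (zero-copy transfer)
const worker = new Worker('./workers/bufferWorker.js');
const payload = new Float64Array(1e7);
worker.postMessage(payload.buffer, [payload.buffer]); // the buffer is detached here after transfer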
When to Use What
| Scenario | Solution |
|---|---|
| CPU-intensive calculations | Worker Threads |
| Image/video processing | Worker Pool + spawn for external tools |
| Running shell commands | exec or spawn |
| Running Node.js scripts | fork |
| Scaling HTTP servers | Cluster or PM2 (see the sketch below) |
| Background job processing | Worker Threads or separate process |
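The tables above mention the cluster module for scaling HTTP servers; a minimal sketch of what that looks like (assuming Node 16+, where isPrimary replaced isMaster; the port is illustrative):
// cluster.js — one worker process per CPU core, all sharing the same port
const cluster = require('cluster');
const os = require('os');
const http = require('http');

if (cluster.isPrimary) {
  for (let i = 0; i < os.cpus().length; i++) {
    cluster.fork();
  }
  cluster.on('exit', (worker) => {
    console.log(`Worker ${worker.process.pid} died, starting a replacement`);
    cluster.fork();
  });
} else {
  http.createServer((req, res) => {
    res.end(`Handled by process ${process.pid}`);
  }).listen(3000);
}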
Summary
- Worker Threads run JavaScript in parallel without blocking
- Worker Pools reuse threads for better performance
- SharedArrayBuffer enables shared memory between threads
- Child Processes run external programs or Node.js scripts
- Use exec for simple commands, spawn for streaming output
- Use fork for Node.js scripts with IPC
- Always handle errors and cleanup properly
- Match pool size to CPU cores for optimal performance