# Performance Optimization
Optimize your Node.js backend - profiling, N+1 queries, memory leaks, and best practices.
## Performance Checklist
| Area | Quick Wins |
|---|---|
| Database | Indexes, query optimization, N+1 fixes |
| Caching | Redis, HTTP caching, memoization |
| Code | Async operations, streaming, pooling |
| Network | Compression, CDN, keep-alive (see the sketch below) |
| Memory | Leak detection, garbage collection |
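Keep-alive applies to the HTTP calls your backend makes as well as the ones it serves: reusing connections avoids a fresh TCP and TLS handshake per request. A minimal sketch using Node's built-in https agent (the hostname is illustrative; newer Node versions enable keep-alive on the default agent, but being explicit documents the intent):

```javascript
import https from 'node:https';

// Reuse TCP/TLS connections for outbound requests instead of opening a new one per call
const keepAliveAgent = new https.Agent({ keepAlive: true, maxSockets: 50 });

https.get('https://api.example.com/data', { agent: keepAliveAgent }, (res) => {
  let body = '';
  res.on('data', (chunk) => (body += chunk));
  res.on('end', () => console.log(JSON.parse(body)));
});
```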
## Profiling

### Node.js Built-in Profiler

```bash
# Generate CPU profile
node --prof app.js

# Process the profile into readable output
node --prof-process isolate-*.log > profile.txt
```
### Clinic.js (Recommended)

```bash
npm install -g clinic

# Overall health check (event loop, CPU, GC)
clinic doctor -- node app.js

# CPU flame graphs
clinic flame -- node app.js

# Memory analysis
clinic heapprofiler -- node app.js

# Async operations analysis
clinic bubbleprof -- node app.js
```
### Simple Timing

```javascript
// Time specific operations
console.time('database-query');
const users = await prisma.user.findMany();
console.timeEnd('database-query');

// More detailed timing
import { performance } from 'node:perf_hooks';

const start = performance.now();
await someOperation();
const duration = performance.now() - start;
console.log(`Operation took ${duration.toFixed(2)}ms`);
```
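When a timing needs to span non-adjacent points in the code, or be collected in one place, the same perf_hooks module provides marks and measures. A minimal sketch, with illustrative mark and measure names:

```javascript
import { performance, PerformanceObserver } from 'node:perf_hooks';

// Log every measure as it is recorded
const obs = new PerformanceObserver((list) => {
  for (const entry of list.getEntries()) {
    console.log(`${entry.name}: ${entry.duration.toFixed(2)}ms`);
  }
});
obs.observe({ entryTypes: ['measure'] });

performance.mark('db-start');
await prisma.user.findMany();
performance.mark('db-end');
performance.measure('db-query', 'db-start', 'db-end'); // picked up by the observer
```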
### Request Timing Middleware

```javascript
// src/middleware/timing.js
export function requestTiming(req, res, next) {
  const start = process.hrtime.bigint();

  res.on('finish', () => {
    const end = process.hrtime.bigint();
    const durationMs = Number(end - start) / 1e6;
    console.log({
      method: req.method,
      path: req.path,
      status: res.statusCode,
      duration: `${durationMs.toFixed(2)}ms`,
    });
  });

  next();
}

// In your app entry point, register it before the routes
app.use(requestTiming);
```
## N+1 Query Problem

### The Problem

```javascript
// BAD - N+1 queries
const posts = await prisma.post.findMany();

for (const post of posts) {
  // This runs a query for EACH post!
  const author = await prisma.user.findUnique({
    where: { id: post.authorId },
  });
  post.author = author;
}
// Result: 1 query for posts + N queries for authors
```
### The Solution

```javascript
// GOOD - Eager loading (include)
const posts = await prisma.post.findMany({
  include: {
    author: true, // Loaded together with the posts
  },
});
// Result: 1-2 queries total

// GOOD - Select only the author fields you need
const posts = await prisma.post.findMany({
  include: {
    author: {
      select: { id: true, name: true, avatar: true },
    },
  },
});

// GOOD - Batch loading with DataLoader
import DataLoader from 'dataloader';

const userLoader = new DataLoader(async (userIds) => {
  const users = await prisma.user.findMany({
    where: { id: { in: userIds } },
  });
  // Return users in the same order as the requested ids
  const userMap = new Map(users.map(u => [u.id, u]));
  return userIds.map(id => userMap.get(id));
});

// Usage - loads requested in the same tick are batched
const [author1, author2] = await Promise.all([
  userLoader.load(post1.authorId),
  userLoader.load(post2.authorId),
]);
// Only 1 user query for both!
```
## Database Optimization

### Indexing

```javascript
// Log slow queries with Prisma
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient({
  log: [
    { emit: 'event', level: 'query' },
  ],
});

prisma.$on('query', (e) => {
  if (e.duration > 100) { // Log slow queries (>100ms)
    console.warn('Slow query:', e.query, `${e.duration}ms`);
  }
});
```
```sql
-- Check index usage (PostgreSQL); low idx_scan values indicate rarely used indexes
SELECT
  schemaname,
  tablename,
  indexname,
  idx_scan,
  idx_tup_read,
  idx_tup_fetch
FROM pg_stat_user_indexes
ORDER BY idx_scan;

-- Create indexes for common queries
CREATE INDEX idx_posts_author_id ON posts(author_id);
CREATE INDEX idx_posts_created_at ON posts(created_at DESC);
CREATE INDEX idx_users_email ON users(email);
```
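To confirm that a query actually hits an index, you can run EXPLAIN through Prisma's raw query API. A minimal sketch, assuming the posts table and index above (note that EXPLAIN ANALYZE executes the query):

```javascript
// $queryRawUnsafe runs an arbitrary SQL string; fine here because nothing is user-supplied
const plan = await prisma.$queryRawUnsafe(
  'EXPLAIN ANALYZE SELECT * FROM posts WHERE author_id = 123 ORDER BY created_at DESC LIMIT 20'
);
// Look for "Index Scan using idx_posts_author_id" rather than "Seq Scan"
console.log(plan);
```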
### Query Optimization

```javascript
// BAD - Select all columns
const users = await prisma.user.findMany();

// GOOD - Select only needed fields
const users = await prisma.user.findMany({
  select: {
    id: true,
    name: true,
    email: true,
  },
});

const weekAgo = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000);

// BAD - Load everything, then filter in JS
const allPosts = await prisma.post.findMany();
const recentPosts = allPosts.filter(p => p.createdAt > weekAgo);

// GOOD - Filter in the database
const recentPosts = await prisma.post.findMany({
  where: {
    createdAt: { gt: weekAgo },
  },
});

// BAD - Multiple queries for count + data
const users = await prisma.user.findMany({ take: 20 });
const count = await prisma.user.count();

// GOOD - Single transaction
const [users, count] = await prisma.$transaction([
  prisma.user.findMany({ take: 20 }),
  prisma.user.count(),
]);
```
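Offset pagination (`skip`) also gets slower the deeper you page, because the database still has to walk past the skipped rows. A minimal sketch of cursor-based pagination with Prisma, where `lastSeenId` is an illustrative name for the last id from the previous page:

```javascript
const page = await prisma.user.findMany({
  take: 20,
  orderBy: { id: 'asc' },
  // Only add the cursor on pages after the first
  ...(lastSeenId && {
    cursor: { id: lastSeenId },
    skip: 1, // skip the cursor row itself
  }),
});
```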
## Caching Strategies

### Response Caching

```javascript
import { Redis } from 'ioredis';

const redis = new Redis(process.env.REDIS_URL);

async function cachedQuery(key, ttl, queryFn) {
  const cached = await redis.get(key);
  if (cached) return JSON.parse(cached);

  const result = await queryFn();
  await redis.setex(key, ttl, JSON.stringify(result));
  return result;
}

// Usage
app.get('/api/posts', async (req, res) => {
  const posts = await cachedQuery(
    'posts:recent',
    60, // 1 minute
    () => prisma.post.findMany({
      take: 20,
      orderBy: { createdAt: 'desc' },
    })
  );
  res.json(posts);
});
```
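The checklist also lists HTTP caching: for responses that browsers or a CDN may safely reuse, cache headers avoid hitting the server at all. A minimal sketch, with an illustrative route and durations:

```javascript
app.get('/api/posts', async (req, res) => {
  const posts = await prisma.post.findMany({ take: 20 });
  // Let clients and CDNs reuse the response for 60s,
  // and serve a stale copy for up to 5 minutes while revalidating
  res.set('Cache-Control', 'public, max-age=60, stale-while-revalidate=300');
  res.json(posts);
});
```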
### Function Memoization

```javascript
// In-memory memoization
function memoize(fn, maxAge = 60000) {
  const cache = new Map(); // Note: entries are overwritten but never evicted

  return async function (...args) {
    const key = JSON.stringify(args);
    const cached = cache.get(key);
    if (cached && Date.now() < cached.expiry) {
      return cached.value;
    }

    const result = await fn.apply(this, args);
    cache.set(key, { value: result, expiry: Date.now() + maxAge });
    return result;
  };
}

const getConfig = memoize(async () => {
  return prisma.config.findMany();
}, 300000); // Cache for 5 minutes
```
## Memory Optimization

### Detecting Memory Leaks

```javascript
// Monitor memory usage
setInterval(() => {
  const usage = process.memoryUsage();
  console.log({
    heapUsed: `${Math.round(usage.heapUsed / 1024 / 1024)}MB`,
    heapTotal: `${Math.round(usage.heapTotal / 1024 / 1024)}MB`,
    external: `${Math.round(usage.external / 1024 / 1024)}MB`,
  });
}, 30000);

// Expose for debugging (protect or disable this in production)
app.get('/debug/memory', (req, res) => {
  res.json(process.memoryUsage());
});
```
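If heapUsed climbs steadily, a heap snapshot shows what is being retained. A minimal sketch using Node's built-in v8 module; the route is illustrative and, like the one above, should not be publicly reachable:

```javascript
import v8 from 'node:v8';

// Writes a .heapsnapshot file you can open in Chrome DevTools (Memory tab).
// Compare two snapshots taken a few minutes apart to see what keeps growing.
app.get('/debug/heap-snapshot', (req, res) => {
  const file = v8.writeHeapSnapshot(); // synchronous; briefly pauses the process
  res.json({ file });
});
```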
### Common Memory Leaks

```javascript
// LEAK - Event listeners not removed
class Service {
  constructor() {
    // This adds a new listener every time Service is created!
    process.on('message', this.handleMessage);
  }

  // FIX - Clean up when the instance is no longer needed
  destroy() {
    process.off('message', this.handleMessage);
  }
}

// LEAK - Closures holding references
function createHandler() {
  const hugeData = loadHugeDataset(); // Held in memory forever
  return (req, res) => {
    res.json(hugeData.slice(0, 10));
  };
}

// FIX - Load on demand or use WeakRef
function createHandler() {
  return async (req, res) => {
    const data = await loadData(); // Fresh each time
    res.json(data.slice(0, 10));
  };
}

// LEAK - Growing arrays/maps
const cache = new Map(); // Grows forever!

// FIX - Use an LRU cache
import { LRUCache } from 'lru-cache';

const cache = new LRUCache({
  max: 500, // Max items
  ttl: 1000 * 60 * 5, // 5 minutes
});
```
### Streaming Large Data

```javascript
import fs from 'node:fs';

// BAD - Load entire file into memory
app.get('/download', async (req, res) => {
  const data = await fs.promises.readFile('large-file.csv');
  res.send(data); // Memory spike!
});

// GOOD - Stream the file
app.get('/download', (req, res) => {
  const stream = fs.createReadStream('large-file.csv');
  stream.pipe(res);
});

// GOOD - Stream database results
app.get('/export', async (req, res) => {
  res.setHeader('Content-Type', 'application/json');
  res.write('[');

  let first = true;

  // Process in batches instead of loading every row at once
  let skip = 0;
  const batchSize = 100;
  while (true) {
    const users = await prisma.user.findMany({
      skip,
      take: batchSize,
    });
    if (users.length === 0) break;

    for (const user of users) {
      if (!first) res.write(',');
      res.write(JSON.stringify(user));
      first = false;
    }
    skip += batchSize;
  }

  res.write(']');
  res.end();
});
```
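`pipe` on its own does not propagate errors, so a failed read can leave the response hanging. A minimal sketch of the same download route using `pipeline` from node:stream/promises, which handles backpressure and cleans up both streams on failure:

```javascript
import { createReadStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';

app.get('/download', async (req, res) => {
  try {
    await pipeline(createReadStream('large-file.csv'), res);
  } catch (err) {
    console.error('Download failed:', err);
    if (!res.headersSent) res.status(500).end();
  }
});
```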
## Async Best Practices

```javascript
// BAD - Sequential when parallel is possible
const user = await getUser(id);
const posts = await getPosts(id);
const notifications = await getNotifications(id);

// GOOD - Parallel execution
const [user, posts, notifications] = await Promise.all([
  getUser(id),
  getPosts(id),
  getNotifications(id),
]);

// BAD - await in a loop
const results = [];
for (const id of userIds) {
  const user = await getUser(id); // Sequential!
  results.push(user);
}

// GOOD - Parallel with Promise.all
const results = await Promise.all(
  userIds.map(id => getUser(id))
);

// GOOD - Controlled concurrency
import pLimit from 'p-limit';

const limit = pLimit(5); // Max 5 concurrent
const results = await Promise.all(
  userIds.map(id => limit(() => getUser(id)))
);
```
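Promise.all rejects as soon as any promise rejects. When partial failure is acceptable, Promise.allSettled lets the remaining work finish; a minimal sketch:

```javascript
// Each result is { status: 'fulfilled', value } or { status: 'rejected', reason }
const settled = await Promise.allSettled(userIds.map(id => getUser(id)));

const users = settled
  .filter(r => r.status === 'fulfilled')
  .map(r => r.value);

const failures = settled.filter(r => r.status === 'rejected');
if (failures.length > 0) console.warn(`${failures.length} user lookups failed`);
```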
## Compression

```javascript
import compression from 'compression';

app.use(compression({
  level: 6, // Compression level (1-9)
  threshold: 1024, // Min response size to compress (bytes)
  filter: (req, res) => {
    if (req.headers['x-no-compression']) {
      return false;
    }
    return compression.filter(req, res);
  },
}));
```
## Key Takeaways
- Profile first - Don't optimize blindly
- Fix N+1 queries - Use includes/joins
- Cache strategically - Hot data in Redis
- Stream large data - Don't load into memory
- Run in parallel - Use Promise.all