Understanding n8n Performance Bottlenecks
Most n8n performance issues come from loading too much data into memory, inefficient loops, blocking operations, or uncontrolled concurrency. Understanding these patterns is crucial for building production-grade nodes.
- Memory bloat: Loading entire datasets instead of streaming
- N+1 queries: Making API calls in loops without batching (see the batching sketch after this list)
- Blocking I/O: Synchronous operations that freeze the event loop
- Uncontrolled concurrency: Spawning unlimited parallel operations
- Inefficient algorithms: O(n²) operations on large datasets
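For example, the N+1 pattern above is usually fixed by collecting identifiers first and issuing one batched request. A minimal sketch inside a node's execute method, assuming a hypothetical endpoint that accepts a comma-separated ids parameter:
// N+1 anti-pattern: one HTTP request per input item
// for (const item of items) {
//   await this.helpers.httpRequest({ url: `https://api.example.com/users/${item.json.id}` });
// }

// Batched alternative: a single request for all ids (endpoint shape is hypothetical)
const ids = items.map((item) => item.json.id);
const users = await this.helpers.httpRequest({
  url: 'https://api.example.com/users',
  qs: { ids: ids.join(',') },
});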
Memory Optimization Strategies
Memory-Efficient Data Processing
import { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
export class MemoryOptimizedNode {
private readonly MAX_MEMORY_MB = 512;
private memoryUsage = 0;
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();
const chunkSize = this.calculateOptimalChunkSize(items.length);
// Process in chunks to control memory
const results: INodeExecutionData[] = [];
for (let i = 0; i < items.length; i += chunkSize) {
// Check memory before processing
await this.checkMemoryPressure();
const chunk = items.slice(i, i + chunkSize);
const processed = await this.processChunk(chunk);
// Stream results immediately instead of accumulating
if (this.supportsStreaming()) {
await this.streamResults(processed);
} else {
results.push(...processed);
}
// Force garbage collection if available (requires running node with --expose-gc)
if (global.gc && this.memoryUsage > this.MAX_MEMORY_MB * 0.8) {
global.gc();
await this.delay(100); // Give GC time to work
}
}
return [results];
}
private calculateOptimalChunkSize(totalItems: number): number {
const avgItemSize = this.estimateItemSize();
const availableMemory = this.MAX_MEMORY_MB * 1024 * 1024;
const safetyFactor = 0.5; // Use only 50% of available memory
const maxChunkSize = Math.floor(
(availableMemory * safetyFactor) / avgItemSize
);
return Math.max(1, Math.min(maxChunkSize, Math.ceil(totalItems / 10))); // never 0, which would stall the loop
}
private async checkMemoryPressure(): Promise<void> {
const usage = process.memoryUsage();
this.memoryUsage = usage.heapUsed / (1024 * 1024);
if (this.memoryUsage > this.MAX_MEMORY_MB * 0.9) {
this.logger.warn('High memory pressure detected', {
used: Math.round(this.memoryUsage),
limit: this.MAX_MEMORY_MB
});
// Pause to allow GC
await this.delay(1000);
}
}
private estimateItemSize(): number {
// Sample the first few items to estimate average serialized size
const sample = this.getInputData().slice(0, 10);
if (sample.length === 0) return 1; // avoid division by zero on empty input
const sampleSize = JSON.stringify(sample).length;
return Math.ceil(sampleSize / sample.length);
}
}
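The node above assumes several helpers it does not define (processChunk, streamResults, supportsStreaming, delay). Minimal sketches of the two simplest ones:
// Sketches of helpers assumed by MemoryOptimizedNode (names taken from the code above)
private delay(ms: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

private supportsStreaming(): boolean {
  // Regular nodes return all results at the end of execute(), so a
  // conservative default is to accumulate rather than stream
  return false;
}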
Object Pool Pattern
export class ObjectPoolNode {
private pools = new Map<string, ObjectPool>();
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const poolType = this.getNodeParameter('poolType', 0) as string;
const pool = this.getOrCreatePool(poolType);
const results: INodeExecutionData[] = [];
// Process with pooled objects
for (const item of this.getInputData()) {
const obj = await pool.acquire();
try {
const result = await this.processWithPooledObject(obj, item);
results.push({ json: result });
} finally {
// Always return to pool
await pool.release(obj);
}
}
return [results];
}
private getOrCreatePool(type: string): ObjectPool {
if (!this.pools.has(type)) {
const pool = new ObjectPool({
create: () => this.createExpensiveObject(type),
destroy: (obj) => this.destroyObject(obj),
validate: (obj) => this.validateObject(obj),
max: 10,
min: 2,
idleTimeoutMillis: 30000,
});
this.pools.set(type, pool);
}
return this.pools.get(type)!;
}
private async createExpensiveObject(type: string): Promise<any> {
switch (type) {
case 'browser': {
// Reuse browser instances
const puppeteer = require('puppeteer');
return await puppeteer.launch({
args: ['--no-sandbox', '--disable-dev-shm-usage']
});
}
case 'database': {
// Reuse database connections
const { Client } = require('pg');
const client = new Client({
connectionString: process.env.DATABASE_URL
});
await client.connect();
return client;
}
case 'tensorflow': {
// Reuse ML models (tfjs-node expects a file:// URL pointing at model.json)
const tf = require('@tensorflow/tfjs-node');
return await tf.loadLayersModel('file://path/to/model/model.json');
}
default:
throw new Error(`Unknown pool type: ${type}`);
}
}
}
class ObjectPool {
private available: any[] = [];
private inUse = new Set<any>();
private creating = 0;
constructor(private config: PoolConfig) {
// Pre-create minimum objects (fire-and-forget; the pool warms up asynchronously)
this.ensureMinimum();
}
async acquire(): Promise<any> {
// Return available object
if (this.available.length > 0) {
const obj = this.available.pop()!;
this.inUse.add(obj);
return obj;
}
// Create new if under limit
if (this.size < this.config.max) {
this.creating++;
try {
const obj = await this.config.create();
this.inUse.add(obj);
return obj;
} finally {
this.creating--;
}
}
// Wait for available object
return await this.waitForAvailable();
}
async release(obj: any): Promise<void> {
this.inUse.delete(obj);
// Validate before returning to pool
if (await this.config.validate(obj)) {
this.available.push(obj);
} else {
// Destroy invalid object and create new
await this.config.destroy(obj);
this.ensureMinimum();
}
}
private get size(): number {
return this.available.length + this.inUse.size + this.creating;
}
private async ensureMinimum(): Promise<void> {
const toCreate = Math.max(0, this.config.min - this.size);
for (let i = 0; i < toCreate; i++) {
const obj = await this.config.create();
this.available.push(obj);
}
}
private async waitForAvailable(): Promise<any> {
return new Promise((resolve) => {
const checkInterval = setInterval(() => {
if (this.available.length > 0) {
clearInterval(checkInterval);
resolve(this.acquire());
}
}, 100);
});
}
}
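Two pieces are implied but not shown above: the PoolConfig shape the pool consumes, and the destroyObject/validateObject helpers the node calls. A minimal sketch, assuming the pg and puppeteer objects created earlier:
interface PoolConfig {
  create: () => Promise<any>;
  destroy: (obj: any) => Promise<void>;
  validate: (obj: any) => Promise<boolean>;
  max: number;
  min: number;
  idleTimeoutMillis: number; // accepted above but not enforced by this sketch
}

// Hypothetical helpers inside ObjectPoolNode
private async destroyObject(obj: any): Promise<void> {
  if (typeof obj.close === 'function') await obj.close(); // puppeteer browser
  else if (typeof obj.end === 'function') await obj.end(); // pg client
}

private async validateObject(obj: any): Promise<boolean> {
  try {
    // For pg clients, a cheap query confirms the connection is still alive
    if (typeof obj.query === 'function') await obj.query('SELECT 1');
    return true;
  } catch {
    return false;
  }
}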
Concurrency Control
Advanced Rate Limiting
Uncontrolled concurrency can overwhelm APIs, databases, and your n8n instance. Always implement concurrency limits based on the target system's capacity.
export class ConcurrencyControlNode {
private semaphore: Semaphore;
private rateLimiter: RateLimiter;
constructor() {
this.semaphore = new Semaphore(5); // Max 5 concurrent operations
this.rateLimiter = new RateLimiter({
maxRequests: 100,
perMilliseconds: 1000, // 100 requests per second
});
}
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();
const concurrencyLimit = this.getNodeParameter('concurrency', 0, 5) as number;
const batchSize = this.getNodeParameter('batchSize', 0, 10) as number;
// Update semaphore limit
this.semaphore.setLimit(concurrencyLimit);
// Process in controlled batches
const results: INodeExecutionData[] = [];
const batches = this.createBatches(items, batchSize);
await this.processWithConcurrencyControl(batches, results);
return [results];
}
private async processWithConcurrencyControl(
batches: INodeExecutionData[][],
results: INodeExecutionData[]
): Promise<void> {
// Process batches with concurrency control
const promises = batches.map(async (batch, index) => {
// Acquire semaphore slot
const release = await this.semaphore.acquire();
try {
// Wait for rate limit
await this.rateLimiter.throttle();
// Process batch
const batchResults = await this.processBatch(batch, index);
results.push(...batchResults);
} finally {
release();
}
});
// Wait for all batches
await Promise.all(promises);
}
private createBatches(
items: INodeExecutionData[],
size: number
): INodeExecutionData[][] {
const batches: INodeExecutionData[][] = [];
for (let i = 0; i < items.length; i += size) {
batches.push(items.slice(i, i + size));
}
return batches;
}
private async processBatch(
batch: INodeExecutionData[],
batchIndex: number
): Promise<INodeExecutionData[]> {
const startTime = Date.now();
// Process batch items in parallel (within batch)
const results = await Promise.all(
batch.map(item => this.processItem(item))
);
const duration = Date.now() - startTime;
this.logger.info(`Batch ${batchIndex} processed in ${duration}ms`, {
itemCount: batch.length,
itemsPerSecond: Math.round((batch.length / duration) * 1000)
});
return results;
}
}
class Semaphore {
private permits: number;
private waiting: Array<() => void> = [];
constructor(private limit: number) {
this.permits = limit;
}
async acquire(): Promise<() => void> {
if (this.permits > 0) {
this.permits--;
return () => this.release();
}
// Wait for permit
return new Promise(resolve => {
this.waiting.push(() => {
this.permits--;
resolve(() => this.release());
});
});
}
private release(): void {
this.permits++;
if (this.waiting.length > 0 && this.permits > 0) {
const next = this.waiting.shift()!;
next();
}
}
setLimit(newLimit: number): void {
const difference = newLimit - this.limit;
this.limit = newLimit;
this.permits += difference;
// Process waiting queue if limit increased
while (this.permits > 0 && this.waiting.length > 0) {
const next = this.waiting.shift()!;
next();
}
}
}
class RateLimiter {
private tokens: number;
private lastRefill: number;
constructor(private config: RateLimiterConfig) {
this.tokens = config.maxRequests;
this.lastRefill = Date.now();
}
async throttle(): Promise<void> {
await this.refillTokens();
if (this.tokens <= 0) {
const waitTime = Math.max(0, this.config.perMilliseconds - (Date.now() - this.lastRefill));
await this.delay(waitTime);
await this.refillTokens();
}
this.tokens--;
}
private async refillTokens(): Promise<void> {
const now = Date.now();
const timePassed = now - this.lastRefill;
if (timePassed >= this.config.perMilliseconds) {
this.tokens = this.config.maxRequests;
this.lastRefill = now;
}
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}
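The RateLimiterConfig shape is implied by the usage above. Note this is a fixed-window limiter: the whole token budget refreshes at once, so bursts of up to twice maxRequests can straddle a window boundary; a token-bucket refill smooths this out if the target API is strict. The Semaphore also works standalone; a sketch capping concurrent fetches at three (URLs are placeholders):
interface RateLimiterConfig {
  maxRequests: number;     // tokens granted per window
  perMilliseconds: number; // window length in ms
}

// Standalone Semaphore usage
const sem = new Semaphore(3);
const urls = ['https://example.com/a', 'https://example.com/b', 'https://example.com/c'];
const responses = await Promise.all(
  urls.map(async (url) => {
    const release = await sem.acquire();
    try {
      return await fetch(url); // at most three requests in flight
    } finally {
      release();
    }
  }),
);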
Worker Thread Pool
import { Worker } from 'worker_threads';
import * as os from 'os';
export class WorkerPoolNode {
private workerPool: WorkerPool;
constructor() {
this.workerPool = new WorkerPool({
workerScript: './heavy-computation.js',
minWorkers: 2,
maxWorkers: os.cpus().length,
});
}
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();
const operation = this.getNodeParameter('operation', 0) as string;
// Distribute work across worker threads
const promises = items.map((item, index) =>
this.workerPool.execute({
operation,
data: item.json,
index
})
);
const results = await Promise.all(promises);
return [results.map(result => ({ json: result }))];
}
}
class WorkerPool {
private workers: Worker[] = [];
private freeWorkers: Worker[] = [];
private queue: Array<{
resolve: (value: any) => void;
reject: (error: any) => void;
task: any;
}> = [];
constructor(private config: WorkerPoolConfig) {
this.initializeWorkers();
}
private initializeWorkers(): void {
for (let i = 0; i < this.config.minWorkers; i++) {
this.createWorker();
}
}
private createWorker(): Worker {
const worker = new Worker(this.config.workerScript);
worker.on('message', () => {
// Task results are consumed by the once('message') listener attached in
// executeOnWorker; here we only dispatch the next queued task, or mark
// the worker as free again
if (this.queue.length > 0) {
const next = this.queue.shift()!;
this.executeOnWorker(worker, next.task)
.then(next.resolve)
.catch(next.reject);
} else {
this.freeWorkers.push(worker);
}
});
worker.on('error', (error) => {
console.error('Worker error:', error);
this.removeWorker(worker);
this.createWorker(); // Replace failed worker
});
this.workers.push(worker);
// Hand queued work to a brand-new worker immediately, otherwise mark it free
if (this.queue.length > 0) {
const next = this.queue.shift()!;
this.executeOnWorker(worker, next.task).then(next.resolve).catch(next.reject);
} else {
this.freeWorkers.push(worker);
}
return worker;
}
async execute(task: any): Promise<any> {
// Get free worker or queue task
const worker = this.getFreeWorker();
if (worker) {
return await this.executeOnWorker(worker, task);
}
// Queue task
return new Promise((resolve, reject) => {
this.queue.push({ resolve, reject, task });
// Create new worker if under limit and queue is building
if (
this.workers.length < this.config.maxWorkers &&
this.queue.length > this.freeWorkers.length
) {
this.createWorker();
}
});
}
private getFreeWorker(): Worker | null {
return this.freeWorkers.pop() || null;
}
private async executeOnWorker(worker: Worker, task: any): Promise<any> {
return new Promise((resolve, reject) => {
const timeout = setTimeout(() => {
reject(new Error('Worker timeout'));
this.removeWorker(worker);
}, 30000);
worker.once('message', (result) => {
clearTimeout(timeout);
resolve(result);
});
worker.once('error', (error) => {
clearTimeout(timeout);
reject(error);
});
worker.postMessage(task);
});
}
private removeWorker(worker: Worker): void {
const index = this.workers.indexOf(worker);
if (index > -1) {
this.workers.splice(index, 1);
}
const freeIndex = this.freeWorkers.indexOf(worker);
if (freeIndex > -1) {
this.freeWorkers.splice(freeIndex, 1);
}
worker.terminate();
}
async terminate(): Promise<void> {
await Promise.all(this.workers.map(w => w.terminate()));
this.workers = [];
this.freeWorkers = [];
}
}
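The WorkerPoolConfig shape and the worker script itself are assumed above. Minimal sketches of both; the task shape matches what WorkerPoolNode posts (operation, data, index):
interface WorkerPoolConfig {
  workerScript: string;
  minWorkers: number;
  maxWorkers: number;
}

// heavy-computation.js: a minimal worker sketch
const { parentPort } = require('worker_threads');
parentPort.on('message', (task) => {
  // ...CPU-heavy work on task.data goes here...
  const result = { operation: task.operation, index: task.index, data: task.data };
  parentPort.postMessage(result);
});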
Algorithm Optimization
Efficient Data Structures
export class OptimizedDataStructureNode {
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();
const operation = this.getNodeParameter('operation', 0) as string;
switch (operation) {
case 'deduplicate':
return [this.efficientDeduplication(items)];
case 'join':
return [await this.efficientJoin(items)];
case 'aggregate':
return [this.efficientAggregation(items)];
case 'search':
return [this.efficientSearch(items)];
default:
throw new Error(`Unknown operation: ${operation}`);
}
}
private efficientDeduplication(items: INodeExecutionData[]): INodeExecutionData[] {
// Use Set for O(1) lookup instead of array.includes() O(n)
const seen = new Set<string>();
const results: INodeExecutionData[] = [];
for (const item of items) {
const key = this.getDeduplicationKey(item);
if (!seen.has(key)) {
seen.add(key);
results.push(item);
}
}
this.logger.info(`Deduplicated ${items.length} to ${results.length} items`);
return results;
}
private async efficientJoin(items: INodeExecutionData[]): Promise<INodeExecutionData[]> {
// Build hash map for O(1) lookup instead of nested loops O(n²)
const leftItems = items.filter(item => item.json.source === 'left');
const rightItems = items.filter(item => item.json.source === 'right');
// Create index on join key
const rightIndex = new Map<string, INodeExecutionData[]>();
for (const item of rightItems) {
const key = String(item.json.joinKey);
if (!rightIndex.has(key)) {
rightIndex.set(key, []);
}
rightIndex.get(key)!.push(item);
}
// Perform join
const results: INodeExecutionData[] = [];
for (const leftItem of leftItems) {
const key = String(leftItem.json.joinKey);
const matches = rightIndex.get(key) || [];
for (const rightItem of matches) {
results.push({
json: {
...leftItem.json,
...rightItem.json,
_joined: true
}
});
}
}
return results;
}
private efficientAggregation(items: INodeExecutionData[]): INodeExecutionData[] {
// Use Map for grouping instead of reduce with object spread
const groups = new Map<string, any[]>();
for (const item of items) {
const key = String(item.json.groupKey);
if (!groups.has(key)) {
groups.set(key, []);
}
groups.get(key)!.push(item.json.value);
}
// Aggregate each group
const results: INodeExecutionData[] = [];
for (const [key, values] of groups) {
results.push({
json: {
key,
count: values.length,
sum: values.reduce((a, b) => a + b, 0),
avg: values.reduce((a, b) => a + b, 0) / values.length,
min: Math.min(...values),
max: Math.max(...values),
}
});
}
return results;
}
private efficientSearch(items: INodeExecutionData[]): INodeExecutionData[] {
const searchTerm = this.getNodeParameter('searchTerm', 0) as string;
const fields = this.getNodeParameter('searchFields', 0) as string[];
// Build search index
const searchIndex = new SearchIndex();
items.forEach((item, index) => {
for (const field of fields) {
const value = String(item.json[field] || '');
searchIndex.add(value, index);
}
});
// Perform search
const matchedIndices = searchIndex.search(searchTerm);
return matchedIndices.map(index => items[index]);
}
private getDeduplicationKey(item: INodeExecutionData): string {
// Create composite key from specified fields
const keyFields = this.getNodeParameter('keyFields', 0, ['id']) as string[];
return keyFields
.map(field => String(item.json[field] || ''))
.join('|');
}
}
class SearchIndex {
private index = new Map<string, Set<number>>();
private trigramIndex = new Map<string, Set<number>>();
add(text: string, documentId: number): void {
// Word index
const words = text.toLowerCase().split(/\s+/);
for (const word of words) {
if (!this.index.has(word)) {
this.index.set(word, new Set());
}
this.index.get(word)!.add(documentId);
}
// Trigram index for fuzzy search
for (let i = 0; i <= text.length - 3; i++) {
const trigram = text.slice(i, i + 3).toLowerCase();
if (!this.trigramIndex.has(trigram)) {
this.trigramIndex.set(trigram, new Set());
}
this.trigramIndex.get(trigram)!.add(documentId);
}
}
search(query: string): number[] {
const queryLower = query.toLowerCase();
const words = queryLower.split(/\s+/);
// Exact word matching
const exactMatches = new Set<number>();
for (const word of words) {
const matches = this.index.get(word);
if (matches) {
matches.forEach(id => exactMatches.add(id));
}
}
// Fuzzy matching with trigrams
const fuzzyMatches = new Set<number>();
for (let i = 0; i <= queryLower.length - 3; i++) {
const trigram = queryLower.slice(i, i + 3);
const matches = this.trigramIndex.get(trigram);
if (matches) {
matches.forEach(id => fuzzyMatches.add(id));
}
}
// Combine exact and fuzzy matches (a real ranking would weight exact hits higher)
return [...new Set([...exactMatches, ...fuzzyMatches])];
}
}
Caching Strategies
Multi-Level Cache
import * as crypto from 'crypto';
export class CachedNode {
private memoryCache = new LRUCache<string, any>(100);
private diskCache = new DiskCache('/tmp/n8n-cache');
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const cacheKey = this.getCacheKey();
const ttl = this.getNodeParameter('cacheTTL', 0, 3600) as number;
// Check memory cache (L1)
const memCached = this.memoryCache.get(cacheKey);
if (memCached && !this.isExpired(memCached, ttl)) {
this.logger.info('Memory cache hit');
return [[{ json: memCached.data }]];
}
// Check disk cache (L2)
const diskCached = await this.diskCache.get(cacheKey);
if (diskCached && !this.isExpired(diskCached, ttl)) {
this.logger.info('Disk cache hit');
// Promote to memory cache
this.memoryCache.set(cacheKey, diskCached);
return [[{ json: diskCached.data }]];
}
// Cache miss - execute operation
const result = await this.executeExpensiveOperation();
// Update both cache levels
const cacheEntry = {
data: result,
timestamp: Date.now()
};
this.memoryCache.set(cacheKey, cacheEntry);
await this.diskCache.set(cacheKey, cacheEntry);
return [[{ json: result }]];
}
private getCacheKey(): string {
const params = this.getNodeParameter('cacheParams', 0) as any;
return crypto
.createHash('sha256')
.update(JSON.stringify(params))
.digest('hex');
}
private isExpired(entry: CacheEntry, ttl: number): boolean {
return Date.now() - entry.timestamp > ttl * 1000;
}
}
class LRUCache<K, V> {
private cache = new Map<K, V>();
constructor(private maxSize: number) {}
get(key: K): V | undefined {
const value = this.cache.get(key);
if (value !== undefined) {
// Move to end (most recently used)
this.cache.delete(key);
this.cache.set(key, value);
}
return value;
}
set(key: K, value: V): void {
// Remove if exists (to update position)
this.cache.delete(key);
// Add to end
this.cache.set(key, value);
// Evict oldest if over capacity
if (this.cache.size > this.maxSize) {
const oldestKey = this.cache.keys().next().value as K; // Map iterates in insertion order
this.cache.delete(oldestKey);
}
}
}
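CachedNode also references a CacheEntry shape and a DiskCache class that are not shown. A minimal file-backed sketch, assuming the cache directory is writable and keys are filesystem-safe (they are, being sha256 hex):
import * as fs from 'fs/promises';
import * as path from 'path';

interface CacheEntry {
  data: any;
  timestamp: number;
}

class DiskCache {
  constructor(private dir: string) {}

  async get(key: string): Promise<CacheEntry | null> {
    try {
      const raw = await fs.readFile(path.join(this.dir, `${key}.json`), 'utf8');
      return JSON.parse(raw) as CacheEntry;
    } catch {
      return null; // a missing or unreadable entry counts as a cache miss
    }
  }

  async set(key: string, entry: CacheEntry): Promise<void> {
    await fs.mkdir(this.dir, { recursive: true });
    await fs.writeFile(path.join(this.dir, `${key}.json`), JSON.stringify(entry));
  }
}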
Performance Monitoring
Comprehensive Metrics Collection
export class MetricsNode {
private metrics = new MetricsCollector();
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const operation = this.getNodeParameter('operation', 0) as string;
// Start timing
const timer = this.metrics.startTimer('execution_time');
try {
// Track memory before
const memBefore = process.memoryUsage();
// Execute operation
const result = await this.executeOperation(operation);
// Track memory after
const memAfter = process.memoryUsage();
// Record metrics
timer.end();
this.metrics.gauge('memory_delta', memAfter.heapUsed - memBefore.heapUsed);
this.metrics.increment('operations_completed');
this.metrics.histogram('result_size', JSON.stringify(result).length);
// Report if threshold exceeded
if (timer.duration > 5000) {
this.logger.warn('Slow operation detected', {
operation,
duration: timer.duration,
resultSize: result.length
});
}
return [[{
json: {
...result,
_metrics: this.metrics.getSnapshot()
}
}]];
} catch (error) {
this.metrics.increment('operations_failed');
throw error;
}
}
}
class MetricsCollector {
private counters = new Map<string, number>();
private gauges = new Map<string, number>();
private histograms = new Map<string, number[]>();
private timers = new Map<string, Timer>();
startTimer(name: string): Timer {
const timer = new Timer();
this.timers.set(name, timer);
return timer;
}
increment(name: string, value = 1): void {
const current = this.counters.get(name) || 0;
this.counters.set(name, current + value);
}
gauge(name: string, value: number): void {
this.gauges.set(name, value);
}
histogram(name: string, value: number): void {
if (!this.histograms.has(name)) {
this.histograms.set(name, []);
}
this.histograms.get(name)!.push(value);
}
getSnapshot(): MetricsSnapshot {
const histogramStats = new Map<string, any>();
for (const [name, values] of this.histograms) {
histogramStats.set(name, {
count: values.length,
min: Math.min(...values),
max: Math.max(...values),
avg: values.reduce((a, b) => a + b, 0) / values.length,
p50: this.percentile(values, 50),
p95: this.percentile(values, 95),
p99: this.percentile(values, 99),
});
}
return {
counters: Object.fromEntries(this.counters),
gauges: Object.fromEntries(this.gauges),
histograms: Object.fromEntries(histogramStats),
timers: Object.fromEntries(
[...this.timers].map(([k, v]) => [k, v.duration])
),
};
}
private percentile(values: number[], p: number): number {
const sorted = [...values].sort((a, b) => a - b);
const index = Math.ceil((p / 100) * sorted.length) - 1;
return sorted[index];
}
}
class Timer {
private start = Date.now();
duration = 0;
end(): void {
this.duration = Date.now() - this.start;
}
}
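The MetricsSnapshot type returned by getSnapshot, spelled out from its usage above (executeOperation is an assumed helper for the node's actual work):
interface MetricsSnapshot {
  counters: Record<string, number>;
  gauges: Record<string, number>;
  histograms: Record<string, {
    count: number;
    min: number;
    max: number;
    avg: number;
    p50: number;
    p95: number;
    p99: number;
  }>;
  timers: Record<string, number>;
}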
Best Practices Summary
Performance Optimization Checklist
- Memory Management
  - Use streaming for large datasets
  - Implement object pooling for expensive resources
  - Monitor and limit memory usage
  - Force garbage collection when necessary
- Concurrency Control
  - Limit parallel operations with semaphores
  - Implement rate limiting for external APIs
  - Use worker threads for CPU-intensive tasks
  - Batch operations for efficiency
- Algorithm Optimization
  - Use appropriate data structures (Map vs Object, Set vs Array)
  - Build indices for frequent lookups
  - Avoid nested loops with large datasets
  - Cache expensive computations
- Resource Management
  - Reuse connections and handles
  - Clean up resources promptly
  - Implement timeouts for all operations (see the sketch after this list)
  - Monitor resource usage continuously
- Monitoring & Profiling
  - Track execution time and memory usage
  - Identify bottlenecks with profiling
  - Set up alerts for performance degradation
  - Use metrics to guide optimization
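For the timeout item above, a small generic wrapper is usually enough. A minimal sketch; the helper name and defaults are illustrative:
async function withTimeout<T>(promise: Promise<T>, ms: number, label = 'operation'): Promise<T> {
  let timer: NodeJS.Timeout | undefined;
  const timeout = new Promise<never>((_, reject) => {
    timer = setTimeout(() => reject(new Error(`${label} timed out after ${ms}ms`)), ms);
  });
  try {
    return await Promise.race([promise, timeout]);
  } finally {
    clearTimeout(timer); // always clear so the pending timer cannot keep the process alive
  }
}

// Usage: fail any fetch that takes longer than 5 seconds
// const data = await withTimeout(fetch(url), 5000, 'fetch');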
Performance Benchmarks
| Optimization | Before | After | Improvement |
| --- | --- | --- | --- |
| Streaming | 2GB RAM, 45s | 50MB RAM, 40s | 40x memory reduction |
| Object Pool | 1000ms per item | 50ms per item | 20x faster |
| Concurrency | Sequential, 5min | Parallel, 30s | 10x faster |
| Caching | 500ms per request | 5ms cache hit | 100x faster |
| Algorithm | O(n²), 30s | O(n log n), 0.5s | 60x faster |
Next Steps
Learn how to keep production nodes observable: Observability & Debugging covers comprehensive logging, metrics, and debugging for n8n nodes.