Why Redis for Node.js
Redis is an in-memory data store that serves as a cache, message broker, session store, and more. Its sub-millisecond latency makes it the go-to choice for performance-critical Node.js applications.
Setting Up ioredis
// Create the shared ioredis client. Connection parameters come from the
// environment so the same code runs locally and in production.
const Redis = require('ioredis');
const redis = new Redis({
  host: process.env.REDIS_HOST || 'localhost',
  port: 6379,
  password: process.env.REDIS_PASSWORD,
  // Fail a command after 3 retries instead of queueing it forever.
  maxRetriesPerRequest: 3,
  // Reconnect backoff: 50ms per attempt, capped at 2 seconds.
  retryStrategy(times) {
    const delay = Math.min(times * 50, 2000);
    return delay;
  },
});
redis.on('connect', () => console.log('Redis connected'));
redis.on('error', (err) => console.error('Redis error:', err));
Redis Data Structures
// Strings — simple key-value
await redis.set('user:1:name', 'Alice');
await redis.get('user:1:name'); // 'Alice'
await redis.set('api:token', 'abc123', 'EX', 3600); // Expires in 1 hour
// Hashes — object-like storage (one key, many fields)
await redis.hset('user:1', { name: 'Alice', email: '[email protected]', role: 'admin' });
await redis.hget('user:1', 'name'); // 'Alice'
await redis.hgetall('user:1'); // { name: 'Alice', email: '...', role: 'admin' }
// Lists — ordered collection (LPUSH + RPOP = queue, LPUSH + LPOP = stack)
await redis.lpush('queue:emails', JSON.stringify({ to: '[email protected]', subject: 'Welcome' }));
await redis.rpop('queue:emails'); // FIFO processing
// Sets — unique values; adding a duplicate is a no-op
await redis.sadd('user:1:roles', 'admin', 'editor');
await redis.sismember('user:1:roles', 'admin'); // 1 (true)
await redis.smembers('user:1:roles'); // ['admin', 'editor']
// Sorted Sets — members ranked by a numeric score (score, member pairs)
await redis.zadd('leaderboard', 1500, 'player:1', 2200, 'player:2', 1800, 'player:3');
await redis.zrevrange('leaderboard', 0, 9, 'WITHSCORES'); // Top 10
Caching Strategies
Cache-Aside (Lazy Loading)
The most common caching pattern. Check cache first, fall back to database, then populate cache.
// Thin JSON cache over Redis with a default TTL (seconds).
// Values are serialized with JSON.stringify, so only JSON-safe data survives
// a round trip (Dates become strings, undefined fields are dropped).
class CacheService {
  constructor(redis, defaultTTL = 300) {
    this.redis = redis;
    this.defaultTTL = defaultTTL;
  }

  // Returns the parsed cached value, or null on a miss.
  async get(key) {
    const cached = await this.redis.get(key);
    return cached ? JSON.parse(cached) : null;
  }

  // Stores value under key with a TTL so stale entries expire on their own.
  async set(key, value, ttl = this.defaultTTL) {
    await this.redis.set(key, JSON.stringify(value), 'EX', ttl);
  }

  // Removes a single cache entry.
  async invalidate(key) {
    await this.redis.del(key);
  }

  // Removes every key matching a glob pattern (e.g. 'user:*').
  // Uses SCAN (via scanStream) instead of KEYS: KEYS is O(N) over the whole
  // keyspace and blocks the Redis server, which is unsafe in production.
  async invalidatePattern(pattern) {
    const stream = this.redis.scanStream({ match: pattern, count: 100 });
    const matched = [];
    for await (const keys of stream) {
      matched.push(...keys);
    }
    if (matched.length > 0) {
      await this.redis.del(...matched);
    }
  }
}
// Usage in a service: the cache-aside read path.
// Serve from Redis when a copy exists; otherwise load from the database
// and repopulate the cache for subsequent readers.
async function getUser(id) {
  const cacheKey = `user:${id}`;

  // Fast path — cache hit.
  const hit = await cache.get(cacheKey);
  if (hit) {
    return hit;
  }

  // Slow path — cache miss, go to the database.
  const user = await db.users.findById(id);
  if (!user) {
    return null;
  }

  await cache.set(cacheKey, user, 600); // keep for 10 minutes
  return user;
}
// Invalidate on update
async function updateUser(id, data) {
const user = await db.users.update(id, data);
await cache.invalidate(`user:${id}`);
return user;
}Write-Through Cache
async function createOrder(orderData) {
// Write to DB first
const order = await db.orders.create(orderData);
// Write to cache simultaneously
await cache.set(`order:${order.id}`, order, 3600);
return order;
}Session Storage with connect-redis
const session = require('express-session');
const RedisStore = require('connect-redis').default;
app.use(session({
store: new RedisStore({
client: redis,
prefix: 'sess:',
ttl: 86400, // 24 hours
}),
secret: process.env.SESSION_SECRET,
resave: false,
saveUninitialized: false,
cookie: {
secure: process.env.NODE_ENV === 'production',
httpOnly: true,
maxAge: 24 * 60 * 60 * 1000,
sameSite: 'strict',
},
}));Pub/Sub Implementation
Redis Pub/Sub enables real-time messaging between services.
// Publisher (uses the main redis connection).
// Serializes the event to JSON and stamps it with the send time so
// subscribers can reason about ordering and staleness.
async function publishEvent(channel, data) {
  const envelope = { ...data, timestamp: Date.now() };
  await redis.publish(channel, JSON.stringify(envelope));
}
// On order creation
await publishEvent('order:created', {
  orderId: order.id,
  userId: order.userId,
  total: order.total,
});
// Subscriber (needs a DEDICATED connection — once a connection enters
// subscriber mode it cannot issue regular commands).
const subscriber = new Redis(/* same config */);
subscriber.subscribe('order:created', 'user:registered', 'payment:completed');
subscriber.on('message', (channel, message) => {
  const data = JSON.parse(message);
  // Route each event to the service(s) that care about it.
  switch (channel) {
    case 'order:created':
      emailService.sendOrderConfirmation(data);
      analyticsService.trackOrder(data);
      break;
    case 'user:registered':
      emailService.sendWelcome(data);
      break;
    case 'payment:completed':
      fulfillmentService.processOrder(data);
      break;
  }
});
// Pattern-based subscription: one psubscribe covers every channel matching
// the glob (order:created, order:shipped, ...); handler fires on 'pmessage'.
subscriber.psubscribe('order:*');
subscriber.on('pmessage', (pattern, channel, message) => {
  console.log(`${channel}: ${message}`);
});
Rate Limiting with Redis
// Sliding-window rate limiter backed by a Redis sorted set.
// Each request is stored as a uniquely-named member scored by its timestamp;
// counting members whose score falls inside the window gives an exact
// rolling count (no fixed-window boundary bursts).
async function rateLimit(key, limit, windowSeconds) {
  const now = Date.now();
  const windowStart = now - windowSeconds * 1000;

  const pipeline = redis.pipeline();
  // Remove entries that have fallen out of the window.
  pipeline.zremrangebyscore(key, 0, windowStart);
  // Record this request (random suffix avoids same-millisecond collisions).
  pipeline.zadd(key, now, `${now}:${Math.random()}`);
  // Count requests currently in the window (includes this one).
  pipeline.zcard(key);
  // Let idle keys expire on their own.
  pipeline.expire(key, windowSeconds);

  const results = await pipeline.exec();
  // exec() resolves to [err, result] pairs and does NOT reject on per-command
  // failure; surface the first error instead of silently reading a bogus count.
  for (const [err] of results) {
    if (err) throw err;
  }
  const requestCount = results[2][1];

  return {
    allowed: requestCount <= limit,
    remaining: Math.max(0, limit - requestCount),
    resetAt: new Date(now + windowSeconds * 1000),
  };
}
// Express middleware factory wrapping rateLimit().
// Sets the standard X-RateLimit-* headers on every response and answers
// 429 once the limit is exceeded. Errors (e.g. Redis unavailable) are
// forwarded to next(err) — Express 4 does not catch throws from async
// handlers, so without the try/catch they would become unhandled rejections.
function rateLimitMiddleware(limit = 100, windowSeconds = 60) {
  return async (req, res, next) => {
    try {
      const key = `ratelimit:${req.ip}`;
      const result = await rateLimit(key, limit, windowSeconds);
      res.set({
        'X-RateLimit-Limit': limit,
        'X-RateLimit-Remaining': result.remaining,
        'X-RateLimit-Reset': result.resetAt.toISOString(),
      });
      if (!result.allowed) {
        return res.status(429).json({
          error: 'Too many requests',
          retryAfter: windowSeconds,
        });
      }
      next();
    } catch (err) {
      next(err);
    }
  };
}
app.use('/api/', rateLimitMiddleware(100, 60));
Distributed Locks with Redlock
const Redlock = require('redlock');
// Distributed lock manager over the shared client. NOTE(review): for real
// fault tolerance the Redlock algorithm should span several independent
// Redis nodes; a single node is fine for a tutorial.
const redlock = new Redlock([redis], {
  retryCount: 3,    // give up after 3 failed acquisition attempts
  retryDelay: 200,  // ms between attempts
  retryJitter: 200, // random extra delay to avoid thundering herds
});
// Acquire lock before critical section
async function processPayment(orderId) {
const lockKey = `lock:payment:${orderId}`;
const lock = await redlock.acquire([lockKey], 10000); // 10s lock
try {
// Critical section — only one process can execute this
const order = await db.orders.findById(orderId);
if (order.status !== 'pending') {
throw new Error('Order already processed');
}
await paymentGateway.charge(order);
await db.orders.update(orderId, { status: 'paid' });
} finally {
await lock.release();
}
}Performance Tips
// Use pipelines for batch operations (reduces round trips): all commands
// are sent in one network write and answered in one read.
const pipeline = redis.pipeline();
for (const id of userIds) {
  pipeline.hgetall(`user:${id}`);
}
const results = await pipeline.exec();
// exec() resolves to [err, result] pairs, in command order.
const users = results.map(([err, data]) => data);
// Use Lua scripts for atomic operations: the script runs server-side as a
// single unit, so no other client command can interleave between the GET
// and the DECRBY.
const luaScript = `
local current = redis.call('GET', KEYS[1])
if current and tonumber(current) >= tonumber(ARGV[1]) then
return redis.call('DECRBY', KEYS[1], ARGV[1])
end
return nil
`;
// Atomic "decrement if sufficient balance"
const newBalance = await redis.eval(luaScript, 1, 'balance:user:1', 50);
Key Takeaways
- Cache-aside is the safest pattern — cache on miss, invalidate on write
- Pub/Sub is fire-and-forget — no persistence, use streams for durability
- Rate limiting with sorted sets gives precise sliding windows
- Use pipelines for batch operations and Lua scripts for atomicity
- Always set TTL on cached keys to prevent stale data
- Use dedicated connections for pub/sub subscribers
