Building Node.js Microservices with Docker, RabbitMQ and API Gateway

Microservices architecture powers Netflix, Uber, and Amazon — and you can build the same patterns with Node.js. In this tutorial, you will build a complete e-commerce backend with three independent services communicating through RabbitMQ, orchestrated by an API Gateway, and containerized with Docker Compose.
What You Will Build
A mini e-commerce platform composed of independent microservices:
- API Gateway — single entry point routing requests to services, handling auth and rate limiting
- User Service — registration, authentication, JWT token management
- Product Service — product catalog CRUD operations
- Order Service — order creation with async inventory validation via RabbitMQ
- RabbitMQ — message broker for asynchronous inter-service communication
- Docker Compose — orchestrating all services with a single command
Architecture Overview
┌─────────────┐
│ Client │
└──────┬───────┘
│
┌──────▼───────┐
│ API Gateway │ :3000
│ (Express) │
└──┬───┬───┬───┘
│ │ │
│ │ └──────────────────┐
│ │ │
┌──▼───┴──┐ ┌──────────┐ ┌─▼──────────┐
│ User │ │ Product │ │ Order │
│ Service │ │ Service │ │ Service │
│ :3001 │ │ :3002 │ │ :3003 │
└─────────┘ └────┬─────┘ └─────┬──────┘
│ │
└──────┬───────┘
│
┌──────▼───────┐
│ RabbitMQ │
│ :5672 │
└──────────────┘
Prerequisites
Before starting, ensure you have:
- Node.js 20+ installed
- Docker and Docker Compose installed
- Basic understanding of Express.js and REST APIs
- Familiarity with async/await patterns in JavaScript
- A code editor (VS Code recommended)
- A terminal
All services will run in Docker containers. You do not need RabbitMQ installed locally — Docker handles everything.
Step 1: Project Structure Setup
Create the monorepo structure for all services:
mkdir ecommerce-microservices && cd ecommerce-microservices
# Create service directories
mkdir -p api-gateway/src
mkdir -p user-service/src
mkdir -p product-service/src
mkdir -p order-service/src
mkdir -p shared/src
Initialize each service with its own package.json:
# Root package.json for workspace management
cat > package.json << 'EOF'
{
"name": "ecommerce-microservices",
"private": true,
"workspaces": ["api-gateway", "user-service", "product-service", "order-service", "shared"]
}
EOF
# Initialize each service
for service in api-gateway user-service product-service order-service shared; do
cd $service
npm init -y
cd ..
done
Your directory structure should look like this:
ecommerce-microservices/
├── api-gateway/
│ ├── src/
│ ├── Dockerfile
│ └── package.json
├── user-service/
│ ├── src/
│ ├── Dockerfile
│ └── package.json
├── product-service/
│ ├── src/
│ ├── Dockerfile
│ └── package.json
├── order-service/
│ ├── src/
│ ├── Dockerfile
│ └── package.json
├── shared/
│ └── src/
├── docker-compose.yml
└── package.json
Step 2: Shared Utilities
Create shared modules that all services will use. This avoids code duplication across services.
RabbitMQ Connection Helper
// shared/src/rabbitmq.js
const amqp = require("amqplib");
/**
 * Thin wrapper around amqplib with startup retry and JSON message helpers.
 * Call connect() once at service startup; the publish/consume helpers reuse
 * the single channel it creates.
 */
class RabbitMQClient {
  constructor() {
    // Both are populated by connect(); all other methods require it first.
    this.connection = null;
    this.channel = null;
  }

  /**
   * Connect to RabbitMQ, retrying up to 10 times at 3-second intervals.
   * This covers the window where the broker container is still booting.
   * @param {string} [url] - AMQP URL; defaults to RABBITMQ_URL or localhost.
   * @returns {Promise<object>} the created channel
   * @throws {Error} when every attempt fails
   */
  async connect(url = process.env.RABBITMQ_URL || "amqp://localhost:5672") {
    const maxRetries = 10;
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
      try {
        this.connection = await amqp.connect(url);
        this.channel = await this.connection.createChannel();
        console.log("Connected to RabbitMQ");
        return this.channel;
      } catch (error) {
        const willRetry = attempt < maxRetries;
        console.log(
          `RabbitMQ connection attempt ${attempt}/${maxRetries} failed.` +
            (willRetry ? " Retrying in 3s..." : "")
        );
        // Fix: only sleep when another attempt follows — previously this
        // also waited 3 seconds after the final failure before throwing.
        if (willRetry) {
          await new Promise((resolve) => setTimeout(resolve, 3000));
        }
      }
    }
    throw new Error("Failed to connect to RabbitMQ after maximum retries");
  }

  /**
   * Publish a JSON-serializable message to a durable queue.
   * @throws {Error} if connect() has not been called yet
   */
  async publishToQueue(queue, message) {
    if (!this.channel) throw new Error("Channel not initialized");
    await this.channel.assertQueue(queue, { durable: true });
    this.channel.sendToQueue(queue, Buffer.from(JSON.stringify(message)), {
      persistent: true, // message survives a broker restart
    });
  }

  /**
   * Consume JSON messages from a durable queue one at a time (prefetch 1).
   * Acks when the callback resolves; nacks with requeue when it throws.
   * NOTE(review): requeue-on-failure can loop a poison message forever —
   * consider a dead-letter queue in production.
   * @throws {Error} if connect() has not been called yet
   */
  async consumeFromQueue(queue, callback) {
    if (!this.channel) throw new Error("Channel not initialized");
    await this.channel.assertQueue(queue, { durable: true });
    this.channel.prefetch(1);
    this.channel.consume(queue, async (msg) => {
      if (msg) {
        const content = JSON.parse(msg.content.toString());
        try {
          await callback(content);
          this.channel.ack(msg);
        } catch (error) {
          console.error("Message processing failed:", error);
          this.channel.nack(msg, false, true); // requeue for another attempt
        }
      }
    });
    console.log(`Consuming from queue: ${queue}`);
  }

  /** Close channel then connection; safe to call if never connected. */
  async close() {
    if (this.channel) await this.channel.close();
    if (this.connection) await this.connection.close();
  }
}
module.exports = { RabbitMQClient };
Shared Response Helper
// shared/src/response.js
/** Internal: send a JSON envelope with a fresh ISO timestamp appended. */
const send = (res, statusCode, payload) =>
  res.status(statusCode).json({ ...payload, timestamp: new Date().toISOString() });

/**
 * Send a success envelope: { success: true, data, timestamp }.
 * @param {object} res - Express response
 * @param {*} data - payload to return to the client
 * @param {number} [statusCode=200]
 */
function successResponse(res, data, statusCode = 200) {
  return send(res, statusCode, { success: true, data });
}

/**
 * Send an error envelope: { success: false, error, timestamp }.
 * @param {object} res - Express response
 * @param {string} message - error message for the client
 * @param {number} [statusCode=500]
 */
function errorResponse(res, message, statusCode = 500) {
  return send(res, statusCode, { success: false, error: message });
}
module.exports = { successResponse, errorResponse };
Step 3: User Service
The User Service handles registration and authentication with JWT tokens.
Install Dependencies
cd user-service
npm install express jsonwebtoken bcryptjs uuid cors
cd ..
User Service Implementation
// user-service/src/index.js
//
// User Service: registration, login, and JWT issuance/validation.
// State is an in-memory Map, so all users are lost on restart — fine for
// this tutorial, not for production.
const express = require("express");
const jwt = require("jsonwebtoken");
const bcrypt = require("bcryptjs");
const { v4: uuidv4 } = require("uuid");
const cors = require("cors");

const app = express();
app.use(express.json());
app.use(cors());

// Must match the JWT_SECRET the API Gateway uses to verify tokens
// (docker-compose sets the same value for both services).
const JWT_SECRET = process.env.JWT_SECRET || "your-secret-key";
const PORT = process.env.PORT || 3001;

// In-memory store (replace with a real database in production)
// userId -> { id, email, name, password (bcrypt hash), createdAt }
const users = new Map();

// Health check — polled by the API Gateway's aggregated /health endpoint.
app.get("/health", (req, res) => {
  res.json({ status: "healthy", service: "user-service" });
});

// Register: creates a user and returns a 24h JWT in one step.
app.post("/users/register", async (req, res) => {
  try {
    const { email, password, name } = req.body;
    if (!email || !password || !name) {
      return res.status(400).json({ error: "All fields are required" });
    }
    // Check if user exists (linear scan — acceptable for the in-memory store)
    const existingUser = [...users.values()].find((u) => u.email === email);
    if (existingUser) {
      return res.status(409).json({ error: "Email already registered" });
    }
    const hashedPassword = await bcrypt.hash(password, 10);
    const user = {
      id: uuidv4(),
      email,
      name,
      password: hashedPassword,
      createdAt: new Date().toISOString(),
    };
    users.set(user.id, user);
    const token = jwt.sign({ userId: user.id, email: user.email }, JWT_SECRET, {
      expiresIn: "24h",
    });
    // Only public fields are echoed back — never the password hash.
    res.status(201).json({
      success: true,
      data: {
        user: { id: user.id, email: user.email, name: user.name },
        token,
      },
    });
  } catch (error) {
    res.status(500).json({ error: "Registration failed" });
  }
});

// Login: verifies credentials and issues a fresh 24h JWT.
app.post("/users/login", async (req, res) => {
  try {
    const { email, password } = req.body;
    const user = [...users.values()].find((u) => u.email === email);
    // Same message for unknown email and wrong password, so the endpoint
    // does not reveal which emails are registered.
    if (!user || !(await bcrypt.compare(password, user.password))) {
      return res.status(401).json({ error: "Invalid credentials" });
    }
    const token = jwt.sign({ userId: user.id, email: user.email }, JWT_SECRET, {
      expiresIn: "24h",
    });
    res.json({
      success: true,
      data: {
        user: { id: user.id, email: user.email, name: user.name },
        token,
      },
    });
  } catch (error) {
    res.status(500).json({ error: "Login failed" });
  }
});

// Validate token (used internally by API Gateway)
// Registered before /users/:id so "validate" is not captured as an :id param.
app.get("/users/validate", (req, res) => {
  try {
    const token = req.headers.authorization?.split(" ")[1];
    if (!token) return res.status(401).json({ error: "No token provided" });
    const decoded = jwt.verify(token, JWT_SECRET);
    const user = users.get(decoded.userId);
    if (!user) return res.status(404).json({ error: "User not found" });
    res.json({
      success: true,
      data: { id: user.id, email: user.email, name: user.name },
    });
  } catch (error) {
    // jwt.verify throws on a bad signature or an expired token.
    res.status(401).json({ error: "Invalid token" });
  }
});

// Get user profile (public fields only)
app.get("/users/:id", (req, res) => {
  const user = users.get(req.params.id);
  if (!user) return res.status(404).json({ error: "User not found" });
  res.json({
    success: true,
    data: { id: user.id, email: user.email, name: user.name },
  });
});

app.listen(PORT, () => {
  console.log(`User Service running on port ${PORT}`);
});
Step 4: Product Service
The Product Service manages the product catalog and publishes inventory events to RabbitMQ.
Install Dependencies
cd product-service
npm install express uuid cors amqplib
cd ..
Product Service Implementation
// product-service/src/index.js
//
// Product Service: catalog CRUD plus stock checks. Publishes a
// "stock_updated" event to RabbitMQ whenever stock changes.
const express = require("express");
const { v4: uuidv4 } = require("uuid");
const cors = require("cors");
const { RabbitMQClient } = require("../../shared/src/rabbitmq");

const app = express();
app.use(express.json());
app.use(cors());

const PORT = process.env.PORT || 3002;
const rabbit = new RabbitMQClient();

// In-memory product store: productId -> product
const products = new Map();

// Seed initial products so the catalog is never empty on a fresh start.
function seedProducts() {
  const initial = [
    { name: "Wireless Keyboard", price: 49.99, stock: 100, category: "electronics" },
    { name: "USB-C Hub", price: 29.99, stock: 200, category: "electronics" },
    { name: "Standing Desk Mat", price: 39.99, stock: 50, category: "office" },
    { name: "Noise-Cancelling Headphones", price: 199.99, stock: 75, category: "electronics" },
    { name: "Ergonomic Mouse", price: 59.99, stock: 150, category: "electronics" },
  ];
  initial.forEach((p) => {
    const product = { ...p, id: uuidv4(), createdAt: new Date().toISOString() };
    products.set(product.id, product);
  });
}

// Health check
app.get("/health", (req, res) => {
  res.json({ status: "healthy", service: "product-service" });
});

// List all products, with optional category/minPrice/maxPrice query filters.
app.get("/products", (req, res) => {
  const { category, minPrice, maxPrice } = req.query;
  let result = [...products.values()];
  if (category) result = result.filter((p) => p.category === category);
  if (minPrice) result = result.filter((p) => p.price >= parseFloat(minPrice));
  if (maxPrice) result = result.filter((p) => p.price <= parseFloat(maxPrice));
  res.json({ success: true, data: result, total: result.length });
});

// Get single product
app.get("/products/:id", (req, res) => {
  const product = products.get(req.params.id);
  if (!product) return res.status(404).json({ error: "Product not found" });
  res.json({ success: true, data: product });
});

// Create product
app.post("/products", (req, res) => {
  const { name, price, stock, category } = req.body;
  // stock is compared against undefined (not truthiness) so stock: 0 is valid.
  if (!name || !price || stock === undefined || !category) {
    return res.status(400).json({ error: "All fields are required" });
  }
  const product = {
    id: uuidv4(),
    name,
    price: parseFloat(price),
    stock: parseInt(stock),
    category,
    createdAt: new Date().toISOString(),
  };
  products.set(product.id, product);
  res.status(201).json({ success: true, data: product });
});

// Update stock (called when order is confirmed)
app.patch("/products/:id/stock", async (req, res) => {
  const product = products.get(req.params.id);
  if (!product) return res.status(404).json({ error: "Product not found" });
  const { quantity } = req.body;
  product.stock += quantity; // negative to decrease
  // Publish stock update event so other services can react asynchronously.
  await rabbit.publishToQueue("stock_updated", {
    productId: product.id,
    newStock: product.stock,
    change: quantity,
    timestamp: new Date().toISOString(),
  });
  res.json({ success: true, data: product });
});

// Check stock availability (internal endpoint)
// POST-only, so it never collides with the GET /products/:id route above.
app.post("/products/check-stock", (req, res) => {
  const { items } = req.body; // [{ productId, quantity }]
  const results = items.map((item) => {
    const product = products.get(item.productId);
    return {
      productId: item.productId,
      requested: item.quantity,
      available: product ? product.stock : 0,
      sufficient: product ? product.stock >= item.quantity : false,
    };
  });
  const allAvailable = results.every((r) => r.sufficient);
  res.json({ success: true, data: { allAvailable, items: results } });
});

// Connect to RabbitMQ before accepting traffic, then start the HTTP server.
async function start() {
  seedProducts();
  await rabbit.connect(process.env.RABBITMQ_URL);
  app.listen(PORT, () => {
    console.log(`Product Service running on port ${PORT}`);
  });
}
start().catch(console.error);
Step 5: Order Service with Async Messaging
The Order Service is the most interesting — it creates orders and uses RabbitMQ to asynchronously validate inventory and process orders.
Install Dependencies
cd order-service
npm install express uuid cors amqplib
cd ..
Order Service Implementation
// order-service/src/index.js
//
// Order Service: accepts orders over HTTP, then validates and confirms them
// asynchronously by consuming its own "order_created" queue. Talks to the
// Product Service over HTTP using the global fetch built into modern Node.
const express = require("express");
const { v4: uuidv4 } = require("uuid");
const cors = require("cors");
const { RabbitMQClient } = require("../../shared/src/rabbitmq");

const app = express();
app.use(express.json());
app.use(cors());

const PORT = process.env.PORT || 3003;
const PRODUCT_SERVICE_URL =
  process.env.PRODUCT_SERVICE_URL || "http://localhost:3002";
const rabbit = new RabbitMQClient();

// In-memory order store: orderId -> order
const orders = new Map();

// Health check
app.get("/health", (req, res) => {
  res.json({ status: "healthy", service: "order-service" });
});

// Create order: stores it as "pending" and enqueues it for async validation.
// The HTTP response returns immediately; the status flips to "confirmed" or
// "rejected" later, once the queue consumer has checked stock.
app.post("/orders", async (req, res) => {
  try {
    const { userId, items } = req.body;
    // items: [{ productId, quantity, price }]
    if (!userId || !items || items.length === 0) {
      return res.status(400).json({ error: "userId and items are required" });
    }
    const order = {
      id: uuidv4(),
      userId,
      items,
      total: items.reduce((sum, item) => sum + item.price * item.quantity, 0),
      status: "pending",
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString(),
    };
    orders.set(order.id, order);
    // Publish order to RabbitMQ for async processing
    await rabbit.publishToQueue("order_created", {
      orderId: order.id,
      userId: order.userId,
      items: order.items,
      total: order.total,
    });
    console.log(`Order ${order.id} created and sent to queue`);
    res.status(201).json({
      success: true,
      data: order,
      message: "Order created. Processing inventory validation...",
    });
  } catch (error) {
    console.error("Order creation failed:", error);
    res.status(500).json({ error: "Order creation failed" });
  }
});

// Get order by ID
app.get("/orders/:id", (req, res) => {
  const order = orders.get(req.params.id);
  if (!order) return res.status(404).json({ error: "Order not found" });
  res.json({ success: true, data: order });
});

// Get orders by user
// Two path segments after /orders, so it does not collide with /orders/:id.
app.get("/orders/user/:userId", (req, res) => {
  const userOrders = [...orders.values()].filter(
    (o) => o.userId === req.params.userId
  );
  res.json({ success: true, data: userOrders, total: userOrders.length });
});

// Process order queue - validate stock and confirm/reject
// Consumes "order_created", checks availability with the Product Service,
// then either deducts stock and publishes "order_confirmed", or publishes
// "order_rejected" when stock is insufficient.
async function processOrderQueue() {
  await rabbit.consumeFromQueue("order_created", async (orderData) => {
    console.log(`Processing order: ${orderData.orderId}`);
    const order = orders.get(orderData.orderId);
    // Not in this instance's in-memory store (e.g. created by another
    // replica) — nothing to update, so let the message be acked and dropped.
    if (!order) return;
    try {
      // Check stock with Product Service
      const response = await fetch(`${PRODUCT_SERVICE_URL}/products/check-stock`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ items: orderData.items }),
      });
      const stockCheck = await response.json();
      if (stockCheck.data.allAvailable) {
        // Deduct stock for each item (negative quantity decreases stock).
        for (const item of orderData.items) {
          await fetch(
            `${PRODUCT_SERVICE_URL}/products/${item.productId}/stock`,
            {
              method: "PATCH",
              headers: { "Content-Type": "application/json" },
              body: JSON.stringify({ quantity: -item.quantity }),
            }
          );
        }
        order.status = "confirmed";
        order.updatedAt = new Date().toISOString();
        // Publish confirmation event
        await rabbit.publishToQueue("order_confirmed", {
          orderId: order.id,
          userId: order.userId,
          total: order.total,
        });
        console.log(`Order ${order.id} confirmed`);
      } else {
        order.status = "rejected";
        order.reason = "Insufficient stock";
        order.updatedAt = new Date().toISOString();
        await rabbit.publishToQueue("order_rejected", {
          orderId: order.id,
          userId: order.userId,
          reason: "Insufficient stock",
          details: stockCheck.data.items,
        });
        console.log(`Order ${order.id} rejected - insufficient stock`);
      }
    } catch (error) {
      // Error is swallowed after marking the order failed, so the consumer
      // callback resolves and the message is acked instead of requeued forever.
      console.error(`Failed to process order ${orderData.orderId}:`, error);
      order.status = "failed";
      order.reason = "Processing error";
      order.updatedAt = new Date().toISOString();
    }
  });
}

// Connect to RabbitMQ and start consuming before accepting HTTP traffic.
async function start() {
  await rabbit.connect(process.env.RABBITMQ_URL);
  await processOrderQueue();
  app.listen(PORT, () => {
    console.log(`Order Service running on port ${PORT}`);
  });
}
start().catch(console.error);
In production, you would use a real database (PostgreSQL, MongoDB) instead of in-memory Maps. The in-memory approach is used here to focus on the microservices architecture patterns without database setup overhead.
Step 6: API Gateway
The API Gateway is the single entry point for all client requests. It routes traffic, validates JWT tokens, and applies rate limiting.
Install Dependencies
cd api-gateway
npm install express http-proxy-middleware cors express-rate-limit jsonwebtoken
cd ..
API Gateway Implementation
// api-gateway/src/index.js
//
// API Gateway: single entry point. Applies CORS, rate limiting, and JWT
// auth, then streams requests through to the downstream services.
const express = require("express");
const { createProxyMiddleware } = require("http-proxy-middleware");
const cors = require("cors");
const rateLimit = require("express-rate-limit");
const jwt = require("jsonwebtoken");

const app = express();
app.use(cors());
// FIX: no app.use(express.json()) here. A body parser at the gateway consumes
// the request stream before http-proxy-middleware can forward it, which makes
// proxied POST/PATCH requests (register, login, create order) hang. The
// gateway never reads request bodies itself, so the raw stream must be passed
// through untouched. (If you ever need the parsed body at the gateway, use
// http-proxy-middleware's fixRequestBody helper in onProxyReq instead.)

const PORT = process.env.PORT || 3000;
// Must match the secret the User Service signs tokens with.
const JWT_SECRET = process.env.JWT_SECRET || "your-secret-key";

// Service URLs (overridden by Docker Compose environment variables)
const SERVICES = {
  users: process.env.USER_SERVICE_URL || "http://localhost:3001",
  products: process.env.PRODUCT_SERVICE_URL || "http://localhost:3002",
  orders: process.env.ORDER_SERVICE_URL || "http://localhost:3003",
};

// Rate limiting: 100 requests per 15-minute window per client IP.
const limiter = rateLimit({
  windowMs: 15 * 60 * 1000, // 15 minutes
  max: 100,
  message: { error: "Too many requests, please try again later" },
});
app.use(limiter);
// JWT Authentication middleware
// Extracts the Bearer token from the Authorization header, verifies it, and
// exposes the decoded payload ({ userId, email }) as req.user for downstream
// handlers and proxy header forwarding. Responds 401 on any failure.
function authenticate(req, res, next) {
  const header = req.headers.authorization;
  const token = header ? header.split(" ")[1] : undefined;
  if (!token) {
    return res.status(401).json({ error: "Authentication required" });
  }
  try {
    req.user = jwt.verify(token, JWT_SECRET);
    return next();
  } catch (error) {
    return res.status(401).json({ error: "Invalid or expired token" });
  }
}
// Request logging middleware: logs method, path, status, and latency once
// each response has finished streaming.
app.use((req, res, next) => {
  const start = Date.now();
  res.on("finish", () => {
    const duration = Date.now() - start;
    console.log(
      `${req.method} ${req.originalUrl} -> ${res.statusCode} (${duration}ms)`
    );
  });
  next();
});

// Health check for all services: fans out to each service's /health endpoint
// and returns 503 if any of them is unreachable.
app.get("/health", async (req, res) => {
  const checks = {};
  for (const [name, url] of Object.entries(SERVICES)) {
    try {
      const response = await fetch(`${url}/health`);
      const data = await response.json();
      checks[name] = { status: "up", ...data };
    } catch {
      checks[name] = { status: "down" };
    }
  }
  const allUp = Object.values(checks).every((c) => c.status === "up");
  res.status(allUp ? 200 : 503).json({
    gateway: "healthy",
    services: checks,
    timestamp: new Date().toISOString(),
  });
});

// Public routes (no auth required)
// NOTE: registration order matters — these specific /api/users/* paths must
// be mounted before the protected catch-all /api/users proxy further down.
app.use(
  "/api/users/register",
  createProxyMiddleware({
    target: SERVICES.users,
    changeOrigin: true,
    pathRewrite: { "^/api/users/register": "/users/register" },
  })
);
app.use(
  "/api/users/login",
  createProxyMiddleware({
    target: SERVICES.users,
    changeOrigin: true,
    pathRewrite: { "^/api/users/login": "/users/login" },
  })
);
// Product browsing is deliberately public.
app.use(
  "/api/products",
  createProxyMiddleware({
    target: SERVICES.products,
    changeOrigin: true,
    pathRewrite: { "^/api/products": "/products" },
  })
);

// Protected routes (auth required)
app.use(
  "/api/orders",
  authenticate,
  createProxyMiddleware({
    target: SERVICES.orders,
    changeOrigin: true,
    pathRewrite: { "^/api/orders": "/orders" },
    onProxyReq(proxyReq, req) {
      // Forward user info to downstream service
      proxyReq.setHeader("X-User-Id", req.user.userId);
      proxyReq.setHeader("X-User-Email", req.user.email);
    },
  })
);
app.use(
  "/api/users",
  authenticate,
  createProxyMiddleware({
    target: SERVICES.users,
    changeOrigin: true,
    pathRewrite: { "^/api/users": "/users" },
  })
);

// 404 handler — reached only when no proxy route matched.
app.use((req, res) => {
  res.status(404).json({ error: "Route not found" });
});

// Error handler — the four-argument signature is how Express identifies it.
app.use((err, req, res, next) => {
  console.error("Gateway error:", err.message);
  res.status(500).json({ error: "Internal gateway error" });
});

app.listen(PORT, () => {
  console.log(`API Gateway running on port ${PORT}`);
  console.log("Service routes:");
  Object.entries(SERVICES).forEach(([name, url]) => {
    console.log(` /api/${name} -> ${url}`);
  });
});
Step 7: Dockerize Everything
Service Dockerfile
Create the same Dockerfile for each service (api-gateway, user-service, product-service, order-service):
# api-gateway/Dockerfile (same pattern for all services)
FROM node:20-alpine
# FIX: recreate the repo layout inside the image. The services load the shared
# module with require("../../shared/src/..."), which resolves two directories
# above src/. With a plain WORKDIR /app that path escapes to /shared and the
# container crashes at startup, so the working directory is /app/<service>
# with the shared module copied to /app/shared.
WORKDIR /app/api-gateway
# Copy shared module
COPY shared/ /app/shared/
# Copy service files
COPY api-gateway/package*.json ./
RUN npm install --production
COPY api-gateway/src/ ./src/
EXPOSE 3000
CMD ["node", "src/index.js"]
Adjust the paths for each service:
# user-service/Dockerfile
FROM node:20-alpine
# WORKDIR is /app/user-service (not /app) so require("../../shared/...")
# resolves to /app/shared, mirroring the local repo layout.
WORKDIR /app/user-service
COPY shared/ /app/shared/
COPY user-service/package*.json ./
RUN npm install --production
COPY user-service/src/ ./src/
EXPOSE 3001
CMD ["node", "src/index.js"]
# product-service/Dockerfile
FROM node:20-alpine
# WORKDIR is /app/product-service (not /app) so that the service's
# require("../../shared/src/rabbitmq") resolves to /app/shared.
WORKDIR /app/product-service
COPY shared/ /app/shared/
COPY product-service/package*.json ./
RUN npm install --production
COPY product-service/src/ ./src/
EXPOSE 3002
CMD ["node", "src/index.js"]
# order-service/Dockerfile
FROM node:20-alpine
# WORKDIR is /app/order-service (not /app) so that the service's
# require("../../shared/src/rabbitmq") resolves to /app/shared.
WORKDIR /app/order-service
COPY shared/ /app/shared/
COPY order-service/package*.json ./
RUN npm install --production
COPY order-service/src/ ./src/
EXPOSE 3003
CMD ["node", "src/index.js"]
Docker Compose Configuration
# docker-compose.yml
# NOTE: indentation restored — YAML nesting is significant, and the flattened
# version of this file is not valid Compose configuration.
version: "3.8"

services:
  rabbitmq:
    image: rabbitmq:3-management-alpine
    ports:
      - "5672:5672"
      - "15672:15672" # Management UI
    environment:
      RABBITMQ_DEFAULT_USER: admin
      RABBITMQ_DEFAULT_PASS: rabbitmq123
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq
    healthcheck:
      test: ["CMD", "rabbitmq-diagnostics", "-q", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  user-service:
    build:
      context: .
      dockerfile: user-service/Dockerfile
    ports:
      - "3001:3001"
    environment:
      PORT: 3001
      JWT_SECRET: my-super-secret-jwt-key-change-in-production
    restart: unless-stopped

  product-service:
    build:
      context: .
      dockerfile: product-service/Dockerfile
    ports:
      - "3002:3002"
    environment:
      PORT: 3002
      RABBITMQ_URL: amqp://admin:rabbitmq123@rabbitmq:5672
    depends_on:
      rabbitmq:
        condition: service_healthy
    restart: unless-stopped

  order-service:
    build:
      context: .
      dockerfile: order-service/Dockerfile
    ports:
      - "3003:3003"
    environment:
      PORT: 3003
      RABBITMQ_URL: amqp://admin:rabbitmq123@rabbitmq:5672
      PRODUCT_SERVICE_URL: http://product-service:3002
    depends_on:
      rabbitmq:
        condition: service_healthy
      product-service:
        condition: service_started
    restart: unless-stopped

  api-gateway:
    build:
      context: .
      dockerfile: api-gateway/Dockerfile
    ports:
      - "3000:3000"
    environment:
      PORT: 3000
      JWT_SECRET: my-super-secret-jwt-key-change-in-production
      USER_SERVICE_URL: http://user-service:3001
      PRODUCT_SERVICE_URL: http://product-service:3002
      ORDER_SERVICE_URL: http://order-service:3003
    depends_on:
      - user-service
      - product-service
      - order-service
    restart: unless-stopped

volumes:
  rabbitmq_data:
Step 8: Launch and Test
Start All Services
# Build and start everything
docker compose up --build -d
# Check all services are running
docker compose ps
# View logs
docker compose logs -f
You should see all five containers running (RabbitMQ + 4 services).
Test the API Gateway
Open a new terminal and test the complete flow:
# 1. Check gateway health
curl http://localhost:3000/health | jq
# 2. Register a user
curl -X POST http://localhost:3000/api/users/register \
-H "Content-Type: application/json" \
-d '{
"email": "test@example.com",
"password": "securePassword123",
"name": "Test User"
}' | jq
Save the token from the response, then continue:
# 3. Browse products (public - no auth needed)
curl http://localhost:3000/api/products | jq
# 4. Create an order (requires auth)
TOKEN="your-jwt-token-here"
curl -X POST http://localhost:3000/api/orders \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $TOKEN" \
-d '{
"userId": "your-user-id",
"items": [
{
"productId": "product-id-from-step-3",
"quantity": 2,
"price": 49.99
}
]
}' | jq
# 5. Check order status (it should change from "pending" to "confirmed")
curl http://localhost:3000/api/orders/ORDER_ID \
-H "Authorization: Bearer $TOKEN" | jq
Monitor RabbitMQ
Open the RabbitMQ management dashboard at http://localhost:15672 (login: admin / rabbitmq123). You will see:
- Queues: order_created, order_confirmed, order_rejected, stock_updated
- Message rates: Messages flowing between services
- Connections: One connection per service
Step 9: Add Service Resilience
Circuit Breaker Pattern
Add a simple circuit breaker to protect services from cascading failures:
// shared/src/circuit-breaker.js
/**
 * Minimal circuit breaker.
 * CLOSED: calls pass through; consecutive failures are counted.
 * OPEN: calls fail fast until resetTimeout ms have elapsed.
 * HALF_OPEN: one probe call is allowed; success closes the circuit,
 * failure re-opens it.
 */
class CircuitBreaker {
  constructor(options = {}) {
    this.failureThreshold = options.failureThreshold || 5;
    this.resetTimeout = options.resetTimeout || 30000;
    this.state = "CLOSED"; // CLOSED, OPEN, HALF_OPEN
    this.failures = 0;
    this.lastFailureTime = null;
  }

  /**
   * Run `fn` through the breaker: fail fast while OPEN, otherwise await it
   * and record the outcome.
   * @param {Function} fn - async operation to protect
   * @returns {Promise<*>} whatever fn resolves to
   * @throws the breaker error when OPEN, or fn's own error on failure
   */
  async execute(fn) {
    if (this.state === "OPEN") {
      const elapsed = Date.now() - this.lastFailureTime;
      if (elapsed <= this.resetTimeout) {
        throw new Error("Circuit breaker is OPEN - service unavailable");
      }
      this.state = "HALF_OPEN"; // cooldown over: allow a single probe call
    }
    try {
      const result = await fn();
      this.onSuccess();
      return result;
    } catch (error) {
      this.onFailure();
      throw error;
    }
  }

  /** Any success fully closes the circuit and clears the failure count. */
  onSuccess() {
    this.failures = 0;
    this.state = "CLOSED";
  }

  /** Record a failure and open the circuit once the threshold is reached. */
  onFailure() {
    this.failures += 1;
    this.lastFailureTime = Date.now();
    if (this.failures >= this.failureThreshold) {
      this.state = "OPEN";
      console.warn("Circuit breaker OPENED - too many failures");
    }
  }

  /** Snapshot of the breaker for health endpoints / debugging. */
  getState() {
    const { state, failures, failureThreshold: threshold } = this;
    return { state, failures, threshold };
  }
}
module.exports = { CircuitBreaker };
Use it in the Order Service when calling the Product Service:
const { CircuitBreaker } = require("../../shared/src/circuit-breaker");
const productServiceBreaker = new CircuitBreaker({
failureThreshold: 3,
resetTimeout: 15000,
});
// In the order processing function:
const stockCheck = await productServiceBreaker.execute(async () => {
const response = await fetch(
`${PRODUCT_SERVICE_URL}/products/check-stock`,
{
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ items: orderData.items }),
}
);
if (!response.ok) throw new Error(`Stock check failed: ${response.status}`);
return response.json();
});
Step 10: Add Structured Logging
Consistent logging across services makes debugging much easier:
// shared/src/logger.js
/**
 * Build a structured JSON logger bound to one service name.
 * Every entry is a single JSON line: { timestamp, service, level, message,
 * ...meta }, so log aggregators can parse it directly.
 *
 * @param {string} serviceName - emitted as the `service` field on each line
 * @returns {{info: Function, warn: Function, error: Function, debug: Function}}
 */
function createLogger(serviceName) {
  // Map our level names onto the matching console methods (info -> log).
  const consoleMethod = { info: "log", warn: "warn", error: "error", debug: "debug" };
  const emit = (level) => (message, meta = {}) => {
    const line = JSON.stringify({
      timestamp: new Date().toISOString(),
      service: serviceName,
      level,
      message,
      ...meta,
    });
    console[consoleMethod[level]](line);
  };
  return {
    info: emit("info"),
    warn: emit("warn"),
    error: emit("error"),
    debug: emit("debug"),
  };
}
module.exports = { createLogger };
Usage in any service:
const { createLogger } = require("../../shared/src/logger");
const logger = createLogger("order-service");
logger.info("Order created", { orderId: order.id, userId: order.userId });
logger.error("Failed to process order", { orderId, error: error.message });
Troubleshooting
RabbitMQ Connection Refused
If services fail to connect to RabbitMQ:
# Check RabbitMQ is running
docker compose logs rabbitmq
# Restart RabbitMQ
docker compose restart rabbitmq
The shared RabbitMQ client includes retry logic (10 attempts with 3-second delays), which handles startup ordering.
Service Cannot Reach Another Service
Inside Docker Compose, services reach each other by service name (e.g., http://product-service:3002), not localhost. Verify your environment variables are set correctly.
Port Conflicts
If ports 3000-3003 or 5672/15672 are in use:
# Find what is using the port
lsof -i :3000
# Or change ports in docker-compose.yml
ports:
- "4000:3000" # Map to different host port
Next Steps
Now that you have a working microservices architecture, here are ways to extend it:
- Add a real database: Replace in-memory Maps with PostgreSQL using Prisma or Drizzle ORM
- Add a Notification Service: Consume order_confirmed events to send emails via Resend
- Implement saga pattern: For multi-step transactions that need rollback
- Add OpenTelemetry: Distributed tracing across services
- Deploy to production: Use Kubernetes or Docker Swarm for orchestration
- Add GraphQL Gateway: Replace REST proxy with a federated GraphQL layer
Conclusion
You have built a complete microservices architecture with:
- Service decomposition — each domain (users, products, orders) has its own service
- Async messaging — RabbitMQ decouples order processing from stock validation
- API Gateway — single entry point with auth, rate limiting, and request routing
- Circuit breaker — resilience against cascading failures
- Containerization — Docker Compose for reproducible deployments
- Structured logging — consistent JSON logs across all services
This architecture scales horizontally: you can run multiple instances of any service behind a load balancer, and RabbitMQ distributes messages across consumers automatically. The patterns demonstrated here — service decomposition, async messaging, API gateway, circuit breaker — are the same patterns used in production by companies processing millions of requests daily.
Discuss Your Project with Us
We're here to help with your web development needs. Schedule a call to discuss your project and how we can assist you.
Let's find the best solutions for your needs.
Related Articles

Building Type-Safe Cloud Backend APIs with Encore.ts: From Zero to Production
Learn how to build production-ready, type-safe backend APIs with Encore.ts. This hands-on tutorial covers service definitions, API endpoints, databases, pub/sub messaging, cron jobs, and deployment to the cloud with automatic infrastructure provisioning.

How to Check If Your App Is Affected by the Axios Supply Chain Attack (And Fix It)
Practical step-by-step guide to detect, analyze, and remediate the axios npm supply chain attack (CVE CVSS 9.3). Includes terminal commands, a free automated scanner tool, RAT indicator checks, and prevention strategies.

Docker Compose for Full-Stack Developers: Next.js, PostgreSQL, and Redis
Learn how to containerize a full-stack Next.js application with PostgreSQL and Redis using Docker Compose. This hands-on tutorial covers multi-service orchestration, development workflows, hot reloading, health checks, and production-ready configurations.