Capstone Project: E-Commerce Platform
Build a production-ready e-commerce platform using all the microservices patterns you've learned.
Project Goals:
- Apply all microservices patterns in practice
- Build a portfolio-worthy project
- Gain hands-on experience with real-world challenges
- Prepare for system design interviews
Project Overview
                    E-COMMERCE MICROSERVICES PLATFORM

                         ┌─────────────┐
                         │   Client    │
                         │   (React)   │
                         └──────┬──────┘
                                │
                         ┌──────▼──────┐
                         │ API Gateway │
                         │   (Kong)    │
                         └──────┬──────┘
                                │
         ┌──────────────────────┼──────────────────────┐
         │                      │                      │
         ▼                      ▼                      ▼
   ┌───────────┐          ┌───────────┐          ┌───────────┐
   │   User    │          │   Order   │          │  Product  │
   │  Service  │          │  Service  │          │  Catalog  │
   │           │          │           │          │           │
   │  MongoDB  │          │PostgreSQL │          │  MongoDB  │
   └───────────┘          └─────┬─────┘          └───────────┘
                                │
               ┌────────────────┼────────────────┐
               │                │                │
               ▼                ▼                ▼
         ┌───────────┐    ┌───────────┐    ┌───────────┐
         │  Payment  │    │ Inventory │    │   Cart    │
         │  Service  │    │  Service  │    │  Service  │
         │           │    │           │    │           │
         │  Stripe   │    │PostgreSQL │    │   Redis   │
         └───────────┘    └─────┬─────┘    └───────────┘
                                │
                         ┌──────▼──────┐
                         │    Kafka    │
                         │  (Events)   │
                         └──────┬──────┘
                                │
                         ┌──────▼──────┐
                         │Notification │
                         │   Service   │
                         └─────────────┘

   ┌─────────────────────────────────────────────────────────────┐
   │                     OBSERVABILITY STACK                     │
   │   Prometheus → Grafana      Jaeger (Tracing)    Loki (Logs) │
   └─────────────────────────────────────────────────────────────┘
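All client traffic flows through the API Gateway before fanning out to the services. The gateway can be an off-the-shelf product such as Kong, or, as the Compose file in Phase 3 suggests by building services/api-gateway, a small custom Node service. A minimal sketch of the custom-gateway approach, assuming a naive axios-based forwarder and the service URLs injected via environment variables (not the course's actual gateway code):

// services/api-gateway/src/index.js (illustrative sketch)
const express = require('express');
const axios = require('axios');

const app = express();
app.use(express.json());

// Map URL prefixes to downstream services (URLs injected via docker-compose)
const routes = {
  '/users': process.env.USER_SERVICE_URL,
  '/orders': process.env.ORDER_SERVICE_URL,
  '/products': process.env.PRODUCT_SERVICE_URL
};

// Naive reverse proxy: forward method, path, body and selected headers
app.use(async (req, res) => {
  const prefix = Object.keys(routes).find(p => req.path.startsWith(p));
  if (!prefix) return res.status(404).json({ error: 'No route' });

  try {
    const response = await axios({
      method: req.method,
      url: `${routes[prefix]}${req.originalUrl}`,
      data: req.body,
      headers: { authorization: req.headers.authorization || '' },
      validateStatus: () => true // pass downstream status codes through unchanged
    });
    res.status(response.status).json(response.data);
  } catch (err) {
    res.status(502).json({ error: 'Upstream unavailable' });
  }
});

app.listen(3000, () => console.log('API gateway listening on 3000'));

A production gateway would add authentication, rate limiting, and request logging here; with Kong, the same routing lives in its declarative configuration instead of code.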
Phase 1: Project Setup
Directory Structure
ecommerce-microservices/
├── docker-compose.yml
├── docker-compose.override.yml
├── Makefile
├── README.md
│
├── services/
│   ├── api-gateway/
│   ├── user-service/
│   ├── product-service/
│   ├── cart-service/
│   ├── order-service/
│   ├── payment-service/
│   ├── inventory-service/
│   └── notification-service/
│
├── shared/
│   ├── proto/             # gRPC definitions
│   ├── events/            # Event schemas
│   └── utils/             # Shared utilities
│
├── infrastructure/
│   ├── k8s/               # Kubernetes manifests
│   ├── helm/              # Helm charts
│   ├── prometheus/        # Monitoring config
│   └── grafana/           # Dashboards
│
└── tests/
    ├── unit/
    ├── integration/
    ├── contract/
    └── e2e/
Initial Setup Script
#!/bin/bash
# setup.sh

# Create service directories
services=("api-gateway" "user-service" "product-service" "cart-service" "order-service" "payment-service" "inventory-service" "notification-service")

for service in "${services[@]}"; do
  mkdir -p "services/$service/src"

  # Initialize npm project
  cd "services/$service"
  npm init -y
  npm install express cors helmet morgan
  npm install --save-dev jest nodemon
  cd ../..
done

# Create shared directories
mkdir -p shared/{proto,events,utils}
mkdir -p infrastructure/{k8s,helm,prometheus,grafana}
mkdir -p tests/{unit,integration,contract,e2e}

echo "Project structure created!"
Phase 2: Core Services
User Service
// services/user-service/src/index.js
const express = require('express');
const mongoose = require('mongoose');
const { initTracing } = require('./observability/tracing');
const { Metrics } = require('./observability/metrics');
const { logger } = require('./observability/logger');

// Initialize tracing before the service starts handling requests
initTracing('user-service');

const app = express();
const metrics = new Metrics({ serviceName: 'user-service' });

// Middleware
app.use(express.json());
app.use(metrics.middleware());

// User Schema
const UserSchema = new mongoose.Schema({
  email: { type: String, required: true, unique: true },
  passwordHash: { type: String, required: true },
  profile: {
    firstName: String,
    lastName: String,
    phone: String
  },
  addresses: [{
    street: String,
    city: String,
    country: String,
    postalCode: String,
    isDefault: Boolean
  }],
  createdAt: { type: Date, default: Date.now }
});

const User = mongoose.model('User', UserSchema);

// Routes
app.post('/users', async (req, res) => {
  try {
    // In a real service, hash the plaintext password here rather than
    // accepting a passwordHash from the client.
    const user = new User(req.body);
    await user.save();
    logger.info('User created', { userId: user._id });
    res.status(201).json({ id: user._id, email: user.email });
  } catch (error) {
    logger.error('Failed to create user', { error: error.message });
    res.status(400).json({ error: error.message });
  }
});

app.get('/users/:id', async (req, res) => {
  try {
    const user = await User.findById(req.params.id).select('-passwordHash');
    if (!user) {
      return res.status(404).json({ error: 'User not found' });
    }
    res.json(user);
  } catch (error) {
    res.status(400).json({ error: 'Invalid user id' });
  }
});

// Health checks
app.get('/health/live', (req, res) => res.json({ status: 'UP' }));

app.get('/health/ready', async (req, res) => {
  const dbReady = mongoose.connection.readyState === 1;
  res.status(dbReady ? 200 : 503).json({
    status: dbReady ? 'READY' : 'NOT_READY',
    database: dbReady
  });
});

app.get('/metrics', async (req, res) => {
  res.set('Content-Type', metrics.register.contentType);
  res.send(await metrics.getMetrics());
});

// Start server
const PORT = process.env.PORT || 3000;
mongoose.connect(process.env.MONGODB_URI)
  .then(() => {
    app.listen(PORT, () => {
      logger.info(`User service running on port ${PORT}`);
    });
  });
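The service above imports ./observability/tracing, metrics, and logger modules that are not shown in this section. Minimal sketches of what they might contain, assuming OpenTelemetry for traces, prom-client for metrics, and winston for logs (package choices and option shapes are assumptions; versions vary):

// services/user-service/src/observability/tracing.js (sketch)
const { NodeSDK } = require('@opentelemetry/sdk-node');
const { OTLPTraceExporter } = require('@opentelemetry/exporter-trace-otlp-grpc');
const { getNodeAutoInstrumentations } = require('@opentelemetry/auto-instrumentations-node');
const { Resource } = require('@opentelemetry/resources');
const { SemanticResourceAttributes } = require('@opentelemetry/semantic-conventions');

function initTracing(serviceName) {
  const sdk = new NodeSDK({
    resource: new Resource({ [SemanticResourceAttributes.SERVICE_NAME]: serviceName }),
    traceExporter: new OTLPTraceExporter(), // endpoint read from OTEL_EXPORTER_OTLP_ENDPOINT
    instrumentations: [getNodeAutoInstrumentations()]
  });
  sdk.start();
  return sdk;
}

module.exports = { initTracing };

// services/user-service/src/observability/metrics.js (sketch)
const client = require('prom-client');

class Metrics {
  constructor({ serviceName }) {
    this.register = new client.Registry();
    this.register.setDefaultLabels({ service: serviceName });
    client.collectDefaultMetrics({ register: this.register });

    this.httpDuration = new client.Histogram({
      name: 'http_request_duration_seconds',
      help: 'HTTP request duration in seconds',
      labelNames: ['method', 'route', 'status_code'],
      registers: [this.register]
    });
  }

  middleware() {
    // Records a duration sample for every finished HTTP request
    return (req, res, next) => {
      const end = this.httpDuration.startTimer();
      res.on('finish', () => {
        end({ method: req.method, route: req.path, status_code: res.statusCode });
      });
      next();
    };
  }

  getMetrics() {
    return this.register.metrics();
  }
}

module.exports = { Metrics };

// services/user-service/src/observability/logger.js (sketch)
const winston = require('winston');

const logger = winston.createLogger({
  level: process.env.LOG_LEVEL || 'info',
  format: winston.format.json(),
  defaultMeta: { service: 'user-service' },
  transports: [new winston.transports.Console()]
});

module.exports = { logger };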
Order Service with Saga
// services/order-service/src/sagas/OrderSaga.js
const { Kafka } = require('kafkajs');

class OrderSaga {
  constructor(orderRepository, kafka) {
    this.orderRepository = orderRepository;
    this.producer = kafka.producer();
    this.consumer = kafka.consumer({ groupId: 'order-saga' });
  }

  async start() {
    await this.producer.connect();
    await this.consumer.connect();
    await this.consumer.subscribe({
      topics: ['payment.completed', 'payment.failed', 'inventory.reserved', 'inventory.failed']
    });

    await this.consumer.run({
      eachMessage: async ({ topic, message }) => {
        const event = JSON.parse(message.value.toString());
        await this.handleEvent(topic, event);
      }
    });
  }

  async createOrder(orderData) {
    // Step 1: Create order in PENDING state
    const order = await this.orderRepository.create({
      ...orderData,
      status: 'PENDING',
      sagaState: 'STARTED'
    });

    // Step 2: Request inventory reservation
    await this.producer.send({
      topic: 'inventory.reserve',
      messages: [{
        key: order.id,
        value: JSON.stringify({
          orderId: order.id,
          items: order.items
        })
      }]
    });

    return order;
  }

  async handleEvent(topic, event) {
    const order = await this.orderRepository.findById(event.orderId);
    if (!order) return;

    switch (topic) {
      case 'inventory.reserved':
        // Step 3: Request payment
        await this.orderRepository.update(order.id, { sagaState: 'INVENTORY_RESERVED' });
        await this.producer.send({
          topic: 'payment.process',
          messages: [{
            key: order.id,
            value: JSON.stringify({
              orderId: order.id,
              customerId: order.customerId,
              amount: order.total
            })
          }]
        });
        break;

      case 'payment.completed':
        // Step 4: Complete order
        await this.orderRepository.update(order.id, {
          status: 'CONFIRMED',
          sagaState: 'COMPLETED',
          paymentId: event.paymentId
        });
        await this.producer.send({
          topic: 'order.confirmed',
          messages: [{
            key: order.id,
            value: JSON.stringify({ orderId: order.id })
          }]
        });
        break;

      case 'payment.failed':
        // Compensate: Release inventory
        await this.orderRepository.update(order.id, {
          status: 'FAILED',
          sagaState: 'COMPENSATING'
        });
        await this.producer.send({
          topic: 'inventory.release',
          messages: [{
            key: order.id,
            value: JSON.stringify({
              orderId: order.id,
              items: order.items
            })
          }]
        });
        break;

      case 'inventory.failed':
        // No compensation needed, just fail the order
        await this.orderRepository.update(order.id, {
          status: 'FAILED',
          sagaState: 'FAILED',
          failureReason: 'Insufficient inventory'
        });
        break;
    }
  }
}

module.exports = { OrderSaga };
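The saga depends on an orderRepository with create, findById, and update methods, which is not shown above. A minimal PostgreSQL-backed sketch using the pg package (the orders table name, column names, and field mapping are assumptions for illustration):

// services/order-service/src/repositories/OrderRepository.js (sketch)
const { Pool } = require('pg');

class OrderRepository {
  constructor() {
    this.pool = new Pool({ connectionString: process.env.DATABASE_URL });
  }

  async create(order) {
    const { rows } = await this.pool.query(
      `INSERT INTO orders (customer_id, items, total, status, saga_state)
       VALUES ($1, $2, $3, $4, $5)
       RETURNING id, customer_id AS "customerId", items, total, status, saga_state AS "sagaState"`,
      [order.customerId, JSON.stringify(order.items), order.total, order.status, order.sagaState]
    );
    return rows[0];
  }

  async findById(id) {
    const { rows } = await this.pool.query(
      `SELECT id, customer_id AS "customerId", items, total, status, saga_state AS "sagaState"
       FROM orders WHERE id = $1`,
      [id]
    );
    return rows[0] || null;
  }

  async update(id, fields) {
    // Only the saga-related fields are updatable in this sketch
    const allowed = { status: 'status', sagaState: 'saga_state', paymentId: 'payment_id', failureReason: 'failure_reason' };
    const sets = [];
    const values = [id];
    for (const [key, column] of Object.entries(allowed)) {
      if (fields[key] !== undefined) {
        values.push(fields[key]);
        sets.push(`${column} = $${values.length}`);
      }
    }
    if (sets.length === 0) return;
    await this.pool.query(`UPDATE orders SET ${sets.join(', ')} WHERE id = $1`, values);
  }
}

module.exports = { OrderRepository };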
Payment Service
// services/payment-service/src/index.js
const express = require('express');
const { Kafka } = require('kafkajs');
const Stripe = require('stripe');
const { initTracing } = require('./observability/tracing');

initTracing('payment-service');

const app = express();
const stripe = new Stripe(process.env.STRIPE_SECRET_KEY);

const kafka = new Kafka({
  clientId: 'payment-service',
  brokers: process.env.KAFKA_BROKERS.split(',')
});

const producer = kafka.producer();
const consumer = kafka.consumer({ groupId: 'payment-service' });

// Idempotency store (in-memory for the exercise; use Redis or a database in production)
const processedPayments = new Map();

async function processPayment(event) {
  const { orderId, customerId, amount } = event;

  // Check idempotency
  if (processedPayments.has(orderId)) {
    console.log(`Payment already processed for order ${orderId}`);
    return processedPayments.get(orderId);
  }

  try {
    // Look up the customer's stored Stripe details (implemented elsewhere in this service)
    const customer = await getCustomerPaymentMethod(customerId);

    // Create payment intent
    const paymentIntent = await stripe.paymentIntents.create({
      amount: Math.round(amount * 100), // Stripe expects the amount in cents
      currency: 'usd',
      customer: customer.stripeCustomerId,
      payment_method: customer.defaultPaymentMethod,
      confirm: true,
      metadata: { orderId }
    });

    const result = {
      paymentId: paymentIntent.id,
      orderId,
      status: 'COMPLETED'
    };

    // Store for idempotency
    processedPayments.set(orderId, result);

    // Publish success event
    await producer.send({
      topic: 'payment.completed',
      messages: [{
        key: orderId,
        value: JSON.stringify(result)
      }]
    });

    return result;
  } catch (error) {
    // Publish failure event
    await producer.send({
      topic: 'payment.failed',
      messages: [{
        key: orderId,
        value: JSON.stringify({
          orderId,
          error: error.message
        })
      }]
    });
    throw error;
  }
}

// Start consumer
async function start() {
  await producer.connect();
  await consumer.connect();
  await consumer.subscribe({ topic: 'payment.process' });

  await consumer.run({
    eachMessage: async ({ message }) => {
      const event = JSON.parse(message.value.toString());
      await processPayment(event);
    }
  });

  app.listen(3000, () => {
    console.log('Payment service running on port 3000');
  });
}

start();
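The saga also assumes an Inventory Service that listens on inventory.reserve and inventory.release and replies with inventory.reserved or inventory.failed. A simplified sketch of that consumer, assuming a PostgreSQL inventory table with an available-quantity column (table and column names are illustrative):

// services/inventory-service/src/index.js (simplified sketch)
const { Kafka } = require('kafkajs');
const { Pool } = require('pg');

const pool = new Pool({ connectionString: process.env.DATABASE_URL });
const kafka = new Kafka({ clientId: 'inventory-service', brokers: process.env.KAFKA_BROKERS.split(',') });
const producer = kafka.producer();
const consumer = kafka.consumer({ groupId: 'inventory-service' });

async function reserve(orderId, items) {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    for (const item of items) {
      // Decrement available stock only if enough is left
      const { rowCount } = await client.query(
        `UPDATE inventory SET available = available - $1
         WHERE product_id = $2 AND available >= $1`,
        [item.quantity, item.productId]
      );
      if (rowCount === 0) throw new Error(`Insufficient stock for ${item.productId}`);
    }
    await client.query('COMMIT');
    await producer.send({
      topic: 'inventory.reserved',
      messages: [{ key: orderId, value: JSON.stringify({ orderId }) }]
    });
  } catch (error) {
    await client.query('ROLLBACK');
    await producer.send({
      topic: 'inventory.failed',
      messages: [{ key: orderId, value: JSON.stringify({ orderId, reason: error.message }) }]
    });
  } finally {
    client.release();
  }
}

async function start() {
  await producer.connect();
  await consumer.connect();
  await consumer.subscribe({ topics: ['inventory.reserve', 'inventory.release'] });

  await consumer.run({
    eachMessage: async ({ topic, message }) => {
      const { orderId, items } = JSON.parse(message.value.toString());
      if (topic === 'inventory.reserve') {
        await reserve(orderId, items);
      } else {
        // Compensation: add the reserved quantities back
        for (const item of items) {
          await pool.query(
            'UPDATE inventory SET available = available + $1 WHERE product_id = $2',
            [item.quantity, item.productId]
          );
        }
      }
    }
  });
}

start();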
Phase 3: Infrastructure
Docker Compose
# docker-compose.yml
version: '3.8'

services:
  api-gateway:
    build: ./services/api-gateway
    ports:
      - "8080:3000"
    environment:
      - USER_SERVICE_URL=http://user-service:3000
      - ORDER_SERVICE_URL=http://order-service:3000
      - PRODUCT_SERVICE_URL=http://product-service:3000
    depends_on:
      - user-service
      - order-service
      - product-service

  user-service:
    build: ./services/user-service
    environment:
      - MONGODB_URI=mongodb://user-db:27017/users
      - OTEL_EXPORTER_OTLP_ENDPOINT=http://jaeger:4317
    depends_on:
      - user-db

  order-service:
    build: ./services/order-service
    environment:
      - DATABASE_URL=postgresql://postgres:postgres@order-db:5432/orders
      - KAFKA_BROKERS=kafka:9092
      - OTEL_EXPORTER_OTLP_ENDPOINT=http://jaeger:4317
    depends_on:
      - order-db
      - kafka

  product-service:
    build: ./services/product-service
    environment:
      - MONGODB_URI=mongodb://product-db:27017/products
      - REDIS_URL=redis://redis:6379
    depends_on:
      - product-db
      - redis

  payment-service:
    build: ./services/payment-service
    environment:
      - STRIPE_SECRET_KEY=${STRIPE_SECRET_KEY}
      - KAFKA_BROKERS=kafka:9092
    depends_on:
      - kafka

  inventory-service:
    build: ./services/inventory-service
    environment:
      - DATABASE_URL=postgresql://postgres:postgres@inventory-db:5432/inventory
      - KAFKA_BROKERS=kafka:9092
    depends_on:
      - inventory-db
      - kafka

  cart-service:
    build: ./services/cart-service
    environment:
      - REDIS_URL=redis://redis:6379
    depends_on:
      - redis

  notification-service:
    build: ./services/notification-service
    environment:
      - KAFKA_BROKERS=kafka:9092
      - SENDGRID_API_KEY=${SENDGRID_API_KEY}
    depends_on:
      - kafka

  # Databases
  user-db:
    image: mongo:6
    volumes:
      - user-db-data:/data/db

  order-db:
    image: postgres:15-alpine
    environment:
      - POSTGRES_DB=orders
      - POSTGRES_PASSWORD=postgres
    volumes:
      - order-db-data:/var/lib/postgresql/data

  product-db:
    image: mongo:6
    volumes:
      - product-db-data:/data/db

  inventory-db:
    image: postgres:15-alpine
    environment:
      - POSTGRES_DB=inventory
      - POSTGRES_PASSWORD=postgres
    volumes:
      - inventory-db-data:/var/lib/postgresql/data

  redis:
    image: redis:7-alpine
    volumes:
      - redis-data:/data

  # Message Broker (Kafka in KRaft mode, no ZooKeeper)
  kafka:
    image: confluentinc/cp-kafka:7.5.0
    environment:
      KAFKA_NODE_ID: 1
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
      KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9092,CONTROLLER://0.0.0.0:9093
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092
      KAFKA_CONTROLLER_QUORUM_VOTERS: 1@kafka:9093
      KAFKA_PROCESS_ROLES: broker,controller
      KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      # CLUSTER_ID must be a base64-encoded UUID; generate one with
      # `kafka-storage random-uuid` and paste it here.
      CLUSTER_ID: 'capstone-cluster'
    volumes:
      - kafka-data:/var/lib/kafka/data

  # Observability
  prometheus:
    image: prom/prometheus:latest
    ports:
      - "9090:9090"
    volumes:
      - ./infrastructure/prometheus:/etc/prometheus

  grafana:
    image: grafana/grafana:latest
    ports:
      - "3001:3000"
    volumes:
      - ./infrastructure/grafana:/etc/grafana/provisioning

  jaeger:
    image: jaegertracing/all-in-one:latest
    ports:
      - "16686:16686"
      - "4317:4317"
    environment:
      - COLLECTOR_OTLP_ENABLED=true

volumes:
  user-db-data:
  order-db-data:
  product-db-data:
  inventory-db-data:
  redis-data:
  kafka-data:
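The Compose file mounts ./infrastructure/prometheus into the Prometheus container, so that directory needs a prometheus.yml. A minimal scrape configuration, assuming each service exposes /metrics on port 3000 as the User Service above does (the job list is illustrative, not exhaustive):

# infrastructure/prometheus/prometheus.yml (sketch)
global:
  scrape_interval: 15s

scrape_configs:
  - job_name: 'user-service'
    static_configs:
      - targets: ['user-service:3000']
  - job_name: 'order-service'
    static_configs:
      - targets: ['order-service:3000']
  - job_name: 'payment-service'
    static_configs:
      - targets: ['payment-service:3000']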
Phase 4: Testing
Contract Test Example
// tests/contract/OrderPayment.consumer.test.js
const { Pact, Matchers } = require('@pact-foundation/pact');
const path = require('path');
const { like } = Matchers;

describe('Order Service - Payment Service Contract', () => {
  const provider = new Pact({
    consumer: 'OrderService',
    provider: 'PaymentService',
    port: 8081,
    dir: path.resolve(process.cwd(), 'pacts')
  });

  beforeAll(() => provider.setup());
  afterEach(() => provider.verify());
  afterAll(() => provider.finalize());

  it('should process payment successfully', async () => {
    await provider.addInteraction({
      state: 'customer has valid payment method',
      uponReceiving: 'payment request',
      withRequest: {
        method: 'POST',
        path: '/payments',
        body: {
          orderId: '12345',
          amount: 99.99,
          customerId: 'cust-123'
        }
      },
      willRespondWith: {
        status: 200,
        body: {
          paymentId: like('pay_abc123'),
          status: 'COMPLETED'
        }
      }
    });

    // Exercise the Order Service's real HTTP client against the Pact mock server
    // (paymentClient is configured to point at http://localhost:8081 for this test)
    const result = await paymentClient.process({
      orderId: '12345',
      amount: 99.99,
      customerId: 'cust-123'
    });

    expect(result.status).toBe('COMPLETED');
  });
});
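The consumer test above writes a pact file into pacts/. The Payment Service then replays that contract against a running instance in a provider verification test. A sketch using Pact's Verifier (the pact file name and the state handler body are assumptions):

// tests/contract/OrderPayment.provider.test.js (sketch)
const { Verifier } = require('@pact-foundation/pact');
const path = require('path');

describe('Payment Service - provider verification', () => {
  it('honours the Order Service contract', () => {
    return new Verifier({
      provider: 'PaymentService',
      providerBaseUrl: 'http://localhost:3000', // a running payment-service instance
      pactUrls: [path.resolve(process.cwd(), 'pacts/OrderService-PaymentService.json')],
      stateHandlers: {
        'customer has valid payment method': async () => {
          // seed or stub whatever the provider needs to satisfy this state
        }
      }
    }).verifyProvider();
  });
});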
E2E Test
// tests/e2e/checkout.test.js
const axios = require('axios');

describe('Checkout Flow E2E', () => {
  const api = axios.create({ baseURL: 'http://localhost:8080' });
  let authToken, orderId;

  beforeAll(async () => {
    const { data } = await api.post('/auth/login', {
      email: '[email protected]',
      password: 'password'
    });
    authToken = data.token;
    api.defaults.headers.Authorization = `Bearer ${authToken}`;
  });

  it('should complete checkout successfully', async () => {
    // 1. Add to cart
    await api.post('/cart/items', {
      productId: 'prod-123',
      quantity: 2
    });

    // 2. Create order
    const orderResponse = await api.post('/orders', {
      shippingAddressId: 'addr-123'
    });
    orderId = orderResponse.data.id;
    expect(orderResponse.data.status).toBe('PENDING');

    // 3. Process payment
    await api.post(`/orders/${orderId}/pay`, {
      paymentMethodId: 'pm-123'
    });

    // 4. Wait for saga completion (see the polling helper below for a less flaky alternative)
    await new Promise(r => setTimeout(r, 3000));

    // 5. Verify order confirmed
    const finalOrder = await api.get(`/orders/${orderId}`);
    expect(finalOrder.data.status).toBe('CONFIRMED');
  });
});
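The fixed three-second sleep in step 4 makes the test slow and flaky. A small polling helper, referenced in the comment above, waits until the order reaches the expected status or fails with a timeout (file name is illustrative):

// tests/e2e/helpers/waitForOrderStatus.js (sketch)
async function waitForOrderStatus(api, orderId, expectedStatus, { timeoutMs = 10000, intervalMs = 500 } = {}) {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const { data } = await api.get(`/orders/${orderId}`);
    if (data.status === expectedStatus) return data;
    await new Promise(r => setTimeout(r, intervalMs));
  }
  throw new Error(`Order ${orderId} did not reach ${expectedStatus} within ${timeoutMs}ms`);
}

module.exports = { waitForOrderStatus };

With this helper, steps 4 and 5 collapse into a single call: const finalOrder = await waitForOrderStatus(api, orderId, 'CONFIRMED').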
Evaluation Checklist
Architecture
- Clear service boundaries
- API Gateway implemented
- Service discovery working
- Database per service
- Event-driven communication
Resilience
- Circuit breakers (see the sketch after this checklist)
- Retry with backoff
- Fallback strategies
- Health checks
- Graceful degradation
Data
- Saga pattern for orders
- Event sourcing (optional)
- Idempotency handling
- Data consistency
Observability
- Distributed tracing
- Centralized logging
- Metrics & dashboards
- Alerting configured
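For the resilience items above (circuit breakers, retry with backoff, fallbacks), the synchronous service-to-service HTTP calls are the natural place to apply them. A sketch wrapping a retried Product Catalog call in a circuit breaker, assuming the opossum package (function and file names are illustrative):

// shared/utils/resilientClient.js (sketch, assumes the opossum package)
const axios = require('axios');
const CircuitBreaker = require('opossum');

// Retry with exponential backoff: 100ms, 200ms, 400ms...
async function getWithRetry(url, retries = 3) {
  for (let attempt = 0; ; attempt++) {
    try {
      return (await axios.get(url, { timeout: 2000 })).data;
    } catch (err) {
      if (attempt >= retries) throw err;
      await new Promise(r => setTimeout(r, 100 * 2 ** attempt));
    }
  }
}

// Circuit breaker around the retried call, with a degraded fallback
const breaker = new CircuitBreaker(getWithRetry, {
  timeout: 5000,                 // consider the call failed after 5s
  errorThresholdPercentage: 50,  // open the circuit after 50% failures
  resetTimeout: 10000            // probe again after 10s
});
breaker.fallback(() => ({ products: [], degraded: true }));

async function getProducts() {
  return breaker.fire(`${process.env.PRODUCT_SERVICE_URL}/products`);
}

module.exports = { getProducts };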
Summary
Congratulations on completing the Microservices Mastery course! You now have:
- Deep understanding of microservices patterns
- Hands-on experience with Node.js implementations
- Production-ready code for your portfolio
- Interview preparation for top tech companies
What's Next?
- Deploy your capstone to Kubernetes (local or cloud)
- Add more features (search, recommendations)
- Practice system design interviews
- Share your project on GitHub