TechLead
Lesson 16 of 16
5 min read
Node.js

Deploying Node.js Applications

Deploy Node.js to production with PM2, Docker containerization, health checks, structured logging with Pino, graceful shutdown, and CI/CD

Production-Ready Node.js

Getting a Node.js application from development to production involves process management, containerization, logging, monitoring, and automated deployment pipelines. This guide covers the essential tools and patterns.

🚀 Production Checklist

Process Management

PM2 or systemd for auto-restart, clustering

Containerization

Docker for consistent, isolated deployment

Structured Logging

JSON logs with Winston or Pino for observability

Health Checks

Liveness and readiness probes for orchestrators

PM2 Process Manager

// Install globally
// npm install -g pm2

// Start application
// pm2 start dist/index.js --name "api-server" -i max

// ecosystem.config.js
// PM2 process definition for the API server.
const apiServer = {
  name: 'api-server',
  script: 'dist/index.js',
  instances: 'max',          // one worker per CPU core
  exec_mode: 'cluster',
  max_memory_restart: '500M',
  env: {
    NODE_ENV: 'production',
    PORT: 3000,
  },
  env_staging: {
    NODE_ENV: 'staging',
    PORT: 3001,
  },
  // Logging
  log_file: '/var/log/app/combined.log',
  error_file: '/var/log/app/error.log',
  log_date_format: 'YYYY-MM-DD HH:mm:ss',
  merge_logs: true,
  // Auto-restart policy
  watch: false,
  max_restarts: 10,
  restart_delay: 4000,
  // Graceful shutdown windows (ms)
  kill_timeout: 5000,
  listen_timeout: 8000,
};

module.exports = { apps: [apiServer] };

// PM2 commands:
// pm2 start ecosystem.config.js
// pm2 reload api-server           # Zero-downtime restart
// pm2 stop api-server
// pm2 logs api-server
// pm2 monit                       # Real-time monitoring
// pm2 save                        # Save process list
// pm2 startup                     # Auto-start on boot

Docker Containerization

# Dockerfile — multi-stage build: deps -> build -> slim production image
FROM node:20-alpine AS base
WORKDIR /app

# Install production dependencies only (cached while the lockfile is unchanged)
FROM base AS deps
COPY package.json package-lock.json ./
# --omit=dev replaces the deprecated --production flag (npm 9+)
RUN npm ci --omit=dev

# Build stage: full dependency tree is needed to compile the app
FROM base AS build
COPY package.json package-lock.json ./
RUN npm ci
COPY . .
RUN npm run build

# Production image
FROM base AS production
ENV NODE_ENV=production

# Security: run as non-root user
RUN addgroup -g 1001 -S nodejs && \
    adduser -S appuser -u 1001 -G nodejs
USER appuser

# Copy built files and production deps (root-owned, read-only to appuser)
COPY --from=deps /app/node_modules ./node_modules
COPY --from=build /app/dist ./dist
COPY --from=build /app/package.json ./

EXPOSE 3000
# Container-level health probe; busybox wget ships with alpine
HEALTHCHECK --interval=30s --timeout=3s \
  CMD wget --no-verbose --tries=1 --spider http://localhost:3000/health || exit 1

CMD ["node", "dist/index.js"]
# docker-compose.yml
# NOTE: the top-level `version:` key is obsolete in Compose v2 and is omitted.
services:
  api:
    build: .
    ports:
      - "3000:3000"
    environment:
      - DATABASE_URL=postgresql://user:pass@db:5432/mydb
      - REDIS_URL=redis://cache:6379
    depends_on:
      db:
        condition: service_healthy   # wait until postgres passes its healthcheck
      cache:
        condition: service_started
    restart: unless-stopped

  db:
    image: postgres:16-alpine
    volumes:
      - pgdata:/var/lib/postgresql/data   # persist data across container restarts
    environment:
      POSTGRES_DB: mydb
      POSTGRES_USER: user
      POSTGRES_PASSWORD: pass
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U user -d mydb"]
      interval: 5s
      timeout: 5s
      retries: 5

  cache:
    image: redis:7-alpine

volumes:
  pgdata:

Structured Logging with Pino

// npm install pino pino-pretty
const pino = require('pino');

// Pretty, colorized output is for local development only; production
// emits raw JSON lines for log aggregators.
const isProduction = process.env.NODE_ENV === 'production';
const devTransport = { target: 'pino-pretty', options: { colorize: true } };

const logger = pino({
  level: process.env.LOG_LEVEL || 'info',
  transport: isProduction ? undefined : devTransport,
  // Base context merged into every log line.
  base: {
    service: 'api-server',
    version: process.env.npm_package_version,
  },
  // Sensitive fields are masked before the line is written.
  redact: ['req.headers.authorization', 'body.password', 'body.creditCard'],
});

// Log levels: context object first, human-readable message second (Pino convention).
logger.info('Server started');
logger.warn({ retries: 3 }, 'Database connection retry');
// NOTE(review): `err` and `userId` are assumed to be in scope at the call site.
logger.error({ err, userId }, 'Failed to process payment');

// Request logging middleware
// Request logging middleware: gives each request a child logger tagged with
// its request id, and emits one structured line when the response finishes.
function requestLogger(req, res, next) {
  const startedAt = Date.now();
  const reqLog = logger.child({ requestId: req.id });
  req.log = reqLog; // make the child logger available to route handlers

  const logCompletion = () => {
    reqLog.info(
      {
        method: req.method,
        url: req.url,
        statusCode: res.statusCode,
        duration: Date.now() - startedAt,
        userAgent: req.headers['user-agent'],
      },
      'request completed',
    );
  };

  res.on('finish', logCompletion);
  next();
}

// Output (production JSON):
// {"level":30,"time":1704067200000,"service":"api-server",
//  "method":"GET","url":"/api/users","statusCode":200,
//  "duration":45,"msg":"request completed"}

Health Checks

const http = require('http');

// Health check endpoint: reports overall status plus per-dependency state.
// Returns 200 when everything is up, 503 when any dependency is down.
app.get('/health', async (req, res) => {
  const health = {
    status: 'ok',
    timestamp: new Date().toISOString(),
    uptime: process.uptime(),
    memory: process.memoryUsage(),
  };

  // The database and cache probes are independent — run them in parallel
  // instead of awaiting them one after another.
  const [dbCheck, cacheCheck] = await Promise.allSettled([
    pool.query('SELECT 1'),
    redis.ping(),
  ]);

  health.database = dbCheck.status === 'fulfilled' ? 'connected' : 'disconnected';
  health.cache = cacheCheck.status === 'fulfilled' ? 'connected' : 'disconnected';
  if (dbCheck.status === 'rejected' || cacheCheck.status === 'rejected') {
    health.status = 'degraded';
  }

  const statusCode = health.status === 'ok' ? 200 : 503;
  res.status(statusCode).json(health);
});

// Readiness probe: answer 200 only while the database accepts queries,
// so orchestrators stop routing traffic here when it does not.
app.get('/ready', async (req, res) => {
  let ready = true;
  try {
    await pool.query('SELECT 1');
  } catch {
    ready = false;
  }
  res.status(ready ? 200 : 503).json({ ready });
});

// Liveness probe: if the event loop can run this handler, the process is alive.
app.get('/live', (req, res) => res.status(200).json({ alive: true }));

Nginx Reverse Proxy and CI/CD

# nginx.conf
# Round-robin over both Node.js instances; keepalive reuses upstream connections.
upstream node_app {
    server 127.0.0.1:3000;
    server 127.0.0.1:3001;
    keepalive 64;
}

# Redirect all plain HTTP traffic to HTTPS.
server {
    listen 80;
    server_name api.example.com;
    return 301 https://$server_name$request_uri;
}

server {
    listen 443 ssl;
    # The `http2` listen parameter is deprecated since nginx 1.25.1;
    # enable HTTP/2 with the dedicated directive instead.
    http2 on;
    server_name api.example.com;

    ssl_certificate /etc/ssl/certs/api.crt;
    ssl_certificate_key /etc/ssl/private/api.key;

    # Security headers
    add_header X-Frame-Options DENY;
    add_header X-Content-Type-Options nosniff;

    location / {
        proxy_pass http://node_app;
        # HTTP/1.1 + Upgrade headers are required for WebSocket pass-through.
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_cache_bypass $http_upgrade;
    }

    # Static files served by Nginx (faster)
    location /static/ {
        root /var/www;
        expires 30d;
        add_header Cache-Control "public, immutable";
    }
}
# .github/workflows/deploy.yml
name: Deploy
on:
  push:
    branches: [main]

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 20
          cache: 'npm'
      - run: npm ci
      - run: npm test
      - run: npm run lint

  deploy:
    needs: test    # never deploy a commit that fails tests or lint
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Build and push Docker image
        run: |
          docker build -t registry.example.com/myapp:$GITHUB_SHA .
          docker tag registry.example.com/myapp:$GITHUB_SHA registry.example.com/myapp:latest
          # Push the immutable SHA tag as well as latest so deploys can be rolled back
          docker push registry.example.com/myapp:$GITHUB_SHA
          docker push registry.example.com/myapp:latest
      - name: Deploy to production
        run: |
          # docker-compose v1 is end-of-life; use the `docker compose` plugin
          ssh deploy@server 'docker pull registry.example.com/myapp:latest && docker compose up -d'

💡 Key Takeaways

  • Use PM2 or Docker for process management and auto-restart
  • Multi-stage Docker builds produce smaller, more secure images
  • Use Pino for high-performance structured JSON logging
  • Implement health, readiness, and liveness check endpoints
  • Put Nginx in front of Node.js for SSL termination and static files

Continue Learning