Database Connectivity in Node.js
Most Node.js applications need a database. Whether you choose a relational database like PostgreSQL, a document store like MongoDB, or an in-memory cache like Redis, understanding connection management, ORMs, and query patterns is essential.
🗄️ Database Options
PostgreSQL
Relational, ACID, complex queries, JSON support
MongoDB
Document store, flexible schema, horizontal scaling
Redis
In-memory, caching, pub/sub, queues, sessions
PostgreSQL with Connection Pooling
// npm install pg
const { Pool } = require('pg');

// Connection pool: reuses a bounded set of TCP connections instead of
// opening one per query (connection setup is expensive in Postgres).
const pool = new Pool({
  host: process.env.DB_HOST || 'localhost',
  // Generalized: port is now configurable via DB_PORT (was hard-coded 5432).
  port: Number(process.env.DB_PORT ?? 5432),
  database: process.env.DB_NAME,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  max: 20, // Max connections in pool
  idleTimeoutMillis: 30000, // Close connections idle longer than 30s
  connectionTimeoutMillis: 2000, // Fail fast when no connection is free
});
// Query helpers
/**
 * Run a parameterized query through the shared pool, logging its duration.
 * @param {string} text - SQL text with $1-style placeholders.
 * @param {Array} [params] - Values bound to the placeholders.
 * @returns {Promise<object>} The pg result object (rows, rowCount, ...).
 */
async function query(text, params) {
  const startedAt = Date.now();
  const result = await pool.query(text, params);
  const elapsed = Date.now() - startedAt;
  console.log('Query:', { text, duration: `${elapsed}ms`, rows: result.rowCount });
  return result;
}
// CRUD operations
// Fetch every user, newest first.
async function getUsers() {
  const result = await query('SELECT * FROM users ORDER BY created_at DESC');
  return result.rows;
}
// Look up a single user by primary key; resolves to null when no row matches.
async function getUserById(id) {
  const result = await query('SELECT * FROM users WHERE id = $1', [id]);
  return result.rows.length > 0 ? result.rows[0] : null;
}
// Insert a user and return the freshly created row (including DB defaults).
async function createUser(name, email) {
  const sql = 'INSERT INTO users (name, email) VALUES ($1, $2) RETURNING *';
  const result = await query(sql, [name, email]);
  return result.rows[0];
}
// Transaction
/**
 * Atomically move `amount` from one account to another.
 *
 * Both UPDATEs run inside one transaction, so observers never see money
 * withdrawn without being deposited (or vice versa).
 *
 * Fixes over the original: the debit UPDATE now requires a sufficient
 * balance, and both statements check rowCount — previously a transfer from
 * an empty (or nonexistent) account silently "succeeded".
 *
 * @param {number} fromId - Account debited.
 * @param {number} toId - Account credited.
 * @param {number} amount - Amount to move; must not exceed the source balance.
 * @throws {Error} On insufficient funds, a missing account, or any query
 *   failure (the transaction is rolled back before rethrowing).
 */
async function transferFunds(fromId, toId, amount) {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    // Overdraft guard lives in SQL: the row only updates when the balance
    // covers the amount, so concurrent transfers cannot race below zero.
    const debit = await client.query(
      'UPDATE accounts SET balance = balance - $1 WHERE id = $2 AND balance >= $1',
      [amount, fromId]
    );
    if (debit.rowCount === 0) {
      throw new Error(`Insufficient funds or unknown account: ${fromId}`);
    }
    const credit = await client.query(
      'UPDATE accounts SET balance = balance + $1 WHERE id = $2',
      [amount, toId]
    );
    if (credit.rowCount === 0) {
      throw new Error(`Unknown destination account: ${toId}`);
    }
    await client.query('COMMIT');
  } catch (err) {
    await client.query('ROLLBACK');
    throw err;
  } finally {
    // Return the connection to the pool whether or not the commit succeeded.
    client.release();
  }
}
Prisma ORM
// prisma/schema.prisma — Prisma reads this declarative schema to generate
// a type-safe client and SQL migrations.
// datasource db {
// provider = "postgresql"
// url = env("DATABASE_URL")
// }
//
// model User {
// id Int @id @default(autoincrement())
// email String @unique
// name String
// posts Post[]
// createdAt DateTime @default(now())
// }
//
// model Post {
// id Int @id @default(autoincrement())
// title String
// content String?
// published Boolean @default(false)
// author User @relation(fields: [authorId], references: [id])
// authorId Int
// }
// Usage in application code
// A single PrismaClient manages its own connection pool; create one per
// process and reuse it (instantiating per request exhausts connections).
const { PrismaClient } = require('@prisma/client');
const prisma = new PrismaClient();
// Create with relation: the nested `create` inserts the user and both posts
// in a single transaction, and `include: { posts: true }` returns the new
// posts on the result object.
// NOTE(review): top-level await requires an ES module (or an async wrapper);
// the require() above suggests CommonJS — confirm the intended module system.
const user = await prisma.user.create({
data: {
name: 'Alice',
email: 'alice@example.com',
posts: {
create: [
{ title: 'Hello World', content: 'My first post' },
{ title: 'Second Post', published: true },
]
}
},
include: { posts: true }
});
// Query with filters and pagination.
// `author: { name: { contains: 'Alice' } }` filters across the relation;
// `take`/`skip` implement offset pagination; the nested `select` narrows
// the included author to just name and email.
const posts = await prisma.post.findMany({
where: {
published: true,
author: { name: { contains: 'Alice' } }
},
orderBy: { createdAt: 'desc' },
take: 10,
skip: 0,
include: { author: { select: { name: true, email: true } } }
});
// Migrations:
// npx prisma migrate dev --name add_posts_table
// npx prisma migrate deploy (production)
// npx prisma db push (prototyping — applies schema without a migration file)
Drizzle ORM
// npm install drizzle-orm pg
// npm install -D drizzle-kit
import { pgTable, serial, varchar, boolean, timestamp } from 'drizzle-orm/pg-core';
import { drizzle } from 'drizzle-orm/node-postgres';
import { eq, and, desc } from 'drizzle-orm';
import { Pool } from 'pg';
// Define schema in TypeScript — the table object doubles as the source of
// truth for column types in queries below.
const users = pgTable('users', {
id: serial('id').primaryKey(),
name: varchar('name', { length: 100 }).notNull(),
email: varchar('email', { length: 255 }).unique().notNull(),
active: boolean('active').default(true),
createdAt: timestamp('created_at').defaultNow(),
});
// Initialize: drizzle wraps a standard pg Pool rather than owning connections.
const pool = new Pool({ connectionString: process.env.DATABASE_URL });
const db = drizzle(pool);
// Type-safe queries: column references (users.active etc.) are checked at
// compile time, and the builder mirrors SQL clause-for-clause.
const activeUsers = await db
.select()
.from(users)
.where(eq(users.active, true))
.orderBy(desc(users.createdAt))
.limit(10);
// Insert — .returning() sends RETURNING * and yields the inserted rows;
// destructuring takes the single new row.
const [newUser] = await db
.insert(users)
.values({ name: 'Bob', email: 'bob@example.com' })
.returning();
// Update — soft-deactivate user 1.
await db
.update(users)
.set({ active: false })
.where(eq(users.id, 1));
// Delete — hard-delete user 1.
await db.delete(users).where(eq(users.id, 1));
Redis for Caching and Sessions
// npm install ioredis
const Redis = require('ioredis');
// Shared client for commands; pub/sub needs its own connections (see below).
const redis = new Redis({
host: process.env.REDIS_HOST || 'localhost',
port: 6379,
password: process.env.REDIS_PASSWORD,
maxRetriesPerRequest: 3, // Give up on a command after 3 retries
});
/**
 * Cache-aside read: try Redis first, fall back to the database on a miss.
 *
 * Bug fixed: pg-style clients resolve `query()` to a result object, not a
 * row, so the original cached the entire result envelope ({ rows, ... }).
 * We now destructure the rows, cache the row itself, and skip caching when
 * the user does not exist.
 *
 * @param {number|string} userId
 * @returns {Promise<object|null>} The user row, or null when not found.
 */
async function getCachedUser(userId) {
  const cacheKey = `user:${userId}`;
  // Check cache first
  const cached = await redis.get(cacheKey);
  if (cached) {
    return JSON.parse(cached);
  }
  // Cache miss: fetch from database.
  // NOTE(review): assumes `db.query` follows the pg result shape ({ rows });
  // the $1 placeholder style supports this, but confirm the actual client.
  const { rows } = await db.query('SELECT * FROM users WHERE id = $1', [userId]);
  const user = rows[0] ?? null;
  if (user !== null) {
    // Store in cache with a 60-second TTL so stale data self-heals.
    await redis.setex(cacheKey, 60, JSON.stringify(user));
  }
  return user;
}
// Invalidate on update
// Write-through invalidation: persist the change, then drop the cached copy
// so the next read repopulates it from the database.
async function updateUser(userId, data) {
  const cacheKey = `user:${userId}`;
  await db.query('UPDATE users SET name = $1 WHERE id = $2', [data.name, userId]);
  await redis.del(cacheKey);
}
// Rate limiting with Redis
/**
 * Fixed-window rate limiter backed by Redis.
 *
 * Bug fixed: the original ran INCR and only then EXPIRE; a crash or dropped
 * connection between the two left a counter with no TTL, rate-limiting the
 * client forever. `SET key 0 EX windowSec NX` seeds the key WITH its TTL
 * atomically before INCR, and both commands travel in one MULTI round trip.
 *
 * @param {string} ip - Client identifier used as the counter key.
 * @param {number} [limit=100] - Max requests allowed per window.
 * @param {number} [windowSec=900] - Window length in seconds.
 * @returns {Promise<{allowed: boolean, remaining: number}>}
 */
async function checkRateLimit(ip, limit = 100, windowSec = 900) {
  const key = `rate:${ip}`;
  const replies = await redis
    .multi()
    .set(key, 0, 'EX', windowSec, 'NX') // no-op when the key already exists
    .incr(key)
    .exec();
  // ioredis exec() resolves to [[err, reply], ...]; index 1 is the INCR count.
  const current = replies[1][1];
  return { allowed: current <= limit, remaining: Math.max(0, limit - current) };
}
// Pub/Sub for real-time features.
// A connection in subscriber mode cannot issue regular commands, so
// publishing and subscribing use separate Redis connections.
const subscriber = new Redis();
const publisher = new Redis();

subscriber.subscribe('notifications');
subscriber.on('message', (channel, message) => {
  // Fix: a malformed payload previously threw inside the event handler and
  // crashed the process; parse defensively and drop bad messages instead.
  let data;
  try {
    data = JSON.parse(message);
  } catch (err) {
    console.error(`Dropping malformed message on ${channel}:`, err);
    return;
  }
  // Broadcast to connected WebSocket clients
});

publisher.publish('notifications', JSON.stringify({
  type: 'new_message',
  userId: 123
}));
💡 Key Takeaways
- Always use connection pooling for database connections
- Use parameterized queries to prevent SQL injection
- Prisma offers the best developer experience; Drizzle is lighter and closer to SQL
- Use Redis for caching, sessions, rate limiting, and pub/sub
- Wrap multi-step operations in transactions for data consistency