Node.js has first-class async I/O, making it a natural fit for database work. Every database operation is non-blocking by default — no thread-per-connection model.
Connection strings follow the form `postgres://user:pass@host/db`. Use `async/await` for clean control flow and `Promise.all()` for parallel queries.

| Level | Tool | Trade-off |
|---|---|---|
| Raw SQL | pg, mysql2 | Full control, no abstraction |
| Query Builder | Knex.js | Composable, DB-agnostic SQL |
| ORM | Prisma, Sequelize | Models, relations, type safety |
| ODM | Mongoose | Schema enforcement for MongoDB |
Raw drivers give you direct access to SQL with minimal abstraction. Every Node.js database library is built on top of these.
// pg — the canonical PostgreSQL driver; one Client = one TCP connection.
const { Client } = require('pg');
const client = new Client({
connectionString: process.env.DATABASE_URL
});
await client.connect();
// Parameterised query — prevents SQL injection
const { rows } = await client.query(
'SELECT * FROM users WHERE email = $1',
['alice@example.com']
);
console.log(rows[0]);
// A standalone Client must be closed explicitly (a Pool handles this for you).
await client.end();
// mysql2/promise — MySQL driver with a native Promise API.
const mysql = require('mysql2/promise');
const conn = await mysql.createConnection({
host: 'localhost',
user: 'root',
database: 'myapp'
});
// Parameterised with ? placeholders
// execute() uses a server-side prepared statement (vs conn.query()).
const [rows] = await conn.execute(
'SELECT * FROM users WHERE email = ?',
['alice@example.com']
);
console.log(rows[0]);
await conn.end();
// better-sqlite3 — synchronous, in-process SQLite driver.
const Database = require('better-sqlite3');
const db = new Database('app.db');
// Synchronous — ideal for CLI tools & Electron
const stmt = db.prepare(
'SELECT * FROM users WHERE email = ?'
);
const user = stmt.get('alice@example.com');
console.log(user);
// Transactions via function wrapper
// Prepare the INSERT ONCE, outside the loop — the original re-prepared it
// for every row on every call, defeating the point of prepared statements
// and slowing down bulk inserts.
const insertUser = db.prepare('INSERT INTO users (name) VALUES (?)');
const insert = db.transaction((users) => {
for (const u of users) insertUser.run(u.name);
});
Never interpolate user input into SQL strings: `SELECT * FROM users WHERE id = ${id}`. Use $1 (pg), ? (mysql2/sqlite), or named parameters. This is your primary defence against SQL injection.
Opening a new TCP connection per query is expensive (~20-50ms). A connection pool maintains a set of pre-established connections that are reused across requests.
const { Pool } = require('pg');
const pool = new Pool({
connectionString: process.env.DATABASE_URL,
max: 20, // max connections in pool
idleTimeoutMillis: 30000, // close idle after 30s
connectionTimeoutMillis: 5000, // fail if no conn in 5s
});
// Option 1: pool.query() — auto checkout/checkin
// Fine for single statements; each call may use a DIFFERENT connection.
const { rows } = await pool.query(
'SELECT * FROM users WHERE active = $1', [true]
);
// Option 2: manual checkout for transactions
// BEGIN/COMMIT are connection-scoped, so all statements must share one client.
const client = await pool.connect();
try {
await client.query('BEGIN');
await client.query('UPDATE accounts SET balance = balance - $1 WHERE id = $2', [100, 1]);
await client.query('UPDATE accounts SET balance = balance + $1 WHERE id = $2', [100, 2]);
await client.query('COMMIT');
} catch (e) {
await client.query('ROLLBACK');
throw e;
} finally {
client.release(); // return to pool — CRITICAL
}
Rule of thumb: `max` = CPU cores × 2 + disk spindles. Set `connectionTimeoutMillis` to fail fast.

Knex is a SQL query builder that supports PostgreSQL, MySQL, SQLite3, and MSSQL. It generates parameterised SQL and provides migrations, seeds, and a chainable API.
// Knex — chainable builder; each method call composes into one SQL statement.
const knex = require('knex')({
client: 'pg',
connection: process.env.DATABASE_URL,
pool: { min: 2, max: 10 }
});
// Select with conditions
const users = await knex('users')
.select('id', 'name', 'email')
.where('active', true)
.andWhere('role', 'admin')
.orderBy('created_at', 'desc')
.limit(20);
// Insert returning
// .returning('*') works on PostgreSQL/MSSQL; MySQL/SQLite ignore it.
const [newUser] = await knex('users')
.insert({ name: 'Alice', email: 'alice@dev.io' })
.returning('*');
// Update
await knex('users')
.where('id', 42)
.update({ last_login: knex.fn.now() }); // SQL NOW(), evaluated by the DB
// migrations/20240101_create_users.js
// Forward migration: create the users table.
exports.up = (knex) =>
knex.schema.createTable('users', (table) => {
table.increments('id').primary();
table.string('name').notNullable();
table.string('email').unique().notNullable();
table.boolean('active').defaultTo(true);
table.timestamps(true, true); // created_at, updated_at
});
// Rollback: drop the table again.
exports.down = (knex) => knex.schema.dropTable('users');
# CLI commands
npx knex migrate:latest # run pending migrations
npx knex migrate:rollback # undo last batch
npx knex seed:run # populate test data
Prisma is a next-generation ORM with a declarative schema, auto-generated type-safe client, and powerful migration system. It generates TypeScript types from your schema.
// prisma/schema.prisma
// Datasource: which database to talk to; URL comes from the environment.
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
// Generates the type-safe client into node_modules/@prisma/client.
generator client {
provider = "prisma-client-js"
}
model User {
id Int @id @default(autoincrement())
email String @unique
name String
posts Post[] // 1-to-many relation
profile Profile? // 1-to-1 relation
createdAt DateTime @default(now())
}
model Post {
id Int @id @default(autoincrement())
title String
content String? // optional (nullable column)
published Boolean @default(false)
author User @relation(fields: [authorId], references: [id])
authorId Int
tags Tag[] // many-to-many (implicit)
}
model Tag {
id Int @id @default(autoincrement())
name String @unique
posts Post[]
}
# Install and initialise
npm install prisma @prisma/client
npx prisma init
# Generate client from schema
npx prisma generate
# Create and apply migration
npx prisma migrate dev --name init
# Visual database browser
npx prisma studio
import { PrismaClient } from '@prisma/client';
const prisma = new PrismaClient();
// Full autocompletion & type checking
const user = await prisma.user.findUnique({
where: { email: 'alice@dev.io' },
include: { posts: true, profile: true }
});
// user.posts is typed as Post[]
// NOTE(review): `user` (and `users` below) is declared more than once —
// these are independent slide snippets, not one runnable file.
// Create
const user = await prisma.user.create({
data: { name: 'Alice', email: 'alice@dev.io' }
});
// Read with filtering
// NOTE(review): `role` is not in the User model shown earlier — assumes an
// extended schema; confirm.
const admins = await prisma.user.findMany({
where: {
role: 'ADMIN',
createdAt: { gte: new Date('2024-01-01') }
},
orderBy: { name: 'asc' }
});
// Update
const updated = await prisma.user.update({
where: { id: 1 },
data: { name: 'Alice Smith' }
});
// Delete
await prisma.user.delete({ where: { id: 1 } });
// Offset pagination
const page = await prisma.post.findMany({
skip: 20,
take: 10,
orderBy: { createdAt: 'desc' }
});
// Cursor pagination (more efficient)
const next = await prisma.post.findMany({
take: 10,
cursor: { id: lastId },
skip: 1, // skip the cursor itself
});
// Interactive transaction
// The callback's `tx` client scopes every query to one transaction; any
// throw inside the callback rolls the whole unit back.
const transfer = await prisma.$transaction(async (tx) => {
const sender = await tx.account.update({
where: { id: 1 },
data: { balance: { decrement: 100 } }
});
// Business-rule check INSIDE the transaction — throwing here undoes the
// decrement above.
if (sender.balance < 0) throw new Error('Insufficient funds');
const receiver = await tx.account.update({
where: { id: 2 },
data: { balance: { increment: 100 } }
});
return { sender, receiver };
});
// When Prisma's API isn't enough
// Tagged-template raw SQL — interpolations like ${minPosts} are sent as
// bound parameters, NOT string-spliced, so this stays injection-safe.
const result = await prisma.$queryRaw`
SELECT u.name, COUNT(p.id) as post_count
FROM "User" u
LEFT JOIN "Post" p ON p."authorId" = u.id
GROUP BY u.id
HAVING COUNT(p.id) > ${minPosts}
`;
// Execute without returning rows
await prisma.$executeRaw`
TRUNCATE TABLE "Session" CASCADE
`;
Sequelize is a mature, full-featured ORM for Node.js supporting PostgreSQL, MySQL, MariaDB, SQLite, and MSSQL. It follows the Active Record pattern and is widely used in legacy projects.
// Sequelize — define models, then query through them (Active Record style).
const { Sequelize, DataTypes } = require('sequelize');
const sequelize = new Sequelize(process.env.DATABASE_URL);
const User = sequelize.define('User', {
name: {
type: DataTypes.STRING,
allowNull: false
},
email: {
type: DataTypes.STRING,
unique: true,
validate: { isEmail: true } // built-in validator, runs on create/update
},
role: {
type: DataTypes.ENUM('USER', 'ADMIN'),
defaultValue: 'USER'
}
}, { paranoid: true }); // soft deletes
const Post = sequelize.define('Post', {
title: DataTypes.STRING,
content: DataTypes.TEXT,
published: { type: DataTypes.BOOLEAN, defaultValue: false }
});
// Associations
User.hasMany(Post, { foreignKey: 'authorId' });
Post.belongsTo(User, { foreignKey: 'authorId' });
// Find with eager loading
// An inner `where` on an include defaults to required:true (INNER JOIN) —
// admins with no published posts are excluded from the result.
const users = await User.findAll({
where: { role: 'ADMIN' },
include: [{ model: Post, where: { published: true } }],
order: [['createdAt', 'DESC']],
limit: 10
});
// Create with association
const user = await User.create({
name: 'Bob',
email: 'bob@dev.io',
Posts: [{ title: 'Hello World' }]
}, { include: [Post] });
// Lifecycle hooks — run automatically around model operations.
User.beforeCreate(async (user) => {
user.email = user.email.toLowerCase();
});
User.afterDestroy(async (user) => {
// NOTE(review): AuditLog is not defined in this snippet — presumably a
// model declared elsewhere; confirm.
await AuditLog.create({
action: 'USER_DELETED', userId: user.id
});
});
Mongoose is the de-facto ODM (Object Document Mapper) for MongoDB in Node.js. It adds schema validation, middleware, and relationship management to MongoDB's flexible document model.
const mongoose = require('mongoose');
await mongoose.connect(process.env.MONGO_URI);
const userSchema = new mongoose.Schema({
name: { type: String, required: true, trim: true },
email: { type: String, unique: true, lowercase: true },
posts: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Post' }],
metadata: {
loginCount: { type: Number, default: 0 },
lastSeen: Date
}
}, { timestamps: true }); // createdAt, updatedAt
// Middleware (hooks)
// NOTE(review): `password` is not declared in the schema above and `hash()`
// is not defined here — presumably elided from the slide; confirm before use.
userSchema.pre('save', function(next) {
if (this.isModified('password')) {
this.password = hash(this.password);
}
next();
});
// Virtual fields
// Computed on access; never persisted to MongoDB.
userSchema.virtual('postCount').get(function() {
return this.posts.length;
});
const User = mongoose.model('User', userSchema);
// Populate references — MongoDB "join"
// Issues a second query for the referenced docs and stitches them in.
const user = await User.findById(userId)
.populate({
path: 'posts',
match: { published: true },
select: 'title createdAt',
options: { sort: { createdAt: -1 }, limit: 5 }
});
// Aggregation pipeline — stages run server-side, in order.
const stats = await Post.aggregate([
{ $match: { published: true } },
{ $group: {
_id: '$authorId',
totalPosts: { $sum: 1 },
avgLength: { $avg: { $strLenCP: '$content' } }
}},
{ $sort: { totalPosts: -1 } },
{ $limit: 10 }
]);
Migrations are version control for your database schema. They ensure every environment (dev, staging, production) has the same structure, and changes are applied in order.
// 20240315_add_posts_table.js
// Adds posts with an FK to users; deleting a user cascades to their posts.
exports.up = function(knex) {
return knex.schema.createTable('posts', (t) => {
t.increments('id').primary();
t.string('title').notNullable();
t.text('content');
t.integer('author_id')
.references('id').inTable('users')
.onDelete('CASCADE');
t.timestamps(true, true);
});
};
exports.down = function(knex) {
return knex.schema.dropTable('posts');
};
# Create migration from schema changes
npx prisma migrate dev --name add_posts
# Apply in production (no prompts)
npx prisma migrate deploy
# Check migration status
npx prisma migrate status
-- Generated SQL (prisma/migrations/...)
CREATE TABLE "Post" (
"id" SERIAL NOT NULL,
"title" TEXT NOT NULL,
"content" TEXT,
"authorId" INTEGER NOT NULL,
CONSTRAINT "Post_pkey" PRIMARY KEY ("id"),
CONSTRAINT "Post_authorId_fkey"
FOREIGN KEY ("authorId")
REFERENCES "User"("id") ON DELETE CASCADE
);
Use `migrate dev` during development and `migrate deploy` on release.

Seeds populate databases with test data for development, testing, and demos. Fixtures provide deterministic data for automated tests.
// seeds/01_users.js
// Seeds run in filename order — delete child tables before parents when
// foreign keys exist.
exports.seed = async function(knex) {
await knex('users').del(); // clear existing
await knex('users').insert([
{ id: 1, name: 'Alice', email: 'alice@test.io', role: 'ADMIN' },
{ id: 2, name: 'Bob', email: 'bob@test.io', role: 'USER' },
{ id: 3, name: 'Carol', email: 'carol@test.io', role: 'USER' }
]);
};
npx knex seed:run # run all seeds
npx knex seed:run --specific=01_users.js
// prisma/seed.ts
import { PrismaClient } from '@prisma/client';
const prisma = new PrismaClient();
// Idempotent seed: upsert keyed on a unique column so re-running is safe.
async function main() {
const alice = await prisma.user.upsert({
where: { email: 'alice@test.io' },
update: {},
create: { name: 'Alice', email: 'alice@test.io' }
});
}
// Surface failures with a non-zero exit code — the original bare
// `.finally()` left a rejected promise unhandled, hiding seed errors
// (and their exit status) from CI.
main()
.catch((e) => {
console.error(e);
process.exitCode = 1;
})
.finally(() => prisma.$disconnect());
// test/factories/user.factory.ts
import { faker } from '@faker-js/faker';
// Build a plausible user record; pass `overrides` to pin any field a test
// cares about.
export function buildUser(overrides = {}) {
const defaults = {
name: faker.person.fullName(),
email: faker.internet.email(),
role: 'USER',
};
return { ...defaults, ...overrides }; // overrides win
}
// In tests
const admin = buildUser({ role: 'ADMIN' });
const user = await prisma.user.create({
data: buildUser()
});
A transaction groups multiple operations into an atomic unit. Either all operations succeed (commit) or all are rolled back. Essential for data integrity.
| Property | Meaning |
|---|---|
| Atomicity | All or nothing — partial writes never persist |
| Consistency | DB moves from one valid state to another |
| Isolation | Concurrent txns don't interfere with each other |
| Durability | Committed data survives crashes |
// Atomic order placement: create order, decrement stock, record payment —
// all commit together or not at all.
await prisma.$transaction(async (tx) => {
// All queries use the same transaction
const order = await tx.order.create({
data: { userId: 1, total: 99.99 }
});
await tx.inventory.update({
where: { productId: 42 },
data: { stock: { decrement: 1 } }
});
await tx.payment.create({
data: { orderId: order.id, amount: 99.99 }
});
// If any fails, ALL are rolled back
});
// Knex transaction — commits when the callback resolves, rolls back if it
// throws; every query must go through `trx`, not the base `knex`.
await knex.transaction(async (trx) => {
const [orderId] = await trx('orders')
.insert({ user_id: 1, total: 99.99 })
.returning('id');
await trx('inventory')
.where('product_id', 42)
.decrement('stock', 1);
await trx('payments')
.insert({ order_id: orderId, amount: 99.99 });
});
| Level | Prevents |
|---|---|
| Read Uncommitted | Nothing (fastest) |
| Read Committed | Dirty reads |
| Repeatable Read | + Non-repeatable reads |
| Serializable | + Phantom reads (safest) |
PostgreSQL default: Read Committed. Use Serializable for financial operations.
How you wire your database into Express affects performance, testability, and resource management.
// db.js — singleton pool
// Node's module cache returns the SAME exported pool to every require(),
// so the whole process shares one pool.
const { Pool } = require('pg');
const pool = new Pool({
connectionString: process.env.DATABASE_URL,
max: 20
});
module.exports = pool;
// routes/users.js
const pool = require('../db');
router.get('/users', async (req, res) => {
const { rows } = await pool.query(
'SELECT * FROM users'
);
res.json(rows);
});
// Attach to app for testability
// Dependency injection: the pool is a parameter, so tests can hand in a
// mock instead of monkey-patching a module-level singleton.
function createApp(pool) {
const app = express();
app.get('/users', async (req, res) => {
const { rows } = await pool.query(
'SELECT * FROM users'
);
res.json(rows);
});
return app;
}
// Production
const app = createApp(realPool);
// Test
const app = createApp(mockPool);
// Attach client to request
app.use(async (req, res, next) => {
req.db = await pool.connect();
// 'close' fires when the response completes OR the client disconnects
// mid-request; the original 'finish' listener never fires on aborted
// requests, leaking the checked-out connection each time.
res.on('close', () => req.db.release());
next();
});
// Use in routes
app.get('/users', async (req, res) => {
const { rows } = await req.db.query(
'SELECT * FROM users'
);
res.json(rows);
});
Never create a new pool per request. Pool creation is expensive and you'll exhaust database connections.
// Create ONE PrismaClient instance and reuse it globally
// (a new client per request would exhaust database connections).
const prisma = new PrismaClient();
app.get('/users', async (req, res) => { res.json(await prisma.user.findMany()); });
// Prisma manages its own connection pool internally — no manual pool management needed.
Database errors are inevitable. Your app must handle constraint violations, connection failures, and timeouts gracefully.
// Map a known constraint violation to a client error; rethrow the rest.
try {
await prisma.user.create({
data: { email: 'alice@dev.io' } // duplicate
});
} catch (e) {
if (e.code === 'P2002') { // Prisma unique violation
// 409 Conflict — the submitted value already exists.
return res.status(409).json({
error: `${e.meta.target} already exists`
});
}
throw e; // re-throw unexpected errors
}
// pg: error.code === '23505' (unique_violation)
// pg: error.code === '23503' (fk_violation)
// Errors on IDLE clients (e.g. DB restart) surface here, not in any query —
// without this handler they would crash the process.
pool.on('error', (err) => {
console.error('Idle client error:', err.message);
// Pool auto-removes dead connections
});
// Health check endpoint
app.get('/health', async (req, res) => {
try {
await pool.query('SELECT 1'); // cheapest possible round-trip
res.json({ db: 'connected' });
} catch {
res.status(503).json({ db: 'disconnected' });
}
});
/**
 * Retry an async operation on transient connection errors with
 * exponential backoff.
 *
 * @param {() => Promise<any>} fn - operation to attempt
 * @param {{retries?: number, delay?: number}} [opts]
 *   retries — total attempts (clamped to at least 1);
 *   delay — base backoff in ms, doubled after each failure.
 * @returns the result of the first successful call
 * @throws the last error after exhausting attempts, or immediately for
 *   non-transient errors
 */
async function withRetry(fn, { retries = 3, delay = 100 } = {}) {
// Fix: with retries <= 0 the original loop never ran and silently
// resolved to undefined without ever calling fn. Always attempt once.
const attempts = Math.max(1, retries);
for (let i = 0; i < attempts; i++) {
try {
return await fn();
} catch (err) {
const isTransient = ['ECONNRESET', 'ETIMEDOUT',
'57P01'].includes(err.code); // 57P01 = admin shutdown
if (!isTransient || i === attempts - 1) throw err;
await new Promise(r =>
setTimeout(r, delay * Math.pow(2, i)) // expo backoff
);
}
}
}
| Code | Meaning |
|---|---|
23505 | Unique violation |
23503 | Foreign key violation |
42P01 | Table does not exist |
40001 | Serialization failure |
The biggest database performance killers in Node.js apps are N+1 queries, missing indexes, and misconfigured connection pools.
// BAD: 1 query for users + N queries for posts
const users = await prisma.user.findMany();
for (const user of users) {
user.posts = await prisma.post.findMany({
where: { authorId: user.id } // N queries!
});
}
// GOOD: eager loading — 1-2 queries total
// Prisma batches the relation load into a single query over all user ids.
const users = await prisma.user.findMany({
include: { posts: true }
});
-- Use EXPLAIN ANALYZE to find slow queries
EXPLAIN ANALYZE
SELECT u.*, COUNT(p.id)
FROM users u
LEFT JOIN posts p ON p.author_id = u.id
GROUP BY u.id;
-- Look for: Seq Scan (missing index), Nested Loop
-- (N+1), high actual time, row estimates vs actuals
// Prisma schema indexes
// Declared on the model; `prisma migrate` emits a CREATE INDEX for each.
model Post {
id Int @id @default(autoincrement())
authorId Int
status String
title String
@@index([authorId]) // single column
@@index([status, authorId]) // composite (leftmost column usable alone)
@@index([title], type: BTree) // explicit type
}
| Strategy | ORM | Method |
|---|---|---|
| Include | Prisma | include: { posts: true } |
| Eager | Sequelize | include: [Post] |
| Populate | Mongoose | .populate('posts') |
| Join | Knex | .join('posts', ...) |
Monitor `pool.waitingCount` — if consistently high, increase `max`. Monitor `pool.idleCount` — if consistently high, decrease `max`. Set `statement_timeout` to kill runaway queries. Use pgBouncer for connection multiplexing at scale.

Database tests need isolation (no test affects another), speed (fast feedback loops), and reproducibility (same results every run).
// jest.setup.js — per-test transaction rollback
// A transaction is connection-scoped: BEGIN and ROLLBACK must execute on
// the SAME client as the test's queries. pool.query() checks out an
// arbitrary client per call, so the original BEGIN/ROLLBACK pair never
// actually wrapped the test. Hold one client for the test's lifetime.
let testClient;
beforeEach(async () => {
testClient = await pool.connect();
global.db = testClient; // tests must query via global.db, not the pool
await testClient.query('BEGIN');
});
afterEach(async () => {
await testClient.query('ROLLBACK'); // undo all changes
testClient.release();
});
// Use SQLite for fast unit tests
// :memory: — schema and data exist only for this process; nothing to clean up.
const knex = require('knex')({
client: 'better-sqlite3',
connection: { filename: ':memory:' },
useNullAsDefault: true
});
beforeAll(async () => {
await knex.migrate.latest(); // apply schema
await knex.seed.run(); // load fixtures
});
afterAll(() => knex.destroy()); // release the DB handle
// Use a separate test database
// .env.test
// DATABASE_URL="postgresql://localhost/myapp_test"
beforeEach(async () => {
// Clean all tables between tests
// Table names come from Prisma's own model metadata (never user input),
// so the $executeRawUnsafe interpolation below is acceptable here.
const tables = Prisma.dmmf.datamodel.models
.map(m => m.dbName || m.name);
for (const table of tables) {
await prisma.$executeRawUnsafe(
`TRUNCATE TABLE "${table}" CASCADE`
);
}
});
| Strategy | Speed | Fidelity |
|---|---|---|
| Transaction rollback | Fast | High |
| Truncate between tests | Medium | High |
| In-memory SQLite | Fastest | Lower |
| Docker per test suite | Slow | Highest |
Use factory functions (see Slide 12) for test data. Avoid shared mutable state. Each test should create exactly the data it needs.
There is no "best" tool — the right choice depends on your project's type safety needs, query complexity, team experience, and performance requirements.
| Criterion | pg / mysql2 | Knex | Prisma | Sequelize | Mongoose |
|---|---|---|---|---|---|
| Type Safety | None | Basic | Excellent | Via TS defs | Via TS defs |
| Learning Curve | Low (just SQL) | Low-Medium | Medium | Medium-High | Medium |
| Query Flexibility | Full SQL | High | Medium | Medium | High (aggregation) |
| Migrations | Manual | Built-in | Built-in | Built-in | N/A |
| Performance | Best | Near-native | Good (Rust engine) | Overhead | Depends on queries |
| Relations | Manual JOINs | Manual JOINs | Declarative | Declarative | Population |
| Database | One per driver | PG/MySQL/SQLite | PG/MySQL/SQLite/Mongo | PG/MySQL/SQLite/MSSQL | MongoDB only |
| Ecosystem | Huge | Mature | Growing fast | Mature | Mature |
Profile slow queries with `EXPLAIN ANALYZE`. Start with Prisma for new TypeScript projects. Use raw SQL when you need performance or complex queries. Always use connection pooling, parameterised queries, and migrations.