Node.js Postgres migrations with multi-schema support, in the vein of the postgres-migrations lib.

The CLI entry point:
const minimist = require('minimist');
const { migrateAll } = require('./migrate');

const args = minimist(process.argv.slice(2));
const dir = args.dir;

if (!dir) {
  console.error('Usage: node migrate.js --dir <migrationsDir>');
  process.exit(1);
}

let connectionString = process.env.DATABASE_URL;

// Fall back to building a connection string from individual env vars
// when DATABASE_URL is not set.
if (!connectionString) {
  const {
    DATABASE_NAME,
    DATABASE_HOST,
    DATABASE_USER,
    DATABASE_PASSWORD,
  } = process.env;

  if (!DATABASE_NAME || !DATABASE_HOST || !DATABASE_USER || !DATABASE_PASSWORD) {
    console.error('❌ Missing one or more required DB environment variables:');
    console.error('   DATABASE_NAME, DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD');
    process.exit(1);
  }

  connectionString = `postgresql://${encodeURIComponent(DATABASE_USER)}:${encodeURIComponent(DATABASE_PASSWORD)}@${DATABASE_HOST}/${DATABASE_NAME}`;
}

migrateAll({ dir, connectionString }).catch(err => {
  console.error('Migration error:', err);
  process.exit(1);
});
migrate.js, the module required above as ./migrate:
const crypto = require('crypto');
const fs = require('fs');
const path = require('path');
const { Client } = require('pg');

const MIGRATION_TABLE = 'migration_history';

async function applyMigrations({ client, schema, dir }) {
  // Ensure the schema and its migration history table (with hash column) exist
  await client.query(`
    CREATE SCHEMA IF NOT EXISTS ${schema};
    CREATE TABLE IF NOT EXISTS ${schema}.${MIGRATION_TABLE} (
      id SERIAL PRIMARY KEY,
      filename TEXT UNIQUE NOT NULL,
      hash TEXT NOT NULL,
      applied_at TIMESTAMPTZ DEFAULT now()
    );
  `);

  // Collect migration files in lexicographic order
  const files = fs.readdirSync(dir)
    .filter(file => file.endsWith('.sql'))
    .sort((a, b) => a.localeCompare(b));

  // Enforce the 0001_description.sql naming convention and a gapless sequence
  const filenameRegex = /^(\d{4})_(.+)\.sql$/;
  files.forEach((file, i) => {
    const match = file.match(filenameRegex);
    if (!match) {
      console.error(`❌ Invalid filename: "${file}". Use format: 0001_description.sql`);
      process.exit(1);
    }
    const expectedInOrderPrefix = String(i + 1).padStart(4, '0');
    if (match[1] !== expectedInOrderPrefix) {
      console.error(`❌ Migration order mismatch: expected ${expectedInOrderPrefix}, found ${file}`);
      process.exit(1);
    }
  });

  // Get already-applied migrations and their hashes
  const { rows: appliedMigrations } = await client.query(
    `SELECT filename, hash FROM ${schema}.${MIGRATION_TABLE}`
  );
  const appliedMap = new Map(appliedMigrations.map(row => [row.filename, row.hash]));

  for (const file of files) {
    const filePath = path.join(dir, file);
    const sql = fs.readFileSync(filePath, 'utf8');
    const hash = crypto.createHash('md5').update(sql).digest('hex');

    if (appliedMap.has(file)) {
      // Already applied: verify the file has not been edited since
      const appliedHash = appliedMap.get(file);
      if (appliedHash !== hash) {
        console.error(`❌ [${schema}] Migration hash mismatch for ${file}`);
        console.error(`Expected hash: ${appliedHash}`);
        console.error(`Current file hash: ${hash}`);
        console.error(`Migrations should be immutable. Revert the change or use a new migration.`);
        process.exit(1);
      }
      console.log(`✔ [${schema}] Skipping already applied: ${file}`);
      continue;
    }

    // Apply the migration and record it in the same transaction
    try {
      await client.query('BEGIN');
      await client.query(`SET search_path TO ${schema}, public;`);
      await client.query(sql);
      await client.query(
        `INSERT INTO ${schema}.${MIGRATION_TABLE} (filename, hash) VALUES ($1, $2)`,
        [file, hash]
      );
      await client.query('COMMIT');
      console.log(`✅ [${schema}] Applied: ${file}`);
    } catch (err) {
      await client.query('ROLLBACK');
      console.error(`❌ [${schema}] Failed to apply: ${file}`);
      console.error(err.message);
      process.exit(1);
    }
  }
}

async function migrateAll({ dir, connectionString }) {
  const client = new Client({ connectionString });
  await client.connect();
  try {
    // Always run migrations for the public schema first
    await applyMigrations({ client, schema: 'public', dir });

    // Then run the same migrations against every tenant schema
    const { rows: tenants } = await client.query(`SELECT schema_name FROM public.tenant`);
    for (const tenant of tenants) {
      const schema = tenant.schema_name;
      await applyMigrations({ client, schema, dir });
    }
    console.log('🚀 All migrations applied.');
  } finally {
    await client.end();
  }
}

module.exports = { applyMigrations, migrateAll };
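For reference, a migrations directory that passes the naming and ordering checks above might look like this (the filenames are hypothetical; the numeric prefixes must start at 0001 and increase without gaps):

migrations/
  0001_create_tenant.sql
  0002_create_users.sql
  0003_add_users_email_index.sql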
Note that this is intentionally written in JavaScript rather than TypeScript: these scripts are expected to run during application startup, where a TypeScript compiler and runtime are unlikely to be available, and are unnecessary.
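For example, a minimal sketch of running the migrations at startup. The app.js bootstrap, the ./migrations path, and the use of DATABASE_URL here are assumptions; only migrateAll and its { dir, connectionString } options come from the code above.

// app.js (hypothetical) - run all migrations before the rest of the app starts
const { migrateAll } = require('./migrate');

async function start() {
  await migrateAll({
    dir: './migrations',                         // directory of NNNN_description.sql files
    connectionString: process.env.DATABASE_URL,  // same connection string the CLI builds
  });
  // ...start the web server, queue workers, etc. once every schema is current
}

start().catch(err => {
  console.error('Startup failed:', err);
  process.exit(1);
});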
This script assumes that a public.tenant table like this exists:
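A minimal sketch of that table, assuming only the schema_name column that migrateAll reads; the id column and the sample row are illustrative, not part of the gist.

CREATE TABLE IF NOT EXISTS public.tenant (
  id SERIAL PRIMARY KEY,            -- illustrative surrogate key (assumption)
  schema_name TEXT UNIQUE NOT NULL  -- read by migrateAll via SELECT schema_name FROM public.tenant
);

-- Each row names a tenant schema to migrate after public; the value below is hypothetical.
INSERT INTO public.tenant (schema_name) VALUES ('tenant_acme');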