@predragnikolic
Created June 22, 2022 09:40
Sql migration thing :D
-- @UP
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- Migration
-- This table will keep a list of migrations that have been run on this database.
--
CREATE TABLE migration (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    file TEXT NOT NULL,
    hash TEXT NOT NULL,
    created_at TIMESTAMP DEFAULT NOW()
);
-- @DOWN
DROP TABLE migration;
-- @UP
CREATE TABLE "user"
(
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    created_at TIMESTAMP NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMP NOT NULL DEFAULT NOW(),
    deleted_at TIMESTAMP,
    email TEXT NOT NULL UNIQUE,
    password TEXT NOT NULL,
    first_name TEXT NOT NULL,
    last_name TEXT NOT NULL
);
-- @DOWN
DROP TABLE "user";
-- @UP
CREATE TABLE item
(
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    created_at TIMESTAMP NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMP NOT NULL DEFAULT NOW(),
    deleted_at TIMESTAMP,
    name TEXT NOT NULL,
    user_id UUID NOT NULL REFERENCES "user"
);
-- @DOWN
DROP TABLE item;

{
  "compilerOptions": {
    "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
    "module": "CommonJS", /* Specify what module code is generated. */
    "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */
    "rootDir": ".", /* Specify the root folder within your source files. */
    "baseUrl": ".", /* Specify the base directory to resolve non-relative module names. */
    "paths": {
      "@/*": ["./src/*"]
    }, /* Specify a set of entries that re-map imports to additional lookup locations. */
    "outDir": "dist",
    "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
    "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
    "strict": true, /* Enable all strict type-checking options. */
    "skipLibCheck": true /* Skip type checking all .d.ts files. */
  },
  "include": ["./src"]
}

// https://www.antoniovdlc.me/a-look-at-postgresql-migrations-in-node/
import { sql } from "@/db/sql"
import fs from "fs"
import crypto from "crypto"

type Migration = {
    id?: string // executed migrations have an id; local, not-yet-run ones do not
    file: string
    hash: string
}

const isDownMigration = process.argv.includes('--down')

async function migrate() {
    const executedMigrations = await getExecutedMigrations()
    const localMigrations = await getLocalMigrations()
    validateMigrationsOrder(executedMigrations, localMigrations) // can throw
    validateMigrationsHashes(executedMigrations, localMigrations) // can throw
    if (isDownMigration) {
        await runDownMigration(executedMigrations, localMigrations)
        return
    }
    await runAllUpMigration(executedMigrations, localMigrations)
}

async function getLocalMigrations(): Promise<Migration[]> {
    let localMigrationFiles = await fs.promises.readdir(__dirname);
    localMigrationFiles = localMigrationFiles
        .filter((file) => file.endsWith(".sql"))
        // readdir order is not guaranteed, so sort to keep the migration order deterministic
        .sort()
    const localMigrations = await Promise.all(localMigrationFiles.map(async (file): Promise<Migration> => {
        const content = await fs.promises.readFile(`${__dirname}/${file}`, {
            encoding: 'utf-8'
        })
        return {
            file,
            hash: hashString(content)
        }
    }))
    return localMigrations
}

async function getExecutedMigrations(): Promise<Migration[]> {
    let executedMigrations: Migration[] = [];
    try {
        executedMigrations = await sql<Migration[]>`SELECT id, file, hash FROM migration`;
    } catch {
        // Most likely the migration table itself does not exist yet, so nothing has been run.
        console.warn("First migration");
    }
    return executedMigrations
}

function validateMigrationsOrder(executedMigrations: Migration[], localMigrations: Migration[]) {
    executedMigrations.forEach((executedMigration, i) => {
        const localMigration = localMigrations[i]
        if (localMigration && executedMigration.file !== localMigration.file) {
            throw new Error("Migration order is not correct.")
        }
    })
}

function validateMigrationsHashes(executedMigrations: Migration[], localMigrations: Migration[]) {
    executedMigrations.forEach((executedMigration, i) => {
        const localMigration = localMigrations[i]
        if (localMigration && executedMigration.hash !== localMigration.hash) {
            throw new Error("An already-run migration has been modified. Check this file: " + executedMigration.file)
        }
    })
}

function hashString(content: string) {
    return crypto.createHash("sha1").update(content, "utf8").digest("hex")
}

function getDownMigration(migrationContent: string): string | null {
    // Everything from the "-- @DOWN" marker to the end of the file is the down migration.
    const downMigrationRegex = /\s(--\s*@DOWN[\s\S]*)/i
    const downMigration = downMigrationRegex.exec(migrationContent)
    if (!downMigration) return null
    return downMigration[1]
}

function getUpMigration(migrationContent: string): string {
    // The up migration is the file content with the down section stripped out.
    let upMigration = migrationContent
    const downMigration = getDownMigration(migrationContent)
    if (downMigration) upMigration = upMigration.replace(downMigration, "")
    return upMigration
}

async function runDownMigration(executedMigrations: Migration[], localMigrations: Migration[]) {
    if (executedMigrations.length === 0) throw new Error('Cannot run down migration. There are no executed migrations.')
    const lastExecutedMigration = executedMigrations.at(-1)
    if (!lastExecutedMigration) throw new Error('lastExecutedMigration is missing')
    const lastExecutedMigrationId = lastExecutedMigration.id
    if (!lastExecutedMigrationId) throw new Error('lastExecutedMigrationId is missing')
    const migrationToRevert = localMigrations.find(migration => migration.file === lastExecutedMigration.file)
    if (!migrationToRevert) throw new Error(`Cannot run down migration, because "${lastExecutedMigration.file}" was not found.`)
    try {
        await sql.begin(async sql => {
            console.log("START DOWN MIGRATION:", migrationToRevert.file)
            const migrationContent = await fs.promises.readFile(`${__dirname}/${migrationToRevert.file}`, {
                encoding: 'utf-8'
            })
            const downMigration = getDownMigration(migrationContent)
            if (!downMigration) throw new Error('No down migration is specified in this migration. File: ' + migrationToRevert.file)
            console.log(downMigration)
            await sql`DELETE FROM migration WHERE id = ${lastExecutedMigrationId};`;
            await sql.unsafe(downMigration)
            console.log("END DOWN MIGRATION:", migrationToRevert.file)
        })
    } catch (error) {
        console.error('Down migration failed:', error)
        process.exit(1);
    } finally {
        await sql.end()
    }
}

async function runAllUpMigration(executedMigrations: Migration[], localMigrations: Migration[]) {
    // Get outstanding migrations (local migrations that have not been executed yet)
    const executedFiles = executedMigrations.map(executedMigration => executedMigration.file)
    const migrationsToRun = localMigrations.filter((localMigration) => !executedFiles.includes(localMigration.file))
    try {
        await sql.begin(async sql => {
            for (const migration of migrationsToRun) {
                console.log("START:", migration.file)
                const migrationContent = await fs.promises.readFile(`${__dirname}/${migration.file}`, {
                    encoding: 'utf-8'
                })
                const upMigration = getUpMigration(migrationContent)
                console.log(upMigration)
                await sql.unsafe(upMigration)
                const hashedMigrationContent = hashString(migrationContent)
                await sql`INSERT INTO migration (file, hash) VALUES (${migration.file}, ${hashedMigrationContent})`;
                console.log("END:", migration.file)
            }
        })
    } catch (error) {
        console.error('Migrations failed:', error)
        process.exit(1);
    } finally {
        await sql.end()
    }
    console.log('Migration process is done.')
}

migrate()
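
The runner only picks up .sql files that sit next to migrate.ts, executes them in file-name order, and splits each file on the -- @UP / -- @DOWN markers, so any new migration file has to follow that layout and sort after the existing ones. A minimal scaffolding helper as a sketch (the create-migration.ts name and the timestamp prefix are assumptions, not part of the original gist):

// create-migration.ts (hypothetical helper, not part of the gist)
// Writes an empty migration file next to migrate.ts using the -- @UP / -- @DOWN
// layout that getUpMigration()/getDownMigration() expect. The timestamp prefix is
// just one way to keep the files sorted in execution order.
import fs from "fs"

const name = process.argv[2]
if (!name) {
    console.error("Usage: ts-node create-migration.ts <name>")
    process.exit(1)
}

const file = `${__dirname}/${Date.now()}_${name}.sql`
fs.writeFileSync(file, "-- @UP\n\n-- @DOWN\n")
console.log("Created", file)
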
{
  "name": "backend",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "start": "npm run db:migrate && npm run db:generate-types && npm run start:backend",
    "build": "npx tsc",
    "serve": "node --require tsconfig-paths/register ./dist/backend/src/index.js",
    "db:generate-types": "node --require dotenv/config node_modules/.bin/pg-to-ts generate",
    "db:migrate": "ts-node --require tsconfig-paths/register ./src/migrations/migrate.ts",
    "db:migrate:down": "ts-node --require tsconfig-paths/register ./src/migrations/migrate.ts --down",
    "start:backend": "nodemon --require tsconfig-paths/register ./src/index.ts"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "@types/bcrypt": "^5.0.0",
    "@types/express": "^4.17.13",
    "@types/node": "^17.0.42",
    "@types/uuid4": "^2.0.0",
    "nodemon": "^2.0.16",
    "npm": "^8.12.2",
    "pg-to-ts": "^4.1.0",
    "ts-node": "^10.8.1",
    "tsconfig-paths": "^4.0.0",
    "typescript": "^4.7.3"
  },
  "dependencies": {
    "bcrypt": "^5.0.1",
    "dotenv": "^16.0.1",
    "express": "^4.18.1",
    "postgres": "^3.2.4",
    "uuid4": "^2.0.2"
  }
}

// see docs:
// https://github.com/porsager/postgres
import postgres from 'postgres'

export const sql = postgres({
    database: "primer",
    host: "localhost",
    port: 5432
})
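
The connection settings above are hard-coded. Since dotenv is already a dependency, the same file could read them from the environment instead; a sketch, assuming DATABASE_* variable names that are not part of the original gist:

// Hypothetical variant of src/db/sql.ts: read the connection settings from a
// .env file instead of hard-coding them (the variable names are assumptions).
import "dotenv/config"
import postgres from "postgres"

export const sql = postgres({
    database: process.env.DATABASE_NAME ?? "primer",
    host: process.env.DATABASE_HOST ?? "localhost",
    port: Number(process.env.DATABASE_PORT ?? 5432)
})
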
@predragnikolic (Author)

It was fun to make, but I would not recommend using it in production.
