#!/usr/bin/env npx tsx
/**
 * Database Migration Runner
 *
 * Runs SQL migrations from backend/migrations/*.sql in order.
 * Tracks applied migrations in the schema_migrations table.
 *
 * Usage:
 *   npx tsx src/db/run-migrations.ts
 *
 * Environment:
 *   DATABASE_URL or CANNAIQ_DB_* variables
 */
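
// Example local configuration (a sketch only; these mirror the defaults
// hardcoded in getConnectionString below, so set whichever values differ
// in your environment):
//
//   DATABASE_URL=postgresql://dutchie:dutchie_local_pass@localhost:54320/dutchie_menus
//
// or, equivalently, the individual variables:
//
//   CANNAIQ_DB_HOST=localhost
//   CANNAIQ_DB_PORT=54320
//   CANNAIQ_DB_NAME=dutchie_menus
//   CANNAIQ_DB_USER=dutchie
//   CANNAIQ_DB_PASS=dutchie_local_pass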

import { Pool } from 'pg';
import * as fs from 'fs/promises';
import * as path from 'path';
import dotenv from 'dotenv';

dotenv.config();

function getConnectionString(): string {
  if (process.env.DATABASE_URL) {
    return process.env.DATABASE_URL;
  }
  if (process.env.CANNAIQ_DB_URL) {
    return process.env.CANNAIQ_DB_URL;
  }
  const host = process.env.CANNAIQ_DB_HOST || 'localhost';
  const port = process.env.CANNAIQ_DB_PORT || '54320';
  const name = process.env.CANNAIQ_DB_NAME || 'dutchie_menus';
  const user = process.env.CANNAIQ_DB_USER || 'dutchie';
  const pass = process.env.CANNAIQ_DB_PASS || 'dutchie_local_pass';
  return `postgresql://${user}:${pass}@${host}:${port}/${name}`;
}

interface MigrationFile {
  filename: string;
  number: number;
  path: string;
}

async function getMigrationFiles(migrationsDir: string): Promise<MigrationFile[]> {
  const files = await fs.readdir(migrationsDir);
  const migrations: MigrationFile[] = files
    .filter(f => f.endsWith('.sql'))
    .map(filename => {
      // Extract the number from a filename like "005_api_tokens.sql" or "073_proxy_timezone.sql"
      const match = filename.match(/^(\d+)_/);
      if (!match) return null;
      return {
        filename,
        number: parseInt(match[1], 10),
        path: path.join(migrationsDir, filename),
      };
    })
    .filter((m): m is MigrationFile => m !== null)
    .sort((a, b) => a.number - b.number);
  return migrations;
}

async function ensureMigrationsTable(pool: Pool): Promise<void> {
  // Migrate to filename-based tracking (handles duplicate version numbers).
  // Check whether the old version-based PK exists.
  const pkCheck = await pool.query(`
    SELECT constraint_name
    FROM information_schema.table_constraints
    WHERE table_name = 'schema_migrations'
      AND constraint_type = 'PRIMARY KEY'
  `);

  if (pkCheck.rows.length === 0) {
    // Table doesn't exist; create it with filename as the PK
    await pool.query(`
      CREATE TABLE IF NOT EXISTS schema_migrations (
        filename VARCHAR(255) NOT NULL PRIMARY KEY,
        version VARCHAR(10),
        name VARCHAR(255),
        applied_at TIMESTAMPTZ DEFAULT NOW()
      )
    `);
  } else {
    // Table exists - add the filename column if missing
    await pool.query(`
      ALTER TABLE schema_migrations
      ADD COLUMN IF NOT EXISTS filename VARCHAR(255)
    `);
    // Populate filename from version + name for existing rows
    await pool.query(`
      UPDATE schema_migrations
      SET filename = version || '_' || name || '.sql'
      WHERE filename IS NULL
    `);
  }
}

async function getAppliedMigrations(pool: Pool): Promise<Set<string>> {
  // Prefer filename; fall back to the version_name combo for legacy rows
  const result = await pool.query(`
    SELECT COALESCE(filename, version || '_' || name || '.sql') AS filename
    FROM schema_migrations
  `);
  return new Set(result.rows.map(r => r.filename));
}

async function applyMigration(pool: Pool, migration: MigrationFile): Promise<void> {
  const sql = await fs.readFile(migration.path, 'utf-8');

  // Derive version and name from a filename like "005_api_tokens.sql"
  const version = String(migration.number).padStart(3, '0');
  const name = migration.filename.replace(/^\d+_/, '').replace(/\.sql$/, '');

  // Run the migration and record it in the same transaction, so a failed
  // migration is never marked as applied
  const client = await pool.connect();
  try {
    await client.query('BEGIN');

    // Run the migration SQL
    await client.query(sql);

    // Record that it was applied - INSERT with ON CONFLICT for safety
    await client.query(`
      INSERT INTO schema_migrations (filename, version, name)
      VALUES ($1, $2, $3)
      ON CONFLICT DO NOTHING
    `, [migration.filename, version, name]);

    await client.query('COMMIT');
  } catch (error) {
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
}

async function main() {
  const pool = new Pool({ connectionString: getConnectionString() });

  // Migrations directory relative to this file
  const migrationsDir = path.resolve(__dirname, '../../migrations');

  console.log('╔════════════════════════════════════════════════════════════╗');
  console.log('║                 DATABASE MIGRATION RUNNER                  ║');
  console.log('╚════════════════════════════════════════════════════════════╝');
  console.log(`Migrations dir: ${migrationsDir}`);
  console.log('');

  try {
    // Ensure the tracking table exists
    await ensureMigrationsTable(pool);

    // Get all migration files
    const allMigrations = await getMigrationFiles(migrationsDir);
    console.log(`Found ${allMigrations.length} migration files`);

    // Get already-applied migrations
    const applied = await getAppliedMigrations(pool);
    console.log(`Already applied: ${applied.size} migrations`);
    console.log('');

    // Find pending migrations (compare by filename)
    const pending = allMigrations.filter(m => !applied.has(m.filename));

    if (pending.length === 0) {
      console.log('✅ No pending migrations. Database is up to date.');
      return; // the finally block closes the pool
    }

    console.log(`Pending migrations: ${pending.length}`);
    console.log('─'.repeat(60));

    // Apply each pending migration
    for (const migration of pending) {
      process.stdout.write(`  ${migration.filename}... `);
      try {
        await applyMigration(pool, migration);
        console.log('✅');
      } catch (error: any) {
        console.log('❌');
        console.error(`\nError applying ${migration.filename}:`);
        console.error(error.message);
        process.exit(1);
      }
    }

    console.log('');
    console.log('═'.repeat(60));
    console.log(`✅ Applied ${pending.length} migrations successfully`);
  } catch (error: any) {
    console.error('Migration runner failed:', error.message);
    process.exit(1);
  } finally {
    await pool.end();
  }
}

main();
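
// To inspect what has been applied (for example from psql), the tracking
// table created by ensureMigrationsTable above can be queried directly:
//
//   SELECT filename, applied_at FROM schema_migrations ORDER BY applied_at;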