Files
motovaultpro/backend/src/_system/migrations/run-all.ts
Eric Gullickson cdfba3c1a8
All checks were successful
Deploy to Staging / Build Images (pull_request) Successful in 2m23s
Deploy to Staging / Deploy to Staging (pull_request) Successful in 28s
Deploy to Staging / Verify Staging (pull_request) Successful in 7s
Deploy to Staging / Notify Staging Ready (pull_request) Successful in 6s
Deploy to Staging / Notify Staging Failure (pull_request) Has been skipped
fix: Add audit-log to migration order (refs #10)
The audit_logs table migration was not being executed because the
audit-log feature was missing from MIGRATION_ORDER in run-all.ts,
causing 500 errors when accessing the audit logs API.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-11 11:42:42 -06:00

140 lines
5.2 KiB
TypeScript

/**
* @ai-summary Orchestrates all feature migrations in dependency order
*/
import { Pool } from 'pg';
import { readFileSync, readdirSync } from 'fs';
import { join, resolve } from 'path';
import { appConfig } from '../../core/config/config-loader';
// Shared Postgres connection pool for the whole migration run; closed in
// main()'s finally block, so this module is single-shot (run as a script).
const pool = new Pool({
connectionString: appConfig.getDatabaseUrl(),
});
// Define migration order based on dependencies and packaging layout.
// We package migrations under /app/migrations with two roots: features/ and core/.
// The update_updated_at_column() function is defined in features/vehicles first,
// and user-preferences trigger depends on it; so run vehicles before core/user-preferences.
// NOTE: every feature that ships a migrations/ directory MUST be listed here —
// anything omitted is silently never executed (the audit-log entry below was
// missing until refs #10, which caused 500s on the audit logs API).
const MIGRATION_ORDER = [
'features/vehicles', // Primary entity, defines update_updated_at_column()
'features/platform', // Normalized make/model/trim schema for dropdowns
'features/documents', // Depends on vehicles; provides documents table
'core/user-preferences', // Depends on update_updated_at_column()
'features/fuel-logs', // Depends on vehicles
'features/maintenance', // Depends on vehicles
'features/stations', // Independent
'features/admin', // Admin role management and oversight; depends on update_updated_at_column()
'features/backup', // Admin backup feature; depends on update_updated_at_column()
'features/notifications', // Depends on maintenance and documents
'features/user-profile', // User profile management; independent
'features/terms-agreement', // Terms & Conditions acceptance audit trail
'features/audit-log', // Centralized audit logging; independent
];
// Absolute root of the packaged migration SQL. The Dockerfile sets
// MIGRATIONS_DIR inside the image; the repo-relative join() is the fallback
// for local development. `||` (not `??`) so an empty env var also falls back.
const MIGRATIONS_DIR = resolve(
  process.env['MIGRATIONS_DIR'] || join(__dirname, '../../../migrations'),
);
/**
 * Returns the migrations that have already been applied, grouped as
 * feature name -> set of executed file names.
 *
 * Ensures the `_migrations` bookkeeping table exists before reading it.
 * Both queries are retried every 2s for up to ~60s so the runner survives
 * transient database restarts.
 */
async function getExecutedMigrations(): Promise<Record<string, Set<string>>> {
  // Re-attempt an async operation until it succeeds or the deadline passes.
  const retry = async <T>(op: () => Promise<T>, timeoutMs = 60000): Promise<T> => {
    const deadline = Date.now() + timeoutMs;
    for (;;) {
      try {
        return await op();
      } catch (err) {
        if (Date.now() > deadline) throw err;
        await new Promise(wake => setTimeout(wake, 2000));
      }
    }
  };
  await retry(() => pool.query(`
    CREATE TABLE IF NOT EXISTS _migrations (
      id SERIAL PRIMARY KEY,
      feature VARCHAR(100) NOT NULL,
      file VARCHAR(255) NOT NULL,
      executed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
      UNIQUE(feature, file)
    );
  `));
  const result = await retry(() => pool.query('SELECT feature, file FROM _migrations'));
  const executed: Record<string, Set<string>> = {};
  for (const { feature, file } of result.rows) {
    const seen = executed[feature] ?? (executed[feature] = new Set<string>());
    seen.add(file);
  }
  return executed;
}
/**
 * Applies all pending .sql migrations for one feature, in lexicographic
 * filename order.
 *
 * FIX: each migration file and its bookkeeping INSERT into `_migrations` now
 * run inside a single transaction on one checked-out client. Previously they
 * were two independent pool queries, so a crash between them left the
 * migration applied but unrecorded, and the next boot would re-run
 * non-idempotent DDL. NOTE(review): statements that cannot run inside a
 * transaction (e.g. CREATE INDEX CONCURRENTLY) would need an opt-out — none
 * are visible from here; confirm before adding one to a migration file.
 *
 * @param featureName Path segment under MIGRATIONS_DIR, e.g. 'features/vehicles'
 * @throws Rethrows any filesystem or database error after logging it, so the
 *         orchestrator in main() stops at the first failure.
 */
async function runFeatureMigrations(featureName: string) {
  const migrationDir = join(MIGRATIONS_DIR, featureName, 'migrations');
  try {
    // Guard per-feature in case the DB becomes available slightly later on cold start.
    const ping = async (timeoutMs = 60000) => {
      const start = Date.now();
      for (;;) {
        try {
          await pool.query('SELECT 1');
          return;
        } catch (e) {
          if (Date.now() - start > timeoutMs) throw e;
          await new Promise(r => setTimeout(r, 2000));
        }
      }
    };
    await ping();
    // Deterministic apply order: sorted *.sql filenames only.
    const files = readdirSync(migrationDir)
      .filter(f => f.endsWith('.sql'))
      .sort();
    const executed = await getExecutedMigrations();
    const already = executed[featureName] || new Set<string>();
    for (const file of files) {
      if (already.has(file)) {
        console.log(`↷ Skipping already executed migration: ${featureName}/${file}`);
        continue;
      }
      const sql = readFileSync(join(migrationDir, file), 'utf-8');
      console.log(`Running migration: ${featureName}/${file}`);
      // Execute the migration and record it atomically on a single connection.
      const client = await pool.connect();
      try {
        await client.query('BEGIN');
        await client.query(sql);
        await client.query(
          'INSERT INTO _migrations(feature, file) VALUES ($1, $2) ON CONFLICT DO NOTHING',
          [featureName, file],
        );
        await client.query('COMMIT');
      } catch (migrationError) {
        // Best-effort rollback; never let a rollback failure mask the real error.
        await client.query('ROLLBACK').catch(() => undefined);
        throw migrationError;
      } finally {
        client.release();
      }
      console.log(`✅ Completed: ${featureName}/${file}`);
    }
  } catch (error) {
    console.error(`❌ Failed migration for ${featureName}:`, error);
    throw error;
  }
}
/**
 * Entry point: waits for the database to become reachable, then applies every
 * feature's migrations strictly in MIGRATION_ORDER, stopping at the first
 * failure. Exits the process with code 1 on error and always closes the pool.
 */
async function main() {
  try {
    console.log('Starting migration orchestration...');
    console.log(`Using migrations directory: ${MIGRATIONS_DIR}`);
    // Poll until the database answers a trivial query (handles cold starts).
    const waitForDb = async (timeoutMs = 60000) => {
      const deadline = Date.now() + timeoutMs;
      for (;;) {
        try {
          await pool.query('SELECT 1');
          return;
        } catch (err) {
          if (Date.now() > deadline) throw err;
          await new Promise(resume => setTimeout(resume, 2000));
        }
      }
    };
    await waitForDb();
    // Dependency order matters — see the MIGRATION_ORDER comments.
    for (const feature of MIGRATION_ORDER) {
      console.log(`\nMigrating feature: ${feature}`);
      await runFeatureMigrations(feature);
    }
    console.log('\n✅ All migrations completed successfully');
  } catch (error) {
    console.error('Migration failed:', error);
    process.exit(1);
  } finally {
    await pool.end();
  }
}
// Run only when executed directly (node run-all.js), not when imported.
if (require.main === module) {
  void main(); // main() handles its own errors, exit code, and pool shutdown
}