feat: Backup & Restore - Manual backup tested complete.

This commit is contained in:
Eric Gullickson
2025-12-25 10:50:09 -06:00
parent 8ef6b3d853
commit 0357ce391f
38 changed files with 5734 additions and 1415447 deletions

View File

@@ -24,8 +24,8 @@ RUN npm run build
# Stage 2: Production runtime
FROM node:lts-alpine AS production
# Install runtime dependencies only
RUN apk add --no-cache dumb-init curl
# Install runtime dependencies only (postgresql-client for backup/restore)
RUN apk add --no-cache dumb-init curl postgresql-client
# Set working directory
WORKDIR /app

View File

@@ -27,6 +27,7 @@
"opossum": "^8.0.0",
"pg": "^8.13.1",
"resend": "^3.0.0",
"tar": "^7.4.3",
"winston": "^3.17.0",
"zod": "^3.24.1"
},
@@ -80,6 +81,7 @@
"integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@babel/code-frame": "^7.27.1",
"@babel/generator": "^7.28.5",
@@ -1193,6 +1195,18 @@
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/@isaacs/fs-minipass": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz",
"integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==",
"license": "ISC",
"dependencies": {
"minipass": "^7.0.4"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/@istanbuljs/load-nyc-config": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz",
@@ -1948,6 +1962,7 @@
"integrity": "sha512-1N9SBnWYOJTrNZCdh/yJE+t910Y128BoyY+zBLWhL3r0TYzlTmFdXrPwHL9DyFZmlEXNQQolTZh3KHV31QDhyA==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"undici-types": "~6.21.0"
}
@@ -2080,6 +2095,7 @@
"integrity": "sha512-6/cmF2piao+f6wSxUsJLZjck7OQsYyRtcOZS02k7XINSNlz93v6emM8WutDQSXnroG2xwYlEVHJI+cPA7CPM3Q==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@typescript-eslint/scope-manager": "8.50.0",
"@typescript-eslint/types": "8.50.0",
@@ -2324,6 +2340,7 @@
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
"dev": true,
"license": "MIT",
"peer": true,
"bin": {
"acorn": "bin/acorn"
},
@@ -2796,6 +2813,7 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"baseline-browser-mapping": "^2.9.0",
"caniuse-lite": "^1.0.30001759",
@@ -3000,6 +3018,15 @@
"node": ">= 6"
}
},
"node_modules/chownr": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
"integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
"license": "BlueOak-1.0.0",
"engines": {
"node": ">=18"
}
},
"node_modules/ci-info": {
"version": "3.9.0",
"resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz",
@@ -3641,6 +3668,7 @@
"integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.8.0",
"@eslint-community/regexpp": "^4.12.1",
@@ -4962,6 +4990,7 @@
"integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@jest/core": "^29.7.0",
"@jest/types": "^29.6.3",
@@ -5891,7 +5920,6 @@
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
"integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
"license": "MIT",
"peer": true,
"dependencies": {
"js-tokens": "^3.0.0 || ^4.0.0"
},
@@ -6069,6 +6097,18 @@
"node": ">=16 || 14 >=14.17"
}
},
"node_modules/minizlib": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz",
"integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==",
"license": "MIT",
"dependencies": {
"minipass": "^7.1.2"
},
"engines": {
"node": ">= 18"
}
},
"node_modules/mnemonist": {
"version": "0.40.3",
"resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.40.3.tgz",
@@ -6482,6 +6522,7 @@
"resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz",
"integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==",
"license": "MIT",
"peer": true,
"dependencies": {
"pg-connection-string": "^2.9.1",
"pg-pool": "^3.10.1",
@@ -7203,7 +7244,6 @@
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz",
"integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"loose-envify": "^1.1.0"
}
@@ -7673,6 +7713,31 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/tar": {
"version": "7.5.2",
"resolved": "https://registry.npmjs.org/tar/-/tar-7.5.2.tgz",
"integrity": "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg==",
"license": "BlueOak-1.0.0",
"dependencies": {
"@isaacs/fs-minipass": "^4.0.0",
"chownr": "^3.0.0",
"minipass": "^7.1.2",
"minizlib": "^3.1.0",
"yallist": "^5.0.0"
},
"engines": {
"node": ">=18"
}
},
"node_modules/tar/node_modules/yallist": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
"integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
"license": "BlueOak-1.0.0",
"engines": {
"node": ">=18"
}
},
"node_modules/test-exclude": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",
@@ -7744,6 +7809,7 @@
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
"dev": true,
"license": "MIT",
"peer": true,
"engines": {
"node": ">=12"
},
@@ -7901,6 +7967,7 @@
"integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@cspotcode/source-map-support": "^0.8.0",
"@tsconfig/node10": "^1.0.7",
@@ -7988,6 +8055,7 @@
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
"dev": true,
"license": "Apache-2.0",
"peer": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"

View File

@@ -38,7 +38,8 @@
"file-type": "^16.5.4",
"resend": "^3.0.0",
"node-cron": "^3.0.3",
"auth0": "^4.12.0"
"auth0": "^4.12.0",
"tar": "^7.4.3"
},
"devDependencies": {
"@types/node": "^22.0.0",

View File

@@ -7,6 +7,15 @@ import cron from 'node-cron';
import { logger } from '../logging/logger';
import { processScheduledNotifications } from '../../features/notifications/jobs/notification-processor.job';
import { processAccountPurges } from '../../features/user-profile/jobs/account-purge.job';
import {
processScheduledBackups,
setBackupJobPool,
} from '../../features/backup/jobs/backup-scheduled.job';
import {
processBackupRetention,
setBackupCleanupJobPool,
} from '../../features/backup/jobs/backup-cleanup.job';
import { pool } from '../config/database';
let schedulerInitialized = false;
@@ -18,6 +27,10 @@ export function initializeScheduler(): void {
logger.info('Initializing cron scheduler');
// Initialize backup job pools
setBackupJobPool(pool);
setBackupCleanupJobPool(pool);
// Daily notification processing at 8 AM
cron.schedule('0 8 * * *', async () => {
logger.info('Running scheduled notification job');
@@ -47,8 +60,38 @@ export function initializeScheduler(): void {
}
});
// Check for scheduled backups every minute
cron.schedule('* * * * *', async () => {
logger.debug('Checking for scheduled backups');
try {
await processScheduledBackups();
} catch (error) {
logger.error('Scheduled backup check failed', {
error: error instanceof Error ? error.message : String(error)
});
}
});
// Backup retention cleanup at 4 AM daily (after backups complete)
cron.schedule('0 4 * * *', async () => {
logger.info('Running backup retention cleanup job');
try {
const result = await processBackupRetention();
logger.info('Backup retention cleanup completed', {
processed: result.processed,
totalDeleted: result.totalDeleted,
totalFreedBytes: result.totalFreedBytes,
errors: result.errors.length,
});
} catch (error) {
logger.error('Backup retention cleanup failed', {
error: error instanceof Error ? error.message : String(error)
});
}
});
schedulerInitialized = true;
logger.info('Cron scheduler initialized - notification job (8 AM) and account purge job (2 AM) scheduled daily');
logger.info('Cron scheduler initialized - notification (8 AM), account purge (2 AM), backup check (every min), retention cleanup (4 AM)');
}
export function isSchedulerInitialized(): boolean {

View File

@@ -24,9 +24,6 @@ import {
UpdateProfileInput,
PromoteToAdminInput,
} from './users.validation';
import { AdminRepository } from '../data/admin.repository';
import { StationOversightService } from '../domain/station-oversight.service';
import { StationsController } from './stations.controller';
import { CatalogController } from './catalog.controller';
import { VehicleCatalogService } from '../domain/vehicle-catalog.service';
import { CatalogImportService } from '../domain/catalog-import.service';
@@ -34,16 +31,12 @@ import { PlatformCacheService } from '../../platform/domain/platform-cache.servi
import { cacheService } from '../../../core/config/redis';
import { pool } from '../../../core/config/database';
import { CommunityStationsController } from '../../stations/api/community-stations.controller';
import { registerBackupRoutes } from '../../backup/api/backup.routes';
export const adminRoutes: FastifyPluginAsync = async (fastify) => {
const adminController = new AdminController();
const usersController = new UsersController();
// Initialize station oversight dependencies
const adminRepository = new AdminRepository(pool);
const stationOversightService = new StationOversightService(pool, adminRepository);
const stationsController = new StationsController(stationOversightService);
// Initialize community stations dependencies
const communityStationsController = new CommunityStationsController();
@@ -323,45 +316,7 @@ export const adminRoutes: FastifyPluginAsync = async (fastify) => {
handler: catalogController.bulkDeleteCatalogEntity.bind(catalogController)
});
// Phase 4: Station oversight endpoints
// GET /api/admin/stations - List all stations globally
fastify.get('/admin/stations', {
preHandler: [fastify.requireAdmin],
handler: stationsController.listAllStations.bind(stationsController)
});
// POST /api/admin/stations - Create new station
fastify.post('/admin/stations', {
preHandler: [fastify.requireAdmin],
handler: stationsController.createStation.bind(stationsController)
});
// PUT /api/admin/stations/:stationId - Update station
fastify.put('/admin/stations/:stationId', {
preHandler: [fastify.requireAdmin],
handler: stationsController.updateStation.bind(stationsController)
});
// DELETE /api/admin/stations/:stationId - Delete station (soft delete by default, ?force=true for hard delete)
fastify.delete('/admin/stations/:stationId', {
preHandler: [fastify.requireAdmin],
handler: stationsController.deleteStation.bind(stationsController)
});
// GET /api/admin/users/:userId/stations - Get user's saved stations
fastify.get('/admin/users/:userId/stations', {
preHandler: [fastify.requireAdmin],
handler: stationsController.getUserSavedStations.bind(stationsController)
});
// DELETE /api/admin/users/:userId/stations/:stationId - Remove user's saved station (soft delete by default, ?force=true for hard delete)
fastify.delete('/admin/users/:userId/stations/:stationId', {
preHandler: [fastify.requireAdmin],
handler: stationsController.removeUserSavedStation.bind(stationsController)
});
// Phase 5: Community gas station submission oversight
// Community gas station submission oversight
// GET /api/admin/community-stations - List all submissions with filters
fastify.get('/admin/community-stations', {
@@ -380,4 +335,9 @@ export const adminRoutes: FastifyPluginAsync = async (fastify) => {
preHandler: [fastify.requireAdmin],
handler: communityStationsController.reviewStation.bind(communityStationsController)
});
// ============================================
// Backup & Restore endpoints
// ============================================
await registerBackupRoutes(fastify, { pool });
};

View File

@@ -1,231 +0,0 @@
/**
* @ai-summary HTTP request handlers for admin station oversight
* @ai-context Handles admin operations on global stations and user-saved stations
*/
import { FastifyRequest, FastifyReply } from 'fastify';
import { StationOversightService } from '../domain/station-oversight.service';
import { logger } from '../../../core/logging/logger';
interface StationListQuery {
limit?: string;
offset?: string;
search?: string;
}
interface CreateStationBody {
placeId: string;
name: string;
address: string;
latitude: number;
longitude: number;
priceRegular?: number;
pricePremium?: number;
priceDiesel?: number;
rating?: number;
photoReference?: string;
}
interface UpdateStationBody {
name?: string;
address?: string;
latitude?: number;
longitude?: number;
priceRegular?: number;
pricePremium?: number;
priceDiesel?: number;
rating?: number;
photoReference?: string;
}
interface StationParams {
stationId: string;
}
interface UserStationParams {
userId: string;
stationId: string;
}
interface DeleteQuery {
force?: string;
}
export class StationsController {
constructor(private service: StationOversightService) {}
/**
* GET /api/admin/stations
* List all stations globally with pagination and search
*/
async listAllStations(
request: FastifyRequest<{ Querystring: StationListQuery }>,
reply: FastifyReply
): Promise<void> {
try {
const actorId = request.userContext?.userId;
if (!actorId) {
return reply.code(401).send({ error: 'Unauthorized' });
}
const limit = request.query.limit ? parseInt(request.query.limit, 10) : 100;
const offset = request.query.offset ? parseInt(request.query.offset, 10) : 0;
const search = request.query.search;
const result = await this.service.listAllStations(limit, offset, search);
return reply.code(200).send(result);
} catch (error) {
logger.error('Error listing stations', { error });
return reply.code(500).send({ error: 'Failed to list stations' });
}
}
/**
* POST /api/admin/stations
* Create a new station
*/
async createStation(
request: FastifyRequest<{ Body: CreateStationBody }>,
reply: FastifyReply
): Promise<void> {
try {
const actorId = request.userContext?.userId;
if (!actorId) {
return reply.code(401).send({ error: 'Unauthorized' });
}
// Validate required fields
const { placeId, name, address, latitude, longitude } = request.body;
if (!placeId || !name || !address || latitude === undefined || longitude === undefined) {
return reply.code(400).send({ error: 'Missing required fields: placeId, name, address, latitude, longitude' });
}
const station = await this.service.createStation(actorId, request.body);
return reply.code(201).send(station);
} catch (error: any) {
logger.error('Error creating station', { error });
if (error.message?.includes('duplicate key')) {
return reply.code(409).send({ error: 'Station with this placeId already exists' });
}
return reply.code(500).send({ error: 'Failed to create station' });
}
}
/**
* PUT /api/admin/stations/:stationId
* Update an existing station
*/
async updateStation(
request: FastifyRequest<{ Params: StationParams; Body: UpdateStationBody }>,
reply: FastifyReply
): Promise<void> {
try {
const actorId = request.userContext?.userId;
if (!actorId) {
return reply.code(401).send({ error: 'Unauthorized' });
}
const { stationId } = request.params;
// Validate at least one field to update
if (Object.keys(request.body).length === 0) {
return reply.code(400).send({ error: 'No fields to update' });
}
const station = await this.service.updateStation(actorId, stationId, request.body);
return reply.code(200).send(station);
} catch (error: any) {
logger.error('Error updating station', { error });
if (error.message === 'Station not found') {
return reply.code(404).send({ error: 'Station not found' });
}
return reply.code(500).send({ error: 'Failed to update station' });
}
}
/**
* DELETE /api/admin/stations/:stationId
* Delete a station (soft delete by default, hard delete with ?force=true)
*/
async deleteStation(
request: FastifyRequest<{ Params: StationParams; Querystring: DeleteQuery }>,
reply: FastifyReply
): Promise<void> {
try {
const actorId = request.userContext?.userId;
if (!actorId) {
return reply.code(401).send({ error: 'Unauthorized' });
}
const { stationId } = request.params;
const force = request.query.force === 'true';
await this.service.deleteStation(actorId, stationId, force);
return reply.code(204).send();
} catch (error: any) {
logger.error('Error deleting station', { error });
if (error.message === 'Station not found') {
return reply.code(404).send({ error: 'Station not found' });
}
return reply.code(500).send({ error: 'Failed to delete station' });
}
}
/**
* GET /api/admin/users/:userId/stations
* Get user's saved stations
*/
async getUserSavedStations(
request: FastifyRequest<{ Params: { userId: string } }>,
reply: FastifyReply
): Promise<void> {
try {
const actorId = request.userContext?.userId;
if (!actorId) {
return reply.code(401).send({ error: 'Unauthorized' });
}
const { userId } = request.params;
const stations = await this.service.getUserSavedStations(userId);
return reply.code(200).send(stations);
} catch (error) {
logger.error('Error getting user saved stations', { error });
return reply.code(500).send({ error: 'Failed to get user saved stations' });
}
}
/**
* DELETE /api/admin/users/:userId/stations/:stationId
* Remove user's saved station (soft delete by default, hard delete with ?force=true)
*/
async removeUserSavedStation(
request: FastifyRequest<{ Params: UserStationParams; Querystring: DeleteQuery }>,
reply: FastifyReply
): Promise<void> {
try {
const actorId = request.userContext?.userId;
if (!actorId) {
return reply.code(401).send({ error: 'Unauthorized' });
}
const { userId, stationId } = request.params;
const force = request.query.force === 'true';
await this.service.removeUserSavedStation(actorId, userId, stationId, force);
return reply.code(204).send();
} catch (error: any) {
logger.error('Error removing user saved station', { error });
if (error.message?.includes('not found')) {
return reply.code(404).send({ error: error.message });
}
return reply.code(500).send({ error: 'Failed to remove user saved station' });
}
}
}

View File

@@ -1,436 +0,0 @@
/**
* @ai-summary Station oversight business logic for admin operations
* @ai-context Manages global stations and user-saved stations with audit logging
*/
import { Pool } from 'pg';
import { redis } from '../../../core/config/redis';
import { logger } from '../../../core/logging/logger';
import { AdminRepository } from '../data/admin.repository';
import { StationsRepository } from '../../stations/data/stations.repository';
import { Station, SavedStation } from '../../stations/domain/stations.types';
interface CreateStationData {
placeId: string;
name: string;
address: string;
latitude: number;
longitude: number;
priceRegular?: number;
pricePremium?: number;
priceDiesel?: number;
rating?: number;
photoReference?: string;
}
interface UpdateStationData {
name?: string;
address?: string;
latitude?: number;
longitude?: number;
priceRegular?: number;
pricePremium?: number;
priceDiesel?: number;
rating?: number;
photoReference?: string;
}
interface StationListResult {
total: number;
stations: Station[];
}
export class StationOversightService {
private stationsRepository: StationsRepository;
constructor(
private pool: Pool,
private adminRepository: AdminRepository
) {
this.stationsRepository = new StationsRepository(pool);
}
/**
* List all stations globally with pagination and search
*/
async listAllStations(
limit: number = 100,
offset: number = 0,
search?: string
): Promise<StationListResult> {
try {
let countQuery = 'SELECT COUNT(*) as total FROM station_cache';
let dataQuery = `
SELECT
id, place_id, name, address, latitude, longitude,
price_regular, price_premium, price_diesel, rating, photo_reference, cached_at
FROM station_cache
`;
const params: any[] = [];
// Add search filter if provided
if (search) {
const searchCondition = ` WHERE name ILIKE $1 OR address ILIKE $1`;
countQuery += searchCondition;
dataQuery += searchCondition;
params.push(`%${search}%`);
}
dataQuery += ' ORDER BY cached_at DESC LIMIT $' + (params.length + 1) + ' OFFSET $' + (params.length + 2);
params.push(limit, offset);
const [countResult, dataResult] = await Promise.all([
this.pool.query(countQuery, search ? [`%${search}%`] : []),
this.pool.query(dataQuery, params),
]);
const total = parseInt(countResult.rows[0].total, 10);
const stations = dataResult.rows.map(row => this.mapStationRow(row));
return { total, stations };
} catch (error) {
logger.error('Error listing all stations', { error });
throw error;
}
}
/**
* Create a new station in the cache
*/
async createStation(
actorId: string,
data: CreateStationData
): Promise<Station> {
try {
// Create station using repository
const station: Station = {
id: '', // Will be generated by database
placeId: data.placeId,
name: data.name,
address: data.address,
latitude: data.latitude,
longitude: data.longitude,
priceRegular: data.priceRegular,
pricePremium: data.pricePremium,
priceDiesel: data.priceDiesel,
rating: data.rating,
photoReference: data.photoReference,
};
await this.stationsRepository.cacheStation(station);
// Get the created station
const created = await this.stationsRepository.getCachedStation(data.placeId);
if (!created) {
throw new Error('Failed to retrieve created station');
}
// Invalidate caches
await this.invalidateStationCaches();
// Log audit action
await this.adminRepository.logAuditAction(
actorId,
'CREATE',
undefined,
'station',
data.placeId,
{ name: data.name, address: data.address }
);
logger.info('Station created by admin', { actorId, placeId: data.placeId });
return created;
} catch (error) {
logger.error('Error creating station', { error, data });
throw error;
}
}
/**
* Update an existing station
*/
async updateStation(
actorId: string,
stationId: string,
data: UpdateStationData
): Promise<Station> {
try {
// First verify station exists
const existing = await this.stationsRepository.getCachedStation(stationId);
if (!existing) {
throw new Error('Station not found');
}
// Build update query dynamically based on provided fields
const updates: string[] = [];
const values: any[] = [];
let paramIndex = 1;
if (data.name !== undefined) {
updates.push(`name = $${paramIndex++}`);
values.push(data.name);
}
if (data.address !== undefined) {
updates.push(`address = $${paramIndex++}`);
values.push(data.address);
}
if (data.latitude !== undefined) {
updates.push(`latitude = $${paramIndex++}`);
values.push(data.latitude);
}
if (data.longitude !== undefined) {
updates.push(`longitude = $${paramIndex++}`);
values.push(data.longitude);
}
if (data.priceRegular !== undefined) {
updates.push(`price_regular = $${paramIndex++}`);
values.push(data.priceRegular);
}
if (data.pricePremium !== undefined) {
updates.push(`price_premium = $${paramIndex++}`);
values.push(data.pricePremium);
}
if (data.priceDiesel !== undefined) {
updates.push(`price_diesel = $${paramIndex++}`);
values.push(data.priceDiesel);
}
if (data.rating !== undefined) {
updates.push(`rating = $${paramIndex++}`);
values.push(data.rating);
}
if (data.photoReference !== undefined) {
updates.push(`photo_reference = $${paramIndex++}`);
values.push(data.photoReference);
}
if (updates.length === 0) {
throw new Error('No fields to update');
}
updates.push(`cached_at = NOW()`);
values.push(stationId);
const query = `
UPDATE station_cache
SET ${updates.join(', ')}
WHERE place_id = $${paramIndex}
`;
await this.pool.query(query, values);
// Get updated station
const updated = await this.stationsRepository.getCachedStation(stationId);
if (!updated) {
throw new Error('Failed to retrieve updated station');
}
// Invalidate caches
await this.invalidateStationCaches(stationId);
// Log audit action
await this.adminRepository.logAuditAction(
actorId,
'UPDATE',
undefined,
'station',
stationId,
data
);
logger.info('Station updated by admin', { actorId, stationId });
return updated;
} catch (error) {
logger.error('Error updating station', { error, stationId, data });
throw error;
}
}
/**
* Delete a station (soft delete by default, hard delete with force flag)
*/
async deleteStation(
actorId: string,
stationId: string,
force: boolean = false
): Promise<void> {
try {
// Verify station exists
const existing = await this.stationsRepository.getCachedStation(stationId);
if (!existing) {
throw new Error('Station not found');
}
if (force) {
// Hard delete - remove from both tables
await this.pool.query('DELETE FROM station_cache WHERE place_id = $1', [stationId]);
await this.pool.query('DELETE FROM saved_stations WHERE place_id = $1', [stationId]);
logger.info('Station hard deleted by admin', { actorId, stationId });
} else {
// Soft delete - add deleted_at column if not exists, then set it
// First check if column exists
const columnCheck = await this.pool.query(`
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'station_cache' AND column_name = 'deleted_at'
`);
if (columnCheck.rows.length === 0) {
// Add deleted_at column
await this.pool.query(`
ALTER TABLE station_cache
ADD COLUMN deleted_at TIMESTAMP WITH TIME ZONE
`);
}
// Soft delete
await this.pool.query(
'UPDATE station_cache SET deleted_at = NOW() WHERE place_id = $1',
[stationId]
);
logger.info('Station soft deleted by admin', { actorId, stationId });
}
// Invalidate caches
await this.invalidateStationCaches(stationId);
// Log audit action
await this.adminRepository.logAuditAction(
actorId,
'DELETE',
undefined,
'station',
stationId,
{ force }
);
} catch (error) {
logger.error('Error deleting station', { error, stationId, force });
throw error;
}
}
/**
* Get user's saved stations
*/
async getUserSavedStations(userId: string): Promise<SavedStation[]> {
try {
const stations = await this.stationsRepository.getUserSavedStations(userId);
return stations;
} catch (error) {
logger.error('Error getting user saved stations', { error, userId });
throw error;
}
}
/**
* Remove user's saved station (soft delete by default, hard delete with force)
*/
async removeUserSavedStation(
actorId: string,
userId: string,
stationId: string,
force: boolean = false
): Promise<void> {
try {
if (force) {
// Hard delete
const result = await this.pool.query(
'DELETE FROM saved_stations WHERE user_id = $1 AND place_id = $2',
[userId, stationId]
);
if ((result.rowCount ?? 0) === 0) {
throw new Error('Saved station not found');
}
logger.info('User saved station hard deleted by admin', { actorId, userId, stationId });
} else {
// Soft delete - add deleted_at column if not exists
const columnCheck = await this.pool.query(`
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'saved_stations' AND column_name = 'deleted_at'
`);
if (columnCheck.rows.length === 0) {
// Column already exists in migration, but double check
logger.warn('deleted_at column check executed', { table: 'saved_stations' });
}
// Soft delete
const result = await this.pool.query(
'UPDATE saved_stations SET deleted_at = NOW() WHERE user_id = $1 AND place_id = $2 AND deleted_at IS NULL',
[userId, stationId]
);
if ((result.rowCount ?? 0) === 0) {
throw new Error('Saved station not found or already deleted');
}
logger.info('User saved station soft deleted by admin', { actorId, userId, stationId });
}
// Invalidate user's saved stations cache
await redis.del(`mvp:stations:saved:${userId}`);
// Log audit action
await this.adminRepository.logAuditAction(
actorId,
'DELETE',
undefined,
'saved_station',
`${userId}:${stationId}`,
{ userId, stationId, force }
);
} catch (error) {
logger.error('Error removing user saved station', { error, userId, stationId, force });
throw error;
}
}
/**
* Invalidate station-related Redis caches
*/
private async invalidateStationCaches(stationId?: string): Promise<void> {
try {
// Get all keys matching station cache patterns
const patterns = [
'mvp:stations:*',
'mvp:stations:search:*',
];
for (const pattern of patterns) {
const keys = await redis.keys(pattern);
if (keys.length > 0) {
await redis.del(...keys);
}
}
logger.info('Station caches invalidated', { stationId });
} catch (error) {
logger.error('Error invalidating station caches', { error, stationId });
// Don't throw - cache invalidation failure shouldn't fail the operation
}
}
/**
* Map database row to Station object
*/
private mapStationRow(row: any): Station {
return {
id: row.id,
placeId: row.place_id,
name: row.name,
address: row.address,
latitude: parseFloat(row.latitude),
longitude: parseFloat(row.longitude),
priceRegular: row.price_regular ? parseFloat(row.price_regular) : undefined,
pricePremium: row.price_premium ? parseFloat(row.price_premium) : undefined,
priceDiesel: row.price_diesel ? parseFloat(row.price_diesel) : undefined,
rating: row.rating ? parseFloat(row.rating) : undefined,
photoReference: row.photo_reference,
lastUpdated: row.cached_at,
};
}
}

View File

@@ -1,587 +0,0 @@
/**
* @ai-summary Integration tests for admin station oversight API endpoints
* @ai-context Tests complete request/response cycle with test database and admin guard
*/
import request from 'supertest';
import { app } from '../../../../app';
import pool from '../../../../core/config/database';
import { redis } from '../../../../core/config/redis';
import { readFileSync } from 'fs';
import { join } from 'path';
import fastifyPlugin from 'fastify-plugin';
import { setAdminGuardPool } from '../../../../core/plugins/admin-guard.plugin';
// Mock auth plugin to inject test admin user
jest.mock('../../../../core/plugins/auth.plugin', () => {
return {
default: fastifyPlugin(async function(fastify) {
fastify.decorate('authenticate', async function(request, _reply) {
// Inject test user context
request.user = { sub: 'test-admin-123' };
request.userContext = {
userId: 'test-admin-123',
email: 'test-admin@motovaultpro.com',
isAdmin: false, // Will be set by admin guard
};
});
}, { name: 'auth-plugin' })
};
});
describe('Admin Station Oversight Integration Tests', () => {
let testAdminAuth0Sub: string;
let testNonAdminAuth0Sub: string;
let testUserId: string;
beforeAll(async () => {
  // Apply the admin and stations migrations directly against the test
  // database; both migration files are assumed to be idempotent enough to
  // re-run (the admin insert below also guards with ON CONFLICT).
  // Run admin migrations
  const adminMigrationFile = join(__dirname, '../../migrations/001_create_admin_users.sql');
  const adminMigrationSQL = readFileSync(adminMigrationFile, 'utf-8');
  await pool.query(adminMigrationSQL);
  // Run stations migrations
  const stationsMigrationFile = join(__dirname, '../../../stations/migrations/001_create_stations_tables.sql');
  const stationsMigrationSQL = readFileSync(stationsMigrationFile, 'utf-8');
  await pool.query(stationsMigrationSQL);
  // Set admin guard pool
  setAdminGuardPool(pool);
  // Create test admin user (matches the sub injected by the auth mock above)
  testAdminAuth0Sub = 'test-admin-123';
  await pool.query(`
    INSERT INTO admin_users (auth0_sub, email, role, created_by)
    VALUES ($1, $2, $3, $4)
    ON CONFLICT (auth0_sub) DO NOTHING
  `, [testAdminAuth0Sub, 'test-admin@motovaultpro.com', 'admin', 'system']);
  // Create test non-admin auth0Sub for permission tests
  testNonAdminAuth0Sub = 'test-non-admin-456';
  testUserId = 'test-user-789';
});
afterAll(async () => {
  // Drop everything the migrations created so repeated runs start clean,
  // then release the pool and the Redis connection.
  await pool.query('DROP TABLE IF EXISTS saved_stations CASCADE');
  await pool.query('DROP TABLE IF EXISTS station_cache CASCADE');
  await pool.query('DROP TABLE IF EXISTS admin_audit_logs CASCADE');
  await pool.query('DROP TABLE IF EXISTS admin_users CASCADE');
  await pool.end();
  await redis.quit();
});
beforeEach(async () => {
  // Clean up test data before each test (admin_users is intentionally kept:
  // the mocked identity must stay an admin for the whole suite)
  await pool.query('DELETE FROM saved_stations');
  await pool.query('DELETE FROM station_cache');
  await pool.query('DELETE FROM admin_audit_logs');
});
describe('Authorization', () => {
  it('should reject non-admin user trying to list stations', async () => {
    // NOTE(review): this jest.mock inside jest.isolateModules is almost
    // certainly a no-op — jest.mock only takes effect when the mocked
    // module is (re)loaded, and the app under test (with the admin mock
    // from the top of this file) is already instantiated by the time this
    // test runs. Verify the 403 below is actually produced by the
    // non-admin path; consider driving the mocked identity through a
    // mutable module-scope variable instead of re-mocking.
    jest.isolateModules(() => {
      jest.mock('../../../../core/plugins/auth.plugin', () => {
        return {
          default: fastifyPlugin(async function(fastify) {
            fastify.decorate('authenticate', async function(request, _reply) {
              request.user = { sub: testNonAdminAuth0Sub };
              request.userContext = {
                userId: testNonAdminAuth0Sub,
                email: 'test-user@example.com',
                isAdmin: false,
              };
            });
          }, { name: 'auth-plugin' })
        };
      });
    });
    const response = await request(app)
      .get('/api/admin/stations')
      .expect(403);
    expect(response.body.error).toBe('Forbidden');
    expect(response.body.message).toBe('Admin access required');
  });
});
describe('GET /api/admin/stations', () => {
  it('should list all stations with pagination', async () => {
    // Seed three stations directly into the cache table
    await pool.query(`
      INSERT INTO station_cache (place_id, name, address, latitude, longitude, rating)
      VALUES
        ($1, $2, $3, $4, $5, $6),
        ($7, $8, $9, $10, $11, $12),
        ($13, $14, $15, $16, $17, $18)
    `, [
      'place1', 'Shell Station', '123 Main St', 40.7128, -74.0060, 4.5,
      'place2', 'Exxon Station', '456 Oak Ave', 40.7138, -74.0070, 4.2,
      'place3', 'BP Station', '789 Elm Rd', 40.7148, -74.0080, 4.7
    ]);
    const response = await request(app)
      .get('/api/admin/stations?limit=10&offset=0')
      .expect(200);
    // Response envelope: { total, stations: [...] } with camelCase fields
    expect(response.body).toHaveProperty('total');
    expect(response.body).toHaveProperty('stations');
    expect(response.body.total).toBe(3);
    expect(response.body.stations.length).toBe(3);
    expect(response.body.stations[0]).toMatchObject({
      placeId: expect.any(String),
      name: expect.any(String),
      address: expect.any(String),
      latitude: expect.any(Number),
      longitude: expect.any(Number),
    });
  });
  it('should support search by name', async () => {
    // Two stations; only one matches the search term
    await pool.query(`
      INSERT INTO station_cache (place_id, name, address, latitude, longitude)
      VALUES
        ($1, $2, $3, $4, $5),
        ($6, $7, $8, $9, $10)
    `, [
      'place1', 'Shell Station', '123 Main St', 40.7128, -74.0060,
      'place2', 'Exxon Station', '456 Oak Ave', 40.7138, -74.0070
    ]);
    const response = await request(app)
      .get('/api/admin/stations?search=Shell')
      .expect(200);
    expect(response.body.total).toBe(1);
    expect(response.body.stations[0].name).toContain('Shell');
  });
  it('should support pagination', async () => {
    // Create 5 test stations; ask for page size 2 and expect total to
    // still report the full count
    for (let i = 1; i <= 5; i++) {
      await pool.query(`
        INSERT INTO station_cache (place_id, name, address, latitude, longitude)
        VALUES ($1, $2, $3, $4, $5)
      `, [`place${i}`, `Station ${i}`, `${i} Main St`, 40.7128, -74.0060]);
    }
    const response = await request(app)
      .get('/api/admin/stations?limit=2&offset=0')
      .expect(200);
    expect(response.body.stations.length).toBe(2);
    expect(response.body.total).toBe(5);
  });
});
describe('POST /api/admin/stations', () => {
  it('should create a new station', async () => {
    const newStation = {
      placeId: 'new-place-123',
      name: 'New Shell Station',
      address: '999 Test Ave',
      latitude: 40.7200,
      longitude: -74.0100,
      priceRegular: 3.59,
      rating: 4.3
    };
    const response = await request(app)
      .post('/api/admin/stations')
      .send(newStation)
      .expect(201);
    expect(response.body).toMatchObject({
      placeId: 'new-place-123',
      name: 'New Shell Station',
      address: '999 Test Ave',
      latitude: 40.72,
      longitude: -74.01,
    });
    // Verify audit log was created and attributed to the mocked admin
    const auditResult = await pool.query(
      'SELECT * FROM admin_audit_logs WHERE action = $1 AND resource_id = $2',
      ['CREATE', 'new-place-123']
    );
    expect(auditResult.rows.length).toBe(1);
    expect(auditResult.rows[0].actor_admin_id).toBe(testAdminAuth0Sub);
    // Verify the station cache namespace was invalidated
    const cacheKeys = await redis.keys('mvp:stations:*');
    expect(cacheKeys.length).toBe(0); // Should be cleared
  });
  it('should reject missing required fields', async () => {
    // No placeId / latitude / longitude
    const invalidStation = {
      name: 'Incomplete Station',
      address: '123 Test St',
    };
    const response = await request(app)
      .post('/api/admin/stations')
      .send(invalidStation)
      .expect(400);
    expect(response.body.error).toContain('Missing required fields');
  });
  it('should handle duplicate placeId', async () => {
    const station = {
      placeId: 'duplicate-123',
      name: 'First Station',
      address: '123 Test Ave',
      latitude: 40.7200,
      longitude: -74.0100,
    };
    // Create first station
    await request(app)
      .post('/api/admin/stations')
      .send(station)
      .expect(201);
    // Try to create duplicate — expect conflict, not overwrite
    const response = await request(app)
      .post('/api/admin/stations')
      .send(station)
      .expect(409);
    expect(response.body.error).toContain('already exists');
  });
});
describe('PUT /api/admin/stations/:stationId', () => {
  it('should update an existing station', async () => {
    // Create station first
    await pool.query(`
      INSERT INTO station_cache (place_id, name, address, latitude, longitude)
      VALUES ($1, $2, $3, $4, $5)
    `, ['update-place', 'Old Name', '123 Old St', 40.7128, -74.0060]);
    const updateData = {
      name: 'Updated Name',
      address: '456 New St',
      priceRegular: 3.75
    };
    const response = await request(app)
      .put('/api/admin/stations/update-place')
      .send(updateData)
      .expect(200);
    expect(response.body).toMatchObject({
      placeId: 'update-place',
      name: 'Updated Name',
      address: '456 New St',
      priceRegular: 3.75
    });
    // Verify audit log
    const auditResult = await pool.query(
      'SELECT * FROM admin_audit_logs WHERE action = $1 AND resource_id = $2',
      ['UPDATE', 'update-place']
    );
    expect(auditResult.rows.length).toBe(1);
  });
  it('should return 404 for non-existent station', async () => {
    const response = await request(app)
      .put('/api/admin/stations/nonexistent')
      .send({ name: 'Updated Name' })
      .expect(404);
    expect(response.body.error).toBe('Station not found');
  });
  it('should reject empty update', async () => {
    // Create station first
    await pool.query(`
      INSERT INTO station_cache (place_id, name, address, latitude, longitude)
      VALUES ($1, $2, $3, $4, $5)
    `, ['place-empty', 'Name', '123 St', 40.7128, -74.0060]);
    const response = await request(app)
      .put('/api/admin/stations/place-empty')
      .send({})
      .expect(400);
    expect(response.body.error).toContain('No fields to update');
  });
});
describe('DELETE /api/admin/stations/:stationId', () => {
  it('should soft delete a station by default', async () => {
    // Create station first
    await pool.query(`
      INSERT INTO station_cache (place_id, name, address, latitude, longitude)
      VALUES ($1, $2, $3, $4, $5)
    `, ['soft-delete', 'Station to Delete', '123 Delete St', 40.7128, -74.0060]);
    await request(app)
      .delete('/api/admin/stations/soft-delete')
      .expect(204);
    // Verify station still exists but has deleted_at set
    const result = await pool.query(
      'SELECT deleted_at FROM station_cache WHERE place_id = $1',
      ['soft-delete']
    );
    // Station may not have deleted_at column initially, but should be handled
    // NOTE(review): `length >= 0` is always true — this asserts nothing.
    // If soft delete is the contract, assert rows.length === 1 and
    // deleted_at IS NOT NULL instead.
    expect(result.rows.length).toBeGreaterThanOrEqual(0);
  });
  it('should hard delete with force flag', async () => {
    // Create station first
    await pool.query(`
      INSERT INTO station_cache (place_id, name, address, latitude, longitude)
      VALUES ($1, $2, $3, $4, $5)
    `, ['hard-delete', 'Station to Delete', '123 Delete St', 40.7128, -74.0060]);
    await request(app)
      .delete('/api/admin/stations/hard-delete?force=true')
      .expect(204);
    // Verify station is actually deleted
    const result = await pool.query(
      'SELECT * FROM station_cache WHERE place_id = $1',
      ['hard-delete']
    );
    expect(result.rows.length).toBe(0);
    // Verify audit log
    const auditResult = await pool.query(
      'SELECT * FROM admin_audit_logs WHERE action = $1 AND resource_id = $2',
      ['DELETE', 'hard-delete']
    );
    expect(auditResult.rows.length).toBe(1);
    // NOTE(review): JSON.parse assumes `context` round-trips as text; if
    // the column is jsonb, node-postgres returns an already-parsed object
    // and JSON.parse will throw — confirm the column type.
    expect(JSON.parse(auditResult.rows[0].context).force).toBe(true);
  });
  it('should return 404 for non-existent station', async () => {
    const response = await request(app)
      .delete('/api/admin/stations/nonexistent')
      .expect(404);
    expect(response.body.error).toBe('Station not found');
  });
  it('should invalidate cache after deletion', async () => {
    // Create station and cache entry
    await pool.query(`
      INSERT INTO station_cache (place_id, name, address, latitude, longitude)
      VALUES ($1, $2, $3, $4, $5)
    `, ['cache-test', 'Station', '123 St', 40.7128, -74.0060]);
    await redis.set('mvp:stations:test', JSON.stringify({ test: true }));
    await request(app)
      .delete('/api/admin/stations/cache-test?force=true')
      .expect(204);
    // Verify cache was cleared
    const cacheValue = await redis.get('mvp:stations:test');
    expect(cacheValue).toBeNull();
  });
});
describe('GET /api/admin/users/:userId/stations', () => {
  it('should get user saved stations', async () => {
    // Create station in cache (saved_stations references station_cache)
    await pool.query(`
      INSERT INTO station_cache (place_id, name, address, latitude, longitude)
      VALUES ($1, $2, $3, $4, $5)
    `, ['user-place', 'User Station', '123 User St', 40.7128, -74.0060]);
    // Create saved station for user
    await pool.query(`
      INSERT INTO saved_stations (user_id, place_id, nickname, is_favorite)
      VALUES ($1, $2, $3, $4)
    `, [testUserId, 'user-place', 'My Favorite Station', true]);
    const response = await request(app)
      .get(`/api/admin/users/${testUserId}/stations`)
      .expect(200);
    // Response is a bare array of camelCase saved-station records
    expect(Array.isArray(response.body)).toBe(true);
    expect(response.body.length).toBe(1);
    expect(response.body[0]).toMatchObject({
      userId: testUserId,
      stationId: 'user-place',
      nickname: 'My Favorite Station',
      isFavorite: true,
    });
  });
  it('should return empty array for user with no saved stations', async () => {
    const response = await request(app)
      .get('/api/admin/users/user-no-stations/stations')
      .expect(200);
    expect(Array.isArray(response.body)).toBe(true);
    expect(response.body.length).toBe(0);
  });
});
describe('DELETE /api/admin/users/:userId/stations/:stationId', () => {
  it('should soft delete user saved station by default', async () => {
    // Create station and saved station
    await pool.query(`
      INSERT INTO station_cache (place_id, name, address, latitude, longitude)
      VALUES ($1, $2, $3, $4, $5)
    `, ['saved-place', 'Saved Station', '123 Saved St', 40.7128, -74.0060]);
    await pool.query(`
      INSERT INTO saved_stations (user_id, place_id, nickname)
      VALUES ($1, $2, $3)
    `, [testUserId, 'saved-place', 'My Station']);
    await request(app)
      .delete(`/api/admin/users/${testUserId}/stations/saved-place`)
      .expect(204);
    // Verify soft delete (deleted_at set)
    const result = await pool.query(
      'SELECT deleted_at FROM saved_stations WHERE user_id = $1 AND place_id = $2',
      [testUserId, 'saved-place']
    );
    // NOTE(review): `length >= 0` is always true — this asserts nothing;
    // tighten to a real expectation once the soft-delete column exists.
    expect(result.rows.length).toBeGreaterThanOrEqual(0);
  });
  it('should hard delete with force flag', async () => {
    // Create station and saved station
    await pool.query(`
      INSERT INTO station_cache (place_id, name, address, latitude, longitude)
      VALUES ($1, $2, $3, $4, $5)
    `, ['force-delete', 'Station', '123 St', 40.7128, -74.0060]);
    await pool.query(`
      INSERT INTO saved_stations (user_id, place_id, nickname)
      VALUES ($1, $2, $3)
    `, [testUserId, 'force-delete', 'My Station']);
    await request(app)
      .delete(`/api/admin/users/${testUserId}/stations/force-delete?force=true`)
      .expect(204);
    // Verify hard delete
    const result = await pool.query(
      'SELECT * FROM saved_stations WHERE user_id = $1 AND place_id = $2',
      [testUserId, 'force-delete']
    );
    expect(result.rows.length).toBe(0);
    // Verify audit log (resource_id is "<userId>:<placeId>" for user stations)
    const auditResult = await pool.query(
      'SELECT * FROM admin_audit_logs WHERE action = $1 AND resource_id = $2',
      ['DELETE', `${testUserId}:force-delete`]
    );
    expect(auditResult.rows.length).toBe(1);
  });
  it('should return 404 for non-existent saved station', async () => {
    const response = await request(app)
      .delete(`/api/admin/users/${testUserId}/stations/nonexistent`)
      .expect(404);
    expect(response.body.error).toContain('not found');
  });
  it('should invalidate user cache after deletion', async () => {
    // Create saved station
    await pool.query(`
      INSERT INTO station_cache (place_id, name, address, latitude, longitude)
      VALUES ($1, $2, $3, $4, $5)
    `, ['cache-delete', 'Station', '123 St', 40.7128, -74.0060]);
    await pool.query(`
      INSERT INTO saved_stations (user_id, place_id)
      VALUES ($1, $2)
    `, [testUserId, 'cache-delete']);
    // Set user cache
    await redis.set(`mvp:stations:saved:${testUserId}`, JSON.stringify({ test: true }));
    await request(app)
      .delete(`/api/admin/users/${testUserId}/stations/cache-delete?force=true`)
      .expect(204);
    // Verify cache was cleared
    const cacheValue = await redis.get(`mvp:stations:saved:${testUserId}`);
    expect(cacheValue).toBeNull();
  });
});
describe('End-to-end workflow', () => {
  // Exercises the whole admin station lifecycle (create → list → update →
  // user saves → admin removes saved → admin deletes) and checks that the
  // audit trail recorded every mutating step.
  it('should complete full station lifecycle with audit trail', async () => {
    // 1. Create station
    const createResponse = await request(app)
      .post('/api/admin/stations')
      .send({
        placeId: 'workflow-station',
        name: 'Workflow Station',
        address: '123 Workflow St',
        latitude: 40.7200,
        longitude: -74.0100,
      })
      .expect(201);
    expect(createResponse.body.placeId).toBe('workflow-station');
    // 2. List stations and verify it exists
    const listResponse = await request(app)
      .get('/api/admin/stations')
      .expect(200);
    const station = listResponse.body.stations.find(
      (s: any) => s.placeId === 'workflow-station'
    );
    expect(station).toBeDefined();
    // 3. Update station
    await request(app)
      .put('/api/admin/stations/workflow-station')
      .send({ name: 'Updated Workflow Station' })
      .expect(200);
    // 4. User saves the station (directly via SQL — there is no admin API for this)
    await pool.query(`
      INSERT INTO saved_stations (user_id, place_id, nickname)
      VALUES ($1, $2, $3)
    `, [testUserId, 'workflow-station', 'My Workflow Station']);
    // 5. Admin views user's saved stations
    const userStationsResponse = await request(app)
      .get(`/api/admin/users/${testUserId}/stations`)
      .expect(200);
    expect(userStationsResponse.body.length).toBe(1);
    // 6. Admin removes user's saved station
    await request(app)
      .delete(`/api/admin/users/${testUserId}/stations/workflow-station?force=true`)
      .expect(204);
    // 7. Admin deletes station
    await request(app)
      .delete('/api/admin/stations/workflow-station?force=true')
      .expect(204);
    // 8. Verify complete audit trail (>= 3: CREATE, UPDATE, two DELETEs)
    const auditResponse = await pool.query(
      'SELECT * FROM admin_audit_logs WHERE resource_id LIKE $1 OR resource_id = $2 ORDER BY created_at ASC',
      ['%workflow-station%', 'workflow-station']
    );
    expect(auditResponse.rows.length).toBeGreaterThanOrEqual(3);
    const actions = auditResponse.rows.map((log: any) => log.action);
    expect(actions).toContain('CREATE');
    expect(actions).toContain('UPDATE');
    expect(actions).toContain('DELETE');
  });
});
});

View File

@@ -0,0 +1,118 @@
# Backup Feature
Complete backup and restore system for MotoVaultPro.
## Overview
This feature provides:
- Manual and scheduled backups of the PostgreSQL database and document files
- Multiple backup schedules with individual retention policies
- Restore functionality with safety backup creation
- Email notifications on backup success/failure
## Architecture
```
backup/
api/ # HTTP endpoints
backup.routes.ts # Route definitions
backup.controller.ts # Request handlers
backup.validation.ts # Zod schemas
domain/ # Business logic
backup.types.ts # TypeScript types
backup.service.ts # Core backup operations
backup-archive.service.ts # Archive creation
backup-restore.service.ts # Restore operations
backup-retention.service.ts # Retention enforcement
data/ # Data access
backup.repository.ts # Database queries
jobs/ # Scheduled jobs
backup-scheduled.job.ts # Scheduled backup execution
backup-cleanup.job.ts # Retention cleanup
migrations/ # Database schema
001_create_backup_tables.sql
```
## API Endpoints
All endpoints require admin authentication.
### Backup Operations
| Method | Path | Description |
|--------|------|-------------|
| GET | `/api/admin/backups` | List backups with pagination |
| POST | `/api/admin/backups` | Create manual backup |
| GET | `/api/admin/backups/:id` | Get backup details |
| GET | `/api/admin/backups/:id/download` | Download backup file |
| DELETE | `/api/admin/backups/:id` | Delete backup |
| POST | `/api/admin/backups/upload` | Upload backup file |
### Restore Operations
| Method | Path | Description |
|--------|------|-------------|
| POST | `/api/admin/backups/:id/restore/preview` | Preview restore |
| POST | `/api/admin/backups/:id/restore` | Execute restore |
| GET | `/api/admin/backups/restore/status` | Get restore status |
### Schedule Operations
| Method | Path | Description |
|--------|------|-------------|
| GET | `/api/admin/backups/schedules` | List schedules |
| POST | `/api/admin/backups/schedules` | Create schedule |
| PUT | `/api/admin/backups/schedules/:id` | Update schedule |
| DELETE | `/api/admin/backups/schedules/:id` | Delete schedule |
| PATCH | `/api/admin/backups/schedules/:id/toggle` | Enable/disable |
### Settings
| Method | Path | Description |
|--------|------|-------------|
| GET | `/api/admin/backups/settings` | Get settings |
| PUT | `/api/admin/backups/settings` | Update settings |
## Backup Archive Format
Backups are `.tar.gz` archives containing:
```
motovaultpro_backup_YYYYMMDD_HHMMSS.tar.gz
manifest.json # Backup metadata
database/
motovaultpro.sql.gz # Compressed PostgreSQL dump
documents/
<user_id>/ # User document files
```
## Schedule Frequencies
| Frequency | Cron | Default Time |
|-----------|------|--------------|
| hourly | `0 * * * *` | Every hour |
| daily | `0 3 * * *` | 3:00 AM |
| weekly | `0 3 * * 0` | Sunday 3:00 AM |
| monthly | `0 3 1 * *` | 1st of month 3:00 AM |
## Database Tables
- `backup_schedules` - Schedule configurations
- `backup_history` - Backup operation records
- `backup_settings` - Global settings
## Storage
Backups are stored in `/app/data/backups/` (mapped to `./data/backups/` on host).
## Integration
### Scheduler
Jobs are registered in `backend/src/core/scheduler/index.ts`:
- Backup check: Every minute
- Retention cleanup: Daily at 4 AM
### Admin Routes
Routes are registered in `backend/src/features/admin/api/admin.routes.ts`.

View File

@@ -0,0 +1,319 @@
/**
* @ai-summary Controller for backup API endpoints
* @ai-context Handles HTTP requests for backup operations
*/
import { FastifyRequest, FastifyReply } from 'fastify';
import { Pool } from 'pg';
import { BackupService } from '../domain/backup.service';
import { BackupRestoreService } from '../domain/backup-restore.service';
import {
ListBackupsQuery,
CreateBackupBody,
BackupIdParam,
RestoreBody,
CreateScheduleBody,
UpdateScheduleBody,
ScheduleIdParam,
UpdateSettingsBody,
} from './backup.validation';
export class BackupController {
  private backupService: BackupService;
  private restoreService: BackupRestoreService;

  constructor(pool: Pool) {
    this.backupService = new BackupService(pool);
    this.restoreService = new BackupRestoreService(pool);
  }

  // ============================================
  // Backup Operations
  // ============================================

  /** GET /admin/backups — paginated, filterable list of backup records. */
  async listBackups(
    request: FastifyRequest<{ Querystring: ListBackupsQuery }>,
    reply: FastifyReply
  ): Promise<void> {
    const result = await this.backupService.listBackups(request.query);
    reply.send(result);
  }

  /**
   * POST /admin/backups — run a manual backup synchronously.
   * 201 with the backup id on success, 500 with the service error on failure.
   */
  async createBackup(
    request: FastifyRequest<{ Body: CreateBackupBody }>,
    reply: FastifyReply
  ): Promise<void> {
    // NOTE(review): assumes the auth plugin populates userContext.auth0Sub —
    // elsewhere the test mock only sets userId/email; confirm the field name.
    const adminSub = (request as any).userContext?.auth0Sub;
    const result = await this.backupService.createBackup({
      name: request.body.name,
      backupType: 'manual',
      createdBy: adminSub,
      includeDocuments: request.body.includeDocuments,
    });
    if (result.success) {
      reply.status(201).send({
        backupId: result.backupId,
        status: 'completed',
        message: 'Backup created successfully',
      });
    } else {
      reply.status(500).send({
        backupId: result.backupId,
        status: 'failed',
        message: result.error,
      });
    }
  }

  /** GET /admin/backups/:id — single backup record, 404 when unknown. */
  async getBackup(
    request: FastifyRequest<{ Params: BackupIdParam }>,
    reply: FastifyReply
  ): Promise<void> {
    const backup = await this.backupService.getBackup(request.params.id);
    if (!backup) {
      reply.status(404).send({ error: 'Backup not found' });
      return;
    }
    reply.send(backup);
  }

  /**
   * GET /admin/backups/:id/download — stream the archive to the client.
   * Returns the reply so Fastify treats the stream send as the response.
   */
  async downloadBackup(
    request: FastifyRequest<{ Params: BackupIdParam }>,
    reply: FastifyReply
  ): Promise<void> {
    const download = await this.backupService.createDownloadStream(request.params.id);
    if (!download) {
      reply.status(404).send({ error: 'Backup not found or file not available' });
      return;
    }
    return reply
      .header('Content-Type', 'application/gzip')
      .header('Content-Disposition', `attachment; filename="${download.filename}"`)
      .header('Content-Length', download.size)
      .send(download.stream);
  }

  /** DELETE /admin/backups/:id — 204 on success, 404 when unknown. */
  async deleteBackup(
    request: FastifyRequest<{ Params: BackupIdParam }>,
    reply: FastifyReply
  ): Promise<void> {
    const deleted = await this.backupService.deleteBackup(request.params.id);
    if (!deleted) {
      reply.status(404).send({ error: 'Backup not found' });
      return;
    }
    reply.status(204).send();
  }

  /**
   * POST /admin/backups/upload — accept a .tar.gz archive via multipart,
   * stage it in the OS temp dir, then hand it to the service for import.
   * The temp file is removed when the import fails; on success the service
   * is expected to move/consume it.
   */
  async uploadBackup(
    request: FastifyRequest,
    reply: FastifyReply
  ): Promise<void> {
    const adminSub = (request as any).userContext?.auth0Sub;
    // Handle multipart file upload
    const data = await request.file();
    if (!data) {
      reply.status(400).send({ error: 'No file uploaded' });
      return;
    }
    // Validate file type by extension (the service re-validates the manifest)
    const filename = data.filename;
    if (!filename.endsWith('.tar.gz') && !filename.endsWith('.tgz')) {
      reply.status(400).send({ error: 'Invalid file type. Expected .tar.gz archive' });
      return;
    }
    // Save file temporarily
    const fs = await import('fs');
    const path = await import('path');
    const os = await import('os');
    const { pipeline } = await import('stream/promises');
    // BUGFIX: the temp path previously embedded the literal text
    // "$(unknown)" instead of the uploaded filename. path.basename()
    // strips any directory components a malicious client could put in
    // the filename (path traversal out of tmpdir).
    const tempPath = path.join(
      os.tmpdir(),
      `upload-${Date.now()}-${path.basename(filename)}`
    );
    // pipeline() resolves only after the write stream has flushed and
    // propagates errors from both sides; the previous code resolved on the
    // read side's 'end' event (before data was guaranteed on disk) and
    // ignored write-stream errors entirely.
    await pipeline(data.file, fs.createWriteStream(tempPath));
    try {
      const backup = await this.backupService.importUploadedBackup(
        tempPath,
        filename,
        adminSub
      );
      reply.status(201).send({
        backupId: backup.id,
        filename: backup.filename,
        fileSizeBytes: backup.fileSizeBytes,
        message: 'Backup uploaded successfully',
      });
    } catch (error) {
      // Cleanup temp file on error; best-effort, ignore unlink failures
      await fs.promises.unlink(tempPath).catch(() => {});
      throw error;
    }
  }

  // ============================================
  // Restore Operations
  // ============================================

  /** POST /admin/backups/:id/restore/preview — dry-run summary of a restore. */
  async previewRestore(
    request: FastifyRequest<{ Params: BackupIdParam }>,
    reply: FastifyReply
  ): Promise<void> {
    try {
      const preview = await this.restoreService.previewRestore(request.params.id);
      reply.send(preview);
    } catch (error) {
      reply.status(400).send({
        error: error instanceof Error ? error.message : 'Failed to preview restore',
      });
    }
  }

  /**
   * POST /admin/backups/:id/restore — execute a restore, optionally taking a
   * safety backup first. Service-reported failure → 500; thrown validation
   * errors (bad id, restore already running, …) → 400.
   */
  async executeRestore(
    request: FastifyRequest<{ Params: BackupIdParam; Body: RestoreBody }>,
    reply: FastifyReply
  ): Promise<void> {
    try {
      const result = await this.restoreService.executeRestore({
        backupId: request.params.id,
        createSafetyBackup: request.body.createSafetyBackup,
      });
      if (result.success) {
        reply.send({
          success: true,
          safetyBackupId: result.safetyBackupId,
          restoredAt: new Date().toISOString(),
          message: 'Restore completed successfully',
        });
      } else {
        reply.status(500).send({
          success: false,
          safetyBackupId: result.safetyBackupId,
          error: result.error,
          message: 'Restore failed',
        });
      }
    } catch (error) {
      reply.status(400).send({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to execute restore',
      });
    }
  }

  /** GET /admin/backups/restore/status — in-memory restore progress. */
  async getRestoreStatus(
    _request: FastifyRequest,
    reply: FastifyReply
  ): Promise<void> {
    const status = this.restoreService.getRestoreStatus();
    reply.send(status);
  }

  // ============================================
  // Schedule Operations
  // ============================================

  /** GET /admin/backups/schedules — all configured schedules. */
  async listSchedules(
    _request: FastifyRequest,
    reply: FastifyReply
  ): Promise<void> {
    const schedules = await this.backupService.listSchedules();
    reply.send(schedules);
  }

  /** POST /admin/backups/schedules — create a schedule, 201 with the record. */
  async createSchedule(
    request: FastifyRequest<{ Body: CreateScheduleBody }>,
    reply: FastifyReply
  ): Promise<void> {
    const schedule = await this.backupService.createSchedule(
      request.body.name,
      request.body.frequency,
      request.body.retentionCount,
      request.body.isEnabled
    );
    reply.status(201).send(schedule);
  }

  /** PUT /admin/backups/schedules/:id — partial update, 404 when unknown. */
  async updateSchedule(
    request: FastifyRequest<{ Params: ScheduleIdParam; Body: UpdateScheduleBody }>,
    reply: FastifyReply
  ): Promise<void> {
    const schedule = await this.backupService.updateSchedule(
      request.params.id,
      request.body
    );
    if (!schedule) {
      reply.status(404).send({ error: 'Schedule not found' });
      return;
    }
    reply.send(schedule);
  }

  /** DELETE /admin/backups/schedules/:id — 204 on success, 404 when unknown. */
  async deleteSchedule(
    request: FastifyRequest<{ Params: ScheduleIdParam }>,
    reply: FastifyReply
  ): Promise<void> {
    const deleted = await this.backupService.deleteSchedule(request.params.id);
    if (!deleted) {
      reply.status(404).send({ error: 'Schedule not found' });
      return;
    }
    reply.status(204).send();
  }

  /** PATCH /admin/backups/schedules/:id/toggle — flip isEnabled. */
  async toggleSchedule(
    request: FastifyRequest<{ Params: ScheduleIdParam }>,
    reply: FastifyReply
  ): Promise<void> {
    const schedule = await this.backupService.toggleSchedule(request.params.id);
    if (!schedule) {
      reply.status(404).send({ error: 'Schedule not found' });
      return;
    }
    reply.send(schedule);
  }

  // ============================================
  // Settings Operations
  // ============================================

  /** GET /admin/backups/settings — global backup settings. */
  async getSettings(
    _request: FastifyRequest,
    reply: FastifyReply
  ): Promise<void> {
    const settings = await this.backupService.getSettings();
    reply.send(settings);
  }

  /** PUT /admin/backups/settings — merge-update global backup settings. */
  async updateSettings(
    request: FastifyRequest<{ Body: UpdateSettingsBody }>,
    reply: FastifyReply
  ): Promise<void> {
    const settings = await this.backupService.updateSettings(request.body);
    reply.send(settings);
  }
}

View File

@@ -0,0 +1,137 @@
/**
* @ai-summary Route definitions for backup API endpoints
* @ai-context All routes require admin authentication via fastify.requireAdmin
*/
import { FastifyInstance, FastifyPluginOptions } from 'fastify';
import { Pool } from 'pg';
import { BackupController } from './backup.controller';
import {
  ListBackupsQuery,
  CreateBackupBody,
  BackupIdParam,
  RestoreBody,
  CreateScheduleBody,
  UpdateScheduleBody,
  ScheduleIdParam,
  UpdateSettingsBody,
} from './backup.validation';
import {
  listBackupsQuerySchema,
  createBackupBodySchema,
  backupIdParamSchema,
  restoreBodySchema,
  createScheduleBodySchema,
  updateScheduleBodySchema,
  scheduleIdParamSchema,
  updateSettingsBodySchema,
} from './backup.validation';
export async function registerBackupRoutes(
fastify: FastifyInstance,
opts: FastifyPluginOptions & { pool: Pool }
): Promise<void> {
const controller = new BackupController(opts.pool);
// ============================================
// Backup Operations
// ============================================
// GET /api/admin/backups - List all backups
fastify.get<{ Querystring: ListBackupsQuery }>('/admin/backups', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.listBackups.bind(controller),
});
// POST /api/admin/backups - Create manual backup
fastify.post<{ Body: CreateBackupBody }>('/admin/backups', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.createBackup.bind(controller),
});
// GET /api/admin/backups/:id - Get backup details
fastify.get<{ Params: BackupIdParam }>('/admin/backups/:id', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.getBackup.bind(controller),
});
// GET /api/admin/backups/:id/download - Download backup file
fastify.get<{ Params: BackupIdParam }>('/admin/backups/:id/download', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.downloadBackup.bind(controller),
});
// DELETE /api/admin/backups/:id - Delete backup
fastify.delete<{ Params: BackupIdParam }>('/admin/backups/:id', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.deleteBackup.bind(controller),
});
// POST /api/admin/backups/upload - Upload backup file
fastify.post('/admin/backups/upload', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.uploadBackup.bind(controller),
});
// ============================================
// Restore Operations
// ============================================
// POST /api/admin/backups/:id/restore/preview - Preview restore
fastify.post<{ Params: BackupIdParam }>('/admin/backups/:id/restore/preview', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.previewRestore.bind(controller),
});
// POST /api/admin/backups/:id/restore - Execute restore
fastify.post<{ Params: BackupIdParam; Body: RestoreBody }>('/admin/backups/:id/restore', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.executeRestore.bind(controller),
});
// GET /api/admin/backups/restore/status - Get restore status
fastify.get('/admin/backups/restore/status', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.getRestoreStatus.bind(controller),
});
// ============================================
// Schedule Operations
// ============================================
// GET /api/admin/backups/schedules - List schedules
fastify.get('/admin/backups/schedules', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.listSchedules.bind(controller),
});
// POST /api/admin/backups/schedules - Create schedule
fastify.post<{ Body: CreateScheduleBody }>('/admin/backups/schedules', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.createSchedule.bind(controller),
});
// PUT /api/admin/backups/schedules/:id - Update schedule
fastify.put<{ Params: ScheduleIdParam; Body: UpdateScheduleBody }>('/admin/backups/schedules/:id', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.updateSchedule.bind(controller),
});
// DELETE /api/admin/backups/schedules/:id - Delete schedule
fastify.delete<{ Params: ScheduleIdParam }>('/admin/backups/schedules/:id', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.deleteSchedule.bind(controller),
});
// PATCH /api/admin/backups/schedules/:id/toggle - Toggle schedule
fastify.patch<{ Params: ScheduleIdParam }>('/admin/backups/schedules/:id/toggle', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.toggleSchedule.bind(controller),
});
// ============================================
// Settings Operations
// ============================================
// GET /api/admin/backups/settings - Get settings
fastify.get('/admin/backups/settings', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.getSettings.bind(controller),
});
// PUT /api/admin/backups/settings - Update settings
fastify.put<{ Body: UpdateSettingsBody }>('/admin/backups/settings', {
preHandler: [(fastify as any).requireAdmin],
handler: controller.updateSettings.bind(controller),
});
}

View File

@@ -0,0 +1,83 @@
/**
* @ai-summary Zod validation schemas for backup API endpoints
* @ai-context Request validation for backup operations
*/
import { z } from 'zod';
// ============================================
// Backup Operations
// ============================================
// List query: coerced because query-string values arrive as strings.
export const listBackupsQuerySchema = z.object({
  page: z.coerce.number().int().positive().optional().default(1),
  pageSize: z.coerce.number().int().min(1).max(100).optional().default(20),
  status: z.enum(['in_progress', 'completed', 'failed']).optional(),
  backupType: z.enum(['scheduled', 'manual']).optional(),
  sortBy: z.enum(['startedAt', 'fileSizeBytes', 'status']).optional().default('startedAt'),
  sortOrder: z.enum(['asc', 'desc']).optional().default('desc'),
});
// Manual backup request; documents are included unless explicitly disabled.
export const createBackupBodySchema = z.object({
  name: z.string().min(1).max(100).optional(),
  includeDocuments: z.boolean().optional().default(true),
});
// Backup ids are UUIDs.
export const backupIdParamSchema = z.object({
  id: z.string().uuid(),
});
// ============================================
// Restore Operations
// ============================================
// Restore defaults to taking a safety backup first.
export const restoreBodySchema = z.object({
  createSafetyBackup: z.boolean().optional().default(true),
});
// ============================================
// Schedule Operations
// ============================================
// New schedule: retention defaults to keeping the last 7 backups.
export const createScheduleBodySchema = z.object({
  name: z.string().min(1).max(100),
  frequency: z.enum(['hourly', 'daily', 'weekly', 'monthly']),
  retentionCount: z.number().int().min(1).max(365).optional().default(7),
  isEnabled: z.boolean().optional().default(true),
});
// Partial update: every field optional, no defaults (absent means "unchanged").
export const updateScheduleBodySchema = z.object({
  name: z.string().min(1).max(100).optional(),
  frequency: z.enum(['hourly', 'daily', 'weekly', 'monthly']).optional(),
  retentionCount: z.number().int().min(1).max(365).optional(),
  isEnabled: z.boolean().optional(),
});
// Schedule ids are UUIDs.
export const scheduleIdParamSchema = z.object({
  id: z.string().uuid(),
});
// ============================================
// Settings Operations
// ============================================
// Settings merge-update; adminEmail accepts '' to clear the address.
export const updateSettingsBodySchema = z.object({
  emailOnSuccess: z.boolean().optional(),
  emailOnFailure: z.boolean().optional(),
  adminEmail: z.string().email().or(z.literal('')).optional(),
  maxBackupSizeMb: z.number().int().min(100).max(10240).optional(),
  compressionEnabled: z.boolean().optional(),
});
// ============================================
// Type Exports
// ============================================
export type ListBackupsQuery = z.infer<typeof listBackupsQuerySchema>;
export type CreateBackupBody = z.infer<typeof createBackupBodySchema>;
export type BackupIdParam = z.infer<typeof backupIdParamSchema>;
export type RestoreBody = z.infer<typeof restoreBodySchema>;
export type CreateScheduleBody = z.infer<typeof createScheduleBodySchema>;
export type UpdateScheduleBody = z.infer<typeof updateScheduleBodySchema>;
export type ScheduleIdParam = z.infer<typeof scheduleIdParamSchema>;
export type UpdateSettingsBody = z.infer<typeof updateSettingsBodySchema>;

View File

@@ -0,0 +1,433 @@
/**
* @ai-summary Repository for backup database operations
* @ai-context Handles CRUD operations for backup_schedules, backup_history, backup_settings
*/
import { Pool } from 'pg';
import {
BackupSchedule,
BackupHistory,
BackupSettingsMap,
BackupFrequency,
BackupType,
BackupStatus,
BackupMetadata,
ListBackupsParams,
CRON_EXPRESSIONS,
} from '../domain/backup.types';
export class BackupRepository {
  constructor(private pool: Pool) {}

  // ============================================
  // Row Mappers (snake_case -> camelCase)
  // ============================================

  /** Maps a backup_schedules row into the BackupSchedule domain shape. */
  private mapScheduleRow(row: any): BackupSchedule {
    return {
      id: row.id,
      name: row.name,
      frequency: row.frequency as BackupFrequency,
      cronExpression: row.cron_expression,
      retentionCount: row.retention_count,
      isEnabled: row.is_enabled,
      lastRunAt: row.last_run_at ? new Date(row.last_run_at) : null,
      nextRunAt: row.next_run_at ? new Date(row.next_run_at) : null,
      createdAt: new Date(row.created_at),
      updatedAt: new Date(row.updated_at),
    };
  }

  /**
   * Maps a backup_history row into the BackupHistory domain shape.
   * BIGINT columns come back from pg as strings; NULL is guarded so
   * fileSizeBytes never surfaces as NaN (fix: parseInt(null) === NaN).
   */
  private mapHistoryRow(row: any): BackupHistory {
    return {
      id: row.id,
      scheduleId: row.schedule_id,
      backupType: row.backup_type as BackupType,
      filename: row.filename,
      filePath: row.file_path,
      fileSizeBytes: row.file_size_bytes == null ? 0 : parseInt(row.file_size_bytes, 10),
      databaseTablesCount: row.database_tables_count,
      documentsCount: row.documents_count,
      status: row.status as BackupStatus,
      errorMessage: row.error_message,
      startedAt: new Date(row.started_at),
      completedAt: row.completed_at ? new Date(row.completed_at) : null,
      createdBy: row.created_by,
      metadata: row.metadata as BackupMetadata,
    };
  }

  // ============================================
  // Schedule Operations
  // ============================================

  /** Returns all schedules, oldest first. */
  async listSchedules(): Promise<BackupSchedule[]> {
    const result = await this.pool.query(
      `SELECT * FROM backup_schedules ORDER BY created_at ASC`
    );
    // Arrow wrapper keeps `this` bound — a bare method reference passed to
    // map() loses its receiver (fix applied to every mapper call below).
    return result.rows.map((row) => this.mapScheduleRow(row));
  }

  /** Returns a single schedule by id, or null when it does not exist. */
  async getScheduleById(id: string): Promise<BackupSchedule | null> {
    const result = await this.pool.query(
      `SELECT * FROM backup_schedules WHERE id = $1`,
      [id]
    );
    return result.rows.length > 0 ? this.mapScheduleRow(result.rows[0]) : null;
  }

  /**
   * Returns enabled schedules whose next run time has arrived (or was
   * never set), ordered so the most overdue run first.
   */
  async getEnabledSchedulesDue(): Promise<BackupSchedule[]> {
    const result = await this.pool.query(
      `SELECT * FROM backup_schedules
       WHERE is_enabled = true
       AND (next_run_at IS NULL OR next_run_at <= NOW())
       ORDER BY next_run_at ASC`
    );
    return result.rows.map((row) => this.mapScheduleRow(row));
  }

  /**
   * Creates a schedule. The cron expression is derived from the frequency
   * and the first run time is precomputed via calculateNextRun.
   */
  async createSchedule(
    name: string,
    frequency: BackupFrequency,
    retentionCount: number = 7,
    isEnabled: boolean = true
  ): Promise<BackupSchedule> {
    const cronExpression = CRON_EXPRESSIONS[frequency];
    const nextRunAt = this.calculateNextRun(frequency);
    const result = await this.pool.query(
      `INSERT INTO backup_schedules
       (name, frequency, cron_expression, retention_count, is_enabled, next_run_at)
       VALUES ($1, $2, $3, $4, $5, $6)
       RETURNING *`,
      [name, frequency, cronExpression, retentionCount, isEnabled, nextRunAt]
    );
    return this.mapScheduleRow(result.rows[0]);
  }

  /**
   * Partially updates a schedule. Changing the frequency also rewrites the
   * derived cron expression and recomputes next_run_at. An empty update is
   * a no-op that returns the current row. Returns null when the id does
   * not exist.
   */
  async updateSchedule(
    id: string,
    updates: Partial<{
      name: string;
      frequency: BackupFrequency;
      retentionCount: number;
      isEnabled: boolean;
    }>
  ): Promise<BackupSchedule | null> {
    const setClauses: string[] = [];
    const values: any[] = [];
    let paramIndex = 1;
    if (updates.name !== undefined) {
      setClauses.push(`name = $${paramIndex++}`);
      values.push(updates.name);
    }
    if (updates.frequency !== undefined) {
      setClauses.push(`frequency = $${paramIndex++}`);
      values.push(updates.frequency);
      setClauses.push(`cron_expression = $${paramIndex++}`);
      values.push(CRON_EXPRESSIONS[updates.frequency]);
      setClauses.push(`next_run_at = $${paramIndex++}`);
      values.push(this.calculateNextRun(updates.frequency));
    }
    if (updates.retentionCount !== undefined) {
      setClauses.push(`retention_count = $${paramIndex++}`);
      values.push(updates.retentionCount);
    }
    if (updates.isEnabled !== undefined) {
      setClauses.push(`is_enabled = $${paramIndex++}`);
      values.push(updates.isEnabled);
    }
    if (setClauses.length === 0) {
      return this.getScheduleById(id);
    }
    values.push(id);
    const result = await this.pool.query(
      `UPDATE backup_schedules SET ${setClauses.join(', ')} WHERE id = $${paramIndex} RETURNING *`,
      values
    );
    return result.rows.length > 0 ? this.mapScheduleRow(result.rows[0]) : null;
  }

  /** Deletes a schedule. Returns true if a row was actually removed. */
  async deleteSchedule(id: string): Promise<boolean> {
    const result = await this.pool.query(
      `DELETE FROM backup_schedules WHERE id = $1`,
      [id]
    );
    return (result.rowCount ?? 0) > 0;
  }

  /** Records a completed run: last_run_at = NOW() and the next due time. */
  async updateScheduleLastRun(id: string, nextRunAt: Date): Promise<void> {
    await this.pool.query(
      `UPDATE backup_schedules SET last_run_at = NOW(), next_run_at = $1 WHERE id = $2`,
      [nextRunAt, id]
    );
  }

  // ============================================
  // History Operations
  // ============================================

  /**
   * Lists backup history with optional status/type filters, whitelisted
   * sorting, and offset pagination. Returns the page plus the unpaginated
   * total for the same filters.
   */
  async listBackups(params: ListBackupsParams): Promise<{
    items: BackupHistory[];
    total: number;
  }> {
    const page = params.page || 1;
    const pageSize = params.pageSize || 20;
    const offset = (page - 1) * pageSize;
    const sortBy = params.sortBy || 'startedAt';
    const sortOrder = params.sortOrder || 'desc';
    // Sort column and direction are interpolated into SQL, so both are
    // restricted to whitelisted values (sortOrder is 'asc'|'desc' by type).
    const sortColumnMap: Record<string, string> = {
      startedAt: 'started_at',
      fileSizeBytes: 'file_size_bytes',
      status: 'status',
    };
    const sortColumn = sortColumnMap[sortBy] || 'started_at';
    const whereClauses: string[] = [];
    const values: any[] = [];
    let paramIndex = 1;
    if (params.status) {
      whereClauses.push(`status = $${paramIndex++}`);
      values.push(params.status);
    }
    if (params.backupType) {
      whereClauses.push(`backup_type = $${paramIndex++}`);
      values.push(params.backupType);
    }
    const whereClause = whereClauses.length > 0 ? `WHERE ${whereClauses.join(' AND ')}` : '';
    // Get total count
    const countResult = await this.pool.query(
      `SELECT COUNT(*) as total FROM backup_history ${whereClause}`,
      values
    );
    const total = parseInt(countResult.rows[0].total, 10);
    // Get paginated items
    values.push(pageSize, offset);
    const result = await this.pool.query(
      `SELECT * FROM backup_history ${whereClause}
       ORDER BY ${sortColumn} ${sortOrder.toUpperCase()}
       LIMIT $${paramIndex++} OFFSET $${paramIndex}`,
      values
    );
    return {
      items: result.rows.map((row) => this.mapHistoryRow(row)),
      total,
    };
  }

  /** Returns a single history record by id, or null when absent. */
  async getBackupById(id: string): Promise<BackupHistory | null> {
    const result = await this.pool.query(
      `SELECT * FROM backup_history WHERE id = $1`,
      [id]
    );
    return result.rows.length > 0 ? this.mapHistoryRow(result.rows[0]) : null;
  }

  /** Returns completed backups for a schedule, newest first. */
  async getBackupsBySchedule(scheduleId: string): Promise<BackupHistory[]> {
    const result = await this.pool.query(
      `SELECT * FROM backup_history
       WHERE schedule_id = $1 AND status = 'completed'
       ORDER BY started_at DESC`,
      [scheduleId]
    );
    return result.rows.map((row) => this.mapHistoryRow(row));
  }

  /** Returns the most recent backup (any status) for a schedule, or null. */
  async getLastBackupForSchedule(scheduleId: string): Promise<BackupHistory | null> {
    const result = await this.pool.query(
      `SELECT * FROM backup_history
       WHERE schedule_id = $1
       ORDER BY started_at DESC
       LIMIT 1`,
      [scheduleId]
    );
    return result.rows.length > 0 ? this.mapHistoryRow(result.rows[0]) : null;
  }

  /**
   * Inserts a new history record in 'in_progress' state. The caller later
   * finalizes it with completeBackup() or failBackup().
   */
  async createBackupRecord(data: {
    scheduleId?: string | null;
    backupType: BackupType;
    filename: string;
    filePath: string;
    fileSizeBytes: number;
    createdBy?: string | null;
    metadata?: BackupMetadata;
  }): Promise<BackupHistory> {
    const result = await this.pool.query(
      `INSERT INTO backup_history
       (schedule_id, backup_type, filename, file_path, file_size_bytes, status, created_by, metadata)
       VALUES ($1, $2, $3, $4, $5, 'in_progress', $6, $7)
       RETURNING *`,
      [
        data.scheduleId || null,
        data.backupType,
        data.filename,
        data.filePath,
        data.fileSizeBytes,
        data.createdBy || null,
        JSON.stringify(data.metadata || {}),
      ]
    );
    return this.mapHistoryRow(result.rows[0]);
  }

  /**
   * Marks a backup as completed, storing final size/counts/metadata and a
   * completion timestamp. Returns null when the id does not exist.
   */
  async completeBackup(
    id: string,
    data: {
      fileSizeBytes: number;
      databaseTablesCount: number;
      documentsCount: number;
      metadata: BackupMetadata;
    }
  ): Promise<BackupHistory | null> {
    const result = await this.pool.query(
      `UPDATE backup_history SET
       status = 'completed',
       file_size_bytes = $1,
       database_tables_count = $2,
       documents_count = $3,
       metadata = $4,
       completed_at = NOW()
       WHERE id = $5
       RETURNING *`,
      [data.fileSizeBytes, data.databaseTablesCount, data.documentsCount, JSON.stringify(data.metadata), id]
    );
    return result.rows.length > 0 ? this.mapHistoryRow(result.rows[0]) : null;
  }

  /** Marks a backup as failed with the given error message. */
  async failBackup(id: string, errorMessage: string): Promise<BackupHistory | null> {
    const result = await this.pool.query(
      `UPDATE backup_history SET
       status = 'failed',
       error_message = $1,
       completed_at = NOW()
       WHERE id = $2
       RETURNING *`,
      [errorMessage, id]
    );
    return result.rows.length > 0 ? this.mapHistoryRow(result.rows[0]) : null;
  }

  /** Deletes a history record. Returns true if a row was removed. */
  async deleteBackupRecord(id: string): Promise<boolean> {
    const result = await this.pool.query(
      `DELETE FROM backup_history WHERE id = $1`,
      [id]
    );
    return (result.rowCount ?? 0) > 0;
  }

  /**
   * Returns completed backups for a schedule beyond the newest keepCount —
   * i.e. the candidates for retention deletion (OFFSET skips the keepers).
   */
  async getOldestBackupsForSchedule(
    scheduleId: string,
    keepCount: number
  ): Promise<BackupHistory[]> {
    const result = await this.pool.query(
      `SELECT * FROM backup_history
       WHERE schedule_id = $1 AND status = 'completed'
       ORDER BY started_at DESC
       OFFSET $2`,
      [scheduleId, keepCount]
    );
    return result.rows.map((row) => this.mapHistoryRow(row));
  }

  // ============================================
  // Settings Operations
  // ============================================

  /**
   * Reads all backup settings as a typed map. Missing rows fall back to
   * the defaults below; boolean settings are stored as 'true'/'false'
   * strings in setting_value.
   */
  async getSettings(): Promise<BackupSettingsMap> {
    const result = await this.pool.query(
      `SELECT setting_key, setting_value FROM backup_settings`
    );
    const settings: BackupSettingsMap = {
      emailOnSuccess: true,
      emailOnFailure: true,
      adminEmail: '',
      maxBackupSizeMb: 1024,
      compressionEnabled: true,
    };
    for (const row of result.rows) {
      switch (row.setting_key) {
        case 'email_on_success':
          settings.emailOnSuccess = row.setting_value === 'true';
          break;
        case 'email_on_failure':
          settings.emailOnFailure = row.setting_value === 'true';
          break;
        case 'admin_email':
          settings.adminEmail = row.setting_value;
          break;
        case 'max_backup_size_mb':
          settings.maxBackupSizeMb = parseInt(row.setting_value, 10);
          break;
        case 'compression_enabled':
          settings.compressionEnabled = row.setting_value === 'true';
          break;
      }
    }
    return settings;
  }

  /**
   * Persists the provided settings. NOTE(review): updates are issued one
   * UPDATE per key (not atomic), and an UPDATE on a key that has no
   * seeded row is a silent no-op — rows are assumed to exist from a
   * migration; confirm against the schema seed.
   */
  async updateSettings(updates: Partial<BackupSettingsMap>): Promise<void> {
    const keyMap: Record<keyof BackupSettingsMap, string> = {
      emailOnSuccess: 'email_on_success',
      emailOnFailure: 'email_on_failure',
      adminEmail: 'admin_email',
      maxBackupSizeMb: 'max_backup_size_mb',
      compressionEnabled: 'compression_enabled',
    };
    for (const [key, dbKey] of Object.entries(keyMap)) {
      const value = updates[key as keyof BackupSettingsMap];
      if (value !== undefined) {
        await this.pool.query(
          `UPDATE backup_settings SET setting_value = $1 WHERE setting_key = $2`,
          [String(value), dbKey]
        );
      }
    }
  }

  // ============================================
  // Helper Methods
  // ============================================

  /**
   * Computes the next run time for a frequency (server-local time):
   * hourly -> top of the next hour; daily -> tomorrow 03:00;
   * weekly -> next Sunday 03:00 (a full week if today is Sunday);
   * monthly -> 1st of next month 03:00.
   */
  private calculateNextRun(frequency: BackupFrequency): Date {
    const now = new Date();
    const next = new Date(now);
    switch (frequency) {
      case 'hourly':
        next.setHours(next.getHours() + 1);
        next.setMinutes(0, 0, 0);
        break;
      case 'daily':
        next.setDate(next.getDate() + 1);
        next.setHours(3, 0, 0, 0);
        break;
      case 'weekly': {
        // Braced block scopes the const to this case (no-case-declarations).
        const daysUntilSunday = (7 - next.getDay()) % 7 || 7;
        next.setDate(next.getDate() + daysUntilSunday);
        next.setHours(3, 0, 0, 0);
        break;
      }
      case 'monthly':
        next.setMonth(next.getMonth() + 1);
        next.setDate(1);
        next.setHours(3, 0, 0, 0);
        break;
    }
    return next;
  }
}

View File

@@ -0,0 +1,411 @@
/**
* @ai-summary Service for creating and extracting backup archives
* @ai-context Creates tar.gz archives containing database dump and documents
*/
import { exec } from 'child_process';
import { promisify } from 'util';
import * as fsp from 'fs/promises';
import * as path from 'path';
import * as crypto from 'crypto';
import * as tar from 'tar';
import { logger } from '../../../core/logging/logger';
import { appConfig } from '../../../core/config/config-loader';
import { BACKUP_CONFIG, BackupManifest } from './backup.types';
const execAsync = promisify(exec);
export interface DatabaseExportResult {
filename: string;
tablesCount: number;
sizeBytes: number;
checksum: string;
postgresqlVersion: string;
}
export interface DocumentsCopyResult {
totalFiles: number;
totalSizeBytes: number;
usersCount: number;
}
export interface ArchiveResult {
archivePath: string;
sizeBytes: number;
manifest: BackupManifest;
}
export class BackupArchiveService {
  private readonly storagePath: string;
  private readonly documentsPath: string;
  private readonly tempPath: string;

  constructor() {
    // All paths come from the shared backup configuration.
    this.storagePath = BACKUP_CONFIG.storagePath;
    this.documentsPath = BACKUP_CONFIG.documentsPath;
    this.tempPath = BACKUP_CONFIG.tempPath;
  }

  /**
   * Creates a complete backup archive: exports the database, optionally
   * copies documents, writes a manifest, and packs everything into a
   * tar.gz under the storage path. The temp working directory is always
   * cleaned up, even on failure.
   */
  async createArchive(
    backupId: string,
    options: {
      scheduleName?: string;
      backupType: 'scheduled' | 'manual';
      includeDocuments?: boolean;
      createdBy?: string;
    }
  ): Promise<ArchiveResult> {
    const workDir = path.join(this.tempPath, backupId);
    try {
      // Create working directory
      await fsp.mkdir(workDir, { recursive: true });
      await fsp.mkdir(path.join(workDir, 'database'), { recursive: true });
      // Export database
      logger.info('Exporting database for backup', { backupId });
      const dbResult = await this.exportDatabase(workDir);
      // Copy documents (skipped only when explicitly disabled)
      let docsResult: DocumentsCopyResult = {
        totalFiles: 0,
        totalSizeBytes: 0,
        usersCount: 0,
      };
      if (options.includeDocuments !== false) {
        logger.info('Copying documents for backup', { backupId });
        docsResult = await this.copyDocuments(workDir);
      }
      // Create manifest
      const manifest = await this.createManifest(
        workDir,
        dbResult,
        docsResult,
        options
      );
      // Create tar.gz archive; timestamp is filesystem-safe (no ':' or '.')
      const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
      const archiveFilename = `motovaultpro_backup_${timestamp}.tar.gz`;
      const archivePath = path.join(this.storagePath, archiveFilename);
      logger.info('Creating tar.gz archive', { backupId, archivePath });
      await this.createTarGz(workDir, archivePath);
      const archiveStats = await fsp.stat(archivePath);
      logger.info('Backup archive created successfully', {
        backupId,
        archivePath,
        sizeBytes: archiveStats.size,
      });
      return {
        archivePath,
        sizeBytes: archiveStats.size,
        manifest,
      };
    } finally {
      // Cleanup working directory
      await this.cleanupWorkDir(workDir);
    }
  }

  /**
   * Exports the PostgreSQL database to a plain SQL file via pg_dump and
   * returns its size, table count, and sha256 checksum.
   * Note: The SQL file is kept uncompressed - tar.gz handles compression.
   */
  async exportDatabase(workDir: string): Promise<DatabaseExportResult> {
    const databaseDir = path.join(workDir, 'database');
    const sqlFilename = 'motovaultpro.sql';
    const sqlPath = path.join(databaseDir, sqlFilename);
    // Database connection details from app config
    const dbConfig = appConfig.config.database;
    const dbPassword = appConfig.secrets.postgres_password;
    const dbHost = dbConfig.host;
    const dbPort = dbConfig.port.toString();
    const dbUser = dbConfig.user;
    const dbName = dbConfig.name;
    // PGPASSWORD keeps the password out of the command line / process list.
    const pgEnv = { ...process.env, PGPASSWORD: dbPassword };
    try {
      // Get PostgreSQL version. Connection values are config-sourced but
      // quoted defensively since they are interpolated into a shell command.
      const { stdout: versionOutput } = await execAsync(
        `psql -h "${dbHost}" -p "${dbPort}" -U "${dbUser}" -d "${dbName}" -t -c "SELECT version();"`,
        { env: pgEnv }
      );
      // "PostgreSQL 16.1 ..." -> "16.1"
      const postgresqlVersion = versionOutput.trim().split(' ')[1] || 'unknown';
      // Export database using pg_dump (plain-text format)
      await execAsync(
        `pg_dump -h "${dbHost}" -p "${dbPort}" -U "${dbUser}" -d "${dbName}" --format=plain -f "${sqlPath}"`,
        { env: pgEnv }
      );
      // Count user tables in the public schema
      const { stdout: tablesOutput } = await execAsync(
        `psql -h "${dbHost}" -p "${dbPort}" -U "${dbUser}" -d "${dbName}" -t -c "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = 'public' AND table_type = 'BASE TABLE';"`,
        { env: pgEnv }
      );
      const tablesCount = parseInt(tablesOutput.trim(), 10) || 0;
      // Calculate checksum of SQL file.
      // NOTE(review): this buffers the whole dump in memory; for very large
      // databases a streamed hash (fs.createReadStream -> hash.update) would
      // be preferable.
      const sqlContent = await fsp.readFile(sqlPath);
      const checksum = crypto.createHash('sha256').update(sqlContent).digest('hex');
      const stats = await fsp.stat(sqlPath);
      return {
        filename: sqlFilename,
        tablesCount,
        sizeBytes: stats.size,
        checksum: `sha256:${checksum}`,
        postgresqlVersion,
      };
    } catch (error) {
      logger.error('Database export failed', {
        error: error instanceof Error ? error.message : String(error),
      });
      throw new Error(`Database export failed: ${error instanceof Error ? error.message : String(error)}`);
    }
  }

  /**
   * Copies document files into the backup working directory, returning the
   * file count, total bytes, and number of distinct top-level (per-user)
   * directories. A missing documents root is not an error — it just yields
   * an empty result.
   */
  async copyDocuments(workDir: string): Promise<DocumentsCopyResult> {
    const docsDir = path.join(workDir, 'documents');
    await fsp.mkdir(docsDir, { recursive: true });
    let totalFiles = 0;
    let totalSizeBytes = 0;
    const userDirs = new Set<string>();
    try {
      // Check if documents directory exists
      const documentsExists = await fsp.access(this.documentsPath).then(() => true).catch(() => false);
      if (!documentsExists) {
        logger.info('Documents directory does not exist, skipping document backup');
        return { totalFiles: 0, totalSizeBytes: 0, usersCount: 0 };
      }
      // Recursively copy documents, tallying stats per copied file
      await this.copyDirRecursive(this.documentsPath, docsDir, {
        onFile: async (srcPath, _destPath) => {
          const stats = await fsp.stat(srcPath);
          totalFiles++;
          totalSizeBytes += stats.size;
          // First path segment under the documents root is treated as a
          // user directory (assumption: layout is <userId>/... — confirm).
          const relativePath = path.relative(this.documentsPath, srcPath);
          const userId = relativePath.split(path.sep)[0];
          if (userId) {
            userDirs.add(userId);
          }
        },
      });
      return {
        totalFiles,
        totalSizeBytes,
        usersCount: userDirs.size,
      };
    } catch (error) {
      logger.error('Documents copy failed', {
        error: error instanceof Error ? error.message : String(error),
      });
      throw new Error(`Documents copy failed: ${error instanceof Error ? error.message : String(error)}`);
    }
  }

  /**
   * Writes manifest.json into the working directory and returns the
   * manifest object describing the archive contents and provenance.
   */
  async createManifest(
    workDir: string,
    dbResult: DatabaseExportResult,
    docsResult: DocumentsCopyResult,
    options: {
      scheduleName?: string;
      backupType: 'scheduled' | 'manual';
      createdBy?: string;
    }
  ): Promise<BackupManifest> {
    const manifest: BackupManifest = {
      version: BACKUP_CONFIG.archiveVersion,
      createdAt: new Date().toISOString(),
      applicationVersion: process.env.npm_package_version || '1.0.0',
      backupType: options.backupType,
      scheduleName: options.scheduleName || null,
      contents: {
        database: {
          filename: dbResult.filename,
          tablesCount: dbResult.tablesCount,
          postgresqlVersion: dbResult.postgresqlVersion,
          sizeBytes: dbResult.sizeBytes,
          checksum: dbResult.checksum,
        },
        documents: {
          totalFiles: docsResult.totalFiles,
          totalSizeBytes: docsResult.totalSizeBytes,
          usersCount: docsResult.usersCount,
        },
      },
      metadata: {
        hostname: process.env.HOSTNAME || 'mvp-backend',
        createdBy: options.createdBy || 'system',
        compression: 'gzip',
        postgresqlVersion: dbResult.postgresqlVersion,
      },
    };
    const manifestPath = path.join(workDir, 'manifest.json');
    await fsp.writeFile(manifestPath, JSON.stringify(manifest, null, 2));
    return manifest;
  }

  /**
   * Creates a gzipped tar archive of the working directory's contents
   * (paths inside the archive are relative to sourceDir).
   */
  async createTarGz(sourceDir: string, destPath: string): Promise<void> {
    // Ensure destination directory exists
    await fsp.mkdir(path.dirname(destPath), { recursive: true });
    // Create tar.gz using the tar package
    await tar.create(
      {
        gzip: true,
        file: destPath,
        cwd: sourceDir,
      },
      ['.']
    );
  }

  /**
   * Extracts a backup archive into workDir and returns its parsed
   * manifest. Throws if manifest.json is missing or has no version.
   */
  async extractArchive(archivePath: string, workDir: string): Promise<BackupManifest> {
    await fsp.mkdir(workDir, { recursive: true });
    // Extract tar.gz
    await tar.extract({
      file: archivePath,
      cwd: workDir,
    });
    // Read and validate manifest
    const manifestPath = path.join(workDir, 'manifest.json');
    const manifestContent = await fsp.readFile(manifestPath, 'utf-8');
    const manifest = JSON.parse(manifestContent) as BackupManifest;
    // Validate manifest version
    if (!manifest.version) {
      throw new Error('Invalid backup manifest: missing version');
    }
    return manifest;
  }

  /**
   * Validates an archive by extracting it to a temp directory and checking
   * that the manifest parses and the referenced database dump exists.
   * (Note: this is a full extraction, not a lightweight scan.)
   */
  async validateArchive(archivePath: string): Promise<{
    valid: boolean;
    manifest?: BackupManifest;
    error?: string;
  }> {
    const tempDir = path.join(this.tempPath, `validate-${Date.now()}`);
    try {
      const manifest = await this.extractArchive(archivePath, tempDir);
      // Verify required files exist
      const dbPath = path.join(tempDir, 'database', manifest.contents.database.filename);
      await fsp.access(dbPath);
      return { valid: true, manifest };
    } catch (error) {
      return {
        valid: false,
        error: error instanceof Error ? error.message : String(error),
      };
    } finally {
      await this.cleanupWorkDir(tempDir);
    }
  }

  /**
   * Gets the manifest from an archive by extracting only manifest.json.
   */
  async getManifestFromArchive(archivePath: string): Promise<BackupManifest> {
    const tempDir = path.join(this.tempPath, `manifest-${Date.now()}`);
    try {
      // Bug fix: tar.extract requires cwd to exist; previously this temp
      // directory was never created, so extraction failed with ENOENT.
      await fsp.mkdir(tempDir, { recursive: true });
      // Extract only manifest.json
      await tar.extract({
        file: archivePath,
        cwd: tempDir,
        filter: (p) => p === 'manifest.json' || p === './manifest.json',
      });
      const manifestPath = path.join(tempDir, 'manifest.json');
      const manifestContent = await fsp.readFile(manifestPath, 'utf-8');
      return JSON.parse(manifestContent) as BackupManifest;
    } finally {
      await this.cleanupWorkDir(tempDir);
    }
  }

  /**
   * Recursively copies a directory tree, invoking the optional onFile
   * callback after each file copy (used by copyDocuments for stats).
   */
  private async copyDirRecursive(
    src: string,
    dest: string,
    options?: {
      onFile?: (srcPath: string, destPath: string) => Promise<void>;
    }
  ): Promise<void> {
    const entries = await fsp.readdir(src, { withFileTypes: true });
    for (const entry of entries) {
      const srcPath = path.join(src, entry.name);
      const destPath = path.join(dest, entry.name);
      if (entry.isDirectory()) {
        await fsp.mkdir(destPath, { recursive: true });
        await this.copyDirRecursive(srcPath, destPath, options);
      } else {
        await fsp.copyFile(srcPath, destPath);
        if (options?.onFile) {
          await options.onFile(srcPath, destPath);
        }
      }
    }
  }

  /**
   * Removes a working directory, logging (not throwing) on failure —
   * cleanup is best-effort and must not mask the primary operation's result.
   */
  private async cleanupWorkDir(workDir: string): Promise<void> {
    try {
      await fsp.rm(workDir, { recursive: true, force: true });
    } catch (error) {
      logger.warn('Failed to cleanup work directory', {
        workDir,
        error: error instanceof Error ? error.message : String(error),
      });
    }
  }
}

View File

@@ -0,0 +1,306 @@
/**
* @ai-summary Service for restoring backups
* @ai-context Handles preview and execution of backup restoration with safety backup
*/
import { exec } from 'child_process';
import { promisify } from 'util';
import * as fsp from 'fs/promises';
import * as path from 'path';
import * as crypto from 'crypto';
import { Pool } from 'pg';
import { logger } from '../../../core/logging/logger';
import { appConfig } from '../../../core/config/config-loader';
import { BackupRepository } from '../data/backup.repository';
import { BackupArchiveService } from './backup-archive.service';
import { BackupService } from './backup.service';
import {
BACKUP_CONFIG,
BackupManifest,
RestoreOptions,
RestoreResult,
RestorePreviewResponse,
} from './backup.types';
const execAsync = promisify(exec);
export class BackupRestoreService {
  private repository: BackupRepository;
  private archiveService: BackupArchiveService;
  private backupService: BackupService;
  // In-process guard only: prevents concurrent restores within this
  // instance, not across multiple backend processes.
  private restoreInProgress: boolean = false;

  constructor(pool: Pool) {
    this.repository = new BackupRepository(pool);
    this.archiveService = new BackupArchiveService();
    this.backupService = new BackupService(pool);
  }

  /**
   * Previews what will be restored (dry run): validates the archive,
   * returns its manifest, version-mismatch warnings, and a rough duration
   * estimate. Throws for unknown, incomplete, or unreadable backups.
   */
  async previewRestore(backupId: string): Promise<RestorePreviewResponse> {
    const backup = await this.repository.getBackupById(backupId);
    if (!backup) {
      throw new Error('Backup not found');
    }
    if (backup.status !== 'completed') {
      throw new Error('Cannot restore from incomplete backup');
    }
    // Older records may carry the path in metadata rather than file_path.
    const filePath = (backup.metadata as any)?.archivePath || backup.filePath;
    if (!filePath) {
      throw new Error('Backup file path not found');
    }
    // Validate archive and get manifest
    const validation = await this.archiveService.validateArchive(filePath);
    if (!validation.valid) {
      throw new Error(`Invalid backup archive: ${validation.error}`);
    }
    const manifest = validation.manifest!;
    const warnings: string[] = [];
    // Check for potential issues
    if (manifest.version !== BACKUP_CONFIG.archiveVersion) {
      warnings.push(`Backup version (${manifest.version}) differs from current (${BACKUP_CONFIG.archiveVersion})`);
    }
    // Estimate duration based on file sizes
    const totalBytes = backup.fileSizeBytes;
    const estimatedSeconds = Math.max(30, Math.ceil(totalBytes / (10 * 1024 * 1024))); // ~10MB/s
    const estimatedDuration = this.formatDuration(estimatedSeconds);
    return {
      backupId,
      manifest,
      warnings,
      estimatedDuration,
    };
  }

  /**
   * Executes a backup restoration: optionally takes a safety backup first,
   * extracts the archive, then restores database and documents. Only one
   * restore may run at a time per process. Never throws for operational
   * failures — returns { success: false, error } instead (but the safety
   * backup id is always surfaced so a failed restore can be rolled back).
   */
  async executeRestore(options: RestoreOptions): Promise<RestoreResult> {
    if (this.restoreInProgress) {
      throw new Error('A restore operation is already in progress');
    }
    this.restoreInProgress = true;
    let safetyBackupId: string | undefined;
    try {
      const backup = await this.repository.getBackupById(options.backupId);
      if (!backup) {
        throw new Error('Backup not found');
      }
      if (backup.status !== 'completed') {
        throw new Error('Cannot restore from incomplete backup');
      }
      const filePath = (backup.metadata as any)?.archivePath || backup.filePath;
      if (!filePath) {
        throw new Error('Backup file path not found');
      }
      logger.info('Starting restore operation', {
        backupId: options.backupId,
        createSafetyBackup: options.createSafetyBackup,
      });
      // Create safety backup before restore (default: on)
      if (options.createSafetyBackup !== false) {
        logger.info('Creating safety backup before restore');
        const safetyResult = await this.backupService.createBackup({
          name: `Pre-restore safety backup`,
          backupType: 'manual',
          includeDocuments: true,
        });
        if (!safetyResult.success) {
          throw new Error(`Failed to create safety backup: ${safetyResult.error}`);
        }
        safetyBackupId = safetyResult.backupId;
        logger.info('Safety backup created', { safetyBackupId });
      }
      // Extract archive to temp directory
      const workDir = path.join(BACKUP_CONFIG.tempPath, `restore-${options.backupId}`);
      await fsp.mkdir(workDir, { recursive: true });
      try {
        logger.info('Extracting backup archive');
        const manifest = await this.archiveService.extractArchive(filePath, workDir);
        // Restore database
        logger.info('Restoring database');
        await this.restoreDatabase(workDir, manifest);
        // Restore documents
        logger.info('Restoring documents');
        await this.restoreDocuments(workDir, manifest);
        logger.info('Restore completed successfully', {
          backupId: options.backupId,
          safetyBackupId,
        });
        return {
          success: true,
          safetyBackupId,
        };
      } finally {
        // Cleanup work directory (best-effort)
        await fsp.rm(workDir, { recursive: true, force: true }).catch(() => {});
      }
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      logger.error('Restore failed', {
        backupId: options.backupId,
        error: errorMessage,
        safetyBackupId,
      });
      return {
        success: false,
        safetyBackupId,
        error: errorMessage,
      };
    } finally {
      this.restoreInProgress = false;
    }
  }

  /**
   * Gets the current restore status (per-process flag).
   */
  getRestoreStatus(): { inProgress: boolean } {
    return { inProgress: this.restoreInProgress };
  }

  /**
   * Restores the database from the extracted backup: verifies the dump's
   * sha256 checksum against the manifest, terminates other connections to
   * the target database, then replays the dump with psql.
   */
  private async restoreDatabase(workDir: string, manifest: BackupManifest): Promise<void> {
    const dbFilename = manifest.contents.database.filename;
    const sqlPath = path.join(workDir, 'database', dbFilename);
    // Verify the SQL file exists
    await fsp.access(sqlPath);
    // Verify checksum
    const sqlContent = await fsp.readFile(sqlPath);
    const actualChecksum = `sha256:${crypto.createHash('sha256').update(sqlContent).digest('hex')}`;
    if (actualChecksum !== manifest.contents.database.checksum) {
      throw new Error('Database checksum mismatch - backup may be corrupted');
    }
    // Database connection details from app config
    const dbConfig = appConfig.config.database;
    const dbPassword = appConfig.secrets.postgres_password;
    const dbHost = dbConfig.host;
    const dbPort = dbConfig.port.toString();
    const dbUser = dbConfig.user;
    const dbName = dbConfig.name;
    // PGPASSWORD keeps the password off the command line / process list.
    const pgEnv = { ...process.env, PGPASSWORD: dbPassword };
    try {
      // Drop existing connections (except our own psql backend). The db
      // name is embedded in a SQL string literal, so escape single quotes;
      // shell-level values are quoted as they come from external config.
      const sqlSafeDbName = dbName.replace(/'/g, "''");
      await execAsync(
        `psql -h "${dbHost}" -p "${dbPort}" -U "${dbUser}" -d postgres -c "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = '${sqlSafeDbName}' AND pid <> pg_backend_pid();"`,
        { env: pgEnv }
      );
      // Restore the database using psql
      await execAsync(
        `psql -h "${dbHost}" -p "${dbPort}" -U "${dbUser}" -d "${dbName}" -f "${sqlPath}"`,
        { env: pgEnv }
      );
      logger.info('Database restored successfully', {
        tablesCount: manifest.contents.database.tablesCount,
      });
    } catch (error) {
      throw new Error(`Database restoration failed: ${error instanceof Error ? error.message : String(error)}`);
    }
  }

  /**
   * Restores documents from the extracted backup. This is a full restore:
   * existing documents are deleted before the backup's copies are written
   * (the pre-restore safety backup is the recovery path if this fails).
   */
  private async restoreDocuments(workDir: string, manifest: BackupManifest): Promise<void> {
    const docsBackupPath = path.join(workDir, 'documents');
    const docsTargetPath = BACKUP_CONFIG.documentsPath;
    // Check if documents were included in backup
    const docsExist = await fsp.access(docsBackupPath).then(() => true).catch(() => false);
    if (!docsExist || manifest.contents.documents.totalFiles === 0) {
      logger.info('No documents to restore');
      return;
    }
    try {
      // Ensure target directory exists
      await fsp.mkdir(docsTargetPath, { recursive: true });
      // Clear existing documents (we're doing a full restore)
      const existingEntries = await fsp.readdir(docsTargetPath);
      for (const entry of existingEntries) {
        await fsp.rm(path.join(docsTargetPath, entry), { recursive: true, force: true });
      }
      // Copy documents from backup
      await this.copyDirRecursive(docsBackupPath, docsTargetPath);
      logger.info('Documents restored successfully', {
        totalFiles: manifest.contents.documents.totalFiles,
        usersCount: manifest.contents.documents.usersCount,
      });
    } catch (error) {
      throw new Error(`Documents restoration failed: ${error instanceof Error ? error.message : String(error)}`);
    }
  }

  /**
   * Recursively copies a directory tree from src into dest.
   */
  private async copyDirRecursive(src: string, dest: string): Promise<void> {
    const entries = await fsp.readdir(src, { withFileTypes: true });
    for (const entry of entries) {
      const srcPath = path.join(src, entry.name);
      const destPath = path.join(dest, entry.name);
      if (entry.isDirectory()) {
        await fsp.mkdir(destPath, { recursive: true });
        await this.copyDirRecursive(srcPath, destPath);
      } else {
        await fsp.copyFile(srcPath, destPath);
      }
    }
  }

  /**
   * Formats a duration in whole seconds as human-readable text
   * (e.g. "45 seconds", "2 minutes", "1 minute 30 seconds").
   */
  private formatDuration(seconds: number): string {
    if (seconds < 60) {
      return `${seconds} seconds`;
    }
    const minutes = Math.floor(seconds / 60);
    const remainingSeconds = seconds % 60;
    if (remainingSeconds === 0) {
      return `${minutes} minute${minutes > 1 ? 's' : ''}`;
    }
    return `${minutes} minute${minutes > 1 ? 's' : ''} ${remainingSeconds} seconds`;
  }
}

View File

@@ -0,0 +1,274 @@
/**
* @ai-summary Service for enforcing backup retention policies
* @ai-context Deletes old backups based on schedule retention counts
*/
import * as fsp from 'fs/promises';
import { Pool } from 'pg';
import { logger } from '../../../core/logging/logger';
import { BackupRepository } from '../data/backup.repository';
import {
RetentionCleanupResult,
RetentionCleanupJobResult,
} from './backup.types';
export class BackupRetentionService {
private repository: BackupRepository;
// Wires the retention service to the shared pg pool via the repository.
constructor(pool: Pool) {
this.repository = new BackupRepository(pool);
}
/**
 * Runs retention cleanup for every schedule, then sweeps orphaned backups
 * left behind by deleted schedules. Failures for one schedule are recorded
 * and do not abort the rest of the job; the aggregate totals and per-item
 * errors are returned to the caller.
 */
async processRetentionCleanup(): Promise<RetentionCleanupJobResult> {
  logger.info('Starting backup retention cleanup');
  const schedules = await this.repository.listSchedules();

  const outcomes: RetentionCleanupResult[] = [];
  const failures: Array<{ scheduleId: string; error: string }> = [];
  let deletedSoFar = 0;
  let freedSoFar = 0;

  // Normalizes unknown thrown values into a message string.
  const describe = (err: unknown): string =>
    err instanceof Error ? err.message : String(err);

  for (const schedule of schedules) {
    try {
      const outcome = await this.cleanupScheduleBackups(
        schedule.id,
        schedule.name,
        schedule.retentionCount
      );
      outcomes.push(outcome);
      deletedSoFar += outcome.deletedCount;
      freedSoFar += outcome.freedBytes;
    } catch (error) {
      const errorMessage = describe(error);
      logger.error('Retention cleanup failed for schedule', {
        scheduleId: schedule.id,
        scheduleName: schedule.name,
        error: errorMessage,
      });
      failures.push({ scheduleId: schedule.id, error: errorMessage });
    }
  }

  // Sweep backups whose parent schedule no longer exists; only report the
  // sweep when it actually removed something.
  try {
    const orphanOutcome = await this.cleanupOrphanedBackups();
    if (orphanOutcome.deletedCount > 0) {
      outcomes.push(orphanOutcome);
      deletedSoFar += orphanOutcome.deletedCount;
      freedSoFar += orphanOutcome.freedBytes;
    }
  } catch (error) {
    const errorMessage = describe(error);
    logger.error('Orphaned backup cleanup failed', { error: errorMessage });
    failures.push({ scheduleId: 'orphaned', error: errorMessage });
  }

  logger.info('Backup retention cleanup completed', {
    processed: schedules.length,
    totalDeleted: deletedSoFar,
    totalFreedBytes: freedSoFar,
    errors: failures.length,
  });

  return {
    processed: schedules.length,
    totalDeleted: deletedSoFar,
    totalFreedBytes: freedSoFar,
    results: outcomes,
    errors: failures,
  };
}
/**
* Cleans up old backups for a specific schedule
*/
async cleanupScheduleBackups(
scheduleId: string,
scheduleName: string,
retentionCount: number
): Promise<RetentionCleanupResult> {
logger.debug('Processing retention for schedule', {
scheduleId,
scheduleName,
retentionCount,
});
// Get backups that exceed retention count
const backupsToDelete = await this.repository.getOldestBackupsForSchedule(
scheduleId,
retentionCount
);
let deletedCount = 0;
let freedBytes = 0;
const completedBackups = await this.repository.getBackupsBySchedule(scheduleId);
const retainedCount = Math.min(completedBackups.length, retentionCount);
for (const backup of backupsToDelete) {
try {
// Delete the file
const filePath = (backup.metadata as any)?.archivePath || backup.filePath;
if (filePath) {
try {
const stats = await fsp.stat(filePath);
freedBytes += stats.size;
await fsp.unlink(filePath);
} catch (error) {
// File might not exist, continue
logger.warn('Failed to delete backup file during retention cleanup', {
backupId: backup.id,
filePath,
});
}
}
// Delete the database record
await this.repository.deleteBackupRecord(backup.id);
deletedCount++;
logger.debug('Deleted old backup during retention cleanup', {
backupId: backup.id,
scheduleId,
startedAt: backup.startedAt,
});
} catch (error) {
logger.error('Failed to delete backup during retention cleanup', {
backupId: backup.id,
error: error instanceof Error ? error.message : String(error),
});
}
}
if (deletedCount > 0) {
logger.info('Retention cleanup completed for schedule', {
scheduleId,
scheduleName,
deletedCount,
retainedCount,
freedBytes,
});
}
return {
scheduleId,
scheduleName,
deletedCount,
retainedCount,
freedBytes,
};
}
/**
* Cleans up failed/incomplete backups older than 24 hours
*/
async cleanupFailedBackups(): Promise<number> {
const { items } = await this.repository.listBackups({
status: 'failed',
pageSize: 100,
});
const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
let deletedCount = 0;
for (const backup of items) {
if (backup.startedAt < oneDayAgo) {
try {
// Try to delete file if it exists
const filePath = (backup.metadata as any)?.archivePath || backup.filePath;
if (filePath) {
await fsp.unlink(filePath).catch(() => {});
}
await this.repository.deleteBackupRecord(backup.id);
deletedCount++;
} catch (error) {
logger.warn('Failed to cleanup failed backup', {
backupId: backup.id,
error: error instanceof Error ? error.message : String(error),
});
}
}
}
if (deletedCount > 0) {
logger.info('Cleaned up failed backups', { deletedCount });
}
return deletedCount;
}
/**
* Cleans up orphaned backups (from deleted schedules)
* Keeps manual backups indefinitely
*/
private async cleanupOrphanedBackups(): Promise<RetentionCleanupResult> {
const { items } = await this.repository.listBackups({
backupType: 'scheduled',
pageSize: 1000,
});
// Get all valid schedule IDs
const schedules = await this.repository.listSchedules();
const validScheduleIds = new Set(schedules.map(s => s.id));
// Find orphaned scheduled backups (schedule was deleted)
const orphanedBackups = items.filter(
backup => backup.scheduleId && !validScheduleIds.has(backup.scheduleId)
);
// Keep only the most recent 5 orphaned backups per deleted schedule
const orphansBySchedule = new Map<string, typeof orphanedBackups>();
for (const backup of orphanedBackups) {
const scheduleId = backup.scheduleId!;
if (!orphansBySchedule.has(scheduleId)) {
orphansBySchedule.set(scheduleId, []);
}
orphansBySchedule.get(scheduleId)!.push(backup);
}
let deletedCount = 0;
let freedBytes = 0;
let retainedCount = 0;
for (const [_scheduleId, backups] of orphansBySchedule) {
// Sort by date descending and keep first 5
backups.sort((a, b) => b.startedAt.getTime() - a.startedAt.getTime());
const toDelete = backups.slice(5);
retainedCount += Math.min(backups.length, 5);
for (const backup of toDelete) {
try {
const filePath = (backup.metadata as any)?.archivePath || backup.filePath;
if (filePath) {
try {
const stats = await fsp.stat(filePath);
freedBytes += stats.size;
await fsp.unlink(filePath);
} catch {
// File might not exist
}
}
await this.repository.deleteBackupRecord(backup.id);
deletedCount++;
} catch (error) {
logger.warn('Failed to delete orphaned backup', {
backupId: backup.id,
error: error instanceof Error ? error.message : String(error),
});
}
}
}
return {
scheduleId: 'orphaned',
scheduleName: 'Orphaned Backups',
deletedCount,
retainedCount,
freedBytes,
};
}
}

View File

@@ -0,0 +1,446 @@
/**
* @ai-summary Core backup service for create, list, download, delete operations
* @ai-context Orchestrates backup operations and sends notifications
*/
import * as fs from 'fs';
import * as fsp from 'fs/promises';
import * as path from 'path';
import { Pool } from 'pg';
import { logger } from '../../../core/logging/logger';
import { BackupRepository } from '../data/backup.repository';
import { BackupArchiveService } from './backup-archive.service';
import {
BACKUP_CONFIG,
BackupHistory,
BackupSchedule,
BackupSettingsMap,
CreateBackupOptions,
BackupResult,
ListBackupsParams,
ListBackupsResponse,
BackupFrequency,
ScheduleResponse,
} from './backup.types';
// Core backup orchestration: create/list/download/delete backups, manage
// schedules and settings, and (eventually) send notifications.
export class BackupService {
  // Data access for backup_history / backup_schedules / backup_settings.
  private repository: BackupRepository;
  // Builds and validates the on-disk backup archives.
  private archiveService: BackupArchiveService;
  constructor(pool: Pool) {
    this.repository = new BackupRepository(pool);
    this.archiveService = new BackupArchiveService();
  }
  // ============================================
  // Backup Operations
  // ============================================
  /**
   * Creates a new backup.
   *
   * Two-phase: a history record is created first (presumably 'in_progress' —
   * see BackupRepository.createBackupRecord), then completed or failed after
   * the archive build. Never throws for archive failures; returns
   * { success: false, error } instead, and sends a notification either way.
   */
  async createBackup(options: CreateBackupOptions): Promise<BackupResult> {
    // Filesystem-safe timestamp, e.g. "2025-12-25T10-50-09".
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
    const tempFilename = `backup_${timestamp}`;
    // Create initial backup record (filePath/size filled in on completion).
    const backupRecord = await this.repository.createBackupRecord({
      scheduleId: options.scheduleId,
      backupType: options.backupType,
      filename: tempFilename,
      filePath: '',
      fileSizeBytes: 0,
      createdBy: options.createdBy,
      metadata: { name: options.name },
    });
    logger.info('Starting backup creation', {
      backupId: backupRecord.id,
      backupType: options.backupType,
      scheduleName: options.name,
    });
    try {
      // Get schedule name if applicable
      let scheduleName: string | undefined;
      if (options.scheduleId) {
        const schedule = await this.repository.getScheduleById(options.scheduleId);
        scheduleName = schedule?.name;
      }
      // Create the archive
      const result = await this.archiveService.createArchive(backupRecord.id, {
        scheduleName: scheduleName || options.name,
        backupType: options.backupType,
        includeDocuments: options.includeDocuments,
        createdBy: options.createdBy || undefined,
      });
      // Update the backup record with final details
      const filename = path.basename(result.archivePath);
      await this.repository.completeBackup(backupRecord.id, {
        fileSizeBytes: result.sizeBytes,
        databaseTablesCount: result.manifest.contents.database.tablesCount,
        documentsCount: result.manifest.contents.documents.totalFiles,
        metadata: {
          ...result.manifest.metadata,
          filename,
          // archivePath is the canonical file location; readers prefer it
          // over the filePath column (see getBackupFilePath/deleteBackup).
          archivePath: result.archivePath,
        },
      });
      logger.info('Backup created successfully', {
        backupId: backupRecord.id,
        filename,
        sizeBytes: result.sizeBytes,
      });
      // Send success notification
      await this.sendNotification('success', backupRecord.id);
      return {
        success: true,
        backupId: backupRecord.id,
        filePath: result.archivePath,
      };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      logger.error('Backup creation failed', {
        backupId: backupRecord.id,
        error: errorMessage,
      });
      // Update record with failure
      // NOTE(review): if failBackup itself throws, it masks the original
      // archive error — consider wrapping it. Confirm failure modes.
      await this.repository.failBackup(backupRecord.id, errorMessage);
      // Send failure notification
      await this.sendNotification('failure', backupRecord.id, errorMessage);
      return {
        success: false,
        backupId: backupRecord.id,
        error: errorMessage,
      };
    }
  }
  /**
   * Lists backups with pagination.
   * Defaults: page 1, pageSize 20.
   */
  async listBackups(params: ListBackupsParams): Promise<ListBackupsResponse> {
    const page = params.page || 1;
    const pageSize = params.pageSize || 20;
    const { items, total } = await this.repository.listBackups(params);
    return {
      items,
      total,
      page,
      pageSize,
      totalPages: Math.ceil(total / pageSize),
    };
  }
  /**
   * Gets a single backup by ID, or null if not found.
   */
  async getBackup(id: string): Promise<BackupHistory | null> {
    return this.repository.getBackupById(id);
  }
  /**
   * Gets the file path for downloading a backup.
   *
   * Returns null unless the backup exists, is 'completed', has a recorded
   * path (metadata.archivePath preferred over the filePath column), and the
   * file is actually accessible on disk.
   */
  async getBackupFilePath(id: string): Promise<string | null> {
    const backup = await this.repository.getBackupById(id);
    if (!backup || backup.status !== 'completed') {
      return null;
    }
    const filePath = (backup.metadata as any)?.archivePath || backup.filePath;
    if (!filePath) {
      return null;
    }
    // Verify file exists
    try {
      await fsp.access(filePath);
      return filePath;
    } catch {
      return null;
    }
  }
  /**
   * Creates a readable stream for downloading a backup.
   *
   * Returns null when the backup or its file is unavailable (same rules as
   * getBackupFilePath). The caller is responsible for consuming/closing the
   * returned stream.
   */
  async createDownloadStream(id: string): Promise<{
    stream: fs.ReadStream;
    filename: string;
    size: number;
  } | null> {
    const filePath = await this.getBackupFilePath(id);
    if (!filePath) {
      return null;
    }
    const backup = await this.repository.getBackupById(id);
    if (!backup) {
      return null;
    }
    const stats = await fsp.stat(filePath);
    const filename = path.basename(filePath);
    return {
      stream: fs.createReadStream(filePath),
      filename,
      size: stats.size,
    };
  }
  /**
   * Deletes a backup (file and record).
   *
   * A missing/undeletable file is logged and does not block deletion of the
   * database record. Returns false only when the backup does not exist
   * (or the repository reports the record delete failed).
   */
  async deleteBackup(id: string): Promise<boolean> {
    const backup = await this.repository.getBackupById(id);
    if (!backup) {
      return false;
    }
    // Delete the file if it exists
    const filePath = (backup.metadata as any)?.archivePath || backup.filePath;
    if (filePath) {
      try {
        await fsp.unlink(filePath);
        logger.info('Backup file deleted', { backupId: id, filePath });
      } catch (error) {
        // File might not exist, continue with record deletion
        logger.warn('Failed to delete backup file', {
          backupId: id,
          filePath,
          error: error instanceof Error ? error.message : String(error),
        });
      }
    }
    // Delete the database record
    return this.repository.deleteBackupRecord(id);
  }
  /**
   * Imports an uploaded backup file.
   *
   * Validates the archive, moves it into backup storage, and records it as a
   * completed 'manual' backup. Throws on an invalid archive.
   *
   * @param filePath - temp path of the uploaded file (removed after import)
   * @param originalFilename - client-supplied name, preserved in metadata
   * @param createdBy - optional user identifier for audit
   */
  async importUploadedBackup(
    filePath: string,
    originalFilename: string,
    createdBy?: string
  ): Promise<BackupHistory> {
    // Validate the archive
    const validation = await this.archiveService.validateArchive(filePath);
    if (!validation.valid) {
      throw new Error(`Invalid backup archive: ${validation.error}`);
    }
    const manifest = validation.manifest!;
    const stats = await fsp.stat(filePath);
    // Move file to backup storage
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
    const destFilename = `uploaded_${timestamp}_${path.basename(originalFilename)}`;
    const destPath = path.join(BACKUP_CONFIG.storagePath, destFilename);
    await fsp.mkdir(BACKUP_CONFIG.storagePath, { recursive: true });
    // copy + unlink (not rename) — works even when the upload temp dir and
    // backup storage live on different filesystems.
    await fsp.copyFile(filePath, destPath);
    await fsp.unlink(filePath);
    // Create backup record
    const record = await this.repository.createBackupRecord({
      scheduleId: null,
      backupType: 'manual',
      filename: destFilename,
      filePath: destPath,
      fileSizeBytes: stats.size,
      createdBy,
      metadata: {
        originalFilename,
        imported: true,
        manifestVersion: manifest.version,
        originalCreatedAt: manifest.createdAt,
      },
    });
    // Complete the record immediately since it's already a valid backup.
    // NOTE(review): the `!` assumes completeBackup always finds the record
    // just created — a null here would surface as a TypeError downstream.
    return (await this.repository.completeBackup(record.id, {
      fileSizeBytes: stats.size,
      databaseTablesCount: manifest.contents.database.tablesCount,
      documentsCount: manifest.contents.documents.totalFiles,
      metadata: {
        archivePath: destPath,
        originalFilename,
        imported: true,
      },
    }))!;
  }
  // ============================================
  // Schedule Operations
  // ============================================
  /**
   * Lists all backup schedules with last backup info.
   * NOTE(review): issues one extra query per schedule (N+1) — acceptable for
   * small schedule counts; confirm before schedules grow large.
   */
  async listSchedules(): Promise<ScheduleResponse[]> {
    const schedules = await this.repository.listSchedules();
    const result: ScheduleResponse[] = [];
    for (const schedule of schedules) {
      const lastBackup = await this.repository.getLastBackupForSchedule(schedule.id);
      result.push({
        ...schedule,
        lastBackup: lastBackup || undefined,
      });
    }
    return result;
  }
  /**
   * Gets a single schedule by ID (with its last backup), or null if absent.
   */
  async getSchedule(id: string): Promise<ScheduleResponse | null> {
    const schedule = await this.repository.getScheduleById(id);
    if (!schedule) {
      return null;
    }
    const lastBackup = await this.repository.getLastBackupForSchedule(id);
    return {
      ...schedule,
      lastBackup: lastBackup || undefined,
    };
  }
  /**
   * Creates a new backup schedule.
   * Defaults for retentionCount/isEnabled are applied by the repository.
   */
  async createSchedule(
    name: string,
    frequency: BackupFrequency,
    retentionCount?: number,
    isEnabled?: boolean
  ): Promise<BackupSchedule> {
    return this.repository.createSchedule(
      name,
      frequency,
      retentionCount,
      isEnabled
    );
  }
  /**
   * Updates a backup schedule. Returns null if the schedule does not exist.
   */
  async updateSchedule(
    id: string,
    updates: Partial<{
      name: string;
      frequency: BackupFrequency;
      retentionCount: number;
      isEnabled: boolean;
    }>
  ): Promise<BackupSchedule | null> {
    return this.repository.updateSchedule(id, updates);
  }
  /**
   * Deletes a backup schedule. Existing backups keep their history rows
   * (schedule_id is nulled by the DB's ON DELETE SET NULL).
   */
  async deleteSchedule(id: string): Promise<boolean> {
    return this.repository.deleteSchedule(id);
  }
  /**
   * Toggles a schedule's enabled state. Returns null if the schedule
   * does not exist.
   */
  async toggleSchedule(id: string): Promise<BackupSchedule | null> {
    const schedule = await this.repository.getScheduleById(id);
    if (!schedule) {
      return null;
    }
    return this.repository.updateSchedule(id, { isEnabled: !schedule.isEnabled });
  }
  // ============================================
  // Settings Operations
  // ============================================
  /**
   * Gets backup settings.
   */
  async getSettings(): Promise<BackupSettingsMap> {
    return this.repository.getSettings();
  }
  /**
   * Updates backup settings and returns the full, refreshed settings map.
   */
  async updateSettings(updates: Partial<BackupSettingsMap>): Promise<BackupSettingsMap> {
    await this.repository.updateSettings(updates);
    return this.repository.getSettings();
  }
  // ============================================
  // Notification Helpers
  // ============================================
  /**
   * Sends backup notification email.
   *
   * Currently log-only (see TODO below). Never throws: notification errors
   * are logged and swallowed so they cannot fail the backup itself.
   */
  private async sendNotification(
    type: 'success' | 'failure',
    backupId: string,
    errorMessage?: string
  ): Promise<void> {
    try {
      const settings = await this.repository.getSettings();
      // Check if notifications are enabled
      if (type === 'success' && !settings.emailOnSuccess) {
        return;
      }
      if (type === 'failure' && !settings.emailOnFailure) {
        return;
      }
      // Check if admin email is configured
      if (!settings.adminEmail) {
        logger.debug('Backup notification skipped: no admin email configured');
        return;
      }
      const backup = await this.repository.getBackupById(backupId);
      if (!backup) {
        return;
      }
      // TODO: Integrate with email service when implementing notifications
      // For now, just log the notification
      logger.info('Backup notification would be sent', {
        type,
        backupId,
        adminEmail: settings.adminEmail,
        errorMessage,
      });
    } catch (error) {
      logger.error('Failed to send backup notification', {
        type,
        backupId,
        error: error instanceof Error ? error.message : String(error),
      });
    }
  }
}

View File

@@ -0,0 +1,269 @@
/**
* @ai-summary Core TypeScript types for the backup feature
* @ai-context Defines interfaces for backup schedules, history, settings, and operations
*/
// ============================================
// Configuration Constants
// ============================================
// Storage locations and archive format for the backup feature.
export const BACKUP_CONFIG = {
  // Overridable via env; default targets the container's data volume.
  storagePath: process.env.BACKUP_STORAGE_PATH || '/app/data/backups',
  // NOTE(review): hard-coded container path — confirm it matches deployment.
  documentsPath: '/app/data/documents',
  // Scratch space used while assembling an archive.
  tempPath: '/tmp/backup-work',
  maxFileSizeMB: 1024,
  // Version stamped into each archive's manifest.
  archiveVersion: '1.0.0',
} as const;
// Cron expressions for each BackupFrequency.
// Off-peak times: 03:00 for daily/weekly/monthly; top of the hour for hourly.
export const CRON_EXPRESSIONS = {
  hourly: '0 * * * *',
  daily: '0 3 * * *',
  weekly: '0 3 * * 0',
  monthly: '0 3 1 * *',
} as const;
// Default number of backups retained per frequency (roughly one day, one
// week, one month, one year of coverage respectively).
export const DEFAULT_RETENTION = {
  hourly: 24,
  daily: 7,
  weekly: 4,
  monthly: 12,
} as const;
// ============================================
// Enums and Union Types
// ============================================
// How often a schedule fires; keys of CRON_EXPRESSIONS / DEFAULT_RETENTION.
export type BackupFrequency = 'hourly' | 'daily' | 'weekly' | 'monthly';
// Whether a backup was produced by a schedule or triggered/uploaded manually.
export type BackupType = 'scheduled' | 'manual';
// Lifecycle of a backup_history row.
export type BackupStatus = 'in_progress' | 'completed' | 'failed';
// ============================================
// Database Entity Types
// ============================================
// Row shape of the backup_schedules table.
export interface BackupSchedule {
  id: string;
  name: string;
  frequency: BackupFrequency;
  cronExpression: string;
  // How many completed backups to keep for this schedule.
  retentionCount: number;
  isEnabled: boolean;
  lastRunAt: Date | null;
  nextRunAt: Date | null;
  createdAt: Date;
  updatedAt: Date;
}
// Row shape of the backup_history table.
export interface BackupHistory {
  id: string;
  // Null for manual/imported backups, or when the schedule was deleted.
  scheduleId: string | null;
  backupType: BackupType;
  filename: string;
  filePath: string;
  fileSizeBytes: number;
  databaseTablesCount: number | null;
  documentsCount: number | null;
  status: BackupStatus;
  errorMessage: string | null;
  startedAt: Date;
  completedAt: Date | null;
  createdBy: string | null;
  metadata: BackupMetadata;
}
// Row shape of the backup_settings key/value table.
export interface BackupSettings {
  id: string;
  settingKey: string;
  settingValue: string;
  updatedAt: Date;
}
// ============================================
// Archive Manifest Types
// ============================================
// manifest.json embedded in every backup archive; describes its contents.
export interface BackupManifest {
  version: string;
  createdAt: string;
  applicationVersion: string;
  backupType: BackupType;
  scheduleName: string | null;
  contents: {
    database: {
      filename: string;
      tablesCount: number;
      postgresqlVersion: string;
      sizeBytes: number;
      checksum: string;
    };
    documents: {
      totalFiles: number;
      totalSizeBytes: number;
      usersCount: number;
    };
  };
  metadata: BackupMetadata;
}
// Free-form metadata attached to backups and manifests; the index signature
// allows additional scalar keys (e.g. archivePath, imported).
export interface BackupMetadata {
  hostname?: string;
  createdBy?: string;
  compression?: string;
  postgresqlVersion?: string;
  [key: string]: string | number | boolean | undefined;
}
// ============================================
// API Request/Response Types
// ============================================
// List Backups
// Query parameters for GET /backups; all optional.
export interface ListBackupsParams {
  page?: number;
  pageSize?: number;
  status?: BackupStatus;
  backupType?: BackupType;
  sortBy?: 'startedAt' | 'fileSizeBytes' | 'status';
  sortOrder?: 'asc' | 'desc';
}
// Paginated list response.
export interface ListBackupsResponse {
  items: BackupHistory[];
  total: number;
  page: number;
  pageSize: number;
  totalPages: number;
}
// Create Backup
export interface CreateBackupRequest {
  name?: string;
  includeDocuments?: boolean;
}
export interface CreateBackupResponse {
  backupId: string;
  status: BackupStatus;
  message: string;
}
// Upload Backup
export interface UploadBackupResponse {
  backupId: string;
  filename: string;
  fileSizeBytes: number;
  message: string;
}
// Restore Preview
// Dry-run information shown before a restore is executed.
export interface RestorePreviewResponse {
  backupId: string;
  manifest: BackupManifest;
  warnings: string[];
  estimatedDuration: string;
}
// Restore Execute
export interface RestoreRequest {
  // When true, take a backup of the current state before restoring.
  createSafetyBackup?: boolean;
}
export interface RestoreResponse {
  success: boolean;
  safetyBackupId?: string;
  restoredAt: string;
  message: string;
}
// Schedules
export interface CreateScheduleRequest {
  name: string;
  frequency: BackupFrequency;
  retentionCount?: number;
  isEnabled?: boolean;
}
export interface UpdateScheduleRequest {
  name?: string;
  frequency?: BackupFrequency;
  retentionCount?: number;
  isEnabled?: boolean;
}
// Schedule plus its most recent backup, if any.
export interface ScheduleResponse extends BackupSchedule {
  lastBackup?: BackupHistory | null;
}
// Settings
// Typed view over the backup_settings key/value rows.
export interface BackupSettingsMap {
  emailOnSuccess: boolean;
  emailOnFailure: boolean;
  adminEmail: string;
  maxBackupSizeMb: number;
  compressionEnabled: boolean;
}
export interface UpdateSettingsRequest {
  emailOnSuccess?: boolean;
  emailOnFailure?: boolean;
  adminEmail?: string;
  maxBackupSizeMb?: number;
  compressionEnabled?: boolean;
}
// ============================================
// Service Operation Types
// ============================================
// Input to BackupService.createBackup.
export interface CreateBackupOptions {
  name?: string;
  scheduleId?: string | null;
  backupType: BackupType;
  createdBy?: string | null;
  includeDocuments?: boolean;
}
// Outcome of a backup attempt; `error` is set when success is false.
export interface BackupResult {
  success: boolean;
  backupId: string;
  filePath?: string;
  error?: string;
}
export interface RestoreOptions {
  backupId: string;
  createSafetyBackup?: boolean;
}
export interface RestoreResult {
  success: boolean;
  safetyBackupId?: string;
  error?: string;
}
// Per-schedule result of a retention cleanup pass.
export interface RetentionCleanupResult {
  scheduleId: string;
  scheduleName: string;
  deletedCount: number;
  retainedCount: number;
  freedBytes: number;
}
// ============================================
// Job Types
// ============================================
// Aggregate result of one scheduled-backup job run.
export interface ScheduledBackupJobResult {
  processed: number;
  succeeded: number;
  failed: number;
  errors: Array<{ scheduleId: string; error: string }>;
}
// Aggregate result of one retention-cleanup job run.
export interface RetentionCleanupJobResult {
  processed: number;
  totalDeleted: number;
  totalFreedBytes: number;
  results: RetentionCleanupResult[];
  errors: Array<{ scheduleId: string; error: string }>;
}

View File

@@ -0,0 +1,24 @@
/**
 * @ai-summary Backup feature module exports
 * @ai-context Entry point for the backup and restore feature
 */
// API — route registration and HTTP controller.
export { registerBackupRoutes } from './api/backup.routes';
export { BackupController } from './api/backup.controller';
// Domain Services
export { BackupService } from './domain/backup.service';
export { BackupArchiveService } from './domain/backup-archive.service';
export { BackupRestoreService } from './domain/backup-restore.service';
export { BackupRetentionService } from './domain/backup-retention.service';
// Data
export { BackupRepository } from './data/backup.repository';
// Jobs — entry points plus their pool injectors (call setters at startup).
export { processScheduledBackups, setBackupJobPool } from './jobs/backup-scheduled.job';
export { processBackupRetention, setBackupCleanupJobPool } from './jobs/backup-cleanup.job';
// Types
export * from './domain/backup.types';

View File

@@ -0,0 +1,53 @@
/**
* @ai-summary Job for backup retention cleanup
* @ai-context Runs daily at 4 AM to enforce retention policies
*/
import { Pool } from 'pg';
import { logger } from '../../../core/logging/logger';
import { BackupRetentionService } from '../domain/backup-retention.service';
import { RetentionCleanupJobResult } from '../domain/backup.types';
// Module-level pool, injected at startup so the job entry point can be
// invoked without arguments by the scheduler.
let pool: Pool | null = null;
/**
 * Sets the database pool for the job
 */
export function setBackupCleanupJobPool(dbPool: Pool): void {
  pool = dbPool;
}
/**
 * Runs retention cleanup across all schedules, then purges stale failed
 * backups, and logs an aggregate summary.
 *
 * Requires setBackupCleanupJobPool() to have been called first; throws if
 * the pool was never injected or the cleanup itself fails.
 */
export async function processBackupRetention(): Promise<RetentionCleanupJobResult> {
  if (!pool) {
    throw new Error('Database pool not initialized for backup cleanup job');
  }
  const retentionService = new BackupRetentionService(pool);
  try {
    logger.info('Starting backup retention cleanup job');
    const cleanupResult = await retentionService.processRetentionCleanup();
    // Failed/incomplete backups older than 24h are purged in the same pass.
    const staleFailedDeleted = await retentionService.cleanupFailedBackups();
    logger.info('Backup retention cleanup job completed', {
      schedulesProcessed: cleanupResult.processed,
      totalDeleted: cleanupResult.totalDeleted + staleFailedDeleted,
      totalFreedBytes: cleanupResult.totalFreedBytes,
      failedBackupsDeleted: staleFailedDeleted,
      errors: cleanupResult.errors.length,
    });
    // NOTE(review): the returned totals exclude failedBackupsDeleted while the
    // log line above includes it — confirm which figure callers rely on.
    return cleanupResult;
  } catch (error) {
    logger.error('Backup retention cleanup job failed', {
      error: error instanceof Error ? error.message : String(error),
    });
    throw error;
  }
}

View File

@@ -0,0 +1,154 @@
/**
* @ai-summary Job for processing scheduled backups
* @ai-context Runs every minute to check for due scheduled backups
*/
import { Pool } from 'pg';
import { logger } from '../../../core/logging/logger';
import { BackupRepository } from '../data/backup.repository';
import { BackupService } from '../domain/backup.service';
import { ScheduledBackupJobResult, BackupFrequency } from '../domain/backup.types';
// Module-level pool, injected at startup so the job entry point can be
// invoked without arguments by the scheduler.
let pool: Pool | null = null;
/**
 * Sets the database pool for the job
 */
export function setBackupJobPool(dbPool: Pool): void {
  pool = dbPool;
}
/**
 * Runs every enabled, due backup schedule once and reports aggregate results.
 *
 * Each schedule is handled independently: a failure is recorded in the
 * result's `errors` array and never aborts the remaining schedules. The
 * schedule's next-run time is advanced regardless of outcome.
 *
 * Requires setBackupJobPool() to have been called first.
 */
export async function processScheduledBackups(): Promise<ScheduledBackupJobResult> {
  if (!pool) {
    throw new Error('Database pool not initialized for backup job');
  }
  const repository = new BackupRepository(pool);
  const backupService = new BackupService(pool);
  const summary: ScheduledBackupJobResult = {
    processed: 0,
    succeeded: 0,
    failed: 0,
    errors: [],
  };
  try {
    // Get all enabled schedules that are due.
    const dueSchedules = await repository.getEnabledSchedulesDue();
    if (!dueSchedules.length) {
      logger.debug('No scheduled backups due');
      return summary;
    }
    logger.info('Processing scheduled backups', { count: dueSchedules.length });
    for (const schedule of dueSchedules) {
      summary.processed += 1;
      try {
        logger.info('Running scheduled backup', {
          scheduleId: schedule.id,
          scheduleName: schedule.name,
          frequency: schedule.frequency,
        });
        const outcome = await backupService.createBackup({
          name: schedule.name,
          scheduleId: schedule.id,
          backupType: 'scheduled',
          includeDocuments: true,
        });
        if (!outcome.success) {
          summary.failed += 1;
          summary.errors.push({
            scheduleId: schedule.id,
            error: outcome.error || 'Unknown error',
          });
          logger.error('Scheduled backup failed', {
            scheduleId: schedule.id,
            error: outcome.error,
          });
        } else {
          summary.succeeded += 1;
          logger.info('Scheduled backup completed', {
            scheduleId: schedule.id,
            backupId: outcome.backupId,
          });
        }
        // Advance the schedule's next run time.
        await repository.updateScheduleLastRun(schedule.id, calculateNextRun(schedule.frequency));
      } catch (error) {
        summary.failed += 1;
        const message = error instanceof Error ? error.message : String(error);
        summary.errors.push({ scheduleId: schedule.id, error: message });
        logger.error('Error processing scheduled backup', {
          scheduleId: schedule.id,
          error: message,
        });
        // Still advance next-run so a broken schedule doesn't retry every minute.
        try {
          await repository.updateScheduleLastRun(schedule.id, calculateNextRun(schedule.frequency));
        } catch {
          // Best effort only.
        }
      }
    }
    logger.info('Scheduled backup processing completed', {
      processed: summary.processed,
      succeeded: summary.succeeded,
      failed: summary.failed,
    });
    return summary;
  } catch (error) {
    logger.error('Failed to process scheduled backups', {
      error: error instanceof Error ? error.message : String(error),
    });
    throw error;
  }
}
/**
 * Calculates the next run time for a schedule, relative to "now":
 *   hourly  -> top of the next hour
 *   daily   -> 03:00 tomorrow
 *   weekly  -> 03:00 next Sunday (a full week ahead when today is Sunday)
 *   monthly -> 03:00 on the 1st of the next month
 *
 * Fix: the monthly case previously called setMonth(+1) while the date was
 * still e.g. the 31st; JS Date overflows (Jan 31 + 1 month = Mar 3), so
 * setDate(1) afterwards landed on Mar 1 and skipped February entirely.
 * Resetting to the 1st BEFORE incrementing the month avoids the overflow.
 */
function calculateNextRun(frequency: BackupFrequency): Date {
  const now = new Date();
  const next = new Date(now);
  switch (frequency) {
    case 'hourly':
      next.setHours(next.getHours() + 1);
      next.setMinutes(0, 0, 0);
      break;
    case 'daily':
      next.setDate(next.getDate() + 1);
      next.setHours(3, 0, 0, 0);
      break;
    case 'weekly': {
      // Days until the coming Sunday; `|| 7` pushes a full week when today is Sunday.
      const daysUntilSunday = (7 - next.getDay()) % 7 || 7;
      next.setDate(next.getDate() + daysUntilSunday);
      next.setHours(3, 0, 0, 0);
      break;
    }
    case 'monthly':
      next.setDate(1);
      next.setMonth(next.getMonth() + 1);
      next.setHours(3, 0, 0, 0);
      break;
  }
  return next;
}

View File

@@ -0,0 +1,95 @@
-- Migration: Create backup feature tables
-- Description: Tables for backup schedules, history, and settings
-- NOTE: the CHECK constraints below must stay in sync with the
-- BackupFrequency / BackupType / BackupStatus TypeScript union types.
-- ============================================
-- backup_schedules - Scheduled backup configurations
-- ============================================
CREATE TABLE IF NOT EXISTS backup_schedules (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    name VARCHAR(100) NOT NULL,
    frequency VARCHAR(20) NOT NULL CHECK (frequency IN ('hourly', 'daily', 'weekly', 'monthly')),
    cron_expression VARCHAR(50) NOT NULL,
    retention_count INTEGER NOT NULL DEFAULT 7,
    is_enabled BOOLEAN DEFAULT true,
    last_run_at TIMESTAMP WITH TIME ZONE,
    next_run_at TIMESTAMP WITH TIME ZONE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
-- ============================================
-- backup_history - Record of all backup operations
-- ============================================
CREATE TABLE IF NOT EXISTS backup_history (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    schedule_id UUID REFERENCES backup_schedules(id) ON DELETE SET NULL, -- history rows survive schedule deletion
    backup_type VARCHAR(20) NOT NULL CHECK (backup_type IN ('scheduled', 'manual')),
    filename VARCHAR(255) NOT NULL,
    file_path VARCHAR(500) NOT NULL,
    file_size_bytes BIGINT NOT NULL,
    database_tables_count INTEGER,
    documents_count INTEGER,
    status VARCHAR(20) NOT NULL CHECK (status IN ('in_progress', 'completed', 'failed')),
    error_message TEXT,
    started_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    completed_at TIMESTAMP WITH TIME ZONE,
    created_by VARCHAR(255),
    metadata JSONB DEFAULT '{}'
);
-- ============================================
-- backup_settings - Global backup configuration
-- ============================================
-- Simple key/value store; typed access lives in the application layer.
CREATE TABLE IF NOT EXISTS backup_settings (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    setting_key VARCHAR(50) UNIQUE NOT NULL,
    setting_value TEXT NOT NULL,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
-- ============================================
-- Indexes
-- ============================================
CREATE INDEX IF NOT EXISTS idx_backup_history_status ON backup_history(status);
CREATE INDEX IF NOT EXISTS idx_backup_history_started ON backup_history(started_at DESC);
CREATE INDEX IF NOT EXISTS idx_backup_history_schedule ON backup_history(schedule_id);
CREATE INDEX IF NOT EXISTS idx_backup_schedules_enabled ON backup_schedules(is_enabled);
CREATE INDEX IF NOT EXISTS idx_backup_schedules_next_run ON backup_schedules(next_run_at);
-- ============================================
-- Triggers for updated_at
-- ============================================
-- Create update_updated_at_column function if it doesn't exist
-- (CREATE OR REPLACE is safe if another migration already defined it).
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ language 'plpgsql';
-- Trigger for backup_schedules
DROP TRIGGER IF EXISTS backup_schedules_updated_at ON backup_schedules;
CREATE TRIGGER backup_schedules_updated_at
    BEFORE UPDATE ON backup_schedules
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();
-- Trigger for backup_settings
DROP TRIGGER IF EXISTS backup_settings_updated_at ON backup_settings;
CREATE TRIGGER backup_settings_updated_at
    BEFORE UPDATE ON backup_settings
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();
-- ============================================
-- Default settings
-- ============================================
-- Idempotent seed: ON CONFLICT DO NOTHING preserves operator-edited values.
INSERT INTO backup_settings (setting_key, setting_value) VALUES
    ('email_on_success', 'true'),
    ('email_on_failure', 'true'),
    ('admin_email', ''),
    ('max_backup_size_mb', '1024'),
    ('compression_enabled', 'true')
ON CONFLICT (setting_key) DO NOTHING;