Added Documents Feature

This commit is contained in:
Eric Gullickson
2025-09-28 20:35:46 -05:00
parent 2e1b588270
commit 775a1ff69e
66 changed files with 5655 additions and 944 deletions

View File

@@ -4,14 +4,10 @@
import { Pool } from 'pg';
import { readFileSync, readdirSync } from 'fs';
import { join, resolve } from 'path';
import { env } from '../../core/config/environment';
import { appConfig } from '../../core/config/config-loader';
const pool = new Pool({
host: env.DB_HOST,
port: env.DB_PORT,
database: env.DB_NAME,
user: env.DB_USER,
password: env.DB_PASSWORD,
connectionString: appConfig.getDatabaseUrl(),
});
// Define migration order based on dependencies and packaging layout
@@ -20,6 +16,7 @@ const pool = new Pool({
// and user-preferences trigger depends on it; so run vehicles before core/user-preferences.
const MIGRATION_ORDER = [
'features/vehicles', // Primary entity, defines update_updated_at_column()
'features/documents', // Depends on vehicles; provides documents table
'core/user-preferences', // Depends on update_updated_at_column()
'features/fuel-logs', // Depends on vehicles
'features/maintenance', // Depends on vehicles

View File

@@ -5,17 +5,20 @@
import Fastify, { FastifyInstance } from 'fastify';
import cors from '@fastify/cors';
import helmet from '@fastify/helmet';
import fastifyMultipart from '@fastify/multipart';
// Core plugins
import authPlugin from './core/plugins/auth.plugin';
import loggingPlugin from './core/plugins/logging.plugin';
import errorPlugin from './core/plugins/error.plugin';
import { appConfig } from './core/config/config-loader';
// Fastify feature routes
import { vehiclesRoutes } from './features/vehicles/api/vehicles.routes';
import { fuelLogsRoutes } from './features/fuel-logs/api/fuel-logs.routes';
import { stationsRoutes } from './features/stations/api/stations.routes';
import tenantManagementRoutes from './features/tenant-management/index';
import { documentsRoutes } from './features/documents/api/documents.routes';
async function buildApp(): Promise<FastifyInstance> {
const app = Fastify({
@@ -27,6 +30,36 @@ async function buildApp(): Promise<FastifyInstance> {
await app.register(cors);
await app.register(loggingPlugin);
await app.register(errorPlugin);
// Multipart upload support with config-driven size limits
// Parse a human-readable size ("10MB", "1.5mb", "2 GB", "20kb", "1048576")
// into bytes. Accepts decimals and optional whitespace before the unit.
// Invalid input falls back to a 10 MiB default rather than throwing, since
// this only tunes the multipart upload limit.
const parseSizeToBytes = (val: string): number => {
  const DEFAULT_BYTES = 10 * 1024 * 1024; // 10 MiB
  const s = String(val).trim().toLowerCase();
  // Integer or decimal number, optional whitespace, optional binary unit.
  const match = s.match(/^(\d+(?:\.\d+)?)\s*(b|kb|k|mb|m|gb|g|tb|t)?$/);
  if (!match) {
    // Fallback: tolerate plain integer byte counts with trailing junk.
    const n = parseInt(s, 10);
    return Number.isFinite(n) && n > 0 ? n : DEFAULT_BYTES;
  }
  const num = parseFloat(match[1]);
  // Binary (1024-based) multipliers; bare numbers are bytes.
  const multipliers: Record<string, number> = {
    b: 1,
    k: 1024, kb: 1024,
    m: 1024 ** 2, mb: 1024 ** 2,
    g: 1024 ** 3, gb: 1024 ** 3,
    t: 1024 ** 4, tb: 1024 ** 4,
  };
  const unit = match[2] ?? 'b';
  // Round so fractional sizes still yield an integral byte limit.
  return Math.round(num * multipliers[unit]);
};
const fileSizeLimit = parseSizeToBytes(appConfig.config.performance.max_request_size);
await app.register(fastifyMultipart, {
limits: {
fileSize: fileSizeLimit,
},
});
// Authentication plugin
await app.register(authPlugin);
@@ -39,7 +72,17 @@ async function buildApp(): Promise<FastifyInstance> {
status: 'healthy',
timestamp: new Date().toISOString(),
environment: process.env.NODE_ENV,
features: ['vehicles', 'fuel-logs', 'stations', 'maintenance']
features: ['vehicles', 'documents', 'fuel-logs', 'stations', 'maintenance']
});
});
// API-prefixed health for Traefik route validation and diagnostics
app.get('/api/health', async (_request, reply) => {
return reply.code(200).send({
status: 'healthy',
scope: 'api',
timestamp: new Date().toISOString(),
features: ['vehicles', 'documents', 'fuel-logs', 'stations', 'maintenance']
});
});
@@ -67,6 +110,7 @@ async function buildApp(): Promise<FastifyInstance> {
// Register Fastify feature routes
await app.register(vehiclesRoutes, { prefix: '/api' });
await app.register(documentsRoutes, { prefix: '/api' });
await app.register(fuelLogsRoutes, { prefix: '/api' });
await app.register(stationsRoutes, { prefix: '/api' });
await app.register(tenantManagementRoutes);

View File

@@ -0,0 +1,23 @@
# Core Module Index
## Configuration (`src/core/config/`)
- `config-loader.ts` — Load and validate YAML configuration and file-mounted secrets (replaces env-var based config)
- `database.ts` — PostgreSQL connection pool
- `redis.ts` — Redis client and cache helpers
- `tenant.ts` — Tenant configuration utilities
## Plugins (`src/core/plugins/`)
- `auth.plugin.ts` — Auth0 JWT via JWKS (@fastify/jwt, get-jwks)
- `error.plugin.ts` — Error handling
- `logging.plugin.ts` — Request logging
## Logging (`src/core/logging/`)
- `logger.ts` — Structured logging (Winston)
## Middleware
- `middleware/tenant.ts` — Tenant extraction/validation
## Storage (`src/core/storage/`)
- `storage.service.ts` — Storage abstraction
- `adapters/minio.adapter.ts` — MinIO S3-compatible adapter

View File

@@ -0,0 +1,295 @@
/**
* K8s-aligned Configuration Loader
* Loads configuration from YAML files and secrets from mounted files
* Replaces environment variable based configuration for production k8s compatibility
*/
import * as yaml from 'js-yaml';
import * as fs from 'fs';
import * as path from 'path';
import { z } from 'zod';
import { logger } from '../logging/logger';
// Configuration schema definition
/**
 * Zod schema for the YAML application configuration file.
 * Duration fields (timeout, period, initial_delay, request_timeout) and
 * size fields (max_request_size) are strings such as "30s" or "10MB";
 * they are parsed by their consumers, not here.
 */
const configSchema = z.object({
// Server configuration
server: z.object({
name: z.string(),
port: z.number(),
environment: z.string(),
tenant_id: z.string(),
node_env: z.string(),
}),
// Database configuration (password comes from secrets, not this file)
database: z.object({
host: z.string(),
port: z.number(),
name: z.string(),
user: z.string(),
pool_size: z.number().optional().default(20),
}),
// Redis configuration
redis: z.object({
host: z.string(),
port: z.number(),
db: z.number().optional().default(0),
}),
// Auth0 configuration (client secret comes from secrets)
auth0: z.object({
domain: z.string(),
audience: z.string(),
}),
// Platform services configuration
platform: z.object({
services: z.object({
vehicles: z.object({
url: z.string(),
timeout: z.string(),
}),
tenants: z.object({
url: z.string(),
timeout: z.string(),
}),
}),
}),
// MinIO configuration (access/secret keys come from secrets)
minio: z.object({
endpoint: z.string(),
port: z.number(),
bucket: z.string(),
}),
// External APIs configuration
external: z.object({
vpic: z.object({
url: z.string(),
timeout: z.string(),
}),
}),
// Service configuration
service: z.object({
name: z.string(),
}),
// CORS configuration
cors: z.object({
origins: z.array(z.string()),
allow_credentials: z.boolean(),
max_age: z.number(),
}),
// Frontend configuration
frontend: z.object({
tenant_id: z.string(),
api_base_url: z.string(),
auth0: z.object({
domain: z.string(),
audience: z.string(),
}),
}),
// Health check configuration (endpoint paths plus k8s probe tuning)
health: z.object({
endpoints: z.object({
basic: z.string(),
ready: z.string(),
live: z.string(),
startup: z.string(),
}),
probes: z.object({
startup: z.object({
initial_delay: z.string(),
period: z.string(),
timeout: z.string(),
failure_threshold: z.number(),
}),
readiness: z.object({
period: z.string(),
timeout: z.string(),
failure_threshold: z.number(),
}),
liveness: z.object({
period: z.string(),
timeout: z.string(),
failure_threshold: z.number(),
}),
}),
}),
// Logging configuration
logging: z.object({
level: z.string(),
format: z.string(),
destinations: z.array(z.string()),
}),
// Performance configuration (max_request_size also caps multipart uploads)
performance: z.object({
request_timeout: z.string(),
max_request_size: z.string(),
compression_enabled: z.boolean(),
circuit_breaker: z.object({
enabled: z.boolean(),
failure_threshold: z.number(),
timeout: z.string(),
}),
}),
});
// Secrets schema definition — each value is read from a file in SECRETS_DIR
// whose kebab-case filename maps to the snake_case key (see loadSecrets()).
const secretsSchema = z.object({
postgres_password: z.string(),
minio_access_key: z.string(),
minio_secret_key: z.string(),
platform_vehicles_api_key: z.string(),
auth0_client_secret: z.string(),
google_maps_api_key: z.string(),
});
type Config = z.infer<typeof configSchema>;
type Secrets = z.infer<typeof secretsSchema>;
// Validated config + secrets plus derived connection/credential helpers.
export interface AppConfiguration {
config: Config;
secrets: Secrets;
// Convenience accessors for common patterns
getDatabaseUrl(): string;
getRedisUrl(): string;
getAuth0Config(): { domain: string; audience: string; clientSecret: string };
getPlatformServiceConfig(service: 'vehicles' | 'tenants'): { url: string; apiKey: string };
getMinioConfig(): { endpoint: string; port: number; accessKey: string; secretKey: string; bucket: string };
}
/**
 * Loads application configuration from a YAML file and secrets from
 * individual mounted files (k8s-style), validates both with zod, and
 * caches the assembled AppConfiguration for the process lifetime.
 */
class ConfigurationLoader {
// Path to the YAML config file; overridable via CONFIG_PATH.
private configPath: string;
// Directory containing one file per secret; overridable via SECRETS_DIR.
private secretsDir: string;
// Memoized result of load(); populated on the first call.
private cachedConfig: AppConfiguration | null = null;
constructor() {
this.configPath = process.env.CONFIG_PATH || '/app/config/production.yml';
this.secretsDir = process.env.SECRETS_DIR || '/run/secrets';
}
// Read and zod-validate the YAML config. Throws if the file is missing,
// unparseable, or fails schema validation.
private loadYamlConfig(): Config {
if (!fs.existsSync(this.configPath)) {
throw new Error(`Configuration file not found at ${this.configPath}`);
}
try {
const fileContents = fs.readFileSync(this.configPath, 'utf8');
const yamlData = yaml.load(fileContents) as any;
return configSchema.parse(yamlData);
} catch (error) {
logger.error(`Failed to load configuration from ${this.configPath}`, { error });
throw new Error(`Configuration loading failed: ${error}`);
}
}
// Read each expected secret file (kebab-case filename -> snake_case key),
// trimming trailing newlines. Missing/unreadable files are logged per-file
// and then surface collectively as a zod validation failure at the end.
private loadSecrets(): Secrets {
const secrets: Partial<Secrets> = {};
const secretFiles = [
'postgres-password',
'minio-access-key',
'minio-secret-key',
'platform-vehicles-api-key',
'auth0-client-secret',
'google-maps-api-key',
];
for (const secretFile of secretFiles) {
const secretPath = path.join(this.secretsDir, secretFile);
const secretKey = secretFile.replace(/-/g, '_') as keyof Secrets;
if (fs.existsSync(secretPath)) {
try {
const secretValue = fs.readFileSync(secretPath, 'utf8').trim();
(secrets as any)[secretKey] = secretValue;
} catch (error) {
logger.error(`Failed to read secret file ${secretPath}`, { error });
}
} else {
logger.error(`Secret file not found: ${secretPath}`);
}
}
try {
return secretsSchema.parse(secrets);
} catch (error) {
logger.error('Secrets validation failed', { error });
throw new Error(`Secrets loading failed: ${error}`);
}
}
// Build the AppConfiguration facade once; later calls return the cache.
// The accessor methods close over the validated config/secrets.
public load(): AppConfiguration {
if (this.cachedConfig) {
return this.cachedConfig;
}
const config = this.loadYamlConfig();
const secrets = this.loadSecrets();
this.cachedConfig = {
config,
secrets,
getDatabaseUrl(): string {
return `postgresql://${config.database.user}:${secrets.postgres_password}@${config.database.host}:${config.database.port}/${config.database.name}`;
},
getRedisUrl(): string {
return `redis://${config.redis.host}:${config.redis.port}/${config.redis.db}`;
},
getAuth0Config() {
return {
domain: config.auth0.domain,
audience: config.auth0.audience,
clientSecret: secrets.auth0_client_secret,
};
},
getPlatformServiceConfig(service: 'vehicles' | 'tenants') {
const serviceConfig = config.platform.services[service];
// NOTE(review): the tenants API key is hard-coded here instead of coming
// from the secrets directory like the vehicles key — move it to a mounted
// secret (e.g. platform-tenants-api-key) before shipping.
const apiKey = service === 'vehicles' ? secrets.platform_vehicles_api_key : 'mvp-platform-tenants-secret-key';
return {
url: serviceConfig.url,
apiKey,
};
},
getMinioConfig() {
return {
endpoint: config.minio.endpoint,
port: config.minio.port,
accessKey: secrets.minio_access_key,
secretKey: secrets.minio_secret_key,
bucket: config.minio.bucket,
};
},
};
logger.info('Configuration loaded successfully', {
configSource: 'yaml',
secretsSource: 'files',
});
return this.cachedConfig;
}
}
// Export singleton instance
const configLoader = new ConfigurationLoader();
// Eagerly loads at module import time: a missing config file or secret
// throws here, failing fast on startup for every module importing appConfig.
export const appConfig = configLoader.load();
// Export types for use in other modules
export type { Config, Secrets };

View File

@@ -1,52 +0,0 @@
/**
* @ai-summary Environment configuration with validation
* @ai-context Validates all env vars at startup, single source of truth
*/
import { z } from 'zod';
import * as dotenv from 'dotenv';
dotenv.config();
const envSchema = z.object({
NODE_ENV: z.string().default('production'),
PORT: z.string().transform(Number).default('3001'),
// Database
DB_HOST: z.string().default('localhost'),
DB_PORT: z.string().transform(Number).default('5432'),
DB_NAME: z.string().default('motovaultpro'),
DB_USER: z.string().default('postgres'),
DB_PASSWORD: z.string().default('password'),
// Redis
REDIS_HOST: z.string().default('localhost'),
REDIS_PORT: z.string().transform(Number).default('6379'),
// Auth0 - Required for JWT validation
AUTH0_DOMAIN: z.string().min(1, 'AUTH0_DOMAIN is required for JWT authentication'),
AUTH0_CLIENT_ID: z.string().min(1, 'AUTH0_CLIENT_ID is required'),
AUTH0_CLIENT_SECRET: z.string().min(1, 'AUTH0_CLIENT_SECRET is required'),
AUTH0_AUDIENCE: z.string().min(1, 'AUTH0_AUDIENCE is required for JWT validation'),
// External APIs
GOOGLE_MAPS_API_KEY: z.string().default('development'),
VPIC_API_URL: z.string().default('https://vpic.nhtsa.dot.gov/api/vehicles'),
// Platform Services
PLATFORM_VEHICLES_API_URL: z.string().default('http://mvp-platform-vehicles-api:8000'),
PLATFORM_VEHICLES_API_KEY: z.string().default('mvp-platform-vehicles-secret-key'),
// MinIO
MINIO_ENDPOINT: z.string().default('localhost'),
MINIO_PORT: z.string().transform(Number).default('9000'),
MINIO_ACCESS_KEY: z.string().default('minioadmin'),
MINIO_SECRET_KEY: z.string().default('minioadmin123'),
MINIO_BUCKET: z.string().default('motovaultpro'),
});
export type Environment = z.infer<typeof envSchema>;
// Validate and export - now with defaults for build-time compilation
export const env = envSchema.parse(process.env);
// Environment configuration validated and exported

View File

@@ -1,4 +1,5 @@
import axios from 'axios';
import { appConfig } from './config-loader';
// Simple in-memory cache for tenant validation
const tenantValidityCache = new Map<string, { ok: boolean; ts: number }>();
@@ -17,18 +18,18 @@ export interface TenantConfig {
}
export const getTenantConfig = (): TenantConfig => {
const tenantId = process.env.TENANT_ID || 'admin';
const tenantId = appConfig.config.server.tenant_id;
const databaseUrl = tenantId === 'admin'
? `postgresql://${process.env.DB_USER || 'motovault_user'}:${process.env.DB_PASSWORD}@${process.env.DB_HOST || 'postgres'}:${process.env.DB_PORT || '5432'}/${process.env.DB_NAME || 'motovault'}`
: `postgresql://motovault_user:${process.env.DB_PASSWORD}@${tenantId}-postgres:5432/motovault`;
? appConfig.getDatabaseUrl()
: `postgresql://${appConfig.config.database.user}:${appConfig.secrets.postgres_password}@${tenantId}-postgres:5432/${appConfig.config.database.name}`;
const redisUrl = tenantId === 'admin'
? `redis://${process.env.REDIS_HOST || 'redis'}:${process.env.REDIS_PORT || '6379'}`
? appConfig.getRedisUrl()
: `redis://${tenantId}-redis:6379`;
const platformServicesUrl = process.env.PLATFORM_TENANTS_API_URL || 'http://mvp-platform-tenants:8000';
const platformServicesUrl = appConfig.getPlatformServiceConfig('tenants').url;
return {
tenantId,
databaseUrl,
@@ -48,7 +49,7 @@ export const isValidTenant = async (tenantId: string): Promise<boolean> => {
let ok = false;
try {
const baseUrl = process.env.PLATFORM_TENANTS_API_URL || 'http://mvp-platform-tenants:8000';
const baseUrl = appConfig.getPlatformServiceConfig('tenants').url;
const url = `${baseUrl}/api/v1/tenants/${encodeURIComponent(tenantId)}`;
const resp = await axios.get(url, { timeout: 2000 });
ok = resp.status === 200;

View File

@@ -5,7 +5,7 @@
import { FastifyPluginAsync, FastifyRequest, FastifyReply } from 'fastify';
import fp from 'fastify-plugin';
import buildGetJwks from 'get-jwks';
import { env } from '../config/environment';
import { appConfig } from '../config/config-loader';
import { logger } from '../logging/logger';
declare module 'fastify' {
@@ -19,8 +19,10 @@ declare module 'fastify' {
}
const authPlugin: FastifyPluginAsync = async (fastify) => {
const auth0Config = appConfig.getAuth0Config();
// Security validation: ensure AUTH0_DOMAIN is properly configured
if (!env.AUTH0_DOMAIN || !env.AUTH0_DOMAIN.includes('.auth0.com')) {
if (!auth0Config.domain || !auth0Config.domain.includes('.auth0.com')) {
throw new Error('AUTH0_DOMAIN must be a valid Auth0 domain');
}
@@ -37,7 +39,7 @@ const authPlugin: FastifyPluginAsync = async (fastify) => {
const { header: { kid, alg }, payload: { iss } } = token;
// Validate issuer matches Auth0 domain (security: prevent issuer spoofing)
const expectedIssuer = `https://${env.AUTH0_DOMAIN}/`;
const expectedIssuer = `https://${auth0Config.domain}/`;
if (iss !== expectedIssuer) {
throw new Error(`Invalid issuer: ${iss}`);
}
@@ -49,16 +51,16 @@ const authPlugin: FastifyPluginAsync = async (fastify) => {
alg
});
} catch (error) {
logger.error('JWKS key retrieval failed', {
logger.error('JWKS key retrieval failed', {
error: error instanceof Error ? error.message : 'Unknown error',
domain: env.AUTH0_DOMAIN
domain: auth0Config.domain
});
throw error;
}
},
verify: {
allowedIss: `https://${env.AUTH0_DOMAIN}/`,
allowedAud: env.AUTH0_AUDIENCE,
allowedIss: `https://${auth0Config.domain}/`,
allowedAud: auth0Config.audience,
},
});
@@ -67,9 +69,9 @@ const authPlugin: FastifyPluginAsync = async (fastify) => {
try {
await request.jwtVerify();
logger.info('JWT authentication successful', {
logger.info('JWT authentication successful', {
userId: request.user?.sub?.substring(0, 8) + '...',
audience: env.AUTH0_AUDIENCE
audience: auth0Config.audience
});
} catch (error) {
logger.warn('JWT authentication failed', {

View File

@@ -0,0 +1,66 @@
import { Client as MinioClient } from 'minio';
import type { Readable } from 'stream';
import { appConfig } from '../../config/config-loader';
import type { HeadObjectResult, SignedUrlOptions, StorageService } from '../storage.service';
/**
 * MinIO-backed implementation of the StorageService facade.
 * Connection settings and credentials come from the app configuration
 * singleton; the returned adapter exposes only provider-agnostic types.
 */
export function createMinioAdapter(): StorageService {
  const { endpoint, port, accessKey, secretKey } = appConfig.getMinioConfig();
  const client = new MinioClient({
    endPoint: endpoint,
    port,
    // NOTE(review): TLS is hard-disabled — acceptable for in-cluster traffic,
    // but should become configurable before targeting an external endpoint.
    useSSL: false,
    accessKey,
    secretKey,
  });
  // Merge caller-supplied metadata with a Content-Type entry when given.
  const normalizeMeta = (contentType?: string, metadata?: Record<string, string>) => {
    const meta: Record<string, string> = { ...(metadata || {}) };
    if (contentType) meta['Content-Type'] = contentType;
    return meta;
  };
  const adapter: StorageService = {
    async putObject(bucket, key, body, contentType, metadata) {
      const meta = normalizeMeta(contentType, metadata);
      if (typeof body === 'string') {
        // Encode first: string.length counts UTF-16 code units, not bytes,
        // so passing it as the upload size corrupts multi-byte content.
        const buf = Buffer.from(body, 'utf8');
        await client.putObject(bucket, key, buf, buf.length, meta);
      } else if (Buffer.isBuffer(body)) {
        await client.putObject(bucket, key, body, body.length, meta);
      } else {
        // Readable stream: omit size so the SDK falls back to chunked upload.
        await client.putObject(bucket, key, body as Readable, undefined, meta);
      }
    },
    async getObjectStream(bucket, key) {
      return client.getObject(bucket, key);
    },
    async deleteObject(bucket, key) {
      await client.removeObject(bucket, key);
    },
    // Map MinIO's statObject result onto the provider-agnostic shape.
    async headObject(bucket, key): Promise<HeadObjectResult> {
      const stat = await client.statObject(bucket, key);
      // minio types: size, etag, lastModified, metaData
      return {
        size: stat.size,
        etag: stat.etag,
        lastModified: stat.lastModified ? new Date(stat.lastModified) : undefined,
        contentType: (stat.metaData && (stat.metaData['content-type'] || stat.metaData['Content-Type'])) || undefined,
        metadata: stat.metaData || undefined,
      };
    },
    async getSignedUrl(bucket, key, options?: SignedUrlOptions) {
      // Clamp expiry to [1s, 7 days] — the S3 presign maximum.
      const expires = Math.max(1, Math.min(7 * 24 * 3600, options?.expiresSeconds ?? 300));
      if (options?.method === 'PUT') {
        // MinIO SDK has presignedPutObject for PUT
        return client.presignedPutObject(bucket, key, expires);
      }
      // Default GET
      return client.presignedGetObject(bucket, key, expires);
    },
  };
  return adapter;
}

View File

@@ -0,0 +1,49 @@
/**
* Provider-agnostic storage facade with S3-compatible surface.
* Initial implementation backed by MinIO using the official SDK.
*/
import type { Readable } from 'stream';
import { createMinioAdapter } from './adapters/minio.adapter';
// Body shapes accepted for uploads: in-memory bytes, a stream, or text.
export type ObjectBody = Buffer | Readable | string;
export interface SignedUrlOptions {
method: 'GET' | 'PUT';
expiresSeconds?: number; // default 300s
}
// Subset of object metadata returned by a HEAD-style stat call.
export interface HeadObjectResult {
size: number;
etag?: string;
lastModified?: Date;
contentType?: string;
metadata?: Record<string, string>;
}
/**
 * Minimal S3-compatible storage contract consumed by feature code.
 * Implementations must not leak provider-specific types.
 */
export interface StorageService {
putObject(
bucket: string,
key: string,
body: ObjectBody,
contentType?: string,
metadata?: Record<string, string>
): Promise<void>;
getObjectStream(bucket: string, key: string): Promise<Readable>;
deleteObject(bucket: string, key: string): Promise<void>;
headObject(bucket: string, key: string): Promise<HeadObjectResult>;
getSignedUrl(bucket: string, key: string, options?: SignedUrlOptions): Promise<string>;
}
// Factory for the process-wide storage backend. Only MinIO today; an S3
// adapter could be swapped in here without touching any feature code.
let cachedStorage: StorageService | null = null;
export function getStorageService(): StorageService {
  if (cachedStorage === null) {
    cachedStorage = createMinioAdapter();
  }
  return cachedStorage;
}

View File

@@ -0,0 +1,35 @@
# Documents Feature Capsule
## Quick Summary (50 tokens)
Secure vehicle document management with S3-compatible storage. Metadata and file uploads with private access, user and vehicle ownership enforcement, and mobile-first UX.
## API Endpoints
- GET /api/documents
- GET /api/documents/:id
- POST /api/documents
- PUT /api/documents/:id
- DELETE /api/documents/:id
- GET /api/documents/vehicle/:vehicleId
- POST /api/documents/:id/upload
- GET /api/documents/:id/download
## Structure
- **api/** - HTTP endpoints, routes, validators
- **domain/** - Business logic, types, rules
- **data/** - Repository, database queries
- **migrations/** - Feature-specific schema
- **tests/** - All feature tests
## Dependencies
- Internal: core/auth, core/middleware/tenant, core/storage
- Database: documents table
## Quick Commands
```bash
# Run feature tests
npm test -- features/documents
# Run migrations (all features)
npm run migrate:all
```

View File

@@ -0,0 +1,325 @@
// Node built-ins
import { randomBytes } from 'crypto';
import path from 'path';
import { Transform, TransformCallback } from 'stream';
// Framework
import { FastifyReply, FastifyRequest } from 'fastify';
// Project modules
import { DocumentsService } from '../domain/documents.service';
import type { CreateBody, IdParams, ListQuery, UpdateBody } from './documents.validation';
import { getStorageService } from '../../../core/storage/storage.service';
import { appConfig } from '../../../core/config/config-loader';
import { logger } from '../../../core/logging/logger';
/**
 * HTTP handlers for the documents feature. Every method resolves the acting
 * user from the verified JWT subject (request.user.sub) and delegates
 * persistence to DocumentsService; file bytes go through the
 * provider-agnostic StorageService.
 */
export class DocumentsController {
private readonly service = new DocumentsService();
// GET /documents — list the user's documents with optional filters.
async list(request: FastifyRequest<{ Querystring: ListQuery }>, reply: FastifyReply) {
const userId = (request as any).user?.sub as string;
logger.info('Documents list requested', {
operation: 'documents.list',
user_id: userId,
filters: {
vehicle_id: request.query.vehicleId,
type: request.query.type,
expires_before: request.query.expiresBefore,
},
});
const docs = await this.service.listDocuments(userId, {
vehicleId: request.query.vehicleId,
type: request.query.type,
expiresBefore: request.query.expiresBefore,
});
logger.info('Documents list retrieved', {
operation: 'documents.list.success',
user_id: userId,
document_count: docs.length,
});
return reply.code(200).send(docs);
}
// GET /documents/:id — fetch one document; 404 if absent or not owned.
async get(request: FastifyRequest<{ Params: IdParams }>, reply: FastifyReply) {
const userId = (request as any).user?.sub as string;
const documentId = request.params.id;
logger.info('Document get requested', {
operation: 'documents.get',
user_id: userId,
document_id: documentId,
});
const doc = await this.service.getDocument(userId, documentId);
if (!doc) {
logger.warn('Document not found', {
operation: 'documents.get.not_found',
user_id: userId,
document_id: documentId,
});
return reply.code(404).send({ error: 'Not Found' });
}
logger.info('Document retrieved', {
operation: 'documents.get.success',
user_id: userId,
document_id: documentId,
vehicle_id: doc.vehicle_id,
document_type: doc.document_type,
});
return reply.code(200).send(doc);
}
// POST /documents — create metadata only; the file is uploaded separately.
async create(request: FastifyRequest<{ Body: CreateBody }>, reply: FastifyReply) {
const userId = (request as any).user?.sub as string;
logger.info('Document create requested', {
operation: 'documents.create',
user_id: userId,
vehicle_id: request.body.vehicle_id,
document_type: request.body.document_type,
title: request.body.title,
});
const created = await this.service.createDocument(userId, request.body);
logger.info('Document created', {
operation: 'documents.create.success',
user_id: userId,
document_id: created.id,
vehicle_id: created.vehicle_id,
document_type: created.document_type,
title: created.title,
});
return reply.code(201).send(created);
}
// PUT /documents/:id — metadata update; 404 if absent or not owned.
async update(request: FastifyRequest<{ Params: IdParams; Body: UpdateBody }>, reply: FastifyReply) {
const userId = (request as any).user?.sub as string;
const documentId = request.params.id;
logger.info('Document update requested', {
operation: 'documents.update',
user_id: userId,
document_id: documentId,
update_fields: Object.keys(request.body),
});
const updated = await this.service.updateDocument(userId, documentId, request.body);
if (!updated) {
logger.warn('Document not found for update', {
operation: 'documents.update.not_found',
user_id: userId,
document_id: documentId,
});
return reply.code(404).send({ error: 'Not Found' });
}
logger.info('Document updated', {
operation: 'documents.update.success',
user_id: userId,
document_id: documentId,
vehicle_id: updated.vehicle_id,
title: updated.title,
});
return reply.code(200).send(updated);
}
// DELETE /documents/:id — best-effort storage cleanup first, then soft
// delete of the row. Storage failures are logged but do not block deletion.
async remove(request: FastifyRequest<{ Params: IdParams }>, reply: FastifyReply) {
const userId = (request as any).user?.sub as string;
const documentId = request.params.id;
logger.info('Document delete requested', {
operation: 'documents.delete',
user_id: userId,
document_id: documentId,
});
// If object exists, delete it from storage first
const existing = await this.service.getDocument(userId, documentId);
if (existing && existing.storage_bucket && existing.storage_key) {
const storage = getStorageService();
try {
await storage.deleteObject(existing.storage_bucket, existing.storage_key);
logger.info('Document file deleted from storage', {
operation: 'documents.delete.storage_cleanup',
user_id: userId,
document_id: documentId,
storage_key: existing.storage_key,
});
} catch (e) {
logger.warn('Failed to delete document file from storage', {
operation: 'documents.delete.storage_cleanup_failed',
user_id: userId,
document_id: documentId,
storage_key: existing.storage_key,
error: e instanceof Error ? e.message : 'Unknown error',
});
// Non-fatal: proceed with soft delete
}
}
await this.service.deleteDocument(userId, documentId);
logger.info('Document deleted', {
operation: 'documents.delete.success',
user_id: userId,
document_id: documentId,
vehicle_id: existing?.vehicle_id,
had_file: !!(existing?.storage_key),
});
return reply.code(204).send();
}
// POST /documents/:id/upload — multipart upload for an existing document.
// Streams the file into object storage while counting bytes, then records
// the storage location and size on the document row.
async upload(request: FastifyRequest<{ Params: IdParams }>, reply: FastifyReply) {
const userId = (request as any).user?.sub as string;
const documentId = request.params.id;
logger.info('Document upload requested', {
operation: 'documents.upload',
user_id: userId,
document_id: documentId,
});
const doc = await this.service.getDocument(userId, documentId);
if (!doc) {
logger.warn('Document not found for upload', {
operation: 'documents.upload.not_found',
user_id: userId,
document_id: documentId,
});
return reply.code(404).send({ error: 'Not Found' });
}
const mp = await (request as any).file({ limits: { files: 1 } });
if (!mp) {
logger.warn('No file provided for upload', {
operation: 'documents.upload.no_file',
user_id: userId,
document_id: documentId,
});
return reply.code(400).send({ error: 'Bad Request', message: 'No file provided' });
}
// Whitelist of accepted content types; anything else is rejected with 415.
const allowed = new Set(['application/pdf', 'image/jpeg', 'image/png']);
const contentType = mp.mimetype as string | undefined;
if (!contentType || !allowed.has(contentType)) {
logger.warn('Unsupported file type for upload', {
operation: 'documents.upload.unsupported_type',
user_id: userId,
document_id: documentId,
content_type: contentType,
file_name: mp.filename,
});
return reply.code(415).send({ error: 'Unsupported Media Type' });
}
const originalName: string = mp.filename || 'upload';
// Prefer the client-supplied extension; fall back to one derived from the
// (already validated) content type.
const ext = (() => {
const e = path.extname(originalName).replace(/^\./, '').toLowerCase();
if (e) return e;
if (contentType === 'application/pdf') return 'pdf';
if (contentType === 'image/jpeg') return 'jpg';
if (contentType === 'image/png') return 'png';
return 'bin';
})();
// Pass-through stream that tallies bytes so file_size can be persisted
// without buffering the whole upload in memory.
class CountingStream extends Transform {
public bytes = 0;
override _transform(chunk: any, _enc: BufferEncoding, cb: TransformCallback) {
this.bytes += chunk.length || 0;
cb(null, chunk);
}
}
const counter = new CountingStream();
mp.file.pipe(counter);
const storage = getStorageService();
const bucket = (doc.storage_bucket || appConfig.getMinioConfig().bucket);
// Key layout: documents/<user>/<vehicle>/<doc>/<version>/<unique>.<ext>
const version = 'v1';
const unique = cryptoRandom();
const key = `documents/${userId}/${doc.vehicle_id}/${doc.id}/${version}/${unique}.${ext}`;
await storage.putObject(bucket, key, counter, contentType, { 'x-original-filename': originalName });
// NOTE(review): reaches into the service's private repo via bracket access;
// consider exposing an updateStorageMeta method on DocumentsService instead.
const updated = await this.service['repo'].updateStorageMeta(doc.id, userId, {
storage_bucket: bucket,
storage_key: key,
file_name: originalName,
content_type: contentType,
file_size: counter.bytes,
file_hash: null,
});
logger.info('Document upload completed', {
operation: 'documents.upload.success',
user_id: userId,
document_id: documentId,
vehicle_id: doc.vehicle_id,
file_name: originalName,
content_type: contentType,
file_size: counter.bytes,
storage_key: key,
});
return reply.code(200).send(updated);
}
// GET /documents/:id/download — stream the stored file back to the client.
async download(request: FastifyRequest<{ Params: IdParams }>, reply: FastifyReply) {
const userId = (request as any).user?.sub as string;
const documentId = request.params.id;
logger.info('Document download requested', {
operation: 'documents.download',
user_id: userId,
document_id: documentId,
});
const doc = await this.service.getDocument(userId, documentId);
if (!doc || !doc.storage_bucket || !doc.storage_key) {
logger.warn('Document or file not found for download', {
operation: 'documents.download.not_found',
user_id: userId,
document_id: documentId,
has_document: !!doc,
has_storage_info: !!(doc?.storage_bucket && doc?.storage_key),
});
return reply.code(404).send({ error: 'Not Found' });
}
const storage = getStorageService();
// Stat is advisory: on failure we fall back to DB-recorded metadata below.
let head: Partial<import('../../../core/storage/storage.service').HeadObjectResult> = {};
try {
head = await storage.headObject(doc.storage_bucket, doc.storage_key);
} catch { /* ignore */ }
const contentType = head.contentType || doc.content_type || 'application/octet-stream';
const filename = doc.file_name || path.basename(doc.storage_key);
// Render PDFs/images inline in the browser; everything else downloads.
const inlineTypes = new Set(['application/pdf', 'image/jpeg', 'image/png']);
const disposition = inlineTypes.has(contentType) ? 'inline' : 'attachment';
reply.header('Content-Type', contentType);
// NOTE(review): percent-encoding inside a quoted filename is non-standard;
// RFC 6266 filename* would be more robust for non-ASCII names — confirm
// client behavior before changing.
reply.header('Content-Disposition', `${disposition}; filename="${encodeURIComponent(filename)}"`);
logger.info('Document download initiated', {
operation: 'documents.download.success',
user_id: userId,
document_id: documentId,
vehicle_id: doc.vehicle_id,
file_name: filename,
content_type: contentType,
disposition: disposition,
file_size: head.size || doc.file_size,
});
const stream = await storage.getObjectStream(doc.storage_bucket, doc.storage_key);
return reply.send(stream);
}
}
/**
 * Generate a collision-resistant unique suffix for storage object keys.
 * Uses CSPRNG bytes plus a timestamp. The previous implementation used
 * Math.random(), which is not cryptographically random despite this
 * function's name and carried a realistic collision risk under load.
 */
function cryptoRandom(): string {
  // base64url of 9 bytes = 12 chars from [A-Za-z0-9_-], all safe in keys.
  return randomBytes(9).toString('base64url') + Date.now().toString(36);
}

View File

@@ -0,0 +1,60 @@
/**
* @ai-summary Fastify routes for documents API
*/
import { FastifyInstance, FastifyPluginAsync, FastifyPluginOptions } from 'fastify';
import { tenantMiddleware } from '../../../core/middleware/tenant';
import { DocumentsController } from './documents.controller';
// Note: Validation uses TypeScript types at handler level; follow existing repo pattern (no JSON schema registration)
/**
 * Registers the documents feature's HTTP routes on the given Fastify
 * instance. Every route requires an authenticated user and a resolved
 * tenant before its handler runs.
 */
export const documentsRoutes: FastifyPluginAsync = async (
  fastify: FastifyInstance,
  _opts: FastifyPluginOptions
) => {
  const controller = new DocumentsController();
  const authenticate = fastify.authenticate.bind(fastify);
  // Shared guard chain: JWT verification, then tenant extraction/validation.
  const guarded = [authenticate, tenantMiddleware as any];

  fastify.get('/documents', {
    preHandler: guarded,
    handler: controller.list.bind(controller),
  });
  fastify.get<{ Params: any }>('/documents/:id', {
    preHandler: guarded,
    handler: controller.get.bind(controller),
  });
  fastify.get<{ Params: any }>('/documents/vehicle/:vehicleId', {
    preHandler: guarded,
    handler: async (req, reply) => {
      // Convenience listing scoped to one vehicle; runs the same
      // service-level query the generic list endpoint uses.
      const userId = (req as any).user?.sub as string;
      const docs = await controller['service'].listDocuments(userId, {
        vehicleId: (req.params as any).vehicleId,
      });
      return reply.code(200).send(docs);
    },
  });
  fastify.post<{ Body: any }>('/documents', {
    preHandler: guarded,
    handler: controller.create.bind(controller),
  });
  fastify.put<{ Params: any; Body: any }>('/documents/:id', {
    preHandler: guarded,
    handler: controller.update.bind(controller),
  });
  fastify.delete<{ Params: any }>('/documents/:id', {
    preHandler: guarded,
    handler: controller.remove.bind(controller),
  });
  fastify.post<{ Params: any }>('/documents/:id/upload', {
    preHandler: guarded,
    handler: controller.upload.bind(controller),
  });
  fastify.get<{ Params: any }>('/documents/:id/download', {
    preHandler: guarded,
    handler: controller.download.bind(controller),
  });
};

View File

@@ -0,0 +1,21 @@
import { z } from 'zod';
import { DocumentTypeSchema, CreateDocumentBodySchema, UpdateDocumentBodySchema } from '../domain/documents.types';
/** Query parameters accepted by GET /documents. */
export const ListQuerySchema = z.object({
  vehicleId: z.string().uuid().optional(),
  type: DocumentTypeSchema.optional(),
  // Fix: this value is compared against a DATE column; without a shape
  // check, garbage input surfaced as a database error (500) instead of a
  // validation error (400). Require a calendar date (YYYY-MM-DD).
  expiresBefore: z.string().regex(/^\d{4}-\d{2}-\d{2}$/, 'expected YYYY-MM-DD').optional(),
});
export const IdParamsSchema = z.object({ id: z.string().uuid() });
export const VehicleParamsSchema = z.object({ vehicleId: z.string().uuid() });
// Re-exported from the domain layer so API and domain validation cannot drift.
export const CreateBodySchema = CreateDocumentBodySchema;
export const UpdateBodySchema = UpdateDocumentBodySchema;
export type ListQuery = z.infer<typeof ListQuerySchema>;
export type IdParams = z.infer<typeof IdParamsSchema>;
export type VehicleParams = z.infer<typeof VehicleParamsSchema>;
export type CreateBody = z.infer<typeof CreateBodySchema>;
export type UpdateBody = z.infer<typeof UpdateBodySchema>;

View File

@@ -0,0 +1,94 @@
import { Pool } from 'pg';
import pool from '../../../core/config/database';
import type { DocumentRecord, DocumentType } from '../domain/documents.types';
/**
 * Data-access layer for the documents table.
 *
 * Every query is scoped by user_id, and all reads/updates filter out
 * soft-deleted rows, so callers never see another user's documents or
 * deleted records.
 */
export class DocumentsRepository {
  constructor(private readonly db: Pool = pool) {}

  /**
   * Inserts a new document row; optional fields default to NULL.
   * @returns the row exactly as stored (RETURNING *).
   */
  async insert(doc: {
    id: string;
    user_id: string;
    vehicle_id: string;
    document_type: DocumentType;
    title: string;
    notes?: string | null;
    details?: any;
    issued_date?: string | null;
    expiration_date?: string | null;
  }): Promise<DocumentRecord> {
    const values = [
      doc.id,
      doc.user_id,
      doc.vehicle_id,
      doc.document_type,
      doc.title,
      doc.notes ?? null,
      doc.details ?? null,
      doc.issued_date ?? null,
      doc.expiration_date ?? null,
    ];
    const result = await this.db.query(
      `INSERT INTO documents (
        id, user_id, vehicle_id, document_type, title, notes, details, issued_date, expiration_date
      ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9)
      RETURNING *`,
      values
    );
    return result.rows[0] as DocumentRecord;
  }

  /** Fetches one non-deleted document owned by the given user, or null. */
  async findById(id: string, userId: string): Promise<DocumentRecord | null> {
    const result = await this.db.query(`SELECT * FROM documents WHERE id = $1 AND user_id = $2 AND deleted_at IS NULL`, [id, userId]);
    return result.rows[0] ?? null;
  }

  /**
   * Lists a user's non-deleted documents, newest first.
   * Filters are applied only when present and truthy (matching prior behavior).
   */
  async listByUser(userId: string, filters?: { vehicleId?: string; type?: DocumentType; expiresBefore?: string }): Promise<DocumentRecord[]> {
    const clauses: string[] = ['user_id = $1', 'deleted_at IS NULL'];
    const values: any[] = [userId];
    // Appends "lhs $n" with the next positional placeholder.
    const addFilter = (lhs: string, value: any): void => {
      values.push(value);
      clauses.push(`${lhs} $${values.length}`);
    };
    if (filters?.vehicleId) addFilter('vehicle_id =', filters.vehicleId);
    if (filters?.type) addFilter('document_type =', filters.type);
    if (filters?.expiresBefore) addFilter('expiration_date <=', filters.expiresBefore);
    const result = await this.db.query(
      `SELECT * FROM documents WHERE ${clauses.join(' AND ')} ORDER BY created_at DESC`,
      values
    );
    return result.rows as DocumentRecord[];
  }

  /** Marks a document deleted (idempotent; the row is kept for audit). */
  async softDelete(id: string, userId: string): Promise<void> {
    await this.db.query(`UPDATE documents SET deleted_at = NOW() WHERE id = $1 AND user_id = $2`, [id, userId]);
  }

  /**
   * Applies a partial metadata update. Undefined fields are left untouched;
   * explicit nulls clear nullable columns. Returns the updated row, the
   * current row when the patch is empty, or null when not found.
   */
  async updateMetadata(id: string, userId: string, patch: Partial<Pick<DocumentRecord, 'title'|'notes'|'details'|'issued_date'|'expiration_date'>>): Promise<DocumentRecord | null> {
    const columns = ['title', 'notes', 'details', 'issued_date', 'expiration_date'] as const;
    const assignments: string[] = [];
    const values: any[] = [];
    for (const column of columns) {
      if (patch[column] !== undefined) {
        values.push(patch[column]);
        assignments.push(`${column} = $${values.length}`);
      }
    }
    if (assignments.length === 0) return this.findById(id, userId);
    values.push(id, userId);
    const sql = `UPDATE documents SET ${assignments.join(', ')} WHERE id = $${values.length - 1} AND user_id = $${values.length} AND deleted_at IS NULL RETURNING *`;
    const result = await this.db.query(sql, values);
    return result.rows[0] ?? null;
  }

  /** Records object-storage location and file metadata after an upload. */
  async updateStorageMeta(id: string, userId: string, meta: {
    storage_bucket: string; storage_key: string; file_name: string; content_type: string; file_size: number; file_hash?: string | null;
  }): Promise<DocumentRecord | null> {
    const result = await this.db.query(
      `UPDATE documents SET
        storage_bucket = $1,
        storage_key = $2,
        file_name = $3,
        content_type = $4,
        file_size = $5,
        file_hash = $6
      WHERE id = $7 AND user_id = $8 AND deleted_at IS NULL
      RETURNING *`,
      [meta.storage_bucket, meta.storage_key, meta.file_name, meta.content_type, meta.file_size, meta.file_hash ?? null, id, userId]
    );
    return result.rows[0] ?? null;
  }
}

View File

@@ -0,0 +1,55 @@
import { randomUUID } from 'crypto';
import type { CreateDocumentBody, DocumentRecord, DocumentType, UpdateDocumentBody } from './documents.types';
import { DocumentsRepository } from '../data/documents.repository';
import pool from '../../../core/config/database';
/**
 * Business logic for vehicle documents. Every operation is scoped to the
 * requesting user; vehicle ownership is verified before creating documents.
 */
export class DocumentsService {
  private readonly repo = new DocumentsRepository(pool);

  /** Creates document metadata after confirming the vehicle belongs to the user. */
  async createDocument(userId: string, body: CreateDocumentBody): Promise<DocumentRecord> {
    await this.assertVehicleOwnership(userId, body.vehicle_id);
    return this.repo.insert({
      id: randomUUID(),
      user_id: userId,
      vehicle_id: body.vehicle_id,
      document_type: body.document_type as DocumentType,
      title: body.title,
      notes: body.notes ?? null,
      details: body.details ?? null,
      issued_date: body.issued_date ?? null,
      expiration_date: body.expiration_date ?? null,
    });
  }

  /** Returns the user's document by id, or null when absent/deleted. */
  async getDocument(userId: string, id: string): Promise<DocumentRecord | null> {
    return this.repo.findById(id, userId);
  }

  /** Lists the user's documents, optionally filtered. */
  async listDocuments(userId: string, filters?: { vehicleId?: string; type?: DocumentType; expiresBefore?: string }) {
    return this.repo.listByUser(userId, filters);
  }

  /** Applies a metadata patch; returns null when the document is not found. */
  async updateDocument(userId: string, id: string, patch: UpdateDocumentBody) {
    const current = await this.repo.findById(id, userId);
    if (!current) return null;
    // Defensive: a non-object patch leaves the record untouched.
    const hasPatch = Boolean(patch) && typeof patch === 'object';
    return hasPatch ? this.repo.updateMetadata(id, userId, patch as any) : current;
  }

  /** Soft-deletes the document (idempotent by design). */
  async deleteDocument(userId: string, id: string): Promise<void> {
    await this.repo.softDelete(id, userId);
  }

  /** Throws a 403 error unless the vehicle exists and belongs to the user. */
  private async assertVehicleOwnership(userId: string, vehicleId: string) {
    const { rows } = await pool.query('SELECT id FROM vehicles WHERE id = $1 AND user_id = $2', [vehicleId, userId]);
    if (!rows[0]) {
      const err: any = new Error('Vehicle not found or not owned by user');
      err.statusCode = 403;
      throw err;
    }
  }
}

View File

@@ -0,0 +1,46 @@
import { z } from 'zod';
/** Document categories supported by the feature. */
export const DocumentTypeSchema = z.enum(['insurance', 'registration']);
export type DocumentType = z.infer<typeof DocumentTypeSchema>;

// Fix: issued/expiration dates were plain z.string(), so malformed values
// passed validation and only failed later at the DATE columns as a 500.
// Validate the calendar-date shape (YYYY-MM-DD) up front.
const IsoDateStringSchema = z.string().regex(/^\d{4}-\d{2}-\d{2}$/, 'expected YYYY-MM-DD');

/** Row shape of the documents table (storage fields are null until upload). */
export interface DocumentRecord {
  id: string;
  user_id: string;
  vehicle_id: string;
  document_type: DocumentType;
  title: string;
  notes?: string | null;
  details?: Record<string, any> | null;
  storage_bucket?: string | null;
  storage_key?: string | null;
  file_name?: string | null;
  content_type?: string | null;
  file_size?: number | null;
  file_hash?: string | null;
  issued_date?: string | null;
  expiration_date?: string | null;
  created_at: string;
  updated_at: string;
  deleted_at?: string | null;
}

/** Payload for creating document metadata (file upload happens separately). */
export const CreateDocumentBodySchema = z.object({
  vehicle_id: z.string().uuid(),
  document_type: DocumentTypeSchema,
  title: z.string().min(1).max(200),
  notes: z.string().max(10000).optional(),
  details: z.record(z.any()).optional(),
  issued_date: IsoDateStringSchema.optional(),
  expiration_date: IsoDateStringSchema.optional(),
});
export type CreateDocumentBody = z.infer<typeof CreateDocumentBodySchema>;

/** Partial metadata patch; explicit null clears a nullable field. */
export const UpdateDocumentBodySchema = z.object({
  title: z.string().min(1).max(200).optional(),
  notes: z.string().max(10000).nullable().optional(),
  details: z.record(z.any()).optional(),
  issued_date: IsoDateStringSchema.nullable().optional(),
  expiration_date: IsoDateStringSchema.nullable().optional(),
});
export type UpdateDocumentBody = z.infer<typeof UpdateDocumentBodySchema>;

View File

@@ -0,0 +1,6 @@
/**
* @ai-summary Public API for documents feature capsule
*/
export { documentsRoutes } from './api/documents.routes';
export type { DocumentType, DocumentRecord, CreateDocumentBody, UpdateDocumentBody } from './domain/documents.types';

View File

@@ -0,0 +1,47 @@
-- Documents feature schema
-- Depends on vehicles table and update_updated_at_column() from vehicles feature
CREATE TABLE IF NOT EXISTS documents (
  id UUID PRIMARY KEY,
  user_id VARCHAR(255) NOT NULL,
  vehicle_id UUID NOT NULL REFERENCES vehicles(id) ON DELETE CASCADE,
  document_type VARCHAR(32) NOT NULL CHECK (document_type IN ('insurance','registration')),
  title VARCHAR(200) NOT NULL,
  notes TEXT NULL,
  details JSONB NULL,
  storage_bucket VARCHAR(128) NULL,
  storage_key VARCHAR(512) NULL,
  file_name VARCHAR(255) NULL,
  content_type VARCHAR(128) NULL,
  file_size BIGINT NULL,
  file_hash VARCHAR(128) NULL,
  issued_date DATE NULL,
  expiration_date DATE NULL,
  created_at TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW() NOT NULL,
  updated_at TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW() NOT NULL,
  deleted_at TIMESTAMP WITHOUT TIME ZONE NULL
);

-- Keep updated_at current on every row update.
-- Fix: trigger names are only unique per table, so the previous existence
-- check on tgname alone could match a same-named trigger on another table
-- and silently skip creating this one. Scope the check with tgrelid.
DO $$
BEGIN
  IF NOT EXISTS (
    SELECT 1 FROM pg_trigger
    WHERE tgname = 'set_timestamp_documents'
      AND tgrelid = 'documents'::regclass
  ) THEN
    CREATE TRIGGER set_timestamp_documents
    BEFORE UPDATE ON documents
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();
  END IF;
END $$;

-- Indexes for the access paths used by the API (per-user listing,
-- per-vehicle listing, type filter, expiration reminders).
CREATE INDEX IF NOT EXISTS idx_documents_user_id ON documents(user_id);
CREATE INDEX IF NOT EXISTS idx_documents_vehicle_id ON documents(vehicle_id);
CREATE INDEX IF NOT EXISTS idx_documents_user_vehicle ON documents(user_id, vehicle_id);
CREATE INDEX IF NOT EXISTS idx_documents_type ON documents(document_type);
CREATE INDEX IF NOT EXISTS idx_documents_expiration ON documents(expiration_date);

View File

@@ -0,0 +1,435 @@
/**
* @ai-summary Integration tests for Documents API endpoints
* @ai-context Tests full API flow with auth, database, and storage
*/
import request from 'supertest';
import { FastifyInstance } from 'fastify';
import { build } from '../../../../app';
import path from 'path';
import fs from 'fs';
// End-to-end suite: exercises the documents routes through the real Fastify
// app (auth, tenant, DB, storage wired as the test environment provides them).
describe('Documents Integration Tests', () => {
  let app: FastifyInstance;
  let testUserId: string;
  let testVehicleId: string;
  let authToken: string;
  beforeAll(async () => {
    // NOTE(review): build() is invoked without await here — confirm it is
    // synchronous in app.ts; if it returns a Promise this needs an await.
    app = build({ logger: false });
    await app.ready();
    // Create test user context
    // NOTE(review): testUserId is fabricated locally while authToken is a
    // fixed mock token. The user_id assertions below only hold if the test
    // auth layer maps 'Bearer test-token' to this same id — verify the mock.
    testUserId = 'test-user-' + Date.now();
    authToken = 'Bearer test-token';
    // Create test vehicle for document association
    const vehicleData = {
      vin: '1HGBH41JXMN109186',
      nickname: 'Test Car',
      color: 'Blue',
      odometerReading: 50000,
    };
    const vehicleResponse = await request(app.server)
      .post('/api/vehicles')
      .set('Authorization', authToken)
      .send(vehicleData);
    testVehicleId = vehicleResponse.body.id;
  });
  afterAll(async () => {
    await app.close();
  });
  describe('POST /api/documents', () => {
    it('should create document metadata', async () => {
      const documentData = {
        vehicle_id: testVehicleId,
        document_type: 'insurance',
        title: 'Car Insurance Policy',
        notes: 'Annual policy',
        details: { provider: 'State Farm', policy_number: '12345' },
        issued_date: '2024-01-01',
        expiration_date: '2024-12-31',
      };
      const response = await request(app.server)
        .post('/api/documents')
        .set('Authorization', authToken)
        .send(documentData)
        .expect(201);
      expect(response.body).toMatchObject({
        id: expect.any(String),
        user_id: testUserId,
        vehicle_id: testVehicleId,
        document_type: 'insurance',
        title: 'Car Insurance Policy',
        notes: 'Annual policy',
        details: { provider: 'State Farm', policy_number: '12345' },
        issued_date: '2024-01-01',
        expiration_date: '2024-12-31',
        created_at: expect.any(String),
        updated_at: expect.any(String),
      });
      // Storage fields should be null initially
      expect(response.body.storage_bucket).toBeNull();
      expect(response.body.storage_key).toBeNull();
      expect(response.body.file_name).toBeNull();
    });
    it('should reject document for non-existent vehicle', async () => {
      const documentData = {
        vehicle_id: 'non-existent-vehicle',
        document_type: 'registration',
        title: 'Invalid Document',
      };
      // Ownership check fails -> 403 (service raises statusCode 403)
      await request(app.server)
        .post('/api/documents')
        .set('Authorization', authToken)
        .send(documentData)
        .expect(403);
    });
    it('should require authentication', async () => {
      const documentData = {
        vehicle_id: testVehicleId,
        document_type: 'insurance',
        title: 'Unauthorized Document',
      };
      await request(app.server)
        .post('/api/documents')
        .send(documentData)
        .expect(401);
    });
  });
  describe('GET /api/documents', () => {
    let testDocumentId: string;
    beforeEach(async () => {
      // Create test document
      const documentData = {
        vehicle_id: testVehicleId,
        document_type: 'registration',
        title: 'Test Document for Listing',
      };
      const response = await request(app.server)
        .post('/api/documents')
        .set('Authorization', authToken)
        .send(documentData);
      testDocumentId = response.body.id;
    });
    it('should list user documents', async () => {
      const response = await request(app.server)
        .get('/api/documents')
        .set('Authorization', authToken)
        .expect(200);
      expect(Array.isArray(response.body)).toBe(true);
      expect(response.body.length).toBeGreaterThan(0);
      expect(response.body.some((doc: any) => doc.id === testDocumentId)).toBe(true);
    });
    it('should filter documents by vehicle', async () => {
      const response = await request(app.server)
        .get('/api/documents')
        .query({ vehicleId: testVehicleId })
        .set('Authorization', authToken)
        .expect(200);
      expect(Array.isArray(response.body)).toBe(true);
      response.body.forEach((doc: any) => {
        expect(doc.vehicle_id).toBe(testVehicleId);
      });
    });
    it('should filter documents by type', async () => {
      const response = await request(app.server)
        .get('/api/documents')
        .query({ type: 'registration' })
        .set('Authorization', authToken)
        .expect(200);
      expect(Array.isArray(response.body)).toBe(true);
      response.body.forEach((doc: any) => {
        expect(doc.document_type).toBe('registration');
      });
    });
  });
  describe('GET /api/documents/:id', () => {
    let testDocumentId: string;
    beforeEach(async () => {
      const documentData = {
        vehicle_id: testVehicleId,
        document_type: 'insurance',
        title: 'Single Document Test',
      };
      const response = await request(app.server)
        .post('/api/documents')
        .set('Authorization', authToken)
        .send(documentData);
      testDocumentId = response.body.id;
    });
    it('should get single document', async () => {
      const response = await request(app.server)
        .get(`/api/documents/${testDocumentId}`)
        .set('Authorization', authToken)
        .expect(200);
      expect(response.body).toMatchObject({
        id: testDocumentId,
        user_id: testUserId,
        vehicle_id: testVehicleId,
        title: 'Single Document Test',
      });
    });
    it('should return 404 for non-existent document', async () => {
      // NOTE(review): 'non-existent-id' is not a UUID — depending on params
      // validation this may 400 rather than 404; confirm controller behavior.
      await request(app.server)
        .get('/api/documents/non-existent-id')
        .set('Authorization', authToken)
        .expect(404);
    });
  });
  describe('PUT /api/documents/:id', () => {
    let testDocumentId: string;
    beforeEach(async () => {
      const documentData = {
        vehicle_id: testVehicleId,
        document_type: 'insurance',
        title: 'Document to Update',
        notes: 'Original notes',
      };
      const response = await request(app.server)
        .post('/api/documents')
        .set('Authorization', authToken)
        .send(documentData);
      testDocumentId = response.body.id;
    });
    it('should update document metadata', async () => {
      const updateData = {
        title: 'Updated Document Title',
        notes: 'Updated notes',
        details: { updated: true },
      };
      const response = await request(app.server)
        .put(`/api/documents/${testDocumentId}`)
        .set('Authorization', authToken)
        .send(updateData)
        .expect(200);
      expect(response.body).toMatchObject({
        id: testDocumentId,
        title: 'Updated Document Title',
        notes: 'Updated notes',
        details: { updated: true },
        updated_at: expect.any(String),
      });
    });
    it('should return 404 for non-existent document', async () => {
      await request(app.server)
        .put('/api/documents/non-existent-id')
        .set('Authorization', authToken)
        .send({ title: 'New Title' })
        .expect(404);
    });
  });
  describe('File Upload/Download Flow', () => {
    let testDocumentId: string;
    let testFilePath: string;
    beforeAll(() => {
      // Create test file
      testFilePath = path.join(__dirname, 'test-file.pdf');
      fs.writeFileSync(testFilePath, 'Fake PDF content for testing');
    });
    afterAll(() => {
      // Clean up test file
      if (fs.existsSync(testFilePath)) {
        fs.unlinkSync(testFilePath);
      }
    });
    beforeEach(async () => {
      const documentData = {
        vehicle_id: testVehicleId,
        document_type: 'insurance',
        title: 'Document for Upload Test',
      };
      const response = await request(app.server)
        .post('/api/documents')
        .set('Authorization', authToken)
        .send(documentData);
      testDocumentId = response.body.id;
    });
    it('should upload file to document', async () => {
      const response = await request(app.server)
        .post(`/api/documents/${testDocumentId}/upload`)
        .set('Authorization', authToken)
        .attach('file', testFilePath)
        .expect(200);
      expect(response.body).toMatchObject({
        id: testDocumentId,
        storage_bucket: expect.any(String),
        storage_key: expect.any(String),
        file_name: 'test-file.pdf',
        content_type: expect.any(String),
        file_size: expect.any(Number),
      });
      // Object keys are namespaced under documents/ in storage
      expect(response.body.storage_key).toMatch(/^documents\//);
    });
    it('should reject unsupported file types', async () => {
      // Create temporary executable file
      const execPath = path.join(__dirname, 'test.exe');
      fs.writeFileSync(execPath, 'fake executable');
      try {
        await request(app.server)
          .post(`/api/documents/${testDocumentId}/upload`)
          .set('Authorization', authToken)
          .attach('file', execPath)
          .expect(415);
      } finally {
        if (fs.existsSync(execPath)) {
          fs.unlinkSync(execPath);
        }
      }
    });
    it('should download uploaded file', async () => {
      // First upload a file
      await request(app.server)
        .post(`/api/documents/${testDocumentId}/upload`)
        .set('Authorization', authToken)
        .attach('file', testFilePath);
      // Then download it
      const response = await request(app.server)
        .get(`/api/documents/${testDocumentId}/download`)
        .set('Authorization', authToken)
        .expect(200);
      expect(response.headers['content-disposition']).toContain('test-file.pdf');
      expect(response.body.toString()).toBe('Fake PDF content for testing');
    });
    it('should return 404 for download without uploaded file', async () => {
      await request(app.server)
        .get(`/api/documents/${testDocumentId}/download`)
        .set('Authorization', authToken)
        .expect(404);
    });
  });
  describe('DELETE /api/documents/:id', () => {
    let testDocumentId: string;
    beforeEach(async () => {
      const documentData = {
        vehicle_id: testVehicleId,
        document_type: 'registration',
        title: 'Document to Delete',
      };
      const response = await request(app.server)
        .post('/api/documents')
        .set('Authorization', authToken)
        .send(documentData);
      testDocumentId = response.body.id;
    });
    it('should soft delete document', async () => {
      await request(app.server)
        .delete(`/api/documents/${testDocumentId}`)
        .set('Authorization', authToken)
        .expect(204);
      // Verify document is no longer accessible
      await request(app.server)
        .get(`/api/documents/${testDocumentId}`)
        .set('Authorization', authToken)
        .expect(404);
    });
    it('should return 404 for already deleted document', async () => {
      // Delete once
      await request(app.server)
        .delete(`/api/documents/${testDocumentId}`)
        .set('Authorization', authToken)
        .expect(204);
      // Try to delete again
      await request(app.server)
        .delete(`/api/documents/${testDocumentId}`)
        .set('Authorization', authToken)
        .expect(204); // Idempotent behavior
    });
  });
  describe('Authorization and Ownership', () => {
    let otherUserDocumentId: string;
    beforeEach(async () => {
      // Create document as different user
      const documentData = {
        vehicle_id: testVehicleId,
        document_type: 'insurance',
        title: 'Other User Document',
      };
      // Mock different user context
      // NOTE(review): this relies on the auth mock resolving
      // 'Bearer other-user-token' to a different user who nonetheless owns
      // testVehicleId — otherwise creation 403s; confirm the fixture setup.
      const otherUserToken = 'Bearer other-user-token';
      const response = await request(app.server)
        .post('/api/documents')
        .set('Authorization', otherUserToken)
        .send(documentData);
      otherUserDocumentId = response.body.id;
    });
    it('should not allow access to other users documents', async () => {
      await request(app.server)
        .get(`/api/documents/${otherUserDocumentId}`)
        .set('Authorization', authToken)
        .expect(404);
    });
    it('should not allow update of other users documents', async () => {
      await request(app.server)
        .put(`/api/documents/${otherUserDocumentId}`)
        .set('Authorization', authToken)
        .send({ title: 'Hacked Title' })
        .expect(404);
    });
  });
});

View File

@@ -0,0 +1,333 @@
/**
* @ai-summary Unit tests for DocumentsRepository
* @ai-context Tests database layer with mocked pool
*/
import { DocumentsRepository } from '../../data/documents.repository';
import type { Pool } from 'pg';
// Unit suite for the repository: the pg Pool is replaced by a jest mock, so
// these tests pin the exact SQL text and parameter ordering the repository
// emits (any change to query strings must be reflected here).
describe('DocumentsRepository', () => {
  let repository: DocumentsRepository;
  let mockPool: jest.Mocked<Pool>;
  beforeEach(() => {
    mockPool = {
      query: jest.fn(),
    } as any;
    repository = new DocumentsRepository(mockPool);
  });
  describe('insert', () => {
    const mockDocumentData = {
      id: 'doc-123',
      user_id: 'user-123',
      vehicle_id: 'vehicle-123',
      document_type: 'insurance' as const,
      title: 'Test Document',
      notes: 'Test notes',
      details: { provider: 'Test Provider' },
      issued_date: '2024-01-01',
      expiration_date: '2024-12-31',
    };
    it('should insert document with all fields', async () => {
      const mockResult = { rows: [{ ...mockDocumentData, created_at: '2024-01-01T00:00:00Z' }] };
      mockPool.query.mockResolvedValue(mockResult);
      const result = await repository.insert(mockDocumentData);
      // Parameter order must match the INSERT column list exactly.
      expect(mockPool.query).toHaveBeenCalledWith(
        expect.stringContaining('INSERT INTO documents'),
        [
          'doc-123',
          'user-123',
          'vehicle-123',
          'insurance',
          'Test Document',
          'Test notes',
          { provider: 'Test Provider' },
          '2024-01-01',
          '2024-12-31',
        ]
      );
      expect(result).toEqual(mockResult.rows[0]);
    });
    it('should insert document with null optional fields', async () => {
      const minimalData = {
        id: 'doc-123',
        user_id: 'user-123',
        vehicle_id: 'vehicle-123',
        document_type: 'registration' as const,
        title: 'Test Document',
      };
      const mockResult = { rows: [{ ...minimalData, notes: null, details: null }] };
      mockPool.query.mockResolvedValue(mockResult);
      const result = await repository.insert(minimalData);
      // Omitted optional fields are coerced to SQL NULLs by the repository.
      expect(mockPool.query).toHaveBeenCalledWith(
        expect.stringContaining('INSERT INTO documents'),
        [
          'doc-123',
          'user-123',
          'vehicle-123',
          'registration',
          'Test Document',
          null,
          null,
          null,
          null,
        ]
      );
      expect(result).toEqual(mockResult.rows[0]);
    });
  });
  describe('findById', () => {
    it('should find document by id and user', async () => {
      const mockDocument = { id: 'doc-123', user_id: 'user-123', title: 'Test' };
      mockPool.query.mockResolvedValue({ rows: [mockDocument] });
      const result = await repository.findById('doc-123', 'user-123');
      // Exact-string assertion: query is scoped by user and excludes soft-deleted rows.
      expect(mockPool.query).toHaveBeenCalledWith(
        'SELECT * FROM documents WHERE id = $1 AND user_id = $2 AND deleted_at IS NULL',
        ['doc-123', 'user-123']
      );
      expect(result).toEqual(mockDocument);
    });
    it('should return null if document not found', async () => {
      mockPool.query.mockResolvedValue({ rows: [] });
      const result = await repository.findById('doc-123', 'user-123');
      expect(result).toBeNull();
    });
  });
  describe('listByUser', () => {
    const mockDocuments = [
      { id: 'doc-1', user_id: 'user-123', title: 'Doc 1' },
      { id: 'doc-2', user_id: 'user-123', title: 'Doc 2' },
    ];
    it('should list all user documents without filters', async () => {
      mockPool.query.mockResolvedValue({ rows: mockDocuments });
      const result = await repository.listByUser('user-123');
      expect(mockPool.query).toHaveBeenCalledWith(
        'SELECT * FROM documents WHERE user_id = $1 AND deleted_at IS NULL ORDER BY created_at DESC',
        ['user-123']
      );
      expect(result).toEqual(mockDocuments);
    });
    it('should list documents with vehicleId filter', async () => {
      mockPool.query.mockResolvedValue({ rows: [mockDocuments[0]] });
      const result = await repository.listByUser('user-123', { vehicleId: 'vehicle-123' });
      expect(mockPool.query).toHaveBeenCalledWith(
        'SELECT * FROM documents WHERE user_id = $1 AND deleted_at IS NULL AND vehicle_id = $2 ORDER BY created_at DESC',
        ['user-123', 'vehicle-123']
      );
      expect(result).toEqual([mockDocuments[0]]);
    });
    it('should list documents with type filter', async () => {
      mockPool.query.mockResolvedValue({ rows: [mockDocuments[0]] });
      const result = await repository.listByUser('user-123', { type: 'insurance' });
      expect(mockPool.query).toHaveBeenCalledWith(
        'SELECT * FROM documents WHERE user_id = $1 AND deleted_at IS NULL AND document_type = $2 ORDER BY created_at DESC',
        ['user-123', 'insurance']
      );
      expect(result).toEqual([mockDocuments[0]]);
    });
    it('should list documents with expiresBefore filter', async () => {
      mockPool.query.mockResolvedValue({ rows: [mockDocuments[0]] });
      const result = await repository.listByUser('user-123', { expiresBefore: '2024-12-31' });
      expect(mockPool.query).toHaveBeenCalledWith(
        'SELECT * FROM documents WHERE user_id = $1 AND deleted_at IS NULL AND expiration_date <= $2 ORDER BY created_at DESC',
        ['user-123', '2024-12-31']
      );
      expect(result).toEqual([mockDocuments[0]]);
    });
    it('should list documents with multiple filters', async () => {
      mockPool.query.mockResolvedValue({ rows: [mockDocuments[0]] });
      const result = await repository.listByUser('user-123', {
        vehicleId: 'vehicle-123',
        type: 'insurance',
        expiresBefore: '2024-12-31',
      });
      // Placeholders are numbered in declaration order: vehicle, type, expiration.
      expect(mockPool.query).toHaveBeenCalledWith(
        'SELECT * FROM documents WHERE user_id = $1 AND deleted_at IS NULL AND vehicle_id = $2 AND document_type = $3 AND expiration_date <= $4 ORDER BY created_at DESC',
        ['user-123', 'vehicle-123', 'insurance', '2024-12-31']
      );
      expect(result).toEqual([mockDocuments[0]]);
    });
  });
  describe('softDelete', () => {
    it('should soft delete document', async () => {
      mockPool.query.mockResolvedValue({ rows: [] });
      await repository.softDelete('doc-123', 'user-123');
      expect(mockPool.query).toHaveBeenCalledWith(
        'UPDATE documents SET deleted_at = NOW() WHERE id = $1 AND user_id = $2',
        ['doc-123', 'user-123']
      );
    });
  });
  describe('updateMetadata', () => {
    it('should update single field', async () => {
      const mockUpdated = { id: 'doc-123', title: 'Updated Title' };
      mockPool.query.mockResolvedValue({ rows: [mockUpdated] });
      const result = await repository.updateMetadata('doc-123', 'user-123', { title: 'Updated Title' });
      expect(mockPool.query).toHaveBeenCalledWith(
        'UPDATE documents SET title = $1 WHERE id = $2 AND user_id = $3 AND deleted_at IS NULL RETURNING *',
        ['Updated Title', 'doc-123', 'user-123']
      );
      expect(result).toEqual(mockUpdated);
    });
    it('should update multiple fields', async () => {
      const mockUpdated = { id: 'doc-123', title: 'Updated Title', notes: 'Updated notes' };
      mockPool.query.mockResolvedValue({ rows: [mockUpdated] });
      const result = await repository.updateMetadata('doc-123', 'user-123', {
        title: 'Updated Title',
        notes: 'Updated notes',
        details: { key: 'value' },
      });
      expect(mockPool.query).toHaveBeenCalledWith(
        'UPDATE documents SET title = $1, notes = $2, details = $3 WHERE id = $4 AND user_id = $5 AND deleted_at IS NULL RETURNING *',
        ['Updated Title', 'Updated notes', { key: 'value' }, 'doc-123', 'user-123']
      );
      expect(result).toEqual(mockUpdated);
    });
    it('should handle null values', async () => {
      // Explicit null is a real update (clears the column), unlike undefined.
      const mockUpdated = { id: 'doc-123', notes: null };
      mockPool.query.mockResolvedValue({ rows: [mockUpdated] });
      const result = await repository.updateMetadata('doc-123', 'user-123', { notes: null });
      expect(mockPool.query).toHaveBeenCalledWith(
        'UPDATE documents SET notes = $1 WHERE id = $2 AND user_id = $3 AND deleted_at IS NULL RETURNING *',
        [null, 'doc-123', 'user-123']
      );
      expect(result).toEqual(mockUpdated);
    });
    it('should return existing record if no fields to update', async () => {
      // Empty patch falls back to a plain findById SELECT, not an UPDATE.
      const mockExisting = { id: 'doc-123', title: 'Existing' };
      mockPool.query.mockResolvedValue({ rows: [mockExisting] });
      const result = await repository.updateMetadata('doc-123', 'user-123', {});
      expect(mockPool.query).toHaveBeenCalledWith(
        'SELECT * FROM documents WHERE id = $1 AND user_id = $2 AND deleted_at IS NULL',
        ['doc-123', 'user-123']
      );
      expect(result).toEqual(mockExisting);
    });
    it('should return null if document not found', async () => {
      mockPool.query.mockResolvedValue({ rows: [] });
      const result = await repository.updateMetadata('doc-123', 'user-123', { title: 'New Title' });
      expect(result).toBeNull();
    });
  });
  describe('updateStorageMeta', () => {
    it('should update storage metadata', async () => {
      const storageMeta = {
        storage_bucket: 'test-bucket',
        storage_key: 'test-key',
        file_name: 'test.pdf',
        content_type: 'application/pdf',
        file_size: 1024,
        file_hash: 'hash123',
      };
      const mockUpdated = { id: 'doc-123', ...storageMeta };
      mockPool.query.mockResolvedValue({ rows: [mockUpdated] });
      const result = await repository.updateStorageMeta('doc-123', 'user-123', storageMeta);
      expect(mockPool.query).toHaveBeenCalledWith(
        expect.stringContaining('UPDATE documents SET'),
        [
          'test-bucket',
          'test-key',
          'test.pdf',
          'application/pdf',
          1024,
          'hash123',
          'doc-123',
          'user-123',
        ]
      );
      expect(result).toEqual(mockUpdated);
    });
    it('should handle null file_hash', async () => {
      const storageMeta = {
        storage_bucket: 'test-bucket',
        storage_key: 'test-key',
        file_name: 'test.pdf',
        content_type: 'application/pdf',
        file_size: 1024,
      };
      mockPool.query.mockResolvedValue({ rows: [{ id: 'doc-123', ...storageMeta, file_hash: null }] });
      const result = await repository.updateStorageMeta('doc-123', 'user-123', storageMeta);
      // Missing file_hash is sent as SQL NULL.
      expect(mockPool.query).toHaveBeenCalledWith(
        expect.stringContaining('UPDATE documents SET'),
        [
          'test-bucket',
          'test-key',
          'test.pdf',
          'application/pdf',
          1024,
          null,
          'doc-123',
          'user-123',
        ]
      );
    });
    it('should return null if document not found', async () => {
      const storageMeta = {
        storage_bucket: 'test-bucket',
        storage_key: 'test-key',
        file_name: 'test.pdf',
        content_type: 'application/pdf',
        file_size: 1024,
      };
      mockPool.query.mockResolvedValue({ rows: [] });
      const result = await repository.updateStorageMeta('doc-123', 'user-123', storageMeta);
      expect(result).toBeNull();
    });
  });
});

View File

@@ -0,0 +1,261 @@
/**
* @ai-summary Unit tests for DocumentsService
* @ai-context Tests business logic with mocked dependencies
*/
import { DocumentsService } from '../../domain/documents.service';
import { DocumentsRepository } from '../../data/documents.repository';
import pool from '../../../../core/config/database';
// Mock dependencies
jest.mock('../../data/documents.repository');
jest.mock('../../../../core/config/database');
const mockRepository = jest.mocked(DocumentsRepository);
const mockPool = jest.mocked(pool);
describe('DocumentsService', () => {
  let service: DocumentsService;
  let repositoryInstance: jest.Mocked<DocumentsRepository>;

  beforeEach(() => {
    jest.clearAllMocks();
    // Minimal hand-rolled mock of the repository surface the service consumes.
    repositoryInstance = {
      insert: jest.fn(),
      findById: jest.fn(),
      listByUser: jest.fn(),
      updateMetadata: jest.fn(),
      updateStorageMeta: jest.fn(),
      softDelete: jest.fn(),
    } as any;
    // Service constructs its own repository internally; intercept that.
    mockRepository.mockImplementation(() => repositoryInstance);
    service = new DocumentsService();
  });

  describe('createDocument', () => {
    const mockDocumentBody = {
      vehicle_id: 'vehicle-123',
      document_type: 'insurance' as const,
      title: 'Car Insurance Policy',
      notes: 'Annual insurance policy',
      details: { provider: 'State Farm' },
      issued_date: '2024-01-01',
      expiration_date: '2024-12-31',
    };
    const mockCreatedDocument = {
      id: 'doc-123',
      user_id: 'user-123',
      vehicle_id: 'vehicle-123',
      document_type: 'insurance' as const,
      title: 'Car Insurance Policy',
      notes: 'Annual insurance policy',
      details: { provider: 'State Farm' },
      storage_bucket: null,
      storage_key: null,
      file_name: null,
      content_type: null,
      file_size: null,
      file_hash: null,
      issued_date: '2024-01-01',
      expiration_date: '2024-12-31',
      created_at: '2024-01-01T00:00:00Z',
      updated_at: '2024-01-01T00:00:00Z',
      deleted_at: null,
    };

    it('should create a document successfully', async () => {
      // Mock vehicle ownership check
      mockPool.query.mockResolvedValue({ rows: [{ id: 'vehicle-123' }] });
      repositoryInstance.insert.mockResolvedValue(mockCreatedDocument);
      const result = await service.createDocument('user-123', mockDocumentBody);
      expect(mockPool.query).toHaveBeenCalledWith(
        'SELECT id FROM vehicles WHERE id = $1 AND user_id = $2',
        ['vehicle-123', 'user-123']
      );
      expect(repositoryInstance.insert).toHaveBeenCalledWith({
        id: expect.any(String),
        user_id: 'user-123',
        vehicle_id: 'vehicle-123',
        document_type: 'insurance',
        title: 'Car Insurance Policy',
        notes: 'Annual insurance policy',
        details: { provider: 'State Farm' },
        issued_date: '2024-01-01',
        expiration_date: '2024-12-31',
      });
      expect(result).toEqual(mockCreatedDocument);
    });

    it('should create document with minimal data', async () => {
      const minimalBody = {
        vehicle_id: 'vehicle-123',
        document_type: 'registration' as const,
        title: 'Vehicle Registration',
      };
      const minimalDocument = {
        ...mockCreatedDocument,
        document_type: 'registration' as const,
        title: 'Vehicle Registration',
        notes: null,
        details: null,
        issued_date: null,
        expiration_date: null,
      };
      mockPool.query.mockResolvedValue({ rows: [{ id: 'vehicle-123' }] });
      repositoryInstance.insert.mockResolvedValue(minimalDocument);
      const result = await service.createDocument('user-123', minimalBody);
      expect(repositoryInstance.insert).toHaveBeenCalledWith({
        id: expect.any(String),
        user_id: 'user-123',
        vehicle_id: 'vehicle-123',
        document_type: 'registration',
        title: 'Vehicle Registration',
        notes: null,
        details: null,
        issued_date: null,
        expiration_date: null,
      });
      // Fix: the original test awaited the result but never asserted on it,
      // leaving the service's return value for the minimal path unverified.
      expect(result).toEqual(minimalDocument);
    });

    it('should reject document for non-owned vehicle', async () => {
      // Ownership query returns no rows -> service must refuse the insert.
      mockPool.query.mockResolvedValue({ rows: [] });
      await expect(service.createDocument('user-123', mockDocumentBody))
        .rejects.toThrow('Vehicle not found or not owned by user');
      expect(mockPool.query).toHaveBeenCalledWith(
        'SELECT id FROM vehicles WHERE id = $1 AND user_id = $2',
        ['vehicle-123', 'user-123']
      );
      expect(repositoryInstance.insert).not.toHaveBeenCalled();
    });

    it('should generate unique IDs for documents', async () => {
      mockPool.query.mockResolvedValue({ rows: [{ id: 'vehicle-123' }] });
      repositoryInstance.insert.mockResolvedValue(mockCreatedDocument);
      await service.createDocument('user-123', mockDocumentBody);
      await service.createDocument('user-123', mockDocumentBody);
      expect(repositoryInstance.insert).toHaveBeenCalledTimes(2);
      const firstCall = repositoryInstance.insert.mock.calls[0][0];
      const secondCall = repositoryInstance.insert.mock.calls[1][0];
      expect(firstCall.id).not.toEqual(secondCall.id);
      // IDs must be RFC 4122 version-4 UUIDs.
      expect(firstCall.id).toMatch(/^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/);
    });
  });

  describe('getDocument', () => {
    it('should return document if found', async () => {
      const mockDocument = {
        id: 'doc-123',
        user_id: 'user-123',
        title: 'Test Document',
      };
      repositoryInstance.findById.mockResolvedValue(mockDocument as any);
      const result = await service.getDocument('user-123', 'doc-123');
      expect(repositoryInstance.findById).toHaveBeenCalledWith('doc-123', 'user-123');
      expect(result).toEqual(mockDocument);
    });

    it('should return null if document not found', async () => {
      repositoryInstance.findById.mockResolvedValue(null);
      const result = await service.getDocument('user-123', 'doc-123');
      expect(repositoryInstance.findById).toHaveBeenCalledWith('doc-123', 'user-123');
      expect(result).toBeNull();
    });
  });

  describe('listDocuments', () => {
    const mockDocuments = [
      { id: 'doc-1', title: 'Insurance', document_type: 'insurance' },
      { id: 'doc-2', title: 'Registration', document_type: 'registration' },
    ];

    it('should list all user documents without filters', async () => {
      repositoryInstance.listByUser.mockResolvedValue(mockDocuments as any);
      const result = await service.listDocuments('user-123');
      // Filters are forwarded verbatim (undefined when omitted).
      expect(repositoryInstance.listByUser).toHaveBeenCalledWith('user-123', undefined);
      expect(result).toEqual(mockDocuments);
    });

    it('should list documents with filters', async () => {
      const filters = {
        vehicleId: 'vehicle-123',
        type: 'insurance' as const,
        expiresBefore: '2024-12-31',
      };
      repositoryInstance.listByUser.mockResolvedValue([mockDocuments[0]] as any);
      const result = await service.listDocuments('user-123', filters);
      expect(repositoryInstance.listByUser).toHaveBeenCalledWith('user-123', filters);
      expect(result).toEqual([mockDocuments[0]]);
    });
  });

  describe('updateDocument', () => {
    const mockExistingDocument = {
      id: 'doc-123',
      user_id: 'user-123',
      title: 'Original Title',
    };

    it('should update document successfully', async () => {
      const updateData = { title: 'Updated Title', notes: 'Updated notes' };
      const updatedDocument = { ...mockExistingDocument, ...updateData };
      repositoryInstance.findById.mockResolvedValue(mockExistingDocument as any);
      repositoryInstance.updateMetadata.mockResolvedValue(updatedDocument as any);
      const result = await service.updateDocument('user-123', 'doc-123', updateData);
      expect(repositoryInstance.findById).toHaveBeenCalledWith('doc-123', 'user-123');
      expect(repositoryInstance.updateMetadata).toHaveBeenCalledWith('doc-123', 'user-123', updateData);
      expect(result).toEqual(updatedDocument);
    });

    it('should return null if document not found', async () => {
      repositoryInstance.findById.mockResolvedValue(null);
      const result = await service.updateDocument('user-123', 'doc-123', { title: 'New Title' });
      expect(repositoryInstance.findById).toHaveBeenCalledWith('doc-123', 'user-123');
      expect(repositoryInstance.updateMetadata).not.toHaveBeenCalled();
      expect(result).toBeNull();
    });

    it('should return existing document if no valid patch provided', async () => {
      repositoryInstance.findById.mockResolvedValue(mockExistingDocument as any);
      const result = await service.updateDocument('user-123', 'doc-123', null as any);
      expect(repositoryInstance.findById).toHaveBeenCalledWith('doc-123', 'user-123');
      // An empty/invalid patch is a no-op, not an error.
      expect(repositoryInstance.updateMetadata).not.toHaveBeenCalled();
      expect(result).toEqual(mockExistingDocument);
    });
  });

  describe('deleteDocument', () => {
    it('should delete document successfully', async () => {
      repositoryInstance.softDelete.mockResolvedValue(undefined);
      await service.deleteDocument('user-123', 'doc-123');
      expect(repositoryInstance.softDelete).toHaveBeenCalledWith('doc-123', 'user-123');
    });
  });
});

View File

@@ -0,0 +1,256 @@
/**
* @ai-summary Unit tests for MinIO storage adapter
* @ai-context Tests storage layer with mocked MinIO client
*/
import { createMinioAdapter } from '../../../../core/storage/adapters/minio.adapter';
import { Client as MinioClient } from 'minio';
import { appConfig } from '../../../../core/config/config-loader';
import { Readable } from 'stream';
// Mock dependencies
jest.mock('minio');
jest.mock('../../../../core/config/config-loader');
const mockMinioClient = jest.mocked(MinioClient);
const mockAppConfig = jest.mocked(appConfig);
describe('MinIO Storage Adapter', () => {
  let minio: jest.Mocked<MinioClient>;
  let adapter: ReturnType<typeof createMinioAdapter>;

  beforeEach(() => {
    jest.clearAllMocks();
    // Stub only the client methods the adapter delegates to.
    minio = {
      putObject: jest.fn(),
      getObject: jest.fn(),
      removeObject: jest.fn(),
      statObject: jest.fn(),
      presignedGetObject: jest.fn(),
      presignedPutObject: jest.fn(),
    } as any;
    mockMinioClient.mockImplementation(() => minio);
    mockAppConfig.getMinioConfig.mockReturnValue({
      endpoint: 'localhost',
      port: 9000,
      accessKey: 'testkey',
      secretKey: 'testsecret',
      bucket: 'test-bucket',
    });
    adapter = createMinioAdapter();
  });

  describe('putObject', () => {
    it('should upload Buffer with correct parameters', async () => {
      minio.putObject.mockResolvedValue('etag-123');
      const payload = Buffer.from('test content');
      await adapter.putObject('test-bucket', 'test-key', payload, 'text/plain', { 'x-custom': 'value' });
      // Buffer uploads pass an explicit byte length plus merged metadata.
      expect(minio.putObject).toHaveBeenCalledWith('test-bucket', 'test-key', payload, payload.length, {
        'Content-Type': 'text/plain',
        'x-custom': 'value',
      });
    });

    it('should upload string with correct parameters', async () => {
      minio.putObject.mockResolvedValue('etag-123');
      const payload = 'test content';
      await adapter.putObject('test-bucket', 'test-key', payload, 'text/plain');
      expect(minio.putObject).toHaveBeenCalledWith('test-bucket', 'test-key', payload, payload.length, {
        'Content-Type': 'text/plain',
      });
    });

    it('should upload stream without size', async () => {
      minio.putObject.mockResolvedValue('etag-123');
      const body = new Readable();
      await adapter.putObject('test-bucket', 'test-key', body, 'application/octet-stream');
      // Streams have no known length up front, so size is undefined.
      expect(minio.putObject).toHaveBeenCalledWith('test-bucket', 'test-key', body, undefined, {
        'Content-Type': 'application/octet-stream',
      });
    });

    it('should handle upload without content type', async () => {
      minio.putObject.mockResolvedValue('etag-123');
      const payload = Buffer.from('test');
      await adapter.putObject('test-bucket', 'test-key', payload);
      expect(minio.putObject).toHaveBeenCalledWith('test-bucket', 'test-key', payload, payload.length, {});
    });
  });

  describe('getObjectStream', () => {
    it('should return object stream', async () => {
      const stream = new Readable();
      minio.getObject.mockResolvedValue(stream);
      // The adapter hands back the client's stream untouched.
      await expect(adapter.getObjectStream('test-bucket', 'test-key')).resolves.toBe(stream);
      expect(minio.getObject).toHaveBeenCalledWith('test-bucket', 'test-key');
    });
  });

  describe('deleteObject', () => {
    it('should remove object', async () => {
      minio.removeObject.mockResolvedValue(undefined);
      await adapter.deleteObject('test-bucket', 'test-key');
      expect(minio.removeObject).toHaveBeenCalledWith('test-bucket', 'test-key');
    });
  });

  describe('headObject', () => {
    it('should return object metadata', async () => {
      const stat = {
        size: 1024,
        etag: 'test-etag',
        lastModified: '2024-01-01T00:00:00Z',
        metaData: {
          'content-type': 'application/pdf',
          'x-custom-header': 'custom-value',
        },
      };
      minio.statObject.mockResolvedValue(stat);
      const meta = await adapter.headObject('test-bucket', 'test-key');
      expect(minio.statObject).toHaveBeenCalledWith('test-bucket', 'test-key');
      expect(meta).toEqual({
        size: 1024,
        etag: 'test-etag',
        lastModified: new Date('2024-01-01T00:00:00Z'),
        contentType: 'application/pdf',
        metadata: stat.metaData,
      });
    });

    it('should handle metadata with Content-Type header', async () => {
      // Content type can arrive under either header casing.
      minio.statObject.mockResolvedValue({
        size: 1024,
        etag: 'test-etag',
        lastModified: '2024-01-01T00:00:00Z',
        metaData: { 'Content-Type': 'image/jpeg' },
      });
      const meta = await adapter.headObject('test-bucket', 'test-key');
      expect(meta.contentType).toBe('image/jpeg');
    });

    it('should handle missing optional fields', async () => {
      minio.statObject.mockResolvedValue({ size: 1024, etag: 'test-etag' });
      await expect(adapter.headObject('test-bucket', 'test-key')).resolves.toEqual({
        size: 1024,
        etag: 'test-etag',
        lastModified: undefined,
        contentType: undefined,
        metadata: undefined,
      });
    });
  });

  describe('getSignedUrl', () => {
    it('should generate GET signed URL with default expiry', async () => {
      minio.presignedGetObject.mockResolvedValue('https://example.com/signed-url');
      await expect(adapter.getSignedUrl('test-bucket', 'test-key')).resolves.toBe('https://example.com/signed-url');
      expect(minio.presignedGetObject).toHaveBeenCalledWith('test-bucket', 'test-key', 300);
    });

    it('should generate GET signed URL with custom expiry', async () => {
      minio.presignedGetObject.mockResolvedValue('https://example.com/signed-url');
      const url = await adapter.getSignedUrl('test-bucket', 'test-key', { method: 'GET', expiresSeconds: 600 });
      expect(minio.presignedGetObject).toHaveBeenCalledWith('test-bucket', 'test-key', 600);
      expect(url).toBe('https://example.com/signed-url');
    });

    it('should generate PUT signed URL', async () => {
      minio.presignedPutObject.mockResolvedValue('https://example.com/put-url');
      const url = await adapter.getSignedUrl('test-bucket', 'test-key', { method: 'PUT', expiresSeconds: 300 });
      expect(minio.presignedPutObject).toHaveBeenCalledWith('test-bucket', 'test-key', 300);
      expect(url).toBe('https://example.com/put-url');
    });

    it('should enforce minimum expiry time', async () => {
      minio.presignedGetObject.mockResolvedValue('https://example.com/signed-url');
      await adapter.getSignedUrl('test-bucket', 'test-key', { expiresSeconds: 0 });
      // Non-positive expiries are clamped up to one second.
      expect(minio.presignedGetObject).toHaveBeenCalledWith('test-bucket', 'test-key', 1);
    });

    it('should enforce maximum expiry time', async () => {
      minio.presignedGetObject.mockResolvedValue('https://example.com/signed-url');
      await adapter.getSignedUrl('test-bucket', 'test-key', { expiresSeconds: 10000000 });
      expect(minio.presignedGetObject).toHaveBeenCalledWith('test-bucket', 'test-key', 604800); // 7 days max
    });
  });

  describe('MinioClient instantiation', () => {
    it('should create client with correct configuration', () => {
      // The adapter built in beforeEach should have configured the client.
      expect(mockMinioClient).toHaveBeenCalledWith({
        endPoint: 'localhost',
        port: 9000,
        useSSL: false,
        accessKey: 'testkey',
        secretKey: 'testsecret',
      });
    });
  });
});

View File

@@ -1,7 +1,7 @@
# Maintenance Feature Capsule
## Status
- WIP: Scaffolded; implementation pending. Track updates in `docs/changes/MULTI-TENANT-REDESIGN.md` and related feature plans.
## Structure
- **api/** - HTTP endpoints, routes, validators
@@ -15,8 +15,8 @@
## Dependencies
- Internal: core/auth, core/cache
- External: (none)
- Database: maintenance table (see `docs/DATABASE-SCHEMA.md`)
## Quick Commands
```bash
@@ -27,8 +27,5 @@ npm test -- features/maintenance
npm run migrate:feature maintenance
```
## Clarifications Needed
- Entities/fields and validation rules (e.g., due date, mileage, completion criteria)?
- Planned endpoints and request/response shapes?
- Relationship to vehicles (required foreign keys, cascades)?
- Caching requirements (e.g., upcoming maintenance TTL)?
## API (planned)
- Endpoints and business rules to be finalized; depends on vehicles. See `docs/DATABASE-SCHEMA.md` for current table shape and indexes.

View File

@@ -1,7 +1,7 @@
# Stations Feature Capsule
## Quick Summary (50 tokens)
Search nearby gas stations via Google Maps and manage users' saved stations with user-owned saved lists. Caches search results for 1 hour. JWT required for all endpoints.
## API Endpoints (JWT required)
- `POST /api/stations/search` — Search nearby stations
@@ -22,7 +22,7 @@ Search nearby gas stations via Google Maps and manage users' saved stations.
## Dependencies
- Internal: core/auth, core/cache
- External: Google Maps API (Places)
- Database: stations table (see `docs/DATABASE-SCHEMA.md`)
## Quick Commands
```bash
@@ -32,9 +32,6 @@ npm test -- features/stations
# Run feature migrations
npm run migrate:feature stations
```
## Clarifications Needed
- Search payload structure (required fields, radius/filters)?
- Saved station schema and required fields?
- Caching policy for searches (TTL, cache keys)?
- Rate limits or quotas for Google Maps calls?
## Notes
- Search payload and saved schema to be finalized; align with Google Places best practices and platform quotas. Caching policy: 1 hour TTL (key `stations:search:{query}`).

View File

@@ -4,14 +4,14 @@
*/
import axios from 'axios';
import { env } from '../../../../core/config/environment';
import { appConfig } from '../../../../core/config/config-loader';
import { logger } from '../../../../core/logging/logger';
import { cacheService } from '../../../../core/config/redis';
import { GooglePlacesResponse, GooglePlace } from './google-maps.types';
import { Station } from '../../domain/stations.types';
export class GoogleMapsClient {
private readonly apiKey = env.GOOGLE_MAPS_API_KEY;
private readonly apiKey = appConfig.secrets.google_maps_api_key;
private readonly baseURL = 'https://maps.googleapis.com/maps/api/place';
private readonly cacheTTL = 3600; // 1 hour

View File

@@ -1,7 +1,7 @@
import { Logger } from 'winston';
import { PlatformVehiclesClient } from '../external/platform-vehicles/platform-vehicles.client';
import { VPICClient } from '../external/vpic/vpic.client';
import { env } from '../../../core/config/environment';
import { appConfig } from '../../../core/config/config-loader';
/**
@@ -22,7 +22,7 @@ export class PlatformIntegrationService {
this.vpicClient = vpicClient;
// Feature flag - can be environment variable or runtime config
this.usePlatformService = env.NODE_ENV !== 'test'; // Use platform service except in tests
this.usePlatformService = appConfig.config.server.environment !== 'test'; // Use platform service except in tests
this.logger.info(`Vehicle service integration initialized: usePlatformService=${this.usePlatformService}`);
}

View File

@@ -16,7 +16,7 @@ import {
import { logger } from '../../../core/logging/logger';
import { cacheService } from '../../../core/config/redis';
import { isValidVIN } from '../../../shared-minimal/utils/validators';
import { env } from '../../../core/config/environment';
import { appConfig } from '../../../core/config/config-loader';
import { normalizeMakeName, normalizeModelName } from './name-normalizer';
export class VehiclesService {
@@ -26,10 +26,11 @@ export class VehiclesService {
constructor(private repository: VehiclesRepository) {
// Initialize platform vehicles client
const platformConfig = appConfig.getPlatformServiceConfig('vehicles');
const platformClient = new PlatformVehiclesClient({
baseURL: env.PLATFORM_VEHICLES_API_URL,
apiKey: env.PLATFORM_VEHICLES_API_KEY,
tenantId: process.env.TENANT_ID,
baseURL: platformConfig.url,
apiKey: platformConfig.apiKey,
tenantId: appConfig.config.server.tenant_id,
timeout: 3000,
logger
});

View File

@@ -4,7 +4,7 @@
*/
import axios from 'axios';
import { env } from '../../../../core/config/environment';
import { appConfig } from '../../../../core/config/config-loader';
import { logger } from '../../../../core/logging/logger';
import { cacheService } from '../../../../core/config/redis';
import {
@@ -19,7 +19,7 @@ import {
} from './vpic.types';
export class VPICClient {
private readonly baseURL = env.VPIC_API_URL;
private readonly baseURL = appConfig.config.external.vpic.url;
private readonly cacheTTL = 30 * 24 * 60 * 60; // 30 days in seconds
private readonly dropdownCacheTTL = 7 * 24 * 60 * 60; // 7 days for dropdown data

View File

@@ -3,10 +3,10 @@
* @ai-context Starts the Fastify server with all feature capsules
*/
import { buildApp } from './app';
import { env } from './core/config/environment';
import { appConfig } from './core/config/config-loader';
import { logger } from './core/logging/logger';
const PORT = env.PORT || 3001;
const PORT = appConfig.config.server.port;
async function start() {
try {
@@ -19,7 +19,7 @@ async function start() {
logger.info(`MotoVaultPro backend running`, {
port: PORT,
environment: env.NODE_ENV,
environment: appConfig.config.server.environment,
nodeVersion: process.version,
framework: 'Fastify'
});