fix: Database schema fixes. CI/CD improvements.

This commit is contained in:
Eric Gullickson
2025-12-27 16:23:22 -06:00
parent 344df5184c
commit dc2c731119
26 changed files with 242360 additions and 481192 deletions

View File

@@ -86,16 +86,12 @@ deploy:
- sleep 15
- echo "Step 5/7 Running database migrations..."
- docker compose -f $DOCKER_COMPOSE_FILE run --rm mvp-backend npm run migrate || echo "Migration skipped"
- echo "Step 6/7 Running vehicle ETL import..."
- |
docker exec -i mvp-postgres psql -U postgres -d motovaultpro < data/vehicle-etl/migrations/001_create_vehicle_database.sql
docker exec -i mvp-postgres psql -U postgres -d motovaultpro -c "TRUNCATE TABLE vehicle_options RESTART IDENTITY CASCADE; TRUNCATE TABLE engines RESTART IDENTITY CASCADE; TRUNCATE TABLE transmissions RESTART IDENTITY CASCADE;"
docker exec -i mvp-postgres psql -U postgres -d motovaultpro < data/vehicle-etl/output/01_engines.sql
docker exec -i mvp-postgres psql -U postgres -d motovaultpro < data/vehicle-etl/output/02_transmissions.sql
docker exec -i mvp-postgres psql -U postgres -d motovaultpro < data/vehicle-etl/output/03_vehicle_options.sql
- echo "Step 6/7 Vehicle catalog data..."
# Schema and data now loaded via standard migration system
# Migration runner handles table creation and data loading automatically
- echo "Vehicle catalog loaded via platform feature migration"
- echo "Flushing Redis cache..."
- docker exec mvp-redis redis-cli FLUSHALL
- echo "Vehicle ETL import completed"
- echo "Step 7/7 Starting all services..."
- docker compose -f $DOCKER_COMPOSE_FILE -f $DOCKER_COMPOSE_PROD_FILE up -d
- echo "Waiting for services to initialize..."

View File

@@ -59,7 +59,7 @@ export class CatalogImportService {
async previewImport(csvContent: string): Promise<ImportPreviewResult> {
const previewId = uuidv4();
const toCreate: ImportRow[] = [];
const toUpdate: ImportRow[] = [];
const toUpdate: ImportRow[] = []; // Kept for interface compatibility (will be empty)
const errors: ImportError[] = [];
const lines = csvContent.trim().split('\n');
@@ -146,21 +146,8 @@ export class CatalogImportService {
transmissionType,
};
// Check if record exists to determine create vs update (upsert logic)
const existsResult = await this.pool.query(
`SELECT id FROM vehicle_options
WHERE year = $1 AND make = $2 AND model = $3 AND trim = $4
LIMIT 1`,
[year, make, model, trim]
);
const exists = (existsResult.rowCount || 0) > 0;
// Auto-detect: if exists -> update, else -> create
if (exists) {
toUpdate.push(row);
} else {
// All rows will be inserted with ON CONFLICT handling (proper upsert)
toCreate.push(row);
}
} catch (error: any) {
errors.push({ row: rowNum, error: error.message || 'Parse error' });
}
@@ -239,62 +226,30 @@ export class CatalogImportService {
transmissionId = transResult.rows[0].id;
}
// Insert vehicle option
await client.query(
// Upsert vehicle option (insert or update if exists)
const upsertResult = await client.query(
`INSERT INTO vehicle_options (year, make, model, trim, engine_id, transmission_id)
VALUES ($1, $2, $3, $4, $5, $6)`,
VALUES ($1, $2, $3, $4, $5, $6)
ON CONFLICT (year, make, model, trim, engine_id, transmission_id)
DO UPDATE SET updated_at = NOW()
RETURNING (xmax = 0) AS inserted`,
[row.year, row.make, row.model, row.trim, engineId, transmissionId]
);
// Check if this was an insert (xmax=0) or update (xmax!=0)
const wasInserted = upsertResult.rows[0].inserted;
if (wasInserted) {
result.created++;
} catch (error: any) {
result.errors.push({ row: 0, error: `Failed to create ${row.year} ${row.make} ${row.model} ${row.trim}: ${error.message}` });
}
}
// Process updates
for (const row of preview.toUpdate) {
try {
// Get or create engine
let engineId: number | null = null;
if (row.engineName) {
const engineResult = await client.query(
`INSERT INTO engines (name, fuel_type)
VALUES ($1, 'Gas')
ON CONFLICT ((lower(name))) DO UPDATE SET name = EXCLUDED.name
RETURNING id`,
[row.engineName]
);
engineId = engineResult.rows[0].id;
}
// Get or create transmission
let transmissionId: number | null = null;
if (row.transmissionType) {
const transResult = await client.query(
`INSERT INTO transmissions (type)
VALUES ($1)
ON CONFLICT ((lower(type))) DO UPDATE SET type = EXCLUDED.type
RETURNING id`,
[row.transmissionType]
);
transmissionId = transResult.rows[0].id;
}
// Update vehicle option
await client.query(
`UPDATE vehicle_options
SET engine_id = $5, transmission_id = $6, updated_at = NOW()
WHERE year = $1 AND make = $2 AND model = $3 AND trim = $4`,
[row.year, row.make, row.model, row.trim, engineId, transmissionId]
);
} else {
result.updated++;
}
} catch (error: any) {
result.errors.push({ row: 0, error: `Failed to update ${row.year} ${row.make} ${row.model} ${row.trim}: ${error.message}` });
result.errors.push({ row: 0, error: `Failed to upsert ${row.year} ${row.make} ${row.model} ${row.trim}: ${error.message}` });
}
}
// Note: Separate "Process updates" loop removed - ON CONFLICT handles both INSERT and UPDATE
await client.query('COMMIT');
// Remove preview from cache
@@ -306,13 +261,23 @@ export class CatalogImportService {
logger.debug('Vehicle data cache invalidated after import');
}
logger.info('Catalog import completed', {
// Log completion with appropriate level
if (result.errors.length > 0) {
logger.warn('Catalog import completed with errors', {
previewId,
created: result.created,
updated: result.updated,
errors: result.errors.length,
changedBy,
});
} else {
logger.info('Catalog import completed successfully', {
previewId,
created: result.created,
updated: result.updated,
changedBy,
});
}
return result;
} catch (error) {

View File

@@ -180,8 +180,14 @@ export class BackupController {
const preview = await this.restoreService.previewRestore(request.params.id);
reply.send(preview);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Failed to preview restore';
logger.error('Preview restore failed', {
backupId: request.params.id,
error: errorMessage,
stack: error instanceof Error ? error.stack : undefined,
});
reply.status(400).send({
error: error instanceof Error ? error.message : 'Failed to preview restore',
error: errorMessage,
});
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,47 @@
INSERT INTO public.transmissions VALUES (3393, '8-Speed Dual-Clutch', NULL, NULL, '2025-12-27 20:24:19.358069', '2025-12-27 20:24:19.358069');
INSERT INTO public.transmissions VALUES (11, 'Continuously Variable Transmission', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (3404, 'Single-Speed Direct Drive', NULL, NULL, '2025-12-27 20:24:19.358069', '2025-12-27 20:24:19.358069');
INSERT INTO public.transmissions VALUES (15, '5-Speed Automatic Overdrive', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (3413, '2-Speed Direct Drive', NULL, NULL, '2025-12-27 20:24:19.358069', '2025-12-27 20:24:19.358069');
INSERT INTO public.transmissions VALUES (32, '4-Speed CVT', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (3072, 'Single-Speed Transmission', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (24, '5-Speed Dual Clutch', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (9, '4-Speed Automatic Overdrive', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (5304, 'ISR Automatic', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (5081, 'Electric', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (10, '5-Speed Manual Overdrive', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (36, '10-Speed Automatic Transmission', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (13, '6-Speed Manual Overdrive', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (22, '1-Speed Automatic', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (18, '6-Speed CVT', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (29, '8-Speed CVT', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (4, '5-Speed Manual', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (5, '4-Speed Manual', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (3, '3-Speed Automatic', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (6, '3-Speed Manual', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (35, '2-Speed Automatic', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (1184, '9-Speed DCT', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (23, '7-Speed Dual Clutch', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (33, '10-Speed Dual Clutch', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (34, '10-Speed CVT', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (1159, '8-Speed DCT', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (1172, '7-Speed DCT', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (17, '6-Speed Automatic Overdrive', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (7, '4-Speed Automatic', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (14, '1-Speed Dual Clutch', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (25, '7-Speed CVT', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (30, '9-Speed Dual Clutch', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (12, '5-Speed Automatic', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (20, '6-Speed Dual Clutch', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (19, '7-Speed Automatic', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (28, '8-Speed Dual Clutch', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (26, '7-Speed Manual', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (8, '6-Speed Manual', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (115, 'CVT', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (2, 'Manual', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (1, 'Automatic', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (27, '9-Speed Automatic', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (21, '8-Speed Automatic', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (119, '1-Speed Direct Drive', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (16, '6-Speed Automatic', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');
INSERT INTO public.transmissions VALUES (31, '10-Speed Automatic', NULL, NULL, '2025-12-27 17:00:28.222415', '2025-12-27 17:00:28.222415');

-- FIX: explicit ids were inserted into a SERIAL column, so the backing
-- sequence is still at its initial value and the next application-level
-- INSERT (which omits id) would collide with a seeded primary key.
-- Advance the sequence past the highest seeded id.
SELECT setval(
  pg_get_serial_sequence('public.transmissions', 'id'),
  (SELECT MAX(id) FROM public.transmissions)
);

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,75 @@
/**
* @ai-summary Vehicle catalog data seeding service
* @ai-context Loads vehicle catalog data from exported SQL files after migrations
*/
import { Pool, PoolClient } from 'pg';
import { logger } from '../../../core/logging/logger';
import * as fs from 'fs';
import * as path from 'path';
export class CatalogSeedService {
private readonly dataDir = '/app/migrations/features/platform/data';
constructor(private pool: Pool) {}
/**
* Seed vehicle catalog data if tables are empty
*/
async seedIfEmpty(): Promise<void> {
try {
// Check if data already exists
const count = await this.pool.query('SELECT COUNT(*) FROM vehicle_options');
const rowCount = parseInt(count.rows[0].count, 10);
if (rowCount > 0) {
logger.info('Vehicle catalog already seeded, skipping', { rowCount });
return;
}
logger.info('Seeding vehicle catalog data...');
// Load data files in order
await this.loadDataFile('engines.sql');
await this.loadDataFile('transmissions.sql');
await this.loadDataFile('vehicle_options.sql');
// Verify data loaded
const finalCount = await this.pool.query('SELECT COUNT(*) FROM vehicle_options');
const finalRowCount = parseInt(finalCount.rows[0].count, 10);
logger.info('Vehicle catalog seeding complete', { rowCount: finalRowCount });
} catch (error: any) {
logger.error('Failed to seed vehicle catalog', { error: error.message });
throw error;
}
}
/**
* Load and execute a SQL data file
*/
private async loadDataFile(filename: string): Promise<void> {
const filePath = path.join(this.dataDir, filename);
// Check if file exists
if (!fs.existsSync(filePath)) {
logger.warn('Data file not found, skipping', { filePath });
return;
}
logger.info('Loading data file', { filename });
try {
// Read SQL file
const sql = fs.readFileSync(filePath, 'utf-8');
// Execute SQL (pg library handles INSERT statements properly)
await this.pool.query(sql);
logger.info('Data file loaded successfully', { filename });
} catch (error: any) {
logger.error('Failed to load data file', { filename, error: error.message });
throw error;
}
}
}

View File

@@ -6,6 +6,8 @@ import { buildApp } from './app';
import { appConfig } from './core/config/config-loader';
import { logger } from './core/logging/logger';
import { initializeScheduler } from './core/scheduler';
import { pool } from './core/config/database';
import { CatalogSeedService } from './features/platform/domain/catalog-seed.service';
const PORT = appConfig.config.server.port;
@@ -13,6 +15,15 @@ async function start() {
try {
const app = await buildApp();
// Seed vehicle catalog data if needed (runs after migrations)
try {
const catalogSeedService = new CatalogSeedService(pool);
await catalogSeedService.seedIfEmpty();
} catch (seedError) {
logger.warn('Vehicle catalog seeding failed, continuing startup', { seedError });
// Continue startup even if seeding fails (data can be imported later via admin UI)
}
await app.listen({
port: PORT,
host: '0.0.0.0'

View File

View File

@@ -1,84 +0,0 @@
# Vehicle Catalog Data Export
Export the current vehicle catalog database to SQL files for GitLab CI/CD deployment.
## Export Workflow
### Export from Running Database
```bash
cd data/vehicle-etl
python3 export_from_postgres.py
```
**Output:** Creates output/01_engines.sql, output/02_transmissions.sql, output/03_vehicle_options.sql
**Requirements:**
- mvp-postgres container running
- Python 3.7+
### Commit and Deploy
```bash
git add output/*.sql
git commit -m "Update vehicle catalog data from PostgreSQL export"
git push
```
GitLab CI/CD will automatically import these SQL files during deployment.
---
## When to Export
| Scenario | Action |
|----------|--------|
| Admin uploaded CSVs to database | Export and commit |
| Manual corrections in PostgreSQL | Export and commit |
| After adding new vehicle data | Export and commit |
| Preparing for deployment | Export and commit |
---
## Local Testing
```bash
# Export current database state
python3 export_from_postgres.py
# Test import locally
./reset_database.sh
./import_data.sh
docker compose exec mvp-redis redis-cli FLUSHALL
# Verify data
docker exec mvp-postgres psql -U postgres -d motovaultpro -c "
SELECT
(SELECT COUNT(*) FROM engines) as engines,
(SELECT COUNT(*) FROM transmissions) as transmissions,
(SELECT COUNT(*) FROM vehicle_options) as vehicle_options,
(SELECT MIN(year) FROM vehicle_options) as min_year,
(SELECT MAX(year) FROM vehicle_options) as max_year;
"
```
---
## GitLab CI/CD Integration
The pipeline automatically imports SQL files from the `output/` directory during deployment (see `.gitlab-ci.yml`, lines 89–98):
- data/vehicle-etl/output/01_engines.sql
- data/vehicle-etl/output/02_transmissions.sql
- data/vehicle-etl/output/03_vehicle_options.sql
Commit updated SQL files to trigger deployment with new data.
---
## Legacy Scripts (Not Used)
The following scripts are legacy from the VehAPI integration and are no longer used:
- vehapi_fetch_snapshot.py (obsolete - VehAPI not used)
- etl_generate_sql.py (obsolete - database export used instead)
These scripts are preserved for historical reference but should not be executed.

View File

@@ -1,322 +0,0 @@
#!/usr/bin/env python3
"""
Export PostgreSQL database to SQL files.
Extracts current state from running mvp-postgres container and generates
SQL import files compatible with the GitLab CI/CD pipeline.
Usage:
python3 export_from_postgres.py
python3 export_from_postgres.py --output-dir custom/path
Output files:
- output/01_engines.sql
- output/02_transmissions.sql
- output/03_vehicle_options.sql
"""
import argparse
import csv
import io
import subprocess
import sys
from pathlib import Path
from typing import Dict, Iterable, List, Sequence
BATCH_SIZE = 1000
def check_python_version():
    """Raise RuntimeError unless running on Python 3.7 or newer."""
    major, minor = sys.version_info[:2]
    if (major, minor) < (3, 7):
        raise RuntimeError(
            f"Python 3.7 or higher required. Current version: {major}.{minor}"
        )
def check_container_running():
    """Verify the mvp-postgres container is up; raise RuntimeError if not."""
    try:
        listing = subprocess.run(
            ["docker", "ps", "--filter", "name=mvp-postgres", "--format", "{{.Names}}"],
            capture_output=True,
            text=True,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        raise RuntimeError(f"Failed to check Docker containers: {e}")
    # `docker ps` succeeded but the container name is absent -> not running.
    if "mvp-postgres" not in listing.stdout:
        raise RuntimeError(
            "mvp-postgres container is not running.\n"
            "Start with: docker compose up -d mvp-postgres"
        )
def sql_value(value):
    """
    Render a Python value as a SQL literal.

    None maps to NULL, strings are single-quoted with embedded single
    quotes doubled, and anything else falls back to str().
    """
    if value is None:
        return "NULL"
    if isinstance(value, str):
        escaped = value.replace("'", "''")
        return f"'{escaped}'"
    return str(value)
def chunked(seq: Iterable[Dict], size: int) -> Iterable[List[Dict]]:
    """
    Yield lists of at most `size` items, taken in order from `seq`.

    Used to batch INSERT statements for better performance.
    """
    batch: List[Dict] = []
    for element in seq:
        batch.append(element)
        if len(batch) == size:
            yield batch
            batch = []
    # Emit the final, possibly short, batch.
    if batch:
        yield batch
def write_insert_file(
    path: Path,
    table: str,
    columns: Sequence[str],
    rows: Sequence[Dict],
):
    """
    Write batched INSERT statements for `table` to `path`.

    Parent directories are created as needed. An empty `rows` sequence
    produces a file containing only comment lines.

    Args:
        path: Output file path
        table: Table name
        columns: Column names to insert
        rows: List of row dictionaries
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("w", encoding="utf-8") as handle:
        handle.write("-- Auto-generated by export_from_postgres.py\n")
        if not rows:
            handle.write(f"-- No rows for {table}\n")
            return
        column_list = ", ".join(columns)
        for batch in chunked(rows, BATCH_SIZE):
            tuples = []
            for row in batch:
                rendered = ",".join(sql_value(row[col]) for col in columns)
                tuples.append(f"({rendered})")
            values_sql = ",\n".join(tuples)
            handle.write(f"INSERT INTO {table} ({column_list}) VALUES\n{values_sql};\n\n")
def execute_psql_copy(query: str) -> str:
    """
    Run a SQL query inside the mvp-postgres container via `docker exec`.

    Args:
        query: SQL (typically COPY ... TO STDOUT) to execute

    Returns:
        The command's stdout (CSV text for COPY queries)

    Raises:
        RuntimeError: If psql exits non-zero
    """
    command = [
        "docker", "exec", "mvp-postgres",
        "psql", "-U", "postgres", "-d", "motovaultpro",
        "-c", query,
    ]
    try:
        completed = subprocess.run(
            command,
            capture_output=True,
            text=True,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        # Prefer psql's stderr; fall back to the exception text.
        error_msg = e.stderr if e.stderr else str(e)
        raise RuntimeError(f"PostgreSQL query failed: {error_msg}")
    return completed.stdout
def export_engines(output_dir: Path) -> int:
    """
    Export the engines table to 01_engines.sql.

    Returns:
        Number of records exported
    """
    csv_output = execute_psql_copy(
        "COPY (SELECT id, name, fuel_type FROM engines ORDER BY id) TO STDOUT WITH CSV HEADER"
    )
    records = []
    try:
        for parsed in csv.DictReader(io.StringIO(csv_output)):
            records.append({
                "id": int(parsed["id"]),
                "name": parsed["name"],
                # Empty CSV field means SQL NULL.
                "fuel_type": parsed["fuel_type"] or None,
            })
    except (csv.Error, KeyError, ValueError) as e:
        raise RuntimeError(f"Failed to parse engines CSV output: {e}")
    write_insert_file(
        output_dir / "01_engines.sql",
        "engines",
        ["id", "name", "fuel_type"],
        records,
    )
    return len(records)
def export_transmissions(output_dir: Path) -> int:
    """
    Export the transmissions table to 02_transmissions.sql.

    Returns:
        Number of records exported
    """
    csv_output = execute_psql_copy(
        "COPY (SELECT id, type FROM transmissions ORDER BY id) TO STDOUT WITH CSV HEADER"
    )
    records = []
    try:
        for parsed in csv.DictReader(io.StringIO(csv_output)):
            records.append({"id": int(parsed["id"]), "type": parsed["type"]})
    except (csv.Error, KeyError, ValueError) as e:
        raise RuntimeError(f"Failed to parse transmissions CSV output: {e}")
    write_insert_file(
        output_dir / "02_transmissions.sql",
        "transmissions",
        ["id", "type"],
        records,
    )
    return len(records)
def export_vehicle_options(output_dir: Path) -> tuple:
    """
    Export the vehicle_options table to 03_vehicle_options.sql.

    Returns:
        Tuple of (record_count, min_year, max_year); the year bounds are
        None when the table is empty.
    """
    query = """COPY (
    SELECT year, make, model, trim, engine_id, transmission_id
    FROM vehicle_options
    ORDER BY year, make, model, trim
) TO STDOUT WITH CSV HEADER"""
    csv_output = execute_psql_copy(query)

    records = []
    years = []
    try:
        for parsed in csv.DictReader(io.StringIO(csv_output)):
            model_year = int(parsed["year"])
            years.append(model_year)
            records.append({
                "year": model_year,
                "make": parsed["make"],
                "model": parsed["model"],
                "trim": parsed["trim"],
                # Empty CSV fields mean SQL NULL for the foreign keys.
                "engine_id": int(parsed["engine_id"]) if parsed["engine_id"] else None,
                "transmission_id": int(parsed["transmission_id"]) if parsed["transmission_id"] else None,
            })
    except (csv.Error, KeyError, ValueError) as e:
        raise RuntimeError(f"Failed to parse vehicle_options CSV output: {e}")

    write_insert_file(
        output_dir / "03_vehicle_options.sql",
        "vehicle_options",
        ["year", "make", "model", "trim", "engine_id", "transmission_id"],
        records,
    )
    if years:
        return len(records), min(years), max(years)
    return len(records), None, None
def parse_args() -> argparse.Namespace:
    """Parse command-line arguments for the export script."""
    cli = argparse.ArgumentParser(
        description="Export PostgreSQL vehicle catalog to SQL files.",
    )
    cli.add_argument(
        "--output-dir",
        type=Path,
        default=Path("output"),
        help="Directory to write SQL output files (default: output)",
    )
    return cli.parse_args()
def main():
    """Main export workflow: verify environment, export tables, print summary."""
    check_python_version()
    args = parse_args()
    output_dir: Path = args.output_dir

    print("Exporting from PostgreSQL database...")
    print()

    # Verify container is running before attempting any exports.
    try:
        check_container_running()
    except RuntimeError as exc:
        print(f"Error: {exc}", file=sys.stderr)
        sys.exit(1)

    # Export each table; abort on the first failure.
    try:
        engine_total = export_engines(output_dir)
        print(f" Engines: {engine_total:,} records")
        transmission_total = export_transmissions(output_dir)
        print(f" Transmissions: {transmission_total:,} records")
        vehicle_total, min_year, max_year = export_vehicle_options(output_dir)
        print(f" Vehicle options: {vehicle_total:,} records")
        print()
    except RuntimeError as exc:
        print(f"Error during export: {exc}", file=sys.stderr)
        sys.exit(1)

    # Print summary of the generated files.
    print("SQL files generated:")
    for sql_file in sorted(output_dir.glob("*.sql")):
        size_kb = sql_file.stat().st_size / 1024
        print(f" - {sql_file} ({size_kb:.0f}KB)")
    print()
    if min_year and max_year:
        print(f"Year coverage: {min_year}-{max_year}")
        print()
    print("Export complete! Commit these files to deploy:")
    print(f" git add {output_dir}/*.sql")
    print(f" git commit -m \"Update vehicle catalog from PostgreSQL export ({min_year}-{max_year})\"")


if __name__ == "__main__":
    main()

File diff suppressed because it is too large Load Diff

View File

@@ -1,117 +0,0 @@
#!/bin/bash
#
# Vehicle Catalog CSV Bulk Import Wrapper
#
# Copies CSV file into mvp-backend container and executes bulk import script.
# Handles large CSV files (250k+ rows) that fail in web import.
#
# Usage:
#   ./import_catalog.sh <path_to_csv_file>
#
# Example:
#   ./import_catalog.sh data/vehicle-etl/import/vehicle-catalog-master.csv
#
# Requirements:
#   - mvp-backend container must be running
#   - CSV file must have headers: year, make, model, trim
#   - Optional headers: engine_name, transmission_type
#
set -euo pipefail

CONTAINER="mvp-backend"
TEMP_CSV_PATH="/tmp/catalog-import.csv"
SCRIPT_PATH="dist/features/admin/scripts/bulk-import-catalog.js"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Print error and exit
error() {
    echo -e "${RED}Error: $1${NC}" >&2
    exit 1
}

# Print success message
success() {
    echo -e "${GREEN}$1${NC}"
}

# Print warning message
warn() {
    echo -e "${YELLOW}$1${NC}"
}

# Check if CSV file argument provided
if [ $# -eq 0 ]; then
    error "No CSV file specified.
Usage: $0 <path_to_csv_file>
Example:
  $0 data/vehicle-etl/import/vehicle-catalog-master.csv"
fi

CSV_FILE="$1"

# Validate CSV file exists
if [ ! -f "$CSV_FILE" ]; then
    error "CSV file not found: $CSV_FILE"
fi

# Get absolute path to CSV file
CSV_FILE_ABS=$(cd "$(dirname "$CSV_FILE")" && pwd)/$(basename "$CSV_FILE")

# Check if container is running
if ! docker ps --format '{{.Names}}' | grep -q "^${CONTAINER}$"; then
    error "Container '${CONTAINER}' is not running. Start it with: make start"
fi

echo "=========================================="
echo "Vehicle Catalog Bulk Import"
echo "=========================================="
echo "CSV File: $CSV_FILE_ABS"
echo "Container: $CONTAINER"
echo ""

# Copy CSV file into container
echo "Step 1: Copying CSV file into container..."
if ! docker cp "$CSV_FILE_ABS" "${CONTAINER}:${TEMP_CSV_PATH}"; then
    error "Failed to copy CSV file into container"
fi
success "CSV file copied successfully"
echo ""

# Execute import script inside container
echo "Step 2: Running import script..."
echo ""
if docker exec -it "$CONTAINER" node "$SCRIPT_PATH"; then
    success "Import completed successfully!"
    IMPORT_SUCCESS=true
else
    # BUGFIX: previously this branch called error(), which exits the script
    # immediately — the cleanup step below never ran on failure and the
    # temp CSV was left inside the container (IMPORT_SUCCESS=false was
    # dead code). Report the failure, then fall through to cleanup.
    echo -e "${RED}Error: Import failed. Check the logs above for details.${NC}" >&2
    IMPORT_SUCCESS=false
fi

# Cleanup: Remove temp CSV file from container (runs on success AND failure)
echo ""
echo "Step 3: Cleaning up..."
if docker exec "$CONTAINER" rm -f "$TEMP_CSV_PATH" 2>/dev/null; then
    success "Temporary files cleaned up"
else
    warn "Warning: Failed to cleanup temp CSV file in container"
fi

echo ""
if [ "$IMPORT_SUCCESS" = true ]; then
    echo "=========================================="
    success "Import process completed successfully!"
    echo "=========================================="
    exit 0
else
    exit 1
fi

View File

@@ -1,71 +0,0 @@
#!/bin/bash
# Offline import of generated SQL files into PostgreSQL (no network).
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"

# Shared psql invocation (stdin piped into the container).
PSQL=(docker exec -i mvp-postgres psql -U postgres -d motovaultpro)

# Feed a SQL file to psql inside the container.
run_sql_file() {
    "${PSQL[@]}" < "$1"
}

# Run a single SQL statement and print the result.
run_query() {
    docker exec mvp-postgres psql -U postgres -d motovaultpro -c "$1"
}

# Abort unless the given file exists.
require_file() {
    if [ ! -f "$1" ]; then
        echo "❌ Missing required file: $1"
        exit 1
    fi
}

echo "=========================================="
echo "📥 Automotive Database Import (offline)"
echo "=========================================="
echo ""

if ! docker ps --filter "name=mvp-postgres" --format "{{.Names}}" | grep -q "mvp-postgres"; then
    echo "❌ Error: mvp-postgres container is not running"
    exit 1
fi

require_file "output/01_engines.sql"
require_file "output/02_transmissions.sql"
require_file "output/03_vehicle_options.sql"

echo "📋 Step 1: Running database schema migration..."
run_sql_file "migrations/001_create_vehicle_database.sql"
echo "✓ Schema migration completed"
echo ""

echo "🧹 Step 2: Truncating existing data..."
"${PSQL[@]}" <<'EOF'
TRUNCATE TABLE vehicle_options RESTART IDENTITY CASCADE;
TRUNCATE TABLE engines RESTART IDENTITY CASCADE;
TRUNCATE TABLE transmissions RESTART IDENTITY CASCADE;
EOF
echo "✓ Tables truncated"
echo ""

echo "📥 Step 3: Importing engines..."
run_sql_file "output/01_engines.sql"
echo "✓ Engines imported"
echo ""

echo "📥 Step 4: Importing transmissions..."
run_sql_file "output/02_transmissions.sql"
echo "✓ Transmissions imported"
echo ""

echo "📥 Step 5: Importing vehicle options (observed pairs only)..."
run_sql_file "output/03_vehicle_options.sql"
echo "✓ Vehicle options imported"
echo ""

echo "=========================================="
echo "✅ Import completed"
echo "=========================================="
echo ""

echo "🔍 Database verification:"
run_query "SELECT COUNT(*) as engines FROM engines;"
run_query "SELECT COUNT(*) as transmissions FROM transmissions;"
run_query "SELECT COUNT(*) as vehicle_options FROM vehicle_options;"
run_query "SELECT MIN(year) as min_year, MAX(year) as max_year FROM vehicle_options;"
run_query "SELECT DISTINCT year FROM vehicle_options ORDER BY year LIMIT 5;"
run_query "SELECT DISTINCT year FROM vehicle_options ORDER BY year DESC LIMIT 5;"
echo ""
echo "✓ Database ready for dropdown use."

View File

@@ -1,293 +0,0 @@
-- Migration: Create Automotive Vehicle Selection Database
-- Optimized for dropdown cascade queries
-- Date: 2025-11-10
-- Drop existing tables if they exist
DROP TABLE IF EXISTS vehicle_options CASCADE;
DROP TABLE IF EXISTS engines CASCADE;
DROP TABLE IF EXISTS transmissions CASCADE;
DROP INDEX IF EXISTS idx_vehicle_year;
DROP INDEX IF EXISTS idx_vehicle_make;
DROP INDEX IF EXISTS idx_vehicle_model;
DROP INDEX IF EXISTS idx_vehicle_trim;
DROP INDEX IF EXISTS idx_vehicle_composite;
-- Create engines table with detailed specifications
CREATE TABLE engines (
id SERIAL PRIMARY KEY,
name VARCHAR(255) NOT NULL,
displacement VARCHAR(50),
configuration VARCHAR(50),
horsepower VARCHAR(100),
torque VARCHAR(100),
fuel_type VARCHAR(100),
fuel_system VARCHAR(255),
aspiration VARCHAR(100),
specs_json JSONB,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
-- Prevent duplicate engine display names (case-insensitive)
CREATE UNIQUE INDEX IF NOT EXISTS uq_engines_name_lower ON engines (LOWER(name));
CREATE INDEX idx_engines_displacement ON engines(displacement);
CREATE INDEX idx_engines_config ON engines(configuration);
-- Create transmissions table.
-- 'type' is the display label (e.g. '6-Speed Manual'); speeds/drive_type are
-- optional free-text attributes.
CREATE TABLE transmissions (
    id SERIAL PRIMARY KEY,
    type VARCHAR(100) NOT NULL,
    speeds VARCHAR(50),
    drive_type VARCHAR(100),
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
-- Prevent duplicate transmission display names (case-insensitive)
CREATE UNIQUE INDEX IF NOT EXISTS uq_transmissions_type_lower ON transmissions (LOWER(type));
-- Plain b-tree on the raw value for exact-match lookups / ORDER BY type.
CREATE INDEX idx_transmissions_type ON transmissions(type);
-- Create denormalized vehicle_options table optimized for dropdown queries.
-- One row per (year, make, model, trim, engine, transmission) pairing.
CREATE TABLE vehicle_options (
    id SERIAL PRIMARY KEY,
    year INTEGER NOT NULL,
    make VARCHAR(100) NOT NULL,
    model VARCHAR(255) NOT NULL,
    trim VARCHAR(255) NOT NULL,
    -- Links may be severed on re-import; keep the row, null the reference.
    engine_id INTEGER REFERENCES engines(id) ON DELETE SET NULL,
    transmission_id INTEGER REFERENCES transmissions(id) ON DELETE SET NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
-- Prevent duplicate vehicle option rows.
-- FIX: a plain unique index over NULLable engine_id/transmission_id does NOT
-- stop duplicates when either column is NULL (Postgres treats NULLs as
-- distinct in unique indexes). Fold NULL to sentinel 0 so such rows also
-- deduplicate; safe because SERIAL ids start at 1 and never equal 0.
CREATE UNIQUE INDEX IF NOT EXISTS uq_vehicle_options_full ON vehicle_options (
    year, make, model, trim,
    COALESCE(engine_id, 0), COALESCE(transmission_id, 0)
);
-- Indexes for cascading dropdown performance.
-- NOTE(review): idx_vehicle_year, idx_vehicle_year_make and
-- idx_vehicle_year_make_model are left-prefixes of the longer composites and
-- could be dropped to cut write overhead; retained to keep the deployed
-- schema shape unchanged.
CREATE INDEX idx_vehicle_year ON vehicle_options(year);
CREATE INDEX idx_vehicle_make ON vehicle_options(make);
CREATE INDEX idx_vehicle_model ON vehicle_options(model);
CREATE INDEX idx_vehicle_trim ON vehicle_options(trim);
CREATE INDEX idx_vehicle_year_make ON vehicle_options(year, make);
CREATE INDEX idx_vehicle_year_make_model ON vehicle_options(year, make, model);
CREATE INDEX idx_vehicle_year_make_model_trim ON vehicle_options(year, make, model, trim);
CREATE INDEX idx_vehicle_year_make_model_trim_engine ON vehicle_options(year, make, model, trim, engine_id);
CREATE INDEX idx_vehicle_year_make_model_trim_trans ON vehicle_options(year, make, model, trim, transmission_id);
-- Full-text search index for admin catalog search
CREATE INDEX idx_vehicle_options_fts ON vehicle_options
USING gin(to_tsvector('english', year::text || ' ' || make || ' ' || model || ' ' || trim));
-- Index on engines.name for join performance during search
CREATE INDEX idx_engines_name ON engines(name);
-- Views for dropdown queries.
-- NOTE(review): ORDER BY inside a view is not guaranteed by the SQL standard;
-- Postgres honors it when the outer query adds no ordering of its own, but
-- consumers that require a specific order should ORDER BY explicitly.

-- View: Get all available years
CREATE OR REPLACE VIEW available_years AS
SELECT DISTINCT year
FROM vehicle_options
ORDER BY year DESC;

-- View: Get makes by year
CREATE OR REPLACE VIEW makes_by_year AS
SELECT DISTINCT year, make
FROM vehicle_options
ORDER BY year DESC, make ASC;

-- View: Get models by year and make
CREATE OR REPLACE VIEW models_by_year_make AS
SELECT DISTINCT year, make, model
FROM vehicle_options
ORDER BY year DESC, make ASC, model ASC;

-- View: Get trims by year, make, and model
CREATE OR REPLACE VIEW trims_by_year_make_model AS
SELECT DISTINCT year, make, model, trim
FROM vehicle_options
ORDER BY year DESC, make ASC, model ASC, trim ASC;

-- View: Get complete vehicle configurations with engine and transmission details.
-- LEFT JOINs keep vehicle rows whose engine/transmission link is NULL.
CREATE OR REPLACE VIEW complete_vehicle_configs AS
SELECT
    vo.id,
    vo.year,
    vo.make,
    vo.model,
    vo.trim,
    e.name AS engine_name,
    e.displacement,
    e.configuration,
    e.horsepower,
    e.torque,
    e.fuel_type,
    t.type AS transmission_type,
    t.speeds AS transmission_speeds,
    t.drive_type
FROM vehicle_options vo
LEFT JOIN engines e ON vo.engine_id = e.id
LEFT JOIN transmissions t ON vo.transmission_id = t.id
ORDER BY vo.year DESC, vo.make ASC, vo.model ASC, vo.trim ASC;
-- Function to get makes for a specific year.
-- Rewritten as a plain SQL-language function: same signature, same DISTINCT
-- result set and ordering as the previous plpgsql RETURN QUERY version.
CREATE OR REPLACE FUNCTION get_makes_for_year(p_year INTEGER)
RETURNS TABLE(make VARCHAR) AS $$
    SELECT DISTINCT vo.make
    FROM vehicle_options vo
    WHERE vo.year = p_year
    ORDER BY vo.make ASC;
$$ LANGUAGE sql;
-- Function to get models for a specific year and make.
-- Plain SQL-language body; identical results to the plpgsql original.
CREATE OR REPLACE FUNCTION get_models_for_year_make(p_year INTEGER, p_make VARCHAR)
RETURNS TABLE(model VARCHAR) AS $$
    SELECT DISTINCT vo.model
    FROM vehicle_options vo
    WHERE vo.year = p_year
      AND vo.make = p_make
    ORDER BY vo.model ASC;
$$ LANGUAGE sql;
-- Function to get trims for a specific year, make, and model.
-- Plain SQL-language body; the output column is named by RETURNS TABLE
-- (trim_name), so selecting vo.trim maps onto it directly.
CREATE OR REPLACE FUNCTION get_trims_for_year_make_model(p_year INTEGER, p_make VARCHAR, p_model VARCHAR)
RETURNS TABLE(trim_name VARCHAR) AS $$
    SELECT DISTINCT vo.trim
    FROM vehicle_options vo
    WHERE vo.year = p_year
      AND vo.make = p_make
      AND vo.model = p_model
    ORDER BY vo.trim ASC;
$$ LANGUAGE sql;
-- Function to get engine and transmission options for a specific vehicle.
-- Returns one row per stored pairing for the exact (year, make, model, trim);
-- LEFT JOINs keep pairings whose engine or transmission link is NULL.
CREATE OR REPLACE FUNCTION get_options_for_vehicle(p_year INTEGER, p_make VARCHAR, p_model VARCHAR, p_trim VARCHAR)
RETURNS TABLE(
    engine_name VARCHAR,
    engine_displacement VARCHAR,
    engine_horsepower VARCHAR,
    transmission_type VARCHAR,
    transmission_speeds VARCHAR,
    drive_type VARCHAR
) AS $$
BEGIN
    RETURN QUERY
    SELECT
        e.name,
        e.displacement,
        e.horsepower,
        t.type,
        t.speeds,
        t.drive_type
    FROM vehicle_options vo
    LEFT JOIN engines e ON vo.engine_id = e.id
    LEFT JOIN transmissions t ON vo.transmission_id = t.id
    WHERE vo.year = p_year
      AND vo.make = p_make
      AND vo.model = p_model
      AND vo.trim = p_trim;
END;
$$ LANGUAGE plpgsql;
-- Helper functions for trim-level options and pair-safe filtering.

-- Distinct transmissions actually paired with this exact vehicle.
-- INNER JOIN: rows with a NULL transmission link are excluded.
CREATE OR REPLACE FUNCTION get_transmissions_for_vehicle(p_year INTEGER, p_make VARCHAR, p_model VARCHAR, p_trim VARCHAR)
RETURNS TABLE(
    transmission_id INTEGER,
    transmission_type VARCHAR
) AS $$
BEGIN
    RETURN QUERY
    SELECT DISTINCT
        t.id,
        t.type
    FROM vehicle_options vo
    JOIN transmissions t ON vo.transmission_id = t.id
    WHERE vo.year = p_year
      AND vo.make = p_make
      AND vo.model = p_model
      AND vo.trim = p_trim
    ORDER BY t.type ASC;
END;
$$ LANGUAGE plpgsql;
-- Distinct engines actually paired with this exact vehicle.
-- INNER JOIN: rows with a NULL engine link are excluded.
CREATE OR REPLACE FUNCTION get_engines_for_vehicle(p_year INTEGER, p_make VARCHAR, p_model VARCHAR, p_trim VARCHAR)
RETURNS TABLE(
    engine_id INTEGER,
    engine_name VARCHAR
) AS $$
BEGIN
    RETURN QUERY
    SELECT DISTINCT
        e.id,
        e.name
    FROM vehicle_options vo
    JOIN engines e ON vo.engine_id = e.id
    WHERE vo.year = p_year
      AND vo.make = p_make
      AND vo.model = p_model
      AND vo.trim = p_trim
    ORDER BY e.name ASC;
END;
$$ LANGUAGE plpgsql;
-- Pair-safe filter: transmissions valid for this vehicle AND the chosen
-- engine (matched by display name), so the UI never offers an impossible
-- engine/transmission combination.
CREATE OR REPLACE FUNCTION get_transmissions_for_vehicle_engine(p_year INTEGER, p_make VARCHAR, p_model VARCHAR, p_trim VARCHAR, p_engine_name VARCHAR)
RETURNS TABLE(
    transmission_id INTEGER,
    transmission_type VARCHAR
) AS $$
BEGIN
    RETURN QUERY
    SELECT DISTINCT
        t.id,
        t.type
    FROM vehicle_options vo
    JOIN engines e ON vo.engine_id = e.id
    JOIN transmissions t ON vo.transmission_id = t.id
    WHERE vo.year = p_year
      AND vo.make = p_make
      AND vo.model = p_model
      AND vo.trim = p_trim
      AND e.name = p_engine_name  -- exact, case-sensitive name match
    ORDER BY t.type ASC;
END;
$$ LANGUAGE plpgsql;
-- Pair-safe filter in the other direction: engines valid for this vehicle
-- AND the chosen transmission (matched by display type).
CREATE OR REPLACE FUNCTION get_engines_for_vehicle_trans(p_year INTEGER, p_make VARCHAR, p_model VARCHAR, p_trim VARCHAR, p_trans_type VARCHAR)
RETURNS TABLE(
    engine_id INTEGER,
    engine_name VARCHAR
) AS $$
BEGIN
    RETURN QUERY
    SELECT DISTINCT
        e.id,
        e.name
    FROM vehicle_options vo
    JOIN engines e ON vo.engine_id = e.id
    JOIN transmissions t ON vo.transmission_id = t.id
    WHERE vo.year = p_year
      AND vo.make = p_make
      AND vo.model = p_model
      AND vo.trim = p_trim
      AND t.type = p_trans_type  -- exact, case-sensitive type match
    ORDER BY e.name ASC;
END;
$$ LANGUAGE plpgsql;
-- Catalog object descriptions (visible via psql \d+ / \dv+ introspection).
COMMENT ON TABLE vehicle_options IS 'Denormalized table optimized for cascading dropdown queries';
COMMENT ON TABLE engines IS 'Engine specifications with detailed technical data';
COMMENT ON TABLE transmissions IS 'Transmission specifications';
COMMENT ON VIEW available_years IS 'Returns all distinct years available in the database';
COMMENT ON VIEW makes_by_year IS 'Returns makes grouped by year for dropdown population';
COMMENT ON VIEW models_by_year_make IS 'Returns models grouped by year and make';
COMMENT ON VIEW trims_by_year_make_model IS 'Returns trims grouped by year, make, and model';
COMMENT ON VIEW complete_vehicle_configs IS 'Complete vehicle configurations with all details';

File diff suppressed because it is too large Load Diff

View File

@@ -1,47 +0,0 @@
-- Auto-generated by export_from_postgres.py
-- Seed data for the transmissions lookup table. Explicit ids are preserved
-- from the source database (hence the gaps, e.g. 36 -> 115), so exported
-- vehicle_options rows keep valid transmission_id references.
INSERT INTO transmissions (id, type) VALUES
(1,'Automatic'),
(2,'Manual'),
(3,'3-Speed Automatic'),
(4,'5-Speed Manual'),
(5,'4-Speed Manual'),
(6,'3-Speed Manual'),
(7,'4-Speed Automatic'),
(8,'6-Speed Manual'),
(9,'4-Speed Automatic Overdrive'),
(10,'5-Speed Manual Overdrive'),
(11,'Continuously Variable Transmission'),
(12,'5-Speed Automatic'),
(13,'6-Speed Manual Overdrive'),
(14,'1-Speed Dual Clutch'),
(15,'5-Speed Automatic Overdrive'),
(16,'6-Speed Automatic'),
(17,'6-Speed Automatic Overdrive'),
(18,'6-Speed CVT'),
(19,'7-Speed Automatic'),
(20,'6-Speed Dual Clutch'),
(21,'8-Speed Automatic'),
(22,'1-Speed Automatic'),
(23,'7-Speed Dual Clutch'),
(24,'5-Speed Dual Clutch'),
(25,'7-Speed CVT'),
(26,'7-Speed Manual'),
(27,'9-Speed Automatic'),
(28,'8-Speed Dual Clutch'),
(29,'8-Speed CVT'),
(30,'9-Speed Dual Clutch'),
(31,'10-Speed Automatic'),
(32,'4-Speed CVT'),
(33,'10-Speed Dual Clutch'),
(34,'10-Speed CVT'),
(35,'2-Speed Automatic'),
(36,'10-Speed Automatic Transmission'),
(115,'CVT'),
(119,'1-Speed Direct Drive'),
(1159,'8-Speed DCT'),
(1172,'7-Speed DCT'),
(1184,'9-Speed DCT'),
(3072,'Single-Speed Transmission'),
(5081,'Electric'),
(5304,'ISR Automatic');

File diff suppressed because it is too large Load Diff

View File

@@ -1,190 +0,0 @@
#!/usr/bin/env python3
"""
Post-import QA validation for vehicle dropdown data.
Runs basic duplicate and range checks against the motovaultpro Postgres container.
"""
import os
import subprocess
import sys
def run_psql(query: str) -> str:
    """Run ``query`` via psql inside the mvp-postgres container; return raw stdout.

    Uses ``-At`` (unaligned, tuples-only) so callers receive bare values, one
    row per line, with no headers or padding. Raises CalledProcessError when
    psql or docker exits non-zero.
    """
    argv = ["docker", "exec", "mvp-postgres"]
    argv += ["psql", "-U", "postgres", "-d", "motovaultpro", "-At", "-c", query]
    return subprocess.check_output(argv, text=True)
def check_container():
    """Exit with status 1 unless Docker is reachable and mvp-postgres is running."""
    # First make sure the docker CLI/daemon works at all.
    try:
        subprocess.check_output(["docker", "ps"], text=True)
    except Exception:
        print("❌ Docker not available.")
        sys.exit(1)

    # Then look specifically for the mvp-postgres container by name.
    try:
        names = subprocess.check_output(
            ["docker", "ps", "--filter", "name=mvp-postgres", "--format", "{{.Names}}"],
            text=True,
        )
    except Exception as exc:
        print(f"❌ Failed to check containers: {exc}")
        sys.exit(1)
    if not names.strip():
        print("❌ mvp-postgres container not running.")
        sys.exit(1)
def check_invalid_combinations():
    """Verify known-impossible year/make/model/trim combinations are absent.

    Returns:
        A list of human-readable issue strings (empty when the data is clean).
    """
    invalid_combos = [
        (1992, "Chevrolet", "Corvette", "Z06"),  # Z06 started 2001
        (2000, "Chevrolet", "Corvette", "35th Anniversary Edition"),  # Was 1988
        (2000, "Chevrolet", "Corvette", "Stingray"),  # Stingray started 2014
        (1995, "Ford", "Mustang", "Mach-E"),  # Mach-E is 2021+
        (2020, "Tesla", "Cybertruck", "Base"),  # Not in production until later
    ]

    def sql_quote(value: str) -> str:
        # FIX: escape embedded single quotes so trims like "Driver's Edition"
        # cannot break (or inject into) the generated SQL literal.
        return value.replace("'", "''")

    issues = []
    for year, make, model, trim in invalid_combos:
        query = f"""
        SELECT COUNT(*) FROM vehicle_options
        WHERE year = {int(year)}
          AND make = '{sql_quote(make)}'
          AND model = '{sql_quote(model)}'
          AND trim = '{sql_quote(trim)}'
        """
        count = int(run_psql(query).strip())
        if count > 0:
            issues.append(f"Invalid combo found: {year} {make} {model} {trim}")
    return issues
def check_trim_coverage():
    """Print trim coverage stats: how many (year, make, model) combos carry
    only the generic 'Base' trim versus real manufacturer trims.

    Informational only — never marks the QA run as failed.
    """
    query = """
    SELECT
        COUNT(DISTINCT (year, make, model)) as total_models,
        COUNT(DISTINCT (year, make, model)) FILTER (WHERE trim = 'Base') as base_only,
        COUNT(DISTINCT (year, make, model)) FILTER (WHERE trim != 'Base') as has_specific_trims
    FROM vehicle_options
    """
    result = run_psql(query).strip()
    print(f"Trim coverage (total/base_only/has_specific_trims): {result}")
def main():
    """Run all QA checks against the imported vehicle data.

    FIX: exits with status 1 when any check fails so CI pipelines can gate on
    data quality — previously the script printed failures but always exited 0.
    """
    check_container()
    print("🔍 Running QA checks...\n")

    # Each entry maps a check name to a psql query whose (stripped) output is
    # printed and, for the pass/fail checks, compared against "0" below.
    queries = {
        "engine_duplicate_names": """
            SELECT COUNT(*) FROM (
                SELECT LOWER(name) as n, COUNT(*) c
                FROM engines
                GROUP BY 1 HAVING COUNT(*) > 1
            ) t;
        """,
        "transmission_duplicate_types": """
            SELECT COUNT(*) FROM (
                SELECT LOWER(type) as t, COUNT(*) c
                FROM transmissions
                GROUP BY 1 HAVING COUNT(*) > 1
            ) t;
        """,
        "vehicle_option_duplicates": """
            SELECT COUNT(*) FROM (
                SELECT year, make, model, trim, engine_id, transmission_id, COUNT(*) c
                FROM vehicle_options
                GROUP BY 1,2,3,4,5,6 HAVING COUNT(*) > 1
            ) t;
        """,
        "year_range": """
            SELECT MIN(year) || ' - ' || MAX(year) FROM vehicle_options;
        """,
        "year_range_valid": """
            SELECT COUNT(*) FROM (
                SELECT 1 FROM vehicle_options WHERE year < 2015 OR year > 2022 LIMIT 1
            ) t;
        """,
        "counts": """
            SELECT
                (SELECT COUNT(*) FROM engines) AS engines,
                (SELECT COUNT(*) FROM transmissions) AS transmissions,
                (SELECT COUNT(*) FROM vehicle_options) AS vehicle_options;
        """,
        # Detect "impossible pairs": engine/transmission combinations implied
        # by cross-joining per-trim engines and transmissions that were never
        # actually recorded as a row.
        "cross_join_gaps": """
            SELECT COUNT(*) FROM (
                SELECT base.year, base.make, base.model, base.trim, e.engine_id, t.transmission_id
                FROM (
                    SELECT DISTINCT year, make, model, trim FROM vehicle_options
                ) base
                JOIN (
                    SELECT DISTINCT year, make, model, trim, engine_id FROM vehicle_options
                ) e ON base.year = e.year AND base.make = e.make AND base.model = e.model AND base.trim = e.trim
                JOIN (
                    SELECT DISTINCT year, make, model, trim, transmission_id FROM vehicle_options
                ) t ON base.year = t.year AND base.make = t.make AND base.model = t.model AND base.trim = t.trim
                EXCEPT
                SELECT year, make, model, trim, engine_id, transmission_id FROM vehicle_options
            ) gap;
        """,
    }

    results = {}
    for key, query in queries.items():
        try:
            results[key] = run_psql(query).strip()
        except subprocess.CalledProcessError as exc:
            print(f"❌ Query failed ({key}): {exc}")
            sys.exit(1)

    issues_found = False
    print(f"Engine duplicate names: {results['engine_duplicate_names']}")
    print(f"Transmission duplicate types: {results['transmission_duplicate_types']}")
    print(f"Vehicle option duplicates: {results['vehicle_option_duplicates']}")
    print(f"Year range: {results['year_range']}")
    print(f"Out-of-range years (should be 0): {results['year_range_valid']}")
    print(f"Counts (engines, transmissions, vehicle_options): {results['counts']}")
    print(f"Cross-join gaps (should be 0 to avoid impossible pairs): {results['cross_join_gaps']}")

    if (
        results["engine_duplicate_names"] != "0"
        or results["transmission_duplicate_types"] != "0"
        or results["vehicle_option_duplicates"] != "0"
        or results["year_range_valid"] != "0"
        or results["cross_join_gaps"] != "0"
    ):
        issues_found = True

    invalids = check_invalid_combinations()
    if invalids:
        issues_found = True
        print("\n❌ Invalid combinations detected:")
        for issue in invalids:
            print(f" - {issue}")
    else:
        print("\n✅ No known invalid year/make/model/trim combos found.")

    # Informational coverage report (does not affect the exit status).
    check_trim_coverage()

    if not issues_found:
        print("\n✅ QA checks passed.")
    else:
        print("\n❌ QA checks found issues.")
        # FIX: non-zero exit so callers (CI) actually see the failure.
        sys.exit(1)
# Entry point: run the full QA suite when executed as a script.
if __name__ == "__main__":
    main()

View File

@@ -1,56 +0,0 @@
#!/bin/bash
# Reset vehicle database tables before a fresh import.
# Interactively truncates engines/transmissions/vehicle_options inside the
# mvp-postgres container and resets their identity sequences.
# NOTE(review): the `read -p` confirmation makes this interactive-only; under
# `set -e` the script aborts when stdin is not a TTY (e.g. in CI).
set -euo pipefail

# Run from this script's directory so relative paths resolve consistently.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"

echo "=========================================="
echo "Vehicle Database Reset"
echo "=========================================="
echo ""

# Check if postgres container is running
if ! docker ps --filter "name=mvp-postgres" --format "{{.Names}}" | grep -q "mvp-postgres"; then
    echo "Error: mvp-postgres container is not running"
    exit 1
fi

# Show current row counts; tolerate a fresh database where tables don't exist.
echo "Current data (before reset):"
docker exec mvp-postgres psql -U postgres -d motovaultpro -c \
    "SELECT
        (SELECT COUNT(*) FROM engines) as engines,
        (SELECT COUNT(*) FROM transmissions) as transmissions,
        (SELECT COUNT(*) FROM vehicle_options) as vehicle_options;" 2>/dev/null || echo " Tables may not exist yet"
echo ""

# Confirm reset (single keypress, raw mode; anything but y/Y cancels)
read -p "Are you sure you want to reset all vehicle data? (y/N) " -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    echo "Reset cancelled."
    exit 0
fi

echo ""
echo "Truncating tables..."
# RESTART IDENTITY resets serial sequences; CASCADE clears referencing rows.
docker exec -i mvp-postgres psql -U postgres -d motovaultpro <<'EOF'
TRUNCATE TABLE vehicle_options RESTART IDENTITY CASCADE;
TRUNCATE TABLE engines RESTART IDENTITY CASCADE;
TRUNCATE TABLE transmissions RESTART IDENTITY CASCADE;
EOF

echo ""
echo "=========================================="
echo "Reset complete"
echo "=========================================="
echo ""
echo "Verification (should all be 0):"
docker exec mvp-postgres psql -U postgres -d motovaultpro -c \
    "SELECT
        (SELECT COUNT(*) FROM engines) as engines,
        (SELECT COUNT(*) FROM transmissions) as transmissions,
        (SELECT COUNT(*) FROM vehicle_options) as vehicle_options;"
echo ""
echo "Ready for fresh import with: ./import_data.sh"

View File

@@ -1,34 +0,0 @@
#!/bin/bash
# Compare database counts with exported SQL file counts
# Usage: ./validate_export.sh
# NOTE(review): under `set -e`, a missing output file or a file with zero
# data rows makes `grep -c` exit non-zero and aborts the script with no
# explanatory message — consider explicit file-existence checks.
set -e

echo "Validating exported SQL files against database..."
echo ""

# Get counts from database (-t -A: tuples only, unaligned => bare numbers)
DB_ENGINES=$(docker exec mvp-postgres psql -U postgres -d motovaultpro -t -A -c "SELECT COUNT(*) FROM engines;")
DB_TRANS=$(docker exec mvp-postgres psql -U postgres -d motovaultpro -t -A -c "SELECT COUNT(*) FROM transmissions;")
DB_VEHICLES=$(docker exec mvp-postgres psql -U postgres -d motovaultpro -t -A -c "SELECT COUNT(*) FROM vehicle_options;")

# Count records in SQL files (count lines starting with '(' which are data rows)
SQL_ENGINES=$(grep -c '^(' output/01_engines.sql)
SQL_TRANS=$(grep -c '^(' output/02_transmissions.sql)
SQL_VEHICLES=$(grep -c '^(' output/03_vehicle_options.sql)

# Display comparison
echo "Database vs SQL File Counts:"
echo " Engines: $DB_ENGINES (DB) vs $SQL_ENGINES (SQL)"
echo " Transmissions: $DB_TRANS (DB) vs $SQL_TRANS (SQL)"
echo " Vehicle Options: $DB_VEHICLES (DB) vs $SQL_VEHICLES (SQL)"
echo ""

# Validate counts match; non-zero exit signals a bad export to callers.
if [ "$DB_ENGINES" -eq "$SQL_ENGINES" ] && [ "$DB_TRANS" -eq "$SQL_TRANS" ] && [ "$DB_VEHICLES" -eq "$SQL_VEHICLES" ]; then
    echo "Validation PASSED - All counts match!"
    exit 0
else
    echo "Validation FAILED - Counts do not match!"
    exit 1
fi

View File

@@ -22,13 +22,15 @@ You are a senior software engineer specialising in NodeJS, Typescript, front en
- Make no assumptions.
- Ask clarifying questions.
- Ultrathink
- You will be fixing a bug with the application backup function.
- You will be fixing a bug with the vehicle catalog import function.
*** CONTEXT ***
- This is a modern web app for managing a vehicle fleet. It has both desktop and mobile versions of the site, which need to maintain feature parity. It's currently deployed via docker compose but in the future will be deployed via k8s.
- Read README.md CLAUDE.md and AI-INDEX.md and follow relevant instructions to understand this code repository in the context of this change.
- There is an error when you try and restore a backup.
- Start with this file. /Users/egullickson/Documents/Technology/coding/motovaultpro/backend/src/features/backup/api/backup.controller.ts
- There is an error when you try and import this CSV file.
- Start with this file. data/vehicle-etl/gmc_2022_2026.csv
- The interface says everything is successful.
- I flushed all REDIS cache
*** CHANGES TO IMPLEMENT ***
- Research this code base and ask iterative questions to compile a complete plan.

View File

@@ -347,11 +347,10 @@ export const useImportApply = () => {
return useMutation({
mutationFn: (previewId: string) => adminApi.importApply(previewId),
onSuccess: (result) => {
onSuccess: () => {
// Invalidate cache to refresh catalog data
queryClient.invalidateQueries({ queryKey: ['catalogSearch'] });
toast.success(
`Import completed: ${result.created} created, ${result.updated} updated`
);
// Note: Toast and dialog behavior now handled by parent components
},
onError: (error: ApiError) => {
toast.error(error.response?.data?.error || 'Failed to apply import');

View File

@@ -13,6 +13,8 @@ import {
MoreVert,
Close,
History,
ExpandMore,
ExpandLess,
} from '@mui/icons-material';
import toast from 'react-hot-toast';
import { useAdminAccess } from '../../../core/auth/useAdminAccess';
@@ -29,6 +31,7 @@ import { adminApi } from '../api/admin.api';
import {
CatalogSearchResult,
ImportPreviewResult,
ImportApplyResult,
} from '../types/admin.types';
export const AdminCatalogMobileScreen: React.FC = () => {
@@ -54,6 +57,8 @@ export const AdminCatalogMobileScreen: React.FC = () => {
// Import state
const [importSheet, setImportSheet] = useState(false);
const [importPreview, setImportPreview] = useState<ImportPreviewResult | null>(null);
const [importResult, setImportResult] = useState<ImportApplyResult | null>(null);
const [errorsExpanded, setErrorsExpanded] = useState(false);
const fileInputRef = useRef<HTMLInputElement>(null);
// Hooks
@@ -144,15 +149,38 @@ export const AdminCatalogMobileScreen: React.FC = () => {
if (!importPreview?.previewId) return;
try {
await importApplyMutation.mutateAsync(importPreview.previewId);
const result = await importApplyMutation.mutateAsync(importPreview.previewId);
setImportResult(result);
if (result.errors.length > 0) {
toast.error(
`Import completed with ${result.errors.length} error(s): ${result.created} created, ${result.updated} updated`
);
// Keep sheet open for error review
} else {
toast.success(
`Import completed successfully: ${result.created} created, ${result.updated} updated`
);
// Auto-close on complete success
setImportSheet(false);
setImportPreview(null);
setImportResult(null);
}
refetch();
} catch {
// Error handled by mutation
// Error handled by mutation's onError
}
}, [importPreview, importApplyMutation, refetch]);
const handleImportSheetClose = useCallback(() => {
if (importApplyMutation.isPending) return;
setImportSheet(false);
setImportPreview(null);
setImportResult(null);
setErrorsExpanded(false);
}, [importApplyMutation.isPending]);
// Export handler
const handleExport = useCallback(() => {
setMenuOpen(false);
@@ -435,17 +463,16 @@ export const AdminCatalogMobileScreen: React.FC = () => {
</div>
)}
{/* Import Preview Sheet */}
{importSheet && importPreview && (
{/* Import Preview/Results Sheet */}
{importSheet && (importPreview || importResult) && (
<div className="fixed inset-0 bg-black bg-opacity-50 z-50 flex items-end justify-center">
<div className="bg-white rounded-t-2xl w-full max-w-lg p-6 space-y-4 animate-slide-up max-h-[80vh] overflow-y-auto">
<div className="flex items-center justify-between">
<h2 className="text-xl font-bold text-slate-800">Import Preview</h2>
<h2 className="text-xl font-bold text-slate-800">
{importResult ? 'Import Results' : 'Import Preview'}
</h2>
<button
onClick={() => {
setImportSheet(false);
setImportPreview(null);
}}
onClick={handleImportSheetClose}
disabled={importApplyMutation.isPending}
className="p-2 text-slate-500 hover:text-slate-700"
style={{ minHeight: '44px', minWidth: '44px' }}
@@ -454,7 +481,9 @@ export const AdminCatalogMobileScreen: React.FC = () => {
</button>
</div>
{/* Summary */}
{/* Preview Mode */}
{importPreview && !importResult && (
<>
<div className="flex gap-4 text-sm">
<div className="bg-green-100 text-green-800 px-3 py-2 rounded-lg">
<strong>{importPreview.toCreate.length}</strong> to create
@@ -464,7 +493,6 @@ export const AdminCatalogMobileScreen: React.FC = () => {
</div>
</div>
{/* Errors */}
{importPreview.errors.length > 0 && (
<div className="bg-red-50 border border-red-200 rounded-lg p-3">
<p className="text-red-800 font-semibold mb-2">
@@ -483,7 +511,6 @@ export const AdminCatalogMobileScreen: React.FC = () => {
</div>
)}
{/* Status */}
{importPreview.valid ? (
<div className="bg-green-50 border border-green-200 rounded-lg p-3">
<p className="text-green-800">
@@ -497,22 +524,74 @@ export const AdminCatalogMobileScreen: React.FC = () => {
</p>
</div>
)}
</>
)}
{/* Results Mode */}
{importResult && (
<>
<div className="flex gap-4 text-sm">
<div className="bg-green-100 text-green-800 px-3 py-2 rounded-lg">
<strong>{importResult.created}</strong> created
</div>
<div className="bg-blue-100 text-blue-800 px-3 py-2 rounded-lg">
<strong>{importResult.updated}</strong> updated
</div>
</div>
{importResult.errors.length > 0 && (
<div className="border border-red-500 rounded-lg overflow-hidden">
<button
onClick={() => setErrorsExpanded(!errorsExpanded)}
className="w-full flex items-center justify-between p-4 bg-red-100 hover:bg-red-200 transition"
style={{ minHeight: '44px' }}
>
<span className="text-red-900 font-semibold">
{importResult.errors.length} Error(s) Occurred
</span>
<span className="text-red-900">
{errorsExpanded ? <ExpandLess /> : <ExpandMore />}
</span>
</button>
{errorsExpanded && (
<div className="max-h-96 overflow-y-auto p-4 bg-white">
<ul className="space-y-2">
{importResult.errors.map((err, idx) => (
<li key={idx} className="text-sm font-mono text-slate-700">
<strong>Row {err.row}:</strong> {err.error}
</li>
))}
</ul>
</div>
)}
</div>
)}
{importResult.errors.length === 0 && (
<div className="bg-green-50 border border-green-200 rounded-lg p-3">
<p className="text-green-800">
Import completed successfully with no errors.
</p>
</div>
)}
</>
)}
{/* Action Buttons */}
<div className="flex gap-2 pt-2">
<button
onClick={() => {
setImportSheet(false);
setImportPreview(null);
}}
onClick={handleImportSheetClose}
disabled={importApplyMutation.isPending}
className="flex-1 bg-slate-200 text-slate-700 py-3 rounded-lg font-medium hover:bg-slate-300 transition disabled:opacity-50"
style={{ minHeight: '44px' }}
>
Cancel
{importResult ? 'Close' : 'Cancel'}
</button>
{!importResult && (
<button
onClick={handleImportConfirm}
disabled={!importPreview.valid || importApplyMutation.isPending}
disabled={!importPreview?.valid || importApplyMutation.isPending}
className="flex-1 bg-blue-600 text-white py-3 rounded-lg font-medium hover:bg-blue-700 transition disabled:opacity-50"
style={{ minHeight: '44px' }}
>
@@ -522,6 +601,7 @@ export const AdminCatalogMobileScreen: React.FC = () => {
'Apply Import'
)}
</button>
)}
</div>
</div>
</div>

View File

@@ -28,6 +28,7 @@ import {
Tooltip,
Typography,
Alert,
Collapse,
} from '@mui/material';
import {
Search,
@@ -35,6 +36,8 @@ import {
FileDownload,
FileUpload,
Clear,
ExpandMore,
ExpandLess,
} from '@mui/icons-material';
import toast from 'react-hot-toast';
import { useAdminAccess } from '../../core/auth/useAdminAccess';
@@ -52,6 +55,7 @@ import {
import {
CatalogSearchResult,
ImportPreviewResult,
ImportApplyResult,
} from '../../features/admin/types/admin.types';
const PAGE_SIZE_OPTIONS = [25, 50, 100];
@@ -76,6 +80,8 @@ export const AdminCatalogPage: React.FC = () => {
// Import state
const [importDialogOpen, setImportDialogOpen] = useState(false);
const [importPreview, setImportPreview] = useState<ImportPreviewResult | null>(null);
const [importResult, setImportResult] = useState<ImportApplyResult | null>(null);
const [errorsExpanded, setErrorsExpanded] = useState(false);
const fileInputRef = useRef<HTMLInputElement>(null);
// Hooks
@@ -217,15 +223,38 @@ export const AdminCatalogPage: React.FC = () => {
if (!importPreview?.previewId) return;
try {
await importApplyMutation.mutateAsync(importPreview.previewId);
const result = await importApplyMutation.mutateAsync(importPreview.previewId);
setImportResult(result);
if (result.errors.length > 0) {
toast.error(
`Import completed with ${result.errors.length} error(s): ${result.created} created, ${result.updated} updated`
);
// Keep dialog open for error review
} else {
toast.success(
`Import completed successfully: ${result.created} created, ${result.updated} updated`
);
// Auto-close on complete success
setImportDialogOpen(false);
setImportPreview(null);
setImportResult(null);
}
refetch();
} catch (error) {
// Error is handled by mutation
// Error is handled by mutation's onError
}
}, [importPreview, importApplyMutation, refetch]);
const handleImportDialogClose = useCallback(() => {
if (importApplyMutation.isPending) return;
setImportDialogOpen(false);
setImportPreview(null);
setImportResult(null);
setErrorsExpanded(false);
}, [importApplyMutation.isPending]);
// Export handler
const handleExport = useCallback(() => {
exportMutation.mutate();
@@ -506,18 +535,20 @@ export const AdminCatalogPage: React.FC = () => {
</DialogActions>
</Dialog>
{/* Import Preview Dialog */}
{/* Import Preview/Results Dialog */}
<Dialog
open={importDialogOpen}
onClose={() => !importApplyMutation.isPending && setImportDialogOpen(false)}
onClose={handleImportDialogClose}
maxWidth="md"
fullWidth
>
<DialogTitle>Import Preview</DialogTitle>
<DialogTitle>
{importResult ? 'Import Results' : 'Import Preview'}
</DialogTitle>
<DialogContent>
{importPreview && (
{/* Preview Mode */}
{importPreview && !importResult && (
<Box sx={{ display: 'flex', flexDirection: 'column', gap: 2, mt: 1 }}>
{/* Summary */}
<Box sx={{ display: 'flex', gap: 3 }}>
<Typography>
<strong>To Create:</strong> {importPreview.toCreate.length}
@@ -527,7 +558,6 @@ export const AdminCatalogPage: React.FC = () => {
</Typography>
</Box>
{/* Errors */}
{importPreview.errors.length > 0 && (
<Alert severity="error">
<Typography variant="subtitle2" gutterBottom>
@@ -546,7 +576,6 @@ export const AdminCatalogPage: React.FC = () => {
</Alert>
)}
{/* Valid status */}
{importPreview.valid ? (
<Alert severity="success">
The import file is valid and ready to be applied.
@@ -558,15 +587,77 @@ export const AdminCatalogPage: React.FC = () => {
)}
</Box>
)}
{/* Results Mode */}
{importResult && (
<Box sx={{ display: 'flex', flexDirection: 'column', gap: 2, mt: 1 }}>
<Box sx={{ display: 'flex', gap: 3 }}>
<Typography>
<strong>Created:</strong> {importResult.created}
</Typography>
<Typography>
<strong>Updated:</strong> {importResult.updated}
</Typography>
</Box>
{importResult.errors.length > 0 && (
<Box sx={{ border: 1, borderColor: 'error.main', borderRadius: 1 }}>
<Box
onClick={() => setErrorsExpanded(!errorsExpanded)}
sx={{
display: 'flex',
justifyContent: 'space-between',
alignItems: 'center',
p: 2,
bgcolor: 'error.light',
cursor: 'pointer',
'&:hover': { bgcolor: 'error.main', color: 'white' },
}}
>
<Typography variant="subtitle2" sx={{ fontWeight: 600 }}>
{importResult.errors.length} Error(s) Occurred
</Typography>
<IconButton size="small" sx={{ color: 'inherit' }}>
{errorsExpanded ? <ExpandLess /> : <ExpandMore />}
</IconButton>
</Box>
<Collapse in={errorsExpanded}>
<Box sx={{ maxHeight: 400, overflow: 'auto', p: 2, bgcolor: 'background.paper' }}>
<Box component="ul" sx={{ m: 0, pl: 2 }}>
{importResult.errors.map((err, idx) => (
<Typography
component="li"
key={idx}
variant="body2"
sx={{ mb: 1, fontFamily: 'monospace', fontSize: '0.875rem' }}
>
<strong>Row {err.row}:</strong> {err.error}
</Typography>
))}
</Box>
</Box>
</Collapse>
</Box>
)}
{importResult.errors.length === 0 && (
<Alert severity="success">
Import completed successfully with no errors.
</Alert>
)}
</Box>
)}
</DialogContent>
<DialogActions>
<Button
onClick={() => setImportDialogOpen(false)}
onClick={handleImportDialogClose}
disabled={importApplyMutation.isPending}
sx={{ textTransform: 'none' }}
>
Cancel
{importResult ? 'Close' : 'Cancel'}
</Button>
{!importResult && (
<Button
onClick={handleImportConfirm}
disabled={!importPreview?.valid || importApplyMutation.isPending}
@@ -575,6 +666,7 @@ export const AdminCatalogPage: React.FC = () => {
>
{importApplyMutation.isPending ? <CircularProgress size={20} /> : 'Apply Import'}
</Button>
)}
</DialogActions>
</Dialog>
</Box>