Make/Model Data Loading
This commit is contained in:
@@ -16,6 +16,7 @@ const pool = new Pool({
|
||||
// and user-preferences trigger depends on it; so run vehicles before core/user-preferences.
|
||||
const MIGRATION_ORDER = [
|
||||
'features/vehicles', // Primary entity, defines update_updated_at_column()
|
||||
'features/platform', // Normalized make/model/trim schema for dropdowns
|
||||
'features/documents', // Depends on vehicles; provides documents table
|
||||
'core/user-preferences', // Depends on update_updated_at_column()
|
||||
'features/fuel-logs', // Depends on vehicles
|
||||
|
||||
@@ -0,0 +1,139 @@
|
||||
-- Migration: normalized vehicle lookup schema (make -> model -> model_year -> trim)
-- plus engine/transmission lookups and their trim bridge tables.
-- All statements are idempotent (IF NOT EXISTS / DROP TRIGGER IF EXISTS)
-- so the migration can be re-run safely.

-- Create dedicated schema for normalized vehicle lookup data
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM pg_namespace WHERE nspname = 'vehicles'
    ) THEN
        EXECUTE 'CREATE SCHEMA vehicles';
    END IF;
END;
$$;

-- Create manufacturers table
CREATE TABLE IF NOT EXISTS vehicles.make (
    id BIGSERIAL PRIMARY KEY,
    name VARCHAR(150) NOT NULL UNIQUE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- Shared row-touch trigger function: stamps updated_at on every UPDATE.
CREATE OR REPLACE FUNCTION vehicles.touch_updated_at()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = CURRENT_TIMESTAMP;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

DROP TRIGGER IF EXISTS touch_make_updated_at ON vehicles.make;
CREATE TRIGGER touch_make_updated_at
    BEFORE UPDATE ON vehicles.make
    FOR EACH ROW
    EXECUTE FUNCTION vehicles.touch_updated_at();

-- Create models table
CREATE TABLE IF NOT EXISTS vehicles.model (
    id BIGSERIAL PRIMARY KEY,
    make_id BIGINT NOT NULL REFERENCES vehicles.make(id) ON DELETE CASCADE,
    name VARCHAR(150) NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT vehicles_model_unique UNIQUE(make_id, name)
);

DROP TRIGGER IF EXISTS touch_model_updated_at ON vehicles.model;
CREATE TRIGGER touch_model_updated_at
    BEFORE UPDATE ON vehicles.model
    FOR EACH ROW
    EXECUTE FUNCTION vehicles.touch_updated_at();

-- Create model_year table
CREATE TABLE IF NOT EXISTS vehicles.model_year (
    id BIGSERIAL PRIMARY KEY,
    model_id BIGINT NOT NULL REFERENCES vehicles.model(id) ON DELETE CASCADE,
    year INTEGER NOT NULL CHECK (year BETWEEN 1900 AND 2100),
    created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT vehicles_model_year_unique UNIQUE(model_id, year)
);

-- DESC index: dropdowns list newest model years first.
CREATE INDEX IF NOT EXISTS idx_model_year_year ON vehicles.model_year(year DESC);

DROP TRIGGER IF EXISTS touch_model_year_updated_at ON vehicles.model_year;
CREATE TRIGGER touch_model_year_updated_at
    BEFORE UPDATE ON vehicles.model_year
    FOR EACH ROW
    EXECUTE FUNCTION vehicles.touch_updated_at();

-- Create trims table
CREATE TABLE IF NOT EXISTS vehicles.trim (
    id BIGSERIAL PRIMARY KEY,
    model_year_id BIGINT NOT NULL REFERENCES vehicles.model_year(id) ON DELETE CASCADE,
    name VARCHAR(150) NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT vehicles_trim_unique UNIQUE(model_year_id, name)
);

CREATE INDEX IF NOT EXISTS idx_trim_model_year ON vehicles.trim(model_year_id);

DROP TRIGGER IF EXISTS touch_trim_updated_at ON vehicles.trim;
CREATE TRIGGER touch_trim_updated_at
    BEFORE UPDATE ON vehicles.trim
    FOR EACH ROW
    EXECUTE FUNCTION vehicles.touch_updated_at();

-- Create engines table
CREATE TABLE IF NOT EXISTS vehicles.engine (
    id BIGSERIAL PRIMARY KEY,
    name VARCHAR(200) NOT NULL UNIQUE,
    code VARCHAR(50),
    displacement_l NUMERIC(5,2),
    cylinders SMALLINT,
    fuel_type VARCHAR(50),
    aspiration VARCHAR(50),
    created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
);

DROP TRIGGER IF EXISTS touch_engine_updated_at ON vehicles.engine;
CREATE TRIGGER touch_engine_updated_at
    BEFORE UPDATE ON vehicles.engine
    FOR EACH ROW
    EXECUTE FUNCTION vehicles.touch_updated_at();

-- Create trim-engine bridge table
CREATE TABLE IF NOT EXISTS vehicles.trim_engine (
    trim_id BIGINT NOT NULL REFERENCES vehicles.trim(id) ON DELETE CASCADE,
    engine_id BIGINT NOT NULL REFERENCES vehicles.engine(id) ON DELETE CASCADE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY (trim_id, engine_id)
);

-- Create transmissions table (static manual/automatic for now)
CREATE TABLE IF NOT EXISTS vehicles.transmission (
    id BIGSERIAL PRIMARY KEY,
    name VARCHAR(50) NOT NULL UNIQUE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
);

DROP TRIGGER IF EXISTS touch_transmission_updated_at ON vehicles.transmission;
CREATE TRIGGER touch_transmission_updated_at
    BEFORE UPDATE ON vehicles.transmission
    FOR EACH ROW
    EXECUTE FUNCTION vehicles.touch_updated_at();

-- Optional bridge for future proofing (not yet populated)
CREATE TABLE IF NOT EXISTS vehicles.trim_transmission (
    trim_id BIGINT NOT NULL REFERENCES vehicles.trim(id) ON DELETE CASCADE,
    transmission_id BIGINT NOT NULL REFERENCES vehicles.transmission(id) ON DELETE CASCADE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY (trim_id, transmission_id)
);

-- Helpful indexes for cascading dropdown lookups
CREATE INDEX IF NOT EXISTS idx_model_make ON vehicles.model(make_id);
CREATE INDEX IF NOT EXISTS idx_trim_name ON vehicles.trim(LOWER(name));
CREATE INDEX IF NOT EXISTS idx_engine_name ON vehicles.engine(LOWER(name));
CREATE INDEX IF NOT EXISTS idx_trim_engine_engine ON vehicles.trim_engine(engine_id);
|
||||
@@ -68,6 +68,7 @@ services:
|
||||
start_period: 20s
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- traefik.docker.network=motovaultpro_frontend
|
||||
- "traefik.http.routers.mvp-frontend.rule=(Host(`motovaultpro.com`) || Host(`www.motovaultpro.com`)) && !PathPrefix(`/api`)"
|
||||
- "traefik.http.routers.mvp-frontend.entrypoints=websecure"
|
||||
- "traefik.http.routers.mvp-frontend.tls=true"
|
||||
|
||||
@@ -212,8 +212,6 @@ Single-feature migration is not implemented yet.
|
||||
|
||||
**Password Management**: All database passwords are managed via Docker secrets, mounted from host files:
|
||||
- Application DB: `./secrets/app/postgres-password.txt`
|
||||
- Platform DB: `./secrets/platform/platform-db-password.txt`
|
||||
- Vehicles DB: `./secrets/platform/vehicles-db-password.txt`
|
||||
|
||||
### Connection Pool
|
||||
- **Implementation**: pg (node-postgres)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
## Overview
|
||||
|
||||
The MVP Platform module is fully integrated inside the MotoVaultPro backend container. It delivers all platform capabilities without requiring a separate service or container in the simplified five-container stack.
|
||||
The MVP Platform module is fully integrated inside the MotoVaultPro backend container.
|
||||
|
||||
## Architecture
|
||||
|
||||
@@ -31,37 +31,6 @@ The platform provides vehicle data capabilities including:
|
||||
- **Database**: Shared mvp-postgres database
|
||||
- **Cache**: Shared mvp-redis cache
|
||||
|
||||
## Development Workflow
|
||||
|
||||
### Local Development
|
||||
|
||||
**Start All Services**:
|
||||
```bash
|
||||
make start # Starts the five-container stack
|
||||
```
|
||||
|
||||
**Backend Logs (includes platform module)**:
|
||||
```bash
|
||||
make logs-backend
|
||||
```
|
||||
|
||||
**Backend Shell (platform code lives here)**:
|
||||
```bash
|
||||
make shell-backend
|
||||
```
|
||||
|
||||
### Database Management
|
||||
|
||||
**Shared Database**:
|
||||
- **PostgreSQL** (port 5432): mvp-postgres
|
||||
- **Redis** (port 6379): mvp-redis
|
||||
|
||||
**Database Access**:
|
||||
```bash
|
||||
# PostgreSQL
|
||||
make db-shell-app
|
||||
```
|
||||
|
||||
## Deployment Strategy
|
||||
|
||||
### Integrated Deployment
|
||||
|
||||
27
docs/changes/platform-vehicle-data-loader.md
Normal file
27
docs/changes/platform-vehicle-data-loader.md
Normal file
@@ -0,0 +1,27 @@
|
||||
# Platform Vehicle Data Loader Refresh
|
||||
|
||||
## Context
|
||||
- Reintroduced the normalized `vehicles` schema (make/model/model_year/trim/engine + bridges) with a new migration under `backend/src/features/platform/migrations/001_create_vehicle_lookup_schema.sql`.
|
||||
- Added `scripts/load_vehicle_data.py`, a stand-alone Python loader that hydrates the schema from `data/make-models/*.json`.
|
||||
- Loader truncates lookup tables, seeds default transmissions (Automatic, Manual), and inserts deduplicated year → make → model → trim → engine combinations.
|
||||
|
||||
## Follow-up Tasks (Backend API Team)
|
||||
1. **Wire dropdown API to refreshed data**
|
||||
- Run `make migrate` (or `npm run migrate:all` inside backend container) to ensure the new schema exists.
|
||||
- Execute the loader (see command below) so Postgres has the latest lookup entries.
|
||||
- Verify `VehicleDataRepository` queries and Redis caching logic continue to function against the reinstated tables.
|
||||
2. **Add Makefile wrapper**
|
||||
- Create a `make load-vehicle-data` task that shells into the backend container, installs `psycopg` if needed, and invokes `python3 scripts/load_vehicle_data.py` with the correct DB credentials and data directory.
|
||||
|
||||
### Loader Command Reference
|
||||
```
|
||||
PGPASSWORD=$(cat secrets/app/postgres-password.txt) \
|
||||
python3 scripts/load_vehicle_data.py \
|
||||
--db-host 127.0.0.1 \
|
||||
--db-port 5432 \
|
||||
--db-user postgres \
|
||||
--db-name motovaultpro \
|
||||
--data-dir data/make-models
|
||||
```
|
||||
|
||||
> Run the command from the repository root (outside of containers) while `mvp-postgres` is up. Adjust host/port if executing inside a container.
|
||||
@@ -16,6 +16,13 @@ describe('Gas Stations Feature', () => {
|
||||
cy.visit('/stations');
|
||||
});
|
||||
|
||||
const enterSampleAddress = () => {
|
||||
cy.get('input[name="street"]').clear().type('123 Main St');
|
||||
cy.get('input[name="city"]').clear().type('San Francisco');
|
||||
cy.get('select[name="state"]').select('CA');
|
||||
cy.get('input[name="zip"]').clear().type('94105');
|
||||
};
|
||||
|
||||
describe('Search for Nearby Stations', () => {
|
||||
it('should allow searching with current location', () => {
|
||||
// Mock geolocation
|
||||
@@ -42,10 +49,9 @@ describe('Gas Stations Feature', () => {
|
||||
cy.contains('Shell').or('Chevron').or('76').or('Exxon').should('be.visible');
|
||||
});
|
||||
|
||||
it('should allow searching with manual coordinates', () => {
|
||||
// Enter manual coordinates
|
||||
cy.get('input[name="latitude"]').clear().type('37.7749');
|
||||
cy.get('input[name="longitude"]').clear().type('-122.4194');
|
||||
it('should allow searching with a manual address', () => {
|
||||
// Enter manual address fields
|
||||
enterSampleAddress();
|
||||
|
||||
// Adjust radius
|
||||
cy.get('[data-testid="radius-slider"]').click();
|
||||
@@ -57,16 +63,12 @@ describe('Gas Stations Feature', () => {
|
||||
cy.get('[data-testid="station-card"]').should('exist');
|
||||
});
|
||||
|
||||
it('should handle search errors gracefully', () => {
|
||||
// Enter invalid coordinates
|
||||
cy.get('input[name="latitude"]').clear().type('999');
|
||||
cy.get('input[name="longitude"]').clear().type('999');
|
||||
|
||||
// Search
|
||||
it('should require address details when location is unavailable', () => {
|
||||
// Attempt to search without address or geolocation
|
||||
cy.contains('button', 'Search').click();
|
||||
|
||||
// Verify error message
|
||||
cy.contains('error', { matchCase: false }).should('be.visible');
|
||||
cy.contains('Enter Street, City, State, and ZIP', { matchCase: false }).should('be.visible');
|
||||
});
|
||||
|
||||
it('should display loading state during search', () => {
|
||||
@@ -85,8 +87,7 @@ describe('Gas Stations Feature', () => {
|
||||
describe('View Stations on Map', () => {
|
||||
beforeEach(() => {
|
||||
// Perform a search first
|
||||
cy.get('input[name="latitude"]').clear().type('37.7749');
|
||||
cy.get('input[name="longitude"]').clear().type('-122.4194');
|
||||
enterSampleAddress();
|
||||
cy.contains('button', 'Search').click();
|
||||
cy.wait(2000);
|
||||
});
|
||||
@@ -122,8 +123,7 @@ describe('Gas Stations Feature', () => {
|
||||
describe('Save Station to Favorites', () => {
|
||||
beforeEach(() => {
|
||||
// Search first
|
||||
cy.get('input[name="latitude"]').clear().type('37.7749');
|
||||
cy.get('input[name="longitude"]').clear().type('-122.4194');
|
||||
enterSampleAddress();
|
||||
cy.contains('button', 'Search').click();
|
||||
cy.wait(1000);
|
||||
});
|
||||
|
||||
@@ -78,14 +78,14 @@ StationsMobileScreen (Mobile)
|
||||
|
||||
### StationsSearchForm
|
||||
|
||||
**Purpose**: Search input with geolocation and manual coordinate entry
|
||||
**Purpose**: Search input with geolocation or manual street-level address entry
|
||||
|
||||
**Props**: None (uses hooks internally)
|
||||
|
||||
**Features**:
|
||||
- Geolocation button (requests browser permission)
|
||||
- Manual latitude/longitude inputs
|
||||
- Radius slider (1-50 km)
|
||||
- Manual Street / City / State / ZIP inputs with Google geocoding
|
||||
- Radius slider (1-25 miles)
|
||||
- Loading states
|
||||
- Error handling
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
* @ai-summary Form for searching nearby gas stations
|
||||
*/
|
||||
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import React, { useState, useEffect, useMemo } from 'react';
|
||||
import {
|
||||
Box,
|
||||
TextField,
|
||||
@@ -12,12 +12,95 @@ import {
|
||||
FormLabel,
|
||||
Alert,
|
||||
CircularProgress,
|
||||
InputAdornment
|
||||
InputAdornment,
|
||||
MenuItem
|
||||
} from '@mui/material';
|
||||
import LocationIcon from '@mui/icons-material/LocationOn';
|
||||
import MyLocationIcon from '@mui/icons-material/MyLocation';
|
||||
import { StationSearchRequest, GeolocationError } from '../types/stations.types';
|
||||
import { useGeolocation } from '../hooks';
|
||||
import { loadGoogleMaps, getGoogleMapsApi } from '../utils/maps-loader';
|
||||
|
||||
type Coordinates = {
|
||||
latitude: number;
|
||||
longitude: number;
|
||||
};
|
||||
|
||||
type GeocoderStatus = 'OK' | 'ZERO_RESULTS' | string;
|
||||
|
||||
interface GeocoderResult {
|
||||
geometry: {
|
||||
location: {
|
||||
lat: () => number;
|
||||
lng: () => number;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
interface GoogleMapsGeocoder {
|
||||
geocode(
|
||||
request: { address: string },
|
||||
callback: (results: GeocoderResult[] | null, status: GeocoderStatus) => void
|
||||
): void;
|
||||
}
|
||||
|
||||
type GoogleMapsWithGeocoder = {
|
||||
Geocoder: new () => GoogleMapsGeocoder;
|
||||
};
|
||||
|
||||
const US_STATE_OPTIONS = [
|
||||
{ value: 'AL', label: 'Alabama' },
|
||||
{ value: 'AK', label: 'Alaska' },
|
||||
{ value: 'AZ', label: 'Arizona' },
|
||||
{ value: 'AR', label: 'Arkansas' },
|
||||
{ value: 'CA', label: 'California' },
|
||||
{ value: 'CO', label: 'Colorado' },
|
||||
{ value: 'CT', label: 'Connecticut' },
|
||||
{ value: 'DE', label: 'Delaware' },
|
||||
{ value: 'DC', label: 'District of Columbia' },
|
||||
{ value: 'FL', label: 'Florida' },
|
||||
{ value: 'GA', label: 'Georgia' },
|
||||
{ value: 'HI', label: 'Hawaii' },
|
||||
{ value: 'ID', label: 'Idaho' },
|
||||
{ value: 'IL', label: 'Illinois' },
|
||||
{ value: 'IN', label: 'Indiana' },
|
||||
{ value: 'IA', label: 'Iowa' },
|
||||
{ value: 'KS', label: 'Kansas' },
|
||||
{ value: 'KY', label: 'Kentucky' },
|
||||
{ value: 'LA', label: 'Louisiana' },
|
||||
{ value: 'ME', label: 'Maine' },
|
||||
{ value: 'MD', label: 'Maryland' },
|
||||
{ value: 'MA', label: 'Massachusetts' },
|
||||
{ value: 'MI', label: 'Michigan' },
|
||||
{ value: 'MN', label: 'Minnesota' },
|
||||
{ value: 'MS', label: 'Mississippi' },
|
||||
{ value: 'MO', label: 'Missouri' },
|
||||
{ value: 'MT', label: 'Montana' },
|
||||
{ value: 'NE', label: 'Nebraska' },
|
||||
{ value: 'NV', label: 'Nevada' },
|
||||
{ value: 'NH', label: 'New Hampshire' },
|
||||
{ value: 'NJ', label: 'New Jersey' },
|
||||
{ value: 'NM', label: 'New Mexico' },
|
||||
{ value: 'NY', label: 'New York' },
|
||||
{ value: 'NC', label: 'North Carolina' },
|
||||
{ value: 'ND', label: 'North Dakota' },
|
||||
{ value: 'OH', label: 'Ohio' },
|
||||
{ value: 'OK', label: 'Oklahoma' },
|
||||
{ value: 'OR', label: 'Oregon' },
|
||||
{ value: 'PA', label: 'Pennsylvania' },
|
||||
{ value: 'RI', label: 'Rhode Island' },
|
||||
{ value: 'SC', label: 'South Carolina' },
|
||||
{ value: 'SD', label: 'South Dakota' },
|
||||
{ value: 'TN', label: 'Tennessee' },
|
||||
{ value: 'TX', label: 'Texas' },
|
||||
{ value: 'UT', label: 'Utah' },
|
||||
{ value: 'VT', label: 'Vermont' },
|
||||
{ value: 'VA', label: 'Virginia' },
|
||||
{ value: 'WA', label: 'Washington' },
|
||||
{ value: 'WV', label: 'West Virginia' },
|
||||
{ value: 'WI', label: 'Wisconsin' },
|
||||
{ value: 'WY', label: 'Wyoming' }
|
||||
];
|
||||
|
||||
interface StationsSearchFormProps {
|
||||
onSearch: (request: StationSearchRequest) => void;
|
||||
@@ -32,10 +115,15 @@ export const StationsSearchForm: React.FC<StationsSearchFormProps> = ({
|
||||
onSearch,
|
||||
isSearching = false
|
||||
}) => {
|
||||
const [latitude, setLatitude] = useState<number | ''>('');
|
||||
const [longitude, setLongitude] = useState<number | ''>('');
|
||||
const [street, setStreet] = useState('');
|
||||
const [city, setCity] = useState('');
|
||||
const [stateCode, setStateCode] = useState('');
|
||||
const [zip, setZip] = useState('');
|
||||
const [radius, setRadius] = useState(5); // Miles
|
||||
const [locationError, setLocationError] = useState<string | null>(null);
|
||||
const [addressError, setAddressError] = useState<string | null>(null);
|
||||
const [resolvedCoordinates, setResolvedCoordinates] = useState<Coordinates | null>(null);
|
||||
const [isGeocoding, setIsGeocoding] = useState(false);
|
||||
|
||||
const {
|
||||
coordinates,
|
||||
@@ -48,9 +136,12 @@ export const StationsSearchForm: React.FC<StationsSearchFormProps> = ({
|
||||
// Update form when geolocation succeeds
|
||||
useEffect(() => {
|
||||
if (coordinates) {
|
||||
setLatitude(coordinates.latitude);
|
||||
setLongitude(coordinates.longitude);
|
||||
setResolvedCoordinates({
|
||||
latitude: coordinates.latitude,
|
||||
longitude: coordinates.longitude
|
||||
});
|
||||
setLocationError(null);
|
||||
setAddressError(null);
|
||||
}
|
||||
}, [coordinates]);
|
||||
|
||||
@@ -64,31 +155,87 @@ export const StationsSearchForm: React.FC<StationsSearchFormProps> = ({
|
||||
} else if (geoError === GeolocationError.POSITION_UNAVAILABLE) {
|
||||
setLocationError('Location not available. Try a different device.');
|
||||
} else {
|
||||
setLocationError('Unable to get location. Please enter manually.');
|
||||
setLocationError('Unable to get location. Please enter your address.');
|
||||
}
|
||||
}
|
||||
}, [geoError]);
|
||||
|
||||
const handleUseCurrentLocation = () => {
|
||||
clearGeoError();
|
||||
setAddressError(null);
|
||||
requestPermission();
|
||||
};
|
||||
|
||||
const handleSearch = () => {
|
||||
if (latitude === '' || longitude === '') {
|
||||
setLocationError('Please enter coordinates or use current location');
|
||||
return;
|
||||
}
|
||||
const addressIsComplete = useMemo(
|
||||
() => street.trim() !== '' && city.trim() !== '' && stateCode !== '' && zip.trim().length === 5,
|
||||
[street, city, stateCode, zip]
|
||||
);
|
||||
|
||||
const markManualAddressInput = () => {
|
||||
setResolvedCoordinates(null);
|
||||
setAddressError(null);
|
||||
setLocationError(null);
|
||||
};
|
||||
|
||||
const handleSearch = async (): Promise<void> => {
|
||||
const submitWithCoordinates = (coords: Coordinates) => {
|
||||
const request: StationSearchRequest = {
|
||||
latitude: typeof latitude === 'number' ? latitude : 0,
|
||||
longitude: typeof longitude === 'number' ? longitude : 0,
|
||||
latitude: coords.latitude,
|
||||
longitude: coords.longitude,
|
||||
radius: radius * 1609.34 // Convert miles to meters
|
||||
};
|
||||
|
||||
onSearch(request);
|
||||
};
|
||||
|
||||
if (resolvedCoordinates) {
|
||||
submitWithCoordinates(resolvedCoordinates);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!addressIsComplete) {
|
||||
setAddressError('Enter Street, City, State, and ZIP or use current location.');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
setIsGeocoding(true);
|
||||
await loadGoogleMaps();
|
||||
const maps = getGoogleMapsApi() as unknown as GoogleMapsWithGeocoder;
|
||||
const geocoder = new maps.Geocoder();
|
||||
const formattedAddress = [street, city, stateCode, zip].filter(Boolean).join(', ');
|
||||
|
||||
const coords = await new Promise<Coordinates>((resolve, reject) => {
|
||||
geocoder.geocode({ address: formattedAddress }, (results, status) => {
|
||||
if (status === 'OK' && results && results[0]) {
|
||||
const location = results[0].geometry.location;
|
||||
resolve({
|
||||
latitude: location.lat(),
|
||||
longitude: location.lng()
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (status === 'ZERO_RESULTS') {
|
||||
reject(new Error('Address not found. Please double-check the details.'));
|
||||
return;
|
||||
}
|
||||
|
||||
reject(new Error('Unable to locate that address right now. Try again shortly.'));
|
||||
});
|
||||
});
|
||||
|
||||
setResolvedCoordinates(coords);
|
||||
setLocationError(null);
|
||||
setAddressError(null);
|
||||
submitWithCoordinates(coords);
|
||||
} catch (error) {
|
||||
setAddressError(error instanceof Error ? error.message : 'Unable to locate that address.');
|
||||
} finally {
|
||||
setIsGeocoding(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleRadiusChange = (
|
||||
_event: Event,
|
||||
newValue: number | number[]
|
||||
@@ -103,7 +250,7 @@ export const StationsSearchForm: React.FC<StationsSearchFormProps> = ({
|
||||
component="form"
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault();
|
||||
handleSearch();
|
||||
void handleSearch();
|
||||
}}
|
||||
sx={{ padding: 2, display: 'flex', flexDirection: 'column', gap: 2 }}
|
||||
>
|
||||
@@ -121,17 +268,17 @@ export const StationsSearchForm: React.FC<StationsSearchFormProps> = ({
|
||||
{/* Or Divider */}
|
||||
<Box sx={{ textAlign: 'center', color: 'textSecondary' }}>or</Box>
|
||||
|
||||
{/* Manual Latitude Input */}
|
||||
{/* Street Address Input */}
|
||||
<TextField
|
||||
label="Latitude"
|
||||
type="number"
|
||||
value={latitude}
|
||||
label="Street"
|
||||
name="street"
|
||||
value={street}
|
||||
onChange={(e) => {
|
||||
const val = e.target.value;
|
||||
setLatitude(val === '' ? '' : parseFloat(val));
|
||||
setStreet(e.target.value);
|
||||
markManualAddressInput();
|
||||
}}
|
||||
placeholder="37.7749"
|
||||
inputProps={{ step: '0.0001', min: '-90', max: '90' }}
|
||||
placeholder="123 Main St"
|
||||
autoComplete="address-line1"
|
||||
fullWidth
|
||||
InputProps={{
|
||||
startAdornment: (
|
||||
@@ -142,31 +289,87 @@ export const StationsSearchForm: React.FC<StationsSearchFormProps> = ({
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Manual Longitude Input */}
|
||||
{/* City Input */}
|
||||
<TextField
|
||||
label="Longitude"
|
||||
type="number"
|
||||
value={longitude}
|
||||
label="City"
|
||||
name="city"
|
||||
value={city}
|
||||
onChange={(e) => {
|
||||
const val = e.target.value;
|
||||
setLongitude(val === '' ? '' : parseFloat(val));
|
||||
setCity(e.target.value);
|
||||
markManualAddressInput();
|
||||
}}
|
||||
placeholder="-122.4194"
|
||||
inputProps={{ step: '0.0001', min: '-180', max: '180' }}
|
||||
placeholder="San Francisco"
|
||||
autoComplete="address-level2"
|
||||
fullWidth
|
||||
InputProps={{
|
||||
startAdornment: (
|
||||
<InputAdornment position="start">
|
||||
<LocationIcon />
|
||||
</InputAdornment>
|
||||
)
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* State and ZIP */}
|
||||
<Box
|
||||
sx={{
|
||||
display: 'grid',
|
||||
gridTemplateColumns: { xs: '1fr', sm: '1fr 1fr' },
|
||||
gap: 2,
|
||||
width: '100%'
|
||||
}}
|
||||
>
|
||||
<TextField
|
||||
select
|
||||
label="State"
|
||||
value={stateCode}
|
||||
onChange={(e) => {
|
||||
setStateCode(e.target.value);
|
||||
markManualAddressInput();
|
||||
}}
|
||||
SelectProps={{
|
||||
displayEmpty: true,
|
||||
renderValue: (selected) => {
|
||||
const value = selected as string;
|
||||
if (!value) {
|
||||
return 'Select state';
|
||||
}
|
||||
const option = US_STATE_OPTIONS.find((state) => state.value === value);
|
||||
return option ? option.label : value;
|
||||
}
|
||||
}}
|
||||
InputLabelProps={{ shrink: true }}
|
||||
inputProps={{ name: 'state' }}
|
||||
fullWidth
|
||||
>
|
||||
<MenuItem value="">
|
||||
<em>Select state</em>
|
||||
</MenuItem>
|
||||
{US_STATE_OPTIONS.map((state) => (
|
||||
<MenuItem key={state.value} value={state.value}>
|
||||
{state.label}
|
||||
</MenuItem>
|
||||
))}
|
||||
</TextField>
|
||||
|
||||
<TextField
|
||||
label="ZIP"
|
||||
name="zip"
|
||||
value={zip}
|
||||
onChange={(e) => {
|
||||
const sanitized = e.target.value.replace(/[^0-9]/g, '').slice(0, 5);
|
||||
setZip(sanitized);
|
||||
markManualAddressInput();
|
||||
}}
|
||||
placeholder="94105"
|
||||
inputProps={{
|
||||
inputMode: 'numeric',
|
||||
pattern: '[0-9]*',
|
||||
maxLength: 5
|
||||
}}
|
||||
autoComplete="postal-code"
|
||||
fullWidth
|
||||
/>
|
||||
</Box>
|
||||
|
||||
{/* Radius Slider */}
|
||||
<FormControl fullWidth>
|
||||
<FormLabel>Search Radius: {radius} mi</FormLabel>
|
||||
<Slider
|
||||
data-testid="radius-slider"
|
||||
value={radius}
|
||||
onChange={handleRadiusChange}
|
||||
min={1}
|
||||
@@ -183,22 +386,32 @@ export const StationsSearchForm: React.FC<StationsSearchFormProps> = ({
|
||||
</FormControl>
|
||||
|
||||
{/* Error Messages */}
|
||||
{locationError && (
|
||||
<Alert severity="error">{locationError}</Alert>
|
||||
{(locationError || addressError) && (
|
||||
<Alert severity="error">{locationError || addressError}</Alert>
|
||||
)}
|
||||
|
||||
{/* Search Button */}
|
||||
<Button
|
||||
variant="contained"
|
||||
color="primary"
|
||||
onClick={handleSearch}
|
||||
disabled={isSearching || latitude === '' || longitude === ''}
|
||||
onClick={() => {
|
||||
void handleSearch();
|
||||
}}
|
||||
disabled={
|
||||
isSearching ||
|
||||
isGeocoding ||
|
||||
(!resolvedCoordinates && !addressIsComplete)
|
||||
}
|
||||
sx={{
|
||||
minHeight: '44px',
|
||||
marginTop: 1
|
||||
}}
|
||||
>
|
||||
{isSearching ? <CircularProgress size={24} /> : 'Search Stations'}
|
||||
{isSearching || isGeocoding ? (
|
||||
<CircularProgress size={24} />
|
||||
) : (
|
||||
'Search Stations'
|
||||
)}
|
||||
</Button>
|
||||
</Box>
|
||||
);
|
||||
|
||||
342
scripts/load_vehicle_data.py
Executable file
342
scripts/load_vehicle_data.py
Executable file
@@ -0,0 +1,342 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Populate the normalized vehicles schema (make/model/model_year/trim/engine)
|
||||
from the JSON sources in data/make-models.
|
||||
|
||||
Example:
|
||||
PGPASSWORD=$(cat secrets/app/postgres-password.txt) \\
|
||||
python3 scripts/load_vehicle_data.py \\
|
||||
--db-user postgres --db-name motovaultpro --db-host 127.0.0.1
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from collections import Counter
|
||||
from pathlib import Path
|
||||
from typing import Dict, Iterable, List, Tuple
|
||||
|
||||
try:
|
||||
import psycopg
|
||||
except ImportError as exc: # pragma: no cover - ease troubleshooting
|
||||
sys.stderr.write(
|
||||
"Error: psycopg is required. Install with `pip install psycopg[binary]`.\n",
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
DEFAULT_DATA_DIR = Path(__file__).resolve().parents[1] / "data" / "make-models"
|
||||
DEFAULT_TRANSMISSIONS = ("Automatic", "Manual")
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Parse command-line options for the vehicle data loader.

    Connection settings mirror libpq's PG* environment variables, which act
    as fallback defaults; --database-url overrides them all when provided.
    """
    env = os.environ.get
    parser = argparse.ArgumentParser(description="Load vehicle dropdown data into Postgres.")
    parser.add_argument(
        "--data-dir",
        default=str(DEFAULT_DATA_DIR),
        help=f"Directory with make JSON files (default: {DEFAULT_DATA_DIR})",
    )
    parser.add_argument(
        "--database-url",
        help="Full postgres URL. Falls back to PG* environment variables if omitted.",
    )
    # Individual connection pieces, each defaulting from the environment.
    parser.add_argument("--db-host", default=env("PGHOST", "127.0.0.1"))
    parser.add_argument("--db-port", type=int, default=int(env("PGPORT", 5432)))
    parser.add_argument("--db-name", default=env("PGDATABASE", "motovaultpro"))
    parser.add_argument("--db-user", default=env("PGUSER", "postgres"))
    parser.add_argument("--db-password", default=env("PGPASSWORD"))
    parser.add_argument(
        "--transmissions",
        default=",".join(DEFAULT_TRANSMISSIONS),
        help="Comma-separated list of transmission labels (default: Automatic,Manual)",
    )
    parser.add_argument(
        "--skip-truncate",
        action="store_true",
        help="Do not truncate lookup tables before loading (useful for incremental testing).",
    )
    return parser.parse_args()
|
||||
|
||||
|
||||
def build_conninfo(args: argparse.Namespace) -> str:
    """Build a libpq conninfo string from parsed CLI arguments.

    An explicit --database-url wins outright; otherwise the individual
    host/port/dbname/user (and optional password) settings are assembled
    into keyword=value form.

    Fix over the original: values containing spaces, single quotes, or
    backslashes (common in generated passwords) are now single-quoted and
    escaped per the libpq conninfo rules instead of producing a broken
    connection string.
    """
    if args.database_url:
        return args.database_url

    def _quote(value: object) -> str:
        # libpq requires single quotes around empty values or values with
        # spaces/quotes/backslashes; ' and \ are escaped with a backslash.
        text = str(value)
        if text == "" or any(ch in text for ch in " '\\"):
            escaped = text.replace("\\", "\\\\").replace("'", "\\'")
            return f"'{escaped}'"
        return text

    parts = [
        f"host={_quote(args.db_host)}",
        f"port={_quote(args.db_port)}",
        f"dbname={_quote(args.db_name)}",
        f"user={_quote(args.db_user)}",
    ]
    if args.db_password:
        parts.append(f"password={_quote(args.db_password)}")
    return " ".join(parts)
|
||||
|
||||
|
||||
def load_json_documents(data_dir: Path) -> List[Tuple[str, dict]]:
    """Read every ``*.json`` file under *data_dir*, sorted by filename.

    Returns a list of ``(filename, parsed_document)`` pairs.

    Raises:
        FileNotFoundError: *data_dir* is missing or not a directory.
        ValueError: a file contains malformed JSON.
        RuntimeError: the directory holds no JSON files at all.
    """
    # Path.is_dir() is False for nonexistent paths, so one check covers both.
    if not data_dir.is_dir():
        raise FileNotFoundError(f"Data directory not found: {data_dir}")

    documents: List[Tuple[str, dict]] = []
    for file_path in sorted(data_dir.glob("*.json")):
        try:
            payload = json.loads(file_path.read_text(encoding="utf-8"))
        except json.JSONDecodeError as exc:
            raise ValueError(f"Invalid JSON in {file_path}: {exc}") from exc
        documents.append((file_path.name, payload))

    if not documents:
        raise RuntimeError(f"No JSON files found under {data_dir}")
    return documents
|
||||
|
||||
|
||||
def clean_label(value: str) -> str:
    """Normalize a raw label for display: underscores become spaces,
    internal whitespace collapses to single spaces, ends are trimmed.

    Falsy input (None, "") yields the empty string.
    """
    raw = "" if not value else str(value)
    return re.sub(r"\s+", " ", raw.replace("_", " ")).strip()


def normalize_key(value: str) -> str:
    """Case-insensitive deduplication key: the cleaned label, lower-cased."""
    return clean_label(value).lower()


def unique_labels(values: Iterable[str]) -> List[str]:
    """Deduplicate labels case-insensitively, keeping first-seen order.

    Blank entries are dropped; the first spelling of each label wins
    (later variants that differ only in case/whitespace are ignored).
    """
    seen_keys: set = set()
    kept: List[str] = []
    for raw in values:
        label = clean_label(raw)
        if not label:
            continue
        dedup_key = normalize_key(label)
        if dedup_key not in seen_keys:
            seen_keys.add(dedup_key)
            kept.append(label)
    return kept
|
||||
|
||||
|
||||
class LoaderCaches:
    """In-memory id caches so repeated upserts avoid redundant DB round-trips.

    String keys are normalized names (cleaned + lower-cased); int components
    are parent-row primary keys.
    """

    def __init__(self) -> None:
        self.makes: Dict[str, int] = {}                    # make key -> make id
        self.models: Dict[Tuple[int, str], int] = {}       # (make_id, model key) -> model id
        self.model_years: Dict[Tuple[int, int], int] = {}  # (model_id, year) -> model_year id
        self.trims: Dict[Tuple[int, str], int] = {}        # (model_year_id, trim key) -> trim id
        self.engines: Dict[str, int] = {}                  # engine key -> engine id
|
||||
|
||||
|
||||
class LoaderStats:
    """Thin Counter wrapper tracking how many rows of each kind were loaded."""

    def __init__(self) -> None:
        self.counter = Counter()

    def bump(self, key: str, amount: int = 1) -> None:
        """Increment *key* by *amount* (defaults to 1)."""
        self.counter[key] += amount

    def as_dict(self) -> Dict[str, int]:
        """Return a plain-dict snapshot of all counters."""
        return dict(self.counter)
|
||||
|
||||
|
||||
def truncate_lookup_tables(cur: "psycopg.Cursor") -> None:
    """Empty every lookup table and reset its id sequence.

    Order matters for readability (CASCADE would cope regardless): link
    tables first, then the make/model/trim hierarchy, then the shared
    engine/transmission lookups.
    """
    statements = (
        "TRUNCATE vehicles.trim_engine, vehicles.trim_transmission RESTART IDENTITY CASCADE",
        "TRUNCATE vehicles.trim, vehicles.model_year, vehicles.model, vehicles.make RESTART IDENTITY CASCADE",
        "TRUNCATE vehicles.engine, vehicles.transmission RESTART IDENTITY CASCADE",
    )
    for statement in statements:
        cur.execute(statement)
|
||||
|
||||
|
||||
def ensure_transmissions(cur: "psycopg.Cursor", names: Iterable[str]) -> None:
    """Insert each distinct transmission name, ignoring ones already present."""
    insert_sql = """
        INSERT INTO vehicles.transmission (name)
        VALUES (%s)
        ON CONFLICT (name) DO NOTHING
        """
    for transmission_name in unique_labels(names):
        cur.execute(insert_sql, (transmission_name,))
|
||||
|
||||
|
||||
def upsert_make(cur: "psycopg.Cursor", caches: "LoaderCaches", name: str) -> int:
    """Insert (or refresh) a make row and return its id, consulting the cache first.

    The no-op ``DO UPDATE SET name = EXCLUDED.name`` makes RETURNING yield the
    existing row's id on conflict.
    """
    cache_key = normalize_key(name)
    cached = caches.makes.get(cache_key)
    if cached is not None:
        return cached
    cur.execute(
        """
        INSERT INTO vehicles.make (name)
        VALUES (%s)
        ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name
        RETURNING id
        """,
        (name,),
    )
    row_id = cur.fetchone()[0]
    caches.makes[cache_key] = row_id
    return row_id
|
||||
|
||||
|
||||
def upsert_model(cur: "psycopg.Cursor", caches: "LoaderCaches", make_id: int, name: str) -> int:
    """Insert (or refresh) a model under *make_id* and return its id (cached)."""
    cache_key = (make_id, normalize_key(name))
    if cache_key not in caches.models:
        cur.execute(
            """
            INSERT INTO vehicles.model (make_id, name)
            VALUES (%s, %s)
            ON CONFLICT (make_id, name) DO UPDATE SET name = EXCLUDED.name
            RETURNING id
            """,
            (make_id, name),
        )
        # The no-op conflict update lets RETURNING hand back the existing id.
        caches.models[cache_key] = cur.fetchone()[0]
    return caches.models[cache_key]
|
||||
|
||||
|
||||
def upsert_model_year(cur: "psycopg.Cursor", caches: "LoaderCaches", model_id: int, year: int) -> int:
    """Insert (or refresh) a (model, year) row and return its id (cached)."""
    cache_key = (model_id, year)
    cached = caches.model_years.get(cache_key)
    if cached is not None:
        return cached
    cur.execute(
        """
        INSERT INTO vehicles.model_year (model_id, year)
        VALUES (%s, %s)
        ON CONFLICT (model_id, year) DO UPDATE SET year = EXCLUDED.year
        RETURNING id
        """,
        (model_id, year),
    )
    row_id = cur.fetchone()[0]
    caches.model_years[cache_key] = row_id
    return row_id
|
||||
|
||||
|
||||
def upsert_trim(cur: "psycopg.Cursor", caches: "LoaderCaches", model_year_id: int, name: str) -> int:
    """Insert (or refresh) a trim under *model_year_id* and return its id (cached)."""
    cache_key = (model_year_id, normalize_key(name))
    if cache_key not in caches.trims:
        cur.execute(
            """
            INSERT INTO vehicles.trim (model_year_id, name)
            VALUES (%s, %s)
            ON CONFLICT (model_year_id, name) DO UPDATE SET name = EXCLUDED.name
            RETURNING id
            """,
            (model_year_id, name),
        )
        caches.trims[cache_key] = cur.fetchone()[0]
    return caches.trims[cache_key]
|
||||
|
||||
|
||||
def upsert_engine(cur: "psycopg.Cursor", caches: "LoaderCaches", name: str) -> int:
    """Insert (or refresh) an engine row and return its id, consulting the cache first."""
    cache_key = normalize_key(name)
    if cache_key not in caches.engines:
        cur.execute(
            """
            INSERT INTO vehicles.engine (name)
            VALUES (%s)
            ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name
            RETURNING id
            """,
            (name,),
        )
        caches.engines[cache_key] = cur.fetchone()[0]
    return caches.engines[cache_key]
|
||||
|
||||
|
||||
def link_trim_engine(cur: "psycopg.Cursor", trim_id: int, engine_id: int) -> None:
    """Associate a trim with an engine; an existing link is silently kept."""
    params = (trim_id, engine_id)
    cur.execute(
        """
        INSERT INTO vehicles.trim_engine (trim_id, engine_id)
        VALUES (%s, %s)
        ON CONFLICT (trim_id, engine_id) DO NOTHING
        """,
        params,
    )
|
||||
|
||||
|
||||
def process_documents(cur: "psycopg.Cursor", documents: List[Tuple[str, dict]], stats: "LoaderStats") -> None:
    """Walk every JSON document and upsert makes, models, years, trims, engines.

    Each document maps make name -> list of ``{"year": ..., "models": [...]}``
    entries; each model carries ``name``, ``engines`` and optional
    ``submodels`` (falling back to the model name as its single trim).
    Malformed entries are counted in *stats* and skipped rather than aborting
    the load.
    """
    caches = LoaderCaches()

    for filename, payload in documents:
        if not isinstance(payload, dict):
            stats.bump("skipped_files_invalid_root")
            # BUG FIX: previously printed the literal "(unknown)" — `filename`
            # was never interpolated, leaving the warning useless.
            print(f"[WARN] Skipping {filename}: root is not an object")
            continue

        for make_key, year_entries in payload.items():
            make_name = clean_label(make_key)
            if not make_name:
                stats.bump("skipped_makes_invalid_name")
                continue
            make_id = upsert_make(cur, caches, make_name)
            stats.bump("makes")

            for year_entry in year_entries or []:
                # Robustness: a non-object year entry previously crashed with
                # AttributeError on .get(); count and skip it instead.
                if not isinstance(year_entry, dict):
                    stats.bump("skipped_years_invalid")
                    continue
                try:
                    year = int(year_entry.get("year"))
                except (TypeError, ValueError):
                    stats.bump("skipped_years_invalid")
                    continue

                for model in year_entry.get("models") or []:
                    model_name = clean_label(model.get("name", ""))
                    if not model_name:
                        stats.bump("skipped_models_invalid_name")
                        continue

                    engine_names = unique_labels(model.get("engines") or [])
                    if not engine_names:
                        stats.bump("skipped_models_missing_engines")
                        continue

                    # No submodels listed: treat the model itself as one trim.
                    trim_names = unique_labels(model.get("submodels") or []) or [model_name]

                    model_id = upsert_model(cur, caches, make_id, model_name)
                    model_year_id = upsert_model_year(cur, caches, model_id, year)
                    stats.bump("model_years")

                    trim_ids: List[int] = []
                    for trim_name in trim_names:
                        trim_ids.append(upsert_trim(cur, caches, model_year_id, trim_name))
                        stats.bump("trims")

                    # Cross-link every engine with every trim of this model-year.
                    for engine_name in engine_names:
                        engine_id = upsert_engine(cur, caches, engine_name)
                        stats.bump("engines")
                        for trim_id in trim_ids:
                            link_trim_engine(cur, trim_id, engine_id)
                            stats.bump("trim_engine_links")
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: parse args, read the JSON data, load it into Postgres.

    Unless ``--skip-truncate`` was given, the lookup tables are emptied first;
    transmissions come from ``--transmissions`` (or the defaults). A summary
    of loader counters is printed at the end.
    """
    args = parse_args()
    data_dir = Path(args.data_dir).expanduser().resolve()
    documents = load_json_documents(data_dir)
    conninfo = build_conninfo(args)
    transmissions = unique_labels(args.transmissions.split(","))

    with psycopg.connect(conninfo) as conn, conn.cursor() as cur:
        if not args.skip_truncate:
            truncate_lookup_tables(cur)
        ensure_transmissions(cur, transmissions or DEFAULT_TRANSMISSIONS)
        stats = LoaderStats()
        process_documents(cur, documents, stats)

    print("\nVehicle lookup data load completed.")
    for stat_name, count in sorted(stats.as_dict().items()):
        print(f" {stat_name}: {count}")
    print(f"\nProcessed directory: {data_dir}")
|
||||
|
||||
|
||||
# Script entry point: run the loader only when executed directly.
if __name__ == "__main__":
    main()
|
||||
new file: scripts/run-data-load.sh (30 lines)
|
||||
#!/usr/bin/env bash
# Run the vehicle lookup-data loader inside a throwaway python:3.12 container.
# The container starts on the backend network (internet access for pip),
# installs psycopg, then joins the database network before running the loader.
#
# BUG FIX: the original had no error handling — if pip install, the network
# connect, or the loader itself failed, the `sleep infinity` container was
# leaked forever (`--rm` only fires on container exit, which never happens,
# and the trailing `docker rm -f` was never reached). `set -e` plus an EXIT
# trap guarantees cleanup on every path.
set -euo pipefail

container=vehicle-loader-$(date +%s)

# Always remove the container, even when a step below fails.
cleanup() {
    docker rm -f "$container" >/dev/null 2>&1 || true
}
trap cleanup EXIT

# Start on backend network for outbound internet access
docker run -d --rm \
    --name "$container" \
    --network motovaultpro_backend \
    -v "$PWD":/workspace \
    -w /workspace \
    -e PGPASSWORD="$(cat secrets/app/postgres-password.txt)" \
    python:3.12-slim \
    sleep infinity

# Install psycopg before touching the DB network
docker exec "$container" bash -lc "pip install psycopg[binary] >/tmp/pip.log"

# Now attach to the database network so Postgres is reachable
docker network connect motovaultpro_database "$container"

# Run the loader
docker exec "$container" bash -lc "
    python scripts/load_vehicle_data.py \
        --db-host mvp-postgres \
        --db-port 5432 \
        --db-user postgres \
        --db-name motovaultpro \
        --data-dir data/make-models
"
|
||||
Reference in New Issue
Block a user