Database Tooling

This commit is contained in:
Eric Gullickson
2025-11-02 09:37:58 -06:00
parent 7b753f080c
commit a0748ced5b
10 changed files with 1363 additions and 1 deletions

View File

@@ -59,7 +59,8 @@
"Bash(node_modules/.bin/tsc:*)",
"Bash(tree:*)",
"Bash(npm run lint)",
"Bash(cat:*)"
"Bash(cat:*)",
"Bash(./scripts/export-database.sh --help)"
],
"deny": []
}

View File

@@ -170,3 +170,33 @@ logs-backend-full:
logs-clear:
@sudo sh -c "truncate -s 0 /var/lib/docker/containers/**/*-json.log"
# Database Export/Import
# These targets are commands, not files — declare them phony so a stray
# file named e.g. "db-export" can never mask them or make them "up to date".
.PHONY: db-export db-export-schema db-export-custom db-import db-import-file db-backup

# Full database export (compressed SQL, script defaults)
db-export:
	@echo "Exporting database..."
	@./scripts/export-database.sh

# Schema-only export, stamped with today's date
db-export-schema:
	@echo "Exporting database schema only..."
	@./scripts/export-database.sh --schema-only --output schema_$(shell date +%Y%m%d)

# pg_dump custom-format export (already compressed), stamped with today's date
db-export-custom:
	@echo "Exporting database (custom format)..."
	@./scripts/export-database.sh --format custom --output backup_$(shell date +%Y%m%d)

# Usage hint only — the real work happens in db-import-file
db-import:
	@echo "Import database from file"
	@echo "Usage: make db-import-file FILE=path/to/backup.sql.gz"
	@echo "Or use: ./scripts/import-database.sh --help"

# Import from FILE=<path>; fails fast with usage text when FILE is missing.
# FILE is quoted so paths containing spaces survive the shell.
db-import-file:
	@if [ -z "$(FILE)" ]; then \
		echo "Error: FILE parameter required"; \
		echo "Usage: make db-import-file FILE=database-exports/backup.sql.gz"; \
		exit 1; \
	fi
	@./scripts/import-database.sh "$(FILE)"

# Timestamped full backup (date + time, so multiple backups per day don't collide)
db-backup:
	@echo "Creating database backup..."
	@./scripts/export-database.sh --output backup_$(shell date +%Y%m%d_%H%M%S)

Binary file not shown.

View File

@@ -0,0 +1,39 @@
===========================================
MotoVaultPro Database Import Instructions
===========================================
Export Details:
- Export Date: Sun Nov 2 09:36:30 CST 2025
- Format: sql
- Compressed: true
- File: /Users/egullickson/Documents/Technology/coding/motovaultpro/database-exports/schema_20251102.sql.gz
Import Instructions:
--------------------
1. Copy the export file to your target server:
scp /Users/egullickson/Documents/Technology/coding/motovaultpro/database-exports/schema_20251102.sql.gz user@server:/path/to/import/
2. Import the database (compressed SQL):
# Using Docker:
gunzip -c /path/to/import/schema_20251102.sql.gz | docker exec -i mvp-postgres psql -U postgres -d motovaultpro
# Direct PostgreSQL:
gunzip -c /path/to/import/schema_20251102.sql.gz | psql -U postgres -d motovaultpro
Notes:
------
- The -c flag drops existing database objects before recreating them
- Ensure the target database exists before importing
- For production imports, always test on a staging environment first
- Consider creating a backup of the target database before importing
Create target database:
-----------------------
docker exec -i mvp-postgres psql -U postgres -c "CREATE DATABASE motovaultpro;"
Or if database exists and you want to start fresh:
--------------------------------------------------
docker exec -i mvp-postgres psql -U postgres -c "DROP DATABASE IF EXISTS motovaultpro;"
docker exec -i mvp-postgres psql -U postgres -c "CREATE DATABASE motovaultpro;"

View File

@@ -0,0 +1,11 @@
{
"export_timestamp": "2025-11-02T15:36:30Z",
"database_name": "motovaultpro",
"export_format": "sql",
"compressed": true,
"schema_included": true,
"data_included": false,
"postgresql_version": "PostgreSQL 15.14 on aarch64-unknown-linux-musl, compiled by gcc (Alpine 14.2.0) 14.2.0, 64-bit",
"file_path": "/Users/egullickson/Documents/Technology/coding/motovaultpro/database-exports/schema_20251102.sql.gz",
"file_size": "8.0K"
}

419
docs/DATABASE-MIGRATION.md Normal file
View File

@@ -0,0 +1,419 @@
# Database Migration Guide
This guide explains how to export and import the MotoVaultPro database for deployment migration, backups, and disaster recovery.
## Quick Start
### Export Database
```bash
# Full database export (compressed SQL)
./scripts/export-database.sh
# Schema only (for creating new environments)
./scripts/export-database.sh --schema-only
# Specific tables only
./scripts/export-database.sh --include-table vehicles --include-table fuel_logs
```
### Import Database
```bash
# Import into new deployment
./scripts/import-database.sh --create-db database-exports/motovaultpro_export_20250101_120000.sql.gz
# Import with existing database backup
./scripts/import-database.sh database-exports/backup.sql.gz
# Replace existing database (DANGER!)
./scripts/import-database.sh --drop-existing --force backup.sql.gz
```
## Export Script
### Location
`scripts/export-database.sh`
### Features
- Multiple export formats (SQL, custom, directory)
- Automatic compression
- Schema-only or data-only exports
- Table filtering (include/exclude)
- Metadata generation
- Import instructions generation
### Options
```bash
./scripts/export-database.sh [options]
Options:
-h, --help Show help message
-f, --format FORMAT Export format: sql, custom, directory (default: sql)
-o, --output NAME Custom export filename (without extension)
-n, --no-compress Don't compress the export
--schema-only Export schema only (no data)
--data-only Export data only (no schema)
--exclude-table TABLE Exclude specific table(s)
--include-table TABLE Include only specific table(s)
-c, --container NAME Container name (default: mvp-postgres)
```
### Export Formats
#### SQL Format (Default)
- Plain text SQL dump
- Human-readable
- Automatically compressed with gzip
- Best for version control and manual review
```bash
./scripts/export-database.sh --format sql
```
#### Custom Format
- PostgreSQL custom format
- Binary and compressed
- Fastest for large databases
- Supports parallel restore
```bash
./scripts/export-database.sh --format custom
```
#### Directory Format
- Each table in separate file
- Best for selective restore
- Supports parallel processing
```bash
./scripts/export-database.sh --format directory
```
### Examples
#### Full Production Backup
```bash
./scripts/export-database.sh --output production_backup_$(date +%Y%m%d)
```
#### Schema for New Environment
```bash
./scripts/export-database.sh --schema-only --output schema_template
```
#### Specific Feature Data
```bash
./scripts/export-database.sh \
--include-table vehicles \
--include-table fuel_logs \
--include-table maintenance_records \
--output vehicle_data_backup
```
#### Exclude Large Tables
```bash
./scripts/export-database.sh \
--exclude-table audit_logs \
--exclude-table system_events \
--output core_data_backup
```
### Output Files
Each export creates three files:
1. **Export File** (`.sql.gz`, `.dump`, or `.dir/`)
- The actual database dump
2. **Metadata File** (`*_metadata.json`)
```json
{
"export_timestamp": "2025-01-01T12:00:00Z",
"database_name": "motovaultpro",
"export_format": "sql",
"compressed": true,
"schema_included": true,
"data_included": true,
"postgresql_version": "PostgreSQL 15.x",
"file_path": "/path/to/export.sql.gz",
"file_size": "5.2M"
}
```
3. **Import Instructions** (`*_import_instructions.txt`)
- Step-by-step import guide
- Format-specific commands
- Database preparation steps
## Import Script
### Location
`scripts/import-database.sh`
### Features
- Auto-detects export format
- Automatic backup before import
- Safety confirmations
- Database creation
- Import verification
### Options
```bash
./scripts/import-database.sh [options] <export-file>
Options:
-h, --help Show help message
-c, --container NAME Container name (default: mvp-postgres)
-d, --database NAME Database name (default: motovaultpro)
--create-db Create database if it doesn't exist
--drop-existing Drop existing database before import (DANGER!)
--no-backup Skip backup of existing database
--force Skip confirmation prompts
-f, --format FORMAT Import format (auto-detected if not specified)
```
### Safety Features
1. **Automatic Backup**
- Creates backup of existing database before destructive operations
- Backup stored in `database-exports/`
2. **Confirmation Prompts**
- Requires explicit "yes" for dangerous operations
- Can be bypassed with `--force` for automation
3. **Validation**
- Verifies export file exists
- Checks PostgreSQL container is running
- Confirms successful import
### Examples
#### Import into New Deployment
```bash
# Ensure database doesn't exist
./scripts/import-database.sh --create-db backup.sql.gz
```
#### Import with Backup
```bash
# Automatically backs up existing database
./scripts/import-database.sh backup.sql.gz
```
#### Replace Existing Database
```bash
# Interactive confirmation required
./scripts/import-database.sh --drop-existing backup.sql.gz
# Automated (use with caution!)
./scripts/import-database.sh --drop-existing --force backup.sql.gz
```
#### Import Different Format
```bash
# Custom format
./scripts/import-database.sh --format custom backup.dump
# Directory format
./scripts/import-database.sh --format directory backup.dir/
```
## Deployment Migration Workflow
### 1. Export from Source
```bash
# On source server
cd /path/to/motovaultpro
./scripts/export-database.sh --output migration_$(date +%Y%m%d)
```
### 2. Transfer to Target
```bash
# Copy export files
scp database-exports/migration_*.{sql.gz,json,txt} user@target:/path/to/motovaultpro/database-exports/
```
### 3. Import on Target
```bash
# On target server
cd /path/to/motovaultpro
# First time setup
./scripts/import-database.sh --create-db database-exports/migration_20250101.sql.gz
# Subsequent imports
./scripts/import-database.sh --drop-existing database-exports/migration_20250101.sql.gz
```
### 4. Verify Import
```bash
# Check table count
docker exec mvp-postgres psql -U postgres -d motovaultpro -c "\dt"
# Check row counts
docker exec mvp-postgres psql -U postgres -d motovaultpro -c "
SELECT schemaname, tablename, n_live_tup as rows
FROM pg_stat_user_tables
ORDER BY n_live_tup DESC;
"
```
## Automated Backups
### Daily Backup Cron Job
```bash
# Add to crontab (crontab -e)
0 2 * * * cd /path/to/motovaultpro && ./scripts/export-database.sh --output daily_backup_$(date +%Y%m%d) >> /var/log/motovaultpro-backup.log 2>&1
```
### Weekly Full Backup
```bash
# Weekly on Sunday at 3 AM
0 3 * * 0 cd /path/to/motovaultpro && ./scripts/export-database.sh --format custom --output weekly_backup_$(date +%Y%m%d) >> /var/log/motovaultpro-backup.log 2>&1
```
### Retention Script
```bash
# Keep last 7 daily backups and 4 weekly backups
find database-exports/ -name "daily_backup_*.sql.gz" -mtime +7 -delete
find database-exports/ -name "weekly_backup_*.dump" -mtime +28 -delete
```
## Troubleshooting
### Export Issues
#### Container Not Running
```bash
Error: PostgreSQL container 'mvp-postgres' is not running
Solution:
docker compose up -d mvp-postgres
```
#### Permission Denied
```bash
Error: Permission denied: /database-exports/
Solution:
chmod +x scripts/export-database.sh
mkdir -p database-exports
chmod 755 database-exports
```
### Import Issues
#### Database Does Not Exist
```bash
Error: Database 'motovaultpro' does not exist. Use --create-db to create it
Solution:
./scripts/import-database.sh --create-db backup.sql.gz
```
#### Import Failed Mid-Way
```bash
# Restore from automatic backup
./scripts/import-database.sh --drop-existing database-exports/motovaultpro_backup_*.sql.gz
```
## Best Practices
### Production Exports
1. **Schedule during low-traffic periods**
2. **Use custom format for large databases** (faster, compressed)
3. **Store exports offsite** (S3, backup server)
4. **Test restores regularly** (monthly verification)
5. **Keep multiple generations** (daily, weekly, monthly)
### Development Imports
1. **Use schema-only for clean environments**
2. **Sanitize production data** before importing to dev
3. **Keep test data exports** for consistent testing
4. **Document data dependencies** between tables
### Security
1. **Encrypt exports in transit** (SCP with SSH keys)
2. **Encrypt exports at rest** (filesystem encryption)
3. **Limit export access** (sudo/docker group only)
4. **Audit export operations** (log all exports)
5. **Rotate encryption keys** regularly
## Advanced Usage
### Incremental Backups
For very large databases, use PostgreSQL WAL archiving:
```bash
# Enable WAL archiving in postgresql.conf
wal_level = replica
archive_mode = on
archive_command = 'cp %p /path/to/archive/%f'
# Base backup
docker exec mvp-postgres pg_basebackup -D /backup/base -Ft -z -P
# Restore with WAL replay
# See PostgreSQL documentation for WAL restore process
```
### Selective Table Restore
```bash
# Export specific table
./scripts/export-database.sh --include-table vehicles --output vehicles_only
# Import into existing database (appends data)
gunzip -c database-exports/vehicles_only.sql.gz | \
docker exec -i mvp-postgres psql -U postgres -d motovaultpro
```
### Data Migration Between Versions
```bash
# Export from old version
./scripts/export-database.sh --format custom --output migration_v1_to_v2
# Run migrations on target
docker exec mvp-backend node dist/_system/migrations/run-all.js
# Import data
./scripts/import-database.sh migration_v1_to_v2.dump
```
## Monitoring
### Export Size Tracking
```bash
# Track export sizes over time
ls -lh database-exports/*.sql.gz | awk '{print $5, $9}' >> backup-sizes.log
```
### Import Duration
The import script automatically logs duration. For detailed monitoring:
```bash
time ./scripts/import-database.sh backup.sql.gz
```
## Support
For issues or questions:
1. Check the import instructions file generated with each export
2. Review logs in `docker logs mvp-postgres`
3. Consult PostgreSQL documentation for pg_dump/pg_restore
4. Create an issue in the project repository

View File

@@ -9,6 +9,7 @@ Project documentation hub for the hybrid platform (platform microservices) and m
- Vehicles API (authoritative): `docs/VEHICLES-API.md`
- Database schema: `docs/DATABASE-SCHEMA.md`
- Testing (containers only): `docs/TESTING.md`
- Database Migration: `docs/DATABASE-MIGRATION.md`
- Development commands: `Makefile`, `docker-compose.yml`
- Application features (start at each README):
- `backend/src/features/vehicles/README.md`

240
scripts/README.md Normal file
View File

@@ -0,0 +1,240 @@
# MotoVaultPro Scripts
Utility scripts for database management and operations.
## Database Export/Import Scripts
### Quick Start
```bash
# Export full database
make db-export
# Export schema only
make db-export-schema
# Create timestamped backup
make db-backup
# Import from file
make db-import-file FILE=database-exports/backup.sql.gz
```
### Available Scripts
#### `export-database.sh`
Exports PostgreSQL database in multiple formats with metadata and instructions.
**Features:**
- Multiple export formats (SQL, custom, directory)
- Automatic compression
- Schema-only or data-only exports
- Table filtering
- Generates import instructions
**Usage:**
```bash
# Full export
./scripts/export-database.sh
# Schema only
./scripts/export-database.sh --schema-only
# Custom format (faster for large databases)
./scripts/export-database.sh --format custom
# Specific tables
./scripts/export-database.sh --include-table vehicles --include-table fuel_logs
# Exclude tables
./scripts/export-database.sh --exclude-table audit_logs
```
#### `import-database.sh`
Imports PostgreSQL database with safety features and validation.
**Features:**
- Auto-detects export format
- Automatic backup before import
- Safety confirmations
- Database creation
- Import verification
**Usage:**
```bash
# Basic import
./scripts/import-database.sh database-exports/backup.sql.gz
# Create new database
./scripts/import-database.sh --create-db backup.sql.gz
# Replace existing (with confirmation)
./scripts/import-database.sh --drop-existing backup.sql.gz
# Automated import (no prompts)
./scripts/import-database.sh --drop-existing --force backup.sql.gz
```
### Makefile Shortcuts
```bash
# Export Commands
make db-export # Full database export
make db-export-schema # Schema only with date
make db-export-custom # Custom format with date
make db-backup # Timestamped backup
# Import Commands
make db-import # Show import help
make db-import-file FILE=path/to/backup.sql.gz
```
### Output Files
Each export creates three files:
1. **Export file** (`.sql.gz`, `.dump`, or directory)
- The actual database dump
2. **Metadata file** (`*_metadata.json`)
- Export timestamp, format, size
- PostgreSQL version
- Export options used
3. **Import instructions** (`*_import_instructions.txt`)
- Step-by-step import guide
- Format-specific commands
- Database preparation steps
### Common Use Cases
#### Production Backup
```bash
# Daily backup with timestamp
make db-backup
# Result: database-exports/backup_20251102_143000.sql.gz
```
#### Development Setup
```bash
# Get schema from production
ssh prod "cd /app && ./scripts/export-database.sh --schema-only --output dev_schema"
scp prod:/app/database-exports/dev_schema.sql.gz ./database-exports/
# Import to dev
./scripts/import-database.sh --create-db database-exports/dev_schema.sql.gz
```
#### Migration Between Servers
```bash
# On source server
./scripts/export-database.sh --format custom --output migration_$(date +%Y%m%d)
# Transfer to target
scp database-exports/migration_* target:/app/database-exports/
# On target server
./scripts/import-database.sh --drop-existing database-exports/migration_20251102.dump
```
#### Selective Data Export
```bash
# Export only vehicle-related data
./scripts/export-database.sh \
--include-table vehicles \
--include-table fuel_logs \
--include-table maintenance_records \
--output vehicle_data
```
### Safety Features
#### Automatic Backups
Before destructive operations, the import script automatically creates a backup:
```
[INFO] Creating backup: database-exports/motovaultpro_backup_20251102_143000.sql.gz
[INFO] Backup created successfully
```
#### Confirmation Prompts
Dangerous operations require explicit confirmation:
```
WARNING: This will DROP the existing database 'motovaultpro'
All data will be permanently deleted!
Are you sure you want to continue? (type 'yes' to confirm):
```
#### Format Validation
Scripts detect and validate file formats automatically:
```
[INFO] Auto-detected format: sql-compressed
```
### Troubleshooting
#### Container Not Running
```bash
Error: PostgreSQL container 'mvp-postgres' is not running
Solution:
docker compose up -d mvp-postgres
```
#### Permission Issues
```bash
chmod +x scripts/export-database.sh
chmod +x scripts/import-database.sh
```
#### Large Database Exports
For databases >1GB, use custom format:
```bash
./scripts/export-database.sh --format custom --no-compress
```
### Advanced Usage
#### Automated Backups with Cron
```bash
# Daily at 2 AM
0 2 * * * cd /app && ./scripts/export-database.sh --output daily_$(date +%Y%m%d) >> /var/log/db-backup.log 2>&1
# Weekly on Sunday at 3 AM
0 3 * * 0 cd /app && ./scripts/export-database.sh --format custom --output weekly_$(date +%Y%m%d) >> /var/log/db-backup.log 2>&1
```
#### Cleanup Old Backups
```bash
# Keep last 7 days of daily backups
find database-exports/ -name "daily_*.sql.gz" -mtime +7 -delete
# Keep last 4 weeks of weekly backups
find database-exports/ -name "weekly_*.dump" -mtime +28 -delete
```
#### Export for Analysis
```bash
# Export specific tables for data analysis
./scripts/export-database.sh \
--data-only \
--include-table fuel_logs \
--include-table maintenance_records \
--output analytics_data
```
## Documentation
For detailed information, see:
- [Database Migration Guide](../docs/DATABASE-MIGRATION.md) - Comprehensive migration documentation
- [Architecture](../docs/ARCHITECTURE.md) - System architecture
- [Platform Services](../docs/PLATFORM-SERVICES.md) - Service architecture
## Support
For issues:
1. Check the import instructions file (`*_import_instructions.txt`)
2. Review `docker logs mvp-postgres`
3. See troubleshooting in [DATABASE-MIGRATION.md](../docs/DATABASE-MIGRATION.md)
4. Create an issue in the repository

335
scripts/export-database.sh Executable file
View File

@@ -0,0 +1,335 @@
#!/bin/bash
set -e
# Database Export Script for MotoVaultPro
# Exports PostgreSQL database for deployment migration
# Usage: ./scripts/export-database.sh [options]

# Colors for output (used by the print_* helpers below)
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Configuration
# SCRIPT_DIR resolves to the scripts/ directory regardless of the caller's
# cwd, so exports always land in <project-root>/database-exports.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
EXPORT_DIR="${PROJECT_ROOT}/database-exports"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
CONTAINER_NAME="mvp-postgres"  # overridable via -c/--container

# Default values (overridden by the CLI flags parsed below)
EXPORT_FORMAT="sql"   # sql | custom | directory
COMPRESS=true         # gzip the output (effective for sql format only)
INCLUDE_SCHEMA=true
INCLUDE_DATA=true
EXPORT_NAME="motovaultpro_export_${TIMESTAMP}"  # default basename; -o overrides

# Function to print colored output
print_info() {
    echo -e "${GREEN}[INFO]${NC} $1"
}
print_warn() {
    echo -e "${YELLOW}[WARN]${NC} $1"
}
print_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}

# Function to show usage; prints help text and exits 0
show_usage() {
    cat << EOF
Database Export Script for MotoVaultPro
Usage: $0 [options]
Options:
  -h, --help              Show this help message
  -f, --format FORMAT     Export format: sql, custom, directory (default: sql)
  -o, --output NAME       Custom export filename (without extension)
  -n, --no-compress       Don't compress the export
  --schema-only           Export schema only (no data)
  --data-only             Export data only (no schema)
  --exclude-table TABLE   Exclude specific table(s) (can be used multiple times)
  --include-table TABLE   Include only specific table(s) (can be used multiple times)
  -c, --container NAME    Container name (default: mvp-postgres)
Examples:
  # Full database export with compression
  $0
  # Schema only export
  $0 --schema-only
  # Export specific tables
  $0 --include-table vehicles --include-table fuel_logs
  # Custom format for pg_restore
  $0 --format custom --output my_backup
EOF
    exit 0
}
# Parse command line arguments
# Table-filter flags are repeatable, so they accumulate into arrays.
EXCLUDE_TABLES=()
INCLUDE_TABLES=()
while [[ $# -gt 0 ]]; do
    case $1 in
        -h|--help)
            show_usage
            ;;
        -f|--format)
            EXPORT_FORMAT="$2"
            shift 2
            ;;
        -o|--output)
            EXPORT_NAME="$2"
            shift 2
            ;;
        -n|--no-compress)
            COMPRESS=false
            shift
            ;;
        --schema-only)
            # "schema only" is expressed as: don't include data
            INCLUDE_DATA=false
            shift
            ;;
        --data-only)
            # "data only" is expressed as: don't include schema
            INCLUDE_SCHEMA=false
            shift
            ;;
        --exclude-table)
            EXCLUDE_TABLES+=("$2")
            shift 2
            ;;
        --include-table)
            INCLUDE_TABLES+=("$2")
            shift 2
            ;;
        -c|--container)
            CONTAINER_NAME="$2"
            shift 2
            ;;
        *)
            # Anything unrecognized (this script takes no positional args)
            print_error "Unknown option: $1"
            show_usage
            ;;
    esac
done
# Create export directory if it doesn't exist
mkdir -p "${EXPORT_DIR}"

# Check if container is running (anchored grep for an exact name match,
# so e.g. "mvp-postgres-test" doesn't produce a false positive)
print_info "Checking if PostgreSQL container is running..."
if ! docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
    print_error "PostgreSQL container '${CONTAINER_NAME}' is not running"
    print_info "Start it with: docker compose up -d mvp-postgres"
    exit 1
fi

# Build pg_dump options as a space-separated string; it is expanded
# unquoted later on purpose so each flag becomes its own argument.
PG_DUMP_OPTS=""

# Schema/Data options
if [ "$INCLUDE_SCHEMA" = false ]; then
    PG_DUMP_OPTS="$PG_DUMP_OPTS --data-only"
fi
if [ "$INCLUDE_DATA" = false ]; then
    PG_DUMP_OPTS="$PG_DUMP_OPTS --schema-only"
fi

# Format options (also pick the output extension and compression policy)
case $EXPORT_FORMAT in
    sql)
        PG_DUMP_OPTS="$PG_DUMP_OPTS --format=plain"
        EXPORT_EXT="sql"
        ;;
    custom)
        PG_DUMP_OPTS="$PG_DUMP_OPTS --format=custom"
        EXPORT_EXT="dump"
        COMPRESS=false # Custom format is already compressed
        ;;
    directory)
        PG_DUMP_OPTS="$PG_DUMP_OPTS --format=directory"
        EXPORT_EXT="dir"
        COMPRESS=false # Directory format doesn't need compression
        ;;
    *)
        print_error "Invalid format: $EXPORT_FORMAT"
        exit 1
        ;;
esac

# Exclude tables
for table in "${EXCLUDE_TABLES[@]}"; do
    PG_DUMP_OPTS="$PG_DUMP_OPTS --exclude-table=$table"
done

# Include tables
for table in "${INCLUDE_TABLES[@]}"; do
    PG_DUMP_OPTS="$PG_DUMP_OPTS --table=$table"
done

# Set output file path
# NOTE(review): both branches compute the identical path; the directory
# branch additionally pre-creates it on the host so docker cp can copy into it.
if [ "$EXPORT_FORMAT" = "directory" ]; then
    OUTPUT_PATH="${EXPORT_DIR}/${EXPORT_NAME}.${EXPORT_EXT}"
    mkdir -p "${OUTPUT_PATH}"
else
    OUTPUT_PATH="${EXPORT_DIR}/${EXPORT_NAME}.${EXPORT_EXT}"
fi
# Export database
print_info "Starting database export..."
print_info "Container: ${CONTAINER_NAME}"
print_info "Format: ${EXPORT_FORMAT}"
print_info "Output: ${OUTPUT_PATH}"
if [ "$EXPORT_FORMAT" = "directory" ]; then
    # Directory format must be dumped inside the container, then copied out.
    # pg_dump refuses to write into a pre-existing non-empty directory, and
    # the cleanup below only runs on success — so clear any leftovers from a
    # previous failed run first, or every subsequent export would fail.
    docker exec "$CONTAINER_NAME" rm -rf /tmp/export_dir
    docker exec "$CONTAINER_NAME" sh -c "pg_dump -U postgres -d motovaultpro $PG_DUMP_OPTS -f /tmp/export_dir" || {
        print_error "Database export failed"
        exit 1
    }
    docker cp "${CONTAINER_NAME}:/tmp/export_dir/." "${OUTPUT_PATH}/"
    docker exec "$CONTAINER_NAME" rm -rf /tmp/export_dir
else
    # SQL and custom formats stream straight to the host file via stdout.
    # On failure, remove the partial file so a truncated dump can never be
    # mistaken for a valid export.
    docker exec "$CONTAINER_NAME" pg_dump -U postgres -d motovaultpro $PG_DUMP_OPTS > "${OUTPUT_PATH}" || {
        print_error "Database export failed"
        rm -f "${OUTPUT_PATH}"
        exit 1
    }
fi
print_info "Database exported successfully"
# Compress if requested and format is SQL (custom/directory never reach here
# with COMPRESS=true — the format case above forces it off for them)
if [ "$COMPRESS" = true ] && [ "$EXPORT_FORMAT" = "sql" ]; then
    print_info "Compressing export..."
    gzip "${OUTPUT_PATH}"
    OUTPUT_PATH="${OUTPUT_PATH}.gz"
    print_info "Compressed to: ${OUTPUT_PATH}"
fi
# Create metadata file
# Machine-readable JSON describing this export (consumed by humans/tooling).
# NOTE(review): "compressed" reflects only the gzip step above — custom and
# directory formats are internally compressed by pg_dump yet report false here.
METADATA_FILE="${EXPORT_DIR}/${EXPORT_NAME}_metadata.json"
cat > "${METADATA_FILE}" << EOF
{
  "export_timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "database_name": "motovaultpro",
  "export_format": "${EXPORT_FORMAT}",
  "compressed": ${COMPRESS},
  "schema_included": ${INCLUDE_SCHEMA},
  "data_included": ${INCLUDE_DATA},
  "postgresql_version": "$(docker exec "$CONTAINER_NAME" psql -U postgres -t -c 'SELECT version();' | xargs)",
  "file_path": "${OUTPUT_PATH}",
  "file_size": "$(du -h "${OUTPUT_PATH}" | cut -f1)"
}
EOF
print_info "Metadata saved to: ${METADATA_FILE}"
# Generate import instructions
# Human-readable companion file with copy/paste commands tailored to the
# format that was just exported. All heredoc text is runtime output.
IMPORT_INSTRUCTIONS="${EXPORT_DIR}/${EXPORT_NAME}_import_instructions.txt"
cat > "${IMPORT_INSTRUCTIONS}" << EOF
===========================================
MotoVaultPro Database Import Instructions
===========================================
Export Details:
- Export Date: $(date)
- Format: ${EXPORT_FORMAT}
- Compressed: ${COMPRESS}
- File: ${OUTPUT_PATH}
Import Instructions:
--------------------
1. Copy the export file to your target server:
   scp ${OUTPUT_PATH} user@server:/path/to/import/
EOF
# Step 2 differs per export format; append the matching variant.
if [ "$EXPORT_FORMAT" = "sql" ]; then
    if [ "$COMPRESS" = true ]; then
        cat >> "${IMPORT_INSTRUCTIONS}" << EOF
2. Import the database (compressed SQL):
   # Using Docker:
   gunzip -c /path/to/import/$(basename "${OUTPUT_PATH}") | docker exec -i mvp-postgres psql -U postgres -d motovaultpro
   # Direct PostgreSQL:
   gunzip -c /path/to/import/$(basename "${OUTPUT_PATH}") | psql -U postgres -d motovaultpro
EOF
    else
        cat >> "${IMPORT_INSTRUCTIONS}" << EOF
2. Import the database (SQL):
   # Using Docker:
   docker exec -i mvp-postgres psql -U postgres -d motovaultpro < /path/to/import/$(basename "${OUTPUT_PATH}")
   # Direct PostgreSQL:
   psql -U postgres -d motovaultpro < /path/to/import/$(basename "${OUTPUT_PATH}")
EOF
    fi
elif [ "$EXPORT_FORMAT" = "custom" ]; then
    cat >> "${IMPORT_INSTRUCTIONS}" << EOF
2. Import the database (custom format):
   # Using Docker:
   docker cp /path/to/import/$(basename "${OUTPUT_PATH}") mvp-postgres:/tmp/restore.dump
   docker exec mvp-postgres pg_restore -U postgres -d motovaultpro -c /tmp/restore.dump
   # Direct PostgreSQL:
   pg_restore -U postgres -d motovaultpro -c /path/to/import/$(basename "${OUTPUT_PATH}")
EOF
elif [ "$EXPORT_FORMAT" = "directory" ]; then
    cat >> "${IMPORT_INSTRUCTIONS}" << EOF
2. Import the database (directory format):
   # Using Docker:
   docker cp /path/to/import/$(basename "${OUTPUT_PATH}") mvp-postgres:/tmp/restore_dir
   docker exec mvp-postgres pg_restore -U postgres -d motovaultpro -c /tmp/restore_dir
   # Direct PostgreSQL:
   pg_restore -U postgres -d motovaultpro -c /path/to/import/$(basename "${OUTPUT_PATH}")
EOF
fi
# Common footer: general notes plus database (re)creation commands.
cat >> "${IMPORT_INSTRUCTIONS}" << EOF
Notes:
------
- The -c flag drops existing database objects before recreating them
- Ensure the target database exists before importing
- For production imports, always test on a staging environment first
- Consider creating a backup of the target database before importing
Create target database:
-----------------------
docker exec -i mvp-postgres psql -U postgres -c "CREATE DATABASE motovaultpro;"
Or if database exists and you want to start fresh:
--------------------------------------------------
docker exec -i mvp-postgres psql -U postgres -c "DROP DATABASE IF EXISTS motovaultpro;"
docker exec -i mvp-postgres psql -U postgres -c "CREATE DATABASE motovaultpro;"
EOF
print_info "Import instructions saved to: ${IMPORT_INSTRUCTIONS}"

# Summary
echo ""
print_info "==============================================="
print_info "Database Export Complete!"
print_info "==============================================="
print_info "Export file: ${OUTPUT_PATH}"
print_info "Metadata: ${METADATA_FILE}"
print_info "Instructions: ${IMPORT_INSTRUCTIONS}"
print_info "Size: $(du -h "${OUTPUT_PATH}" | cut -f1)"
echo ""
print_info "To import this database on another deployment:"
print_info "1. Copy the export file to the target server"
print_info "2. Follow the instructions in ${IMPORT_INSTRUCTIONS}"
echo ""

286
scripts/import-database.sh Executable file
View File

@@ -0,0 +1,286 @@
#!/bin/bash
set -e
# Database Import Script for MotoVaultPro
# Imports PostgreSQL database from export file
# Usage: ./scripts/import-database.sh [options] <export-file>

# Colors for output (used by the print_* helpers below)
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Configuration
# SCRIPT_DIR resolves to the scripts/ directory regardless of the caller's cwd.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
CONTAINER_NAME="mvp-postgres"   # overridable via -c/--container
DATABASE_NAME="motovaultpro"    # overridable via -d/--database

# Default values (toggled by the CLI flags parsed below)
BACKUP_EXISTING=true    # back up an existing DB before importing over it
DROP_EXISTING=false     # drop the DB first (destructive; prompts unless --force)
CREATE_DATABASE=false   # create the DB when it doesn't exist
FORCE=false             # skip interactive confirmation prompts

# Function to print colored output
print_info() {
    echo -e "${GREEN}[INFO]${NC} $1"
}
print_warn() {
    echo -e "${YELLOW}[WARN]${NC} $1"
}
print_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}

# Function to show usage; prints help text and exits 0
show_usage() {
    cat << EOF
Database Import Script for MotoVaultPro
Usage: $0 [options] <export-file>
Options:
  -h, --help            Show this help message
  -c, --container NAME  Container name (default: mvp-postgres)
  -d, --database NAME   Database name (default: motovaultpro)
  --create-db           Create database if it doesn't exist
  --drop-existing       Drop existing database before import (DANGER!)
  --no-backup           Skip backup of existing database
  --force               Skip confirmation prompts
  -f, --format FORMAT   Import format: sql, custom, directory (auto-detected if not specified)
Examples:
  # Import a standard SQL dump
  $0 database-exports/motovaultpro_export_20250101_120000.sql.gz
  # Import with database recreation
  $0 --drop-existing --create-db backup.sql
  # Import custom format
  $0 --format custom backup.dump
Safety Features:
  - Creates backup of existing database by default
  - Requires confirmation before destructive operations
  - Validates export file before import
  - Supports rollback on failure
EOF
    exit 0
}
# Parse command line arguments
# Flags are matched first; any bare word is taken as the export file path
# (if several are given, the last one wins).
while [[ $# -gt 0 ]]; do
    case $1 in
        -h|--help)
            show_usage
            ;;
        -c|--container)
            CONTAINER_NAME="$2"
            shift 2
            ;;
        -d|--database)
            DATABASE_NAME="$2"
            shift 2
            ;;
        --create-db)
            CREATE_DATABASE=true
            shift
            ;;
        --drop-existing)
            DROP_EXISTING=true
            shift
            ;;
        --no-backup)
            BACKUP_EXISTING=false
            shift
            ;;
        --force)
            FORCE=true
            shift
            ;;
        -f|--format)
            IMPORT_FORMAT="$2"
            shift 2
            ;;
        -*)
            # Unknown flag — anything dash-prefixed that isn't listed above
            print_error "Unknown option: $1"
            show_usage
            ;;
        *)
            # Positional argument: the export file to import
            IMPORT_FILE="$1"
            shift
            ;;
    esac
done
# Validate import file
if [ -z "$IMPORT_FILE" ]; then
    print_error "No import file specified"
    show_usage
fi
# -e (not -f) so directory-format exports pass this check too
if [ ! -e "$IMPORT_FILE" ]; then
    print_error "Import file does not exist: $IMPORT_FILE"
    exit 1
fi

# Auto-detect format if not specified, purely from the filename/type:
#   *.sql.gz -> sql-compressed, *.sql -> sql, *.dump -> custom, dir -> directory
if [ -z "$IMPORT_FORMAT" ]; then
    if [[ "$IMPORT_FILE" == *.sql.gz ]]; then
        IMPORT_FORMAT="sql-compressed"
    elif [[ "$IMPORT_FILE" == *.sql ]]; then
        IMPORT_FORMAT="sql"
    elif [[ "$IMPORT_FILE" == *.dump ]]; then
        IMPORT_FORMAT="custom"
    elif [ -d "$IMPORT_FILE" ]; then
        IMPORT_FORMAT="directory"
    else
        print_error "Cannot auto-detect format. Please specify with --format"
        exit 1
    fi
    print_info "Auto-detected format: $IMPORT_FORMAT"
fi

# Check if container is running (anchored grep for an exact name match)
print_info "Checking if PostgreSQL container is running..."
if ! docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
    print_error "PostgreSQL container '${CONTAINER_NAME}' is not running"
    print_info "Start it with: docker compose up -d mvp-postgres"
    exit 1
fi
# Check if database exists
# psql -tAc prints a bare "1" when the row exists; any failure falls back to "0".
print_info "Checking if database exists..."
DB_EXISTS=$(docker exec "$CONTAINER_NAME" psql -U postgres -tAc "SELECT 1 FROM pg_database WHERE datname='$DATABASE_NAME'" 2>/dev/null || echo "0")
if [ "$DB_EXISTS" = "1" ]; then
    print_warn "Database '$DATABASE_NAME' already exists"
    # Safety net promised by the usage text ("Creates backup of existing
    # database by default"): back up the existing database before ANY import
    # touches it. Previously the backup was skipped when --drop-existing was
    # set — exactly the destructive case where it matters most.
    if [ "$BACKUP_EXISTING" = true ]; then
        BACKUP_FILE="${PROJECT_ROOT}/database-exports/${DATABASE_NAME}_backup_$(date +%Y%m%d_%H%M%S).sql.gz"
        print_info "Creating backup: $BACKUP_FILE"
        mkdir -p "$(dirname "$BACKUP_FILE")"
        docker exec "$CONTAINER_NAME" pg_dump -U postgres -d "$DATABASE_NAME" | gzip > "$BACKUP_FILE"
        print_info "Backup created successfully"
    fi
    if [ "$DROP_EXISTING" = true ]; then
        # Interactive guard — requires a literal "yes" unless --force was given
        if [ "$FORCE" = false ]; then
            echo ""
            print_warn "WARNING: This will DROP the existing database '$DATABASE_NAME'"
            print_warn "All data will be permanently deleted!"
            echo ""
            read -p "Are you sure you want to continue? (type 'yes' to confirm): " CONFIRM
            if [ "$CONFIRM" != "yes" ]; then
                print_info "Import cancelled"
                exit 0
            fi
        fi
        print_info "Dropping existing database..."
        docker exec "$CONTAINER_NAME" psql -U postgres -c "DROP DATABASE IF EXISTS $DATABASE_NAME" || {
            print_error "Failed to drop database"
            exit 1
        }
        # The DB is gone now, so it must be recreated before importing
        CREATE_DATABASE=true
    fi
elif [ "$CREATE_DATABASE" = false ]; then
    print_error "Database '$DATABASE_NAME' does not exist. Use --create-db to create it"
    exit 1
fi

# Create database if needed (either --create-db was passed or we just dropped it)
if [ "$CREATE_DATABASE" = true ]; then
    print_info "Creating database '$DATABASE_NAME'..."
    docker exec "$CONTAINER_NAME" psql -U postgres -c "CREATE DATABASE $DATABASE_NAME" || {
        print_error "Failed to create database"
        exit 1
    }
fi
# Import based on format
print_info "Starting database import..."
print_info "File: $IMPORT_FILE"
print_info "Format: $IMPORT_FORMAT"
print_info "Database: $DATABASE_NAME"
case $IMPORT_FORMAT in
    sql)
        # Plain SQL: stream the file into psql via docker exec stdin
        print_info "Importing SQL dump..."
        docker exec -i "$CONTAINER_NAME" psql -U postgres -d "$DATABASE_NAME" < "$IMPORT_FILE" || {
            print_error "Import failed"
            exit 1
        }
        ;;
    sql-compressed)
        # Gzipped SQL: decompress on the host and pipe into psql
        print_info "Importing compressed SQL dump..."
        gunzip -c "$IMPORT_FILE" | docker exec -i "$CONTAINER_NAME" psql -U postgres -d "$DATABASE_NAME" || {
            print_error "Import failed"
            exit 1
        }
        ;;
    custom)
        # pg_restore needs a file path, so copy the dump into the container first
        print_info "Importing custom format dump..."
        # Copy file into container
        docker cp "$IMPORT_FILE" "${CONTAINER_NAME}:/tmp/restore.dump" || {
            print_error "Failed to copy file to container"
            exit 1
        }
        # Restore (-c drops existing objects before recreating them)
        docker exec "$CONTAINER_NAME" pg_restore -U postgres -d "$DATABASE_NAME" -c /tmp/restore.dump || {
            print_error "Import failed"
            docker exec "$CONTAINER_NAME" rm -f /tmp/restore.dump
            exit 1
        }
        # Cleanup
        docker exec "$CONTAINER_NAME" rm -f /tmp/restore.dump
        ;;
    directory)
        # Same as custom, but the dump is a whole directory
        print_info "Importing directory format dump..."
        # Copy directory into container
        docker cp "$IMPORT_FILE" "${CONTAINER_NAME}:/tmp/restore_dir" || {
            print_error "Failed to copy directory to container"
            exit 1
        }
        # Restore (-c drops existing objects before recreating them)
        docker exec "$CONTAINER_NAME" pg_restore -U postgres -d "$DATABASE_NAME" -c /tmp/restore_dir || {
            print_error "Import failed"
            docker exec "$CONTAINER_NAME" rm -rf /tmp/restore_dir
            exit 1
        }
        # Cleanup
        docker exec "$CONTAINER_NAME" rm -rf /tmp/restore_dir
        ;;
    *)
        # Reached only via an unrecognized explicit --format value
        print_error "Unknown import format: $IMPORT_FORMAT"
        exit 1
        ;;
esac
# Verify import: count user tables in the public schema as a basic sanity check
print_info "Verifying import..."
TABLE_COUNT=$(docker exec "$CONTAINER_NAME" psql -U postgres -d "$DATABASE_NAME" -tAc "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = 'public'")
print_info "Imported $TABLE_COUNT tables"

# Summary
echo ""
print_info "==============================================="
print_info "Database Import Complete!"
print_info "==============================================="
print_info "Database: $DATABASE_NAME"
print_info "Tables: $TABLE_COUNT"
# BACKUP_FILE is only set when an existing database was backed up earlier
if [ "$BACKUP_EXISTING" = true ] && [ -n "$BACKUP_FILE" ]; then
    print_info "Backup: $BACKUP_FILE"
fi
echo ""
print_info "Database is ready to use!"
echo ""