Compare commits

..

1 Commit

Author SHA1 Message Date
Eric Gullickson
087cf3b06c fix: Add bottom padding to document form grid container (refs #19)
All checks were successful
Deploy to Staging / Build Images (pull_request) Successful in 2m37s
Deploy to Staging / Deploy to Staging (pull_request) Successful in 27s
Deploy to Staging / Verify Staging (pull_request) Successful in 6s
Deploy to Staging / Notify Staging Ready (pull_request) Successful in 6s
Deploy to Staging / Notify Staging Failure (pull_request) Has been skipped
Added pb-4 to the grid container to create consistent spacing between
the last form field (file input) and the action buttons below.

The previous approach of adding margin to a grid item didn't work as
expected due to CSS Grid layout behavior. Adding padding to the grid
container itself ensures reliable spacing.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-04 16:57:47 -06:00
546 changed files with 3411 additions and 61733 deletions

View File

@@ -1,12 +1,11 @@
{
"version": "6.2.0",
"architecture": "9-container",
"architecture": "simplified-5-container",
"repository": {
"host": "gitea",
"owner": "egullickson",
"repo": "motovaultpro",
"url": "https://git.motovaultpro.com",
"default_branch": "main"
"url": "https://git.motovaultpro.com"
},
"ai_quick_start": {
"load_order": [
@@ -52,7 +51,7 @@
"project_overview": {
"instruction": "Start with README.md for complete architecture context",
"files": ["README.md"],
"completeness": "100% - all navigation and 9-container architecture information"
"completeness": "100% - all navigation and 5-container architecture information"
},
"application_feature_work": {
"instruction": "Load entire application feature directory (features are modules within backend)",
@@ -105,26 +104,6 @@
"type": "cache",
"description": "Redis cache with AOF persistence",
"port": 6379
},
"mvp-ocr": {
"type": "ocr_service",
"description": "Python OCR service with pluggable engine abstraction (PaddleOCR PP-OCRv4 primary, optional Google Vision cloud fallback, Tesseract backward compat)",
"port": 8000
},
"mvp-loki": {
"type": "log_aggregation",
"description": "Grafana Loki for centralized log storage (30-day retention)",
"port": 3100
},
"mvp-alloy": {
"type": "log_collector",
"description": "Grafana Alloy for log collection and forwarding to Loki",
"port": 12345
},
"mvp-grafana": {
"type": "log_visualization",
"description": "Grafana for log querying and visualization",
"port": 3000
}
},
"application_features": {
@@ -311,6 +290,6 @@
"single_tenant_architecture": true,
"simplified_deployment": true,
"docker_first_development": true,
"container_count": 9
"container_count": 5
}
}

View File

@@ -40,79 +40,46 @@
"When moving status, remove the previous status/* label first."
]
},
"sub_issues": {
"when": "Multi-file features (3+ files) or features that benefit from smaller AI context windows.",
"parent_issue": "The original feature issue. Tracks overall status. Only the parent gets status label transitions.",
"sub_issue_title_format": "{type}: {summary} (#{parent_index})",
"sub_issue_body": "First line must be 'Relates to #{parent_index}'. Each sub-issue is a self-contained unit of work.",
"sub_issue_labels": "status/in-progress + same type/* as parent. Sub-issues move to in-progress as they are worked on.",
"sub_issue_milestone": "Same sprint milestone as parent.",
"rules": [
"ONE branch for the parent issue. Never create branches per sub-issue.",
"ONE PR for the parent issue. The PR closes the parent and all sub-issues.",
"Commits reference the specific sub-issue index they implement.",
"Sub-issues should be small enough to fit in a single AI context window.",
"Plan milestones map 1:1 to sub-issues."
],
"examples": {
"parent": "#105 'feat: Add Grafana dashboards and alerting'",
"sub_issues": [
"#106 'feat: Grafana dashboard provisioning infrastructure (#105)'",
"#107 'feat: Application Overview Grafana dashboard (#105)'"
]
}
},
"branching": {
"branch_format": "issue-{parent_index}-{slug}",
"branch_format": "issue-{index}-{slug}",
"target_branch": "main",
"note": "Always use the parent issue index. When sub-issues exist, the branch is for the parent.",
"examples": [
"issue-42-add-fuel-efficiency-report (standalone issue)",
"issue-105-add-grafana-dashboards (parent issue with sub-issues #106-#111)"
]
"example": "issue-42-add-fuel-efficiency-report"
},
"commit_conventions": {
"message_format": "{type}: {short summary} (refs #{index})",
"allowed_types": ["feat", "fix", "chore", "docs", "refactor", "test"],
"note": "When working on a sub-issue, {index} is the sub-issue number. For standalone issues, {index} is the issue number.",
"examples": [
"feat: add fuel efficiency calculation (refs #42)",
"fix: correct VIN validation for pre-1981 vehicles (refs #1)",
"feat: add dashboard provisioning infrastructure (refs #106)",
"feat: add API performance dashboard (refs #108)"
"fix: correct VIN validation for pre-1981 vehicles (refs #1)"
]
},
"pull_requests": {
"title_format": "{type}: {summary} (#{parent_index})",
"note": "PR title always uses the parent issue index.",
"title_format": "{type}: {summary} (#{index})",
"body_requirements": [
"Link parent issue using 'Fixes #{parent_index}'.",
"Link all sub-issues using 'Fixes #{sub_index}' on separate lines.",
"Link issue(s) using 'Fixes #123' or 'Relates to #123'.",
"Include test plan and results.",
"Confirm acceptance criteria completion."
],
"body_example": "Fixes #105\nFixes #106\nFixes #107\nFixes #108\nFixes #109\nFixes #110\nFixes #111",
"merge_policy": "squash_or_rebase_ok",
"template_location": ".gitea/PULL_REQUEST_TEMPLATE.md"
},
"execution_loop": [
"List repo issues in current sprint milestone with status/ready; if none, pull from status/backlog and promote the best candidate to status/ready.",
"Select one issue (prefer smallest size and highest priority).",
"Move parent issue to status/in-progress.",
"Move issue to status/in-progress.",
"[SKILL] Codebase Analysis if unfamiliar area.",
"[SKILL] Problem Analysis if complex problem.",
"[SKILL] Decision Critic if uncertain approach.",
"If multi-file feature (3+ files): decompose into sub-issues per sub_issues rules. Each sub-issue = one plan milestone.",
"[SKILL] Planner writes plan as parent issue comment. Plan milestones map 1:1 to sub-issues.",
"[SKILL] Planner writes plan as issue comment.",
"[SKILL] Plan review cycle: QR plan-completeness -> TW plan-scrub -> QR plan-code -> QR plan-docs.",
"Create ONE branch issue-{parent_index}-{slug} from main.",
"[SKILL] Planner executes plan, delegates to Developer per milestone/sub-issue.",
"[SKILL] QR post-implementation per milestone (results in parent issue comment).",
"Open ONE PR targeting main. Title uses parent index. Body lists 'Fixes #N' for parent and all sub-issues.",
"Move parent issue to status/review.",
"[SKILL] Quality Agent validates with RULE 0/1/2 (result in parent issue comment).",
"Create branch issue-{index}-{slug}.",
"[SKILL] Planner executes plan, delegates to Developer per milestone.",
"[SKILL] QR post-implementation per milestone (results in issue comment).",
"Open PR targeting main and linking issue(s).",
"Move issue to status/review.",
"[SKILL] Quality Agent validates with RULE 0/1/2 (result in issue comment).",
"If CI/tests fail, iterate until pass.",
"When PR is merged, parent and all sub-issues move to status/done. Close any not auto-closed.",
"When PR is merged, move issue to status/done and close issue if not auto-closed.",
"[SKILL] Doc-Sync on affected directories."
],
"skill_integration": {

View File

@@ -7,7 +7,6 @@
| `role-agents/` | Developer, TW, QR, Debugger agents | Delegating execution |
| `agents/` | Domain agents (Feature, Frontend, Platform, Quality) | Domain-specific work |
| `skills/` | Reusable skills | Complex multi-step workflows |
| `hooks/` | PreToolUse hooks (model enforcement) | Debugging hook behavior |
| `output-styles/` | Output formatting templates | Customizing agent output |
| `tdd-guard/` | TDD enforcement utilities | Test-driven development |
@@ -25,5 +24,4 @@
| `skills/incoherence/` | Detect doc/code drift | Periodic audits |
| `skills/prompt-engineer/` | Prompt optimization | Improving AI prompts |
| `agents/` | Domain agents (Feature, Frontend, Platform, Quality) | Domain-specific work |
| `hooks/` | PreToolUse hooks (model enforcement) | Debugging hook behavior |
| `.ai/workflow-contract.json` | Sprint process, skill integration | Issue workflow |

View File

@@ -1,38 +0,0 @@
# hooks/
## Files
| File | What | When to read |
| ---- | ---- | ------------ |
| `enforce-agent-model.sh` | Enforces correct model for Task tool calls | Debugging agent model issues |
## enforce-agent-model.sh
PreToolUse hook that ensures Task tool calls use the correct model based on `subagent_type`.
### Agent Model Mapping
| Agent | Required Model |
|-------|----------------|
| feature-agent | sonnet |
| first-frontend-agent | sonnet |
| platform-agent | sonnet |
| quality-agent | sonnet |
| developer | sonnet |
| technical-writer | sonnet |
| debugger | sonnet |
| quality-reviewer | opus |
| Explore | sonnet |
| Plan | sonnet |
| Bash | sonnet |
| general-purpose | sonnet |
### Behavior
- Blocks Task calls where `model` parameter doesn't match expected value
- Returns error message instructing Claude to retry with correct model
- Unknown agent types are allowed through (no enforcement)
### Adding New Agents
Edit the `get_expected_model()` function in `enforce-agent-model.sh` to add new agent mappings.

View File

@@ -1,58 +0,0 @@
#!/usr/bin/env bash
# Enforces correct model usage for Task tool based on agent definitions
# Blocks Task calls that don't specify the correct model for the subagent_type
# Read tool input from stdin
INPUT=$(cat)
# Extract subagent_type and model from the input
SUBAGENT_TYPE=$(echo "$INPUT" | jq -r '.subagent_type // empty')
MODEL=$(echo "$INPUT" | jq -r '.model // empty')
# If no subagent_type, allow (not an agent call)
if [[ -z "$SUBAGENT_TYPE" ]]; then
exit 0
fi
# Get expected model for agent type
# Most agents use sonnet, quality-reviewer uses opus
get_expected_model() {
case "$1" in
# Custom project agents
feature-agent|first-frontend-agent|platform-agent|quality-agent)
echo "sonnet"
;;
# Role agents
developer|technical-writer|debugger)
echo "sonnet"
;;
quality-reviewer)
echo "opus"
;;
# Built-in agents - default to sonnet for cost efficiency
Explore|Plan|Bash|general-purpose)
echo "sonnet"
;;
*)
# Unknown agent, no enforcement
echo ""
;;
esac
}
EXPECTED_MODEL=$(get_expected_model "$SUBAGENT_TYPE")
# If agent not in mapping, allow (unknown agent type)
if [[ -z "$EXPECTED_MODEL" ]]; then
exit 0
fi
# Check if model matches expected
if [[ "$MODEL" != "$EXPECTED_MODEL" ]]; then
echo "BLOCKED: Agent '$SUBAGENT_TYPE' requires model: '$EXPECTED_MODEL' but got '${MODEL:-<not specified>}'."
echo "Retry with: model: \"$EXPECTED_MODEL\""
exit 1
fi
# Model matches, allow the call
exit 0

View File

@@ -1,23 +0,0 @@
{
"testModules": [
{
"moduleId": "/Users/egullickson/Documents/Technology/coding/motovaultpro/frontend/src/features/dashboard/components/__tests__/ActionBar.test.tsx",
"tests": [
{
"name": "Module failed to load (Error)",
"fullName": "Module failed to load (Error)",
"state": "failed",
"errors": [
{
"message": "File not found: tsconfig.json (resolved as: /Users/egullickson/Documents/Technology/coding/motovaultpro/tsconfig.json)",
"name": "Error",
"stack": "Error: File not found: tsconfig.json (resolved as: /Users/egullickson/Documents/Technology/coding/motovaultpro/tsconfig.json)\n at ConfigSet.resolvePath (/Users/egullickson/Documents/Technology/coding/motovaultpro/frontend/node_modules/ts-jest/dist/legacy/config/config-set.js:616:19)\n at ConfigSet._setupConfigSet (/Users/egullickson/Documents/Technology/coding/motovaultpro/frontend/node_modules/ts-jest/dist/legacy/config/config-set.js:322:71)\n at new ConfigSet (/Users/egullickson/Documents/Technology/coding/motovaultpro/frontend/node_modules/ts-jest/dist/legacy/config/config-set.js:206:14)\n at TsJestTransformer._createConfigSet (/Users/egullickson/Documents/Technology/coding/motovaultpro/frontend/node_modules/ts-jest/dist/legacy/ts-jest-transformer.js:119:16)\n at TsJestTransformer._configsFor (/Users/egullickson/Documents/Technology/coding/motovaultpro/frontend/node_modules/ts-jest/dist/legacy/ts-jest-transformer.js:98:34)\n at TsJestTransformer.getCacheKey (/Users/egullickson/Documents/Technology/coding/motovaultpro/frontend/node_modules/ts-jest/dist/legacy/ts-jest-transformer.js:249:30)\n at ScriptTransformer._getCacheKey (/Users/egullickson/Documents/Technology/coding/motovaultpro/node_modules/@jest/transform/build/index.js:195:41)\n at ScriptTransformer._getFileCachePath (/Users/egullickson/Documents/Technology/coding/motovaultpro/node_modules/@jest/transform/build/index.js:231:27)\n at ScriptTransformer.transformSource (/Users/egullickson/Documents/Technology/coding/motovaultpro/node_modules/@jest/transform/build/index.js:402:32)\n at ScriptTransformer._transformAndBuildScript (/Users/egullickson/Documents/Technology/coding/motovaultpro/node_modules/@jest/transform/build/index.js:519:40)\n at ScriptTransformer.transform (/Users/egullickson/Documents/Technology/coding/motovaultpro/node_modules/@jest/transform/build/index.js:558:19)\n at Runtime.transformFile 
(/Users/egullickson/Documents/Technology/coding/motovaultpro/node_modules/jest-runtime/build/index.js:1290:53)\n at Runtime._execModule (/Users/egullickson/Documents/Technology/coding/motovaultpro/node_modules/jest-runtime/build/index.js:1243:34)\n at Runtime._loadModule (/Users/egullickson/Documents/Technology/coding/motovaultpro/node_modules/jest-runtime/build/index.js:944:12)\n at Runtime.requireModule (/Users/egullickson/Documents/Technology/coding/motovaultpro/node_modules/jest-runtime/build/index.js:832:12)\n at jestAdapter (/Users/egullickson/Documents/Technology/coding/motovaultpro/node_modules/jest-circus/build/runner.js:84:33)\n at processTicksAndRejections (node:internal/process/task_queues:104:5)\n at runTestInternal (/Users/egullickson/Documents/Technology/coding/motovaultpro/node_modules/jest-runner/build/index.js:275:16)\n at runTest (/Users/egullickson/Documents/Technology/coding/motovaultpro/node_modules/jest-runner/build/index.js:343:7)"
}
]
}
]
}
],
"unhandledErrors": [],
"reason": "failed"
}

View File

@@ -1,36 +0,0 @@
# MotoVaultPro Environment Configuration
# Copy to .env and fill in environment-specific values
# Generated .env files should NOT be committed to version control
#
# Local dev: No .env needed -- base docker-compose.yml defaults are sandbox values
# Staging/Production: CI/CD generates .env from Gitea variables + generate-log-config.sh
# ===========================================
# Stripe Price IDs (environment-specific)
# ===========================================
# Sandbox defaults used for local development
STRIPE_PRO_MONTHLY_PRICE_ID=price_1T1ZHMJXoKkh5RcKwKSSGIlR
STRIPE_PRO_YEARLY_PRICE_ID=price_1T1ZHnJXoKkh5RcKWlG2MPpX
STRIPE_ENTERPRISE_MONTHLY_PRICE_ID=price_1T1ZIBJXoKkh5RcKu2jyhqBN
STRIPE_ENTERPRISE_YEARLY_PRICE_ID=price_1T1ZIQJXoKkh5RcK34YXiJQm
# ===========================================
# Stripe Publishable Key (baked into frontend at build time)
# ===========================================
# VITE_STRIPE_PUBLISHABLE_KEY=pk_test_...
# ===========================================
# Log Levels (generated by scripts/ci/generate-log-config.sh)
# ===========================================
# Run: ./scripts/ci/generate-log-config.sh DEBUG >> .env
#
# BACKEND_LOG_LEVEL=debug
# TRAEFIK_LOG_LEVEL=DEBUG
# POSTGRES_LOG_STATEMENT=all
# POSTGRES_LOG_MIN_DURATION=0
# REDIS_LOGLEVEL=debug
# ===========================================
# Grafana
# ===========================================
# GRAFANA_ADMIN_PASSWORD=admin

View File

@@ -1,14 +0,0 @@
# .gitea/
## Files
| File | What | When to read |
| ---- | ---- | ------------ |
| `PULL_REQUEST_TEMPLATE.md` | PR template | Creating pull requests |
## Subdirectories
| Directory | What | When to read |
| --------- | ---- | ------------ |
| `workflows/` | CI/CD workflow definitions | Pipeline configuration |
| `ISSUE_TEMPLATE/` | Issue templates (bug, feature, chore) | Creating issues |

36
.gitea/SPRINTS.md Normal file
View File

@@ -0,0 +1,36 @@
# SPRINTS.md — MotoVaultPro Sprint Calendar (2026)
**Cadence:** 2 weeks (14 days)
**Sprint weeks:** Monday → Sunday
**Naming convention:** `Sprint YYYY-MM-DD` (the Monday start date)
> Note: Sprint 26 ends on **2027-01-03** (it crosses into the next year).
| # | Sprint | Start (Mon) | End (Sun) |
|---:|---|---|---|
| 1 | Sprint 2026-01-05 | 2026-01-05 | 2026-01-18 |
| 2 | Sprint 2026-01-19 | 2026-01-19 | 2026-02-01 |
| 3 | Sprint 2026-02-02 | 2026-02-02 | 2026-02-15 |
| 4 | Sprint 2026-02-16 | 2026-02-16 | 2026-03-01 |
| 5 | Sprint 2026-03-02 | 2026-03-02 | 2026-03-15 |
| 6 | Sprint 2026-03-16 | 2026-03-16 | 2026-03-29 |
| 7 | Sprint 2026-03-30 | 2026-03-30 | 2026-04-12 |
| 8 | Sprint 2026-04-13 | 2026-04-13 | 2026-04-26 |
| 9 | Sprint 2026-04-27 | 2026-04-27 | 2026-05-10 |
| 10 | Sprint 2026-05-11 | 2026-05-11 | 2026-05-24 |
| 11 | Sprint 2026-05-25 | 2026-05-25 | 2026-06-07 |
| 12 | Sprint 2026-06-08 | 2026-06-08 | 2026-06-21 |
| 13 | Sprint 2026-06-22 | 2026-06-22 | 2026-07-05 |
| 14 | Sprint 2026-07-06 | 2026-07-06 | 2026-07-19 |
| 15 | Sprint 2026-07-20 | 2026-07-20 | 2026-08-02 |
| 16 | Sprint 2026-08-03 | 2026-08-03 | 2026-08-16 |
| 17 | Sprint 2026-08-17 | 2026-08-17 | 2026-08-30 |
| 18 | Sprint 2026-08-31 | 2026-08-31 | 2026-09-13 |
| 19 | Sprint 2026-09-14 | 2026-09-14 | 2026-09-27 |
| 20 | Sprint 2026-09-28 | 2026-09-28 | 2026-10-11 |
| 21 | Sprint 2026-10-12 | 2026-10-12 | 2026-10-25 |
| 22 | Sprint 2026-10-26 | 2026-10-26 | 2026-11-08 |
| 23 | Sprint 2026-11-09 | 2026-11-09 | 2026-11-22 |
| 24 | Sprint 2026-11-23 | 2026-11-23 | 2026-12-06 |
| 25 | Sprint 2026-12-07 | 2026-12-07 | 2026-12-20 |
| 26 | Sprint 2026-12-21 | 2026-12-21 | 2027-01-03 |

View File

@@ -19,11 +19,9 @@ on:
env:
REGISTRY: git.motovaultpro.com
DEPLOY_PATH: /opt/motovaultpro
BASE_COMPOSE_FILE: docker-compose.yml
COMPOSE_FILE: docker-compose.yml
COMPOSE_BLUE_GREEN: docker-compose.blue-green.yml
COMPOSE_PROD: docker-compose.prod.yml
HEALTH_CHECK_TIMEOUT: "240"
LOG_LEVEL: INFO
HEALTH_CHECK_TIMEOUT: "60"
jobs:
# ============================================
@@ -36,7 +34,6 @@ jobs:
target_stack: ${{ steps.determine-stack.outputs.target_stack }}
backend_image: ${{ steps.set-images.outputs.backend_image }}
frontend_image: ${{ steps.set-images.outputs.frontend_image }}
ocr_image: ${{ steps.set-images.outputs.ocr_image }}
steps:
- name: Check Docker availability
run: |
@@ -56,7 +53,6 @@ jobs:
TAG="${{ inputs.image_tag }}"
echo "backend_image=$REGISTRY/egullickson/backend:$TAG" >> $GITHUB_OUTPUT
echo "frontend_image=$REGISTRY/egullickson/frontend:$TAG" >> $GITHUB_OUTPUT
echo "ocr_image=$REGISTRY/egullickson/ocr:$TAG" >> $GITHUB_OUTPUT
- name: Determine target stack
id: determine-stack
@@ -87,7 +83,6 @@ jobs:
TARGET_STACK: ${{ needs.validate.outputs.target_stack }}
BACKEND_IMAGE: ${{ needs.validate.outputs.backend_image }}
FRONTEND_IMAGE: ${{ needs.validate.outputs.frontend_image }}
OCR_IMAGE: ${{ needs.validate.outputs.ocr_image }}
steps:
- name: Checkout scripts, config, and compose files
uses: actions/checkout@v4
@@ -95,11 +90,8 @@ jobs:
sparse-checkout: |
scripts/
config/
secrets/app/google-wif-config.json
docker-compose.yml
docker-compose.blue-green.yml
docker-compose.prod.yml
.env.example
sparse-checkout-cone-mode: false
fetch-depth: 1
@@ -109,27 +101,6 @@ jobs:
rsync -av --delete "$GITHUB_WORKSPACE/scripts/" "$DEPLOY_PATH/scripts/"
cp "$GITHUB_WORKSPACE/docker-compose.yml" "$DEPLOY_PATH/"
cp "$GITHUB_WORKSPACE/docker-compose.blue-green.yml" "$DEPLOY_PATH/"
cp "$GITHUB_WORKSPACE/docker-compose.prod.yml" "$DEPLOY_PATH/"
# WIF credential config (not a secret -- references Auth0 token script path)
# Remove any Docker-created directory artifact from failed bind mounts
rm -rf "$DEPLOY_PATH/secrets/app/google-wif-config.json"
mkdir -p "$DEPLOY_PATH/secrets/app"
cp "$GITHUB_WORKSPACE/secrets/app/google-wif-config.json" "$DEPLOY_PATH/secrets/app/"
- name: Generate environment configuration
run: |
cd "$DEPLOY_PATH"
{
echo "# Generated by CI/CD - DO NOT EDIT"
echo "STRIPE_PRO_MONTHLY_PRICE_ID=${{ vars.STRIPE_PRO_MONTHLY_PRICE_ID }}"
echo "STRIPE_PRO_YEARLY_PRICE_ID=${{ vars.STRIPE_PRO_YEARLY_PRICE_ID }}"
echo "STRIPE_ENTERPRISE_MONTHLY_PRICE_ID=${{ vars.STRIPE_ENTERPRISE_MONTHLY_PRICE_ID }}"
echo "STRIPE_ENTERPRISE_YEARLY_PRICE_ID=${{ vars.STRIPE_ENTERPRISE_YEARLY_PRICE_ID }}"
echo "VITE_STRIPE_PUBLISHABLE_KEY=${{ vars.VITE_STRIPE_PUBLISHABLE_KEY }}"
echo "GRAFANA_ADMIN_PASSWORD=${{ secrets.GRAFANA_ADMIN_PASSWORD }}"
} > .env
chmod +x scripts/ci/generate-log-config.sh
./scripts/ci/generate-log-config.sh "$LOG_LEVEL" >> .env
- name: Login to registry
run: |
@@ -137,22 +108,17 @@ jobs:
- name: Inject secrets
run: |
cd "$DEPLOY_PATH"
chmod +x scripts/inject-secrets.sh
SECRETS_DIR="$DEPLOY_PATH/secrets/app" ./scripts/inject-secrets.sh
chmod +x "$GITHUB_WORKSPACE/scripts/inject-secrets.sh"
"$GITHUB_WORKSPACE/scripts/inject-secrets.sh"
env:
POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
AUTH0_CLIENT_SECRET: ${{ secrets.AUTH0_CLIENT_SECRET }}
AUTH0_MANAGEMENT_CLIENT_ID: ${{ secrets.AUTH0_MANAGEMENT_CLIENT_ID }}
AUTH0_MANAGEMENT_CLIENT_SECRET: ${{ secrets.AUTH0_MANAGEMENT_CLIENT_SECRET }}
AUTH0_OCR_CLIENT_ID: ${{ secrets.AUTH0_OCR_CLIENT_ID }}
AUTH0_OCR_CLIENT_SECRET: ${{ secrets.AUTH0_OCR_CLIENT_SECRET }}
GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
GOOGLE_MAPS_MAP_ID: ${{ secrets.GOOGLE_MAPS_MAP_ID }}
CF_DNS_API_TOKEN: ${{ secrets.CF_DNS_API_TOKEN }}
RESEND_API_KEY: ${{ secrets.RESEND_API_KEY }}
STRIPE_SECRET_KEY: ${{ secrets.STRIPE_SECRET_KEY }}
STRIPE_WEBHOOK_SECRET: ${{ secrets.STRIPE_WEBHOOK_SECRET }}
- name: Initialize data directories
run: |
@@ -170,7 +136,6 @@ jobs:
run: |
docker pull $BACKEND_IMAGE
docker pull $FRONTEND_IMAGE
docker pull $OCR_IMAGE
- name: Record expected image IDs
id: expected-images
@@ -183,50 +148,18 @@ jobs:
echo "frontend_id=$FRONTEND_ID" >> $GITHUB_OUTPUT
echo "backend_id=$BACKEND_ID" >> $GITHUB_OUTPUT
- name: Start shared services
run: |
cd "$DEPLOY_PATH"
# Start shared infrastructure services (database, cache, logging)
# --no-recreate prevents restarting postgres/redis when config files change
# These must persist across blue-green deployments to avoid data service disruption
docker compose -f $BASE_COMPOSE_FILE -f $COMPOSE_BLUE_GREEN -f $COMPOSE_PROD up -d --no-recreate \
mvp-postgres mvp-redis mvp-loki mvp-alloy mvp-grafana
- name: Wait for shared services health
run: |
echo "Waiting for PostgreSQL and Redis to be healthy..."
for service in mvp-postgres mvp-redis; do
for i in $(seq 1 24); do
health=$(docker inspect --format='{{.State.Health.Status}}' $service 2>/dev/null || echo "unknown")
if [ "$health" = "healthy" ]; then
echo "OK: $service is healthy"
break
fi
if [ $i -eq 24 ]; then
echo "ERROR: $service health check timed out (status: $health)"
docker logs $service --tail 50 2>/dev/null || true
exit 1
fi
echo "Waiting for $service... (attempt $i/24, status: $health)"
sleep 5
done
done
echo "All shared services healthy"
- name: Start target stack
run: |
cd "$DEPLOY_PATH"
export BACKEND_IMAGE=$BACKEND_IMAGE
export FRONTEND_IMAGE=$FRONTEND_IMAGE
export OCR_IMAGE=$OCR_IMAGE
# --force-recreate ensures containers are recreated even if image tag is same
# This prevents stale container content when image digest changes
# Start shared OCR service and target stack
docker compose -f $BASE_COMPOSE_FILE -f $COMPOSE_BLUE_GREEN -f $COMPOSE_PROD up -d --force-recreate \
mvp-ocr mvp-frontend-$TARGET_STACK mvp-backend-$TARGET_STACK
docker compose -f $COMPOSE_FILE -f $COMPOSE_BLUE_GREEN up -d --force-recreate \
mvp-frontend-$TARGET_STACK mvp-backend-$TARGET_STACK
- name: Wait for stack initialization
run: sleep 5
run: sleep 10
- name: Verify container images
run: |
@@ -261,7 +194,7 @@ jobs:
- name: Start Traefik
run: |
cd "$DEPLOY_PATH"
docker compose -f $BASE_COMPOSE_FILE -f $COMPOSE_BLUE_GREEN -f $COMPOSE_PROD up -d mvp-traefik
docker compose -f $COMPOSE_FILE -f $COMPOSE_BLUE_GREEN up -d mvp-traefik
- name: Wait for Traefik
run: |
@@ -305,79 +238,22 @@ jobs:
- name: Wait for routing propagation
run: sleep 5
- name: Check container status and health
run: |
for service in mvp-frontend-$TARGET_STACK mvp-backend-$TARGET_STACK mvp-ocr; do
status=$(docker inspect --format='{{.State.Status}}' $service 2>/dev/null || echo "not found")
if [ "$status" != "running" ]; then
echo "ERROR: $service is not running (status: $status)"
docker logs $service --tail 50 2>/dev/null || true
exit 1
fi
echo "OK: $service is running"
done
# Wait for Docker healthchecks to complete (services with healthcheck defined)
echo ""
echo "Waiting for Docker healthchecks..."
for service in mvp-frontend-$TARGET_STACK mvp-backend-$TARGET_STACK mvp-ocr; do
# Check if service has a healthcheck defined
has_healthcheck=$(docker inspect --format='{{if .Config.Healthcheck}}true{{else}}false{{end}}' $service 2>/dev/null || echo "false")
if [ "$has_healthcheck" = "true" ]; then
# 48 attempts x 5 seconds = 4 minutes max wait (backend with fresh migrations can take ~3 min)
for i in $(seq 1 48); do
health=$(docker inspect --format='{{.State.Health.Status}}' $service 2>/dev/null || echo "unknown")
if [ "$health" = "healthy" ]; then
echo "OK: $service is healthy"
break
fi
# Don't fail immediately on unhealthy - container may still be starting up
# and can recover. Let the timeout handle truly broken containers.
if [ $i -eq 48 ]; then
echo "ERROR: $service health check timed out (status: $health)"
docker logs $service --tail 100 2>/dev/null || true
exit 1
fi
echo "Waiting for $service healthcheck... (attempt $i/48, status: $health)"
sleep 5
done
else
echo "SKIP: $service has no healthcheck defined"
fi
done
- name: Wait for backend health
run: |
for i in $(seq 1 12); do
if docker exec mvp-backend-$TARGET_STACK curl -sf http://localhost:3001/health > /dev/null 2>&1; then
echo "OK: Backend health check passed"
exit 0
fi
if [ $i -eq 12 ]; then
echo "ERROR: Backend health check failed after 12 attempts"
docker logs mvp-backend-$TARGET_STACK --tail 100
exit 1
fi
echo "Attempt $i/12: Backend not ready, waiting 5s..."
sleep 5
done
- name: External health check
run: |
REQUIRED_FEATURES='["admin","auth","onboarding","vehicles","documents","fuel-logs","stations","maintenance","platform","notifications","user-profile","user-preferences","user-export"]'
for i in $(seq 1 12); do
for i in 1 2 3 4 5 6; do
RESPONSE=$(curl -sf https://motovaultpro.com/api/health 2>/dev/null) || {
echo "Attempt $i/12: Connection failed, waiting 5s..."
sleep 5
echo "Attempt $i/6: Connection failed, waiting 10s..."
sleep 10
continue
}
# Check status is "healthy"
STATUS=$(echo "$RESPONSE" | jq -r '.status')
if [ "$STATUS" != "healthy" ]; then
echo "Attempt $i/12: Status is '$STATUS', not 'healthy'. Waiting 5s..."
sleep 5
echo "Attempt $i/6: Status is '$STATUS', not 'healthy'. Waiting 10s..."
sleep 10
continue
fi
@@ -387,8 +263,8 @@ jobs:
')
if [ -n "$MISSING" ]; then
echo "Attempt $i/12: Missing features: $MISSING. Waiting 5s..."
sleep 5
echo "Attempt $i/6: Missing features: $MISSING. Waiting 10s..."
sleep 10
continue
fi
@@ -397,7 +273,7 @@ jobs:
exit 0
done
echo "ERROR: Production health check failed after 12 attempts"
echo "ERROR: Production health check failed after 6 attempts"
echo "Last response: $RESPONSE"
exit 1

View File

@@ -15,10 +15,9 @@ on:
env:
REGISTRY: git.motovaultpro.com
DEPLOY_PATH: /opt/motovaultpro
BASE_COMPOSE_FILE: docker-compose.yml
STAGING_COMPOSE_FILE: docker-compose.staging.yml
COMPOSE_FILE: docker-compose.yml
COMPOSE_STAGING: docker-compose.staging.yml
HEALTH_CHECK_TIMEOUT: "60"
LOG_LEVEL: DEBUG
jobs:
# ============================================
@@ -30,7 +29,6 @@ jobs:
outputs:
backend_image: ${{ steps.tags.outputs.backend_image }}
frontend_image: ${{ steps.tags.outputs.frontend_image }}
ocr_image: ${{ steps.tags.outputs.ocr_image }}
short_sha: ${{ steps.tags.outputs.short_sha }}
steps:
- name: Checkout code
@@ -47,7 +45,6 @@ jobs:
SHORT_SHA="${SHORT_SHA:0:7}"
echo "backend_image=$REGISTRY/egullickson/backend:$SHORT_SHA" >> $GITHUB_OUTPUT
echo "frontend_image=$REGISTRY/egullickson/frontend:$SHORT_SHA" >> $GITHUB_OUTPUT
echo "ocr_image=$REGISTRY/egullickson/ocr:$SHORT_SHA" >> $GITHUB_OUTPUT
echo "short_sha=$SHORT_SHA" >> $GITHUB_OUTPUT
- name: Build backend image
@@ -70,32 +67,18 @@ jobs:
--build-arg VITE_AUTH0_CLIENT_ID=${{ vars.VITE_AUTH0_CLIENT_ID }} \
--build-arg VITE_AUTH0_AUDIENCE=${{ vars.VITE_AUTH0_AUDIENCE }} \
--build-arg VITE_API_BASE_URL=/api \
--build-arg VITE_STRIPE_PUBLISHABLE_KEY=${{ vars.VITE_STRIPE_PUBLISHABLE_KEY }} \
--cache-from $REGISTRY/egullickson/frontend:latest \
-t ${{ steps.tags.outputs.frontend_image }} \
-t $REGISTRY/egullickson/frontend:latest \
-f frontend/Dockerfile \
frontend
- name: Build OCR image
run: |
docker build \
--build-arg BUILDKIT_INLINE_CACHE=1 \
--build-arg REGISTRY_MIRRORS=$REGISTRY/egullickson/mirrors \
--cache-from $REGISTRY/egullickson/ocr:latest \
-t ${{ steps.tags.outputs.ocr_image }} \
-t $REGISTRY/egullickson/ocr:latest \
-f ocr/Dockerfile \
ocr
- name: Push images
run: |
docker push ${{ steps.tags.outputs.backend_image }}
docker push ${{ steps.tags.outputs.frontend_image }}
docker push ${{ steps.tags.outputs.ocr_image }}
docker push $REGISTRY/egullickson/backend:latest
docker push $REGISTRY/egullickson/frontend:latest
docker push $REGISTRY/egullickson/ocr:latest
# ============================================
# DEPLOY STAGING - Deploy to staging server
@@ -107,38 +90,10 @@ jobs:
env:
BACKEND_IMAGE: ${{ needs.build.outputs.backend_image }}
FRONTEND_IMAGE: ${{ needs.build.outputs.frontend_image }}
OCR_IMAGE: ${{ needs.build.outputs.ocr_image }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Sync config, scripts, and compose files to deploy path
run: |
rsync -av --delete "$GITHUB_WORKSPACE/config/" "$DEPLOY_PATH/config/"
rsync -av --delete "$GITHUB_WORKSPACE/scripts/" "$DEPLOY_PATH/scripts/"
cp "$GITHUB_WORKSPACE/docker-compose.yml" "$DEPLOY_PATH/"
cp "$GITHUB_WORKSPACE/docker-compose.staging.yml" "$DEPLOY_PATH/"
# WIF credential config (not a secret -- references Auth0 token script path)
# Remove any Docker-created directory artifact from failed bind mounts
rm -rf "$DEPLOY_PATH/secrets/app/google-wif-config.json"
mkdir -p "$DEPLOY_PATH/secrets/app"
cp "$GITHUB_WORKSPACE/secrets/app/google-wif-config.json" "$DEPLOY_PATH/secrets/app/"
- name: Generate environment configuration
run: |
cd "$DEPLOY_PATH"
{
echo "# Generated by CI/CD - DO NOT EDIT"
echo "STRIPE_PRO_MONTHLY_PRICE_ID=${{ vars.STRIPE_PRO_MONTHLY_PRICE_ID }}"
echo "STRIPE_PRO_YEARLY_PRICE_ID=${{ vars.STRIPE_PRO_YEARLY_PRICE_ID }}"
echo "STRIPE_ENTERPRISE_MONTHLY_PRICE_ID=${{ vars.STRIPE_ENTERPRISE_MONTHLY_PRICE_ID }}"
echo "STRIPE_ENTERPRISE_YEARLY_PRICE_ID=${{ vars.STRIPE_ENTERPRISE_YEARLY_PRICE_ID }}"
echo "VITE_STRIPE_PUBLISHABLE_KEY=${{ vars.VITE_STRIPE_PUBLISHABLE_KEY }}"
echo "GRAFANA_ADMIN_PASSWORD=${{ secrets.GRAFANA_ADMIN_PASSWORD }}"
} > .env
chmod +x scripts/ci/generate-log-config.sh
./scripts/ci/generate-log-config.sh "$LOG_LEVEL" >> .env
- name: Login to registry
run: |
echo "${{ secrets.REGISTRY_PASSWORD }}" | docker login -u "${{ secrets.REGISTRY_USER }}" --password-stdin "$REGISTRY"
@@ -153,14 +108,10 @@ jobs:
AUTH0_CLIENT_SECRET: ${{ secrets.AUTH0_CLIENT_SECRET }}
AUTH0_MANAGEMENT_CLIENT_ID: ${{ secrets.AUTH0_MANAGEMENT_CLIENT_ID }}
AUTH0_MANAGEMENT_CLIENT_SECRET: ${{ secrets.AUTH0_MANAGEMENT_CLIENT_SECRET }}
AUTH0_OCR_CLIENT_ID: ${{ secrets.AUTH0_OCR_CLIENT_ID }}
AUTH0_OCR_CLIENT_SECRET: ${{ secrets.AUTH0_OCR_CLIENT_SECRET }}
GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
GOOGLE_MAPS_MAP_ID: ${{ secrets.GOOGLE_MAPS_MAP_ID }}
CF_DNS_API_TOKEN: ${{ secrets.CF_DNS_API_TOKEN }}
RESEND_API_KEY: ${{ secrets.RESEND_API_KEY }}
STRIPE_SECRET_KEY: ${{ secrets.STRIPE_SECRET_KEY }}
STRIPE_WEBHOOK_SECRET: ${{ secrets.STRIPE_WEBHOOK_SECRET }}
- name: Initialize data directories
run: |
@@ -178,19 +129,17 @@ jobs:
run: |
docker pull $BACKEND_IMAGE
docker pull $FRONTEND_IMAGE
docker pull $OCR_IMAGE
- name: Deploy staging stack
run: |
cd "$DEPLOY_PATH"
export BACKEND_IMAGE=$BACKEND_IMAGE
export FRONTEND_IMAGE=$FRONTEND_IMAGE
export OCR_IMAGE=$OCR_IMAGE
docker compose -f $BASE_COMPOSE_FILE -f $STAGING_COMPOSE_FILE down --timeout 30 || true
docker compose -f $BASE_COMPOSE_FILE -f $STAGING_COMPOSE_FILE up -d
docker compose -f $COMPOSE_FILE -f $COMPOSE_STAGING down --timeout 30 || true
docker compose -f $COMPOSE_FILE -f $COMPOSE_STAGING up -d
- name: Wait for services
run: sleep 5
run: sleep 15
# ============================================
# VERIFY STAGING - Health checks
@@ -205,7 +154,7 @@ jobs:
- name: Check container status and health
run: |
for service in mvp-frontend-staging mvp-backend-staging mvp-ocr-staging mvp-postgres-staging mvp-redis-staging; do
for service in mvp-frontend-staging mvp-backend-staging mvp-postgres-staging mvp-redis-staging; do
status=$(docker inspect --format='{{.State.Status}}' $service 2>/dev/null || echo "not found")
if [ "$status" != "running" ]; then
echo "ERROR: $service is not running (status: $status)"
@@ -218,25 +167,26 @@ jobs:
# Wait for Docker healthchecks to complete (services with healthcheck defined)
echo ""
echo "Waiting for Docker healthchecks..."
for service in mvp-frontend-staging mvp-backend-staging mvp-ocr-staging mvp-postgres-staging mvp-redis-staging; do
for service in mvp-frontend-staging mvp-backend-staging mvp-postgres-staging mvp-redis-staging; do
# Check if service has a healthcheck defined
has_healthcheck=$(docker inspect --format='{{if .Config.Healthcheck}}true{{else}}false{{end}}' $service 2>/dev/null || echo "false")
if [ "$has_healthcheck" = "true" ]; then
# 48 attempts x 5 seconds = 4 minutes max wait (backend with fresh migrations can take ~3 min)
for i in $(seq 1 48); do
for i in 1 2 3 4 5 6 7 8 9 10; do
health=$(docker inspect --format='{{.State.Health.Status}}' $service 2>/dev/null || echo "unknown")
if [ "$health" = "healthy" ]; then
echo "OK: $service is healthy"
break
fi
# Don't fail immediately on unhealthy - container may still be starting up
# and can recover. Let the timeout handle truly broken containers.
if [ $i -eq 48 ]; then
echo "ERROR: $service health check timed out (status: $health)"
docker logs $service --tail 100 2>/dev/null || true
elif [ "$health" = "unhealthy" ]; then
echo "ERROR: $service is unhealthy"
docker logs $service --tail 50 2>/dev/null || true
exit 1
fi
echo "Waiting for $service healthcheck... (attempt $i/48, status: $health)"
if [ $i -eq 10 ]; then
echo "ERROR: $service health check timed out (status: $health)"
docker logs $service --tail 50 2>/dev/null || true
exit 1
fi
echo "Waiting for $service healthcheck... (attempt $i/10, status: $health)"
sleep 5
done
else
@@ -246,36 +196,36 @@ jobs:
- name: Wait for backend health
run: |
for i in $(seq 1 12); do
for i in 1 2 3 4 5 6; do
if docker exec mvp-backend-staging curl -sf http://localhost:3001/health > /dev/null 2>&1; then
echo "OK: Backend health check passed"
exit 0
fi
if [ $i -eq 12 ]; then
echo "ERROR: Backend health check failed after 12 attempts"
if [ $i -eq 6 ]; then
echo "ERROR: Backend health check failed after 6 attempts"
docker logs mvp-backend-staging --tail 100
exit 1
fi
echo "Attempt $i/12: Backend not ready, waiting 5s..."
sleep 5
echo "Attempt $i/6: Backend not ready, waiting 10s..."
sleep 10
done
- name: Check external endpoint
run: |
REQUIRED_FEATURES='["admin","auth","onboarding","vehicles","documents","fuel-logs","stations","maintenance","platform","notifications","user-profile","user-preferences","user-export"]'
for i in $(seq 1 12); do
for i in 1 2 3 4 5 6; do
RESPONSE=$(curl -sf https://staging.motovaultpro.com/api/health 2>/dev/null) || {
echo "Attempt $i/12: Connection failed, waiting 5s..."
sleep 5
echo "Attempt $i/6: Connection failed, waiting 10s..."
sleep 10
continue
}
# Check status is "healthy"
STATUS=$(echo "$RESPONSE" | jq -r '.status')
if [ "$STATUS" != "healthy" ]; then
echo "Attempt $i/12: Status is '$STATUS', not 'healthy'. Waiting 5s..."
sleep 5
echo "Attempt $i/6: Status is '$STATUS', not 'healthy'. Waiting 10s..."
sleep 10
continue
fi
@@ -285,8 +235,8 @@ jobs:
')
if [ -n "$MISSING" ]; then
echo "Attempt $i/12: Missing features: $MISSING. Waiting 5s..."
sleep 5
echo "Attempt $i/6: Missing features: $MISSING. Waiting 10s..."
sleep 10
continue
fi
@@ -295,7 +245,7 @@ jobs:
exit 0
done
echo "ERROR: Staging health check failed after 12 attempts"
echo "ERROR: Staging health check failed after 6 attempts"
echo "Last response: $RESPONSE"
exit 1

5
.gitignore vendored
View File

@@ -2,7 +2,6 @@ node_modules/
.env
.env.local
.env.backup
.env.logging
dist/
*.log
.DS_Store
@@ -13,16 +12,12 @@ coverage/
*.swo
.venv
.playwright-mcp
__pycache__/
*.py[cod]
*$py.class
# K8s-aligned secret mounts (real files ignored; examples committed)
secrets/**
!secrets/
!secrets/**/
!secrets/**/*.example
!secrets/app/google-wif-config.json
# Traefik ACME certificates (contains private keys)
data/traefik/acme.json

View File

@@ -1,6 +1,6 @@
# MotoVaultPro
Single-tenant vehicle management application with 9-container architecture (6 application: Traefik, Frontend, Backend, OCR, PostgreSQL, Redis + 3 logging: Loki, Alloy, Grafana).
Single-tenant vehicle management application with 5-container architecture (Traefik, Frontend, Backend, PostgreSQL, Redis).
## Files
@@ -8,9 +8,6 @@ Single-tenant vehicle management application with 9-container architecture (6 ap
| ---- | ---- | ------------ |
| `Makefile` | Build, test, deploy commands | Running any make command |
| `docker-compose.yml` | Development container orchestration | Local development setup |
| `docker-compose.staging.yml` | Staging container orchestration | Staging deployment |
| `docker-compose.prod.yml` | Production container orchestration | Production deployment |
| `docker-compose.blue-green.yml` | Blue-green deployment orchestration | Zero-downtime deploys |
| `package.json` | Root workspace dependencies | Dependency management |
| `README.md` | Project overview | First-time setup |
@@ -20,23 +17,19 @@ Single-tenant vehicle management application with 9-container architecture (6 ap
| --------- | ---- | ------------ |
| `backend/` | Fastify API server with feature capsules | Backend development |
| `frontend/` | React/Vite SPA with MUI | Frontend development |
| `ocr/` | Python OCR microservice (Tesseract) | OCR pipeline, receipt/VIN extraction |
| `docs/` | Project documentation hub | Architecture, APIs, testing |
| `config/` | Configuration files (Traefik, logging stack) | Infrastructure setup |
| `scripts/` | Utility scripts (backup, deploy, CI) | Automation tasks |
| `config/` | Configuration files (Traefik, monitoring) | Infrastructure setup |
| `scripts/` | Utility scripts (backup, deploy) | Automation tasks |
| `.ai/` | AI context and workflow contracts | AI-assisted development |
| `.claude/` | Claude Code agents and skills | Delegating to agents, using skills |
| `.gitea/` | Gitea workflows and templates | CI/CD, issue templates |
| `ansible/` | Ansible deployment playbooks | Server provisioning |
| `certs/` | TLS certificates | SSL/TLS configuration |
| `secrets/` | Docker secrets (Stripe keys, Traefik) | Secret management |
| `data/` | Persistent data volumes (backups, documents) | Storage paths, volume mounts |
## Build for staging and production. NOT FOR DEVELOPMENT
## Build
```bash
make setup # First-time setup
make rebuild # Rebuild containers
make setup # First-time setup (builds containers, runs migrations)
make rebuild # Rebuild containers after changes
```
## Test
@@ -174,23 +167,13 @@ Issues are the source of truth. See `.ai/workflow-contract.json` for complete wo
- Every PR must link to at least one issue
- Use Gitea MCP tools for issue/label/branch/PR operations
- Labels: `status/backlog` -> `status/ready` -> `status/in-progress` -> `status/review` -> `status/done`
- Branches: `issue-{parent_index}-{slug}` (e.g., `issue-42-add-fuel-report`)
- Branches: `issue-{index}-{slug}` (e.g., `issue-42-add-fuel-report`)
- Commits: `{type}: {summary} (refs #{index})` (e.g., `feat: add fuel report (refs #42)`)
### Sub-Issue Decomposition
Multi-file features (3+ files) must be broken into sub-issues for smaller AI context windows:
- **Sub-issue title**: `{type}: {summary} (#{parent_index})` -- parent index in title
- **Sub-issue body**: First line `Relates to #{parent_index}`
- **ONE branch** per parent issue only. Never branch per sub-issue.
- **ONE PR** per parent issue. Body lists `Fixes #N` for parent and every sub-issue.
- **Commits** reference the specific sub-issue: `feat: add dashboard (refs #107)`
- **Status labels** tracked on parent only. Sub-issues stay `status/backlog`.
- **Plan milestones** map 1:1 to sub-issues.
## Architecture Context for AI
### 9-Container Architecture
**MotoVaultPro uses a unified architecture:** A single-tenant application with 9 containers - 6 application (Traefik, Frontend, Backend, OCR, PostgreSQL, Redis) + 3 logging (Loki, Alloy, Grafana). Application features in `backend/src/features/[name]/` are self-contained modules within the backend service, including the platform feature for vehicle data and VIN decoding. See `docs/LOGGING.md` for unified logging system documentation.
### Simplified 5-Container Architecture
**MotoVaultPro uses a simplified architecture:** A single-tenant application with 5 containers - Traefik, Frontend, Backend, PostgreSQL, and Redis. Application features in `backend/src/features/[name]/` are self-contained modules within the backend service, including the platform feature for vehicle data and VIN decoding.
### Key Principles for AI Understanding
- **Feature Capsule Organization**: Application features are self-contained modules within the backend

View File

@@ -1,17 +1,17 @@
# MotoVaultPro — Simplified Architecture
9-container architecture (6 application + 3 logging) with integrated platform feature.
Simplified 5-container architecture with integrated platform feature.
## Requirements
- Mobile + Desktop: Implement and test every feature on both.
- Docker-first, production-only: All testing and validation in containers.
- See `CLAUDE.md` for development partnership guidelines.
## Staging and Production Commands. NOT FOR DEVELOPMENT (containers)
## Quick Start (containers)
```bash
make setup # build + start + migrate (uses mvp-* containers)
make start # start 5 services
make rebuild #
make rebuild # rebuild on changes
make logs # tail all logs
make migrate # run DB migrations
```
@@ -240,19 +240,10 @@ make migrate # run DB migrations
Skills: codebase-analysis, problem-analysis, decision-critic, planner, doc-sync
Role-Agents: Developer, Technical Writer (TW), Quality Reviewer (QR), Debugger
Domain Agents: Feature Agent, Frontend Agent, Platform Agent, Quality Agent
Labels: status/backlog -> status/ready -> status/in-progress -> status/review -> status/done
Commits: {type}: {summary} (refs #{N}) | Types: feat, fix, chore, docs, refactor, test
Branches: issue-{N}-{slug} | Example: issue-42-add-fuel-report
SUB-ISSUE PATTERN (multi-file features)
----------------------------------------
Parent: #105 "feat: Add Grafana dashboards"
Sub: #106 "feat: Dashboard provisioning (#105)" <-- parent index in title
Branch: issue-105-add-grafana-dashboards <-- ONE branch, parent index
Commit: feat: add provisioning (refs #106) <-- refs specific sub-issue
PR: feat: Add Grafana dashboards (#105) <-- ONE PR, parent index
Body: Fixes #105, Fixes #106, Fixes #107... <-- closes all
QUALITY RULES
-------------

View File

@@ -1,11 +0,0 @@
# ansible/
## Files
| File | What | When to read |
| ---- | ---- | ------------ |
| `deploy-production-runner.yml` | Production runner deployment | Production deployments |
| `deploy-staging-runner.yml` | Staging runner deployment | Staging deployments |
| `inventory.yml` | Server inventory | Server host configuration |
| `inventory.yml.example` | Example inventory template | Setting up new environments |
| `config.yaml.j2` | Jinja2 config template | Runner configuration |

View File

@@ -269,17 +269,24 @@
when: gitea_registry_token is defined
# ============================================
# Remove Legacy Docker Cleanup (was destroying volumes)
# Maintenance Scripts
# ============================================
- name: Remove legacy Docker cleanup cron job
- name: Create Docker cleanup script
copy:
dest: /usr/local/bin/docker-cleanup.sh
content: |
#!/bin/bash
# Remove unused Docker resources older than 7 days
docker system prune -af --filter "until=168h"
docker volume prune -f
mode: '0755'
- name: Schedule Docker cleanup cron job
cron:
name: "Docker cleanup"
state: absent
- name: Remove legacy Docker cleanup script
file:
path: /usr/local/bin/docker-cleanup.sh
state: absent
minute: "0"
hour: "3"
job: "/usr/local/bin/docker-cleanup.sh >> /var/log/docker-cleanup.log 2>&1"
# ============================================
# Production-Specific Security Hardening

View File

@@ -300,17 +300,24 @@
when: gitea_registry_token is defined
# ============================================
# Remove Legacy Docker Cleanup (was destroying volumes)
# Maintenance Scripts
# ============================================
- name: Remove legacy Docker cleanup cron job
- name: Create Docker cleanup script
copy:
dest: /usr/local/bin/docker-cleanup.sh
content: |
#!/bin/bash
# Remove unused Docker resources older than 7 days
docker system prune -af --filter "until=168h"
docker volume prune -f
mode: '0755'
- name: Schedule Docker cleanup cron job
cron:
name: "Docker cleanup"
state: absent
- name: Remove legacy Docker cleanup script
file:
path: /usr/local/bin/docker-cleanup.sh
state: absent
minute: "0"
hour: "3"
job: "/usr/local/bin/docker-cleanup.sh >> /var/log/docker-cleanup.log 2>&1"
handlers:
- name: Restart act_runner

View File

@@ -7,8 +7,7 @@
| `README.md` | Backend quickstart and commands | Getting started with backend development |
| `package.json` | Dependencies and npm scripts | Adding dependencies, understanding build |
| `tsconfig.json` | TypeScript configuration | Compiler settings, path aliases |
| `eslint.config.js` | ESLint configuration | Linting rules, code style |
| `jest.config.js` | Jest test configuration | Test setup, coverage settings |
| `jest.config.ts` | Jest test configuration | Test setup, coverage settings |
| `Dockerfile` | Container build definition | Docker builds, deployment |
## Subdirectories
@@ -16,4 +15,4 @@
| Directory | What | When to read |
| --------- | ---- | ------------ |
| `src/` | Application source code | Any backend development |
| `scripts/` | Utility scripts (docker-entrypoint) | Container startup, automation |
| `scripts/` | Utility scripts | Database scripts, automation |

View File

@@ -20,26 +20,21 @@
"fastify": "^5.2.0",
"fastify-plugin": "^5.0.1",
"file-type": "^16.5.4",
"form-data": "^4.0.0",
"get-jwks": "^11.0.3",
"ioredis": "^5.4.2",
"js-yaml": "^4.1.0",
"mailparser": "^3.9.3",
"node-cron": "^3.0.3",
"opossum": "^8.0.0",
"pg": "^8.13.1",
"pino": "^9.6.0",
"resend": "^3.0.0",
"stripe": "^20.2.0",
"svix": "^1.85.0",
"tar": "^7.4.3",
"winston": "^3.17.0",
"zod": "^3.24.1"
},
"devDependencies": {
"@eslint/js": "^9.17.0",
"@types/jest": "^29.5.10",
"@types/js-yaml": "^4.0.9",
"@types/mailparser": "^3.4.6",
"@types/node": "^22.0.0",
"@types/node-cron": "^3.0.11",
"@types/opossum": "^8.0.0",
@@ -86,6 +81,7 @@
"integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@babel/code-frame": "^7.27.1",
"@babel/generator": "^7.28.5",
@@ -581,6 +577,15 @@
"dev": true,
"license": "MIT"
},
"node_modules/@colors/colors": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz",
"integrity": "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==",
"license": "MIT",
"engines": {
"node": ">=0.1.90"
}
},
"node_modules/@cspotcode/source-map-support": {
"version": "0.8.1",
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
@@ -605,6 +610,17 @@
"@jridgewell/sourcemap-codec": "^1.4.10"
}
},
"node_modules/@dabh/diagnostics": {
"version": "2.0.8",
"resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.8.tgz",
"integrity": "sha512-R4MSXTVnuMzGD7bzHdW2ZhhdPC/igELENcq5IjEverBvq5hn1SXCWcsi6eSsdWP0/Ur+SItRRjAktmdoX/8R/Q==",
"license": "MIT",
"dependencies": {
"@so-ric/colorspace": "^1.1.6",
"enabled": "2.0.x",
"kuler": "^2.0.0"
}
},
"node_modules/@eslint-community/eslint-utils": {
"version": "4.9.0",
"resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz",
@@ -1768,11 +1784,15 @@
"@sinonjs/commons": "^3.0.0"
}
},
"node_modules/@stablelib/base64": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@stablelib/base64/-/base64-1.0.1.tgz",
"integrity": "sha512-1bnPQqSxSuc3Ii6MhBysoWCg58j97aUjuCSZrGSmDxNqtytIi0k8utUenAwTZN4V5mXXYGsVUI9zeBqy+jBOSQ==",
"license": "MIT"
"node_modules/@so-ric/colorspace": {
"version": "1.1.6",
"resolved": "https://registry.npmjs.org/@so-ric/colorspace/-/colorspace-1.1.6.tgz",
"integrity": "sha512-/KiKkpHNOBgkFJwu9sh48LkHSMYGyuTcSFK/qMBdnOAlrRJzRSXAOFB5qwzaVQuDl8wAvHVMkaASQDReTahxuw==",
"license": "MIT",
"dependencies": {
"color": "^5.0.2",
"text-hex": "1.0.x"
}
},
"node_modules/@tokenizer/token": {
"version": "0.3.0",
@@ -1929,30 +1949,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/mailparser": {
"version": "3.4.6",
"resolved": "https://registry.npmjs.org/@types/mailparser/-/mailparser-3.4.6.tgz",
"integrity": "sha512-wVV3cnIKzxTffaPH8iRnddX1zahbYB1ZEoAxyhoBo3TBCBuK6nZ8M8JYO/RhsCuuBVOw/DEN/t/ENbruwlxn6Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*",
"iconv-lite": "^0.6.3"
}
},
"node_modules/@types/mailparser/node_modules/iconv-lite": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
"dev": true,
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/@types/methods": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/@types/methods/-/methods-1.1.4.tgz",
@@ -1964,8 +1960,9 @@
"version": "22.19.3",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.3.tgz",
"integrity": "sha512-1N9SBnWYOJTrNZCdh/yJE+t910Y128BoyY+zBLWhL3r0TYzlTmFdXrPwHL9DyFZmlEXNQQolTZh3KHV31QDhyA==",
"devOptional": true,
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"undici-types": "~6.21.0"
}
@@ -2030,6 +2027,12 @@
"@types/superagent": "^8.1.0"
}
},
"node_modules/@types/triple-beam": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/@types/triple-beam/-/triple-beam-1.3.5.tgz",
"integrity": "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==",
"license": "MIT"
},
"node_modules/@types/yargs": {
"version": "17.0.35",
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.35.tgz",
@@ -2092,6 +2095,7 @@
"integrity": "sha512-6/cmF2piao+f6wSxUsJLZjck7OQsYyRtcOZS02k7XINSNlz93v6emM8WutDQSXnroG2xwYlEVHJI+cPA7CPM3Q==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@typescript-eslint/scope-manager": "8.50.0",
"@typescript-eslint/types": "8.50.0",
@@ -2303,17 +2307,6 @@
"url": "https://opencollective.com/typescript-eslint"
}
},
"node_modules/@zone-eu/mailsplit": {
"version": "5.4.8",
"resolved": "https://registry.npmjs.org/@zone-eu/mailsplit/-/mailsplit-5.4.8.tgz",
"integrity": "sha512-eEyACj4JZ7sjzRvy26QhLgKEMWwQbsw1+QZnlLX+/gihcNH07lVPOcnwf5U6UAL7gkc//J3jVd76o/WS+taUiA==",
"license": "(MIT OR EUPL-1.1+)",
"dependencies": {
"libbase64": "1.3.0",
"libmime": "5.3.7",
"libqp": "2.1.1"
}
},
"node_modules/abbrev": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz",
@@ -2347,6 +2340,7 @@
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
"dev": true,
"license": "MIT",
"peer": true,
"bin": {
"acorn": "bin/acorn"
},
@@ -2519,6 +2513,12 @@
"safer-buffer": "^2.1.0"
}
},
"node_modules/async": {
"version": "3.2.6",
"resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz",
"integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==",
"license": "MIT"
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
@@ -2813,6 +2813,7 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"baseline-browser-mapping": "^2.9.0",
"caniuse-lite": "^1.0.30001759",
@@ -2898,6 +2899,7 @@
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
"integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.2",
@@ -3090,6 +3092,19 @@
"dev": true,
"license": "MIT"
},
"node_modules/color": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/color/-/color-5.0.3.tgz",
"integrity": "sha512-ezmVcLR3xAVp8kYOm4GS45ZLLgIE6SPAFoduLr6hTDajwb3KZ2F46gulK3XpcwRFb5KKGCSezCBAY4Dw4HsyXA==",
"license": "MIT",
"dependencies": {
"color-convert": "^3.1.3",
"color-string": "^2.1.3"
},
"engines": {
"node": ">=18"
}
},
"node_modules/color-convert": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
@@ -3108,6 +3123,48 @@
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"license": "MIT"
},
"node_modules/color-string": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/color-string/-/color-string-2.1.4.tgz",
"integrity": "sha512-Bb6Cq8oq0IjDOe8wJmi4JeNn763Xs9cfrBcaylK1tPypWzyoy2G3l90v9k64kjphl/ZJjPIShFztenRomi8WTg==",
"license": "MIT",
"dependencies": {
"color-name": "^2.0.0"
},
"engines": {
"node": ">=18"
}
},
"node_modules/color-string/node_modules/color-name": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-2.1.0.tgz",
"integrity": "sha512-1bPaDNFm0axzE4MEAzKPuqKWeRaT43U/hyxKPBdqTfmPF+d6n7FSoTFxLVULUJOmiLp01KjhIPPH+HrXZJN4Rg==",
"license": "MIT",
"engines": {
"node": ">=12.20"
}
},
"node_modules/color/node_modules/color-convert": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-3.1.3.tgz",
"integrity": "sha512-fasDH2ont2GqF5HpyO4w0+BcewlhHEZOFn9c1ckZdHpJ56Qb7MHhH/IcJZbBGgvdtwdwNbLvxiBEdg336iA9Sg==",
"license": "MIT",
"dependencies": {
"color-name": "^2.0.0"
},
"engines": {
"node": ">=14.6"
}
},
"node_modules/color/node_modules/color-name": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-2.1.0.tgz",
"integrity": "sha512-1bPaDNFm0axzE4MEAzKPuqKWeRaT43U/hyxKPBdqTfmPF+d6n7FSoTFxLVULUJOmiLp01KjhIPPH+HrXZJN4Rg==",
"license": "MIT",
"engines": {
"node": ">=12.20"
}
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
@@ -3509,14 +3566,11 @@
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
"license": "MIT"
},
"node_modules/encoding-japanese": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/encoding-japanese/-/encoding-japanese-2.2.0.tgz",
"integrity": "sha512-EuJWwlHPZ1LbADuKTClvHtwbaFn4rOD+dRAbWysqEOXRc2Uui0hJInNJrsdH0c+OhJA4nrCBdSkW4DD5YxAo6A==",
"license": "MIT",
"engines": {
"node": ">=8.10.0"
}
"node_modules/enabled": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/enabled/-/enabled-2.0.0.tgz",
"integrity": "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==",
"license": "MIT"
},
"node_modules/entities": {
"version": "4.5.0",
@@ -3614,6 +3668,7 @@
"integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.8.0",
"@eslint-community/regexpp": "^4.12.1",
@@ -3947,12 +4002,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/fast-sha256": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/fast-sha256/-/fast-sha256-1.3.0.tgz",
"integrity": "sha512-n11RGP/lrWEFI/bWdygLxhI+pVeo1ZYIVwvvPkW7azl/rOy+F3HYRZ2K5zeE9mmkhQppyv9sQFx0JM9UabnpPQ==",
"license": "Unlicense"
},
"node_modules/fast-uri": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz",
@@ -4030,49 +4079,6 @@
],
"license": "MIT"
},
"node_modules/fastify/node_modules/pino": {
"version": "10.3.0",
"resolved": "https://registry.npmjs.org/pino/-/pino-10.3.0.tgz",
"integrity": "sha512-0GNPNzHXBKw6U/InGe79A3Crzyk9bcSyObF9/Gfo9DLEf5qj5RF50RSjsu0W1rZ6ZqRGdzDFCRBQvi9/rSGPtA==",
"license": "MIT",
"dependencies": {
"@pinojs/redact": "^0.4.0",
"atomic-sleep": "^1.0.0",
"on-exit-leak-free": "^2.1.0",
"pino-abstract-transport": "^3.0.0",
"pino-std-serializers": "^7.0.0",
"process-warning": "^5.0.0",
"quick-format-unescaped": "^4.0.3",
"real-require": "^0.2.0",
"safe-stable-stringify": "^2.3.1",
"sonic-boom": "^4.0.1",
"thread-stream": "^4.0.0"
},
"bin": {
"pino": "bin.js"
}
},
"node_modules/fastify/node_modules/pino-abstract-transport": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-3.0.0.tgz",
"integrity": "sha512-wlfUczU+n7Hy/Ha5j9a/gZNy7We5+cXp8YL+X+PG8S0KXxw7n/JXA3c46Y0zQznIJ83URJiwy7Lh56WLokNuxg==",
"license": "MIT",
"dependencies": {
"split2": "^4.0.0"
}
},
"node_modules/fastify/node_modules/thread-stream": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-4.0.0.tgz",
"integrity": "sha512-4iMVL6HAINXWf1ZKZjIPcz5wYaOdPhtO8ATvZ+Xqp3BTdaqtAwQkNmKORqcIo5YkQqGXq5cwfswDwMqqQNrpJA==",
"license": "MIT",
"dependencies": {
"real-require": "^0.2.0"
},
"engines": {
"node": ">=20"
}
},
"node_modules/fastparallel": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/fastparallel/-/fastparallel-2.4.1.tgz",
@@ -4112,6 +4118,12 @@
"bser": "2.1.1"
}
},
"node_modules/fecha": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.3.tgz",
"integrity": "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==",
"license": "MIT"
},
"node_modules/file-entry-cache": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz",
@@ -4207,6 +4219,12 @@
"dev": true,
"license": "ISC"
},
"node_modules/fn.name": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz",
"integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==",
"license": "MIT"
},
"node_modules/follow-redirects": {
"version": "1.15.11",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
@@ -4561,15 +4579,6 @@
"node": ">= 0.4"
}
},
"node_modules/he": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
"integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
"license": "MIT",
"bin": {
"he": "bin/he"
}
},
"node_modules/helmet": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/helmet/-/helmet-8.1.0.tgz",
@@ -4642,22 +4651,6 @@
"node": ">=10.17.0"
}
},
"node_modules/iconv-lite": {
"version": "0.7.2",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.2.tgz",
"integrity": "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==",
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/ieee754": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
@@ -4891,7 +4884,6 @@
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -4998,6 +4990,7 @@
"integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@jest/core": "^29.7.0",
"@jest/types": "^29.6.3",
@@ -5780,6 +5773,12 @@
"node": ">=6"
}
},
"node_modules/kuler": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/kuler/-/kuler-2.0.0.tgz",
"integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==",
"license": "MIT"
},
"node_modules/leac": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/leac/-/leac-0.6.0.tgz",
@@ -5813,42 +5812,6 @@
"node": ">= 0.8.0"
}
},
"node_modules/libbase64": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/libbase64/-/libbase64-1.3.0.tgz",
"integrity": "sha512-GgOXd0Eo6phYgh0DJtjQ2tO8dc0IVINtZJeARPeiIJqge+HdsWSuaDTe8ztQ7j/cONByDZ3zeB325AHiv5O0dg==",
"license": "MIT"
},
"node_modules/libmime": {
"version": "5.3.7",
"resolved": "https://registry.npmjs.org/libmime/-/libmime-5.3.7.tgz",
"integrity": "sha512-FlDb3Wtha8P01kTL3P9M+ZDNDWPKPmKHWaU/cG/lg5pfuAwdflVpZE+wm9m7pKmC5ww6s+zTxBKS1p6yl3KpSw==",
"license": "MIT",
"dependencies": {
"encoding-japanese": "2.2.0",
"iconv-lite": "0.6.3",
"libbase64": "1.3.0",
"libqp": "2.1.1"
}
},
"node_modules/libmime/node_modules/iconv-lite": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/libqp": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/libqp/-/libqp-2.1.1.tgz",
"integrity": "sha512-0Wd+GPz1O134cP62YU2GTOPNA7Qgl09XwCqM5zpBv87ERCXdfDtyKXvV7c9U22yWJh44QZqBocFnXN11K96qow==",
"license": "MIT"
},
"node_modules/light-my-request": {
"version": "6.6.0",
"resolved": "https://registry.npmjs.org/light-my-request/-/light-my-request-6.6.0.tgz",
@@ -5893,15 +5856,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/linkify-it": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz",
"integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==",
"license": "MIT",
"dependencies": {
"uc.micro": "^2.0.0"
}
},
"node_modules/locate-path": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
@@ -5944,12 +5898,28 @@
"dev": true,
"license": "MIT"
},
"node_modules/logform": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/logform/-/logform-2.7.0.tgz",
"integrity": "sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ==",
"license": "MIT",
"dependencies": {
"@colors/colors": "1.6.0",
"@types/triple-beam": "^1.3.2",
"fecha": "^4.2.0",
"ms": "^2.1.1",
"safe-stable-stringify": "^2.3.1",
"triple-beam": "^1.3.0"
},
"engines": {
"node": ">= 12.0.0"
}
},
"node_modules/loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
"integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
"license": "MIT",
"peer": true,
"dependencies": {
"js-tokens": "^3.0.0 || ^4.0.0"
},
@@ -5966,24 +5936,6 @@
"node": "20 || >=22"
}
},
"node_modules/mailparser": {
"version": "3.9.3",
"resolved": "https://registry.npmjs.org/mailparser/-/mailparser-3.9.3.tgz",
"integrity": "sha512-AnB0a3zROum6fLaa52L+/K2SoRJVyFDk78Ea6q1D0ofcZLxWEWDtsS1+OrVqKbV7r5dulKL/AwYQccFGAPpuYQ==",
"license": "MIT",
"dependencies": {
"@zone-eu/mailsplit": "5.4.8",
"encoding-japanese": "2.2.0",
"he": "1.2.0",
"html-to-text": "9.0.5",
"iconv-lite": "0.7.2",
"libmime": "5.3.7",
"linkify-it": "5.0.0",
"nodemailer": "7.0.13",
"punycode.js": "2.3.1",
"tlds": "1.261.0"
}
},
"node_modules/make-dir": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz",
@@ -6212,15 +6164,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/nodemailer": {
"version": "7.0.13",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.13.tgz",
"integrity": "sha512-PNDFSJdP+KFgdsG3ZzMXCgquO7I6McjY2vlqILjtJd0hy8wEvtugS9xKRF2NWlPNGxvLCXlTNIae4serI7dinw==",
"license": "MIT-0",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/nodemon": {
"version": "3.1.11",
"resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.11.tgz",
@@ -6315,6 +6258,7 @@
"version": "1.13.4",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz",
"integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -6348,6 +6292,15 @@
"wrappy": "1"
}
},
"node_modules/one-time": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/one-time/-/one-time-1.0.0.tgz",
"integrity": "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==",
"license": "MIT",
"dependencies": {
"fn.name": "1.x.x"
}
},
"node_modules/onetime": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz",
@@ -6569,6 +6522,7 @@
"resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz",
"integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==",
"license": "MIT",
"peer": true,
"dependencies": {
"pg-connection-string": "^2.9.1",
"pg-pool": "^3.10.1",
@@ -6674,9 +6628,9 @@
}
},
"node_modules/pino": {
"version": "9.14.0",
"resolved": "https://registry.npmjs.org/pino/-/pino-9.14.0.tgz",
"integrity": "sha512-8OEwKp5juEvb/MjpIc4hjqfgCNysrS94RIOMXYvpYCdm/jglrKEiAYmiumbmGhCvs+IcInsphYDFwqrjr7398w==",
"version": "10.1.0",
"resolved": "https://registry.npmjs.org/pino/-/pino-10.1.0.tgz",
"integrity": "sha512-0zZC2ygfdqvqK8zJIr1e+wT1T/L+LF6qvqvbzEQ6tiMAoTqEVK9a1K3YRu8HEUvGEvNqZyPJTtb2sNIoTkB83w==",
"license": "MIT",
"dependencies": {
"@pinojs/redact": "^0.4.0",
@@ -6934,15 +6888,6 @@
"node": ">=6"
}
},
"node_modules/punycode.js": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz",
"integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==",
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/pure-rand": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz",
@@ -6961,9 +6906,10 @@
"license": "MIT"
},
"node_modules/qs": {
"version": "6.14.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
"integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
"version": "6.14.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz",
"integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"side-channel": "^1.1.0"
@@ -7030,6 +6976,20 @@
"integrity": "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w==",
"license": "MIT"
},
"node_modules/readable-stream": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
"license": "MIT",
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/readable-web-to-node-stream": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.4.tgz",
@@ -7284,7 +7244,6 @@
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz",
"integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"loose-envify": "^1.1.0"
}
@@ -7360,6 +7319,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
"integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
@@ -7379,6 +7339,7 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz",
"integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
@@ -7395,6 +7356,7 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz",
"integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bound": "^1.0.2",
@@ -7413,6 +7375,7 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz",
"integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bound": "^1.0.2",
@@ -7511,6 +7474,15 @@
"dev": true,
"license": "BSD-3-Clause"
},
"node_modules/stack-trace": {
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz",
"integrity": "sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg==",
"license": "MIT",
"engines": {
"node": "*"
}
},
"node_modules/stack-utils": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz",
@@ -7540,16 +7512,6 @@
"integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==",
"license": "MIT"
},
"node_modules/standardwebhooks": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/standardwebhooks/-/standardwebhooks-1.0.0.tgz",
"integrity": "sha512-BbHGOQK9olHPMvQNHWul6MYlrRTAOKn03rOe4A8O3CLWhNf4YHBqq2HJKKC+sfqpxiBY52pNeesD6jIiLDz8jg==",
"license": "MIT",
"dependencies": {
"@stablelib/base64": "^1.0.0",
"fast-sha256": "^1.3.0"
}
},
"node_modules/steed": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/steed/-/steed-1.1.3.tgz",
@@ -7673,26 +7635,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/stripe": {
"version": "20.2.0",
"resolved": "https://registry.npmjs.org/stripe/-/stripe-20.2.0.tgz",
"integrity": "sha512-m8niTfdm3nPP/yQswRWMwQxqEUcTtB3RTJQ9oo6NINDzgi7aPOadsH/fPXIIfL1Sc5+lqQFKSk7WiO6CXmvaeA==",
"license": "MIT",
"dependencies": {
"qs": "^6.14.1"
},
"engines": {
"node": ">=16"
},
"peerDependencies": {
"@types/node": ">=16"
},
"peerDependenciesMeta": {
"@types/node": {
"optional": true
}
}
},
"node_modules/strtok3": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/strtok3/-/strtok3-6.3.0.tgz",
@@ -7771,29 +7713,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/svix": {
"version": "1.85.0",
"resolved": "https://registry.npmjs.org/svix/-/svix-1.85.0.tgz",
"integrity": "sha512-4OxNw++bnNay8SoBwESgzfjMnYmurS1qBX+luhzvljr6EAPn/hqqmkdCR1pbgIe1K1+BzKZEHjAKz9OYrKJYwQ==",
"license": "MIT",
"dependencies": {
"standardwebhooks": "1.0.0",
"uuid": "^10.0.0"
}
},
"node_modules/svix/node_modules/uuid": {
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz",
"integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==",
"funding": [
"https://github.com/sponsors/broofa",
"https://github.com/sponsors/ctavan"
],
"license": "MIT",
"bin": {
"uuid": "dist/bin/uuid"
}
},
"node_modules/tar": {
"version": "7.5.2",
"resolved": "https://registry.npmjs.org/tar/-/tar-7.5.2.tgz",
@@ -7834,6 +7753,12 @@
"node": ">=8"
}
},
"node_modules/text-hex": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz",
"integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==",
"license": "MIT"
},
"node_modules/thread-stream": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-3.1.0.tgz",
@@ -7884,6 +7809,7 @@
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
"dev": true,
"license": "MIT",
"peer": true,
"engines": {
"node": ">=12"
},
@@ -7891,15 +7817,6 @@
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/tlds": {
"version": "1.261.0",
"resolved": "https://registry.npmjs.org/tlds/-/tlds-1.261.0.tgz",
"integrity": "sha512-QXqwfEl9ddlGBaRFXIvNKK6OhipSiLXuRuLJX5DErz0o0Q0rYxulWLdFryTkV5PkdZct5iMInwYEGe/eR++1AA==",
"license": "MIT",
"bin": {
"tlds": "bin.js"
}
},
"node_modules/tmpl": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz",
@@ -7956,6 +7873,15 @@
"nodetouch": "bin/nodetouch.js"
}
},
"node_modules/triple-beam": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.4.1.tgz",
"integrity": "sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg==",
"license": "MIT",
"engines": {
"node": ">= 14.0.0"
}
},
"node_modules/ts-api-utils": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz",
@@ -8041,6 +7967,7 @@
"integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@cspotcode/source-map-support": "^0.8.0",
"@tsconfig/node10": "^1.0.7",
@@ -8128,6 +8055,7 @@
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
"dev": true,
"license": "Apache-2.0",
"peer": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
@@ -8160,12 +8088,6 @@
"typescript": ">=4.8.4 <6.0.0"
}
},
"node_modules/uc.micro": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz",
"integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==",
"license": "MIT"
},
"node_modules/uglify-js": {
"version": "3.19.3",
"resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz",
@@ -8234,6 +8156,12 @@
"punycode": "^2.1.0"
}
},
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
"license": "MIT"
},
"node_modules/uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
@@ -8290,6 +8218,42 @@
"node": ">= 8"
}
},
"node_modules/winston": {
"version": "3.19.0",
"resolved": "https://registry.npmjs.org/winston/-/winston-3.19.0.tgz",
"integrity": "sha512-LZNJgPzfKR+/J3cHkxcpHKpKKvGfDZVPS4hfJCc4cCG0CgYzvlD6yE/S3CIL/Yt91ak327YCpiF/0MyeZHEHKA==",
"license": "MIT",
"dependencies": {
"@colors/colors": "^1.6.0",
"@dabh/diagnostics": "^2.0.8",
"async": "^3.2.3",
"is-stream": "^2.0.0",
"logform": "^2.7.0",
"one-time": "^1.0.0",
"readable-stream": "^3.4.0",
"safe-stable-stringify": "^2.3.1",
"stack-trace": "0.0.x",
"triple-beam": "^1.3.0",
"winston-transport": "^4.9.0"
},
"engines": {
"node": ">= 12.0.0"
}
},
"node_modules/winston-transport": {
"version": "4.9.0",
"resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.9.0.tgz",
"integrity": "sha512-8drMJ4rkgaPo1Me4zD/3WLfI/zPdA9o2IipKODunnGDcuqbHwjsbB79ylv04LCGGzU0xQ6vTznOMpQGaLhhm6A==",
"license": "MIT",
"dependencies": {
"logform": "^2.7.0",
"readable-stream": "^3.6.2",
"triple-beam": "^1.3.0"
},
"engines": {
"node": ">= 12.0.0"
}
},
"node_modules/word-wrap": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",

View File

@@ -18,50 +18,45 @@
"type-check": "tsc --noEmit"
},
"dependencies": {
"@fastify/autoload": "^6.0.1",
"pg": "^8.13.1",
"ioredis": "^5.4.2",
"@fastify/multipart": "^9.0.1",
"axios": "^1.7.9",
"opossum": "^8.0.0",
"winston": "^3.17.0",
"zod": "^3.24.1",
"js-yaml": "^4.1.0",
"fastify": "^5.2.0",
"@fastify/cors": "^11.2.0",
"@fastify/helmet": "^13.0.2",
"@fastify/jwt": "^10.0.0",
"@fastify/multipart": "^9.0.1",
"@fastify/type-provider-typebox": "^6.1.0",
"@sinclair/typebox": "^0.34.0",
"auth0": "^4.12.0",
"axios": "^1.7.9",
"fastify": "^5.2.0",
"fastify-plugin": "^5.0.1",
"file-type": "^16.5.4",
"form-data": "^4.0.0",
"@fastify/autoload": "^6.0.1",
"get-jwks": "^11.0.3",
"ioredis": "^5.4.2",
"js-yaml": "^4.1.0",
"mailparser": "^3.9.3",
"node-cron": "^3.0.3",
"opossum": "^8.0.0",
"pg": "^8.13.1",
"pino": "^9.6.0",
"file-type": "^16.5.4",
"resend": "^3.0.0",
"stripe": "^20.2.0",
"svix": "^1.85.0",
"tar": "^7.4.3",
"zod": "^3.24.1"
"node-cron": "^3.0.3",
"auth0": "^4.12.0",
"tar": "^7.4.3"
},
"devDependencies": {
"@eslint/js": "^9.17.0",
"@types/jest": "^29.5.10",
"@types/js-yaml": "^4.0.9",
"@types/mailparser": "^3.4.6",
"@types/node": "^22.0.0",
"@types/node-cron": "^3.0.11",
"@types/opossum": "^8.0.0",
"@types/pg": "^8.10.9",
"@types/supertest": "^6.0.3",
"eslint": "^9.17.0",
"jest": "^29.7.0",
"nodemon": "^3.1.9",
"supertest": "^7.1.4",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.1",
"@types/js-yaml": "^4.0.9",
"@types/node-cron": "^3.0.11",
"typescript": "^5.7.2",
"ts-node": "^10.9.1",
"nodemon": "^3.1.9",
"jest": "^29.7.0",
"@types/jest": "^29.5.10",
"ts-jest": "^29.1.1",
"supertest": "^7.1.4",
"@types/supertest": "^6.0.3",
"@types/opossum": "^8.0.0",
"eslint": "^9.17.0",
"@eslint/js": "^9.17.0",
"typescript-eslint": "^8.18.1"
}
}

View File

@@ -1,10 +0,0 @@
# _system/
## Subdirectories
| Directory | What | When to read |
| --------- | ---- | ------------ |
| `cli/` | CLI commands and tools | Running backend CLI commands |
| `migrations/` | Database migration runner | Running or writing migrations |
| `schema/` | Database schema generation | Schema export, documentation |
| `scripts/` | System utility scripts | Database maintenance, automation |

View File

@@ -17,8 +17,7 @@ const pool = new Pool({
const MIGRATION_ORDER = [
'features/vehicles', // Primary entity, defines update_updated_at_column()
'features/platform', // Normalized make/model/trim schema for dropdowns
'features/user-profile', // User profile management; needed by documents migration
'features/documents', // Depends on vehicles, user-profile; provides documents table
'features/documents', // Depends on vehicles; provides documents table
'core/user-preferences', // Depends on update_updated_at_column()
'features/fuel-logs', // Depends on vehicles
'features/maintenance', // Depends on vehicles
@@ -26,12 +25,8 @@ const MIGRATION_ORDER = [
'features/admin', // Admin role management and oversight; depends on update_updated_at_column()
'features/backup', // Admin backup feature; depends on update_updated_at_column()
'features/notifications', // Depends on maintenance and documents
'features/email-ingestion', // Depends on documents, notifications (extends email_templates)
'features/user-profile', // User profile management; independent
'features/terms-agreement', // Terms & Conditions acceptance audit trail
'features/audit-log', // Centralized audit logging; independent
'features/ownership-costs', // Depends on vehicles and documents; TCO recurring costs
'features/subscriptions', // Stripe subscriptions; depends on user-profile, vehicles
'core/identity-migration', // Cross-cutting UUID migration; must run after all feature tables exist
];
// Base directory where migrations are copied inside the image (set by Dockerfile)

View File

@@ -10,7 +10,6 @@ import fastifyMultipart from '@fastify/multipart';
// Core plugins
import authPlugin from './core/plugins/auth.plugin';
import adminGuardPlugin, { setAdminGuardPool } from './core/plugins/admin-guard.plugin';
import tierGuardPlugin from './core/plugins/tier-guard.plugin';
import loggingPlugin from './core/plugins/logging.plugin';
import errorPlugin from './core/plugins/error.plugin';
import { appConfig } from './core/config/config-loader';
@@ -25,19 +24,12 @@ import { documentsRoutes } from './features/documents/api/documents.routes';
import { maintenanceRoutes } from './features/maintenance';
import { platformRoutes } from './features/platform';
import { adminRoutes } from './features/admin/api/admin.routes';
import { auditLogRoutes } from './features/audit-log/api/audit-log.routes';
import { notificationsRoutes } from './features/notifications';
import { userProfileRoutes } from './features/user-profile';
import { onboardingRoutes } from './features/onboarding';
import { userPreferencesRoutes } from './features/user-preferences';
import { userExportRoutes } from './features/user-export';
import { userImportRoutes } from './features/user-import';
import { ownershipCostsRoutes } from './features/ownership-costs';
import { subscriptionsRoutes, donationsRoutes, webhooksRoutes } from './features/subscriptions';
import { ocrRoutes } from './features/ocr';
import { emailIngestionWebhookRoutes, emailIngestionRoutes } from './features/email-ingestion';
import { pool } from './core/config/database';
import { configRoutes } from './core/config/config.routes';
async function buildApp(): Promise<FastifyInstance> {
const app = Fastify({
@@ -88,16 +80,13 @@ async function buildApp(): Promise<FastifyInstance> {
await app.register(adminGuardPlugin);
setAdminGuardPool(pool);
// Tier guard plugin - for subscription tier enforcement
await app.register(tierGuardPlugin);
// Health check
app.get('/health', async (_request, reply) => {
return reply.code(200).send({
status: 'healthy',
timestamp: new Date().toISOString(),
environment: process.env['NODE_ENV'],
features: ['admin', 'auth', 'config', 'onboarding', 'vehicles', 'documents', 'fuel-logs', 'stations', 'maintenance', 'platform', 'notifications', 'user-profile', 'user-preferences', 'user-export', 'user-import', 'ownership-costs', 'subscriptions', 'donations', 'ocr', 'email-ingestion']
features: ['admin', 'auth', 'onboarding', 'vehicles', 'documents', 'fuel-logs', 'stations', 'maintenance', 'platform', 'notifications', 'user-profile', 'user-preferences', 'user-export']
});
});
@@ -107,7 +96,7 @@ async function buildApp(): Promise<FastifyInstance> {
status: 'healthy',
scope: 'api',
timestamp: new Date().toISOString(),
features: ['admin', 'auth', 'config', 'onboarding', 'vehicles', 'documents', 'fuel-logs', 'stations', 'maintenance', 'platform', 'notifications', 'user-profile', 'user-preferences', 'user-export', 'user-import', 'ownership-costs', 'subscriptions', 'donations', 'ocr', 'email-ingestion']
features: ['admin', 'auth', 'onboarding', 'vehicles', 'documents', 'fuel-logs', 'stations', 'maintenance', 'platform', 'notifications', 'user-profile', 'user-preferences', 'user-export']
});
});
@@ -143,20 +132,10 @@ async function buildApp(): Promise<FastifyInstance> {
await app.register(communityStationsRoutes, { prefix: '/api' });
await app.register(maintenanceRoutes, { prefix: '/api' });
await app.register(adminRoutes, { prefix: '/api' });
await app.register(auditLogRoutes, { prefix: '/api' });
await app.register(notificationsRoutes, { prefix: '/api' });
await app.register(userProfileRoutes, { prefix: '/api' });
await app.register(userPreferencesRoutes, { prefix: '/api' });
await app.register(userExportRoutes, { prefix: '/api' });
await app.register(userImportRoutes, { prefix: '/api' });
await app.register(ownershipCostsRoutes, { prefix: '/api' });
await app.register(subscriptionsRoutes, { prefix: '/api' });
await app.register(donationsRoutes, { prefix: '/api' });
await app.register(webhooksRoutes, { prefix: '/api' });
await app.register(emailIngestionWebhookRoutes, { prefix: '/api' });
await app.register(emailIngestionRoutes, { prefix: '/api' });
await app.register(ocrRoutes, { prefix: '/api' });
await app.register(configRoutes, { prefix: '/api' });
// 404 handler
app.setNotFoundHandler(async (_request, reply) => {

View File

@@ -11,10 +11,10 @@
| Directory | What | When to read |
| --------- | ---- | ------------ |
| `auth/` | Authentication utilities | JWT handling, user context |
| `config/` | Configuration loading (env, database, redis) and feature tier gating (fuelLog.receiptScan, document.scanMaintenanceSchedule, vehicle.vinDecode) | Environment setup, connection pools, tier requirements |
| `config/` | Configuration loading (env, database, redis) | Environment setup, connection pools |
| `logging/` | Winston structured logging | Log configuration, debugging |
| `middleware/` | Fastify middleware | Request processing, user extraction |
| `plugins/` | Fastify plugins (auth, error, logging, tier guard) | Plugin registration, hooks, tier gating |
| `plugins/` | Fastify plugins (auth, error, logging) | Plugin registration, hooks |
| `scheduler/` | Job scheduling infrastructure | Scheduled tasks, cron jobs |
| `storage/` | Storage abstraction and adapters | File storage, S3/filesystem |
| `user-preferences/` | User preferences data and migrations | User settings storage |

View File

@@ -41,6 +41,14 @@ const configSchema = z.object({
audience: z.string(),
}),
// External APIs configuration (optional)
external: z.object({
vpic: z.object({
url: z.string(),
timeout: z.string(),
}).optional(),
}).optional(),
// Service configuration
service: z.object({
name: z.string(),
@@ -118,10 +126,6 @@ const secretsSchema = z.object({
auth0_management_client_secret: z.string(),
google_maps_api_key: z.string(),
resend_api_key: z.string(),
resend_webhook_secret: z.string().optional(),
// Stripe secrets (API keys only - price IDs are config, not secrets)
stripe_secret_key: z.string(),
stripe_webhook_secret: z.string(),
});
type Config = z.infer<typeof configSchema>;
@@ -136,14 +140,6 @@ export interface AppConfiguration {
getRedisUrl(): string;
getAuth0Config(): { domain: string; audience: string; clientSecret: string };
getAuth0ManagementConfig(): { domain: string; clientId: string; clientSecret: string };
getResendConfig(): {
apiKey: string;
webhookSecret: string | undefined;
};
getStripeConfig(): {
secretKey: string;
webhookSecret: string;
};
}
class ConfigurationLoader {
@@ -182,9 +178,6 @@ class ConfigurationLoader {
'auth0-management-client-secret',
'google-maps-api-key',
'resend-api-key',
'resend-webhook-secret',
'stripe-secret-key',
'stripe-webhook-secret',
];
for (const secretFile of secretFiles) {
@@ -247,27 +240,10 @@ class ConfigurationLoader {
clientSecret: secrets.auth0_management_client_secret,
};
},
getResendConfig() {
return {
apiKey: secrets.resend_api_key,
webhookSecret: secrets.resend_webhook_secret,
};
},
getStripeConfig() {
return {
secretKey: secrets.stripe_secret_key,
webhookSecret: secrets.stripe_webhook_secret,
};
},
};
// Set Resend environment variables for EmailService and webhook verification
// Set RESEND_API_KEY in environment for EmailService
process.env['RESEND_API_KEY'] = secrets.resend_api_key;
if (secrets.resend_webhook_secret) {
process.env['RESEND_WEBHOOK_SECRET'] = secrets.resend_webhook_secret;
}
logger.info('Configuration loaded successfully', {
configSource: 'yaml',

View File

@@ -1,18 +0,0 @@
/**
* @ai-summary Configuration API routes
* @ai-context Exposes feature tier configuration for frontend consumption
*/
import { FastifyPluginAsync } from 'fastify';
import { getAllFeatureConfigs, TIER_LEVELS } from './feature-tiers';
/**
 * Registers configuration API routes.
 *
 * GET /config/feature-tiers — public endpoint (no auth hook attached) that
 * exposes the tier hierarchy and the gated-feature registry so the frontend
 * can drive upgrade prompts. Config is not sensitive, hence no auth required.
 */
export const configRoutes: FastifyPluginAsync = async (fastify) => {
  fastify.get('/config/feature-tiers', async (_request, reply) => {
    // Assemble the payload from the shared feature-tier module so the
    // response always mirrors the backend's enforcement rules.
    const payload = {
      tiers: TIER_LEVELS,
      features: getAllFeatureConfigs(),
    };
    return reply.code(200).send(payload);
  });
};

View File

@@ -1,160 +0,0 @@
/**
* @ai-summary Feature tier configuration and utilities
* @ai-context Defines feature-to-tier mapping for gating premium features
*/
import { SubscriptionTier } from '../../features/user-profile/domain/user-profile.types';
// Tier hierarchy: a larger number grants a broader access level.
export const TIER_LEVELS: Record<SubscriptionTier, number> = {
  free: 0,
  pro: 1,
  enterprise: 2,
} as const;
// Describes one gated feature: the minimum tier that unlocks it, a display
// name, and the upgrade messaging shown to users below that tier.
export interface FeatureConfig {
  minTier: SubscriptionTier;
  name: string;
  upgradePrompt: string;
}
// Feature registry - add new gated features here
export const FEATURE_TIERS: Record<string, FeatureConfig> = {
  'document.scanMaintenanceSchedule': {
    minTier: 'pro',
    name: 'Scan for Maintenance Schedule',
    upgradePrompt: 'Upgrade to Pro to automatically extract maintenance schedules from your vehicle manuals.',
  },
  'vehicle.vinDecode': {
    minTier: 'pro',
    name: 'VIN Decode',
    upgradePrompt: 'Upgrade to Pro to automatically decode VIN and populate vehicle details from the vehicle database.',
  },
  'fuelLog.receiptScan': {
    minTier: 'pro',
    name: 'Receipt Scan',
    upgradePrompt: 'Upgrade to Pro to scan fuel receipts and auto-fill your fuel log entries.',
  },
  'maintenance.receiptScan': {
    minTier: 'pro',
    name: 'Maintenance Receipt Scan',
    upgradePrompt: 'Upgrade to Pro to scan maintenance receipts and extract service details automatically.',
  },
} as const;
/**
 * Resolve a subscription tier to its numeric access level.
 * Unrecognized tiers fall back to 0 (treated as free).
 */
export function getTierLevel(tier: SubscriptionTier): number {
  const level = TIER_LEVELS[tier];
  return level === undefined ? 0 : level;
}
/**
 * Decide whether a user's tier unlocks a feature.
 * Tiers are cumulative: each tier includes every lower tier's features.
 * Feature keys absent from the registry are treated as ungated (fail open).
 */
export function canAccessFeature(userTier: SubscriptionTier, featureKey: string): boolean {
  const gate = FEATURE_TIERS[featureKey];
  if (gate === undefined) {
    // Unknown features are accessible by all (fail open for unlisted features).
    return true;
  }
  return getTierLevel(userTier) >= getTierLevel(gate.minTier);
}
/**
 * Look up the minimum tier required for a feature.
 * @returns the minimum tier, or null when the feature is not gated
 */
export function getRequiredTier(featureKey: string): SubscriptionTier | null {
  return FEATURE_TIERS[featureKey]?.minTier ?? null;
}
/**
 * Fetch the full configuration for a feature.
 * @returns undefined when the feature key is not registered
 */
export function getFeatureConfig(featureKey: string): FeatureConfig | undefined {
  return FEATURE_TIERS[featureKey];
}
/**
 * Snapshot every registered feature configuration (used by the config API).
 * Returns a shallow copy so callers cannot mutate the registry in place.
 */
export function getAllFeatureConfigs(): Record<string, FeatureConfig> {
  return Object.assign({}, FEATURE_TIERS);
}
// Maximum vehicles per subscription tier.
// A null entry means the tier has no cap (enterprise).
export const VEHICLE_LIMITS: Record<SubscriptionTier, number | null> = {
  free: 2,
  pro: 5,
  enterprise: null,
} as const;
/**
 * Look up the vehicle cap for a tier. Limits must be queryable at runtime
 * for both backend enforcement and frontend UI state.
 *
 * @param tier - User's subscription tier
 * @returns Maximum vehicles allowed, or null for unlimited (enterprise tier)
 */
export function getVehicleLimit(tier: SubscriptionTier): number | null {
  const cap = VEHICLE_LIMITS[tier];
  return cap === undefined ? null : cap;
}
/**
 * Decide whether another vehicle may be added under the tier's cap.
 *
 * @param tier - User's subscription tier
 * @param currentCount - Number of vehicles the user currently has
 * @returns true when below the cap (or the tier is uncapped), false at/over it
 */
export function canAddVehicle(tier: SubscriptionTier, currentCount: number): boolean {
  const cap = getVehicleLimit(tier);
  // A null cap means unlimited (enterprise).
  return cap === null ? true : currentCount < cap;
}
/**
 * Vehicle limit configuration with upgrade prompt.
 * Structure supports additional resource types in the future.
 */
export interface VehicleLimitConfig {
  limit: number | null;
  tier: SubscriptionTier;
  upgradePrompt: string;
}
/**
 * Build the vehicle-limit configuration (cap plus upgrade prompt) for a tier.
 *
 * @param tier - User's subscription tier
 * @returns Configuration with limit and upgrade prompt
 */
export function getVehicleLimitConfig(tier: SubscriptionTier): VehicleLimitConfig {
  // Default wording covers tiers with nothing left to upgrade to.
  let upgradePrompt = 'Upgrade to access additional vehicles.';
  if (tier === 'free') {
    upgradePrompt = 'Free tier is limited to 2 vehicles. Upgrade to Pro for up to 5 vehicles, or Enterprise for unlimited.';
  } else if (tier === 'pro') {
    upgradePrompt = 'Pro tier is limited to 5 vehicles. Upgrade to Enterprise for unlimited vehicles.';
  }
  return {
    limit: getVehicleLimit(tier),
    tier,
    upgradePrompt,
  };
}

View File

@@ -1,225 +0,0 @@
import {
TIER_LEVELS,
FEATURE_TIERS,
VEHICLE_LIMITS,
getTierLevel,
canAccessFeature,
getRequiredTier,
getFeatureConfig,
getAllFeatureConfigs,
getVehicleLimit,
canAddVehicle,
getVehicleLimitConfig,
} from '../feature-tiers';
describe('feature-tiers', () => {
describe('TIER_LEVELS', () => {
it('defines correct tier hierarchy', () => {
expect(TIER_LEVELS.free).toBe(0);
expect(TIER_LEVELS.pro).toBe(1);
expect(TIER_LEVELS.enterprise).toBe(2);
});
it('enterprise > pro > free', () => {
expect(TIER_LEVELS.enterprise).toBeGreaterThan(TIER_LEVELS.pro);
expect(TIER_LEVELS.pro).toBeGreaterThan(TIER_LEVELS.free);
});
});
describe('FEATURE_TIERS', () => {
it('includes scanMaintenanceSchedule feature', () => {
const feature = FEATURE_TIERS['document.scanMaintenanceSchedule'];
expect(feature).toBeDefined();
expect(feature.minTier).toBe('pro');
expect(feature.name).toBe('Scan for Maintenance Schedule');
expect(feature.upgradePrompt).toBeTruthy();
});
it('includes fuelLog.receiptScan feature', () => {
const feature = FEATURE_TIERS['fuelLog.receiptScan'];
expect(feature).toBeDefined();
expect(feature.minTier).toBe('pro');
expect(feature.name).toBe('Receipt Scan');
expect(feature.upgradePrompt).toBeTruthy();
});
});
describe('canAccessFeature - fuelLog.receiptScan', () => {
const featureKey = 'fuelLog.receiptScan';
it('denies access for free tier user', () => {
expect(canAccessFeature('free', featureKey)).toBe(false);
});
it('allows access for pro tier user', () => {
expect(canAccessFeature('pro', featureKey)).toBe(true);
});
it('allows access for enterprise tier user (inherits pro)', () => {
expect(canAccessFeature('enterprise', featureKey)).toBe(true);
});
});
describe('getTierLevel', () => {
it('returns correct level for each tier', () => {
expect(getTierLevel('free')).toBe(0);
expect(getTierLevel('pro')).toBe(1);
expect(getTierLevel('enterprise')).toBe(2);
});
it('returns 0 for unknown tier', () => {
expect(getTierLevel('unknown' as any)).toBe(0);
});
});
describe('canAccessFeature', () => {
const featureKey = 'document.scanMaintenanceSchedule';
it('denies access for free tier to pro feature', () => {
expect(canAccessFeature('free', featureKey)).toBe(false);
});
it('allows access for pro tier to pro feature', () => {
expect(canAccessFeature('pro', featureKey)).toBe(true);
});
it('allows access for enterprise tier to pro feature (inheritance)', () => {
expect(canAccessFeature('enterprise', featureKey)).toBe(true);
});
it('allows access for unknown feature (fail open)', () => {
expect(canAccessFeature('free', 'unknown.feature')).toBe(true);
expect(canAccessFeature('pro', 'unknown.feature')).toBe(true);
expect(canAccessFeature('enterprise', 'unknown.feature')).toBe(true);
});
});
describe('getRequiredTier', () => {
it('returns required tier for known feature', () => {
expect(getRequiredTier('document.scanMaintenanceSchedule')).toBe('pro');
});
it('returns null for unknown feature', () => {
expect(getRequiredTier('unknown.feature')).toBeNull();
});
});
describe('getFeatureConfig', () => {
it('returns full config for known feature', () => {
const config = getFeatureConfig('document.scanMaintenanceSchedule');
expect(config).toEqual({
minTier: 'pro',
name: 'Scan for Maintenance Schedule',
upgradePrompt: expect.any(String),
});
});
it('returns undefined for unknown feature', () => {
expect(getFeatureConfig('unknown.feature')).toBeUndefined();
});
});
// getAllFeatureConfigs must hand back a defensive copy so callers can
// never mutate the shared FEATURE_TIERS registry.
describe('getAllFeatureConfigs', () => {
  it('returns copy of all feature configs', () => {
    const snapshot = getAllFeatureConfigs();
    expect(snapshot['document.scanMaintenanceSchedule']).toBeDefined();
    // Mutate the returned object; the underlying registry must stay untouched.
    snapshot['test'] = { minTier: 'free', name: 'test', upgradePrompt: '' };
    expect(FEATURE_TIERS['test' as keyof typeof FEATURE_TIERS]).toBeUndefined();
  });
});
// Static per-tier vehicle caps; null means unlimited.
describe('VEHICLE_LIMITS', () => {
  it('defines correct limits for each tier', () => {
    const { free, pro, enterprise } = VEHICLE_LIMITS;
    expect(free).toBe(2);
    expect(pro).toBe(5);
    expect(enterprise).toBeNull();
  });
});
// getVehicleLimit mirrors VEHICLE_LIMITS for each tier.
describe('getVehicleLimit', () => {
  it('returns 2 for free tier', () => {
    const limit = getVehicleLimit('free');
    expect(limit).toBe(2);
  });

  it('returns 5 for pro tier', () => {
    const limit = getVehicleLimit('pro');
    expect(limit).toBe(5);
  });

  it('returns null for enterprise tier (unlimited)', () => {
    const limit = getVehicleLimit('enterprise');
    expect(limit).toBeNull();
  });
});
// canAddVehicle compares the current count against the tier's cap;
// the boundary (count === limit) is exclusive and enterprise has no cap.
describe('canAddVehicle', () => {
  describe('free tier (limit 2)', () => {
    it('returns true when below limit', () => {
      for (const count of [0, 1]) {
        expect(canAddVehicle('free', count)).toBe(true);
      }
    });

    it('returns false when at limit', () => {
      expect(canAddVehicle('free', 2)).toBe(false);
    });

    it('returns false when over limit', () => {
      expect(canAddVehicle('free', 3)).toBe(false);
    });
  });

  describe('pro tier (limit 5)', () => {
    it('returns true when below limit', () => {
      for (const count of [0, 4]) {
        expect(canAddVehicle('pro', count)).toBe(true);
      }
    });

    it('returns false when at limit', () => {
      expect(canAddVehicle('pro', 5)).toBe(false);
    });

    it('returns false when over limit', () => {
      expect(canAddVehicle('pro', 6)).toBe(false);
    });
  });

  describe('enterprise tier (unlimited)', () => {
    it('always returns true regardless of count', () => {
      for (const count of [0, 100, 999999]) {
        expect(canAddVehicle('enterprise', count)).toBe(true);
      }
    });
  });
});
// Shape of the per-tier vehicle-limit config: numeric cap (or null),
// echo of the tier, and tier-specific upgrade-prompt wording.
describe('getVehicleLimitConfig', () => {
  it('returns correct config for free tier', () => {
    const { limit, tier, upgradePrompt } = getVehicleLimitConfig('free');
    expect(limit).toBe(2);
    expect(tier).toBe('free');
    expect(upgradePrompt).toContain('Free tier is limited to 2 vehicles');
    expect(upgradePrompt).toContain('Pro');
    expect(upgradePrompt).toContain('Enterprise');
  });

  it('returns correct config for pro tier', () => {
    const { limit, tier, upgradePrompt } = getVehicleLimitConfig('pro');
    expect(limit).toBe(5);
    expect(tier).toBe('pro');
    expect(upgradePrompt).toContain('Pro tier is limited to 5 vehicles');
    expect(upgradePrompt).toContain('Enterprise');
  });

  it('returns correct config for enterprise tier', () => {
    const { limit, tier, upgradePrompt } = getVehicleLimitConfig('enterprise');
    expect(limit).toBeNull();
    expect(tier).toBe('enterprise');
    expect(upgradePrompt).toBeTruthy();
  });

  it('provides default upgradePrompt fallback', () => {
    const { upgradePrompt } = getVehicleLimitConfig('enterprise');
    expect(upgradePrompt).toBe('Upgrade to access additional vehicles.');
  });
});
});

View File

@@ -1,404 +0,0 @@
-- Migration: 001_migrate_user_id_to_uuid.sql
-- Feature: identity-migration (cross-cutting)
-- Description: Migrate all user identity columns from VARCHAR(255) storing auth0_sub
-- to UUID referencing user_profiles.id. Admin tables restructured with UUID PKs.
-- Requires: All feature tables must exist (runs last in MIGRATION_ORDER)
-- NOTE: everything below runs inside one transaction; any failure rolls the
-- whole migration back, so the schema is never left half-migrated.
BEGIN;
-- ============================================================================
-- PHASE 1: Add new UUID columns alongside existing VARCHAR columns
-- ============================================================================
-- Columns are added NULLable here; NOT NULL constraints are applied in Phase 4
-- only after the Phase 2/3 backfills have populated them.
-- 1a. Feature tables (17 tables with user_id VARCHAR)
ALTER TABLE vehicles ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE fuel_logs ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE maintenance_records ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE maintenance_schedules ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE documents ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE notification_logs ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE user_notifications ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE user_preferences ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE saved_stations ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE audit_logs ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE ownership_costs ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE email_ingestion_queue ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE pending_vehicle_associations ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE subscriptions ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE donations ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE tier_vehicle_selections ADD COLUMN IF NOT EXISTS user_profile_id UUID;
ALTER TABLE terms_agreements ADD COLUMN IF NOT EXISTS user_profile_id UUID;
-- 1b. Special user-reference columns (submitted_by/reported_by store auth0_sub)
ALTER TABLE community_stations ADD COLUMN IF NOT EXISTS submitted_by_uuid UUID;
ALTER TABLE station_removal_reports ADD COLUMN IF NOT EXISTS reported_by_uuid UUID;
-- 1c. Admin table: add id UUID and user_profile_id UUID
ALTER TABLE admin_users ADD COLUMN IF NOT EXISTS id UUID;
ALTER TABLE admin_users ADD COLUMN IF NOT EXISTS user_profile_id UUID;
-- 1d. Admin-referencing columns: add UUID equivalents
ALTER TABLE admin_audit_logs ADD COLUMN IF NOT EXISTS actor_admin_uuid UUID;
ALTER TABLE admin_audit_logs ADD COLUMN IF NOT EXISTS target_admin_uuid UUID;
ALTER TABLE admin_users ADD COLUMN IF NOT EXISTS created_by_uuid UUID;
ALTER TABLE community_stations ADD COLUMN IF NOT EXISTS reviewed_by_uuid UUID;
ALTER TABLE backup_history ADD COLUMN IF NOT EXISTS created_by_uuid UUID;
ALTER TABLE platform_change_log ADD COLUMN IF NOT EXISTS changed_by_uuid UUID;
ALTER TABLE user_profiles ADD COLUMN IF NOT EXISTS deactivated_by_uuid UUID;
-- ============================================================================
-- PHASE 2: Backfill UUID values from user_profiles join
-- ============================================================================
-- Every UPDATE guards on "... IS NULL" so a re-run (or partial retry) never
-- overwrites values that were already backfilled.
-- 2a. Feature tables: map user_id (auth0_sub) -> user_profiles.id (UUID)
UPDATE vehicles SET user_profile_id = up.id
FROM user_profiles up WHERE vehicles.user_id = up.auth0_sub AND vehicles.user_profile_id IS NULL;
UPDATE fuel_logs SET user_profile_id = up.id
FROM user_profiles up WHERE fuel_logs.user_id = up.auth0_sub AND fuel_logs.user_profile_id IS NULL;
UPDATE maintenance_records SET user_profile_id = up.id
FROM user_profiles up WHERE maintenance_records.user_id = up.auth0_sub AND maintenance_records.user_profile_id IS NULL;
UPDATE maintenance_schedules SET user_profile_id = up.id
FROM user_profiles up WHERE maintenance_schedules.user_id = up.auth0_sub AND maintenance_schedules.user_profile_id IS NULL;
UPDATE documents SET user_profile_id = up.id
FROM user_profiles up WHERE documents.user_id = up.auth0_sub AND documents.user_profile_id IS NULL;
UPDATE notification_logs SET user_profile_id = up.id
FROM user_profiles up WHERE notification_logs.user_id = up.auth0_sub AND notification_logs.user_profile_id IS NULL;
UPDATE user_notifications SET user_profile_id = up.id
FROM user_profiles up WHERE user_notifications.user_id = up.auth0_sub AND user_notifications.user_profile_id IS NULL;
UPDATE user_preferences SET user_profile_id = up.id
FROM user_profiles up WHERE user_preferences.user_id = up.auth0_sub AND user_preferences.user_profile_id IS NULL;
-- 2a-fix. user_preferences has rows where user_id already contains user_profiles.id (UUID)
-- instead of auth0_sub. Match these directly by casting to UUID.
-- The regex pre-filter keeps the ::uuid cast from throwing on auth0_sub values.
UPDATE user_preferences SET user_profile_id = up.id
FROM user_profiles up
WHERE user_preferences.user_id ~ '^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$'
AND user_preferences.user_id::uuid = up.id
AND user_preferences.user_profile_id IS NULL;
-- Delete truly orphaned user_preferences (UUID user_id with no matching user_profile)
DELETE FROM user_preferences
WHERE user_profile_id IS NULL
AND user_id ~ '^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$'
AND NOT EXISTS (SELECT 1 FROM user_profiles WHERE id = user_preferences.user_id::uuid);
-- Deduplicate user_preferences: same user may have both an auth0_sub row and
-- a UUID row, both now mapping to the same user_profile_id. Keep the newest.
-- (Ties on updated_at are broken by keeping the row with the larger id.)
DELETE FROM user_preferences a
USING user_preferences b
WHERE a.user_profile_id = b.user_profile_id
AND a.user_profile_id IS NOT NULL
AND (a.updated_at < b.updated_at OR (a.updated_at = b.updated_at AND a.id < b.id));
UPDATE saved_stations SET user_profile_id = up.id
FROM user_profiles up WHERE saved_stations.user_id = up.auth0_sub AND saved_stations.user_profile_id IS NULL;
UPDATE audit_logs SET user_profile_id = up.id
FROM user_profiles up WHERE audit_logs.user_id = up.auth0_sub AND audit_logs.user_profile_id IS NULL;
UPDATE ownership_costs SET user_profile_id = up.id
FROM user_profiles up WHERE ownership_costs.user_id = up.auth0_sub AND ownership_costs.user_profile_id IS NULL;
UPDATE email_ingestion_queue SET user_profile_id = up.id
FROM user_profiles up WHERE email_ingestion_queue.user_id = up.auth0_sub AND email_ingestion_queue.user_profile_id IS NULL;
UPDATE pending_vehicle_associations SET user_profile_id = up.id
FROM user_profiles up WHERE pending_vehicle_associations.user_id = up.auth0_sub AND pending_vehicle_associations.user_profile_id IS NULL;
UPDATE subscriptions SET user_profile_id = up.id
FROM user_profiles up WHERE subscriptions.user_id = up.auth0_sub AND subscriptions.user_profile_id IS NULL;
UPDATE donations SET user_profile_id = up.id
FROM user_profiles up WHERE donations.user_id = up.auth0_sub AND donations.user_profile_id IS NULL;
UPDATE tier_vehicle_selections SET user_profile_id = up.id
FROM user_profiles up WHERE tier_vehicle_selections.user_id = up.auth0_sub AND tier_vehicle_selections.user_profile_id IS NULL;
UPDATE terms_agreements SET user_profile_id = up.id
FROM user_profiles up WHERE terms_agreements.user_id = up.auth0_sub AND terms_agreements.user_profile_id IS NULL;
-- 2b. Special user columns
UPDATE community_stations SET submitted_by_uuid = up.id
FROM user_profiles up WHERE community_stations.submitted_by = up.auth0_sub AND community_stations.submitted_by_uuid IS NULL;
UPDATE station_removal_reports SET reported_by_uuid = up.id
FROM user_profiles up WHERE station_removal_reports.reported_by = up.auth0_sub AND station_removal_reports.reported_by_uuid IS NULL;
-- ============================================================================
-- PHASE 3: Admin-specific transformations
-- ============================================================================
-- 3a. Create user_profiles entries for any admin_users that lack one
-- (guarantees the Phase 4 FK from admin_users.user_profile_id can be satisfied)
INSERT INTO user_profiles (auth0_sub, email)
SELECT au.auth0_sub, au.email
FROM admin_users au
WHERE NOT EXISTS (
SELECT 1 FROM user_profiles up WHERE up.auth0_sub = au.auth0_sub
)
ON CONFLICT (auth0_sub) DO NOTHING;
-- 3b. Populate admin_users.id (DEFAULT doesn't auto-fill on ALTER ADD COLUMN for existing rows)
UPDATE admin_users SET id = uuid_generate_v4() WHERE id IS NULL;
-- 3c. Backfill admin_users.user_profile_id from user_profiles join
UPDATE admin_users SET user_profile_id = up.id
FROM user_profiles up WHERE admin_users.auth0_sub = up.auth0_sub AND admin_users.user_profile_id IS NULL;
-- 3d. Backfill admin-referencing columns: map auth0_sub -> admin_users.id UUID
UPDATE admin_audit_logs SET actor_admin_uuid = au.id
FROM admin_users au WHERE admin_audit_logs.actor_admin_id = au.auth0_sub AND admin_audit_logs.actor_admin_uuid IS NULL;
UPDATE admin_audit_logs SET target_admin_uuid = au.id
FROM admin_users au WHERE admin_audit_logs.target_admin_id = au.auth0_sub AND admin_audit_logs.target_admin_uuid IS NULL;
-- Self-join: resolve which admin created each admin row.
UPDATE admin_users au SET created_by_uuid = creator.id
FROM admin_users creator WHERE au.created_by = creator.auth0_sub AND au.created_by_uuid IS NULL;
UPDATE community_stations SET reviewed_by_uuid = au.id
FROM admin_users au WHERE community_stations.reviewed_by = au.auth0_sub AND community_stations.reviewed_by_uuid IS NULL;
UPDATE backup_history SET created_by_uuid = au.id
FROM admin_users au WHERE backup_history.created_by = au.auth0_sub AND backup_history.created_by_uuid IS NULL;
UPDATE platform_change_log SET changed_by_uuid = au.id
FROM admin_users au WHERE platform_change_log.changed_by = au.auth0_sub AND platform_change_log.changed_by_uuid IS NULL;
UPDATE user_profiles SET deactivated_by_uuid = au.id
FROM admin_users au WHERE user_profiles.deactivated_by = au.auth0_sub AND user_profiles.deactivated_by_uuid IS NULL;
-- ============================================================================
-- PHASE 4: Add constraints
-- ============================================================================
-- 4a. Set NOT NULL on feature table UUID columns (audit_logs stays nullable)
-- Any row Phase 2 failed to backfill makes these statements fail and aborts
-- the transaction -- an intentional data-integrity check.
ALTER TABLE vehicles ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE fuel_logs ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE maintenance_records ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE maintenance_schedules ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE documents ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE notification_logs ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE user_notifications ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE user_preferences ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE saved_stations ALTER COLUMN user_profile_id SET NOT NULL;
-- audit_logs.user_profile_id stays NULLABLE (system actions have no user)
ALTER TABLE ownership_costs ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE email_ingestion_queue ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE pending_vehicle_associations ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE subscriptions ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE donations ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE tier_vehicle_selections ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE terms_agreements ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE community_stations ALTER COLUMN submitted_by_uuid SET NOT NULL;
ALTER TABLE station_removal_reports ALTER COLUMN reported_by_uuid SET NOT NULL;
-- 4b. Admin table NOT NULL constraints
ALTER TABLE admin_users ALTER COLUMN id SET NOT NULL;
ALTER TABLE admin_users ALTER COLUMN user_profile_id SET NOT NULL;
ALTER TABLE admin_audit_logs ALTER COLUMN actor_admin_uuid SET NOT NULL;
-- target_admin_uuid stays nullable (some actions have no target)
-- created_by_uuid stays nullable (bootstrap admin may not have a creator)
ALTER TABLE platform_change_log ALTER COLUMN changed_by_uuid SET NOT NULL;
-- 4c. Admin table PK transformation
-- (old natural-key PK is dropped and replaced by the new surrogate UUID id)
ALTER TABLE admin_users DROP CONSTRAINT admin_users_pkey;
ALTER TABLE admin_users ADD PRIMARY KEY (id);
-- 4d. Add FK constraints to user_profiles(id) with ON DELETE CASCADE
ALTER TABLE vehicles ADD CONSTRAINT fk_vehicles_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE fuel_logs ADD CONSTRAINT fk_fuel_logs_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE maintenance_records ADD CONSTRAINT fk_maintenance_records_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE maintenance_schedules ADD CONSTRAINT fk_maintenance_schedules_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE documents ADD CONSTRAINT fk_documents_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE notification_logs ADD CONSTRAINT fk_notification_logs_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE user_notifications ADD CONSTRAINT fk_user_notifications_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE user_preferences ADD CONSTRAINT fk_user_preferences_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE saved_stations ADD CONSTRAINT fk_saved_stations_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE audit_logs ADD CONSTRAINT fk_audit_logs_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE ownership_costs ADD CONSTRAINT fk_ownership_costs_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE email_ingestion_queue ADD CONSTRAINT fk_email_ingestion_queue_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE pending_vehicle_associations ADD CONSTRAINT fk_pending_vehicle_assoc_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE subscriptions ADD CONSTRAINT fk_subscriptions_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE donations ADD CONSTRAINT fk_donations_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE tier_vehicle_selections ADD CONSTRAINT fk_tier_vehicle_selections_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE terms_agreements ADD CONSTRAINT fk_terms_agreements_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE community_stations ADD CONSTRAINT fk_community_stations_submitted_by
FOREIGN KEY (submitted_by_uuid) REFERENCES user_profiles(id) ON DELETE CASCADE;
ALTER TABLE station_removal_reports ADD CONSTRAINT fk_station_removal_reports_reported_by
FOREIGN KEY (reported_by_uuid) REFERENCES user_profiles(id) ON DELETE CASCADE;
-- 4e. Admin FK constraints
ALTER TABLE admin_users ADD CONSTRAINT fk_admin_users_user_profile_id
FOREIGN KEY (user_profile_id) REFERENCES user_profiles(id);
ALTER TABLE admin_users ADD CONSTRAINT uq_admin_users_user_profile_id
UNIQUE (user_profile_id);
-- ============================================================================
-- PHASE 5: Drop old columns, rename new ones, recreate indexes
-- ============================================================================
-- Ordering matters throughout: constraints and indexes that reference the old
-- VARCHAR columns must be dropped before the columns themselves, and each
-- rename happens only after the old column of the same name is gone.
-- 5a. Drop old FK constraints on VARCHAR user_id columns
ALTER TABLE subscriptions DROP CONSTRAINT IF EXISTS fk_subscriptions_user_id;
ALTER TABLE donations DROP CONSTRAINT IF EXISTS fk_donations_user_id;
ALTER TABLE tier_vehicle_selections DROP CONSTRAINT IF EXISTS fk_tier_vehicle_selections_user_id;
-- 5b. Drop old UNIQUE constraints involving VARCHAR columns
ALTER TABLE vehicles DROP CONSTRAINT IF EXISTS unique_user_vin;
ALTER TABLE saved_stations DROP CONSTRAINT IF EXISTS unique_user_station;
ALTER TABLE user_preferences DROP CONSTRAINT IF EXISTS user_preferences_user_id_key;
ALTER TABLE station_removal_reports DROP CONSTRAINT IF EXISTS unique_user_station_report;
-- 5c. Drop old indexes on VARCHAR columns
DROP INDEX IF EXISTS idx_vehicles_user_id;
DROP INDEX IF EXISTS idx_fuel_logs_user_id;
DROP INDEX IF EXISTS idx_maintenance_records_user_id;
DROP INDEX IF EXISTS idx_maintenance_schedules_user_id;
DROP INDEX IF EXISTS idx_documents_user_id;
DROP INDEX IF EXISTS idx_documents_user_vehicle;
DROP INDEX IF EXISTS idx_notification_logs_user_id;
DROP INDEX IF EXISTS idx_user_notifications_user_id;
DROP INDEX IF EXISTS idx_user_notifications_unread;
DROP INDEX IF EXISTS idx_user_preferences_user_id;
DROP INDEX IF EXISTS idx_saved_stations_user_id;
DROP INDEX IF EXISTS idx_audit_logs_user_created;
DROP INDEX IF EXISTS idx_ownership_costs_user_id;
DROP INDEX IF EXISTS idx_email_ingestion_queue_user_id;
DROP INDEX IF EXISTS idx_pending_vehicle_assoc_user_id;
DROP INDEX IF EXISTS idx_subscriptions_user_id;
DROP INDEX IF EXISTS idx_donations_user_id;
DROP INDEX IF EXISTS idx_tier_vehicle_selections_user_id;
DROP INDEX IF EXISTS idx_terms_agreements_user_id;
DROP INDEX IF EXISTS idx_community_stations_submitted_by;
DROP INDEX IF EXISTS idx_removal_reports_reported_by;
DROP INDEX IF EXISTS idx_admin_audit_logs_actor_id;
DROP INDEX IF EXISTS idx_admin_audit_logs_target_id;
DROP INDEX IF EXISTS idx_platform_change_log_changed_by;
-- 5d. Drop old VARCHAR user_id columns from feature tables
ALTER TABLE vehicles DROP COLUMN user_id;
ALTER TABLE fuel_logs DROP COLUMN user_id;
ALTER TABLE maintenance_records DROP COLUMN user_id;
ALTER TABLE maintenance_schedules DROP COLUMN user_id;
ALTER TABLE documents DROP COLUMN user_id;
ALTER TABLE notification_logs DROP COLUMN user_id;
ALTER TABLE user_notifications DROP COLUMN user_id;
ALTER TABLE user_preferences DROP COLUMN user_id;
ALTER TABLE saved_stations DROP COLUMN user_id;
ALTER TABLE audit_logs DROP COLUMN user_id;
ALTER TABLE ownership_costs DROP COLUMN user_id;
ALTER TABLE email_ingestion_queue DROP COLUMN user_id;
ALTER TABLE pending_vehicle_associations DROP COLUMN user_id;
ALTER TABLE subscriptions DROP COLUMN user_id;
ALTER TABLE donations DROP COLUMN user_id;
ALTER TABLE tier_vehicle_selections DROP COLUMN user_id;
ALTER TABLE terms_agreements DROP COLUMN user_id;
-- 5e. Rename user_profile_id -> user_id in feature tables
ALTER TABLE vehicles RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE fuel_logs RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE maintenance_records RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE maintenance_schedules RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE documents RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE notification_logs RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE user_notifications RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE user_preferences RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE saved_stations RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE audit_logs RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE ownership_costs RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE email_ingestion_queue RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE pending_vehicle_associations RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE subscriptions RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE donations RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE tier_vehicle_selections RENAME COLUMN user_profile_id TO user_id;
ALTER TABLE terms_agreements RENAME COLUMN user_profile_id TO user_id;
-- 5f. Drop and rename special user columns
ALTER TABLE community_stations DROP COLUMN submitted_by;
ALTER TABLE community_stations RENAME COLUMN submitted_by_uuid TO submitted_by;
ALTER TABLE station_removal_reports DROP COLUMN reported_by;
ALTER TABLE station_removal_reports RENAME COLUMN reported_by_uuid TO reported_by;
-- 5g. Drop and rename admin-referencing columns
ALTER TABLE admin_users DROP COLUMN auth0_sub;
ALTER TABLE admin_users DROP COLUMN created_by;
ALTER TABLE admin_users RENAME COLUMN created_by_uuid TO created_by;
ALTER TABLE admin_audit_logs DROP COLUMN actor_admin_id;
ALTER TABLE admin_audit_logs DROP COLUMN target_admin_id;
ALTER TABLE admin_audit_logs RENAME COLUMN actor_admin_uuid TO actor_admin_id;
ALTER TABLE admin_audit_logs RENAME COLUMN target_admin_uuid TO target_admin_id;
ALTER TABLE community_stations DROP COLUMN reviewed_by;
ALTER TABLE community_stations RENAME COLUMN reviewed_by_uuid TO reviewed_by;
ALTER TABLE backup_history DROP COLUMN created_by;
ALTER TABLE backup_history RENAME COLUMN created_by_uuid TO created_by;
ALTER TABLE platform_change_log DROP COLUMN changed_by;
ALTER TABLE platform_change_log RENAME COLUMN changed_by_uuid TO changed_by;
ALTER TABLE user_profiles DROP COLUMN deactivated_by;
ALTER TABLE user_profiles RENAME COLUMN deactivated_by_uuid TO deactivated_by;
-- 5h. Recreate indexes on new UUID columns (feature tables)
CREATE INDEX idx_vehicles_user_id ON vehicles(user_id);
CREATE INDEX idx_fuel_logs_user_id ON fuel_logs(user_id);
CREATE INDEX idx_maintenance_records_user_id ON maintenance_records(user_id);
CREATE INDEX idx_maintenance_schedules_user_id ON maintenance_schedules(user_id);
CREATE INDEX idx_documents_user_id ON documents(user_id);
CREATE INDEX idx_documents_user_vehicle ON documents(user_id, vehicle_id);
CREATE INDEX idx_notification_logs_user_id ON notification_logs(user_id);
CREATE INDEX idx_user_notifications_user_id ON user_notifications(user_id);
CREATE INDEX idx_user_notifications_unread ON user_notifications(user_id, created_at DESC) WHERE is_read = false;
CREATE INDEX idx_user_preferences_user_id ON user_preferences(user_id);
CREATE INDEX idx_saved_stations_user_id ON saved_stations(user_id);
CREATE INDEX idx_audit_logs_user_created ON audit_logs(user_id, created_at DESC);
CREATE INDEX idx_ownership_costs_user_id ON ownership_costs(user_id);
CREATE INDEX idx_email_ingestion_queue_user_id ON email_ingestion_queue(user_id);
CREATE INDEX idx_pending_vehicle_assoc_user_id ON pending_vehicle_associations(user_id);
CREATE INDEX idx_subscriptions_user_id ON subscriptions(user_id);
CREATE INDEX idx_donations_user_id ON donations(user_id);
CREATE INDEX idx_tier_vehicle_selections_user_id ON tier_vehicle_selections(user_id);
CREATE INDEX idx_terms_agreements_user_id ON terms_agreements(user_id);
-- 5i. Recreate indexes on special columns
CREATE INDEX idx_community_stations_submitted_by ON community_stations(submitted_by);
CREATE INDEX idx_removal_reports_reported_by ON station_removal_reports(reported_by);
CREATE INDEX idx_admin_audit_logs_actor_id ON admin_audit_logs(actor_admin_id);
CREATE INDEX idx_admin_audit_logs_target_id ON admin_audit_logs(target_admin_id);
CREATE INDEX idx_platform_change_log_changed_by ON platform_change_log(changed_by);
-- 5j. Recreate UNIQUE constraints on new UUID columns
ALTER TABLE vehicles ADD CONSTRAINT unique_user_vin UNIQUE(user_id, vin);
ALTER TABLE saved_stations ADD CONSTRAINT unique_user_station UNIQUE(user_id, place_id);
ALTER TABLE user_preferences ADD CONSTRAINT user_preferences_user_id_key UNIQUE(user_id);
ALTER TABLE station_removal_reports ADD CONSTRAINT unique_user_station_report UNIQUE(station_id, reported_by);
COMMIT;

View File

@@ -1,42 +1,24 @@
/**
* @ai-summary Structured logging with Pino (Winston-compatible wrapper)
* @ai-context All features use this for consistent logging. API maintains Winston compatibility.
* @ai-summary Structured logging with Winston
* @ai-context All features use this for consistent logging
*/
import pino from 'pino';
import * as winston from 'winston';
type LogLevel = 'debug' | 'info' | 'warn' | 'error';
const validLevels: LogLevel[] = ['debug', 'info', 'warn', 'error'];
const rawLevel = (process.env.LOG_LEVEL?.toLowerCase() || 'info') as LogLevel;
const level = validLevels.includes(rawLevel) ? rawLevel : 'info';
if (process.env.LOG_LEVEL && rawLevel !== level) {
console.warn(`Invalid LOG_LEVEL "${process.env.LOG_LEVEL}", falling back to "info"`);
}
const pinoLogger = pino({
level,
formatters: {
level: (label) => ({ level: label }),
export const logger = winston.createLogger({
level: 'info',
format: winston.format.combine(
winston.format.timestamp(),
winston.format.errors({ stack: true }),
winston.format.json()
),
defaultMeta: {
service: 'motovaultpro-backend',
},
timestamp: pino.stdTimeFunctions.isoTime,
transports: [
new winston.transports.Console({
format: winston.format.json(),
}),
],
});
// Wrapper maintains logger.info(msg, meta) API for backward compatibility
export const logger = {
info: (msg: string, meta?: object) => pinoLogger.info(meta || {}, msg),
warn: (msg: string, meta?: object) => pinoLogger.warn(meta || {}, msg),
error: (msg: string, meta?: object) => pinoLogger.error(meta || {}, msg),
debug: (msg: string, meta?: object) => pinoLogger.debug(meta || {}, msg),
child: (bindings: object) => {
const childPino = pinoLogger.child(bindings);
return {
info: (msg: string, meta?: object) => childPino.info(meta || {}, msg),
warn: (msg: string, meta?: object) => childPino.warn(meta || {}, msg),
error: (msg: string, meta?: object) => childPino.error(meta || {}, msg),
debug: (msg: string, meta?: object) => childPino.debug(meta || {}, msg),
};
},
};
export default logger;
export default logger;

View File

@@ -1,191 +0,0 @@
import { FastifyRequest, FastifyReply } from 'fastify';
import { requireTier } from './require-tier';
// Mock the shared logger module so middleware logging is silenced during
// tests; each log level becomes a jest.fn() whose calls could be asserted.
jest.mock('../logging/logger', () => ({
  logger: {
    error: jest.fn(),
    warn: jest.fn(),
    debug: jest.fn(),
    info: jest.fn(),
  },
}));
/**
 * Build a partial FastifyRequest carrying a userContext for the given
 * subscription tier. Called with no argument it simulates an
 * unauthenticated request (userContext is undefined).
 */
const createRequest = (subscriptionTier?: string): Partial<FastifyRequest> => {
  if (subscriptionTier === undefined) {
    return { userContext: undefined };
  }
  const userContext = {
    userId: '550e8400-e29b-41d4-a716-446655440000',
    email: 'user@example.com',
    emailVerified: true,
    onboardingCompleted: true,
    isAdmin: false,
    subscriptionTier: subscriptionTier as any,
  };
  return { userContext };
};
/**
 * Build a minimal FastifyReply stub: code() records the status, send()
 * records the payload and flips `sent`; both are chainable jest mocks.
 */
const createReply = (): Partial<FastifyReply> & { statusCode?: number; payload?: unknown } => {
  const reply: any = { sent: false };
  reply.code = jest.fn(function (this: any, status: number) {
    this.statusCode = status;
    return this;
  });
  reply.send = jest.fn(function (this: any, payload: unknown) {
    this.payload = payload;
    this.sent = true;
    return this;
  });
  return reply;
};
describe('requireTier middleware', () => {
// Reset call records on all jest.fn mocks between tests so assertions in
// one test never see calls made by another.
afterEach(() => {
jest.clearAllMocks();
});
// A pro subscriber must clear the pro-gated receipt-scan check without
// the middleware writing any response (it simply falls through).
describe('pro user passes fuelLog.receiptScan check', () => {
  it('allows pro user through without sending a response', async () => {
    const handler = requireTier('fuelLog.receiptScan');
    const reply = createReply();
    await handler(createRequest('pro') as FastifyRequest, reply as FastifyReply);
    expect(reply.code).not.toHaveBeenCalled();
    expect(reply.send).not.toHaveBeenCalled();
  });
});
// Enterprise inherits pro access, so every pro-gated feature must let an
// enterprise user through with no response written by the middleware.
describe('enterprise user passes all checks (tier inheritance)', () => {
  const expectAllowed = async (featureKey: string) => {
    const handler = requireTier(featureKey);
    const reply = createReply();
    await handler(createRequest('enterprise') as FastifyRequest, reply as FastifyReply);
    expect(reply.code).not.toHaveBeenCalled();
    expect(reply.send).not.toHaveBeenCalled();
  };

  it('allows enterprise user access to pro-gated features', async () => {
    await expectAllowed('fuelLog.receiptScan');
  });

  it('allows enterprise user access to document.scanMaintenanceSchedule', async () => {
    await expectAllowed('document.scanMaintenanceSchedule');
  });

  it('allows enterprise user access to vehicle.vinDecode', async () => {
    await expectAllowed('vehicle.vinDecode');
  });
});
// Free users hitting pro-gated features get a 403 with the TIER_REQUIRED
// payload that names the feature and carries an upgrade prompt.
describe('free user blocked with 403 and correct response body', () => {
  const expectBlocked = async (featureKey: string, featureName: string) => {
    const handler = requireTier(featureKey);
    const reply = createReply();
    await handler(createRequest('free') as FastifyRequest, reply as FastifyReply);
    expect(reply.code).toHaveBeenCalledWith(403);
    expect(reply.send).toHaveBeenCalledWith(
      expect.objectContaining({
        error: 'TIER_REQUIRED',
        requiredTier: 'pro',
        currentTier: 'free',
        featureName,
        upgradePrompt: expect.any(String),
      }),
    );
  };

  it('blocks free user from fuelLog.receiptScan', async () => {
    await expectBlocked('fuelLog.receiptScan', 'Receipt Scan');
  });

  it('blocks free user from document.scanMaintenanceSchedule', async () => {
    await expectBlocked('document.scanMaintenanceSchedule', 'Scan for Maintenance Schedule');
  });

  it('response body includes all required fields', async () => {
    const handler = requireTier('fuelLog.receiptScan');
    const reply = createReply();
    await handler(createRequest('free') as FastifyRequest, reply as FastifyReply);
    // Inspect the raw payload handed to send() for field-level checks.
    const body = (reply.send as jest.Mock).mock.calls[0][0];
    expect(body).toHaveProperty('requiredTier', 'pro');
    expect(body).toHaveProperty('currentTier', 'free');
    expect(body).toHaveProperty('featureName', 'Receipt Scan');
    expect(body).toHaveProperty('upgradePrompt');
    expect(typeof body.upgradePrompt).toBe('string');
    expect(body.upgradePrompt.length).toBeGreaterThan(0);
  });
});
describe('unknown feature key returns 500', () => {
  it('returns 500 INTERNAL_ERROR for unregistered feature', async () => {
    // A feature key missing from the registry is a server misconfiguration,
    // so the guard responds 500 rather than 403.
    const rep = createReply();
    await requireTier('unknown.nonexistent.feature')(
      createRequest('pro') as FastifyRequest,
      rep as FastifyReply,
    );
    expect(rep.code).toHaveBeenCalledWith(500);
    expect(rep.send).toHaveBeenCalledWith(
      expect.objectContaining({
        error: 'INTERNAL_ERROR',
        message: 'Unknown feature configuration',
      }),
    );
  });
});
describe('missing user.tier on request returns 403', () => {
  it('defaults to free tier when userContext is undefined', async () => {
    // No tier argument -> createRequest leaves userContext undefined; the
    // guard must then treat the caller as 'free' and deny the pro feature.
    const rep = createReply();
    await requireTier('fuelLog.receiptScan')(
      createRequest() as FastifyRequest,
      rep as FastifyReply,
    );
    expect(rep.code).toHaveBeenCalledWith(403);
    expect(rep.send).toHaveBeenCalledWith(
      expect.objectContaining({
        error: 'TIER_REQUIRED',
        currentTier: 'free',
        requiredTier: 'pro',
      }),
    );
  });
});
});

View File

@@ -1,64 +0,0 @@
/**
* @ai-summary Standalone tier guard middleware for route-level feature gating
* @ai-context Returns a Fastify preHandler that checks user subscription tier against feature requirements.
* Must be composed AFTER requireAuth in preHandler arrays.
*/
import { FastifyRequest, FastifyReply } from 'fastify';
import { canAccessFeature, getFeatureConfig } from '../config/feature-tiers';
import { logger } from '../logging/logger';
/**
 * Creates a preHandler middleware that enforces subscription tier requirements.
 *
 * Reads the user's tier from request.userContext.subscriptionTier (set by auth middleware).
 * Must be placed AFTER requireAuth in the preHandler chain.
 *
 * Usage:
 *   fastify.post('/premium-route', {
 *     preHandler: [requireAuth, requireTier('fuelLog.receiptScan')],
 *     handler: controller.method
 *   });
 *
 * Responses written by the guard:
 *   500 INTERNAL_ERROR  — featureKey is not in the FEATURE_TIERS registry (misconfiguration)
 *   403 TIER_REQUIRED   — the user's tier does not satisfy the feature's minTier
 *
 * @param featureKey - Key from FEATURE_TIERS registry (e.g. 'fuelLog.receiptScan')
 * @returns Fastify preHandler function
 */
export function requireTier(featureKey: string) {
  // Mask the user id for log output. The original expression
  // `userContext?.userId?.substring(0, 8) + '...'` evaluated to the string
  // "undefined..." when no user context was present; log 'anonymous' instead.
  const maskedUserId = (request: FastifyRequest): string =>
    request.userContext?.userId
      ? request.userContext.userId.substring(0, 8) + '...'
      : 'anonymous';

  return async (request: FastifyRequest, reply: FastifyReply): Promise<void> => {
    // Validate feature key exists in registry; an unknown key is a server-side
    // configuration error, not a client problem, hence 500.
    const featureConfig = getFeatureConfig(featureKey);
    if (!featureConfig) {
      logger.error('requireTier: unknown feature key', { featureKey });
      return reply.code(500).send({
        error: 'INTERNAL_ERROR',
        message: 'Unknown feature configuration',
      });
    }
    // Get user tier from userContext (populated by auth middleware).
    // Missing context or missing tier falls back to the most restrictive tier.
    const currentTier = request.userContext?.subscriptionTier || 'free';
    if (!canAccessFeature(currentTier, featureKey)) {
      logger.warn('requireTier: access denied', {
        userId: maskedUserId(request),
        currentTier,
        requiredTier: featureConfig.minTier,
        featureKey,
      });
      // 403 body doubles as upsell payload: the client renders upgradePrompt.
      return reply.code(403).send({
        error: 'TIER_REQUIRED',
        requiredTier: featureConfig.minTier,
        currentTier,
        featureName: featureConfig.name,
        upgradePrompt: featureConfig.upgradePrompt,
      });
    }
    logger.debug('requireTier: access granted', {
      userId: maskedUserId(request),
      currentTier,
      featureKey,
    });
  };
}

View File

@@ -58,9 +58,9 @@ const adminGuardPlugin: FastifyPluginAsync = async (fastify) => {
// Check if user is in admin_users table and not revoked
const query = `
SELECT id, user_profile_id, email, role, revoked_at
SELECT auth0_sub, email, role, revoked_at
FROM admin_users
WHERE user_profile_id = $1 AND revoked_at IS NULL
WHERE auth0_sub = $1 AND revoked_at IS NULL
LIMIT 1
`;

View File

@@ -12,7 +12,6 @@ import { logger } from '../logging/logger';
import { UserProfileRepository } from '../../features/user-profile/data/user-profile.repository';
import { pool } from '../config/database';
import { auth0ManagementClient } from '../auth/auth0-management.client';
import { SubscriptionTier } from '../../features/user-profile/domain/user-profile.types';
// Routes that don't require email verification
const VERIFICATION_EXEMPT_ROUTES = [
@@ -57,7 +56,6 @@ declare module 'fastify' {
onboardingCompleted: boolean;
isAdmin: boolean;
adminRecord?: any;
subscriptionTier: SubscriptionTier;
};
}
}
@@ -121,48 +119,43 @@ const authPlugin: FastifyPluginAsync = async (fastify) => {
try {
await request.jwtVerify();
// Two identifiers: auth0Sub (external, for Auth0 API) and userId (internal UUID, for all DB operations)
const auth0Sub = request.user?.sub;
if (!auth0Sub) {
const userId = request.user?.sub;
if (!userId) {
throw new Error('Missing user ID in JWT');
}
let userId: string = auth0Sub; // Default to auth0Sub; overwritten with UUID after profile load
// Get or create user profile from database
let email = request.user?.email;
let displayName: string | undefined;
let emailVerified = false;
let onboardingCompleted = false;
let subscriptionTier: SubscriptionTier = 'free';
try {
// If JWT doesn't have email, fetch from Auth0 Management API
if (!email || email.includes('@unknown.local')) {
try {
const auth0User = await auth0ManagementClient.getUser(auth0Sub);
const auth0User = await auth0ManagementClient.getUser(userId);
if (auth0User.email) {
email = auth0User.email;
emailVerified = auth0User.emailVerified;
logger.info('Fetched email from Auth0 Management API', {
userId: auth0Sub.substring(0, 8) + '...',
userId: userId.substring(0, 8) + '...',
hasEmail: true,
});
}
} catch (auth0Error) {
logger.warn('Failed to fetch user from Auth0 Management API', {
userId: auth0Sub.substring(0, 8) + '...',
userId: userId.substring(0, 8) + '...',
error: auth0Error instanceof Error ? auth0Error.message : 'Unknown error',
});
}
}
// Get or create profile with correct email
const profile = await profileRepo.getOrCreate(auth0Sub, {
email: email || `${auth0Sub}@unknown.local`,
const profile = await profileRepo.getOrCreate(userId, {
email: email || `${userId}@unknown.local`,
displayName: request.user?.name || request.user?.nickname,
});
userId = profile.id;
// If profile has placeholder email but we now have real email, update it
if (profile.email.includes('@unknown.local') && email && !email.includes('@unknown.local')) {
@@ -177,12 +170,11 @@ const authPlugin: FastifyPluginAsync = async (fastify) => {
displayName = profile.displayName || undefined;
emailVerified = profile.emailVerified;
onboardingCompleted = profile.onboardingCompletedAt !== null;
subscriptionTier = profile.subscriptionTier || 'free';
// Sync email verification status from Auth0 if needed
if (!emailVerified) {
try {
const isVerifiedInAuth0 = await auth0ManagementClient.checkEmailVerified(auth0Sub);
const isVerifiedInAuth0 = await auth0ManagementClient.checkEmailVerified(userId);
if (isVerifiedInAuth0 && !profile.emailVerified) {
await profileRepo.updateEmailVerified(userId, true);
emailVerified = true;
@@ -201,7 +193,7 @@ const authPlugin: FastifyPluginAsync = async (fastify) => {
} catch (profileError) {
// Log but don't fail auth if profile fetch fails
logger.warn('Failed to fetch user profile', {
userId: auth0Sub.substring(0, 8) + '...',
userId: userId.substring(0, 8) + '...',
error: profileError instanceof Error ? profileError.message : 'Unknown error',
});
// Fall back to JWT email if available
@@ -216,7 +208,6 @@ const authPlugin: FastifyPluginAsync = async (fastify) => {
emailVerified,
onboardingCompleted,
isAdmin: false, // Default to false; admin status checked by admin guard
subscriptionTier,
};
// Email verification guard - block unverified users from non-exempt routes

View File

@@ -1,24 +1,20 @@
/**
* @ai-summary Fastify request logging plugin with correlation IDs
* @ai-context Logs request/response details with timing and requestId
* @ai-summary Fastify request logging plugin
* @ai-context Logs request/response details with timing
*/
import { FastifyPluginAsync } from 'fastify';
import fp from 'fastify-plugin';
import { randomUUID } from 'crypto';
import { logger } from '../logging/logger';
const loggingPlugin: FastifyPluginAsync = async (fastify) => {
fastify.addHook('onRequest', async (request) => {
request.startTime = Date.now();
// Extract X-Request-Id from Traefik or generate new UUID
request.requestId = (request.headers['x-request-id'] as string) || randomUUID();
});
fastify.addHook('onResponse', async (request, reply) => {
const duration = Date.now() - (request.startTime || Date.now());
logger.info('Request processed', {
requestId: request.requestId,
method: request.method,
path: request.url,
status: reply.statusCode,
@@ -28,13 +24,13 @@ const loggingPlugin: FastifyPluginAsync = async (fastify) => {
});
};
// Augment FastifyRequest to include startTime
declare module 'fastify' {
interface FastifyRequest {
startTime?: number;
requestId?: string;
}
}
export default fp(loggingPlugin, {
name: 'logging-plugin'
});
});

View File

@@ -1,205 +0,0 @@
import Fastify, { FastifyInstance, FastifyRequest, FastifyReply } from 'fastify';
import tierGuardPlugin from '../tier-guard.plugin';
// Builds a minimal mocked FastifyReply. `code` records the status and `send`
// records the payload; both return the reply so fluent chaining keeps working,
// and `sent` flips to true once a body has been written.
const createReply = (): Partial<FastifyReply> & { payload?: unknown; statusCode?: number } => {
  const reply: any = { sent: false };
  reply.code = jest.fn((status: number) => {
    reply.statusCode = status;
    return reply;
  });
  reply.send = jest.fn((payload: unknown) => {
    reply.payload = payload;
    reply.sent = true;
    return reply;
  });
  return reply;
};
describe('tier guard plugin', () => {
let fastify: FastifyInstance;
let authenticateMock: jest.Mock;
beforeEach(async () => {
fastify = Fastify();
// Mock authenticate to set userContext
authenticateMock = jest.fn(async (request: FastifyRequest) => {
request.userContext = {
userId: '550e8400-e29b-41d4-a716-446655440000',
email: 'user@example.com',
emailVerified: true,
onboardingCompleted: true,
isAdmin: false,
subscriptionTier: 'free',
};
});
fastify.decorate('authenticate', authenticateMock);
await fastify.register(tierGuardPlugin);
});
afterEach(async () => {
await fastify.close();
jest.clearAllMocks();
});
describe('requireTier with minTier', () => {
  // Re-points the shared authenticate mock at a user with the given tier.
  const setUserTier = (tier: 'free' | 'pro' | 'enterprise') => {
    authenticateMock.mockImplementation(async (request: FastifyRequest) => {
      request.userContext = {
        userId: '550e8400-e29b-41d4-a716-446655440000',
        email: 'user@example.com',
        emailVerified: true,
        onboardingCompleted: true,
        isAdmin: false,
        subscriptionTier: tier,
      };
    });
  };
  it('allows access when user tier meets minimum', async () => {
    setUserTier('pro');
    const rep = createReply();
    await fastify.requireTier({ minTier: 'pro' })({} as FastifyRequest, rep as FastifyReply);
    expect(authenticateMock).toHaveBeenCalledTimes(1);
    expect(rep.code).not.toHaveBeenCalled();
    expect(rep.send).not.toHaveBeenCalled();
  });
  it('allows access when user tier exceeds minimum', async () => {
    setUserTier('enterprise');
    const rep = createReply();
    await fastify.requireTier({ minTier: 'pro' })({} as FastifyRequest, rep as FastifyReply);
    expect(rep.code).not.toHaveBeenCalled();
  });
  it('denies access when user tier is below minimum', async () => {
    // beforeEach default mock authenticates a free-tier user.
    const rep = createReply();
    await fastify.requireTier({ minTier: 'pro' })({} as FastifyRequest, rep as FastifyReply);
    expect(rep.code).toHaveBeenCalledWith(403);
    expect(rep.send).toHaveBeenCalledWith(
      expect.objectContaining({
        error: 'TIER_REQUIRED',
        requiredTier: 'pro',
        currentTier: 'free',
      })
    );
  });
});
describe('requireTier with featureKey', () => {
  it('denies free tier access to pro feature', async () => {
    const rep = createReply();
    const guard = fastify.requireTier({ featureKey: 'document.scanMaintenanceSchedule' });
    await guard({} as FastifyRequest, rep as FastifyReply);
    expect(rep.code).toHaveBeenCalledWith(403);
    expect(rep.send).toHaveBeenCalledWith(
      expect.objectContaining({
        error: 'TIER_REQUIRED',
        requiredTier: 'pro',
        currentTier: 'free',
        feature: 'document.scanMaintenanceSchedule',
        featureName: 'Scan for Maintenance Schedule',
      })
    );
  });
  it('allows pro tier access to pro feature', async () => {
    authenticateMock.mockImplementation(async (request: FastifyRequest) => {
      request.userContext = {
        userId: '550e8400-e29b-41d4-a716-446655440000',
        email: 'user@example.com',
        emailVerified: true,
        onboardingCompleted: true,
        isAdmin: false,
        subscriptionTier: 'pro',
      };
    });
    const rep = createReply();
    const guard = fastify.requireTier({ featureKey: 'document.scanMaintenanceSchedule' });
    await guard({} as FastifyRequest, rep as FastifyReply);
    expect(rep.code).not.toHaveBeenCalled();
  });
  it('allows access for unknown feature (fail open)', async () => {
    // Documents current behavior: a key absent from the registry is not blocked.
    const rep = createReply();
    const guard = fastify.requireTier({ featureKey: 'unknown.feature' });
    await guard({} as FastifyRequest, rep as FastifyReply);
    expect(rep.code).not.toHaveBeenCalled();
  });
});
describe('error handling', () => {
  it('returns 500 when authenticate handler is not a function', async () => {
    // Simulate a misconfigured server whose authenticate decorator is not callable.
    const brokenFastify = Fastify();
    brokenFastify.decorate('authenticate', 'not-a-function' as any);
    await brokenFastify.register(tierGuardPlugin);
    const rep = createReply();
    await brokenFastify.requireTier({ minTier: 'pro' })({} as FastifyRequest, rep as FastifyReply);
    expect(rep.code).toHaveBeenCalledWith(500);
    expect(rep.send).toHaveBeenCalledWith(
      expect.objectContaining({
        error: 'Internal server error',
        message: 'Authentication handler missing',
      })
    );
    await brokenFastify.close();
  });
  it('defaults to free tier when userContext is missing', async () => {
    authenticateMock.mockImplementation(async () => {
      // Intentionally leave request.userContext unset.
    });
    const rep = createReply();
    await fastify.requireTier({ minTier: 'pro' })({} as FastifyRequest, rep as FastifyReply);
    expect(rep.code).toHaveBeenCalledWith(403);
    expect(rep.send).toHaveBeenCalledWith(
      expect.objectContaining({
        currentTier: 'free',
      })
    );
  });
});
});

View File

@@ -1,126 +0,0 @@
/**
* @ai-summary Fastify tier authorization plugin
* @ai-context Enforces subscription tier requirements for protected routes
*/
import { FastifyPluginAsync, FastifyRequest, FastifyReply, FastifyInstance } from 'fastify';
import fp from 'fastify-plugin';
import { logger } from '../logging/logger';
import { SubscriptionTier } from '../../features/user-profile/domain/user-profile.types';
import { canAccessFeature, getFeatureConfig, getTierLevel } from '../config/feature-tiers';
// Tier check options
export interface TierCheckOptions {
minTier?: SubscriptionTier;
featureKey?: string;
}
declare module 'fastify' {
interface FastifyInstance {
requireTier: (options: TierCheckOptions) => (request: FastifyRequest, reply: FastifyReply) => Promise<void>;
}
}
const tierGuardPlugin: FastifyPluginAsync = async (fastify) => {
  /**
   * Creates a preHandler that enforces tier requirements
   *
   * Usage:
   * fastify.get('/premium-route', {
   *   preHandler: [fastify.requireTier({ minTier: 'pro' })],
   *   handler: controller.method
   * });
   *
   * Or with feature key:
   * fastify.post('/documents', {
   *   preHandler: [fastify.requireTier({ featureKey: 'document.scanMaintenanceSchedule' })],
   *   handler: controller.method
   * });
   *
   * Responses written by the returned handler:
   *   500 — authenticate decorator missing/not callable, or tier check threw
   *   403 TIER_REQUIRED — user's tier is below the requirement
   * With neither minTier nor featureKey supplied, access is always granted.
   */
  fastify.decorate('requireTier', function(this: FastifyInstance, options: TierCheckOptions) {
    const { minTier, featureKey } = options;
    return async (request: FastifyRequest, reply: FastifyReply): Promise<void> => {
      try {
        // Ensure user is authenticated first
        if (typeof this.authenticate !== 'function') {
          logger.error('Tier guard: authenticate handler missing');
          return reply.code(500).send({
            error: 'Internal server error',
            message: 'Authentication handler missing',
          });
        }
        // Run authentication inline; if it already wrote a response
        // (e.g. 401), stop here rather than writing a second one.
        await this.authenticate(request, reply);
        if (reply.sent) {
          return;
        }
        // Get user's subscription tier from context; missing context/tier
        // falls back to 'free' (most restrictive).
        const userTier = request.userContext?.subscriptionTier || 'free';
        // Determine required tier and check access
        let hasAccess = false;
        let requiredTier: SubscriptionTier = 'free';
        let upgradePrompt: string | undefined;
        let featureName: string | undefined;
        if (featureKey) {
          // Feature-based tier check
          // NOTE(review): if featureKey is not in the registry, the access
          // decision comes from canAccessFeature alone while requiredTier
          // falls back to 'pro' — confirm the intended fail-open behavior.
          hasAccess = canAccessFeature(userTier, featureKey);
          const config = getFeatureConfig(featureKey);
          requiredTier = config?.minTier || 'pro';
          upgradePrompt = config?.upgradePrompt;
          featureName = config?.name;
        } else if (minTier) {
          // Direct tier comparison
          hasAccess = getTierLevel(userTier) >= getTierLevel(minTier);
          requiredTier = minTier;
        } else {
          // No tier requirement specified - allow access
          hasAccess = true;
        }
        if (!hasAccess) {
          logger.warn('Tier guard: user tier insufficient', {
            userId: request.userContext?.userId?.substring(0, 8) + '...',
            userTier,
            requiredTier,
            featureKey,
          });
          // 403 body doubles as upsell payload for the client UI.
          return reply.code(403).send({
            error: 'TIER_REQUIRED',
            requiredTier,
            currentTier: userTier,
            feature: featureKey || null,
            featureName: featureName || null,
            upgradePrompt: upgradePrompt || `Upgrade to ${requiredTier} to access this feature.`,
          });
        }
        logger.debug('Tier guard: access granted', {
          userId: request.userContext?.userId?.substring(0, 8) + '...',
          userTier,
          featureKey,
        });
      } catch (error) {
        // Fail closed on unexpected errors (including authenticate throwing).
        logger.error('Tier guard: authorization check failed', {
          error: error instanceof Error ? error.message : 'Unknown error',
          userId: request.userContext?.userId?.substring(0, 8) + '...',
        });
        return reply.code(500).send({
          error: 'Internal server error',
          message: 'Tier check failed',
        });
      }
    };
  });
};
// Wrap with fastify-plugin so the requireTier decorator escapes encapsulation
// and is visible to every route in the app.
export default fp(tierGuardPlugin, {
  name: 'tier-guard-plugin',
  // Note: Requires auth-plugin to be registered first for authenticate decorator
  // Dependency check removed to allow testing with mock authenticate
});

View File

@@ -15,14 +15,6 @@ import {
processBackupRetention,
setBackupCleanupJobPool,
} from '../../features/backup/jobs/backup-cleanup.job';
import {
processAuditLogCleanup,
setAuditLogCleanupJobPool,
} from '../../features/audit-log/jobs/cleanup.job';
import {
processGracePeriodExpirations,
setGracePeriodJobPool,
} from '../../features/subscriptions/jobs/grace-period.job';
import { pool } from '../config/database';
let schedulerInitialized = false;
@@ -39,12 +31,6 @@ export function initializeScheduler(): void {
setBackupJobPool(pool);
setBackupCleanupJobPool(pool);
// Initialize audit log cleanup job pool
setAuditLogCleanupJobPool(pool);
// Initialize grace period job pool
setGracePeriodJobPool(pool);
// Daily notification processing at 8 AM
cron.schedule('0 8 * * *', async () => {
logger.info('Running scheduled notification job');
@@ -74,23 +60,6 @@ export function initializeScheduler(): void {
}
});
// Grace period expiration check at 2:30 AM daily
cron.schedule('30 2 * * *', async () => {
logger.info('Running grace period expiration job');
try {
const result = await processGracePeriodExpirations();
logger.info('Grace period job completed', {
processed: result.processed,
downgraded: result.downgraded,
errors: result.errors.length,
});
} catch (error) {
logger.error('Grace period job failed', {
error: error instanceof Error ? error.message : String(error)
});
}
});
// Check for scheduled backups every minute
cron.schedule('* * * * *', async () => {
logger.debug('Checking for scheduled backups');
@@ -121,30 +90,8 @@ export function initializeScheduler(): void {
}
});
// Audit log retention cleanup at 3 AM daily (90-day retention)
cron.schedule('0 3 * * *', async () => {
logger.info('Running audit log cleanup job');
try {
const result = await processAuditLogCleanup();
if (result.success) {
logger.info('Audit log cleanup job completed', {
deletedCount: result.deletedCount,
retentionDays: result.retentionDays,
});
} else {
logger.error('Audit log cleanup job failed', {
error: result.error,
});
}
} catch (error) {
logger.error('Audit log cleanup job failed', {
error: error instanceof Error ? error.message : String(error)
});
}
});
schedulerInitialized = true;
logger.info('Cron scheduler initialized - notification (8 AM), account purge (2 AM), grace period (2:30 AM), audit log cleanup (3 AM), backup check (every min), retention cleanup (4 AM)');
logger.info('Cron scheduler initialized - notification (8 AM), account purge (2 AM), backup check (every min), retention cleanup (4 AM)');
}
export function isSchedulerInitialized(): boolean {

View File

@@ -1,26 +1,23 @@
# backend/src/features/
Feature capsule directory. Each feature is 100% self-contained with api/, domain/, data/, migrations/, tests/.
## Subdirectories
| Directory | What | When to read |
| --------- | ---- | ------------ |
| `admin/` | Admin role management, catalog CRUD | Admin functionality, user oversight |
| `audit-log/` | Centralized audit logging | Cross-feature event logging, admin logs UI |
| `auth/` | Authentication endpoints | Login, logout, session management |
| `backup/` | Database backup and restore | Backup jobs, data export/import |
| `documents/` | Document storage and management | File uploads, document handling |
| `fuel-logs/` | Fuel consumption tracking | Fuel log CRUD, statistics |
| `maintenance/` | Maintenance record management | Service records, reminders |
| `notifications/` | Email and push notifications | Alert system, email templates |
| `ocr/` | OCR proxy to mvp-ocr service (VIN, receipt, manual extraction) | Image text extraction, receipt scanning, manual PDF extraction, async jobs |
| `onboarding/` | User onboarding flow | First-time user setup |
| `ownership-costs/` | Ownership cost tracking and reports | Cost aggregation, expense analysis |
| `platform/` | Vehicle data and VIN decoding | Make/model lookup, VIN validation |
| `stations/` | Gas station search and favorites | Google Maps integration, station data |
| `subscriptions/` | Stripe payment and billing | Subscription tiers, donations, webhooks |
| `terms-agreement/` | Terms & Conditions acceptance audit | Signup T&C, legal compliance |
| `user-export/` | User data export | GDPR compliance, data portability |
| `user-import/` | User data import | Restore from backup, data migration |
| `user-preferences/` | User preference management | User settings API |
| `user-profile/` | User profile management | Profile CRUD, avatar handling |
| `vehicles/` | Vehicle management | Vehicle CRUD, fleet operations |

View File

@@ -1,18 +0,0 @@
# admin/
## Files
| File | What | When to read |
| ---- | ---- | ------------ |
| `README.md` | Feature documentation | Understanding admin functionality |
## Subdirectories
| Directory | What | When to read |
| --------- | ---- | ------------ |
| `api/` | HTTP endpoints and routes | API changes |
| `domain/` | Business logic, services, types | Core admin logic |
| `data/` | Repository, database queries | Database operations |
| `migrations/` | Database schema | Schema changes |
| `scripts/` | Admin utility scripts | Admin automation |
| `tests/` | Unit and integration tests | Adding or modifying tests |

View File

@@ -6,12 +6,11 @@
import { FastifyRequest, FastifyReply } from 'fastify';
import { AdminService } from '../domain/admin.service';
import { AdminRepository } from '../data/admin.repository';
import { UserProfileRepository } from '../../user-profile/data/user-profile.repository';
import { pool } from '../../../core/config/database';
import { logger } from '../../../core/logging/logger';
import {
CreateAdminInput,
AdminIdInput,
AdminAuth0SubInput,
AuditLogsQueryInput,
BulkCreateAdminInput,
BulkRevokeAdminInput,
@@ -19,7 +18,7 @@ import {
} from './admin.validation';
import {
createAdminSchema,
adminIdSchema,
adminAuth0SubSchema,
auditLogsQuerySchema,
bulkCreateAdminSchema,
bulkRevokeAdminSchema,
@@ -34,12 +33,10 @@ import {
export class AdminController {
private adminService: AdminService;
private userProfileRepository: UserProfileRepository;
constructor() {
const repository = new AdminRepository(pool);
this.adminService = new AdminService(repository);
this.userProfileRepository = new UserProfileRepository(pool);
}
/**
@@ -50,18 +47,49 @@ export class AdminController {
const userId = request.userContext?.userId;
const userEmail = this.resolveUserEmail(request);
console.log('[DEBUG] Admin verify - userId:', userId);
console.log('[DEBUG] Admin verify - userEmail:', userEmail);
if (userEmail && request.userContext) {
request.userContext.email = userEmail.toLowerCase();
}
if (!userId) {
if (!userId && !userEmail) {
console.log('[DEBUG] Admin verify - No userId or userEmail, returning 401');
return reply.code(401).send({
error: 'Unauthorized',
message: 'User context missing'
});
}
const adminRecord = await this.adminService.getAdminByUserProfileId(userId);
let adminRecord = userId
? await this.adminService.getAdminByAuth0Sub(userId)
: null;
console.log('[DEBUG] Admin verify - adminRecord by auth0Sub:', adminRecord ? 'FOUND' : 'NOT FOUND');
// Fallback: attempt to resolve admin by email for legacy records
if (!adminRecord && userEmail) {
const emailMatch = await this.adminService.getAdminByEmail(userEmail.toLowerCase());
console.log('[DEBUG] Admin verify - emailMatch:', emailMatch ? 'FOUND' : 'NOT FOUND');
if (emailMatch) {
console.log('[DEBUG] Admin verify - emailMatch.auth0Sub:', emailMatch.auth0Sub);
console.log('[DEBUG] Admin verify - emailMatch.revokedAt:', emailMatch.revokedAt);
}
if (emailMatch && !emailMatch.revokedAt) {
// If the stored auth0Sub differs, link it to the authenticated user
if (userId && emailMatch.auth0Sub !== userId) {
console.log('[DEBUG] Admin verify - Calling linkAdminAuth0Sub to update auth0Sub');
adminRecord = await this.adminService.linkAdminAuth0Sub(userEmail, userId);
console.log('[DEBUG] Admin verify - adminRecord after link:', adminRecord ? 'SUCCESS' : 'FAILED');
} else {
console.log('[DEBUG] Admin verify - Using emailMatch as adminRecord');
adminRecord = emailMatch;
}
}
}
if (adminRecord && !adminRecord.revokedAt) {
if (request.userContext) {
@@ -69,11 +97,12 @@ export class AdminController {
request.userContext.adminRecord = adminRecord;
}
console.log('[DEBUG] Admin verify - Returning isAdmin: true');
// User is an active admin
return reply.code(200).send({
isAdmin: true,
adminRecord: {
id: adminRecord.id,
userProfileId: adminRecord.userProfileId,
auth0Sub: adminRecord.auth0Sub,
email: adminRecord.email,
role: adminRecord.role
}
@@ -85,11 +114,14 @@ export class AdminController {
request.userContext.adminRecord = undefined;
}
console.log('[DEBUG] Admin verify - Returning isAdmin: false');
// User is not an admin
return reply.code(200).send({
isAdmin: false,
adminRecord: null
});
} catch (error) {
console.log('[DEBUG] Admin verify - Error caught:', error instanceof Error ? error.message : 'Unknown error');
logger.error('Error verifying admin access', {
error: error instanceof Error ? error.message : 'Unknown error',
userId: request.userContext?.userId?.substring(0, 8) + '...'
@@ -107,9 +139,9 @@ export class AdminController {
*/
async listAdmins(request: FastifyRequest, reply: FastifyReply) {
try {
const actorUserProfileId = request.userContext?.userId;
const actorId = request.userContext?.userId;
if (!actorUserProfileId) {
if (!actorId) {
return reply.code(401).send({
error: 'Unauthorized',
message: 'User context missing'
@@ -118,6 +150,11 @@ export class AdminController {
const admins = await this.adminService.getAllAdmins();
// Log VIEW action
await this.adminService.getAdminByAuth0Sub(actorId);
// Note: Not logging VIEW as it would create excessive audit entries
// VIEW logging can be enabled if needed for compliance
return reply.code(200).send({
total: admins.length,
admins
@@ -125,7 +162,7 @@ export class AdminController {
} catch (error: any) {
logger.error('Error listing admins', {
error: error.message,
actorUserProfileId: request.userContext?.userId
actorId: request.userContext?.userId
});
return reply.code(500).send({
error: 'Internal server error',
@@ -142,24 +179,15 @@ export class AdminController {
reply: FastifyReply
) {
try {
const actorUserProfileId = request.userContext?.userId;
const actorId = request.userContext?.userId;
if (!actorUserProfileId) {
if (!actorId) {
return reply.code(401).send({
error: 'Unauthorized',
message: 'User context missing'
});
}
// Get actor's admin record to get admin ID
const actorAdmin = await this.adminService.getAdminByUserProfileId(actorUserProfileId);
if (!actorAdmin) {
return reply.code(403).send({
error: 'Forbidden',
message: 'Actor is not an admin'
});
}
// Validate request body
const validation = createAdminSchema.safeParse(request.body);
if (!validation.success) {
@@ -172,27 +200,23 @@ export class AdminController {
const { email, role } = validation.data;
// Look up user profile by email to get UUID
const userProfile = await this.userProfileRepository.getByEmail(email);
if (!userProfile) {
return reply.code(404).send({
error: 'Not Found',
message: `No user profile found with email ${email}. User must sign up first.`
});
}
// Generate auth0Sub for the new admin
// In production, this should be the actual Auth0 user ID
// For now, we'll use email-based identifier
const auth0Sub = `auth0|${email.replace('@', '_at_')}`;
const admin = await this.adminService.createAdmin(
email,
role,
userProfile.id,
actorAdmin.id
auth0Sub,
actorId
);
return reply.code(201).send(admin);
} catch (error: any) {
logger.error('Error creating admin', {
error: error.message,
actorUserProfileId: request.userContext?.userId
actorId: request.userContext?.userId
});
if (error.message.includes('already exists')) {
@@ -210,45 +234,36 @@ export class AdminController {
}
/**
* PATCH /api/admin/admins/:id/revoke - Revoke admin access
* PATCH /api/admin/admins/:auth0Sub/revoke - Revoke admin access
*/
async revokeAdmin(
request: FastifyRequest<{ Params: AdminIdInput }>,
request: FastifyRequest<{ Params: AdminAuth0SubInput }>,
reply: FastifyReply
) {
try {
const actorUserProfileId = request.userContext?.userId;
const actorId = request.userContext?.userId;
if (!actorUserProfileId) {
if (!actorId) {
return reply.code(401).send({
error: 'Unauthorized',
message: 'User context missing'
});
}
// Get actor's admin record
const actorAdmin = await this.adminService.getAdminByUserProfileId(actorUserProfileId);
if (!actorAdmin) {
return reply.code(403).send({
error: 'Forbidden',
message: 'Actor is not an admin'
});
}
// Validate params
const validation = adminIdSchema.safeParse(request.params);
const validation = adminAuth0SubSchema.safeParse(request.params);
if (!validation.success) {
return reply.code(400).send({
error: 'Bad Request',
message: 'Invalid admin ID parameter',
message: 'Invalid auth0Sub parameter',
details: validation.error.errors
});
}
const { id } = validation.data;
const { auth0Sub } = validation.data;
// Check if admin exists
const targetAdmin = await this.adminService.getAdminById(id);
const targetAdmin = await this.adminService.getAdminByAuth0Sub(auth0Sub);
if (!targetAdmin) {
return reply.code(404).send({
error: 'Not Found',
@@ -257,14 +272,14 @@ export class AdminController {
}
// Revoke the admin (service handles last admin check)
const admin = await this.adminService.revokeAdmin(id, actorAdmin.id);
const admin = await this.adminService.revokeAdmin(auth0Sub, actorId);
return reply.code(200).send(admin);
} catch (error: any) {
logger.error('Error revoking admin', {
error: error.message,
actorUserProfileId: request.userContext?.userId,
targetAdminId: (request.params as any).id
actorId: request.userContext?.userId,
targetAuth0Sub: request.params.auth0Sub
});
if (error.message.includes('Cannot revoke the last active admin')) {
@@ -289,45 +304,36 @@ export class AdminController {
}
/**
* PATCH /api/admin/admins/:id/reinstate - Restore revoked admin
* PATCH /api/admin/admins/:auth0Sub/reinstate - Restore revoked admin
*/
async reinstateAdmin(
request: FastifyRequest<{ Params: AdminIdInput }>,
request: FastifyRequest<{ Params: AdminAuth0SubInput }>,
reply: FastifyReply
) {
try {
const actorUserProfileId = request.userContext?.userId;
const actorId = request.userContext?.userId;
if (!actorUserProfileId) {
if (!actorId) {
return reply.code(401).send({
error: 'Unauthorized',
message: 'User context missing'
});
}
// Get actor's admin record
const actorAdmin = await this.adminService.getAdminByUserProfileId(actorUserProfileId);
if (!actorAdmin) {
return reply.code(403).send({
error: 'Forbidden',
message: 'Actor is not an admin'
});
}
// Validate params
const validation = adminIdSchema.safeParse(request.params);
const validation = adminAuth0SubSchema.safeParse(request.params);
if (!validation.success) {
return reply.code(400).send({
error: 'Bad Request',
message: 'Invalid admin ID parameter',
message: 'Invalid auth0Sub parameter',
details: validation.error.errors
});
}
const { id } = validation.data;
const { auth0Sub } = validation.data;
// Check if admin exists
const targetAdmin = await this.adminService.getAdminById(id);
const targetAdmin = await this.adminService.getAdminByAuth0Sub(auth0Sub);
if (!targetAdmin) {
return reply.code(404).send({
error: 'Not Found',
@@ -336,14 +342,14 @@ export class AdminController {
}
// Reinstate the admin
const admin = await this.adminService.reinstateAdmin(id, actorAdmin.id);
const admin = await this.adminService.reinstateAdmin(auth0Sub, actorId);
return reply.code(200).send(admin);
} catch (error: any) {
logger.error('Error reinstating admin', {
error: error.message,
actorUserProfileId: request.userContext?.userId,
targetAdminId: (request.params as any).id
actorId: request.userContext?.userId,
targetAuth0Sub: request.params.auth0Sub
});
if (error.message.includes('not found')) {
@@ -412,24 +418,15 @@ export class AdminController {
reply: FastifyReply
) {
try {
const actorUserProfileId = request.userContext?.userId;
const actorId = request.userContext?.userId;
if (!actorUserProfileId) {
if (!actorId) {
return reply.code(401).send({
error: 'Unauthorized',
message: 'User context missing'
});
}
// Get actor's admin record
const actorAdmin = await this.adminService.getAdminByUserProfileId(actorUserProfileId);
if (!actorAdmin) {
return reply.code(403).send({
error: 'Forbidden',
message: 'Actor is not an admin'
});
}
// Validate request body
const validation = bulkCreateAdminSchema.safeParse(request.body);
if (!validation.success) {
@@ -450,21 +447,15 @@ export class AdminController {
try {
const { email, role = 'admin' } = adminInput;
// Look up user profile by email to get UUID
const userProfile = await this.userProfileRepository.getByEmail(email);
if (!userProfile) {
failed.push({
email,
error: `No user profile found with email ${email}. User must sign up first.`
});
continue;
}
// Generate auth0Sub for the new admin
// In production, this should be the actual Auth0 user ID
const auth0Sub = `auth0|${email.replace('@', '_at_')}`;
const admin = await this.adminService.createAdmin(
email,
role,
userProfile.id,
actorAdmin.id
auth0Sub,
actorId
);
created.push(admin);
@@ -472,7 +463,7 @@ export class AdminController {
logger.error('Error creating admin in bulk operation', {
error: error.message,
email: adminInput.email,
actorAdminId: actorAdmin.id
actorId
});
failed.push({
@@ -494,7 +485,7 @@ export class AdminController {
} catch (error: any) {
logger.error('Error in bulk create admins', {
error: error.message,
actorUserProfileId: request.userContext?.userId
actorId: request.userContext?.userId
});
return reply.code(500).send({
@@ -512,24 +503,15 @@ export class AdminController {
reply: FastifyReply
) {
try {
const actorUserProfileId = request.userContext?.userId;
const actorId = request.userContext?.userId;
if (!actorUserProfileId) {
if (!actorId) {
return reply.code(401).send({
error: 'Unauthorized',
message: 'User context missing'
});
}
// Get actor's admin record
const actorAdmin = await this.adminService.getAdminByUserProfileId(actorUserProfileId);
if (!actorAdmin) {
return reply.code(403).send({
error: 'Forbidden',
message: 'Actor is not an admin'
});
}
// Validate request body
const validation = bulkRevokeAdminSchema.safeParse(request.body);
if (!validation.success) {
@@ -540,36 +522,37 @@ export class AdminController {
});
}
const { ids } = validation.data;
const { auth0Subs } = validation.data;
const revoked: AdminUser[] = [];
const failed: Array<{ id: string; error: string }> = [];
const failed: Array<{ auth0Sub: string; error: string }> = [];
// Process each revocation sequentially to maintain data consistency
for (const id of ids) {
for (const auth0Sub of auth0Subs) {
try {
// Check if admin exists
const targetAdmin = await this.adminService.getAdminById(id);
const targetAdmin = await this.adminService.getAdminByAuth0Sub(auth0Sub);
if (!targetAdmin) {
failed.push({
id,
auth0Sub,
error: 'Admin user not found'
});
continue;
}
// Attempt to revoke the admin
const admin = await this.adminService.revokeAdmin(id, actorAdmin.id);
const admin = await this.adminService.revokeAdmin(auth0Sub, actorId);
revoked.push(admin);
} catch (error: any) {
logger.error('Error revoking admin in bulk operation', {
error: error.message,
adminId: id,
actorAdminId: actorAdmin.id
auth0Sub,
actorId
});
// Special handling for "last admin" constraint
failed.push({
id,
auth0Sub,
error: error.message || 'Failed to revoke admin'
});
}
@@ -587,7 +570,7 @@ export class AdminController {
} catch (error: any) {
logger.error('Error in bulk revoke admins', {
error: error.message,
actorUserProfileId: request.userContext?.userId
actorId: request.userContext?.userId
});
return reply.code(500).send({
@@ -605,24 +588,15 @@ export class AdminController {
reply: FastifyReply
) {
try {
const actorUserProfileId = request.userContext?.userId;
const actorId = request.userContext?.userId;
if (!actorUserProfileId) {
if (!actorId) {
return reply.code(401).send({
error: 'Unauthorized',
message: 'User context missing'
});
}
// Get actor's admin record
const actorAdmin = await this.adminService.getAdminByUserProfileId(actorUserProfileId);
if (!actorAdmin) {
return reply.code(403).send({
error: 'Forbidden',
message: 'Actor is not an admin'
});
}
// Validate request body
const validation = bulkReinstateAdminSchema.safeParse(request.body);
if (!validation.success) {
@@ -633,36 +607,36 @@ export class AdminController {
});
}
const { ids } = validation.data;
const { auth0Subs } = validation.data;
const reinstated: AdminUser[] = [];
const failed: Array<{ id: string; error: string }> = [];
const failed: Array<{ auth0Sub: string; error: string }> = [];
// Process each reinstatement sequentially to maintain data consistency
for (const id of ids) {
for (const auth0Sub of auth0Subs) {
try {
// Check if admin exists
const targetAdmin = await this.adminService.getAdminById(id);
const targetAdmin = await this.adminService.getAdminByAuth0Sub(auth0Sub);
if (!targetAdmin) {
failed.push({
id,
auth0Sub,
error: 'Admin user not found'
});
continue;
}
// Attempt to reinstate the admin
const admin = await this.adminService.reinstateAdmin(id, actorAdmin.id);
const admin = await this.adminService.reinstateAdmin(auth0Sub, actorId);
reinstated.push(admin);
} catch (error: any) {
logger.error('Error reinstating admin in bulk operation', {
error: error.message,
adminId: id,
actorAdminId: actorAdmin.id
auth0Sub,
actorId
});
failed.push({
id,
auth0Sub,
error: error.message || 'Failed to reinstate admin'
});
}
@@ -680,7 +654,7 @@ export class AdminController {
} catch (error: any) {
logger.error('Error in bulk reinstate admins', {
error: error.message,
actorUserProfileId: request.userContext?.userId
actorId: request.userContext?.userId
});
return reply.code(500).send({
@@ -691,6 +665,9 @@ export class AdminController {
}
private resolveUserEmail(request: FastifyRequest): string | undefined {
console.log('[DEBUG] resolveUserEmail - request.userContext:', JSON.stringify(request.userContext, null, 2));
console.log('[DEBUG] resolveUserEmail - request.user:', JSON.stringify((request as any).user, null, 2));
const candidates: Array<string | undefined> = [
request.userContext?.email,
(request as any).user?.email,
@@ -699,11 +676,15 @@ export class AdminController {
(request as any).user?.preferred_username,
];
console.log('[DEBUG] resolveUserEmail - candidates:', candidates);
for (const value of candidates) {
if (typeof value === 'string' && value.includes('@')) {
console.log('[DEBUG] resolveUserEmail - found email:', value);
return value.trim();
}
}
console.log('[DEBUG] resolveUserEmail - no email found');
return undefined;
}
}

View File

@@ -8,7 +8,8 @@ import { AdminController } from './admin.controller';
import { UsersController } from './users.controller';
import {
CreateAdminInput,
AdminIdInput,
AdminAuth0SubInput,
AuditLogsQueryInput,
BulkCreateAdminInput,
BulkRevokeAdminInput,
BulkReinstateAdminInput,
@@ -17,7 +18,7 @@ import {
} from './admin.validation';
import {
ListUsersQueryInput,
UserIdInput,
UserAuth0SubInput,
UpdateTierInput,
DeactivateUserInput,
UpdateProfileInput,
@@ -65,19 +66,23 @@ export const adminRoutes: FastifyPluginAsync = async (fastify) => {
handler: adminController.createAdmin.bind(adminController)
});
// PATCH /api/admin/admins/:id/revoke - Revoke admin access
fastify.patch<{ Params: AdminIdInput }>('/admin/admins/:id/revoke', {
// PATCH /api/admin/admins/:auth0Sub/revoke - Revoke admin access
fastify.patch<{ Params: AdminAuth0SubInput }>('/admin/admins/:auth0Sub/revoke', {
preHandler: [fastify.requireAdmin],
handler: adminController.revokeAdmin.bind(adminController)
});
// PATCH /api/admin/admins/:id/reinstate - Restore revoked admin
fastify.patch<{ Params: AdminIdInput }>('/admin/admins/:id/reinstate', {
// PATCH /api/admin/admins/:auth0Sub/reinstate - Restore revoked admin
fastify.patch<{ Params: AdminAuth0SubInput }>('/admin/admins/:auth0Sub/reinstate', {
preHandler: [fastify.requireAdmin],
handler: adminController.reinstateAdmin.bind(adminController)
});
// NOTE: GET /api/admin/audit-logs moved to audit-log feature (centralized audit logging)
// GET /api/admin/audit-logs - Fetch audit trail
fastify.get<{ Querystring: AuditLogsQueryInput }>('/admin/audit-logs', {
preHandler: [fastify.requireAdmin],
handler: adminController.getAuditLogs.bind(adminController)
});
// POST /api/admin/admins/bulk - Create multiple admins
fastify.post<{ Body: BulkCreateAdminInput }>('/admin/admins/bulk', {
@@ -117,50 +122,50 @@ export const adminRoutes: FastifyPluginAsync = async (fastify) => {
handler: usersController.listUsers.bind(usersController)
});
// GET /api/admin/users/:userId - Get single user details
fastify.get<{ Params: UserIdInput }>('/admin/users/:userId', {
// GET /api/admin/users/:auth0Sub - Get single user details
fastify.get<{ Params: UserAuth0SubInput }>('/admin/users/:auth0Sub', {
preHandler: [fastify.requireAdmin],
handler: usersController.getUser.bind(usersController)
});
// GET /api/admin/users/:userId/vehicles - Get user's vehicles (admin view)
fastify.get<{ Params: UserIdInput }>('/admin/users/:userId/vehicles', {
// GET /api/admin/users/:auth0Sub/vehicles - Get user's vehicles (admin view)
fastify.get<{ Params: UserAuth0SubInput }>('/admin/users/:auth0Sub/vehicles', {
preHandler: [fastify.requireAdmin],
handler: usersController.getUserVehicles.bind(usersController)
});
// PATCH /api/admin/users/:userId/tier - Update subscription tier
fastify.patch<{ Params: UserIdInput; Body: UpdateTierInput }>('/admin/users/:userId/tier', {
// PATCH /api/admin/users/:auth0Sub/tier - Update subscription tier
fastify.patch<{ Params: UserAuth0SubInput; Body: UpdateTierInput }>('/admin/users/:auth0Sub/tier', {
preHandler: [fastify.requireAdmin],
handler: usersController.updateTier.bind(usersController)
});
// PATCH /api/admin/users/:userId/deactivate - Soft delete user
fastify.patch<{ Params: UserIdInput; Body: DeactivateUserInput }>('/admin/users/:userId/deactivate', {
// PATCH /api/admin/users/:auth0Sub/deactivate - Soft delete user
fastify.patch<{ Params: UserAuth0SubInput; Body: DeactivateUserInput }>('/admin/users/:auth0Sub/deactivate', {
preHandler: [fastify.requireAdmin],
handler: usersController.deactivateUser.bind(usersController)
});
// PATCH /api/admin/users/:userId/reactivate - Restore deactivated user
fastify.patch<{ Params: UserIdInput }>('/admin/users/:userId/reactivate', {
// PATCH /api/admin/users/:auth0Sub/reactivate - Restore deactivated user
fastify.patch<{ Params: UserAuth0SubInput }>('/admin/users/:auth0Sub/reactivate', {
preHandler: [fastify.requireAdmin],
handler: usersController.reactivateUser.bind(usersController)
});
// PATCH /api/admin/users/:userId/profile - Update user email/displayName
fastify.patch<{ Params: UserIdInput; Body: UpdateProfileInput }>('/admin/users/:userId/profile', {
// PATCH /api/admin/users/:auth0Sub/profile - Update user email/displayName
fastify.patch<{ Params: UserAuth0SubInput; Body: UpdateProfileInput }>('/admin/users/:auth0Sub/profile', {
preHandler: [fastify.requireAdmin],
handler: usersController.updateProfile.bind(usersController)
});
// PATCH /api/admin/users/:userId/promote - Promote user to admin
fastify.patch<{ Params: UserIdInput; Body: PromoteToAdminInput }>('/admin/users/:userId/promote', {
// PATCH /api/admin/users/:auth0Sub/promote - Promote user to admin
fastify.patch<{ Params: UserAuth0SubInput; Body: PromoteToAdminInput }>('/admin/users/:auth0Sub/promote', {
preHandler: [fastify.requireAdmin],
handler: usersController.promoteToAdmin.bind(usersController)
});
// DELETE /api/admin/users/:userId - Hard delete user (permanent)
fastify.delete<{ Params: UserIdInput }>('/admin/users/:userId', {
// DELETE /api/admin/users/:auth0Sub - Hard delete user (permanent)
fastify.delete<{ Params: UserAuth0SubInput }>('/admin/users/:auth0Sub', {
preHandler: [fastify.requireAdmin],
handler: usersController.hardDeleteUser.bind(usersController)
});

View File

@@ -10,8 +10,8 @@ export const createAdminSchema = z.object({
role: z.enum(['admin', 'super_admin']).default('admin'),
});
export const adminIdSchema = z.object({
id: z.string().uuid('Invalid admin ID format'),
export const adminAuth0SubSchema = z.object({
auth0Sub: z.string().min(1, 'auth0Sub is required'),
});
export const auditLogsQuerySchema = z.object({
@@ -29,14 +29,14 @@ export const bulkCreateAdminSchema = z.object({
});
export const bulkRevokeAdminSchema = z.object({
ids: z.array(z.string().uuid('Invalid admin ID format'))
.min(1, 'At least one admin ID must be provided')
auth0Subs: z.array(z.string().min(1, 'auth0Sub cannot be empty'))
.min(1, 'At least one auth0Sub must be provided')
.max(100, 'Maximum 100 admins per batch'),
});
export const bulkReinstateAdminSchema = z.object({
ids: z.array(z.string().uuid('Invalid admin ID format'))
.min(1, 'At least one admin ID must be provided')
auth0Subs: z.array(z.string().min(1, 'auth0Sub cannot be empty'))
.min(1, 'At least one auth0Sub must be provided')
.max(100, 'Maximum 100 admins per batch'),
});
@@ -49,7 +49,7 @@ export const bulkDeleteCatalogSchema = z.object({
});
export type CreateAdminInput = z.infer<typeof createAdminSchema>;
export type AdminIdInput = z.infer<typeof adminIdSchema>;
export type AdminAuth0SubInput = z.infer<typeof adminAuth0SubSchema>;
export type AuditLogsQueryInput = z.infer<typeof auditLogsQuerySchema>;
export type BulkCreateAdminInput = z.infer<typeof bulkCreateAdminSchema>;
export type BulkRevokeAdminInput = z.infer<typeof bulkRevokeAdminSchema>;

View File

@@ -7,20 +7,17 @@ import { FastifyRequest, FastifyReply } from 'fastify';
import { UserProfileService } from '../../user-profile/domain/user-profile.service';
import { UserProfileRepository } from '../../user-profile/data/user-profile.repository';
import { AdminRepository } from '../data/admin.repository';
import { SubscriptionsService } from '../../subscriptions/domain/subscriptions.service';
import { SubscriptionsRepository } from '../../subscriptions/data/subscriptions.repository';
import { StripeClient } from '../../subscriptions/external/stripe/stripe.client';
import { pool } from '../../../core/config/database';
import { logger } from '../../../core/logging/logger';
import {
listUsersQuerySchema,
userIdSchema,
userAuth0SubSchema,
updateTierSchema,
deactivateUserSchema,
updateProfileSchema,
promoteToAdminSchema,
ListUsersQueryInput,
UserIdInput,
UserAuth0SubInput,
UpdateTierInput,
DeactivateUserInput,
UpdateProfileInput,
@@ -31,22 +28,15 @@ import { AdminService } from '../domain/admin.service';
export class UsersController {
private userProfileService: UserProfileService;
private adminService: AdminService;
private subscriptionsService: SubscriptionsService;
private userProfileRepository: UserProfileRepository;
private adminRepository: AdminRepository;
constructor() {
this.userProfileRepository = new UserProfileRepository(pool);
this.adminRepository = new AdminRepository(pool);
const subscriptionsRepository = new SubscriptionsRepository(pool);
const stripeClient = new StripeClient();
const adminRepository = new AdminRepository(pool);
this.userProfileService = new UserProfileService(this.userProfileRepository);
this.userProfileService.setAdminRepository(this.adminRepository);
this.adminService = new AdminService(this.adminRepository);
// Admin feature depends on Subscriptions for tier management
// This is intentional - admin has oversight capabilities
this.subscriptionsService = new SubscriptionsService(subscriptionsRepository, stripeClient, pool);
this.userProfileService.setAdminRepository(adminRepository);
this.adminService = new AdminService(adminRepository);
}
/**
@@ -95,10 +85,10 @@ export class UsersController {
}
/**
* GET /api/admin/users/:userId/vehicles - Get user's vehicles (admin view)
* GET /api/admin/users/:auth0Sub/vehicles - Get user's vehicles (admin view)
*/
async getUserVehicles(
request: FastifyRequest<{ Params: UserIdInput }>,
request: FastifyRequest<{ Params: UserAuth0SubInput }>,
reply: FastifyReply
) {
try {
@@ -119,7 +109,7 @@ export class UsersController {
}
// Validate path param
const parseResult = userIdSchema.safeParse(request.params);
const parseResult = userAuth0SubSchema.safeParse(request.params);
if (!parseResult.success) {
return reply.code(400).send({
error: 'Validation error',
@@ -127,14 +117,14 @@ export class UsersController {
});
}
const { userId } = parseResult.data;
const vehicles = await this.userProfileRepository.getUserVehiclesForAdmin(userId);
const { auth0Sub } = parseResult.data;
const vehicles = await this.userProfileRepository.getUserVehiclesForAdmin(auth0Sub);
return reply.code(200).send({ vehicles });
} catch (error) {
logger.error('Error getting user vehicles', {
error: error instanceof Error ? error.message : 'Unknown error',
userId: (request.params as any)?.userId,
auth0Sub: request.params?.auth0Sub,
});
return reply.code(500).send({
@@ -186,10 +176,10 @@ export class UsersController {
}
/**
* GET /api/admin/users/:userId - Get single user details
* GET /api/admin/users/:auth0Sub - Get single user details
*/
async getUser(
request: FastifyRequest<{ Params: UserIdInput }>,
request: FastifyRequest<{ Params: UserAuth0SubInput }>,
reply: FastifyReply
) {
try {
@@ -202,7 +192,7 @@ export class UsersController {
}
// Validate path param
const parseResult = userIdSchema.safeParse(request.params);
const parseResult = userAuth0SubSchema.safeParse(request.params);
if (!parseResult.success) {
return reply.code(400).send({
error: 'Validation error',
@@ -210,8 +200,8 @@ export class UsersController {
});
}
const { userId } = parseResult.data;
const user = await this.userProfileService.getUserDetails(userId);
const { auth0Sub } = parseResult.data;
const user = await this.userProfileService.getUserDetails(auth0Sub);
if (!user) {
return reply.code(404).send({
@@ -224,7 +214,7 @@ export class UsersController {
} catch (error) {
logger.error('Error getting user details', {
error: error instanceof Error ? error.message : 'Unknown error',
userId: (request.params as any)?.userId,
auth0Sub: request.params?.auth0Sub,
});
return reply.code(500).send({
@@ -235,12 +225,10 @@ export class UsersController {
}
/**
* PATCH /api/admin/users/:userId/tier - Update subscription tier
* Uses subscriptionsService.adminOverrideTier() to sync both subscriptions.tier
* and user_profiles.subscription_tier atomically
* PATCH /api/admin/users/:auth0Sub/tier - Update subscription tier
*/
async updateTier(
request: FastifyRequest<{ Params: UserIdInput; Body: UpdateTierInput }>,
request: FastifyRequest<{ Params: UserAuth0SubInput; Body: UpdateTierInput }>,
reply: FastifyReply
) {
try {
@@ -253,7 +241,7 @@ export class UsersController {
}
// Validate path param
const paramsResult = userIdSchema.safeParse(request.params);
const paramsResult = userAuth0SubSchema.safeParse(request.params);
if (!paramsResult.success) {
return reply.code(400).send({
error: 'Validation error',
@@ -270,49 +258,22 @@ export class UsersController {
});
}
const { userId } = paramsResult.data;
const { auth0Sub } = paramsResult.data;
const { subscriptionTier } = bodyResult.data;
// Verify user exists before attempting tier change
const currentUser = await this.userProfileService.getUserDetails(userId);
if (!currentUser) {
return reply.code(404).send({
error: 'Not found',
message: 'User not found',
});
}
const previousTier = currentUser.subscriptionTier;
// Use subscriptionsService to update both tables atomically
await this.subscriptionsService.adminOverrideTier(userId, subscriptionTier);
// Log audit action
await this.adminRepository.logAuditAction(
actorId,
'UPDATE_TIER',
userId,
'user_profile',
currentUser.id,
{ previousTier, newTier: subscriptionTier }
const updatedUser = await this.userProfileService.updateSubscriptionTier(
auth0Sub,
subscriptionTier,
actorId
);
logger.info('User subscription tier updated via admin', {
userId,
previousTier,
newTier: subscriptionTier,
actorId,
});
// Return updated user profile
const updatedUser = await this.userProfileService.getUserDetails(userId);
return reply.code(200).send(updatedUser);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
logger.error('Error updating user tier', {
error: errorMessage,
userId: (request.params as any)?.userId,
auth0Sub: request.params?.auth0Sub,
});
if (errorMessage === 'User not found') {
@@ -330,10 +291,10 @@ export class UsersController {
}
/**
* PATCH /api/admin/users/:userId/deactivate - Soft delete user
* PATCH /api/admin/users/:auth0Sub/deactivate - Soft delete user
*/
async deactivateUser(
request: FastifyRequest<{ Params: UserIdInput; Body: DeactivateUserInput }>,
request: FastifyRequest<{ Params: UserAuth0SubInput; Body: DeactivateUserInput }>,
reply: FastifyReply
) {
try {
@@ -346,7 +307,7 @@ export class UsersController {
}
// Validate path param
const paramsResult = userIdSchema.safeParse(request.params);
const paramsResult = userAuth0SubSchema.safeParse(request.params);
if (!paramsResult.success) {
return reply.code(400).send({
error: 'Validation error',
@@ -363,11 +324,11 @@ export class UsersController {
});
}
const { userId } = paramsResult.data;
const { auth0Sub } = paramsResult.data;
const { reason } = bodyResult.data;
const deactivatedUser = await this.userProfileService.deactivateUser(
userId,
auth0Sub,
actorId,
reason
);
@@ -378,7 +339,7 @@ export class UsersController {
logger.error('Error deactivating user', {
error: errorMessage,
userId: (request.params as any)?.userId,
auth0Sub: request.params?.auth0Sub,
});
if (errorMessage === 'User not found') {
@@ -410,10 +371,10 @@ export class UsersController {
}
/**
* PATCH /api/admin/users/:userId/reactivate - Restore deactivated user
* PATCH /api/admin/users/:auth0Sub/reactivate - Restore deactivated user
*/
async reactivateUser(
request: FastifyRequest<{ Params: UserIdInput }>,
request: FastifyRequest<{ Params: UserAuth0SubInput }>,
reply: FastifyReply
) {
try {
@@ -426,7 +387,7 @@ export class UsersController {
}
// Validate path param
const paramsResult = userIdSchema.safeParse(request.params);
const paramsResult = userAuth0SubSchema.safeParse(request.params);
if (!paramsResult.success) {
return reply.code(400).send({
error: 'Validation error',
@@ -434,10 +395,10 @@ export class UsersController {
});
}
const { userId } = paramsResult.data;
const { auth0Sub } = paramsResult.data;
const reactivatedUser = await this.userProfileService.reactivateUser(
userId,
auth0Sub,
actorId
);
@@ -447,7 +408,7 @@ export class UsersController {
logger.error('Error reactivating user', {
error: errorMessage,
userId: (request.params as any)?.userId,
auth0Sub: request.params?.auth0Sub,
});
if (errorMessage === 'User not found') {
@@ -472,10 +433,10 @@ export class UsersController {
}
/**
* PATCH /api/admin/users/:userId/profile - Update user email/displayName
* PATCH /api/admin/users/:auth0Sub/profile - Update user email/displayName
*/
async updateProfile(
request: FastifyRequest<{ Params: UserIdInput; Body: UpdateProfileInput }>,
request: FastifyRequest<{ Params: UserAuth0SubInput; Body: UpdateProfileInput }>,
reply: FastifyReply
) {
try {
@@ -488,7 +449,7 @@ export class UsersController {
}
// Validate path param
const paramsResult = userIdSchema.safeParse(request.params);
const paramsResult = userAuth0SubSchema.safeParse(request.params);
if (!paramsResult.success) {
return reply.code(400).send({
error: 'Validation error',
@@ -505,11 +466,11 @@ export class UsersController {
});
}
const { userId } = paramsResult.data;
const { auth0Sub } = paramsResult.data;
const updates = bodyResult.data;
const updatedUser = await this.userProfileService.adminUpdateProfile(
userId,
auth0Sub,
updates,
actorId
);
@@ -520,7 +481,7 @@ export class UsersController {
logger.error('Error updating user profile', {
error: errorMessage,
userId: (request.params as any)?.userId,
auth0Sub: request.params?.auth0Sub,
});
if (errorMessage === 'User not found') {
@@ -538,10 +499,10 @@ export class UsersController {
}
/**
* PATCH /api/admin/users/:userId/promote - Promote user to admin
* PATCH /api/admin/users/:auth0Sub/promote - Promote user to admin
*/
async promoteToAdmin(
request: FastifyRequest<{ Params: UserIdInput; Body: PromoteToAdminInput }>,
request: FastifyRequest<{ Params: UserAuth0SubInput; Body: PromoteToAdminInput }>,
reply: FastifyReply
) {
try {
@@ -554,7 +515,7 @@ export class UsersController {
}
// Validate path param
const paramsResult = userIdSchema.safeParse(request.params);
const paramsResult = userAuth0SubSchema.safeParse(request.params);
if (!paramsResult.success) {
return reply.code(400).send({
error: 'Validation error',
@@ -571,11 +532,11 @@ export class UsersController {
});
}
const { userId } = paramsResult.data;
const { auth0Sub } = paramsResult.data;
const { role } = bodyResult.data;
// Get the user profile to verify they exist and get their email
const user = await this.userProfileService.getUserDetails(userId);
// Get the user profile first to verify they exist and get their email
const user = await this.userProfileService.getUserDetails(auth0Sub);
if (!user) {
return reply.code(404).send({
error: 'Not found',
@@ -591,15 +552,12 @@ export class UsersController {
});
}
// Get actor's admin record for audit trail
const actorAdmin = await this.adminService.getAdminByUserProfileId(actorId);
// Create the admin record using the user's UUID
// Create the admin record using the user's real auth0Sub
const adminUser = await this.adminService.createAdmin(
user.email,
role,
userId,
actorAdmin?.id || actorId
auth0Sub, // Use the real auth0Sub from the user profile
actorId
);
return reply.code(201).send(adminUser);
@@ -608,7 +566,7 @@ export class UsersController {
logger.error('Error promoting user to admin', {
error: errorMessage,
userId: (request.params as any)?.userId,
auth0Sub: request.params?.auth0Sub,
});
if (errorMessage.includes('already exists')) {
@@ -626,10 +584,10 @@ export class UsersController {
}
/**
* DELETE /api/admin/users/:userId - Hard delete user (permanent)
* DELETE /api/admin/users/:auth0Sub - Hard delete user (permanent)
*/
async hardDeleteUser(
request: FastifyRequest<{ Params: UserIdInput }>,
request: FastifyRequest<{ Params: UserAuth0SubInput }>,
reply: FastifyReply
) {
try {
@@ -642,7 +600,7 @@ export class UsersController {
}
// Validate path param
const paramsResult = userIdSchema.safeParse(request.params);
const paramsResult = userAuth0SubSchema.safeParse(request.params);
if (!paramsResult.success) {
return reply.code(400).send({
error: 'Validation error',
@@ -650,14 +608,14 @@ export class UsersController {
});
}
const { userId } = paramsResult.data;
const { auth0Sub } = paramsResult.data;
// Optional reason from query params
const reason = (request.query as any)?.reason;
// Hard delete user
await this.userProfileService.adminHardDeleteUser(
userId,
auth0Sub,
actorId,
reason
);
@@ -670,7 +628,7 @@ export class UsersController {
logger.error('Error hard deleting user', {
error: errorMessage,
userId: (request.params as any)?.userId,
auth0Sub: request.params?.auth0Sub,
});
if (errorMessage === 'Cannot delete your own account') {

View File

@@ -19,9 +19,9 @@ export const listUsersQuerySchema = z.object({
sortOrder: z.enum(['asc', 'desc']).default('desc'),
});
// Path param for user UUID
export const userIdSchema = z.object({
userId: z.string().uuid('Invalid user ID format'),
// Path param for user auth0Sub
export const userAuth0SubSchema = z.object({
auth0Sub: z.string().min(1, 'auth0Sub is required'),
});
// Body for updating subscription tier
@@ -50,7 +50,7 @@ export const promoteToAdminSchema = z.object({
// Type exports
export type ListUsersQueryInput = z.infer<typeof listUsersQuerySchema>;
export type UserIdInput = z.infer<typeof userIdSchema>;
export type UserAuth0SubInput = z.infer<typeof userAuth0SubSchema>;
export type UpdateTierInput = z.infer<typeof updateTierSchema>;
export type DeactivateUserInput = z.infer<typeof deactivateUserSchema>;
export type UpdateProfileInput = z.infer<typeof updateProfileSchema>;

View File

@@ -10,49 +10,29 @@ import { logger } from '../../../core/logging/logger';
export class AdminRepository {
constructor(private pool: Pool) {}
async getAdminById(id: string): Promise<AdminUser | null> {
async getAdminByAuth0Sub(auth0Sub: string): Promise<AdminUser | null> {
const query = `
SELECT id, user_profile_id, email, role, created_at, created_by, revoked_at, updated_at
SELECT auth0_sub, email, role, created_at, created_by, revoked_at, updated_at
FROM admin_users
WHERE id = $1
WHERE auth0_sub = $1
LIMIT 1
`;
try {
const result = await this.pool.query(query, [id]);
const result = await this.pool.query(query, [auth0Sub]);
if (result.rows.length === 0) {
return null;
}
return this.mapRowToAdminUser(result.rows[0]);
} catch (error) {
logger.error('Error fetching admin by id', { error, id });
throw error;
}
}
async getAdminByUserProfileId(userProfileId: string): Promise<AdminUser | null> {
const query = `
SELECT id, user_profile_id, email, role, created_at, created_by, revoked_at, updated_at
FROM admin_users
WHERE user_profile_id = $1
LIMIT 1
`;
try {
const result = await this.pool.query(query, [userProfileId]);
if (result.rows.length === 0) {
return null;
}
return this.mapRowToAdminUser(result.rows[0]);
} catch (error) {
logger.error('Error fetching admin by user_profile_id', { error, userProfileId });
logger.error('Error fetching admin by auth0_sub', { error, auth0Sub });
throw error;
}
}
async getAdminByEmail(email: string): Promise<AdminUser | null> {
const query = `
SELECT id, user_profile_id, email, role, created_at, created_by, revoked_at, updated_at
SELECT auth0_sub, email, role, created_at, created_by, revoked_at, updated_at
FROM admin_users
WHERE LOWER(email) = LOWER($1)
LIMIT 1
@@ -72,7 +52,7 @@ export class AdminRepository {
async getAllAdmins(): Promise<AdminUser[]> {
const query = `
SELECT id, user_profile_id, email, role, created_at, created_by, revoked_at, updated_at
SELECT auth0_sub, email, role, created_at, created_by, revoked_at, updated_at
FROM admin_users
ORDER BY created_at DESC
`;
@@ -88,7 +68,7 @@ export class AdminRepository {
async getActiveAdmins(): Promise<AdminUser[]> {
const query = `
SELECT id, user_profile_id, email, role, created_at, created_by, revoked_at, updated_at
SELECT auth0_sub, email, role, created_at, created_by, revoked_at, updated_at
FROM admin_users
WHERE revoked_at IS NULL
ORDER BY created_at DESC
@@ -103,61 +83,61 @@ export class AdminRepository {
}
}
async createAdmin(userProfileId: string, email: string, role: string, createdBy: string): Promise<AdminUser> {
async createAdmin(auth0Sub: string, email: string, role: string, createdBy: string): Promise<AdminUser> {
const query = `
INSERT INTO admin_users (user_profile_id, email, role, created_by)
INSERT INTO admin_users (auth0_sub, email, role, created_by)
VALUES ($1, $2, $3, $4)
RETURNING id, user_profile_id, email, role, created_at, created_by, revoked_at, updated_at
RETURNING auth0_sub, email, role, created_at, created_by, revoked_at, updated_at
`;
try {
const result = await this.pool.query(query, [userProfileId, email, role, createdBy]);
const result = await this.pool.query(query, [auth0Sub, email, role, createdBy]);
if (result.rows.length === 0) {
throw new Error('Failed to create admin user');
}
return this.mapRowToAdminUser(result.rows[0]);
} catch (error) {
logger.error('Error creating admin', { error, userProfileId, email });
logger.error('Error creating admin', { error, auth0Sub, email });
throw error;
}
}
async revokeAdmin(id: string): Promise<AdminUser> {
async revokeAdmin(auth0Sub: string): Promise<AdminUser> {
const query = `
UPDATE admin_users
SET revoked_at = CURRENT_TIMESTAMP
WHERE id = $1
RETURNING id, user_profile_id, email, role, created_at, created_by, revoked_at, updated_at
WHERE auth0_sub = $1
RETURNING auth0_sub, email, role, created_at, created_by, revoked_at, updated_at
`;
try {
const result = await this.pool.query(query, [id]);
const result = await this.pool.query(query, [auth0Sub]);
if (result.rows.length === 0) {
throw new Error('Admin user not found');
}
return this.mapRowToAdminUser(result.rows[0]);
} catch (error) {
logger.error('Error revoking admin', { error, id });
logger.error('Error revoking admin', { error, auth0Sub });
throw error;
}
}
async reinstateAdmin(id: string): Promise<AdminUser> {
async reinstateAdmin(auth0Sub: string): Promise<AdminUser> {
const query = `
UPDATE admin_users
SET revoked_at = NULL
WHERE id = $1
RETURNING id, user_profile_id, email, role, created_at, created_by, revoked_at, updated_at
WHERE auth0_sub = $1
RETURNING auth0_sub, email, role, created_at, created_by, revoked_at, updated_at
`;
try {
const result = await this.pool.query(query, [id]);
const result = await this.pool.query(query, [auth0Sub]);
if (result.rows.length === 0) {
throw new Error('Admin user not found');
}
return this.mapRowToAdminUser(result.rows[0]);
} catch (error) {
logger.error('Error reinstating admin', { error, id });
logger.error('Error reinstating admin', { error, auth0Sub });
throw error;
}
}
@@ -222,11 +202,30 @@ export class AdminRepository {
}
}
async updateAuth0SubByEmail(email: string, auth0Sub: string): Promise<AdminUser> {
const query = `
UPDATE admin_users
SET auth0_sub = $1,
updated_at = CURRENT_TIMESTAMP
WHERE LOWER(email) = LOWER($2)
RETURNING auth0_sub, email, role, created_at, created_by, revoked_at, updated_at
`;
try {
const result = await this.pool.query(query, [auth0Sub, email]);
if (result.rows.length === 0) {
throw new Error(`Admin user with email ${email} not found`);
}
return this.mapRowToAdminUser(result.rows[0]);
} catch (error) {
logger.error('Error updating admin auth0_sub by email', { error, email, auth0Sub });
throw error;
}
}
private mapRowToAdminUser(row: any): AdminUser {
return {
id: row.id,
userProfileId: row.user_profile_id,
auth0Sub: row.auth0_sub,
email: row.email,
role: row.role,
createdAt: new Date(row.created_at),

View File

@@ -6,25 +6,15 @@
import { AdminRepository } from '../data/admin.repository';
import { AdminUser, AdminAuditLog } from './admin.types';
import { logger } from '../../../core/logging/logger';
import { auditLogService } from '../../audit-log';
export class AdminService {
constructor(private repository: AdminRepository) {}
async getAdminById(id: string): Promise<AdminUser | null> {
async getAdminByAuth0Sub(auth0Sub: string): Promise<AdminUser | null> {
try {
return await this.repository.getAdminById(id);
return await this.repository.getAdminByAuth0Sub(auth0Sub);
} catch (error) {
logger.error('Error getting admin by id', { error });
throw error;
}
}
async getAdminByUserProfileId(userProfileId: string): Promise<AdminUser | null> {
try {
return await this.repository.getAdminByUserProfileId(userProfileId);
} catch (error) {
logger.error('Error getting admin by user_profile_id', { error });
logger.error('Error getting admin by auth0_sub', { error });
throw error;
}
}
@@ -56,7 +46,7 @@ export class AdminService {
}
}
async createAdmin(email: string, role: string, userProfileId: string, createdByAdminId: string): Promise<AdminUser> {
async createAdmin(email: string, role: string, auth0Sub: string, createdBy: string): Promise<AdminUser> {
try {
// Check if admin already exists
const normalizedEmail = email.trim().toLowerCase();
@@ -66,24 +56,14 @@ export class AdminService {
}
// Create new admin
const admin = await this.repository.createAdmin(userProfileId, normalizedEmail, role, createdByAdminId);
const admin = await this.repository.createAdmin(auth0Sub, normalizedEmail, role, createdBy);
// Log audit action (legacy)
await this.repository.logAuditAction(createdByAdminId, 'CREATE', admin.id, 'admin_user', admin.email, {
// Log audit action
await this.repository.logAuditAction(createdBy, 'CREATE', admin.auth0Sub, 'admin_user', admin.email, {
email,
role
});
// Log to unified audit log
await auditLogService.info(
'admin',
userProfileId,
`Admin user created: ${admin.email}`,
'admin_user',
admin.id,
{ email: admin.email, role }
).catch(err => logger.error('Failed to log admin create audit event', { error: err }));
logger.info('Admin user created', { email, role });
return admin;
} catch (error) {
@@ -92,7 +72,7 @@ export class AdminService {
}
}
async revokeAdmin(id: string, revokedByAdminId: string): Promise<AdminUser> {
async revokeAdmin(auth0Sub: string, revokedBy: string): Promise<AdminUser> {
try {
// Check that at least one active admin will remain
const activeAdmins = await this.repository.getActiveAdmins();
@@ -101,51 +81,31 @@ export class AdminService {
}
// Revoke the admin
const admin = await this.repository.revokeAdmin(id);
const admin = await this.repository.revokeAdmin(auth0Sub);
// Log audit action (legacy)
await this.repository.logAuditAction(revokedByAdminId, 'REVOKE', id, 'admin_user', admin.email);
// Log audit action
await this.repository.logAuditAction(revokedBy, 'REVOKE', auth0Sub, 'admin_user', admin.email);
// Log to unified audit log
await auditLogService.info(
'admin',
admin.userProfileId,
`Admin user revoked: ${admin.email}`,
'admin_user',
id,
{ email: admin.email }
).catch(err => logger.error('Failed to log admin revoke audit event', { error: err }));
logger.info('Admin user revoked', { id, email: admin.email });
logger.info('Admin user revoked', { auth0Sub, email: admin.email });
return admin;
} catch (error) {
logger.error('Error revoking admin', { error, id });
logger.error('Error revoking admin', { error, auth0Sub });
throw error;
}
}
async reinstateAdmin(id: string, reinstatedByAdminId: string): Promise<AdminUser> {
async reinstateAdmin(auth0Sub: string, reinstatedBy: string): Promise<AdminUser> {
try {
// Reinstate the admin
const admin = await this.repository.reinstateAdmin(id);
const admin = await this.repository.reinstateAdmin(auth0Sub);
// Log audit action (legacy)
await this.repository.logAuditAction(reinstatedByAdminId, 'REINSTATE', id, 'admin_user', admin.email);
// Log audit action
await this.repository.logAuditAction(reinstatedBy, 'REINSTATE', auth0Sub, 'admin_user', admin.email);
// Log to unified audit log
await auditLogService.info(
'admin',
admin.userProfileId,
`Admin user reinstated: ${admin.email}`,
'admin_user',
id,
{ email: admin.email }
).catch(err => logger.error('Failed to log admin reinstate audit event', { error: err }));
logger.info('Admin user reinstated', { id, email: admin.email });
logger.info('Admin user reinstated', { auth0Sub, email: admin.email });
return admin;
} catch (error) {
logger.error('Error reinstating admin', { error, id });
logger.error('Error reinstating admin', { error, auth0Sub });
throw error;
}
}
@@ -159,4 +119,12 @@ export class AdminService {
}
}
async linkAdminAuth0Sub(email: string, auth0Sub: string): Promise<AdminUser> {
try {
return await this.repository.updateAuth0SubByEmail(email.trim().toLowerCase(), auth0Sub);
} catch (error) {
logger.error('Error linking admin auth0_sub to email', { error, email, auth0Sub });
throw error;
}
}
}

View File

@@ -4,8 +4,7 @@
*/
export interface AdminUser {
id: string;
userProfileId: string;
auth0Sub: string;
email: string;
role: 'admin' | 'super_admin';
createdAt: Date;
@@ -20,11 +19,11 @@ export interface CreateAdminRequest {
}
export interface RevokeAdminRequest {
id: string;
auth0Sub: string;
}
export interface ReinstateAdminRequest {
id: string;
auth0Sub: string;
}
export interface AdminAuditLog {
@@ -72,25 +71,25 @@ export interface BulkCreateAdminResponse {
}
export interface BulkRevokeAdminRequest {
ids: string[];
auth0Subs: string[];
}
export interface BulkRevokeAdminResponse {
revoked: AdminUser[];
failed: Array<{
id: string;
auth0Sub: string;
error: string;
}>;
}
export interface BulkReinstateAdminRequest {
ids: string[];
auth0Subs: string[];
}
export interface BulkReinstateAdminResponse {
reinstated: AdminUser[];
failed: Array<{
id: string;
auth0Sub: string;
error: string;
}>;
}

View File

@@ -4,19 +4,18 @@
*/
import request from 'supertest';
import { buildApp } from '../../../../app';
import { app } from '../../../../app';
import pool from '../../../../core/config/database';
import { FastifyInstance } from 'fastify';
import { readFileSync } from 'fs';
import { join } from 'path';
import fastifyPlugin from 'fastify-plugin';
import { setAdminGuardPool } from '../../../../core/plugins/admin-guard.plugin';
const DEFAULT_ADMIN_ID = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const DEFAULT_ADMIN_SUB = 'test-admin-123';
const DEFAULT_ADMIN_EMAIL = 'test-admin@motovaultpro.com';
let currentUser = {
sub: 'auth0|test-admin-123',
sub: DEFAULT_ADMIN_SUB,
email: DEFAULT_ADMIN_EMAIL,
};
@@ -26,15 +25,11 @@ jest.mock('../../../../core/plugins/auth.plugin', () => {
default: fastifyPlugin(async function(fastify) {
fastify.decorate('authenticate', async function(request, _reply) {
// Inject dynamic test user context
// JWT sub is still auth0|xxx format
request.user = { sub: currentUser.sub };
request.userContext = {
userId: DEFAULT_ADMIN_ID,
userId: currentUser.sub,
email: currentUser.email,
emailVerified: true,
onboardingCompleted: true,
isAdmin: false, // Will be set by admin guard
subscriptionTier: 'free',
};
});
}, { name: 'auth-plugin' })
@@ -42,14 +37,10 @@ jest.mock('../../../../core/plugins/auth.plugin', () => {
});
describe('Admin Management Integration Tests', () => {
let app: FastifyInstance;
let testAdminId: string;
let testAdminAuth0Sub: string;
let testNonAdminAuth0Sub: string;
beforeAll(async () => {
// Build the app
app = await buildApp();
await app.ready();
// Run the admin migration directly using the migration file
const migrationFile = join(__dirname, '../../migrations/001_create_admin_users.sql');
const migrationSQL = readFileSync(migrationFile, 'utf-8');
@@ -59,31 +50,33 @@ describe('Admin Management Integration Tests', () => {
setAdminGuardPool(pool);
// Create test admin user
testAdminId = DEFAULT_ADMIN_ID;
testAdminAuth0Sub = DEFAULT_ADMIN_SUB;
await pool.query(`
INSERT INTO admin_users (id, user_profile_id, email, role, created_by)
VALUES ($1, $2, $3, $4, $5)
ON CONFLICT (user_profile_id) DO NOTHING
`, [testAdminId, testAdminId, DEFAULT_ADMIN_EMAIL, 'admin', 'system']);
INSERT INTO admin_users (auth0_sub, email, role, created_by)
VALUES ($1, $2, $3, $4)
ON CONFLICT (auth0_sub) DO NOTHING
`, [testAdminAuth0Sub, DEFAULT_ADMIN_EMAIL, 'admin', 'system']);
// Create test non-admin auth0Sub for permission tests
testNonAdminAuth0Sub = 'test-non-admin-456';
});
afterAll(async () => {
// Clean up test database
await pool.query('DROP TABLE IF EXISTS admin_audit_logs CASCADE');
await pool.query('DROP TABLE IF EXISTS admin_users CASCADE');
await app.close();
await pool.end();
});
beforeEach(async () => {
// Clean up test data before each test (except the test admin)
await pool.query(
'DELETE FROM admin_users WHERE user_profile_id != $1',
[testAdminId]
'DELETE FROM admin_users WHERE auth0_sub != $1 AND auth0_sub != $2',
[testAdminAuth0Sub, 'system|bootstrap']
);
await pool.query('DELETE FROM admin_audit_logs');
currentUser = {
sub: 'auth0|test-admin-123',
sub: DEFAULT_ADMIN_SUB,
email: DEFAULT_ADMIN_EMAIL,
};
});
@@ -92,11 +85,11 @@ describe('Admin Management Integration Tests', () => {
it('should reject non-admin user trying to list admins', async () => {
// Create mock for non-admin user
currentUser = {
sub: 'auth0|test-non-admin-456',
sub: testNonAdminAuth0Sub,
email: 'test-user@example.com',
};
const response = await request(app.server)
const response = await request(app)
.get('/api/admin/admins')
.expect(403);
@@ -108,51 +101,51 @@ describe('Admin Management Integration Tests', () => {
describe('GET /api/admin/verify', () => {
it('should confirm admin access for existing admin', async () => {
currentUser = {
sub: 'auth0|test-admin-123',
sub: testAdminAuth0Sub,
email: DEFAULT_ADMIN_EMAIL,
};
const response = await request(app.server)
const response = await request(app)
.get('/api/admin/verify')
.expect(200);
expect(response.body.isAdmin).toBe(true);
expect(response.body.adminRecord).toMatchObject({
id: testAdminId,
auth0Sub: testAdminAuth0Sub,
email: DEFAULT_ADMIN_EMAIL,
});
});
it('should link admin record by email when user_profile_id differs', async () => {
const placeholderId = '9b9a1234-1234-1234-1234-123456789abc';
const realId = 'a1b2c3d4-5678-90ab-cdef-123456789def';
it('should link admin record by email when auth0_sub differs', async () => {
const placeholderSub = 'auth0|placeholder-sub';
const realSub = 'auth0|real-admin-sub';
const email = 'link-admin@example.com';
await pool.query(`
INSERT INTO admin_users (id, user_profile_id, email, role, created_by)
VALUES ($1, $2, $3, $4, $5)
`, [placeholderId, placeholderId, email, 'admin', testAdminId]);
INSERT INTO admin_users (auth0_sub, email, role, created_by)
VALUES ($1, $2, $3, $4)
`, [placeholderSub, email, 'admin', testAdminAuth0Sub]);
currentUser = {
sub: 'auth0|real-admin-sub',
sub: realSub,
email,
};
const response = await request(app.server)
const response = await request(app)
.get('/api/admin/verify')
.expect(200);
expect(response.body.isAdmin).toBe(true);
expect(response.body.adminRecord).toMatchObject({
userProfileId: realId,
auth0Sub: realSub,
email,
});
const record = await pool.query(
'SELECT user_profile_id FROM admin_users WHERE email = $1',
'SELECT auth0_sub FROM admin_users WHERE email = $1',
[email]
);
expect(record.rows[0].user_profile_id).toBe(realId);
expect(record.rows[0].auth0_sub).toBe(realSub);
});
it('should return non-admin response for unknown user', async () => {
@@ -161,7 +154,7 @@ describe('Admin Management Integration Tests', () => {
email: 'non-admin@example.com',
};
const response = await request(app.server)
const response = await request(app)
.get('/api/admin/verify')
.expect(200);
@@ -173,19 +166,17 @@ describe('Admin Management Integration Tests', () => {
describe('GET /api/admin/admins', () => {
it('should list all admin users', async () => {
// Create additional test admins
const admin1Id = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
const admin2Id = '8f14e45f-ceea-367f-a27f-c9a6d0c67e0e';
await pool.query(`
INSERT INTO admin_users (id, user_profile_id, email, role, created_by)
INSERT INTO admin_users (auth0_sub, email, role, created_by)
VALUES
($1, $2, $3, $4, $5),
($6, $7, $8, $9, $10)
($1, $2, $3, $4),
($5, $6, $7, $8)
`, [
admin1Id, admin1Id, 'admin1@example.com', 'admin', testAdminId,
admin2Id, admin2Id, 'admin2@example.com', 'super_admin', testAdminId
'auth0|admin1', 'admin1@example.com', 'admin', testAdminAuth0Sub,
'auth0|admin2', 'admin2@example.com', 'super_admin', testAdminAuth0Sub
]);
const response = await request(app.server)
const response = await request(app)
.get('/api/admin/admins')
.expect(200);
@@ -193,7 +184,7 @@ describe('Admin Management Integration Tests', () => {
expect(response.body).toHaveProperty('admins');
expect(response.body.admins.length).toBeGreaterThanOrEqual(3); // At least test admin + 2 created
expect(response.body.admins[0]).toMatchObject({
id: expect.any(String),
auth0Sub: expect.any(String),
email: expect.any(String),
role: expect.stringMatching(/^(admin|super_admin)$/),
createdAt: expect.any(String),
@@ -203,13 +194,12 @@ describe('Admin Management Integration Tests', () => {
it('should include revoked admins in the list', async () => {
// Create and revoke an admin
const revokedId = 'f1e2d3c4-b5a6-9788-6543-210fedcba987';
await pool.query(`
INSERT INTO admin_users (id, user_profile_id, email, role, created_by, revoked_at)
VALUES ($1, $2, $3, $4, $5, CURRENT_TIMESTAMP)
`, [revokedId, revokedId, 'revoked@example.com', 'admin', testAdminId]);
INSERT INTO admin_users (auth0_sub, email, role, created_by, revoked_at)
VALUES ($1, $2, $3, $4, CURRENT_TIMESTAMP)
`, ['auth0|revoked', 'revoked@example.com', 'admin', testAdminAuth0Sub]);
const response = await request(app.server)
const response = await request(app)
.get('/api/admin/admins')
.expect(200);
@@ -228,17 +218,17 @@ describe('Admin Management Integration Tests', () => {
role: 'admin'
};
const response = await request(app.server)
const response = await request(app)
.post('/api/admin/admins')
.send(newAdminData)
.expect(201);
expect(response.body).toMatchObject({
id: expect.any(String),
auth0Sub: expect.any(String),
email: 'newadmin@example.com',
role: 'admin',
createdAt: expect.any(String),
createdBy: testAdminId,
createdBy: testAdminAuth0Sub,
revokedAt: null
});
@@ -248,7 +238,7 @@ describe('Admin Management Integration Tests', () => {
['CREATE', 'newadmin@example.com']
);
expect(auditResult.rows.length).toBe(1);
expect(auditResult.rows[0].actor_admin_id).toBe(testAdminId);
expect(auditResult.rows[0].actor_admin_id).toBe(testAdminAuth0Sub);
});
it('should reject invalid email', async () => {
@@ -257,7 +247,7 @@ describe('Admin Management Integration Tests', () => {
role: 'admin'
};
const response = await request(app.server)
const response = await request(app)
.post('/api/admin/admins')
.send(invalidData)
.expect(400);
@@ -273,13 +263,13 @@ describe('Admin Management Integration Tests', () => {
};
// Create first admin
await request(app.server)
await request(app)
.post('/api/admin/admins')
.send(adminData)
.expect(201);
// Try to create duplicate
const response = await request(app.server)
const response = await request(app)
.post('/api/admin/admins')
.send(adminData)
.expect(400);
@@ -294,7 +284,7 @@ describe('Admin Management Integration Tests', () => {
role: 'super_admin'
};
const response = await request(app.server)
const response = await request(app)
.post('/api/admin/admins')
.send(superAdminData)
.expect(201);
@@ -307,7 +297,7 @@ describe('Admin Management Integration Tests', () => {
email: 'defaultrole@example.com'
};
const response = await request(app.server)
const response = await request(app)
.post('/api/admin/admins')
.send(adminData)
.expect(201);
@@ -316,24 +306,23 @@ describe('Admin Management Integration Tests', () => {
});
});
describe('PATCH /api/admin/admins/:id/revoke', () => {
describe('PATCH /api/admin/admins/:auth0Sub/revoke', () => {
it('should revoke admin access', async () => {
// Create admin to revoke
const toRevokeId = 'b1c2d3e4-f5a6-7890-1234-567890abcdef';
const createResult = await pool.query(`
INSERT INTO admin_users (id, user_profile_id, email, role, created_by)
VALUES ($1, $2, $3, $4, $5)
RETURNING id
`, [toRevokeId, toRevokeId, 'torevoke@example.com', 'admin', testAdminId]);
INSERT INTO admin_users (auth0_sub, email, role, created_by)
VALUES ($1, $2, $3, $4)
RETURNING auth0_sub
`, ['auth0|to-revoke', 'torevoke@example.com', 'admin', testAdminAuth0Sub]);
const adminId = createResult.rows[0].id;
const auth0Sub = createResult.rows[0].auth0_sub;
const response = await request(app.server)
.patch(`/api/admin/admins/${adminId}/revoke`)
const response = await request(app)
.patch(`/api/admin/admins/${auth0Sub}/revoke`)
.expect(200);
expect(response.body).toMatchObject({
id: adminId,
auth0Sub,
email: 'torevoke@example.com',
revokedAt: expect.any(String)
});
@@ -341,7 +330,7 @@ describe('Admin Management Integration Tests', () => {
// Verify audit log
const auditResult = await pool.query(
'SELECT * FROM admin_audit_logs WHERE action = $1 AND target_admin_id = $2',
['REVOKE', adminId]
['REVOKE', auth0Sub]
);
expect(auditResult.rows.length).toBe(1);
});
@@ -349,12 +338,12 @@ describe('Admin Management Integration Tests', () => {
it('should prevent revoking last active admin', async () => {
// First, ensure only one active admin exists
await pool.query(
'UPDATE admin_users SET revoked_at = CURRENT_TIMESTAMP WHERE user_profile_id != $1',
[testAdminId]
'UPDATE admin_users SET revoked_at = CURRENT_TIMESTAMP WHERE auth0_sub != $1',
[testAdminAuth0Sub]
);
const response = await request(app.server)
.patch(`/api/admin/admins/${testAdminId}/revoke`)
const response = await request(app)
.patch(`/api/admin/admins/${testAdminAuth0Sub}/revoke`)
.expect(400);
expect(response.body.error).toBe('Bad Request');
@@ -362,8 +351,8 @@ describe('Admin Management Integration Tests', () => {
});
it('should return 404 for non-existent admin', async () => {
const response = await request(app.server)
.patch('/api/admin/admins/00000000-0000-0000-0000-000000000000/revoke')
const response = await request(app)
.patch('/api/admin/admins/auth0|nonexistent/revoke')
.expect(404);
expect(response.body.error).toBe('Not Found');
@@ -371,24 +360,23 @@ describe('Admin Management Integration Tests', () => {
});
});
describe('PATCH /api/admin/admins/:id/reinstate', () => {
describe('PATCH /api/admin/admins/:auth0Sub/reinstate', () => {
it('should reinstate revoked admin', async () => {
// Create revoked admin
const reinstateId = 'c2d3e4f5-a6b7-8901-2345-678901bcdef0';
const createResult = await pool.query(`
INSERT INTO admin_users (id, user_profile_id, email, role, created_by, revoked_at)
VALUES ($1, $2, $3, $4, $5, CURRENT_TIMESTAMP)
RETURNING id
`, [reinstateId, reinstateId, 'toreinstate@example.com', 'admin', testAdminId]);
INSERT INTO admin_users (auth0_sub, email, role, created_by, revoked_at)
VALUES ($1, $2, $3, $4, CURRENT_TIMESTAMP)
RETURNING auth0_sub
`, ['auth0|to-reinstate', 'toreinstate@example.com', 'admin', testAdminAuth0Sub]);
const adminId = createResult.rows[0].id;
const auth0Sub = createResult.rows[0].auth0_sub;
const response = await request(app.server)
.patch(`/api/admin/admins/${adminId}/reinstate`)
const response = await request(app)
.patch(`/api/admin/admins/${auth0Sub}/reinstate`)
.expect(200);
expect(response.body).toMatchObject({
id: adminId,
auth0Sub,
email: 'toreinstate@example.com',
revokedAt: null
});
@@ -396,14 +384,14 @@ describe('Admin Management Integration Tests', () => {
// Verify audit log
const auditResult = await pool.query(
'SELECT * FROM admin_audit_logs WHERE action = $1 AND target_admin_id = $2',
['REINSTATE', adminId]
['REINSTATE', auth0Sub]
);
expect(auditResult.rows.length).toBe(1);
});
it('should return 404 for non-existent admin', async () => {
const response = await request(app.server)
.patch('/api/admin/admins/00000000-0000-0000-0000-000000000000/reinstate')
const response = await request(app)
.patch('/api/admin/admins/auth0|nonexistent/reinstate')
.expect(404);
expect(response.body.error).toBe('Not Found');
@@ -412,17 +400,16 @@ describe('Admin Management Integration Tests', () => {
it('should handle reinstating already active admin', async () => {
// Create active admin
const activeId = 'd3e4f5a6-b7c8-9012-3456-789012cdef01';
const createResult = await pool.query(`
INSERT INTO admin_users (id, user_profile_id, email, role, created_by)
VALUES ($1, $2, $3, $4, $5)
RETURNING id
`, [activeId, activeId, 'active@example.com', 'admin', testAdminId]);
INSERT INTO admin_users (auth0_sub, email, role, created_by)
VALUES ($1, $2, $3, $4)
RETURNING auth0_sub
`, ['auth0|already-active', 'active@example.com', 'admin', testAdminAuth0Sub]);
const adminId = createResult.rows[0].id;
const auth0Sub = createResult.rows[0].auth0_sub;
const response = await request(app.server)
.patch(`/api/admin/admins/${adminId}/reinstate`)
const response = await request(app)
.patch(`/api/admin/admins/${auth0Sub}/reinstate`)
.expect(200);
expect(response.body.revokedAt).toBeNull();
@@ -439,12 +426,12 @@ describe('Admin Management Integration Tests', () => {
($5, $6, $7, $8),
($9, $10, $11, $12)
`, [
testAdminId, 'CREATE', 'admin_user', 'test1@example.com',
testAdminId, 'REVOKE', 'admin_user', 'test2@example.com',
testAdminId, 'REINSTATE', 'admin_user', 'test3@example.com'
testAdminAuth0Sub, 'CREATE', 'admin_user', 'test1@example.com',
testAdminAuth0Sub, 'REVOKE', 'admin_user', 'test2@example.com',
testAdminAuth0Sub, 'REINSTATE', 'admin_user', 'test3@example.com'
]);
const response = await request(app.server)
const response = await request(app)
.get('/api/admin/audit-logs')
.expect(200);
@@ -453,7 +440,7 @@ describe('Admin Management Integration Tests', () => {
expect(response.body.logs.length).toBeGreaterThanOrEqual(3);
expect(response.body.logs[0]).toMatchObject({
id: expect.any(String),
actorAdminId: testAdminId,
actorAdminId: testAdminAuth0Sub,
action: expect.any(String),
resourceType: expect.any(String),
createdAt: expect.any(String)
@@ -466,10 +453,10 @@ describe('Admin Management Integration Tests', () => {
await pool.query(`
INSERT INTO admin_audit_logs (actor_admin_id, action, resource_type, resource_id)
VALUES ($1, $2, $3, $4)
`, [testAdminId, 'CREATE', 'admin_user', `test${i}@example.com`]);
`, [testAdminAuth0Sub, 'CREATE', 'admin_user', `test${i}@example.com`]);
}
const response = await request(app.server)
const response = await request(app)
.get('/api/admin/audit-logs?limit=5&offset=0')
.expect(200);
@@ -486,12 +473,12 @@ describe('Admin Management Integration Tests', () => {
($3, $4, CURRENT_TIMESTAMP - INTERVAL '1 minute'),
($5, $6, CURRENT_TIMESTAMP)
`, [
testAdminId, 'FIRST',
testAdminId, 'SECOND',
testAdminId, 'THIRD'
testAdminAuth0Sub, 'FIRST',
testAdminAuth0Sub, 'SECOND',
testAdminAuth0Sub, 'THIRD'
]);
const response = await request(app.server)
const response = await request(app)
.get('/api/admin/audit-logs?limit=3')
.expect(200);
@@ -504,45 +491,45 @@ describe('Admin Management Integration Tests', () => {
describe('End-to-end workflow', () => {
it('should create, revoke, and reinstate admin with full audit trail', async () => {
// 1. Create new admin
const createResponse = await request(app.server)
const createResponse = await request(app)
.post('/api/admin/admins')
.send({ email: 'workflow@example.com', role: 'admin' })
.expect(201);
const adminId = createResponse.body.id;
const auth0Sub = createResponse.body.auth0Sub;
// 2. Verify admin appears in list
const listResponse = await request(app.server)
const listResponse = await request(app)
.get('/api/admin/admins')
.expect(200);
const createdAdmin = listResponse.body.admins.find(
(admin: any) => admin.id === adminId
(admin: any) => admin.auth0Sub === auth0Sub
);
expect(createdAdmin).toBeDefined();
expect(createdAdmin.revokedAt).toBeNull();
// 3. Revoke admin
const revokeResponse = await request(app.server)
.patch(`/api/admin/admins/${adminId}/revoke`)
const revokeResponse = await request(app)
.patch(`/api/admin/admins/${auth0Sub}/revoke`)
.expect(200);
expect(revokeResponse.body.revokedAt).toBeTruthy();
// 4. Reinstate admin
const reinstateResponse = await request(app.server)
.patch(`/api/admin/admins/${adminId}/reinstate`)
const reinstateResponse = await request(app)
.patch(`/api/admin/admins/${auth0Sub}/reinstate`)
.expect(200);
expect(reinstateResponse.body.revokedAt).toBeNull();
// 5. Verify complete audit trail
const auditResponse = await request(app.server)
const auditResponse = await request(app)
.get('/api/admin/audit-logs')
.expect(200);
const workflowLogs = auditResponse.body.logs.filter(
(log: any) => log.targetAdminId === adminId || log.resourceId === 'workflow@example.com'
(log: any) => log.targetAdminId === auth0Sub || log.resourceId === 'workflow@example.com'
);
expect(workflowLogs.length).toBeGreaterThanOrEqual(3);

View File

@@ -26,12 +26,9 @@ describe('admin guard plugin', () => {
fastify = Fastify();
authenticateMock = jest.fn(async (request: FastifyRequest) => {
request.userContext = {
userId: '7c9e6679-7425-40de-944b-e07fc1f90ae7',
userId: 'auth0|admin',
email: 'admin@motovaultpro.com',
emailVerified: true,
onboardingCompleted: true,
isAdmin: false,
subscriptionTier: 'free',
};
});
fastify.decorate('authenticate', authenticateMock);
@@ -41,7 +38,7 @@ describe('admin guard plugin', () => {
mockPool = {
query: jest.fn().mockResolvedValue({
rows: [{
user_profile_id: '7c9e6679-7425-40de-944b-e07fc1f90ae7',
auth0_sub: 'auth0|admin',
email: 'admin@motovaultpro.com',
role: 'admin',
revoked_at: null,

View File

@@ -6,23 +6,13 @@
import { AdminService } from '../../domain/admin.service';
import { AdminRepository } from '../../data/admin.repository';
// Mock the audit log service
jest.mock('../../../audit-log', () => ({
auditLogService: {
info: jest.fn().mockResolvedValue(undefined),
warn: jest.fn().mockResolvedValue(undefined),
error: jest.fn().mockResolvedValue(undefined),
},
}));
describe('AdminService', () => {
let adminService: AdminService;
let mockRepository: jest.Mocked<AdminRepository>;
beforeEach(() => {
mockRepository = {
getAdminById: jest.fn(),
getAdminByUserProfileId: jest.fn(),
getAdminByAuth0Sub: jest.fn(),
getAdminByEmail: jest.fn(),
getAllAdmins: jest.fn(),
getActiveAdmins: jest.fn(),
@@ -36,31 +26,30 @@ describe('AdminService', () => {
adminService = new AdminService(mockRepository);
});
describe('getAdminById', () => {
describe('getAdminByAuth0Sub', () => {
it('should return admin when found', async () => {
const mockAdmin = {
id: '7c9e6679-7425-40de-944b-e07fc1f90ae7',
userProfileId: '7c9e6679-7425-40de-944b-e07fc1f90ae7',
auth0Sub: 'auth0|123456',
email: 'admin@motovaultpro.com',
role: 'admin' as const,
role: 'admin',
createdAt: new Date(),
createdBy: '550e8400-e29b-41d4-a716-446655440000',
createdBy: 'system',
revokedAt: null,
updatedAt: new Date(),
};
mockRepository.getAdminById.mockResolvedValue(mockAdmin);
mockRepository.getAdminByAuth0Sub.mockResolvedValue(mockAdmin);
const result = await adminService.getAdminById('7c9e6679-7425-40de-944b-e07fc1f90ae7');
const result = await adminService.getAdminByAuth0Sub('auth0|123456');
expect(result).toEqual(mockAdmin);
expect(mockRepository.getAdminById).toHaveBeenCalledWith('7c9e6679-7425-40de-944b-e07fc1f90ae7');
expect(mockRepository.getAdminByAuth0Sub).toHaveBeenCalledWith('auth0|123456');
});
it('should return null when admin not found', async () => {
mockRepository.getAdminById.mockResolvedValue(null);
mockRepository.getAdminByAuth0Sub.mockResolvedValue(null);
const result = await adminService.getAdminById('00000000-0000-0000-0000-000000000000');
const result = await adminService.getAdminByAuth0Sub('auth0|unknown');
expect(result).toBeNull();
});
@@ -68,15 +57,12 @@ describe('AdminService', () => {
describe('createAdmin', () => {
it('should create new admin and log audit', async () => {
const newAdminId = '8f14e45f-ceea-367f-a27f-c9a6d0c67e0e';
const creatorId = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const mockAdmin = {
id: newAdminId,
userProfileId: newAdminId,
auth0Sub: 'auth0|newadmin',
email: 'newadmin@motovaultpro.com',
role: 'admin' as const,
role: 'admin',
createdAt: new Date(),
createdBy: creatorId,
createdBy: 'auth0|existing',
revokedAt: null,
updatedAt: new Date(),
};
@@ -88,16 +74,16 @@ describe('AdminService', () => {
const result = await adminService.createAdmin(
'newadmin@motovaultpro.com',
'admin',
newAdminId,
creatorId
'auth0|newadmin',
'auth0|existing'
);
expect(result).toEqual(mockAdmin);
expect(mockRepository.createAdmin).toHaveBeenCalled();
expect(mockRepository.logAuditAction).toHaveBeenCalledWith(
creatorId,
'auth0|existing',
'CREATE',
mockAdmin.id,
mockAdmin.auth0Sub,
'admin_user',
mockAdmin.email,
expect.any(Object)
@@ -105,14 +91,12 @@ describe('AdminService', () => {
});
it('should reject if admin already exists', async () => {
const existingId = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const existingAdmin = {
id: existingId,
userProfileId: existingId,
auth0Sub: 'auth0|existing',
email: 'admin@motovaultpro.com',
role: 'admin' as const,
role: 'admin',
createdAt: new Date(),
createdBy: '550e8400-e29b-41d4-a716-446655440000',
createdBy: 'system',
revokedAt: null,
updatedAt: new Date(),
};
@@ -120,46 +104,39 @@ describe('AdminService', () => {
mockRepository.getAdminByEmail.mockResolvedValue(existingAdmin);
await expect(
adminService.createAdmin('admin@motovaultpro.com', 'admin', '8f14e45f-ceea-367f-a27f-c9a6d0c67e0e', existingId)
adminService.createAdmin('admin@motovaultpro.com', 'admin', 'auth0|new', 'auth0|existing')
).rejects.toThrow('already exists');
});
});
describe('revokeAdmin', () => {
it('should revoke admin when multiple active admins exist', async () => {
const toRevokeId = 'a1b2c3d4-e5f6-7890-1234-567890abcdef';
const admin1Id = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
const admin2Id = '8f14e45f-ceea-367f-a27f-c9a6d0c67e0e';
const revokedAdmin = {
id: toRevokeId,
userProfileId: toRevokeId,
auth0Sub: 'auth0|toadmin',
email: 'toadmin@motovaultpro.com',
role: 'admin' as const,
role: 'admin',
createdAt: new Date(),
createdBy: '550e8400-e29b-41d4-a716-446655440000',
createdBy: 'system',
revokedAt: new Date(),
updatedAt: new Date(),
};
const activeAdmins = [
{
id: admin1Id,
userProfileId: admin1Id,
auth0Sub: 'auth0|admin1',
email: 'admin1@motovaultpro.com',
role: 'admin' as const,
role: 'admin',
createdAt: new Date(),
createdBy: '550e8400-e29b-41d4-a716-446655440000',
createdBy: 'system',
revokedAt: null,
updatedAt: new Date(),
},
{
id: admin2Id,
userProfileId: admin2Id,
auth0Sub: 'auth0|admin2',
email: 'admin2@motovaultpro.com',
role: 'admin' as const,
role: 'admin',
createdAt: new Date(),
createdBy: '550e8400-e29b-41d4-a716-446655440000',
createdBy: 'system',
revokedAt: null,
updatedAt: new Date(),
},
@@ -169,22 +146,20 @@ describe('AdminService', () => {
mockRepository.revokeAdmin.mockResolvedValue(revokedAdmin);
mockRepository.logAuditAction.mockResolvedValue({} as any);
const result = await adminService.revokeAdmin(toRevokeId, admin1Id);
const result = await adminService.revokeAdmin('auth0|toadmin', 'auth0|admin1');
expect(result).toEqual(revokedAdmin);
expect(mockRepository.revokeAdmin).toHaveBeenCalledWith(toRevokeId);
expect(mockRepository.revokeAdmin).toHaveBeenCalledWith('auth0|toadmin');
expect(mockRepository.logAuditAction).toHaveBeenCalled();
});
it('should prevent revoking last active admin', async () => {
const lastAdminId = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const lastAdmin = {
id: lastAdminId,
userProfileId: lastAdminId,
auth0Sub: 'auth0|lastadmin',
email: 'last@motovaultpro.com',
role: 'admin' as const,
role: 'admin',
createdAt: new Date(),
createdBy: '550e8400-e29b-41d4-a716-446655440000',
createdBy: 'system',
revokedAt: null,
updatedAt: new Date(),
};
@@ -192,22 +167,19 @@ describe('AdminService', () => {
mockRepository.getActiveAdmins.mockResolvedValue([lastAdmin]);
await expect(
adminService.revokeAdmin(lastAdminId, lastAdminId)
adminService.revokeAdmin('auth0|lastadmin', 'auth0|lastadmin')
).rejects.toThrow('Cannot revoke the last active admin');
});
});
describe('reinstateAdmin', () => {
it('should reinstate revoked admin and log audit', async () => {
const reinstateId = 'b2c3d4e5-f6a7-8901-2345-678901bcdef0';
const adminActorId = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const reinstatedAdmin = {
id: reinstateId,
userProfileId: reinstateId,
auth0Sub: 'auth0|reinstate',
email: 'reinstate@motovaultpro.com',
role: 'admin' as const,
role: 'admin',
createdAt: new Date(),
createdBy: '550e8400-e29b-41d4-a716-446655440000',
createdBy: 'system',
revokedAt: null,
updatedAt: new Date(),
};
@@ -215,14 +187,14 @@ describe('AdminService', () => {
mockRepository.reinstateAdmin.mockResolvedValue(reinstatedAdmin);
mockRepository.logAuditAction.mockResolvedValue({} as any);
const result = await adminService.reinstateAdmin(reinstateId, adminActorId);
const result = await adminService.reinstateAdmin('auth0|reinstate', 'auth0|admin');
expect(result).toEqual(reinstatedAdmin);
expect(mockRepository.reinstateAdmin).toHaveBeenCalledWith(reinstateId);
expect(mockRepository.reinstateAdmin).toHaveBeenCalledWith('auth0|reinstate');
expect(mockRepository.logAuditAction).toHaveBeenCalledWith(
adminActorId,
'auth0|admin',
'REINSTATE',
reinstateId,
'auth0|reinstate',
'admin_user',
reinstatedAdmin.email
);

View File

@@ -1,19 +0,0 @@
# audit-log/
## Files
| File | What | When to read |
| ---- | ---- | ------------ |
| `README.md` | Architecture, usage patterns, categories | Understanding audit log system |
| `audit-log.instance.ts` | Singleton service instance | Cross-feature logging integration |
## Subdirectories
| Directory | What | When to read |
| --------- | ---- | ------------ |
| `api/` | HTTP endpoints for log viewing/export | API route changes |
| `domain/` | Business logic, types, service | Core audit logging logic |
| `data/` | Repository, database queries | Database operations |
| `jobs/` | Scheduled cleanup job | Retention policy |
| `migrations/` | Database schema | Schema changes |
| `__tests__/` | Integration tests | Adding or modifying tests |

View File

@@ -1,168 +0,0 @@
# Audit Log Feature
Centralized audit logging system for tracking all user and system actions across MotoVaultPro.
## Architecture
```
Frontend
+--------------+ +-------------------+
| AdminLogsPage| | AdminLogsMobile |
| (desktop) | | Screen (mobile) |
+------+-------+ +--------+----------+
| |
+-------------------+
|
| useAuditLogs hook
v
adminApi.unifiedAuditLogs
|
| HTTP
v
GET /api/admin/audit-logs?search=X&category=Y&...
GET /api/admin/audit-logs/export
|
+--------v--------+
| AuditLogController |
+--------+--------+
|
+--------v--------+
| AuditLogService |<----- Other services call
| log(category,...)| auditLogService.info()
+--------+--------+
|
+--------v--------+
| AuditLogRepository |
+--------+--------+
v
+-------------+
| audit_logs | (PostgreSQL)
+-------------+
```
## Data Flow
```
Feature Service (vehicles, auth, etc.)
|
| auditLogService.info(category, userId, action, resourceType?, resourceId?, details?)
v
AuditLogService
|
| INSERT INTO audit_logs
v
PostgreSQL audit_logs table
|
| GET /api/admin/audit-logs (with filters)
v
AdminLogsPage/Mobile displays filtered, paginated results
```
## Database Schema
```sql
CREATE TABLE audit_logs (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
category VARCHAR(20) NOT NULL CHECK (category IN ('auth', 'vehicle', 'user', 'system', 'admin')),
severity VARCHAR(10) NOT NULL CHECK (severity IN ('info', 'warning', 'error')),
user_id VARCHAR(255), -- NULL for system-initiated actions
action VARCHAR(500) NOT NULL,
resource_type VARCHAR(100),
resource_id VARCHAR(255),
details JSONB,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
```
## Indexes
- `idx_audit_logs_category_created` - B-tree for category filtering
- `idx_audit_logs_severity_created` - B-tree for severity filtering
- `idx_audit_logs_user_created` - B-tree for user filtering
- `idx_audit_logs_created` - B-tree for date ordering
- `idx_audit_logs_action_gin` - GIN trigram for text search
## API Endpoints
### GET /api/admin/audit-logs
Returns paginated audit logs with optional filters.
**Query Parameters:**
- `search` - Text search on action field (ILIKE)
- `category` - Filter by category (auth, vehicle, user, system, admin)
- `severity` - Filter by severity (info, warning, error)
- `startDate` - ISO date string for date range start
- `endDate` - ISO date string for date range end
- `limit` - Page size (default 25, max 100)
- `offset` - Pagination offset
**Response:**
```json
{
"logs": [
{
"id": "uuid",
"category": "vehicle",
"severity": "info",
"userId": "auth0|...",
"action": "Vehicle created: 2024 Toyota Camry",
"resourceType": "vehicle",
"resourceId": "vehicle-uuid",
"details": { "vin": "...", "make": "Toyota" },
"createdAt": "2024-01-15T10:30:00Z"
}
],
"total": 150,
"limit": 25,
"offset": 0
}
```
### GET /api/admin/audit-logs/export
Returns CSV file with filtered audit logs.
**Query Parameters:** Same as list endpoint (except pagination)
**Response:** CSV file download
## Usage in Features
```typescript
import { auditLogService } from '../../audit-log';
// In vehicles.service.ts
await auditLogService.info(
'vehicle',
userId,
`Vehicle created: ${vehicleDesc}`,
'vehicle',
vehicleId,
{ vin, make, model, year }
).catch(err => logger.error('Failed to log audit event', { error: err }));
```
## Retention Policy
- Logs older than 90 days are automatically deleted
- Cleanup job runs daily at 3 AM
- Implemented in `jobs/cleanup.job.ts`
## Categories
| Category | Description | Examples |
|----------|-------------|----------|
| `auth` | Authentication events | Signup, password reset |
| `vehicle` | Vehicle CRUD | Create, update, delete |
| `user` | User management | Profile updates |
| `system` | System operations | Backup, restore |
| `admin` | Admin actions | Grant/revoke admin |
## Severity Levels
| Level | Color (UI) | Description |
|-------|------------|-------------|
| `info` | Blue | Normal operations |
| `warning` | Yellow | Potential issues |
| `error` | Red | Failed operations |

View File

@@ -1,308 +0,0 @@
/**
* @ai-summary Integration tests for audit log wiring across features
* @ai-context Verifies audit logging is properly integrated into auth, vehicle, admin, and backup features
*/
import { Pool } from 'pg';
import { appConfig } from '../../../core/config/config-loader';
import { AuditLogService } from '../domain/audit-log.service';
import { AuditLogRepository } from '../data/audit-log.repository';
describe('AuditLog Feature Integration', () => {
let pool: Pool;
let repository: AuditLogRepository;
let service: AuditLogService;
const createdIds: string[] = [];
beforeAll(async () => {
pool = new Pool({
connectionString: appConfig.getDatabaseUrl(),
});
repository = new AuditLogRepository(pool);
service = new AuditLogService(repository);
});
afterAll(async () => {
// Cleanup test data
if (createdIds.length > 0) {
await pool.query(`DELETE FROM audit_logs WHERE id = ANY($1::uuid[])`, [createdIds]);
}
await pool.end();
});
describe('Vehicle logging integration', () => {
it('should create audit log with vehicle category and correct resource', async () => {
const userId = '550e8400-e29b-41d4-a716-446655440000';
const vehicleId = 'vehicle-uuid-123';
const entry = await service.info(
'vehicle',
userId,
'Vehicle created: 2024 Toyota Camry',
'vehicle',
vehicleId,
{ vin: '1HGBH41JXMN109186', make: 'Toyota', model: 'Camry', year: 2024 }
);
createdIds.push(entry.id);
expect(entry.category).toBe('vehicle');
expect(entry.severity).toBe('info');
expect(entry.userId).toBe(userId);
expect(entry.action).toContain('Vehicle created');
expect(entry.resourceType).toBe('vehicle');
expect(entry.resourceId).toBe(vehicleId);
expect(entry.details).toHaveProperty('vin');
expect(entry.details).toHaveProperty('make', 'Toyota');
});
it('should log vehicle update with correct fields', async () => {
const userId = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
const vehicleId = 'vehicle-uuid-456';
const entry = await service.info(
'vehicle',
userId,
'Vehicle updated: 2024 Toyota Camry',
'vehicle',
vehicleId,
{ updatedFields: ['color', 'licensePlate'] }
);
createdIds.push(entry.id);
expect(entry.category).toBe('vehicle');
expect(entry.action).toContain('Vehicle updated');
expect(entry.details).toHaveProperty('updatedFields');
});
it('should log vehicle deletion with vehicle info', async () => {
const userId = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const vehicleId = 'vehicle-uuid-789';
const entry = await service.info(
'vehicle',
userId,
'Vehicle deleted: 2024 Toyota Camry',
'vehicle',
vehicleId,
{ vin: '1HGBH41JXMN109186', make: 'Toyota', model: 'Camry', year: 2024 }
);
createdIds.push(entry.id);
expect(entry.category).toBe('vehicle');
expect(entry.action).toContain('Vehicle deleted');
expect(entry.resourceId).toBe(vehicleId);
});
});
describe('Auth logging integration', () => {
it('should create audit log with auth category for signup', async () => {
const userId = '550e8400-e29b-41d4-a716-446655440000';
const entry = await service.info(
'auth',
userId,
'User signup: test@example.com',
'user',
userId,
{ email: 'test@example.com', ipAddress: '192.168.1.1' }
);
createdIds.push(entry.id);
expect(entry.category).toBe('auth');
expect(entry.severity).toBe('info');
expect(entry.userId).toBe(userId);
expect(entry.action).toContain('signup');
expect(entry.resourceType).toBe('user');
});
it('should create audit log for password reset request', async () => {
const userId = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
const entry = await service.info(
'auth',
userId,
'Password reset requested',
'user',
userId
);
createdIds.push(entry.id);
expect(entry.category).toBe('auth');
expect(entry.action).toBe('Password reset requested');
});
});
describe('Admin logging integration', () => {
it('should create audit log for admin user creation', async () => {
const adminId = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const targetAdminId = '8f14e45f-ceea-367f-a27f-c9a6d0c67e0e';
const entry = await service.info(
'admin',
adminId,
'Admin user created: newadmin@example.com',
'admin_user',
targetAdminId,
{ email: 'newadmin@example.com', role: 'admin' }
);
createdIds.push(entry.id);
expect(entry.category).toBe('admin');
expect(entry.severity).toBe('info');
expect(entry.userId).toBe(adminId);
expect(entry.action).toContain('Admin user created');
expect(entry.resourceType).toBe('admin_user');
expect(entry.details).toHaveProperty('role', 'admin');
});
it('should create audit log for admin revocation', async () => {
const adminId = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const targetAdminId = 'a1b2c3d4-e5f6-7890-1234-567890abcdef';
const entry = await service.info(
'admin',
adminId,
'Admin user revoked: revoked@example.com',
'admin_user',
targetAdminId,
{ email: 'revoked@example.com' }
);
createdIds.push(entry.id);
expect(entry.category).toBe('admin');
expect(entry.action).toContain('Admin user revoked');
});
it('should create audit log for admin reinstatement', async () => {
const adminId = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const targetAdminId = 'b2c3d4e5-f6a7-8901-2345-678901bcdef0';
const entry = await service.info(
'admin',
adminId,
'Admin user reinstated: reinstated@example.com',
'admin_user',
targetAdminId,
{ email: 'reinstated@example.com' }
);
createdIds.push(entry.id);
expect(entry.category).toBe('admin');
expect(entry.action).toContain('Admin user reinstated');
});
});
describe('Backup/System logging integration', () => {
it('should create audit log for backup creation', async () => {
const adminId = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const backupId = 'backup-uuid-123';
const entry = await service.info(
'system',
adminId,
'Backup created: Manual backup',
'backup',
backupId,
{ name: 'Manual backup', includeDocuments: true }
);
createdIds.push(entry.id);
expect(entry.category).toBe('system');
expect(entry.severity).toBe('info');
expect(entry.action).toContain('Backup created');
expect(entry.resourceType).toBe('backup');
expect(entry.resourceId).toBe(backupId);
});
it('should create audit log for backup restore', async () => {
const adminId = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const backupId = 'backup-uuid-456';
const entry = await service.info(
'system',
adminId,
'Backup restored: backup-uuid-456',
'backup',
backupId,
{ safetyBackupId: 'safety-backup-uuid' }
);
createdIds.push(entry.id);
expect(entry.category).toBe('system');
expect(entry.action).toContain('Backup restored');
});
it('should create error-level audit log for backup failure', async () => {
const adminId = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const backupId = 'backup-uuid-789';
const entry = await service.error(
'system',
adminId,
'Backup failed: Daily backup',
'backup',
backupId,
{ error: 'Disk full' }
);
createdIds.push(entry.id);
expect(entry.category).toBe('system');
expect(entry.severity).toBe('error');
expect(entry.action).toContain('Backup failed');
expect(entry.details).toHaveProperty('error', 'Disk full');
});
it('should create error-level audit log for restore failure', async () => {
const adminId = '7c9e6679-7425-40de-944b-e07fc1f90ae7';
const backupId = 'backup-uuid-restore-fail';
const entry = await service.error(
'system',
adminId,
'Backup restore failed: backup-uuid-restore-fail',
'backup',
backupId,
{ error: 'Corrupted archive', safetyBackupId: 'safety-uuid' }
);
createdIds.push(entry.id);
expect(entry.category).toBe('system');
expect(entry.severity).toBe('error');
expect(entry.action).toContain('restore failed');
});
});
describe('Cross-feature audit log queries', () => {
it('should be able to filter logs by category', async () => {
// Search for vehicle logs
const vehicleResult = await service.search(
{ category: 'vehicle' },
{ limit: 100, offset: 0 }
);
expect(vehicleResult.logs.length).toBeGreaterThan(0);
expect(vehicleResult.logs.every((log) => log.category === 'vehicle')).toBe(true);
});
it('should be able to search across all categories', async () => {
const result = await service.search(
{ search: 'created' },
{ limit: 100, offset: 0 }
);
expect(result.logs.length).toBeGreaterThan(0);
// Should find logs from vehicle and admin categories
const categories = new Set(result.logs.map((log) => log.category));
expect(categories.size).toBeGreaterThanOrEqual(1);
});
it('should be able to filter by severity across categories', async () => {
const errorResult = await service.search(
{ severity: 'error' },
{ limit: 100, offset: 0 }
);
expect(errorResult.logs.every((log) => log.severity === 'error')).toBe(true);
});
});
});

View File

@@ -1,126 +0,0 @@
/**
* @ai-summary Integration tests for audit log API routes
* @ai-context Tests endpoints with authentication, filtering, and export
*/
import { FastifyInstance } from 'fastify';
import { Pool } from 'pg';
import { appConfig } from '../../../core/config/config-loader';
// Mock the authentication for testing
const mockAdminUser = {
userId: 'admin-test-user',
email: 'admin@test.com',
isAdmin: true,
};
describe('Audit Log Routes', () => {
let app: FastifyInstance;
let pool: Pool;
const createdIds: string[] = [];
beforeAll(async () => {
// Import and build app
const { default: buildApp } = await import('../../../app');
app = await buildApp();
pool = new Pool({
connectionString: appConfig.getDatabaseUrl(),
});
// Create test data
const testLogs = [
{ category: 'auth', severity: 'info', action: 'User logged in', user_id: 'user-1' },
{ category: 'auth', severity: 'warning', action: 'Failed login attempt', user_id: 'user-2' },
{ category: 'vehicle', severity: 'info', action: 'Vehicle created', user_id: 'user-1' },
{ category: 'admin', severity: 'error', action: 'Admin action failed', user_id: 'admin-1' },
];
for (const log of testLogs) {
const result = await pool.query(
`INSERT INTO audit_logs (category, severity, action, user_id)
VALUES ($1, $2, $3, $4) RETURNING id`,
[log.category, log.severity, log.action, log.user_id]
);
createdIds.push(result.rows[0].id);
}
});
afterAll(async () => {
// Cleanup test data
if (createdIds.length > 0) {
await pool.query(`DELETE FROM audit_logs WHERE id = ANY($1::uuid[])`, [createdIds]);
}
await pool.end();
await app.close();
});
describe('GET /api/admin/audit-logs', () => {
it('should return 403 for non-admin users', async () => {
const response = await app.inject({
method: 'GET',
url: '/api/admin/audit-logs',
headers: {
authorization: 'Bearer non-admin-token',
},
});
expect(response.statusCode).toBe(401);
});
it('should return paginated results for admin', async () => {
// This test requires proper auth mocking which depends on the app setup
// In a real test environment, you'd mock the auth middleware
const response = await app.inject({
method: 'GET',
url: '/api/admin/audit-logs',
// Would need proper auth headers
});
// Without proper auth, expect 401
expect([200, 401]).toContain(response.statusCode);
});
it('should validate category parameter', async () => {
const response = await app.inject({
method: 'GET',
url: '/api/admin/audit-logs?category=invalid',
});
// Either 400 for invalid category or 401 for no auth
expect([400, 401]).toContain(response.statusCode);
});
it('should validate severity parameter', async () => {
const response = await app.inject({
method: 'GET',
url: '/api/admin/audit-logs?severity=invalid',
});
// Either 400 for invalid severity or 401 for no auth
expect([400, 401]).toContain(response.statusCode);
});
});
describe('GET /api/admin/audit-logs/export', () => {
it('should return 401 for non-admin users', async () => {
const response = await app.inject({
method: 'GET',
url: '/api/admin/audit-logs/export',
});
expect(response.statusCode).toBe(401);
});
});
describe('AuditLogController direct tests', () => {
// Test the controller directly without auth
it('should build valid CSV output', async () => {
const { AuditLogController } = await import('../api/audit-log.controller');
const controller = new AuditLogController();
// Controller is instantiated correctly
expect(controller).toBeDefined();
});
});
});

View File

@@ -1,207 +0,0 @@
/**
* @ai-summary Integration tests for AuditLogService
* @ai-context Tests log creation, search, filtering, and cleanup
*/
import { Pool } from 'pg';
import { appConfig } from '../../../core/config/config-loader';
import { AuditLogService } from '../domain/audit-log.service';
import { AuditLogRepository } from '../data/audit-log.repository';
describe('AuditLogService', () => {
let pool: Pool;
let repository: AuditLogRepository;
let service: AuditLogService;
const createdIds: string[] = [];
beforeAll(async () => {
pool = new Pool({
connectionString: appConfig.getDatabaseUrl(),
});
repository = new AuditLogRepository(pool);
service = new AuditLogService(repository);
});
afterAll(async () => {
// Cleanup test data
if (createdIds.length > 0) {
await pool.query(`DELETE FROM audit_logs WHERE id = ANY($1::uuid[])`, [createdIds]);
}
await pool.end();
});
describe('log()', () => {
it('should create log entry with all fields', async () => {
const entry = await service.log(
'auth',
'info',
'user-123',
'User logged in',
'session',
'session-456',
{ ip: '192.168.1.1', browser: 'Chrome' }
);
createdIds.push(entry.id);
expect(entry.id).toBeDefined();
expect(entry.category).toBe('auth');
expect(entry.severity).toBe('info');
expect(entry.userId).toBe('user-123');
expect(entry.action).toBe('User logged in');
expect(entry.resourceType).toBe('session');
expect(entry.resourceId).toBe('session-456');
expect(entry.details).toEqual({ ip: '192.168.1.1', browser: 'Chrome' });
expect(entry.createdAt).toBeInstanceOf(Date);
});
it('should create log entry with null userId for system actions', async () => {
const entry = await service.log(
'system',
'info',
null,
'Scheduled backup started'
);
createdIds.push(entry.id);
expect(entry.id).toBeDefined();
expect(entry.category).toBe('system');
expect(entry.userId).toBeNull();
});
it('should throw error for invalid category', async () => {
await expect(
service.log(
'invalid' as any,
'info',
'user-123',
'Test action'
)
).rejects.toThrow('Invalid audit log category');
});
it('should throw error for invalid severity', async () => {
await expect(
service.log(
'auth',
'invalid' as any,
'user-123',
'Test action'
)
).rejects.toThrow('Invalid audit log severity');
});
});
describe('convenience methods', () => {
it('info() should create info-level log', async () => {
const entry = await service.info('vehicle', 'user-123', 'Vehicle created');
createdIds.push(entry.id);
expect(entry.severity).toBe('info');
});
it('warning() should create warning-level log', async () => {
const entry = await service.warning('user', 'user-123', 'Password reset requested');
createdIds.push(entry.id);
expect(entry.severity).toBe('warning');
});
it('error() should create error-level log', async () => {
const entry = await service.error('admin', 'admin-123', 'Failed to revoke user');
createdIds.push(entry.id);
expect(entry.severity).toBe('error');
});
});
describe('search()', () => {
beforeAll(async () => {
// Create test data for search
const testLogs = [
{ category: 'auth', severity: 'info', action: 'Login successful' },
{ category: 'auth', severity: 'warning', action: 'Login failed' },
{ category: 'vehicle', severity: 'info', action: 'Vehicle created' },
{ category: 'vehicle', severity: 'info', action: 'Vehicle updated' },
{ category: 'admin', severity: 'error', action: 'Admin action failed' },
];
for (const log of testLogs) {
const entry = await service.log(
log.category as any,
log.severity as any,
'test-user',
log.action
);
createdIds.push(entry.id);
}
});
it('should return paginated results', async () => {
const result = await service.search({}, { limit: 10, offset: 0 });
expect(result.logs).toBeInstanceOf(Array);
expect(result.total).toBeGreaterThan(0);
expect(result.limit).toBe(10);
expect(result.offset).toBe(0);
});
it('should filter by category', async () => {
const result = await service.search(
{ category: 'auth' },
{ limit: 100, offset: 0 }
);
expect(result.logs.length).toBeGreaterThan(0);
expect(result.logs.every((log) => log.category === 'auth')).toBe(true);
});
it('should filter by severity', async () => {
const result = await service.search(
{ severity: 'error' },
{ limit: 100, offset: 0 }
);
expect(result.logs.every((log) => log.severity === 'error')).toBe(true);
});
it('should search by action text', async () => {
const result = await service.search(
{ search: 'Login' },
{ limit: 100, offset: 0 }
);
expect(result.logs.length).toBeGreaterThan(0);
expect(result.logs.every((log) => log.action.includes('Login'))).toBe(true);
});
});
describe('cleanup()', () => {
it('should delete entries older than specified days', async () => {
// Create an old entry by directly inserting
await pool.query(`
INSERT INTO audit_logs (category, severity, action, created_at)
VALUES ('system', 'info', 'Old test entry', NOW() - INTERVAL '100 days')
`);
const deletedCount = await service.cleanup(90);
expect(deletedCount).toBeGreaterThanOrEqual(1);
});
it('should not delete recent entries', async () => {
const entry = await service.log('system', 'info', null, 'Recent entry');
createdIds.push(entry.id);
await service.cleanup(90);
// Verify entry still exists
const result = await pool.query(
'SELECT id FROM audit_logs WHERE id = $1',
[entry.id]
);
expect(result.rows.length).toBe(1);
});
});
});

View File

@@ -1,130 +0,0 @@
/**
* @ai-summary Integration tests for audit_logs table migration
* @ai-context Tests table creation, constraints, and indexes
*/
import { Pool } from 'pg';
import { appConfig } from '../../../core/config/config-loader';
describe('Audit Logs Migration', () => {
let pool: Pool;
beforeAll(async () => {
pool = new Pool({
connectionString: appConfig.getDatabaseUrl(),
});
});
afterAll(async () => {
await pool.end();
});
describe('Table Structure', () => {
it('should have audit_logs table with correct columns', async () => {
const result = await pool.query(`
SELECT column_name, data_type, is_nullable
FROM information_schema.columns
WHERE table_name = 'audit_logs'
ORDER BY ordinal_position
`);
const columns = result.rows.map((row) => row.column_name);
expect(columns).toContain('id');
expect(columns).toContain('category');
expect(columns).toContain('severity');
expect(columns).toContain('user_id');
expect(columns).toContain('action');
expect(columns).toContain('resource_type');
expect(columns).toContain('resource_id');
expect(columns).toContain('details');
expect(columns).toContain('created_at');
});
});
describe('CHECK Constraints', () => {
it('should accept valid category values', async () => {
const validCategories = ['auth', 'vehicle', 'user', 'system', 'admin'];
for (const category of validCategories) {
const result = await pool.query(
`INSERT INTO audit_logs (category, severity, action)
VALUES ($1, 'info', 'test action')
RETURNING id`,
[category]
);
expect(result.rows[0].id).toBeDefined();
// Cleanup
await pool.query('DELETE FROM audit_logs WHERE id = $1', [result.rows[0].id]);
}
});
it('should reject invalid category values', async () => {
await expect(
pool.query(
`INSERT INTO audit_logs (category, severity, action)
VALUES ('invalid', 'info', 'test action')`
)
).rejects.toThrow();
});
it('should accept valid severity values', async () => {
const validSeverities = ['info', 'warning', 'error'];
for (const severity of validSeverities) {
const result = await pool.query(
`INSERT INTO audit_logs (category, severity, action)
VALUES ('auth', $1, 'test action')
RETURNING id`,
[severity]
);
expect(result.rows[0].id).toBeDefined();
// Cleanup
await pool.query('DELETE FROM audit_logs WHERE id = $1', [result.rows[0].id]);
}
});
it('should reject invalid severity values', async () => {
await expect(
pool.query(
`INSERT INTO audit_logs (category, severity, action)
VALUES ('auth', 'invalid', 'test action')`
)
).rejects.toThrow();
});
});
describe('Nullable Columns', () => {
it('should allow NULL user_id for system actions', async () => {
const result = await pool.query(
`INSERT INTO audit_logs (category, severity, user_id, action)
VALUES ('system', 'info', NULL, 'system startup')
RETURNING id, user_id`
);
expect(result.rows[0].id).toBeDefined();
expect(result.rows[0].user_id).toBeNull();
// Cleanup
await pool.query('DELETE FROM audit_logs WHERE id = $1', [result.rows[0].id]);
});
});
describe('Indexes', () => {
it('should have required indexes', async () => {
const result = await pool.query(`
SELECT indexname
FROM pg_indexes
WHERE tablename = 'audit_logs'
`);
const indexNames = result.rows.map((row) => row.indexname);
expect(indexNames).toContain('idx_audit_logs_category_created');
expect(indexNames).toContain('idx_audit_logs_severity_created');
expect(indexNames).toContain('idx_audit_logs_user_created');
expect(indexNames).toContain('idx_audit_logs_created');
expect(indexNames).toContain('idx_audit_logs_action_gin');
});
});
});

View File

@@ -1,154 +0,0 @@
/**
* @ai-summary Fastify route handlers for audit log API
* @ai-context HTTP request/response handling for audit log search and export
*/
import { FastifyRequest, FastifyReply } from 'fastify';
import { AuditLogService } from '../domain/audit-log.service';
import { AuditLogRepository } from '../data/audit-log.repository';
import { AuditLogFilters, isValidCategory, isValidSeverity } from '../domain/audit-log.types';
import { pool } from '../../../core/config/database';
import { logger } from '../../../core/logging/logger';
// Raw query-string parameters; numeric and date values arrive as strings
// and are parsed/validated in the handlers below.
interface AuditLogsQuery {
  search?: string;
  category?: string;
  severity?: string;
  startDate?: string;
  endDate?: string;
  limit?: string;
  offset?: string;
}

export class AuditLogController {
  private service: AuditLogService;

  constructor() {
    const repository = new AuditLogRepository(pool);
    this.service = new AuditLogService(repository);
  }

  /**
   * GET /api/admin/audit-logs - Search audit logs with filters
   *
   * Validates category/severity query params, clamps the page size to 100,
   * and returns the paginated search result. Responds 400 on invalid enum
   * values and 500 on unexpected failures.
   */
  async getAuditLogs(
    request: FastifyRequest<{ Querystring: AuditLogsQuery }>,
    reply: FastifyReply
  ) {
    try {
      const { search, category, severity, startDate, endDate, limit, offset } = request.query;

      // Validate category if provided
      if (category && !isValidCategory(category)) {
        return reply.code(400).send({
          error: 'Bad Request',
          message: `Invalid category: ${category}. Valid values: auth, vehicle, user, system, admin`,
        });
      }

      // Validate severity if provided
      if (severity && !isValidSeverity(severity)) {
        return reply.code(400).send({
          error: 'Bad Request',
          message: `Invalid severity: ${severity}. Valid values: info, warning, error`,
        });
      }

      const filters: AuditLogFilters = {
        search,
        category: category as AuditLogFilters['category'],
        severity: severity as AuditLogFilters['severity'],
        startDate: startDate ? new Date(startDate) : undefined,
        endDate: endDate ? new Date(endDate) : undefined,
      };

      const pagination = {
        // Default page size 50, hard cap at 100 rows per request.
        limit: Math.min(parseInt(limit || '50', 10), 100),
        offset: parseInt(offset || '0', 10),
      };

      const result = await this.service.search(filters, pagination);
      return reply.send(result);
    } catch (error) {
      logger.error('Error fetching audit logs', { error });
      return reply.code(500).send({
        error: 'Internal Server Error',
        message: 'Failed to fetch audit logs',
      });
    }
  }

  /**
   * GET /api/admin/audit-logs/export - Export audit logs as CSV
   *
   * Applies the same filter validation as getAuditLogs, then returns a CSV
   * attachment. The service caps exports at 5000 rows; when truncated the
   * X-Export-Truncated / X-Export-Limit response headers are set.
   */
  async exportAuditLogs(
    request: FastifyRequest<{ Querystring: AuditLogsQuery }>,
    reply: FastifyReply
  ) {
    try {
      const { search, category, severity, startDate, endDate } = request.query;

      // Validate category if provided
      if (category && !isValidCategory(category)) {
        return reply.code(400).send({
          error: 'Bad Request',
          message: `Invalid category: ${category}`,
        });
      }

      // Validate severity if provided
      if (severity && !isValidSeverity(severity)) {
        return reply.code(400).send({
          error: 'Bad Request',
          message: `Invalid severity: ${severity}`,
        });
      }

      const filters: AuditLogFilters = {
        search,
        category: category as AuditLogFilters['category'],
        severity: severity as AuditLogFilters['severity'],
        startDate: startDate ? new Date(startDate) : undefined,
        endDate: endDate ? new Date(endDate) : undefined,
      };

      const { logs, truncated } = await this.service.getForExport(filters);

      // Generate CSV. Only `action` is quoted/escaped here.
      // NOTE(review): assumes the remaining columns (IDs, enum values, ISO
      // timestamps) never contain commas or quotes — confirm for user_id /
      // resource_id before relying on this export for machine parsing.
      const headers = ['ID', 'Timestamp', 'Category', 'Severity', 'User ID', 'Action', 'Resource Type', 'Resource ID'];
      const rows = logs.map((log) => [
        log.id,
        log.createdAt.toISOString(),
        log.category,
        log.severity,
        log.userId || '',
        `"${log.action.replace(/"/g, '""')}"`, // Escape quotes in CSV
        log.resourceType || '',
        log.resourceId || '',
      ]);
      const csv = [headers.join(','), ...rows.map((row) => row.join(','))].join('\n');

      // Set headers for file download.
      // BUG FIX: Content-Disposition previously interpolated a literal
      // placeholder instead of the generated filename, so downloads did not
      // get the date-stamped name built on the line above.
      const filename = `audit-logs-${new Date().toISOString().split('T')[0]}.csv`;
      reply.header('Content-Type', 'text/csv');
      reply.header('Content-Disposition', `attachment; filename="${filename}"`);

      // Warn if results were truncated
      if (truncated) {
        reply.header('X-Export-Truncated', 'true');
        reply.header('X-Export-Limit', '5000');
        logger.warn('Audit log export was truncated', { exportedCount: logs.length });
      }

      return reply.send(csv);
    } catch (error) {
      logger.error('Error exporting audit logs', { error });
      return reply.code(500).send({
        error: 'Internal Server Error',
        message: 'Failed to export audit logs',
      });
    }
  }
}

View File

@@ -1,50 +0,0 @@
/**
 * @ai-summary Audit log feature routes
 * @ai-context Registers audit log API endpoints with admin authorization
 */
import { FastifyPluginAsync } from 'fastify';
import { AuditLogController } from './audit-log.controller';

// Raw query-string shape; values are parsed downstream in the controller.
interface AuditLogsQuery {
  search?: string;
  category?: string;
  severity?: string;
  startDate?: string;
  endDate?: string;
  limit?: string;
  offset?: string;
}

/**
 * Registers the admin-only audit log endpoints.
 *
 * GET /api/admin/audit-logs — search with filters and pagination.
 *   Query params:
 *     search    — text search on the action field
 *     category  — auth | vehicle | user | system | admin
 *     severity  — info | warning | error
 *     startDate — filter lower bound (ISO string)
 *     endDate   — filter upper bound (ISO string)
 *     limit     — number of results (default 50, max 100)
 *     offset    — pagination offset
 *
 * GET /api/admin/audit-logs/export — export filtered logs as a CSV file.
 *   Accepts the same query params (pagination is ignored).
 */
export const auditLogRoutes: FastifyPluginAsync = async (fastify) => {
  const controller = new AuditLogController();

  // Both routes require admin authorization.
  const adminPreHandler = [fastify.requireAdmin];

  fastify.get<{ Querystring: AuditLogsQuery }>('/admin/audit-logs', {
    preHandler: adminPreHandler,
    handler: controller.getAuditLogs.bind(controller),
  });

  fastify.get<{ Querystring: AuditLogsQuery }>('/admin/audit-logs/export', {
    preHandler: adminPreHandler,
    handler: controller.exportAuditLogs.bind(controller),
  });
};

View File

@@ -1,14 +0,0 @@
/**
 * @ai-summary Singleton audit log service instance
 * @ai-context Provides centralized audit logging across all features
 */
import { pool } from '../../core/config/database';
import { AuditLogRepository } from './data/audit-log.repository';
import { AuditLogService } from './domain/audit-log.service';

// One shared service for the whole process, wired to the application-wide
// connection pool so every feature logs through the same code path.
export const auditLogService = new AuditLogService(new AuditLogRepository(pool));

export default auditLogService;

View File

@@ -1,240 +0,0 @@
/**
 * @ai-summary Audit log data access layer
 * @ai-context Provides parameterized SQL queries for audit log operations
 */
import { Pool } from 'pg';
import {
  AuditLogEntry,
  CreateAuditLogInput,
  AuditLogFilters,
  AuditLogPagination,
  AuditLogSearchResult,
} from '../domain/audit-log.types';
import { logger } from '../../../core/logging/logger';

// Maximum records for CSV export to prevent memory exhaustion
const MAX_EXPORT_RECORDS = 5000;

export class AuditLogRepository {
  constructor(private pool: Pool) {}

  /**
   * Escape LIKE special characters to prevent pattern injection.
   *
   * `%`, `_` and `\` are wildcard/escape characters in SQL LIKE/ILIKE;
   * prefixing each occurrence with a backslash makes the user's search text
   * match literally when embedded in the ILIKE pattern built in
   * buildWhereClause().
   */
  private escapeLikePattern(pattern: string): string {
    return pattern.replace(/[%_\\]/g, (match) => `\\${match}`);
  }

  /**
   * Build WHERE clause from filters (shared logic for search and export).
   *
   * Returns the assembled `WHERE ...` string (empty when no filters apply),
   * the positional parameter values in matching order, and the next free
   * `$n` index so callers can append their own parameters (e.g. LIMIT/OFFSET).
   */
  private buildWhereClause(filters: AuditLogFilters): {
    whereClause: string;
    params: unknown[];
    nextParamIndex: number;
  } {
    const conditions: string[] = [];
    const params: unknown[] = [];
    // Positional parameter counter; must stay in lockstep with params[].
    let paramIndex = 1;

    if (filters.search) {
      // Case-insensitive substring match; user text is LIKE-escaped above.
      conditions.push(`al.action ILIKE $${paramIndex}`);
      params.push(`%${this.escapeLikePattern(filters.search)}%`);
      paramIndex++;
    }
    if (filters.category) {
      conditions.push(`al.category = $${paramIndex}`);
      params.push(filters.category);
      paramIndex++;
    }
    if (filters.severity) {
      conditions.push(`al.severity = $${paramIndex}`);
      params.push(filters.severity);
      paramIndex++;
    }
    if (filters.userId) {
      conditions.push(`al.user_id = $${paramIndex}`);
      params.push(filters.userId);
      paramIndex++;
    }
    if (filters.startDate) {
      // Inclusive lower bound on creation time.
      conditions.push(`al.created_at >= $${paramIndex}`);
      params.push(filters.startDate);
      paramIndex++;
    }
    if (filters.endDate) {
      // Inclusive upper bound on creation time.
      conditions.push(`al.created_at <= $${paramIndex}`);
      params.push(filters.endDate);
      paramIndex++;
    }

    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
    return { whereClause, params, nextParamIndex: paramIndex };
  }

  /**
   * Create a new audit log entry.
   *
   * The RETURNING clause includes a NULL user_email placeholder so the row
   * maps through mapRow() like read-path rows (email is only resolved via
   * the user_profiles JOIN in search/export queries).
   */
  async create(input: CreateAuditLogInput): Promise<AuditLogEntry> {
    const query = `
      INSERT INTO audit_logs (category, severity, user_id, action, resource_type, resource_id, details)
      VALUES ($1, $2, $3, $4, $5, $6, $7)
      RETURNING id, category, severity, user_id, action, resource_type, resource_id, details, created_at,
                NULL::text as user_email
    `;
    try {
      const result = await this.pool.query(query, [
        input.category,
        input.severity,
        input.userId || null,
        input.action,
        input.resourceType || null,
        input.resourceId || null,
        // JSONB column; serialize structured details, store NULL when absent.
        input.details ? JSON.stringify(input.details) : null,
      ]);
      return this.mapRow(result.rows[0]);
    } catch (error) {
      logger.error('Error creating audit log', { error, input });
      throw error;
    }
  }

  /**
   * Search audit logs with filters and pagination.
   *
   * Runs the COUNT and page queries in parallel; the page query LEFT JOINs
   * user_profiles to resolve the acting user's email (NULL for system rows).
   */
  async search(
    filters: AuditLogFilters,
    pagination: AuditLogPagination
  ): Promise<AuditLogSearchResult> {
    const { whereClause, params, nextParamIndex } = this.buildWhereClause(filters);

    // Count query
    const countQuery = `SELECT COUNT(*) as total FROM audit_logs al ${whereClause}`;

    // Data query with pagination - LEFT JOIN to get user email
    const dataQuery = `
      SELECT al.id, al.category, al.severity, al.user_id, al.action,
             al.resource_type, al.resource_id, al.details, al.created_at,
             up.email as user_email
      FROM audit_logs al
      LEFT JOIN user_profiles up ON al.user_id = up.id
      ${whereClause}
      ORDER BY al.created_at DESC
      LIMIT $${nextParamIndex} OFFSET $${nextParamIndex + 1}
    `;
    try {
      const [countResult, dataResult] = await Promise.all([
        this.pool.query(countQuery, params),
        this.pool.query(dataQuery, [...params, pagination.limit, pagination.offset]),
      ]);
      const total = parseInt(countResult.rows[0].total, 10);
      const logs = dataResult.rows.map((row) => this.mapRow(row));
      return {
        logs,
        total,
        limit: pagination.limit,
        offset: pagination.offset,
      };
    } catch (error) {
      logger.error('Error searching audit logs', { error, filters, pagination });
      throw error;
    }
  }

  /**
   * Get all logs matching filters for CSV export (limited to prevent memory exhaustion).
   *
   * Counts first to detect truncation, then fetches at most
   * MAX_EXPORT_RECORDS newest rows. `truncated` tells the caller the export
   * does not contain every matching record.
   */
  async getForExport(filters: AuditLogFilters): Promise<{ logs: AuditLogEntry[]; truncated: boolean }> {
    const { whereClause, params } = this.buildWhereClause(filters);

    // First, count total matching records
    const countQuery = `SELECT COUNT(*) as total FROM audit_logs al ${whereClause}`;
    const countResult = await this.pool.query(countQuery, params);
    const totalCount = parseInt(countResult.rows[0].total, 10);
    const truncated = totalCount > MAX_EXPORT_RECORDS;

    const query = `
      SELECT al.id, al.category, al.severity, al.user_id, al.action,
             al.resource_type, al.resource_id, al.details, al.created_at,
             up.email as user_email
      FROM audit_logs al
      LEFT JOIN user_profiles up ON al.user_id = up.id
      ${whereClause}
      ORDER BY al.created_at DESC
      LIMIT ${MAX_EXPORT_RECORDS}
    `;
    try {
      const result = await this.pool.query(query, params);
      const logs = result.rows.map((row) => this.mapRow(row));
      if (truncated) {
        logger.warn('Audit log export truncated', {
          totalCount,
          exportedCount: logs.length,
          limit: MAX_EXPORT_RECORDS,
        });
      }
      return { logs, truncated };
    } catch (error) {
      logger.error('Error exporting audit logs', { error, filters });
      throw error;
    }
  }

  /**
   * Delete logs older than specified days (retention cleanup).
   *
   * @param olderThanDays - age threshold; `INTERVAL '1 day' * $1` multiplies
   *   the interval by the parameter, keeping the value out of the SQL text.
   * @returns number of rows deleted
   */
  async cleanup(olderThanDays: number): Promise<number> {
    const query = `
      DELETE FROM audit_logs
      WHERE created_at < NOW() - INTERVAL '1 day' * $1
    `;
    try {
      const result = await this.pool.query(query, [olderThanDays]);
      const deletedCount = result.rowCount || 0;
      logger.info('Audit log cleanup completed', {
        olderThanDays,
        deletedCount,
      });
      return deletedCount;
    } catch (error) {
      logger.error('Error cleaning up audit logs', { error, olderThanDays });
      throw error;
    }
  }

  /**
   * Map database row to AuditLogEntry (snake_case to camelCase).
   */
  private mapRow(row: Record<string, unknown>): AuditLogEntry {
    return {
      id: row.id as string,
      category: row.category as AuditLogEntry['category'],
      severity: row.severity as AuditLogEntry['severity'],
      userId: row.user_id as string | null,
      userEmail: (row.user_email as string | null) || null,
      action: row.action as string,
      resourceType: row.resource_type as string | null,
      resourceId: row.resource_id as string | null,
      details: row.details as Record<string, unknown> | null,
      createdAt: new Date(row.created_at as string),
    };
  }
}

View File

@@ -1,163 +0,0 @@
/**
 * @ai-summary Centralized audit logging service
 * @ai-context Provides simple API for all features to log audit events
 */
import { AuditLogRepository } from '../data/audit-log.repository';
import {
  AuditLogCategory,
  AuditLogSeverity,
  AuditLogEntry,
  AuditLogFilters,
  AuditLogPagination,
  AuditLogSearchResult,
  isValidCategory,
  isValidSeverity,
} from './audit-log.types';
import { logger } from '../../../core/logging/logger';

export class AuditLogService {
  constructor(private repository: AuditLogRepository) {}

  /**
   * Throws (after logging) when category or severity is not a recognized
   * value. Runtime validation guards callers that bypass the type system,
   * e.g. values forwarded from request payloads.
   */
  private assertValidEnums(category: AuditLogCategory, severity: AuditLogSeverity): void {
    if (!isValidCategory(category)) {
      logger.error('Invalid audit log category', { category });
      throw new Error(`Invalid audit log category: ${category}`);
    }
    if (!isValidSeverity(severity)) {
      logger.error('Invalid audit log severity', { severity });
      throw new Error(`Invalid audit log severity: ${severity}`);
    }
  }

  /**
   * Log an audit event.
   *
   * @param category - Event category (auth, vehicle, user, system, admin)
   * @param severity - Event severity (info, warning, error)
   * @param userId - User who performed the action (null for system actions)
   * @param action - Human-readable description of the action
   * @param resourceType - Type of resource affected (optional)
   * @param resourceId - ID of affected resource (optional)
   * @param details - Additional structured data (optional)
   * @returns the persisted entry
   * @throws Error on invalid category/severity or repository failure
   */
  async log(
    category: AuditLogCategory,
    severity: AuditLogSeverity,
    userId: string | null,
    action: string,
    resourceType?: string | null,
    resourceId?: string | null,
    details?: Record<string, unknown> | null
  ): Promise<AuditLogEntry> {
    this.assertValidEnums(category, severity);

    try {
      const entry = await this.repository.create({
        category,
        severity,
        userId,
        action,
        resourceType,
        resourceId,
        details,
      });
      logger.debug('Audit log created', {
        id: entry.id,
        category,
        severity,
        action,
      });
      return entry;
    } catch (error) {
      logger.error('Error creating audit log', { error, category, action });
      throw error;
    }
  }

  /** Convenience wrapper: log an info-level event. */
  async info(
    category: AuditLogCategory,
    userId: string | null,
    action: string,
    resourceType?: string | null,
    resourceId?: string | null,
    details?: Record<string, unknown> | null
  ): Promise<AuditLogEntry> {
    return this.log(category, 'info', userId, action, resourceType, resourceId, details);
  }

  /** Convenience wrapper: log a warning-level event. */
  async warning(
    category: AuditLogCategory,
    userId: string | null,
    action: string,
    resourceType?: string | null,
    resourceId?: string | null,
    details?: Record<string, unknown> | null
  ): Promise<AuditLogEntry> {
    return this.log(category, 'warning', userId, action, resourceType, resourceId, details);
  }

  /** Convenience wrapper: log an error-level event. */
  async error(
    category: AuditLogCategory,
    userId: string | null,
    action: string,
    resourceType?: string | null,
    resourceId?: string | null,
    details?: Record<string, unknown> | null
  ): Promise<AuditLogEntry> {
    return this.log(category, 'error', userId, action, resourceType, resourceId, details);
  }

  /**
   * Search audit logs with filters and pagination.
   * Delegates to the repository; failures are logged with filter context.
   */
  async search(
    filters: AuditLogFilters,
    pagination: AuditLogPagination
  ): Promise<AuditLogSearchResult> {
    try {
      return await this.repository.search(filters, pagination);
    } catch (error) {
      logger.error('Error searching audit logs', { error, filters });
      throw error;
    }
  }

  /**
   * Get logs for CSV export (the repository caps the result at 5000 rows;
   * `truncated` reports whether the cap was hit).
   */
  async getForExport(filters: AuditLogFilters): Promise<{ logs: AuditLogEntry[]; truncated: boolean }> {
    try {
      return await this.repository.getForExport(filters);
    } catch (error) {
      logger.error('Error getting audit logs for export', { error, filters });
      throw error;
    }
  }

  /**
   * Run retention cleanup (delete logs older than the given number of days,
   * defaulting to 90).
   *
   * @returns number of rows deleted
   */
  async cleanup(olderThanDays: number = 90): Promise<number> {
    try {
      const deletedCount = await this.repository.cleanup(olderThanDays);
      logger.info('Audit log cleanup completed', { olderThanDays, deletedCount });
      return deletedCount;
    } catch (error) {
      logger.error('Error running audit log cleanup', { error, olderThanDays });
      throw error;
    }
  }
}

View File

@@ -1,107 +0,0 @@
/**
 * @ai-summary Type definitions for centralized audit logging
 * @ai-context Categories, severity levels, log entries, and filter options
 */

/** Audit log categories — each maps to a system domain. */
export type AuditLogCategory = 'auth' | 'vehicle' | 'user' | 'system' | 'admin';

/** Audit log severity levels. */
export type AuditLogSeverity = 'info' | 'warning' | 'error';

/** Audit log entry as stored in the database (camelCase view of the row). */
export interface AuditLogEntry {
  id: string;
  category: AuditLogCategory;
  severity: AuditLogSeverity;
  userId: string | null;
  userEmail: string | null;
  action: string;
  resourceType: string | null;
  resourceId: string | null;
  details: Record<string, unknown> | null;
  createdAt: Date;
}

/** Input for creating a new audit log entry. */
export interface CreateAuditLogInput {
  category: AuditLogCategory;
  severity: AuditLogSeverity;
  userId?: string | null;
  action: string;
  resourceType?: string | null;
  resourceId?: string | null;
  details?: Record<string, unknown> | null;
}

/** Filters for querying audit logs; all fields optional and AND-combined. */
export interface AuditLogFilters {
  search?: string;
  category?: AuditLogCategory;
  severity?: AuditLogSeverity;
  userId?: string;
  startDate?: Date;
  endDate?: Date;
}

/** Pagination options for audit log queries. */
export interface AuditLogPagination {
  limit: number;
  offset: number;
}

/** Paginated result set for audit logs. */
export interface AuditLogSearchResult {
  logs: AuditLogEntry[];
  total: number;
  limit: number;
  offset: number;
}

/** Valid category values, used by the runtime type guards below. */
export const AUDIT_LOG_CATEGORIES: readonly AuditLogCategory[] = [
  'auth',
  'vehicle',
  'user',
  'system',
  'admin',
] as const;

/** Valid severity values, used by the runtime type guards below. */
export const AUDIT_LOG_SEVERITIES: readonly AuditLogSeverity[] = [
  'info',
  'warning',
  'error',
] as const;

/** Type guard: narrows an arbitrary string to AuditLogCategory. */
export function isValidCategory(value: string): value is AuditLogCategory {
  return (AUDIT_LOG_CATEGORIES as readonly string[]).includes(value);
}

/** Type guard: narrows an arbitrary string to AuditLogSeverity. */
export function isValidSeverity(value: string): value is AuditLogSeverity {
  return (AUDIT_LOG_SEVERITIES as readonly string[]).includes(value);
}

View File

@@ -1,28 +0,0 @@
/**
 * @ai-summary Audit log feature exports
 * @ai-context Re-exports types, service, and repository for external use
 */

// Type-only exports — `export type` keeps these erasable so the barrel
// compiles correctly under isolatedModules/transpile-only toolchains.
export type {
  AuditLogCategory,
  AuditLogSeverity,
  AuditLogEntry,
  CreateAuditLogInput,
  AuditLogFilters,
  AuditLogPagination,
  AuditLogSearchResult,
} from './domain/audit-log.types';

// Runtime values: validation constants and type guards
export {
  AUDIT_LOG_CATEGORIES,
  AUDIT_LOG_SEVERITIES,
  isValidCategory,
  isValidSeverity,
} from './domain/audit-log.types';

// Service
export { AuditLogService } from './domain/audit-log.service';

// Repository
export { AuditLogRepository } from './data/audit-log.repository';

// Singleton instance for cross-feature use
export { auditLogService } from './audit-log.instance';

View File

@@ -1,74 +0,0 @@
/**
 * @ai-summary Job for audit log retention cleanup
 * @ai-context Runs daily at 3 AM to delete logs older than 90 days
 */
import { Pool } from 'pg';
import { logger } from '../../../core/logging/logger';
import { AuditLogService } from '../domain/audit-log.service';
import { AuditLogRepository } from '../data/audit-log.repository';

/**
 * Retention period in days for audit logs
 */
const AUDIT_LOG_RETENTION_DAYS = 90;

// Injected at startup via setAuditLogCleanupJobPool(); the job throws if
// it runs before the pool is provided.
let pool: Pool | null = null;

/**
 * Sets the database pool for the job
 */
export function setAuditLogCleanupJobPool(dbPool: Pool): void {
  pool = dbPool;
}

/**
 * Result of cleanup job
 */
export interface AuditLogCleanupResult {
  deletedCount: number;
  retentionDays: number;
  success: boolean;
  error?: string;
}

/**
 * Processes audit log retention cleanup
 *
 * Never rejects on cleanup failure; errors are captured in the returned
 * result so the scheduler can report them. Only throws when the pool was
 * never injected (a wiring bug, not a runtime condition).
 */
export async function processAuditLogCleanup(): Promise<AuditLogCleanupResult> {
  if (!pool) {
    throw new Error('Database pool not initialized for audit log cleanup job');
  }

  const service = new AuditLogService(new AuditLogRepository(pool));

  try {
    logger.info('Starting audit log cleanup job', {
      retentionDays: AUDIT_LOG_RETENTION_DAYS,
    });

    const deletedCount = await service.cleanup(AUDIT_LOG_RETENTION_DAYS);

    logger.info('Audit log cleanup job completed', {
      deletedCount,
      retentionDays: AUDIT_LOG_RETENTION_DAYS,
    });

    return {
      deletedCount,
      retentionDays: AUDIT_LOG_RETENTION_DAYS,
      success: true,
    };
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    logger.error('Audit log cleanup job failed', { error: message });
    return {
      deletedCount: 0,
      retentionDays: AUDIT_LOG_RETENTION_DAYS,
      success: false,
      error: message,
    };
  }
}

View File

@@ -1,35 +0,0 @@
-- Migration: Create audit_logs table for centralized audit logging
-- Categories: auth, vehicle, user, system, admin
-- Severity levels: info, warning, error
CREATE TABLE IF NOT EXISTS audit_logs (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    -- CHECK constraints mirror the application-level type guards
    category VARCHAR(20) NOT NULL CHECK (category IN ('auth', 'vehicle', 'user', 'system', 'admin')),
    severity VARCHAR(10) NOT NULL CHECK (severity IN ('info', 'warning', 'error')),
    -- Nullable: NULL denotes system-initiated actions with no acting user
    user_id VARCHAR(255),
    action VARCHAR(500) NOT NULL,
    resource_type VARCHAR(100),
    resource_id VARCHAR(255),
    details JSONB,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- B-tree indexes for filtered queries; each pairs a filter column with
-- created_at DESC to serve the ORDER BY created_at DESC listing queries
CREATE INDEX idx_audit_logs_category_created ON audit_logs(category, created_at DESC);
CREATE INDEX idx_audit_logs_severity_created ON audit_logs(severity, created_at DESC);
CREATE INDEX idx_audit_logs_user_created ON audit_logs(user_id, created_at DESC);
CREATE INDEX idx_audit_logs_created ON audit_logs(created_at DESC);

-- GIN index for text search on action column (requires pg_trgm extension);
-- gin_trgm_ops allows ILIKE '%term%' searches to use the index
CREATE EXTENSION IF NOT EXISTS pg_trgm;
CREATE INDEX idx_audit_logs_action_gin ON audit_logs USING gin (action gin_trgm_ops);

-- Comment for documentation
COMMENT ON TABLE audit_logs IS 'Centralized audit log for all system events across categories';
COMMENT ON COLUMN audit_logs.category IS 'Event category: auth, vehicle, user, system, admin';
COMMENT ON COLUMN audit_logs.severity IS 'Event severity: info, warning, error';
COMMENT ON COLUMN audit_logs.user_id IS 'User who performed the action (null for system actions)';
COMMENT ON COLUMN audit_logs.action IS 'Human-readable description of the action';
COMMENT ON COLUMN audit_logs.resource_type IS 'Type of resource affected (e.g., vehicle, backup)';
COMMENT ON COLUMN audit_logs.resource_id IS 'ID of the affected resource';
COMMENT ON COLUMN audit_logs.details IS 'Additional structured data about the event';

View File

@@ -1,16 +0,0 @@
# auth/
## Files
| File | What | When to read |
| ---- | ---- | ------------ |
| `README.md` | Feature documentation | Understanding auth flow |
| `index.ts` | Feature barrel export | Importing auth services |
## Subdirectories
| Directory | What | When to read |
| --------- | ---- | ------------ |
| `api/` | HTTP endpoints and routes | API changes |
| `domain/` | Business logic, services, types | Core auth logic |
| `tests/` | Unit and integration tests | Adding or modifying tests |

View File

@@ -11,7 +11,6 @@ import { termsConfig } from '../../terms-agreement/domain/terms-config';
import { pool } from '../../../core/config/database';
import { logger } from '../../../core/logging/logger';
import { signupSchema, resendVerificationPublicSchema } from './auth.validation';
import { auditLogService } from '../../audit-log';
export class AuthController {
private authService: AuthService;
@@ -67,16 +66,6 @@ export class AuthController {
logger.info('User signup successful', { email, userId: result.userId });
// Log signup to unified audit log
await auditLogService.info(
'auth',
result.userId,
`User signup: ${email}`,
'user',
result.userId,
{ email, ipAddress: termsData.ipAddress }
).catch(err => logger.error('Failed to log signup audit event', { error: err }));
return reply.code(201).send(result);
} catch (error: any) {
logger.error('Signup failed', { error, email: (request.body as any)?.email });
@@ -110,17 +99,17 @@ export class AuthController {
*/
async getVerifyStatus(request: FastifyRequest, reply: FastifyReply) {
try {
const auth0Sub = (request as any).user.sub;
const userId = (request as any).user.sub;
const result = await this.authService.getVerifyStatus(auth0Sub);
const result = await this.authService.getVerifyStatus(userId);
logger.info('Verification status checked', { userId: request.userContext?.userId, emailVerified: result.emailVerified });
logger.info('Verification status checked', { userId, emailVerified: result.emailVerified });
return reply.code(200).send(result);
} catch (error: any) {
logger.error('Failed to get verification status', {
error,
userId: request.userContext?.userId,
userId: (request as any).user?.sub,
});
return reply.code(500).send({
@@ -137,17 +126,17 @@ export class AuthController {
*/
async resendVerification(request: FastifyRequest, reply: FastifyReply) {
try {
const auth0Sub = (request as any).user.sub;
const userId = (request as any).user.sub;
const result = await this.authService.resendVerification(auth0Sub);
const result = await this.authService.resendVerification(userId);
logger.info('Verification email resent', { userId: request.userContext?.userId });
logger.info('Verification email resent', { userId });
return reply.code(200).send(result);
} catch (error: any) {
logger.error('Failed to resend verification email', {
error,
userId: request.userContext?.userId,
userId: (request as any).user?.sub,
});
return reply.code(500).send({
@@ -193,32 +182,15 @@ export class AuthController {
* GET /api/auth/user-status
* Get user status for routing decisions
* Protected endpoint - requires JWT
*
* Note: This endpoint is called once per Auth0 callback (from CallbackPage/CallbackMobileScreen).
* We log the login event here since it's the first authenticated request after Auth0 redirect.
*/
async getUserStatus(request: FastifyRequest, reply: FastifyReply) {
try {
const auth0Sub = (request as any).user.sub;
const userId = request.userContext?.userId;
const userId = (request as any).user.sub;
const result = await this.authService.getUserStatus(auth0Sub);
// Log login event to audit trail (called once per Auth0 callback)
const ipAddress = this.getClientIp(request);
if (userId) {
await auditLogService.info(
'auth',
userId,
'User login',
'user',
userId,
{ ipAddress }
).catch(err => logger.error('Failed to log login audit event', { error: err }));
}
const result = await this.authService.getUserStatus(userId);
logger.info('User status retrieved', {
userId: userId?.substring(0, 8) + '...',
userId: userId.substring(0, 8) + '...',
emailVerified: result.emailVerified,
onboardingCompleted: result.onboardingCompleted,
});
@@ -227,7 +199,7 @@ export class AuthController {
} catch (error: any) {
logger.error('Failed to get user status', {
error,
userId: request.userContext?.userId,
userId: (request as any).user?.sub,
});
return reply.code(500).send({
@@ -244,12 +216,12 @@ export class AuthController {
*/
async getSecurityStatus(request: FastifyRequest, reply: FastifyReply) {
try {
const auth0Sub = (request as any).user.sub;
const userId = (request as any).user.sub;
const result = await this.authService.getSecurityStatus(auth0Sub);
const result = await this.authService.getSecurityStatus(userId);
logger.info('Security status retrieved', {
userId: request.userContext?.userId,
userId: userId.substring(0, 8) + '...',
emailVerified: result.emailVerified,
});
@@ -257,7 +229,7 @@ export class AuthController {
} catch (error: any) {
logger.error('Failed to get security status', {
error,
userId: request.userContext?.userId,
userId: (request as any).user?.sub,
});
return reply.code(500).send({
@@ -274,31 +246,19 @@ export class AuthController {
*/
async requestPasswordReset(request: FastifyRequest, reply: FastifyReply) {
try {
const auth0Sub = (request as any).user.sub;
const userId = request.userContext?.userId;
const userId = (request as any).user.sub;
const result = await this.authService.requestPasswordReset(auth0Sub);
const result = await this.authService.requestPasswordReset(userId);
logger.info('Password reset email requested', {
userId: userId?.substring(0, 8) + '...',
userId: userId.substring(0, 8) + '...',
});
// Log password reset request to unified audit log
if (userId) {
await auditLogService.info(
'auth',
userId,
'Password reset requested',
'user',
userId
).catch(err => logger.error('Failed to log password reset audit event', { error: err }));
}
return reply.code(200).send(result);
} catch (error: any) {
logger.error('Failed to request password reset', {
error,
userId: request.userContext?.userId,
userId: (request as any).user?.sub,
});
return reply.code(500).send({
@@ -307,45 +267,4 @@ export class AuthController {
});
}
}
/**
* POST /api/auth/track-logout
* Track user logout event for audit logging
* Protected endpoint - requires JWT
*
* Called by frontend before Auth0 logout to capture the logout event.
* Returns success even if audit logging fails (non-blocking).
*/
async trackLogout(request: FastifyRequest, reply: FastifyReply) {
try {
const userId = request.userContext?.userId;
const ipAddress = this.getClientIp(request);
// Log logout event to audit trail
if (userId) {
await auditLogService.info(
'auth',
userId,
'User logout',
'user',
userId,
{ ipAddress }
).catch(err => logger.error('Failed to log logout audit event', { error: err }));
}
logger.info('User logout tracked', {
userId: userId?.substring(0, 8) + '...',
});
return reply.code(200).send({ success: true });
} catch (error: any) {
// Don't block logout on audit failure - always return success
logger.error('Failed to track logout', {
error,
userId: request.userContext?.userId,
});
return reply.code(200).send({ success: true });
}
}
}

View File

@@ -48,10 +48,4 @@ export const authRoutes: FastifyPluginAsync = async (
preHandler: [fastify.authenticate],
handler: authController.requestPasswordReset.bind(authController),
});
// POST /api/auth/track-logout - Track logout event for audit (requires JWT)
fastify.post('/auth/track-logout', {
preHandler: [fastify.authenticate],
handler: authController.trackLogout.bind(authController),
});
};

View File

@@ -19,7 +19,6 @@ jest.mock('../../../../core/plugins/auth.plugin', () => {
return {
default: fastifyPlugin(async function (fastify) {
fastify.decorate('authenticate', async function (request, _reply) {
// JWT sub is still auth0|xxx format
request.user = { sub: 'auth0|test-user-123' };
});
}, { name: 'auth-plugin' }),

View File

@@ -103,8 +103,6 @@ describe('AuthService', () => {
onboardingCompletedAt: null,
deactivatedAt: null,
deactivatedBy: null,
deletionRequestedAt: null,
deletionScheduledFor: null,
createdAt: new Date(),
updatedAt: new Date(),
});
@@ -118,8 +116,6 @@ describe('AuthService', () => {
onboardingCompletedAt: null,
deactivatedAt: null,
deactivatedBy: null,
deletionRequestedAt: null,
deletionScheduledFor: null,
createdAt: new Date(),
updatedAt: new Date(),
});
@@ -153,8 +149,6 @@ describe('AuthService', () => {
onboardingCompletedAt: null,
deactivatedAt: null,
deactivatedBy: null,
deletionRequestedAt: null,
deletionScheduledFor: null,
createdAt: new Date(),
updatedAt: new Date(),
});

View File

@@ -1,18 +0,0 @@
# backup/
## Files
| File | What | When to read |
| ---- | ---- | ------------ |
| `README.md` | Feature documentation | Understanding backup architecture |
## Subdirectories
| Directory | What | When to read |
| --------- | ---- | ------------ |
| `api/` | HTTP endpoints, validation | API changes |
| `domain/` | Business logic, services | Core backup/retention logic |
| `data/` | Repository, database queries | Database operations |
| `jobs/` | Scheduled job handlers | Cron job modifications |
| `migrations/` | Database schema | Schema changes |
| `tests/` | Unit and integration tests | Adding or modifying tests |

View File

@@ -19,12 +19,11 @@ backup/
backup.controller.ts # Request handlers
backup.validation.ts # Zod schemas
domain/ # Business logic
backup.types.ts # TypeScript types and constants
backup.types.ts # TypeScript types
backup.service.ts # Core backup operations
backup-archive.service.ts # Archive creation
backup-restore.service.ts # Restore operations
backup-retention.service.ts # Tiered retention enforcement
backup-classification.service.ts # Backup category classification
backup-archive.service.ts # Archive creation
backup-restore.service.ts # Restore operations
backup-retention.service.ts # Retention enforcement
data/ # Data access
backup.repository.ts # Database queries
jobs/ # Scheduled jobs
@@ -32,10 +31,6 @@ backup/
backup-cleanup.job.ts # Retention cleanup
migrations/ # Database schema
001_create_backup_tables.sql
002_add_retention_categories.sql # Tiered retention columns
tests/ # Test files
unit/
backup-classification.service.test.ts # Classification tests
```
## API Endpoints
@@ -127,45 +122,11 @@ Scheduled backups use Redis distributed locking to prevent duplicate backups whe
- Lock TTL: 5 minutes (auto-release if container crashes)
- Only one container creates the backup; others skip
**Retention cleanup (tiered):**
**Retention cleanup:**
- Runs immediately after each successful scheduled backup
- Uses tiered classification: each backup can belong to multiple categories
- A backup is only deleted when it exceeds ALL applicable category quotas
- Deletes backups exceeding the schedule's retention count
- Also runs globally at 4 AM daily as a safety net
## Tiered Retention System
Backups are classified by their creation timestamp into categories:
| Category | Qualification | Retention Count |
|----------|--------------|-----------------|
| hourly | All backups | 8 |
| daily | First backup at midnight UTC | 7 |
| weekly | First backup on Sunday at midnight UTC | 4 |
| monthly | First backup on 1st of month at midnight UTC | 12 |
**Multi-category classification:**
- A backup can belong to multiple categories simultaneously
- Example: Backup at midnight on Sunday, January 1st qualifies as: hourly + daily + weekly + monthly
**Retention logic:**
```
For each category (hourly, daily, weekly, monthly):
1. Get all backups with this category
2. Keep top N (sorted by started_at DESC)
3. Add to protected set
A backup is deleted ONLY if it's NOT in the protected set
(i.e., exceeds quota for ALL its categories)
```
**Expiration calculation:**
- Each backup's `expires_at` is calculated based on its longest retention period
- Monthly backup: 12 months from creation
- Weekly-only backup: 4 weeks from creation
- Daily-only backup: 7 days from creation
- Hourly-only backup: 8 hours from creation
See `backend/src/core/scheduler/README.md` for the distributed locking pattern.
### Admin Routes

View File

@@ -18,7 +18,6 @@ import {
ScheduleIdParam,
UpdateSettingsBody,
} from './backup.validation';
import { auditLogService } from '../../audit-log';
export class BackupController {
private backupService: BackupService;
@@ -45,42 +44,22 @@ export class BackupController {
request: FastifyRequest<{ Body: CreateBackupBody }>,
reply: FastifyReply
): Promise<void> {
const adminUserId = request.userContext?.userId;
const adminSub = (request as any).userContext?.auth0Sub;
const result = await this.backupService.createBackup({
name: request.body.name,
backupType: 'manual',
createdBy: adminUserId,
createdBy: adminSub,
includeDocuments: request.body.includeDocuments,
});
if (result.success) {
// Log backup creation to unified audit log
await auditLogService.info(
'system',
adminUserId || null,
`Backup created: ${request.body.name || 'Manual backup'}`,
'backup',
result.backupId,
{ name: request.body.name, includeDocuments: request.body.includeDocuments }
).catch(err => logger.error('Failed to log backup create audit event', { error: err }));
reply.status(201).send({
backupId: result.backupId,
status: 'completed',
message: 'Backup created successfully',
});
} else {
// Log backup failure
await auditLogService.error(
'system',
adminUserId || null,
`Backup failed: ${request.body.name || 'Manual backup'}`,
'backup',
result.backupId,
{ error: result.error }
).catch(err => logger.error('Failed to log backup failure audit event', { error: err }));
reply.status(500).send({
backupId: result.backupId,
status: 'failed',
@@ -139,7 +118,7 @@ export class BackupController {
request: FastifyRequest,
reply: FastifyReply
): Promise<void> {
const adminUserId = request.userContext?.userId;
const adminSub = (request as any).userContext?.auth0Sub;
// Handle multipart file upload
const data = await request.file();
@@ -173,7 +152,7 @@ export class BackupController {
const backup = await this.backupService.importUploadedBackup(
tempPath,
filename,
adminUserId
adminSub
);
reply.status(201).send({
@@ -217,8 +196,6 @@ export class BackupController {
request: FastifyRequest<{ Params: BackupIdParam; Body: RestoreBody }>,
reply: FastifyReply
): Promise<void> {
const adminUserId = request.userContext?.userId;
try {
const result = await this.restoreService.executeRestore({
backupId: request.params.id,
@@ -226,16 +203,6 @@ export class BackupController {
});
if (result.success) {
// Log successful restore to unified audit log
await auditLogService.info(
'system',
adminUserId || null,
`Backup restored: ${request.params.id}`,
'backup',
request.params.id,
{ safetyBackupId: result.safetyBackupId }
).catch(err => logger.error('Failed to log restore success audit event', { error: err }));
reply.send({
success: true,
safetyBackupId: result.safetyBackupId,
@@ -243,16 +210,6 @@ export class BackupController {
message: 'Restore completed successfully',
});
} else {
// Log restore failure
await auditLogService.error(
'system',
adminUserId || null,
`Backup restore failed: ${request.params.id}`,
'backup',
request.params.id,
{ error: result.error, safetyBackupId: result.safetyBackupId }
).catch(err => logger.error('Failed to log restore failure audit event', { error: err }));
reply.status(500).send({
success: false,
safetyBackupId: result.safetyBackupId,

View File

@@ -12,7 +12,6 @@ import {
BackupType,
BackupStatus,
BackupMetadata,
BackupCategory,
ListBackupsParams,
CRON_EXPRESSIONS,
} from '../domain/backup.types';
@@ -55,8 +54,6 @@ export class BackupRepository {
completedAt: row.completed_at ? new Date(row.completed_at) : null,
createdBy: row.created_by,
metadata: row.metadata as BackupMetadata,
categories: (row.categories || ['hourly']) as BackupCategory[],
expiresAt: row.expires_at ? new Date(row.expires_at) : null,
};
}
@@ -264,13 +261,11 @@ export class BackupRepository {
fileSizeBytes: number;
createdBy?: string | null;
metadata?: BackupMetadata;
categories?: BackupCategory[];
expiresAt?: Date | null;
}): Promise<BackupHistory> {
const result = await this.pool.query(
`INSERT INTO backup_history
(schedule_id, backup_type, filename, file_path, file_size_bytes, status, created_by, metadata, categories, expires_at)
VALUES ($1, $2, $3, $4, $5, 'in_progress', $6, $7, $8, $9)
(schedule_id, backup_type, filename, file_path, file_size_bytes, status, created_by, metadata)
VALUES ($1, $2, $3, $4, $5, 'in_progress', $6, $7)
RETURNING *`,
[
data.scheduleId || null,
@@ -280,8 +275,6 @@ export class BackupRepository {
data.fileSizeBytes,
data.createdBy || null,
JSON.stringify(data.metadata || {}),
data.categories || ['hourly'],
data.expiresAt || null,
]
);
return this.mapHistoryRow(result.rows[0]);
@@ -358,38 +351,6 @@ export class BackupRepository {
return result.rows.map(this.mapHistoryRow);
}
// ============================================
// Tiered Retention Operations
// ============================================
/**
* Gets all completed backups that have a specific category.
* Sorted by started_at DESC (newest first).
*/
async getBackupsByCategory(category: BackupCategory): Promise<BackupHistory[]> {
const result = await this.pool.query(
`SELECT * FROM backup_history
WHERE status = 'completed'
AND $1 = ANY(categories)
ORDER BY started_at DESC`,
[category]
);
return result.rows.map(row => this.mapHistoryRow(row));
}
/**
* Gets all completed backups for tiered retention processing.
* Returns backups sorted by started_at DESC.
*/
async getAllCompletedBackups(): Promise<BackupHistory[]> {
const result = await this.pool.query(
`SELECT * FROM backup_history
WHERE status = 'completed'
ORDER BY started_at DESC`
);
return result.rows.map(row => this.mapHistoryRow(row));
}
// ============================================
// Settings Operations
// ============================================

View File

@@ -1,106 +0,0 @@
/**
* @ai-summary Service for classifying backups into tiered retention categories
* @ai-context Pure functions for timestamp-based classification, no database dependencies
*/
import { BackupCategory, TIERED_RETENTION } from './backup.types';
/**
 * Determines which retention categories a backup belongs to, based solely on
 * its creation timestamp (UTC). Every backup is at least 'hourly'; a backup
 * taken during the midnight-UTC hour additionally qualifies as 'daily', plus
 * 'weekly' when that day is a Sunday and 'monthly' on the 1st of the month.
 *
 * @param timestamp - backup creation time (interpreted in UTC)
 * @returns categories in fixed order: hourly, daily, weekly, monthly
 */
export function classifyBackup(timestamp: Date): BackupCategory[] {
  const result: BackupCategory[] = ['hourly'];

  // Anything outside the midnight-UTC hour is hourly-only.
  if (timestamp.getUTCHours() !== 0) {
    return result;
  }

  result.push('daily');

  // Sunday (getUTCDay() === 0) at midnight also counts as weekly.
  if (timestamp.getUTCDay() === 0) {
    result.push('weekly');
  }

  // 1st of the month at midnight also counts as monthly.
  if (timestamp.getUTCDate() === 1) {
    result.push('monthly');
  }

  return result;
}
/**
 * Computes when a backup should expire, given its retention categories.
 * The longest applicable retention window wins, checked in order:
 * monthly (12 months) > weekly (4 weeks) > daily (7 days) > hourly (8 hours),
 * using the counts from TIERED_RETENTION.
 *
 * Retention enforcement itself is count-based; these durations are the
 * display/estimate equivalent assuming typical backup frequency.
 *
 * @param categories - categories the backup was classified into
 * @param timestamp - backup creation time; not mutated (a copy is advanced)
 * @returns the estimated expiration date
 */
export function calculateExpiration(
  categories: BackupCategory[],
  timestamp: Date
): Date {
  const expiresAt = new Date(timestamp);

  if (categories.includes('monthly')) {
    expiresAt.setUTCMonth(expiresAt.getUTCMonth() + TIERED_RETENTION.monthly);
    return expiresAt;
  }
  if (categories.includes('weekly')) {
    expiresAt.setUTCDate(expiresAt.getUTCDate() + 7 * TIERED_RETENTION.weekly);
    return expiresAt;
  }
  if (categories.includes('daily')) {
    expiresAt.setUTCDate(expiresAt.getUTCDate() + TIERED_RETENTION.daily);
    return expiresAt;
  }

  // Hourly-only backups keep the shortest window.
  expiresAt.setUTCHours(expiresAt.getUTCHours() + TIERED_RETENTION.hourly);
  return expiresAt;
}
/**
 * True when the timestamp falls anywhere inside the midnight hour (00:xx:xx)
 * UTC, i.e. it is treated as the day's first backup.
 */
export function isFirstBackupOfDay(timestamp: Date): boolean {
  const hourUtc = timestamp.getUTCHours();
  return hourUtc === 0;
}
/**
 * True when the timestamp falls on a Sunday in UTC.
 */
export function isSunday(timestamp: Date): boolean {
  const weekdayUtc = timestamp.getUTCDay(); // 0 = Sunday … 6 = Saturday
  return weekdayUtc === 0;
}
/**
 * True when the timestamp falls on the 1st day of the month in UTC.
 */
export function isFirstDayOfMonth(timestamp: Date): boolean {
  const dayOfMonthUtc = timestamp.getUTCDate();
  return dayOfMonthUtc === 1;
}
/**
 * One-call helper for the backup creation flow: classifies the timestamp and
 * derives the matching expiration date in a single step.
 *
 * @param timestamp - backup creation time (UTC)
 * @returns the categories plus the expiration computed from them
 */
export function classifyAndCalculateExpiration(timestamp: Date): {
  categories: BackupCategory[];
  expiresAt: Date;
} {
  const categories = classifyBackup(timestamp);
  return {
    categories,
    expiresAt: calculateExpiration(categories, timestamp),
  };
}

View File

@@ -10,9 +10,6 @@ import { BackupRepository } from '../data/backup.repository';
import {
RetentionCleanupResult,
RetentionCleanupJobResult,
BackupCategory,
BackupHistory,
TIERED_RETENTION,
} from './backup.types';
export class BackupRetentionService {
@@ -23,47 +20,61 @@ export class BackupRetentionService {
}
/**
* Processes retention cleanup using tiered classification.
* A backup can only be deleted if it exceeds the quota for ALL of its categories.
* Processes retention cleanup for all schedules
*/
async processRetentionCleanup(): Promise<RetentionCleanupJobResult> {
logger.info('Starting tiered backup retention cleanup');
logger.info('Starting backup retention cleanup');
const schedules = await this.repository.listSchedules();
const results: RetentionCleanupResult[] = [];
const errors: Array<{ scheduleId: string; error: string }> = [];
let totalDeleted = 0;
let totalFreedBytes = 0;
try {
const result = await this.processTieredRetentionCleanup();
results.push(result);
totalDeleted = result.deletedCount;
totalFreedBytes = result.freedBytes;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
logger.error('Tiered retention cleanup failed', { error: errorMessage });
errors.push({ scheduleId: 'tiered', error: errorMessage });
for (const schedule of schedules) {
try {
const result = await this.cleanupScheduleBackups(
schedule.id,
schedule.name,
schedule.retentionCount
);
results.push(result);
totalDeleted += result.deletedCount;
totalFreedBytes += result.freedBytes;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
logger.error('Retention cleanup failed for schedule', {
scheduleId: schedule.id,
scheduleName: schedule.name,
error: errorMessage,
});
errors.push({ scheduleId: schedule.id, error: errorMessage });
}
}
// Also cleanup failed backups older than 24 hours
// Also cleanup orphaned backups (from deleted schedules)
try {
const failedCount = await this.cleanupFailedBackups();
if (failedCount > 0) {
logger.info('Cleaned up failed backups', { count: failedCount });
const orphanResult = await this.cleanupOrphanedBackups();
if (orphanResult.deletedCount > 0) {
results.push(orphanResult);
totalDeleted += orphanResult.deletedCount;
totalFreedBytes += orphanResult.freedBytes;
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
logger.error('Failed backup cleanup failed', { error: errorMessage });
logger.error('Orphaned backup cleanup failed', { error: errorMessage });
errors.push({ scheduleId: 'orphaned', error: errorMessage });
}
logger.info('Backup retention cleanup completed', {
processed: schedules.length,
totalDeleted,
totalFreedBytes,
errors: errors.length,
});
return {
processed: 1, // Single tiered process
processed: schedules.length,
totalDeleted,
totalFreedBytes,
results,
@@ -71,140 +82,6 @@ export class BackupRetentionService {
};
}
/**
 * Implements tiered retention: keeps N backups per category.
 * A backup is protected if it's in the top N for ANY of its categories.
 * Only deletes backups that exceed ALL applicable category quotas.
 *
 * Side effects: unlinks backup archive files on disk and deletes their
 * database records. Returns a summary under the synthetic id 'tiered'.
 */
private async processTieredRetentionCleanup(): Promise<RetentionCleanupResult> {
// Repository returns completed backups sorted by started_at DESC (newest first).
const allBackups = await this.repository.getAllCompletedBackups();
if (allBackups.length === 0) {
logger.debug('No completed backups to process');
return {
scheduleId: 'tiered',
scheduleName: 'Tiered Retention',
deletedCount: 0,
retainedCount: 0,
freedBytes: 0,
};
}
// Build sets of protected backup IDs for each category
const protectedIds = new Set<string>();
// Tracks which backup ids were kept per category (feeds buildDeletionReason).
const categoryRetained: Record<BackupCategory, string[]> = {
hourly: [],
daily: [],
weekly: [],
monthly: [],
};
// For each category, identify which backups to keep
const categories: BackupCategory[] = ['hourly', 'daily', 'weekly', 'monthly'];
for (const category of categories) {
const limit = TIERED_RETENTION[category];
const backupsInCategory = allBackups.filter(b =>
b.categories && b.categories.includes(category)
);
// Keep the top N (already sorted by started_at DESC)
const toKeep = backupsInCategory.slice(0, limit);
for (const backup of toKeep) {
protectedIds.add(backup.id);
categoryRetained[category].push(backup.id);
}
logger.debug('Category retention analysis', {
category,
limit,
totalInCategory: backupsInCategory.length,
keeping: toKeep.length,
});
}
// Find backups to delete (not protected by any category)
const backupsToDelete = allBackups.filter(b => !protectedIds.has(b.id));
logger.info('Tiered retention analysis complete', {
totalBackups: allBackups.length,
protected: protectedIds.size,
toDelete: backupsToDelete.length,
hourlyRetained: categoryRetained.hourly.length,
dailyRetained: categoryRetained.daily.length,
weeklyRetained: categoryRetained.weekly.length,
monthlyRetained: categoryRetained.monthly.length,
});
// Delete unprotected backups.
// Per-backup failures are logged and skipped; they do not abort the loop.
let deletedCount = 0;
let freedBytes = 0;
for (const backup of backupsToDelete) {
try {
// Log retention decision with category reasoning
logger.info('Deleting backup - exceeded all category quotas', {
backupId: backup.id,
filename: backup.filename,
categories: backup.categories,
startedAt: backup.startedAt,
reason: this.buildDeletionReason(backup, categoryRetained),
});
// Delete the file
// Some records carry the path in metadata.archivePath; fall back to filePath.
const filePath = (backup.metadata as any)?.archivePath || backup.filePath;
if (filePath) {
try {
const stats = await fsp.stat(filePath);
freedBytes += stats.size;
await fsp.unlink(filePath);
} catch (error) {
// A missing/undeletable file is tolerated; the DB record is still removed below.
logger.warn('Failed to delete backup file', {
backupId: backup.id,
filePath,
});
}
}
// Delete the database record
await this.repository.deleteBackupRecord(backup.id);
deletedCount++;
} catch (error) {
logger.error('Failed to delete backup during retention cleanup', {
backupId: backup.id,
error: error instanceof Error ? error.message : String(error),
});
}
}
return {
scheduleId: 'tiered',
scheduleName: 'Tiered Retention',
deletedCount,
retainedCount: protectedIds.size,
freedBytes,
};
}
/**
 * Produces a human-readable explanation of why a backup was not protected,
 * one clause per over-quota category, e.g. "hourly: not in top 8; daily: not in top 7".
 * Backups with no recorded categories are treated as hourly-only.
 */
private buildDeletionReason(
  backup: BackupHistory,
  categoryRetained: Record<BackupCategory, string[]>
): string {
  const backupCategories = (backup.categories || ['hourly']) as BackupCategory[];
  const reasons = backupCategories
    .filter(category => !categoryRetained[category].includes(backup.id))
    .map(category => `${category}: not in top ${TIERED_RETENTION[category]}`);
  return reasons.length > 0 ? reasons.join('; ') : 'no categories';
}
/**
* Cleans up old backups for a specific schedule
*/
@@ -323,4 +200,75 @@ export class BackupRetentionService {
return deletedCount;
}
/**
* Cleans up orphaned backups (from deleted schedules)
* Keeps manual backups indefinitely
*
* Only backups with backupType 'scheduled' are ever candidates, so manual
* backups are untouched. For each deleted schedule, the 5 most recent
* orphaned backups are retained and the rest are removed (file + DB record).
*/
private async cleanupOrphanedBackups(): Promise<RetentionCleanupResult> {
// NOTE(review): only the first 1000 scheduled backups are examined per run —
// confirm this page size is large enough for the deployment.
const { items } = await this.repository.listBackups({
backupType: 'scheduled',
pageSize: 1000,
});
// Get all valid schedule IDs
const schedules = await this.repository.listSchedules();
const validScheduleIds = new Set(schedules.map(s => s.id));
// Find orphaned scheduled backups (schedule was deleted)
const orphanedBackups = items.filter(
backup => backup.scheduleId && !validScheduleIds.has(backup.scheduleId)
);
// Keep only the most recent 5 orphaned backups per deleted schedule
const orphansBySchedule = new Map<string, typeof orphanedBackups>();
for (const backup of orphanedBackups) {
const scheduleId = backup.scheduleId!;
if (!orphansBySchedule.has(scheduleId)) {
orphansBySchedule.set(scheduleId, []);
}
orphansBySchedule.get(scheduleId)!.push(backup);
}
let deletedCount = 0;
let freedBytes = 0;
let retainedCount = 0;
for (const [_scheduleId, backups] of orphansBySchedule) {
// Sort by date descending and keep first 5
backups.sort((a, b) => b.startedAt.getTime() - a.startedAt.getTime());
const toDelete = backups.slice(5);
retainedCount += Math.min(backups.length, 5);
for (const backup of toDelete) {
try {
// Some records carry the path in metadata.archivePath; fall back to filePath.
const filePath = (backup.metadata as any)?.archivePath || backup.filePath;
if (filePath) {
try {
const stats = await fsp.stat(filePath);
freedBytes += stats.size;
await fsp.unlink(filePath);
} catch {
// File might not exist
}
}
await this.repository.deleteBackupRecord(backup.id);
deletedCount++;
} catch (error) {
// Per-backup failures are logged and skipped; cleanup continues.
logger.warn('Failed to delete orphaned backup', {
backupId: backup.id,
error: error instanceof Error ? error.message : String(error),
});
}
}
}
return {
scheduleId: 'orphaned',
scheduleName: 'Orphaned Backups',
deletedCount,
retainedCount,
freedBytes,
};
}
}

View File

@@ -22,7 +22,6 @@ import {
BackupFrequency,
ScheduleResponse,
} from './backup.types';
import { classifyAndCalculateExpiration } from './backup-classification.service';
export class BackupService {
private repository: BackupRepository;
@@ -41,14 +40,10 @@ export class BackupService {
* Creates a new backup
*/
async createBackup(options: CreateBackupOptions): Promise<BackupResult> {
const now = new Date();
const timestamp = now.toISOString().replace(/[:.]/g, '-').slice(0, 19);
const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
const tempFilename = `backup_${timestamp}`;
// Classify the backup based on its creation timestamp
const { categories, expiresAt } = classifyAndCalculateExpiration(now);
// Create initial backup record with classification
// Create initial backup record
const backupRecord = await this.repository.createBackupRecord({
scheduleId: options.scheduleId,
backupType: options.backupType,
@@ -57,16 +52,12 @@ export class BackupService {
fileSizeBytes: 0,
createdBy: options.createdBy,
metadata: { name: options.name },
categories,
expiresAt,
});
logger.info('Starting backup creation', {
backupId: backupRecord.id,
backupType: options.backupType,
scheduleName: options.name,
categories,
expiresAt: expiresAt.toISOString(),
});
try {

View File

@@ -29,17 +29,6 @@ export const DEFAULT_RETENTION = {
monthly: 12,
} as const;
/**
* Tiered retention counts for unified classification system.
* Each backup can belong to multiple categories; expiration is based on longest retention.
*/
export const TIERED_RETENTION = {
hourly: 8,
daily: 7,
weekly: 4,
monthly: 12,
} as const;
// ============================================
// Enums and Union Types
// ============================================
@@ -47,7 +36,6 @@ export const TIERED_RETENTION = {
export type BackupFrequency = 'hourly' | 'daily' | 'weekly' | 'monthly';
export type BackupType = 'scheduled' | 'manual';
export type BackupStatus = 'in_progress' | 'completed' | 'failed';
export type BackupCategory = 'hourly' | 'daily' | 'weekly' | 'monthly';
// ============================================
// Database Entity Types
@@ -81,8 +69,6 @@ export interface BackupHistory {
completedAt: Date | null;
createdBy: string | null;
metadata: BackupMetadata;
categories: BackupCategory[];
expiresAt: Date | null;
}
export interface BackupSettings {

View File

@@ -1,78 +0,0 @@
-- Migration: Add tiered retention classification columns
-- Description: Adds categories array and expires_at for tiered backup retention
-- Issue: #6 - Backup retention purges all backups
-- ============================================
-- Add new columns to backup_history
-- ============================================
ALTER TABLE backup_history
ADD COLUMN IF NOT EXISTS categories TEXT[] DEFAULT '{}',
ADD COLUMN IF NOT EXISTS expires_at TIMESTAMP WITH TIME ZONE;
-- ============================================
-- Indexes for efficient category queries
-- GIN index supports the "category = ANY(categories)" membership lookups
-- ============================================
CREATE INDEX IF NOT EXISTS idx_backup_history_categories ON backup_history USING GIN(categories);
CREATE INDEX IF NOT EXISTS idx_backup_history_expires ON backup_history(expires_at);
-- ============================================
-- Populate categories for existing backups based on started_at
-- Classification logic:
-- - All backups: 'hourly'
-- - Hour = 0 (midnight UTC): + 'daily'
-- - Hour = 0 AND Sunday: + 'weekly'
-- - Hour = 0 AND day = 1: + 'monthly'
-- ============================================
UPDATE backup_history
SET categories = ARRAY(
SELECT unnest(
CASE
-- Midnight on Sunday, 1st of month: all categories
WHEN EXTRACT(HOUR FROM started_at AT TIME ZONE 'UTC') = 0
AND EXTRACT(DOW FROM started_at AT TIME ZONE 'UTC') = 0
AND EXTRACT(DAY FROM started_at AT TIME ZONE 'UTC') = 1
THEN ARRAY['hourly', 'daily', 'weekly', 'monthly']
-- Midnight on Sunday (not 1st): hourly + daily + weekly
WHEN EXTRACT(HOUR FROM started_at AT TIME ZONE 'UTC') = 0
AND EXTRACT(DOW FROM started_at AT TIME ZONE 'UTC') = 0
THEN ARRAY['hourly', 'daily', 'weekly']
-- Midnight on 1st (not Sunday): hourly + daily + monthly
WHEN EXTRACT(HOUR FROM started_at AT TIME ZONE 'UTC') = 0
AND EXTRACT(DAY FROM started_at AT TIME ZONE 'UTC') = 1
THEN ARRAY['hourly', 'daily', 'monthly']
-- Midnight (not Sunday, not 1st): hourly + daily
WHEN EXTRACT(HOUR FROM started_at AT TIME ZONE 'UTC') = 0
THEN ARRAY['hourly', 'daily']
-- Non-midnight: hourly only
ELSE ARRAY['hourly']
END
)
)
WHERE categories = '{}' OR categories IS NULL;
-- ============================================
-- Calculate expires_at based on categories
-- Retention periods: hourly=8hrs, daily=7days, weekly=4wks, monthly=12mo
-- Use longest applicable retention period
-- ============================================
UPDATE backup_history
SET expires_at = CASE
WHEN 'monthly' = ANY(categories) THEN started_at + INTERVAL '12 months'
WHEN 'weekly' = ANY(categories) THEN started_at + INTERVAL '4 weeks'
WHEN 'daily' = ANY(categories) THEN started_at + INTERVAL '7 days'
ELSE started_at + INTERVAL '8 hours'
END
WHERE expires_at IS NULL;
-- ============================================
-- Set a non-empty default after populating data
-- NOTE: no NOT NULL constraint is added here; the default plus the backfill
-- below are what keep the column populated for existing and new rows.
-- ============================================
ALTER TABLE backup_history
ALTER COLUMN categories SET DEFAULT ARRAY['hourly']::TEXT[];
-- Ensure all rows have categories
UPDATE backup_history SET categories = ARRAY['hourly'] WHERE categories = '{}' OR categories IS NULL;

View File

@@ -1,188 +0,0 @@
/**
* @ai-summary Unit tests for BackupClassificationService
* @ai-context Tests pure timestamp-based classification functions
*/
import {
classifyBackup,
calculateExpiration,
isFirstBackupOfDay,
isSunday,
isFirstDayOfMonth,
classifyAndCalculateExpiration,
} from '../../domain/backup-classification.service';
import { TIERED_RETENTION } from '../../domain/backup.types';
// Covers classification, expiration math, the date-predicate helpers, and the
// TIERED_RETENTION constants. All fixture dates are interpreted in UTC.
describe('BackupClassificationService', () => {
describe('classifyBackup', () => {
it('should classify regular hourly backup (non-midnight)', () => {
// Tuesday, January 7, 2026 at 14:30 UTC
const timestamp = new Date('2026-01-07T14:30:00.000Z');
const categories = classifyBackup(timestamp);
expect(categories).toEqual(['hourly']);
});
it('should classify midnight backup as hourly + daily', () => {
// Wednesday, January 8, 2026 at 00:00 UTC
const timestamp = new Date('2026-01-08T00:00:00.000Z');
const categories = classifyBackup(timestamp);
expect(categories).toEqual(['hourly', 'daily']);
});
it('should classify Sunday midnight backup as hourly + daily + weekly', () => {
// Sunday, January 4, 2026 at 00:00 UTC
const timestamp = new Date('2026-01-04T00:00:00.000Z');
const categories = classifyBackup(timestamp);
expect(categories).toEqual(['hourly', 'daily', 'weekly']);
});
it('should classify 1st of month midnight backup as hourly + daily + monthly', () => {
// Thursday, January 1, 2026 at 00:00 UTC (not Sunday)
const timestamp = new Date('2026-01-01T00:00:00.000Z');
const categories = classifyBackup(timestamp);
expect(categories).toEqual(['hourly', 'daily', 'monthly']);
});
it('should classify Sunday 1st of month midnight as all categories', () => {
// Sunday, February 1, 2026 at 00:00 UTC
const timestamp = new Date('2026-02-01T00:00:00.000Z');
const categories = classifyBackup(timestamp);
expect(categories).toEqual(['hourly', 'daily', 'weekly', 'monthly']);
});
it('should not classify non-midnight on 1st as monthly', () => {
// Thursday, January 1, 2026 at 10:00 UTC
const timestamp = new Date('2026-01-01T10:00:00.000Z');
const categories = classifyBackup(timestamp);
expect(categories).toEqual(['hourly']);
});
it('should not classify non-midnight on Sunday as weekly', () => {
// Sunday, January 4, 2026 at 15:00 UTC
const timestamp = new Date('2026-01-04T15:00:00.000Z');
const categories = classifyBackup(timestamp);
expect(categories).toEqual(['hourly']);
});
});
describe('calculateExpiration', () => {
// Monday, January 5, 2026 at midnight UTC — shared base for all cases below.
const baseTimestamp = new Date('2026-01-05T00:00:00.000Z');
it('should calculate 8 hours for hourly-only backup', () => {
const expiresAt = calculateExpiration(['hourly'], baseTimestamp);
const expectedDate = new Date('2026-01-05T08:00:00.000Z');
expect(expiresAt).toEqual(expectedDate);
});
it('should calculate 7 days for daily backup', () => {
const expiresAt = calculateExpiration(['hourly', 'daily'], baseTimestamp);
const expectedDate = new Date('2026-01-12T00:00:00.000Z');
expect(expiresAt).toEqual(expectedDate);
});
it('should calculate 4 weeks for weekly backup', () => {
const expiresAt = calculateExpiration(['hourly', 'daily', 'weekly'], baseTimestamp);
const expectedDate = new Date('2026-02-02T00:00:00.000Z');
expect(expiresAt).toEqual(expectedDate);
});
it('should calculate 12 months for monthly backup', () => {
const expiresAt = calculateExpiration(
['hourly', 'daily', 'weekly', 'monthly'],
baseTimestamp
);
const expectedDate = new Date('2027-01-05T00:00:00.000Z');
expect(expiresAt).toEqual(expectedDate);
});
it('should use longest retention when monthly is present (even without weekly)', () => {
const expiresAt = calculateExpiration(['hourly', 'daily', 'monthly'], baseTimestamp);
const expectedDate = new Date('2027-01-05T00:00:00.000Z');
expect(expiresAt).toEqual(expectedDate);
});
});
describe('isFirstBackupOfDay', () => {
it('should return true for midnight UTC', () => {
const timestamp = new Date('2026-01-05T00:00:00.000Z');
expect(isFirstBackupOfDay(timestamp)).toBe(true);
});
it('should return false for non-midnight', () => {
const timestamp = new Date('2026-01-05T01:00:00.000Z');
expect(isFirstBackupOfDay(timestamp)).toBe(false);
});
it('should return true for midnight with minutes/seconds', () => {
// 00:30:45 is still hour 0
const timestamp = new Date('2026-01-05T00:30:45.000Z');
expect(isFirstBackupOfDay(timestamp)).toBe(true);
});
});
describe('isSunday', () => {
it('should return true for Sunday', () => {
// January 4, 2026 is a Sunday
const timestamp = new Date('2026-01-04T12:00:00.000Z');
expect(isSunday(timestamp)).toBe(true);
});
it('should return false for non-Sunday', () => {
// January 5, 2026 is a Monday
const timestamp = new Date('2026-01-05T12:00:00.000Z');
expect(isSunday(timestamp)).toBe(false);
});
});
describe('isFirstDayOfMonth', () => {
it('should return true for 1st of month', () => {
const timestamp = new Date('2026-01-01T12:00:00.000Z');
expect(isFirstDayOfMonth(timestamp)).toBe(true);
});
it('should return false for non-1st', () => {
const timestamp = new Date('2026-01-15T12:00:00.000Z');
expect(isFirstDayOfMonth(timestamp)).toBe(false);
});
});
describe('classifyAndCalculateExpiration', () => {
it('should return both categories and expiresAt', () => {
// Sunday, February 1, 2026 at 00:00 UTC - all categories
const timestamp = new Date('2026-02-01T00:00:00.000Z');
const result = classifyAndCalculateExpiration(timestamp);
expect(result.categories).toEqual(['hourly', 'daily', 'weekly', 'monthly']);
expect(result.expiresAt).toEqual(new Date('2027-02-01T00:00:00.000Z'));
});
it('should work for hourly-only backup', () => {
const timestamp = new Date('2026-01-07T14:30:00.000Z');
const result = classifyAndCalculateExpiration(timestamp);
expect(result.categories).toEqual(['hourly']);
expect(result.expiresAt).toEqual(new Date('2026-01-07T22:30:00.000Z'));
});
});
describe('TIERED_RETENTION constants', () => {
it('should have correct retention values', () => {
expect(TIERED_RETENTION.hourly).toBe(8);
expect(TIERED_RETENTION.daily).toBe(7);
expect(TIERED_RETENTION.weekly).toBe(4);
expect(TIERED_RETENTION.monthly).toBe(12);
});
});
});

View File

@@ -1,18 +0,0 @@
# documents/
## Files
| File | What | When to read |
| ---- | ---- | ------------ |
| `README.md` | Feature documentation | Understanding document management |
| `index.ts` | Feature barrel export | Importing document services |
## Subdirectories
| Directory | What | When to read |
| --------- | ---- | ------------ |
| `api/` | HTTP endpoints and routes | API changes |
| `domain/` | Business logic, services, types | Core document logic |
| `data/` | Repository, database queries | Database operations |
| `migrations/` | Database schema | Schema changes |
| `tests/` | Unit and integration tests | Adding or modifying tests |

View File

@@ -8,14 +8,12 @@ import { Transform, TransformCallback } from 'stream';
import crypto from 'crypto';
import FileType from 'file-type';
import { Readable } from 'stream';
import { canAccessFeature, getFeatureConfig } from '../../../core/config/feature-tiers';
import { SubscriptionTier } from '../../user-profile/domain/user-profile.types';
export class DocumentsController {
private readonly service = new DocumentsService();
async list(request: FastifyRequest<{ Querystring: ListQuery }>, reply: FastifyReply) {
const userId = request.userContext!.userId;
const userId = (request as any).user?.sub as string;
logger.info('Documents list requested', {
operation: 'documents.list',
@@ -43,7 +41,7 @@ export class DocumentsController {
}
async get(request: FastifyRequest<{ Params: IdParams }>, reply: FastifyReply) {
const userId = request.userContext!.userId;
const userId = (request as any).user?.sub as string;
const documentId = request.params.id;
logger.info('Document get requested', {
@@ -74,8 +72,7 @@ export class DocumentsController {
}
async create(request: FastifyRequest<{ Body: CreateBody }>, reply: FastifyReply) {
const userId = request.userContext!.userId;
const userTier: SubscriptionTier = request.userContext?.subscriptionTier || 'free';
const userId = (request as any).user?.sub as string;
logger.info('Document create requested', {
operation: 'documents.create',
@@ -85,26 +82,6 @@ export class DocumentsController {
title: request.body.title,
});
// Tier validation: scanForMaintenance requires Pro tier
const featureKey = 'document.scanMaintenanceSchedule';
if (request.body.scanForMaintenance && !canAccessFeature(userTier, featureKey)) {
const config = getFeatureConfig(featureKey);
logger.warn('Tier required for scanForMaintenance', {
operation: 'documents.create.tier_required',
userId,
userTier,
requiredTier: config?.minTier,
});
return reply.code(403).send({
error: 'TIER_REQUIRED',
requiredTier: config?.minTier || 'pro',
currentTier: userTier,
feature: featureKey,
featureName: config?.name || null,
upgradePrompt: config?.upgradePrompt || 'Upgrade to Pro to access this feature.',
});
}
const created = await this.service.createDocument(userId, request.body);
logger.info('Document created', {
@@ -120,8 +97,7 @@ export class DocumentsController {
}
async update(request: FastifyRequest<{ Params: IdParams; Body: UpdateBody }>, reply: FastifyReply) {
const userId = request.userContext!.userId;
const userTier: SubscriptionTier = request.userContext?.subscriptionTier || 'free';
const userId = (request as any).user?.sub as string;
const documentId = request.params.id;
logger.info('Document update requested', {
@@ -131,27 +107,6 @@ export class DocumentsController {
updateFields: Object.keys(request.body),
});
// Tier validation: scanForMaintenance requires Pro tier
const featureKey = 'document.scanMaintenanceSchedule';
if (request.body.scanForMaintenance && !canAccessFeature(userTier, featureKey)) {
const config = getFeatureConfig(featureKey);
logger.warn('Tier required for scanForMaintenance', {
operation: 'documents.update.tier_required',
userId,
documentId,
userTier,
requiredTier: config?.minTier,
});
return reply.code(403).send({
error: 'TIER_REQUIRED',
requiredTier: config?.minTier || 'pro',
currentTier: userTier,
feature: featureKey,
featureName: config?.name || null,
upgradePrompt: config?.upgradePrompt || 'Upgrade to Pro to access this feature.',
});
}
const updated = await this.service.updateDocument(userId, documentId, request.body);
if (!updated) {
logger.warn('Document not found for update', {
@@ -174,7 +129,7 @@ export class DocumentsController {
}
async remove(request: FastifyRequest<{ Params: IdParams }>, reply: FastifyReply) {
const userId = request.userContext!.userId;
const userId = (request as any).user?.sub as string;
const documentId = request.params.id;
logger.info('Document delete requested', {
@@ -221,7 +176,7 @@ export class DocumentsController {
}
async upload(request: FastifyRequest<{ Params: IdParams }>, reply: FastifyReply) {
const userId = request.userContext!.userId;
const userId = (request as any).user?.sub as string;
const documentId = request.params.id;
logger.info('Document upload requested', {
@@ -272,15 +227,20 @@ export class DocumentsController {
});
}
// Collect ALL file chunks first (breaking early from async iterator corrupts stream state)
// Read first 4100 bytes to detect file type via magic bytes
const chunks: Buffer[] = [];
let totalBytes = 0;
const targetBytes = 4100;
for await (const chunk of mp.file) {
chunks.push(chunk);
totalBytes += chunk.length;
if (totalBytes >= targetBytes) {
break;
}
}
const fullBuffer = Buffer.concat(chunks);
// Use first 4100 bytes for file type detection via magic bytes
const headerBuffer = fullBuffer.subarray(0, Math.min(4100, fullBuffer.length));
const headerBuffer = Buffer.concat(chunks);
// Validate actual file content using magic bytes
const detectedType = await FileType.fromBuffer(headerBuffer);
@@ -336,9 +296,15 @@ export class DocumentsController {
const counter = new CountingStream();
// Create readable stream from the complete buffer and pipe through counter
const fileStream = Readable.from([fullBuffer]);
fileStream.pipe(counter);
// Create a new readable stream from the header buffer + remaining file chunks
const headerStream = Readable.from([headerBuffer]);
const remainingStream = mp.file;
// Pipe header first, then remaining content through counter
headerStream.pipe(counter, { end: false });
headerStream.on('end', () => {
remainingStream.pipe(counter);
});
const storage = getStorageService();
const bucket = 'documents';
@@ -373,7 +339,7 @@ export class DocumentsController {
}
async download(request: FastifyRequest<{ Params: IdParams }>, reply: FastifyReply) {
const userId = request.userContext!.userId;
const userId = (request as any).user?.sub as string;
const documentId = request.params.id;
logger.info('Document download requested', {
@@ -421,165 +387,6 @@ export class DocumentsController {
const stream = await storage.getObjectStream(doc.storageBucket, doc.storageKey);
return reply.send(stream);
}
/**
 * GET handler: list every document associated with a vehicle (as primary or
 * shared). Responds 200 with the document array, or 403 when the service
 * reports the vehicle is missing / not owned; other errors propagate to the
 * framework's error handler.
 */
async listByVehicle(request: FastifyRequest<{ Params: { vehicleId: string } }>, reply: FastifyReply) {
// userContext is populated by the auth preHandler; `!` assumes it ran.
const userId = request.userContext!.userId;
const vehicleId = request.params.vehicleId;
logger.info('Documents by vehicle requested', {
operation: 'documents.listByVehicle',
userId,
vehicleId,
});
try {
const docs = await this.service.getDocumentsByVehicle(userId, vehicleId);
logger.info('Documents by vehicle retrieved', {
operation: 'documents.listByVehicle.success',
userId,
vehicleId,
documentCount: docs.length,
});
return reply.code(200).send(docs);
} catch (e: any) {
// The service tags ownership failures with statusCode 403.
if (e.statusCode === 403) {
logger.warn('Vehicle not found or not owned', {
operation: 'documents.listByVehicle.forbidden',
userId,
vehicleId,
});
return reply.code(403).send({ error: 'Forbidden', message: e.message });
}
// Anything else is unexpected: rethrow to the global error handler.
throw e;
}
}
/**
 * POST handler: attach an additional (shared) vehicle to a document.
 * Validation lives in the service; this handler maps tagged service errors
 * to HTTP responses: 404 (document missing), 400 (invalid association,
 * e.g. non-insurance document or vehicle already primary/duplicate),
 * 403 (vehicle not owned by the caller).
 */
async addVehicle(request: FastifyRequest<{ Params: { id: string; vehicleId: string } }>, reply: FastifyReply) {
const userId = request.userContext!.userId;
const { id: documentId, vehicleId } = request.params;
logger.info('Add vehicle to document requested', {
operation: 'documents.addVehicle',
userId,
documentId,
vehicleId,
});
try {
const updated = await this.service.addVehicleToDocument(userId, documentId, vehicleId);
// A null result means no row matched the atomic update — most likely the
// vehicle was already in the shared list.
if (!updated) {
logger.warn('Document not updated (possibly duplicate vehicle)', {
operation: 'documents.addVehicle.not_updated',
userId,
documentId,
vehicleId,
});
return reply.code(400).send({ error: 'Bad Request', message: 'Vehicle could not be added' });
}
logger.info('Vehicle added to document', {
operation: 'documents.addVehicle.success',
userId,
documentId,
vehicleId,
sharedVehicleCount: updated.sharedVehicleIds.length,
});
return reply.code(200).send(updated);
} catch (e: any) {
// The service attaches statusCode to its domain errors; translate each
// to the matching HTTP response and rethrow anything unrecognized.
if (e.statusCode === 404) {
logger.warn('Document not found for adding vehicle', {
operation: 'documents.addVehicle.not_found',
userId,
documentId,
vehicleId,
});
return reply.code(404).send({ error: 'Not Found', message: e.message });
}
if (e.statusCode === 400) {
logger.warn('Bad request for adding vehicle', {
operation: 'documents.addVehicle.bad_request',
userId,
documentId,
vehicleId,
reason: e.message,
});
return reply.code(400).send({ error: 'Bad Request', message: e.message });
}
if (e.statusCode === 403) {
logger.warn('Forbidden - vehicle not owned', {
operation: 'documents.addVehicle.forbidden',
userId,
documentId,
vehicleId,
});
return reply.code(403).send({ error: 'Forbidden', message: e.message });
}
throw e;
}
}
/**
 * DELETE handler: detach a vehicle from a document.
 * Three service outcomes are surfaced:
 *  - 204: the document itself was soft-deleted (primary vehicle removed
 *    with no shared vehicles remaining);
 *  - 200: the document survives with an updated vehicle association;
 *  - 404/400: tagged service errors (document missing / vehicle not
 *    associated) mapped to HTTP responses.
 */
async removeVehicle(request: FastifyRequest<{ Params: { id: string; vehicleId: string } }>, reply: FastifyReply) {
const userId = request.userContext!.userId;
const { id: documentId, vehicleId } = request.params;
logger.info('Remove vehicle from document requested', {
operation: 'documents.removeVehicle',
userId,
documentId,
vehicleId,
});
try {
const updated = await this.service.removeVehicleFromDocument(userId, documentId, vehicleId);
if (!updated) {
// Document was soft deleted
logger.info('Document soft deleted (primary vehicle removed, no shared vehicles)', {
operation: 'documents.removeVehicle.deleted',
userId,
documentId,
vehicleId,
});
return reply.code(204).send();
}
logger.info('Vehicle removed from document', {
operation: 'documents.removeVehicle.success',
userId,
documentId,
vehicleId,
sharedVehicleCount: updated.sharedVehicleIds.length,
primaryVehicleId: updated.vehicleId,
});
return reply.code(200).send(updated);
} catch (e: any) {
if (e.statusCode === 404) {
logger.warn('Document not found for removing vehicle', {
operation: 'documents.removeVehicle.not_found',
userId,
documentId,
vehicleId,
});
return reply.code(404).send({ error: 'Not Found', message: e.message });
}
if (e.statusCode === 400) {
logger.warn('Bad request for removing vehicle', {
operation: 'documents.removeVehicle.bad_request',
userId,
documentId,
vehicleId,
reason: e.message,
});
return reply.code(400).send({ error: 'Bad Request', message: e.message });
}
// NOTE(review): unlike addVehicle, 403 (vehicle not owned) is not handled
// here and will fall through to the global error handler — confirm whether
// that asymmetry is intentional.
throw e;
}
}
}
function cryptoRandom(): string {

View File

@@ -22,6 +22,16 @@ export const documentsRoutes: FastifyPluginAsync = async (
handler: ctrl.get.bind(ctrl)
});
// GET /documents/vehicle/:vehicleId — list documents filtered by vehicle.
// NOTE(review): this inline handler reaches into the controller's private
// `service` field via bracket access and extracts the user with untyped
// casts, unlike the bound controller methods used by sibling routes —
// consider promoting it to a proper controller method. Presumably
// `listDocuments` accepts `vehicleId` as a query filter; verify against
// the service signature.
fastify.get<{ Params: any }>('/documents/vehicle/:vehicleId', {
preHandler: [requireAuth],
handler: async (req, reply) => {
const userId = (req as any).user?.sub as string;
const query = { vehicleId: (req.params as any).vehicleId };
const docs = await ctrl['service'].listDocuments(userId, query);
return reply.code(200).send(docs);
}
});
fastify.post<{ Body: any }>('/documents', {
preHandler: [requireAuth],
handler: ctrl.create.bind(ctrl)
@@ -46,20 +56,4 @@ export const documentsRoutes: FastifyPluginAsync = async (
preHandler: [requireAuth],
handler: ctrl.download.bind(ctrl)
});
// Vehicle management routes
// GET: all documents where the vehicle is primary or in the shared list.
fastify.get<{ Params: any }>('/documents/by-vehicle/:vehicleId', {
preHandler: [requireAuth],
handler: ctrl.listByVehicle.bind(ctrl)
});
// POST: attach a shared vehicle to a document (service restricts to insurance).
fastify.post<{ Params: any }>('/documents/:id/vehicles/:vehicleId', {
preHandler: [requireAuth],
handler: ctrl.addVehicle.bind(ctrl)
});
// DELETE: detach a vehicle; may soft-delete the document or promote a new
// primary vehicle depending on what remains associated.
fastify.delete<{ Params: any }>('/documents/:id/vehicles/:vehicleId', {
preHandler: [requireAuth],
handler: ctrl.removeVehicle.bind(ctrl)
});
};

View File

@@ -9,10 +9,6 @@ export const ListQuerySchema = z.object({
// Route-level zod schemas for the documents API.
// Path-parameter shapes (all ids are UUIDs):
export const IdParamsSchema = z.object({ id: z.string().uuid() });
export const VehicleParamsSchema = z.object({ vehicleId: z.string().uuid() });
// Combined params for /documents/:id/vehicles/:vehicleId routes.
export const DocumentVehicleParamsSchema = z.object({
id: z.string().uuid(),
vehicleId: z.string().uuid()
});
// Body schemas are re-exported from the domain layer under API-local names.
export const CreateBodySchema = CreateDocumentBodySchema;
export const UpdateBodySchema = UpdateDocumentBodySchema;
@@ -20,7 +16,6 @@ export const UpdateBodySchema = UpdateDocumentBodySchema;
export type ListQuery = z.infer<typeof ListQuerySchema>;
export type IdParams = z.infer<typeof IdParamsSchema>;
export type VehicleParams = z.infer<typeof VehicleParamsSchema>;
export type DocumentVehicleParams = z.infer<typeof DocumentVehicleParamsSchema>;
export type CreateBody = z.infer<typeof CreateBodySchema>;
export type UpdateBody = z.infer<typeof UpdateBodySchema>;

View File

@@ -28,7 +28,6 @@ export class DocumentsRepository {
expirationDate: row.expiration_date,
emailNotifications: row.email_notifications,
scanForMaintenance: row.scan_for_maintenance,
sharedVehicleIds: row.shared_vehicle_ids || [],
createdAt: row.created_at,
updatedAt: row.updated_at,
deletedAt: row.deleted_at
@@ -51,12 +50,11 @@ export class DocumentsRepository {
expirationDate?: string | null;
emailNotifications?: boolean;
scanForMaintenance?: boolean;
sharedVehicleIds?: string[];
}): Promise<DocumentRecord> {
const res = await this.db.query(
`INSERT INTO documents (
id, user_id, vehicle_id, document_type, title, notes, details, issued_date, expiration_date, email_notifications, scan_for_maintenance, shared_vehicle_ids
) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12)
id, user_id, vehicle_id, document_type, title, notes, details, issued_date, expiration_date, email_notifications, scan_for_maintenance
) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11)
RETURNING *`,
[
doc.id,
@@ -70,7 +68,6 @@ export class DocumentsRepository {
doc.expirationDate ?? null,
doc.emailNotifications ?? false,
doc.scanForMaintenance ?? false,
doc.sharedVehicleIds ?? [],
]
);
return this.mapDocumentRecord(res.rows[0]);
@@ -93,71 +90,11 @@ export class DocumentsRepository {
return res.rows.map(row => this.mapDocumentRecord(row));
}
async batchInsert(
documents: Array<{
id: string;
userId: string;
vehicleId: string;
documentType: DocumentType;
title: string;
notes?: string | null;
details?: any;
issuedDate?: string | null;
expirationDate?: string | null;
emailNotifications?: boolean;
scanForMaintenance?: boolean;
sharedVehicleIds?: string[];
}>,
client?: any
): Promise<DocumentRecord[]> {
if (documents.length === 0) {
return [];
}
// Multi-value INSERT for performance (avoids N round-trips)
const queryClient = client || this.db;
const placeholders: string[] = [];
const values: any[] = [];
let paramCount = 1;
documents.forEach((doc) => {
const docParams = [
doc.id,
doc.userId,
doc.vehicleId,
doc.documentType,
doc.title,
doc.notes ?? null,
doc.details ?? null,
doc.issuedDate ?? null,
doc.expirationDate ?? null,
doc.emailNotifications ?? false,
doc.scanForMaintenance ?? false,
doc.sharedVehicleIds ?? []
];
const placeholder = `($${paramCount++}, $${paramCount++}, $${paramCount++}, $${paramCount++}, $${paramCount++}, $${paramCount++}, $${paramCount++}, $${paramCount++}, $${paramCount++}, $${paramCount++}, $${paramCount++}, $${paramCount++})`;
placeholders.push(placeholder);
values.push(...docParams);
});
const query = `
INSERT INTO documents (
id, user_id, vehicle_id, document_type, title, notes, details, issued_date, expiration_date, email_notifications, scan_for_maintenance, shared_vehicle_ids
)
VALUES ${placeholders.join(', ')}
RETURNING *
`;
const result = await queryClient.query(query, values);
return result.rows.map((row: any) => this.mapDocumentRecord(row));
}
/** Soft-deletes a document by stamping deleted_at; scoped to the owning user. */
async softDelete(id: string, userId: string): Promise<void> {
const sql = `UPDATE documents SET deleted_at = NOW() WHERE id = $1 AND user_id = $2`;
await this.db.query(sql, [id, userId]);
}
async updateMetadata(id: string, userId: string, patch: Partial<Pick<DocumentRecord, 'title'|'notes'|'details'|'issuedDate'|'expirationDate'|'emailNotifications'|'scanForMaintenance'|'sharedVehicleIds'>>): Promise<DocumentRecord | null> {
async updateMetadata(id: string, userId: string, patch: Partial<Pick<DocumentRecord, 'title'|'notes'|'details'|'issuedDate'|'expirationDate'|'emailNotifications'|'scanForMaintenance'>>): Promise<DocumentRecord | null> {
const fields: string[] = [];
const params: any[] = [];
let i = 1;
@@ -168,7 +105,6 @@ export class DocumentsRepository {
if (patch.expirationDate !== undefined) { fields.push(`expiration_date = $${i++}`); params.push(patch.expirationDate); }
if (patch.emailNotifications !== undefined) { fields.push(`email_notifications = $${i++}`); params.push(patch.emailNotifications); }
if (patch.scanForMaintenance !== undefined) { fields.push(`scan_for_maintenance = $${i++}`); params.push(patch.scanForMaintenance); }
if (patch.sharedVehicleIds !== undefined) { fields.push(`shared_vehicle_ids = $${i++}`); params.push(patch.sharedVehicleIds); }
if (!fields.length) return this.findById(id, userId);
params.push(id, userId);
const sql = `UPDATE documents SET ${fields.join(', ')} WHERE id = $${i++} AND user_id = $${i++} AND deleted_at IS NULL RETURNING *`;
@@ -193,56 +129,5 @@ export class DocumentsRepository {
);
return res.rows[0] ? this.mapDocumentRecord(res.rows[0]) : null;
}
// ========================
// Shared Vehicle Operations (Atomic)
// ========================
/**
 * Atomically add a vehicle to the shared_vehicle_ids array.
 * Uses PostgreSQL array_append() plus a NOT-ANY guard in the WHERE clause,
 * so a concurrent duplicate add simply matches no row instead of racing.
 * @returns the updated record, or null when no row matched (document
 *          missing, deleted, not owned, or vehicle already shared).
 */
async addSharedVehicle(docId: string, userId: string, vehicleId: string): Promise<DocumentRecord | null> {
const res = await this.db.query(
`UPDATE documents
SET shared_vehicle_ids = array_append(shared_vehicle_ids, $1::uuid)
WHERE id = $2 AND user_id = $3 AND deleted_at IS NULL
AND NOT ($1::uuid = ANY(shared_vehicle_ids))
RETURNING *`,
[vehicleId, docId, userId]
);
return res.rows[0] ? this.mapDocumentRecord(res.rows[0]) : null;
}
/**
 * Atomically remove a vehicle from the shared_vehicle_ids array.
 * Uses PostgreSQL array_remove() to avoid race conditions. Removing a
 * vehicle that is not in the array is a no-op at the SQL level but still
 * matches the row, so the updated record is returned.
 * @returns the updated record, or null when the document is missing,
 *          deleted, or not owned by the user.
 */
async removeSharedVehicle(docId: string, userId: string, vehicleId: string): Promise<DocumentRecord | null> {
const res = await this.db.query(
`UPDATE documents
SET shared_vehicle_ids = array_remove(shared_vehicle_ids, $1::uuid)
WHERE id = $2 AND user_id = $3 AND deleted_at IS NULL
RETURNING *`,
[vehicleId, docId, userId]
);
return res.rows[0] ? this.mapDocumentRecord(res.rows[0]) : null;
}
/**
 * List all documents associated with a vehicle (either as primary or shared).
 * Returns documents where vehicle_id = vehicleId OR vehicleId = ANY(shared_vehicle_ids),
 * excluding soft-deleted rows, newest first. Ownership of the vehicle itself
 * is NOT checked here — callers (service layer) are responsible for that.
 */
async listByVehicle(userId: string, vehicleId: string): Promise<DocumentRecord[]> {
const res = await this.db.query(
`SELECT * FROM documents
WHERE user_id = $1
AND deleted_at IS NULL
AND (vehicle_id = $2 OR $2::uuid = ANY(shared_vehicle_ids))
ORDER BY created_at DESC`,
[userId, vehicleId]
);
return res.rows.map(row => this.mapDocumentRecord(row));
}
}

View File

@@ -1,32 +1,15 @@
import { randomUUID } from 'crypto';
import type { CreateDocumentBody, DocumentRecord, DocumentType, UpdateDocumentBody } from './documents.types';
import { DocumentsRepository } from '../data/documents.repository';
import { OwnershipCostsService } from '../../ownership-costs/domain/ownership-costs.service';
import type { OwnershipCostType } from '../../ownership-costs/domain/ownership-costs.types';
import pool from '../../../core/config/database';
export class DocumentsService {
private readonly repo = new DocumentsRepository(pool);
private readonly ownershipCostsService = new OwnershipCostsService(pool);
async createDocument(userId: string, body: CreateDocumentBody): Promise<DocumentRecord> {
await this.assertVehicleOwnership(userId, body.vehicleId);
// Validate shared vehicles if provided (insurance type only)
if (body.sharedVehicleIds && body.sharedVehicleIds.length > 0) {
if (body.documentType !== 'insurance') {
const err: any = new Error('Shared vehicles are only supported for insurance documents');
err.statusCode = 400;
throw err;
}
// Validate ownership of all shared vehicles
for (const vid of body.sharedVehicleIds) {
await this.assertVehicleOwnership(userId, vid);
}
}
const id = randomUUID();
const doc = await this.repo.insert({
return this.repo.insert({
id,
userId,
vehicleId: body.vehicleId,
@@ -38,72 +21,7 @@ export class DocumentsService {
expirationDate: body.expirationDate ?? null,
emailNotifications: body.emailNotifications ?? false,
scanForMaintenance: body.scanForMaintenance ?? false,
sharedVehicleIds: body.sharedVehicleIds ?? [],
});
// Auto-create ownership_cost when insurance/registration has cost data
await this.autoCreateOwnershipCost(userId, doc, body);
return doc;
}
/**
 * Auto-creates an ownership_cost record when an insurance or registration
 * document is created with cost data (premium or cost field in details).
 * Best-effort: a failure to create the cost is logged but never fails the
 * document creation that triggered it.
 * @param userId owner of both the document and the cost record
 * @param doc    the just-inserted document (source of id/title)
 * @param body   the original create payload (source of type, dates, details)
 */
private async autoCreateOwnershipCost(
userId: string,
doc: DocumentRecord,
body: CreateDocumentBody
): Promise<void> {
const costType = this.mapDocumentTypeToCostType(body.documentType);
if (!costType) return; // Not a cost-linkable document type
const costAmount = this.extractCostAmount(body);
if (!costAmount || costAmount <= 0) return; // No valid cost data
try {
await this.ownershipCostsService.createCost(userId, {
vehicleId: body.vehicleId,
documentId: doc.id,
costType,
amount: costAmount,
description: doc.title,
periodStart: body.issuedDate,
periodEnd: body.expirationDate,
});
} catch (err) {
// Log but don't fail document creation if cost creation fails
// NOTE(review): uses console.error rather than the structured logger used
// elsewhere — confirm whether that is intentional in this layer.
console.error('Failed to auto-create ownership cost for document:', doc.id, err);
}
}
/**
 * Maps document types to ownership cost types.
 * Only insurance and registration documents participate in auto cost
 * creation; every other document type yields null.
 */
private mapDocumentTypeToCostType(documentType: string): OwnershipCostType | null {
switch (documentType) {
case 'insurance':
return 'insurance';
case 'registration':
return 'registration';
default:
return null;
}
}
/**
 * Extracts a positive cost amount from a create payload's details.
 * Insurance stores the value under `premium`, registration under `cost`;
 * `premium` wins when both are present. Non-numeric or non-positive
 * values are ignored.
 */
private extractCostAmount(body: CreateDocumentBody): number | null {
const details = body.details;
if (!details) return null;
for (const field of ['premium', 'cost'] as const) {
const value = details[field];
if (typeof value === 'number' && value > 0) return value;
}
return null;
}
async getDocument(userId: string, id: string): Promise<DocumentRecord | null> {
@@ -117,184 +35,16 @@ export class DocumentsService {
async updateDocument(userId: string, id: string, patch: UpdateDocumentBody) {
const existing = await this.repo.findById(id, userId);
if (!existing) return null;
// Validate shared vehicles if provided (insurance type only)
if (patch.sharedVehicleIds !== undefined) {
if (existing.documentType !== 'insurance') {
const err: any = new Error('Shared vehicles are only supported for insurance documents');
err.statusCode = 400;
throw err;
}
// Validate ownership of all shared vehicles
for (const vid of patch.sharedVehicleIds) {
await this.assertVehicleOwnership(userId, vid);
}
}
if (patch && typeof patch === 'object') {
const updated = await this.repo.updateMetadata(id, userId, patch as any);
// Sync cost changes to linked ownership_cost if applicable
if (updated && patch.details) {
await this.syncOwnershipCost(userId, updated, patch);
}
return updated;
return this.repo.updateMetadata(id, userId, patch as any);
}
return existing;
}
/**
 * Syncs cost data changes to linked ownership_cost record.
 * If document has linked cost and details.premium/cost changed, update it;
 * if no linked cost exists yet and the new amount is positive, create one.
 * Amounts <= 0 fall through both branches and leave costs untouched.
 * Best-effort: failures are logged and never fail the document update.
 */
private async syncOwnershipCost(
userId: string,
doc: DocumentRecord,
patch: UpdateDocumentBody
): Promise<void> {
const costType = this.mapDocumentTypeToCostType(doc.documentType);
if (!costType) return;
const newCostAmount = this.extractCostAmountFromDetails(patch.details);
if (newCostAmount === null) return; // No cost in update
try {
// Find existing linked cost
const linkedCosts = await this.ownershipCostsService.getCosts(userId, { documentId: doc.id });
if (linkedCosts.length > 0 && newCostAmount > 0) {
// Update existing linked cost
await this.ownershipCostsService.updateCost(userId, linkedCosts[0].id, {
amount: newCostAmount,
periodStart: patch.issuedDate ?? undefined,
periodEnd: patch.expirationDate ?? undefined,
});
} else if (linkedCosts.length === 0 && newCostAmount > 0) {
// Create new cost if none exists
await this.ownershipCostsService.createCost(userId, {
vehicleId: doc.vehicleId,
documentId: doc.id,
costType,
amount: newCostAmount,
description: doc.title,
// Prefer dates from the patch, falling back to the stored document.
periodStart: patch.issuedDate ?? doc.issuedDate ?? undefined,
periodEnd: patch.expirationDate ?? doc.expirationDate ?? undefined,
});
}
} catch (err) {
console.error('Failed to sync ownership cost for document:', doc.id, err);
}
}
/**
 * Extracts a cost amount from a details object (update path).
 * Prefers `premium` (insurance) over `cost` (registration). Unlike the
 * create-path extractor, zero and negative numbers are returned as-is;
 * callers decide how to treat non-positive amounts.
 */
private extractCostAmountFromDetails(details?: Record<string, any> | null): number | null {
if (details == null) return null;
const { premium, cost } = details;
if (typeof premium === 'number') return premium;
return typeof cost === 'number' ? cost : null;
}
/**
 * Soft-deletes a document owned by the user.
 * Linked ownership_cost records are removed by the FK CASCADE, so no
 * explicit cleanup happens here.
 */
async deleteDocument(userId: string, id: string): Promise<void> {
await this.repo.softDelete(id, userId);
}
/**
 * Attaches a vehicle to a document's shared_vehicle_ids list.
 * Throws tagged errors the controller maps to HTTP codes:
 *  - 404 when the document does not exist for this user;
 *  - 400 when the document is not insurance, or the vehicle is already
 *    the primary vehicle;
 *  - ownership errors from assertVehicleOwnership propagate as-is.
 * @returns the updated record, or null when the atomic repository update
 *          matched no row (e.g. the vehicle was already shared).
 */
async addVehicleToDocument(userId: string, docId: string, vehicleId: string): Promise<DocumentRecord | null> {
// Validate document exists and is owned by user
const doc = await this.repo.findById(docId, userId);
if (!doc) {
const err: any = new Error('Document not found');
err.statusCode = 404;
throw err;
}
// Only insurance documents support shared vehicles
if (doc.documentType !== 'insurance') {
const err: any = new Error('Shared vehicles are only supported for insurance documents');
err.statusCode = 400;
throw err;
}
// Validate vehicle ownership
await this.assertVehicleOwnership(userId, vehicleId);
// Check if vehicle is already the primary vehicle
if (doc.vehicleId === vehicleId) {
const err: any = new Error('Vehicle is already the primary vehicle for this document');
err.statusCode = 400;
throw err;
}
// Add to shared vehicles (repository handles duplicate check)
return this.repo.addSharedVehicle(docId, userId, vehicleId);
}
/**
 * Detaches a vehicle from a document with context-aware behavior:
 *  - shared-only vehicle: atomically removed from shared_vehicle_ids;
 *  - primary vehicle with no shared vehicles: document is soft-deleted
 *    (returns null, which the controller maps to 204);
 *  - primary vehicle with shared vehicles: the first shared vehicle is
 *    promoted to primary and removed from the shared list.
 * Throws tagged errors: 404 (document missing), 400 (vehicle not
 * associated with the document).
 * @returns the updated record, or null when the document was soft-deleted
 *          or the final lookup found no row.
 */
async removeVehicleFromDocument(userId: string, docId: string, vehicleId: string): Promise<DocumentRecord | null> {
// Validate document exists and is owned by user.
const doc = await this.repo.findById(docId, userId);
if (!doc) {
const err: any = new Error('Document not found');
err.statusCode = 404;
throw err;
}
const isSharedVehicle = doc.sharedVehicleIds.includes(vehicleId);
const isPrimaryVehicle = doc.vehicleId === vehicleId;
if (!isSharedVehicle && !isPrimaryVehicle) {
const err: any = new Error('Vehicle is not associated with this document');
err.statusCode = 400;
throw err;
}
// Case 1: vehicle is only in the shared list -> atomic array_remove.
if (isSharedVehicle && !isPrimaryVehicle) {
return this.repo.removeSharedVehicle(docId, userId, vehicleId);
}
// From here on the vehicle is the primary vehicle.
// Case 2: no shared vehicles remain -> the document loses its last
// association, so soft-delete it and signal that with null.
if (doc.sharedVehicleIds.length === 0) {
await this.repo.softDelete(docId, userId);
return null;
}
// Case 3: shared vehicles remain -> promote the first one to primary.
// Both columns are updated in a single statement so the document is never
// observable in a half-updated state (the previous implementation issued
// two separate UPDATEs and mixed .then() with async/await).
const newPrimaryId = doc.sharedVehicleIds[0];
const remainingShared = doc.sharedVehicleIds.slice(1);
const res = await pool.query(
'UPDATE documents SET vehicle_id = $1, shared_vehicle_ids = $2 WHERE id = $3 AND user_id = $4 AND deleted_at IS NULL RETURNING *',
[newPrimaryId, remainingShared, docId, userId]
);
if (!res.rows[0]) return null;
return this.repo.findById(docId, userId);
}
/**
 * Returns every document associated with a vehicle (primary or shared),
 * after verifying the vehicle belongs to the user. Ownership failures
 * propagate from assertVehicleOwnership.
 */
async getDocumentsByVehicle(userId: string, vehicleId: string): Promise<DocumentRecord[]> {
await this.assertVehicleOwnership(userId, vehicleId);
const docs = await this.repo.listByVehicle(userId, vehicleId);
return docs;
}
private async assertVehicleOwnership(userId: string, vehicleId: string) {
const res = await pool.query('SELECT id FROM vehicles WHERE id = $1 AND user_id = $2', [vehicleId, userId]);
if (!res.rows[0]) {

View File

@@ -22,7 +22,6 @@ export interface DocumentRecord {
expirationDate?: string | null;
emailNotifications?: boolean;
scanForMaintenance?: boolean;
sharedVehicleIds: string[];
createdAt: string;
updatedAt: string;
deletedAt?: string | null;
@@ -39,7 +38,6 @@ export const CreateDocumentBodySchema = z.object({
expirationDate: z.string().optional(),
emailNotifications: z.boolean().optional(),
scanForMaintenance: z.boolean().optional(),
sharedVehicleIds: z.array(z.string().uuid()).optional(),
});
export type CreateDocumentBody = z.infer<typeof CreateDocumentBodySchema>;
@@ -51,7 +49,6 @@ export const UpdateDocumentBodySchema = z.object({
expirationDate: z.string().nullable().optional(),
emailNotifications: z.boolean().optional(),
scanForMaintenance: z.boolean().optional(),
sharedVehicleIds: z.array(z.string().uuid()).optional(),
});
export type UpdateDocumentBody = z.infer<typeof UpdateDocumentBodySchema>;

View File

@@ -1,10 +0,0 @@
-- Migration: Reset scanForMaintenance for free tier users
-- This migration is part of the tier-gating feature implementation.
-- scanForMaintenance is now a Pro feature, so existing free users with it enabled need to be reset.
-- Uses UPDATE ... FROM to join documents to user_profiles on the Auth0
-- subject; only rows that currently have the flag set are touched, so the
-- statement is idempotent and safe to re-run.
UPDATE documents d
SET scan_for_maintenance = false
FROM user_profiles u
WHERE d.user_id = u.auth0_sub
AND u.subscription_tier = 'free'
AND d.scan_for_maintenance = true;

View File

@@ -1,18 +0,0 @@
-- Migration: Add shared_vehicle_ids array column for cross-vehicle document sharing
-- Issue: #31
-- Allows a document to be shared with multiple vehicles beyond its primary vehicle_id
-- Add shared_vehicle_ids column with default empty array
-- (NOT NULL + '{}' default means existing rows need no backfill).
ALTER TABLE documents
ADD COLUMN shared_vehicle_ids UUID[] DEFAULT '{}' NOT NULL;
-- Add GIN index for efficient array membership queries
-- This allows fast lookups of "which documents are shared with vehicle X"
CREATE INDEX idx_documents_shared_vehicle_ids ON documents USING GIN (shared_vehicle_ids array_ops);
-- Example usage:
-- 1. Find all documents shared with a specific vehicle:
-- SELECT * FROM documents WHERE 'vehicle-uuid-here' = ANY(shared_vehicle_ids);
--
-- 2. Find documents by primary OR shared vehicle:
-- SELECT * FROM documents WHERE vehicle_id = 'uuid' OR 'uuid' = ANY(shared_vehicle_ids);

View File

@@ -1,299 +0,0 @@
/**
* @ai-summary Unit tests for tier validation in DocumentsController
* @ai-context Tests that free users cannot use scanForMaintenance feature
*/
// Mock config and dependencies first (before any imports that might use them)
// config-loader: inert connection strings so modules that read config at
// import time never touch real infrastructure.
jest.mock('../../../../core/config/config-loader', () => ({
appConfig: {
getDatabaseUrl: () => 'postgresql://mock:mock@localhost/mock',
getRedisUrl: () => 'redis://localhost',
get: () => ({}),
},
config: {
database: { connectionString: 'mock' },
redis: { url: 'mock' },
auth0: { domain: 'mock', clientId: 'mock', audience: 'mock' },
storage: { provider: 'filesystem', root: '/tmp' },
logging: { level: 'error' },
},
}));
// database: stub both the named `pool` and default export so no real
// connection pool is created.
jest.mock('../../../../core/config/database', () => ({
pool: {
query: jest.fn(),
connect: jest.fn(),
end: jest.fn(),
},
default: {
query: jest.fn(),
connect: jest.fn(),
end: jest.fn(),
},
}));
// logger: silence structured logging while keeping calls observable.
jest.mock('../../../../core/logging/logger', () => ({
logger: {
info: jest.fn(),
warn: jest.fn(),
error: jest.fn(),
debug: jest.fn(),
},
}));
// storage: no-op object store so upload/download paths never hit disk or S3.
jest.mock('../../../../core/storage/storage.service', () => ({
getStorageService: jest.fn(() => ({
putObject: jest.fn(),
getObjectStream: jest.fn(),
deleteObject: jest.fn(),
headObject: jest.fn(),
})),
}));
// Auto-mock the service class; a concrete mock instance is wired in beforeEach.
jest.mock('../../domain/documents.service');
import { FastifyRequest, FastifyReply } from 'fastify';
import { DocumentsController } from '../../api/documents.controller';
import { DocumentsService } from '../../domain/documents.service';
const MockedService = jest.mocked(DocumentsService);
describe('DocumentsController - Tier Validation', () => {
let controller: DocumentsController;
let mockServiceInstance: jest.Mocked<DocumentsService>;
// Builds a minimal FastifyRequest stand-in for controller calls.
// Defaults to an authenticated free-tier user with empty body/params/query;
// individual tests supply `overrides` (typically `body`) to vary the input.
const createMockRequest = (overrides: Partial<FastifyRequest> = {}): FastifyRequest => ({
user: { sub: 'user-123' },
userContext: {
userId: 'user-123',
email: 'test@example.com',
emailVerified: true,
onboardingCompleted: true,
isAdmin: false,
subscriptionTier: 'free',
},
body: {},
params: {},
query: {},
...overrides,
} as unknown as FastifyRequest);
const createMockReply = (): Partial<FastifyReply> & { payload?: unknown; statusCode?: number } => ({
sent: false,
code: jest.fn(function(this: any, status: number) {
this.statusCode = status;
return this;
}),
send: jest.fn(function(this: any, payload: unknown) {
this.payload = payload;
this.sent = true;
return this;
}),
});
beforeEach(() => {
jest.clearAllMocks();
mockServiceInstance = {
createDocument: jest.fn(),
updateDocument: jest.fn(),
getDocument: jest.fn(),
listDocuments: jest.fn(),
deleteDocument: jest.fn(),
} as any;
MockedService.mockImplementation(() => mockServiceInstance);
controller = new DocumentsController();
});
describe('create - scanForMaintenance tier gating', () => {
const baseDocumentBody = {
vehicleId: 'vehicle-123',
documentType: 'manual',
title: 'Service Manual',
};
it('allows free user to create document without scanForMaintenance', async () => {
const request = createMockRequest({
body: { ...baseDocumentBody, scanForMaintenance: false },
});
const reply = createMockReply();
mockServiceInstance.createDocument.mockResolvedValue({
id: 'doc-123',
userId: 'user-123',
vehicleId: 'vehicle-123',
documentType: 'manual',
title: 'Service Manual',
scanForMaintenance: false,
} as any);
await controller.create(request as any, reply as FastifyReply);
expect(reply.code).toHaveBeenCalledWith(201);
expect(mockServiceInstance.createDocument).toHaveBeenCalled();
});
it('blocks free user from using scanForMaintenance=true', async () => {
const request = createMockRequest({
body: { ...baseDocumentBody, scanForMaintenance: true },
});
const reply = createMockReply();
await controller.create(request as any, reply as FastifyReply);
expect(reply.code).toHaveBeenCalledWith(403);
expect(reply.send).toHaveBeenCalledWith(
expect.objectContaining({
error: 'TIER_REQUIRED',
requiredTier: 'pro',
currentTier: 'free',
feature: 'document.scanMaintenanceSchedule',
featureName: 'Scan for Maintenance Schedule',
})
);
expect(mockServiceInstance.createDocument).not.toHaveBeenCalled();
});
it('allows pro user to use scanForMaintenance=true', async () => {
const request = createMockRequest({
body: { ...baseDocumentBody, scanForMaintenance: true },
userContext: {
userId: 'user-123',
email: 'pro@example.com',
emailVerified: true,
onboardingCompleted: true,
isAdmin: false,
subscriptionTier: 'pro',
},
});
const reply = createMockReply();
mockServiceInstance.createDocument.mockResolvedValue({
id: 'doc-123',
userId: 'user-123',
vehicleId: 'vehicle-123',
documentType: 'manual',
title: 'Service Manual',
scanForMaintenance: true,
} as any);
await controller.create(request as any, reply as FastifyReply);
expect(reply.code).toHaveBeenCalledWith(201);
expect(mockServiceInstance.createDocument).toHaveBeenCalled();
});
it('allows enterprise user to use scanForMaintenance=true', async () => {
const request = createMockRequest({
body: { ...baseDocumentBody, scanForMaintenance: true },
userContext: {
userId: 'user-123',
email: 'enterprise@example.com',
emailVerified: true,
onboardingCompleted: true,
isAdmin: false,
subscriptionTier: 'enterprise',
},
});
const reply = createMockReply();
mockServiceInstance.createDocument.mockResolvedValue({
id: 'doc-123',
userId: 'user-123',
vehicleId: 'vehicle-123',
documentType: 'manual',
title: 'Service Manual',
scanForMaintenance: true,
} as any);
await controller.create(request as any, reply as FastifyReply);
expect(reply.code).toHaveBeenCalledWith(201);
expect(mockServiceInstance.createDocument).toHaveBeenCalled();
});
it('defaults to free tier when userContext is missing', async () => {
const request = createMockRequest({
body: { ...baseDocumentBody, scanForMaintenance: true },
userContext: undefined,
});
const reply = createMockReply();
await controller.create(request as any, reply as FastifyReply);
expect(reply.code).toHaveBeenCalledWith(403);
expect(reply.send).toHaveBeenCalledWith(
expect.objectContaining({
error: 'TIER_REQUIRED',
currentTier: 'free',
})
);
});
});
describe('update - scanForMaintenance tier gating', () => {
const documentId = 'doc-123';
it('allows free user to update document without scanForMaintenance', async () => {
const request = createMockRequest({
params: { id: documentId },
body: { title: 'Updated Title' },
});
const reply = createMockReply();
mockServiceInstance.updateDocument.mockResolvedValue({
id: documentId,
title: 'Updated Title',
} as any);
await controller.update(request as any, reply as FastifyReply);
expect(reply.code).toHaveBeenCalledWith(200);
expect(mockServiceInstance.updateDocument).toHaveBeenCalled();
});
it('blocks free user from setting scanForMaintenance=true on update', async () => {
const request = createMockRequest({
params: { id: documentId },
body: { scanForMaintenance: true },
});
const reply = createMockReply();
await controller.update(request as any, reply as FastifyReply);
expect(reply.code).toHaveBeenCalledWith(403);
expect(reply.send).toHaveBeenCalledWith(
expect.objectContaining({
error: 'TIER_REQUIRED',
requiredTier: 'pro',
currentTier: 'free',
feature: 'document.scanMaintenanceSchedule',
})
);
expect(mockServiceInstance.updateDocument).not.toHaveBeenCalled();
});
it('allows pro user to set scanForMaintenance=true on update', async () => {
const request = createMockRequest({
params: { id: documentId },
body: { scanForMaintenance: true },
userContext: {
userId: 'user-123',
email: 'pro@example.com',
emailVerified: true,
onboardingCompleted: true,
isAdmin: false,
subscriptionTier: 'pro',
},
});
const reply = createMockReply();
mockServiceInstance.updateDocument.mockResolvedValue({
id: documentId,
scanForMaintenance: true,
} as any);
await controller.update(request as any, reply as FastifyReply);
expect(reply.code).toHaveBeenCalledWith(200);
expect(mockServiceInstance.updateDocument).toHaveBeenCalled();
});
});
});

View File

@@ -1,188 +0,0 @@
/**
* @ai-summary Controller for Resend inbound email webhook and user-facing pending association endpoints
* @ai-context Webhook handler (public) + pending association CRUD (JWT-authenticated)
*/
import { FastifyRequest, FastifyReply } from 'fastify';
import { ResendInboundClient } from '../external/resend-inbound.client';
import { EmailIngestionRepository } from '../data/email-ingestion.repository';
import { EmailIngestionService } from '../domain/email-ingestion.service';
import { logger } from '../../../core/logging/logger';
import type { ResendWebhookEvent } from '../domain/email-ingestion.types';
export class EmailIngestionController {
  /** Verifies Svix signatures for the public Resend webhook. */
  private resendClient: ResendInboundClient;
  /** Persistence for the ingestion queue and pending vehicle associations. */
  private repository: EmailIngestionRepository;
  /** Orchestrates the asynchronous email-to-record processing pipeline. */
  private service: EmailIngestionService;

  constructor() {
    this.resendClient = new ResendInboundClient();
    this.repository = new EmailIngestionRepository();
    this.service = new EmailIngestionService();
  }

  // ========================
  // Pending Association Endpoints (JWT-authenticated)
  // ========================

  /** Lists the authenticated user's unresolved vehicle associations. */
  async getPendingAssociations(request: FastifyRequest, reply: FastifyReply): Promise<void> {
    try {
      const pending = await this.repository.getPendingAssociations(request.userContext!.userId);
      return reply.code(200).send(pending);
    } catch (err: any) {
      logger.error('Error listing pending associations', { error: err.message, userId: request.userContext?.userId });
      return reply.code(500).send({ error: 'Failed to list pending associations' });
    }
  }

  /** Returns the number of unresolved associations (e.g. for a badge count). */
  async getPendingAssociationCount(request: FastifyRequest, reply: FastifyReply): Promise<void> {
    try {
      const count = await this.repository.getPendingAssociationCount(request.userContext!.userId);
      return reply.code(200).send({ count });
    } catch (err: any) {
      logger.error('Error counting pending associations', { error: err.message, userId: request.userContext?.userId });
      return reply.code(500).send({ error: 'Failed to count pending associations' });
    }
  }

  /**
   * Resolves a pending association by attaching it to a chosen vehicle.
   * Known service error messages are mapped onto 404/403/409; anything
   * else becomes a 500.
   */
  async resolveAssociation(
    request: FastifyRequest<{ Params: { id: string }; Body: { vehicleId: string } }>,
    reply: FastifyReply
  ): Promise<void> {
    try {
      // Read userContext before validating the body so a missing context
      // surfaces via the catch path, matching prior behavior.
      const userId = request.userContext!.userId;
      const { id } = request.params;
      const { vehicleId } = request.body;
      if (!vehicleId || typeof vehicleId !== 'string') {
        return reply.code(400).send({ error: 'vehicleId is required' });
      }
      const outcome = await this.service.resolveAssociation(id, vehicleId, userId);
      return reply.code(200).send(outcome);
    } catch (err: any) {
      logger.error('Error resolving pending association', {
        error: err.message,
        associationId: request.params.id,
        userId: request.userContext?.userId,
      });
      const msg = err.message;
      if (msg === 'Pending association not found' || msg === 'Vehicle not found') {
        return reply.code(404).send({ error: msg });
      }
      if (msg === 'Unauthorized') {
        return reply.code(403).send({ error: 'Not authorized' });
      }
      if (msg === 'Association already resolved') {
        return reply.code(409).send({ error: msg });
      }
      return reply.code(500).send({ error: 'Failed to resolve association' });
    }
  }

  /**
   * Dismisses (discards) a pending association without creating a record.
   * Responds 204 on success; maps known service errors to 404/403/409.
   */
  async dismissAssociation(
    request: FastifyRequest<{ Params: { id: string } }>,
    reply: FastifyReply
  ): Promise<void> {
    try {
      const userId = request.userContext!.userId;
      await this.service.dismissAssociation(request.params.id, userId);
      return reply.code(204).send();
    } catch (err: any) {
      logger.error('Error dismissing pending association', {
        error: err.message,
        associationId: request.params.id,
        userId: request.userContext?.userId,
      });
      const msg = err.message;
      if (msg === 'Pending association not found') {
        return reply.code(404).send({ error: msg });
      }
      if (msg === 'Unauthorized') {
        return reply.code(403).send({ error: 'Not authorized' });
      }
      if (msg === 'Association already resolved') {
        return reply.code(409).send({ error: msg });
      }
      return reply.code(500).send({ error: 'Failed to dismiss association' });
    }
  }

  // ========================
  // Webhook Endpoint (Public)
  // ========================

  /**
   * Handles the Resend inbound-email webhook. Verifies the Svix signature
   * against the raw request bytes, deduplicates by email_id, enqueues the
   * email, acknowledges with 200, then kicks off async processing.
   */
  async handleInboundWebhook(request: FastifyRequest, reply: FastifyReply): Promise<void> {
    try {
      const raw = (request as any).rawBody;
      if (!raw) {
        logger.error('Missing raw body in Resend webhook request');
        return reply.status(400).send({ error: 'Missing raw body' });
      }

      // Collect the Svix headers needed for signature verification.
      const header = (name: string): string => (request.headers[name] as string) || '';
      const headers: Record<string, string> = {
        'svix-id': header('svix-id'),
        'svix-timestamp': header('svix-timestamp'),
        'svix-signature': header('svix-signature'),
      };

      let event: ResendWebhookEvent;
      try {
        event = this.resendClient.verifyWebhookSignature(raw, headers);
      } catch (err: any) {
        logger.warn('Invalid Resend webhook signature', { error: err.message });
        return reply.status(400).send({ error: 'Invalid signature' });
      }

      const { email_id: emailId, from: senderEmail } = event.data;

      // Idempotency: a webhook for an email_id already queued is a no-op.
      if (await this.repository.findByEmailId(emailId)) {
        logger.info('Duplicate email webhook received, skipping', { emailId });
        return reply.status(200).send({ received: true, duplicate: true });
      }

      await this.repository.insertQueueEntry({
        emailId,
        senderEmail,
        userId: senderEmail, // Resolved to auth0_sub during processing
        receivedAt: event.data.created_at || new Date().toISOString(),
        subject: event.data.subject,
      });
      logger.info('Inbound email queued for processing', { emailId, senderEmail });

      // Acknowledge before processing so Resend does not retry on slow work.
      reply.status(200).send({ received: true });

      // Fire-and-forget: processing failures are logged, never re-thrown.
      setImmediate(() => {
        this.service.processEmail(emailId, event).catch((err) => {
          logger.error('Async email processing failed', {
            emailId,
            error: err instanceof Error ? err.message : String(err),
          });
        });
      });
    } catch (err: any) {
      logger.error('Resend webhook handler error', {
        error: err.message,
        stack: err.stack,
      });
      return reply.status(500).send({ error: 'Webhook processing failed' });
    }
  }
}

View File

@@ -1,60 +0,0 @@
/**
* @ai-summary Resend inbound webhook + user-facing pending association routes
* @ai-context Public webhook (no JWT) + authenticated CRUD for pending vehicle associations
*/
import { FastifyPluginAsync } from 'fastify';
import { EmailIngestionController } from './email-ingestion.controller';
/** Public webhook route - registered without JWT auth; Resend is authenticated via Svix signature verification instead */
export const emailIngestionWebhookRoutes: FastifyPluginAsync = async (fastify) => {
  const controller = new EmailIngestionController();

  // POST /api/webhooks/resend/inbound - PUBLIC endpoint (no JWT auth).
  // rawBody MUST be enabled: signature verification needs the exact
  // request bytes, not the parsed body.
  fastify.post('/webhooks/resend/inbound', {
    config: { rawBody: true },
    handler: controller.handleInboundWebhook.bind(controller),
  });
};
/** Authenticated user-facing routes for pending vehicle associations */
export const emailIngestionRoutes: FastifyPluginAsync = async (fastify) => {
  const controller = new EmailIngestionController();
  // Every route below requires a verified JWT.
  const requireAuth = [fastify.authenticate];

  // GET /api/email-ingestion/pending - list pending associations for the authenticated user
  fastify.get('/email-ingestion/pending', {
    preHandler: requireAuth,
    handler: controller.getPendingAssociations.bind(controller),
  });

  // GET /api/email-ingestion/pending/count - number of pending associations
  fastify.get('/email-ingestion/pending/count', {
    preHandler: requireAuth,
    handler: controller.getPendingAssociationCount.bind(controller),
  });

  // POST /api/email-ingestion/pending/:id/resolve - resolve by selecting a vehicle
  fastify.post<{ Params: { id: string }; Body: { vehicleId: string } }>(
    '/email-ingestion/pending/:id/resolve',
    {
      preHandler: requireAuth,
      handler: controller.resolveAssociation.bind(controller),
    }
  );

  // DELETE /api/email-ingestion/pending/:id - dismiss/discard a pending association
  fastify.delete<{ Params: { id: string } }>(
    '/email-ingestion/pending/:id',
    {
      preHandler: requireAuth,
      handler: controller.dismissAssociation.bind(controller),
    }
  );
};

View File

@@ -1,257 +0,0 @@
/**
* @ai-summary Data access layer for email ingestion queue and pending vehicle associations
* @ai-context Provides CRUD operations with standard mapRow() snake_case -> camelCase conversion
*/
import { Pool } from 'pg';
import pool from '../../../core/config/database';
import { logger } from '../../../core/logging/logger';
import type {
EmailIngestionQueueRecord,
EmailIngestionStatus,
EmailProcessingResult,
PendingVehicleAssociation,
PendingAssociationStatus,
EmailRecordType,
ExtractedReceiptData,
} from '../domain/email-ingestion.types';
export class EmailIngestionRepository {
  // Defaults to the shared application pool; tests may inject their own.
  constructor(private readonly db: Pool = pool) {}
  // ========================
  // Row Mappers
  // ========================
  /** Converts a snake_case email_ingestion_queue row to a camelCase record. */
  private mapQueueRow(row: any): EmailIngestionQueueRecord {
    return {
      id: row.id,
      emailId: row.email_id,
      senderEmail: row.sender_email,
      userId: row.user_id,
      receivedAt: row.received_at,
      subject: row.subject,
      status: row.status,
      processingResult: row.processing_result,
      errorMessage: row.error_message,
      retryCount: row.retry_count,
      createdAt: row.created_at,
      updatedAt: row.updated_at,
    };
  }
  /** Converts a snake_case pending_vehicle_associations row to a camelCase record. */
  private mapPendingAssociationRow(row: any): PendingVehicleAssociation {
    return {
      id: row.id,
      userId: row.user_id,
      recordType: row.record_type,
      extractedData: row.extracted_data,
      documentId: row.document_id,
      status: row.status,
      createdAt: row.created_at,
      resolvedAt: row.resolved_at,
    };
  }
  // ========================
  // Queue Operations
  // ========================
  /**
   * Inserts a new queue entry with status 'pending'.
   * @param entry - Email metadata from the webhook; userId may initially be
   *   the sender email (resolved to auth0_sub later by the caller).
   * @returns The inserted record, mapped to camelCase.
   * @throws Re-throws any database error after logging.
   */
  async insertQueueEntry(entry: {
    emailId: string;
    senderEmail: string;
    userId: string;
    receivedAt: string;
    subject: string | null;
  }): Promise<EmailIngestionQueueRecord> {
    try {
      const res = await this.db.query(
        `INSERT INTO email_ingestion_queue
         (email_id, sender_email, user_id, received_at, subject, status)
         VALUES ($1, $2, $3, $4, $5, 'pending')
         RETURNING *`,
        [
          entry.emailId,
          entry.senderEmail,
          entry.userId,
          entry.receivedAt,
          entry.subject,
        ]
      );
      return this.mapQueueRow(res.rows[0]);
    } catch (error) {
      logger.error('Error inserting queue entry', { error, emailId: entry.emailId });
      throw error;
    }
  }
  /**
   * Updates the status (and optionally other fields) of a queue entry
   * identified by its email_id.
   *
   * Builds the SET clause dynamically: $1 is always emailId and $2 the new
   * status; additional positional parameters are appended in the same order
   * the fields are pushed, so fields[] and params[] must stay in lockstep.
   * @returns The updated record, or null if no row matched the email_id.
   */
  async updateQueueStatus(
    emailId: string,
    status: EmailIngestionStatus,
    updates?: {
      processingResult?: EmailProcessingResult;
      errorMessage?: string;
      retryCount?: number;
      userId?: string;
    }
  ): Promise<EmailIngestionQueueRecord | null> {
    try {
      const fields: string[] = ['status = $2'];
      const params: any[] = [emailId, status];
      let paramIndex = 3;
      if (updates?.processingResult !== undefined) {
        fields.push(`processing_result = $${paramIndex++}`);
        // Serialized explicitly so it lands in a JSON/JSONB column.
        params.push(JSON.stringify(updates.processingResult));
      }
      if (updates?.errorMessage !== undefined) {
        fields.push(`error_message = $${paramIndex++}`);
        params.push(updates.errorMessage);
      }
      if (updates?.retryCount !== undefined) {
        fields.push(`retry_count = $${paramIndex++}`);
        params.push(updates.retryCount);
      }
      if (updates?.userId !== undefined) {
        fields.push(`user_id = $${paramIndex++}`);
        params.push(updates.userId);
      }
      const res = await this.db.query(
        `UPDATE email_ingestion_queue
         SET ${fields.join(', ')}
         WHERE email_id = $1
         RETURNING *`,
        params
      );
      return res.rows[0] ? this.mapQueueRow(res.rows[0]) : null;
    } catch (error) {
      logger.error('Error updating queue status', { error, emailId, status });
      throw error;
    }
  }
  /**
   * Fetches a single queue entry by email_id.
   * @returns The record, or null when no entry exists for that email_id.
   */
  async getQueueEntry(emailId: string): Promise<EmailIngestionQueueRecord | null> {
    try {
      const res = await this.db.query(
        `SELECT * FROM email_ingestion_queue WHERE email_id = $1`,
        [emailId]
      );
      return res.rows[0] ? this.mapQueueRow(res.rows[0]) : null;
    } catch (error) {
      logger.error('Error fetching queue entry', { error, emailId });
      throw error;
    }
  }
  /** Alias for getQueueEntry; used by the webhook for idempotency checks. */
  async findByEmailId(emailId: string): Promise<EmailIngestionQueueRecord | null> {
    return this.getQueueEntry(emailId);
  }
  /**
   * Lists failed entries still eligible for retry (retry_count below the
   * threshold), oldest first.
   * @param maxRetries - Exclusive upper bound on retry_count (default 3).
   */
  async getRetryableEntries(maxRetries: number = 3): Promise<EmailIngestionQueueRecord[]> {
    try {
      const res = await this.db.query(
        `SELECT * FROM email_ingestion_queue
         WHERE status = 'failed'
         AND retry_count < $1
         ORDER BY created_at ASC`,
        [maxRetries]
      );
      return res.rows.map(row => this.mapQueueRow(row));
    } catch (error) {
      logger.error('Error fetching retryable entries', { error });
      throw error;
    }
  }
  // ========================
  // Pending Association Operations
  // ========================
  /**
   * Creates a pending vehicle association (status 'pending') for extracted
   * receipt data that could not be matched to a vehicle automatically.
   * @param association - documentId may be null when no document was stored.
   */
  async insertPendingAssociation(association: {
    userId: string;
    recordType: EmailRecordType;
    extractedData: ExtractedReceiptData;
    documentId: string | null;
  }): Promise<PendingVehicleAssociation> {
    try {
      const res = await this.db.query(
        `INSERT INTO pending_vehicle_associations
         (user_id, record_type, extracted_data, document_id, status)
         VALUES ($1, $2, $3, $4, 'pending')
         RETURNING *`,
        [
          association.userId,
          association.recordType,
          // Serialized explicitly for the JSON/JSONB extracted_data column.
          JSON.stringify(association.extractedData),
          association.documentId,
        ]
      );
      return this.mapPendingAssociationRow(res.rows[0]);
    } catch (error) {
      logger.error('Error inserting pending association', { error, userId: association.userId });
      throw error;
    }
  }
  /**
   * Fetches a pending association by primary key.
   * @returns The record, or null when no row has that id.
   */
  async getPendingAssociationById(associationId: string): Promise<PendingVehicleAssociation | null> {
    try {
      const res = await this.db.query(
        `SELECT * FROM pending_vehicle_associations WHERE id = $1`,
        [associationId]
      );
      return res.rows[0] ? this.mapPendingAssociationRow(res.rows[0]) : null;
    } catch (error) {
      logger.error('Error fetching pending association by id', { error, associationId });
      throw error;
    }
  }
  /**
   * Counts a user's unresolved associations.
   * COUNT(*) is cast to int in SQL because pg returns bigint as a string.
   */
  async getPendingAssociationCount(userId: string): Promise<number> {
    try {
      const res = await this.db.query(
        `SELECT COUNT(*)::int AS count FROM pending_vehicle_associations
         WHERE user_id = $1 AND status = 'pending'`,
        [userId]
      );
      return res.rows[0]?.count ?? 0;
    } catch (error) {
      logger.error('Error counting pending associations', { error, userId });
      throw error;
    }
  }
  /** Lists a user's unresolved associations, newest first. */
  async getPendingAssociations(userId: string): Promise<PendingVehicleAssociation[]> {
    try {
      const res = await this.db.query(
        `SELECT * FROM pending_vehicle_associations
         WHERE user_id = $1 AND status = 'pending'
         ORDER BY created_at DESC`,
        [userId]
      );
      return res.rows.map(row => this.mapPendingAssociationRow(row));
    } catch (error) {
      logger.error('Error fetching pending associations', { error, userId });
      throw error;
    }
  }
  /**
   * Marks an association as resolved (or another terminal status, e.g.
   * 'dismissed') and stamps resolved_at.
   * @returns The updated record, or null if the id did not match a row.
   */
  async resolvePendingAssociation(
    associationId: string,
    status: PendingAssociationStatus = 'resolved'
  ): Promise<PendingVehicleAssociation | null> {
    try {
      const res = await this.db.query(
        `UPDATE pending_vehicle_associations
         SET status = $2, resolved_at = CURRENT_TIMESTAMP
         WHERE id = $1
         RETURNING *`,
        [associationId, status]
      );
      return res.rows[0] ? this.mapPendingAssociationRow(res.rows[0]) : null;
    } catch (error) {
      logger.error('Error resolving pending association', { error, associationId });
      throw error;
    }
  }
}

View File

@@ -1,844 +0,0 @@
/**
* @ai-summary Core processing service for the email-to-record pipeline
* @ai-context Orchestrates sender validation, OCR extraction, record classification,
* vehicle association, status tracking, and retry logic. Delegates all notifications
* (emails, in-app, logging) to EmailIngestionNotificationHandler.
*/
import { Pool } from 'pg';
import pool from '../../../core/config/database';
import { logger } from '../../../core/logging/logger';
import { EmailIngestionRepository } from '../data/email-ingestion.repository';
import { ResendInboundClient, type ParsedEmailAttachment } from '../external/resend-inbound.client';
import { UserProfileRepository } from '../../user-profile/data/user-profile.repository';
import { VehiclesRepository } from '../../vehicles/data/vehicles.repository';
import { NotificationsRepository } from '../../notifications/data/notifications.repository';
import { TemplateService } from '../../notifications/domain/template.service';
import { EmailService } from '../../notifications/domain/email.service';
import { ocrService } from '../../ocr/domain/ocr.service';
import type { ReceiptExtractionResponse } from '../../ocr/domain/ocr.types';
import { ReceiptClassifier } from './receipt-classifier';
import { EmailIngestionNotificationHandler } from './notification-handler';
import { FuelLogsService } from '../../fuel-logs/domain/fuel-logs.service';
import { FuelLogsRepository } from '../../fuel-logs/data/fuel-logs.repository';
import { FuelType } from '../../fuel-logs/domain/fuel-logs.types';
import type { EnhancedCreateFuelLogRequest } from '../../fuel-logs/domain/fuel-logs.types';
import { MaintenanceService } from '../../maintenance/domain/maintenance.service';
import type { MaintenanceCategory } from '../../maintenance/domain/maintenance.types';
import { validateSubtypes, getSubtypesForCategory } from '../../maintenance/domain/maintenance.types';
import type {
ResendWebhookEvent,
EmailProcessingResult,
ExtractedReceiptData,
EmailRecordType,
} from './email-ingestion.types';
/** Attachment MIME types the ingestion pipeline will accept (PDF + common image formats). */
const SUPPORTED_ATTACHMENT_TYPES = new Set<string>([
  'application/pdf',
  'image/png',
  'image/jpeg',
  'image/heic',
  'image/heif',
]);

/** Subset of image MIME types suitable for the receipt-specific OCR endpoints. */
const OCR_RECEIPT_IMAGE_TYPES = new Set<string>([
  'image/jpeg',
  'image/png',
  'image/heic',
  'image/heif',
]);

/** Hard cap on a single attachment: 10MB. */
const MAX_ATTACHMENT_SIZE = 10 * 1024 * 1024;

/** Maximum number of processing retries for a failed email. */
const MAX_RETRY_COUNT = 3;
export class EmailIngestionService {
private repository: EmailIngestionRepository;
private resendClient: ResendInboundClient;
private userProfileRepository: UserProfileRepository;
private vehiclesRepository: VehiclesRepository;
private notificationHandler: EmailIngestionNotificationHandler;
private classifier: ReceiptClassifier;
private fuelLogsService: FuelLogsService;
private maintenanceService: MaintenanceService;
constructor(dbPool?: Pool) {
const p = dbPool || pool;
this.repository = new EmailIngestionRepository(p);
this.resendClient = new ResendInboundClient();
this.userProfileRepository = new UserProfileRepository(p);
this.vehiclesRepository = new VehiclesRepository(p);
const notificationsRepository = new NotificationsRepository(p);
this.notificationHandler = new EmailIngestionNotificationHandler(
notificationsRepository,
new TemplateService(),
new EmailService(),
);
this.classifier = new ReceiptClassifier();
this.fuelLogsService = new FuelLogsService(new FuelLogsRepository(p));
this.maintenanceService = new MaintenanceService();
}
// ========================
// Main Processing Pipeline
// ========================
/**
* Process an inbound email through the full pipeline.
* Called asynchronously after webhook receipt is acknowledged.
*/
async processEmail(emailId: string, event: ResendWebhookEvent): Promise<void> {
const senderEmail = event.data.from;
const subject = event.data.subject;
try {
// 1. Mark as processing
await this.repository.updateQueueStatus(emailId, 'processing');
// 2. Validate sender
const userProfile = await this.validateSender(senderEmail);
if (!userProfile) {
await this.handleUnregisteredSender(emailId, senderEmail);
return;
}
const userId = userProfile.auth0Sub;
const userName = userProfile.displayName || userProfile.email;
// Update queue with resolved user_id
await this.repository.updateQueueStatus(emailId, 'processing', { userId });
// 3. Get attachments (from webhook data or by fetching raw email)
const attachments = await this.getAttachments(emailId, event);
// 4. Filter valid attachments
const validAttachments = this.filterAttachments(attachments);
if (validAttachments.length === 0) {
await this.handleNoValidAttachments(emailId, userId, userName, senderEmail);
return;
}
// 5. Classify receipt from email text first
const emailClassification = this.classifier.classifyFromText(subject, event.data.text);
logger.info('Email text classification result', {
emailId,
type: emailClassification.type,
confidence: emailClassification.confidence,
});
// 6. Process attachments through OCR using classification
const ocrResult = await this.processAttachmentsWithClassification(
userId, validAttachments, emailClassification, emailId
);
if (!ocrResult) {
await this.handleOcrFailure(emailId, userId, userName, senderEmail, 'No receipt data could be extracted from attachments');
return;
}
// 7. Build extracted data from OCR result
const extractedData = this.mapOcrToExtractedData(ocrResult.response);
const recordType = ocrResult.recordType;
// 8. Handle vehicle association
const processingResult = await this.handleVehicleAssociation(
userId, userName, senderEmail, recordType, extractedData
);
// 9. Mark as completed
await this.repository.updateQueueStatus(emailId, 'completed', {
processingResult,
});
logger.info('Email processing completed successfully', {
emailId,
userId,
recordType,
vehicleId: processingResult.vehicleId,
pendingAssociationId: processingResult.pendingAssociationId,
});
} catch (error) {
await this.handleProcessingError(emailId, senderEmail, subject, error);
}
}
// ========================
// Sender Validation
// ========================
private async validateSender(senderEmail: string): Promise<{
auth0Sub: string;
email: string;
displayName: string | null;
} | null> {
// Case-insensitive lookup by lowercasing the sender email
const profile = await this.userProfileRepository.getByEmail(senderEmail.toLowerCase());
if (profile) {
return {
auth0Sub: profile.auth0Sub,
email: profile.email,
displayName: profile.displayName ?? null,
};
}
// Try original case as fallback
if (senderEmail !== senderEmail.toLowerCase()) {
const fallback = await this.userProfileRepository.getByEmail(senderEmail);
if (fallback) {
return {
auth0Sub: fallback.auth0Sub,
email: fallback.email,
displayName: fallback.displayName ?? null,
};
}
}
return null;
}
// ========================
// Attachment Handling
// ========================
/**
* Get attachments from webhook data or by fetching the raw email
*/
private async getAttachments(
emailId: string,
event: ResendWebhookEvent
): Promise<ParsedEmailAttachment[]> {
// If webhook includes attachments with content, use those
if (event.data.attachments && event.data.attachments.length > 0) {
return event.data.attachments.map(att => ({
filename: att.filename,
contentType: att.content_type,
content: Buffer.from(att.content, 'base64'),
size: Buffer.from(att.content, 'base64').length,
}));
}
// Otherwise fetch and parse the raw email
try {
const { downloadUrl } = await this.resendClient.getEmail(emailId);
const rawEmail = await this.resendClient.downloadRawEmail(downloadUrl);
const parsed = await this.resendClient.parseEmail(rawEmail);
return parsed.attachments;
} catch (error) {
logger.warn('Failed to fetch raw email for attachments', {
emailId,
error: error instanceof Error ? error.message : String(error),
});
return [];
}
}
/**
* Filter attachments by supported type and size
*/
private filterAttachments(attachments: ParsedEmailAttachment[]): ParsedEmailAttachment[] {
return attachments.filter(att => {
if (!SUPPORTED_ATTACHMENT_TYPES.has(att.contentType)) {
logger.info('Skipping unsupported attachment type', {
filename: att.filename,
contentType: att.contentType,
});
return false;
}
if (att.size > MAX_ATTACHMENT_SIZE) {
logger.info('Skipping oversized attachment', {
filename: att.filename,
size: att.size,
maxSize: MAX_ATTACHMENT_SIZE,
});
return false;
}
return true;
});
}
// ========================
// OCR Processing
// ========================
/**
* Process attachments using classifier-driven OCR extraction.
* If email text classification is confident, calls the specific OCR endpoint.
* If not, performs general OCR and classifies from rawText.
* Returns null if no usable result or receipt is unclassified.
*/
private async processAttachmentsWithClassification(
userId: string,
attachments: ParsedEmailAttachment[],
emailClassification: { type: string; confidence: number },
emailId: string
): Promise<{ response: ReceiptExtractionResponse; recordType: EmailRecordType } | null> {
const imageAttachments = attachments.filter(att => OCR_RECEIPT_IMAGE_TYPES.has(att.contentType));
for (const attachment of imageAttachments) {
// If email text gave a confident classification, call the specific OCR endpoint first
if (emailClassification.type === 'fuel') {
const result = await this.extractFuelReceipt(userId, attachment);
if (result?.success) return { response: result, recordType: 'fuel_log' };
// Fuel OCR failed, try maintenance as fallback
const fallbackResult = await this.extractMaintenanceReceipt(userId, attachment);
if (fallbackResult?.success) return { response: fallbackResult, recordType: 'maintenance_record' };
continue;
}
if (emailClassification.type === 'maintenance') {
const result = await this.extractMaintenanceReceipt(userId, attachment);
if (result?.success) return { response: result, recordType: 'maintenance_record' };
// Maintenance OCR failed, try fuel as fallback
const fallbackResult = await this.extractFuelReceipt(userId, attachment);
if (fallbackResult?.success) return { response: fallbackResult, recordType: 'fuel_log' };
continue;
}
// Email text was not confident - try both OCR endpoints and classify from rawText
const fuelResult = await this.extractFuelReceipt(userId, attachment);
const maintenanceResult = await this.extractMaintenanceReceipt(userId, attachment);
// Use rawText from whichever succeeded for secondary classification
const rawText = fuelResult?.rawText || maintenanceResult?.rawText || '';
if (rawText) {
const ocrClassification = this.classifier.classifyFromOcrRawText(rawText);
logger.info('OCR rawText classification result', {
emailId,
type: ocrClassification.type,
confidence: ocrClassification.confidence,
});
if (ocrClassification.type === 'fuel' && fuelResult?.success) {
return { response: fuelResult, recordType: 'fuel_log' };
}
if (ocrClassification.type === 'maintenance' && maintenanceResult?.success) {
return { response: maintenanceResult, recordType: 'maintenance_record' };
}
}
// Both classifiers failed - fall back to field-count heuristic
const fallback = this.selectBestResultByFields(fuelResult, maintenanceResult);
if (fallback) return fallback;
}
return null;
}
/**
 * Run fuel-receipt OCR on a single email attachment.
 *
 * Delegates to ocrService.extractReceipt with receiptType 'fuel'. A thrown
 * error is logged at info level (failure here is expected input for the
 * caller's fallback logic) and null is returned instead of propagating.
 */
private async extractFuelReceipt(
  userId: string,
  attachment: ParsedEmailAttachment
): Promise<ReceiptExtractionResponse | null> {
  try {
    return await ocrService.extractReceipt(userId, {
      fileBuffer: attachment.content,
      contentType: attachment.contentType,
      receiptType: 'fuel',
    });
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    logger.info('Fuel receipt extraction failed', {
      filename: attachment.filename,
      error: reason,
    });
    return null;
  }
}
/**
 * Run maintenance-receipt OCR on a single email attachment.
 *
 * Delegates to ocrService.extractMaintenanceReceipt. A thrown error is
 * logged at info level and null is returned, letting the caller try other
 * extraction strategies.
 */
private async extractMaintenanceReceipt(
  userId: string,
  attachment: ParsedEmailAttachment
): Promise<ReceiptExtractionResponse | null> {
  try {
    return await ocrService.extractMaintenanceReceipt(userId, {
      fileBuffer: attachment.content,
      contentType: attachment.contentType,
    });
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    logger.info('Maintenance receipt extraction failed', {
      filename: attachment.filename,
      error: reason,
    });
    return null;
  }
}
/**
 * Last-resort fallback: select the better OCR result based on domain-specific
 * fields and field count when keyword classifiers could not decide.
 *
 * Only successful extractions are considered throughout. Previously the
 * domain-field tie-break inspected `extractedFields` of FAILED results too,
 * so a response with `success === false` could be returned as the winner
 * (e.g. failed fuel result with a leftover 'gallons' field beating a
 * successful maintenance result). All checks are now gated on success.
 */
private selectBestResultByFields(
  fuelResult: ReceiptExtractionResponse | null,
  maintenanceResult: ReceiptExtractionResponse | null
): { response: ReceiptExtractionResponse; recordType: EmailRecordType } | null {
  const fuelOk = fuelResult?.success === true;
  const maintenanceOk = maintenanceResult?.success === true;
  const fuelFieldCount = fuelOk
    ? Object.keys(fuelResult!.extractedFields).length
    : 0;
  const maintenanceFieldCount = maintenanceOk
    ? Object.keys(maintenanceResult!.extractedFields).length
    : 0;
  // Neither extraction succeeded with any fields — nothing to choose from.
  if (fuelFieldCount === 0 && maintenanceFieldCount === 0) {
    return null;
  }
  // Domain-specific signal fields, gated on success so a failed extraction
  // can never be selected.
  const hasFuelFields = fuelOk && Boolean(
    fuelResult!.extractedFields['gallons'] ||
    fuelResult!.extractedFields['price_per_gallon'] ||
    fuelResult!.extractedFields['fuel_type']
  );
  const hasMaintenanceFields = maintenanceOk && Boolean(
    maintenanceResult!.extractedFields['category'] ||
    maintenanceResult!.extractedFields['shop_name'] ||
    maintenanceResult!.extractedFields['description']
  );
  if (hasFuelFields && !hasMaintenanceFields) {
    return { response: fuelResult!, recordType: 'fuel_log' };
  }
  if (hasMaintenanceFields && !hasFuelFields) {
    return { response: maintenanceResult!, recordType: 'maintenance_record' };
  }
  // Ambiguous or no domain signal: fall back to raw field count, fuel first.
  if (fuelOk && fuelFieldCount >= maintenanceFieldCount) {
    return { response: fuelResult!, recordType: 'fuel_log' };
  }
  if (maintenanceOk) {
    return { response: maintenanceResult!, recordType: 'maintenance_record' };
  }
  return null;
}
/**
 * Map OCR extracted fields to our ExtractedReceiptData format.
 *
 * Each field helper returns null for missing/empty values; numeric helpers
 * also return null when the value does not parse as a float.
 */
private mapOcrToExtractedData(response: ReceiptExtractionResponse): ExtractedReceiptData {
  const fields = response.extractedFields;
  const text = (key: string): string | null => fields[key]?.value || null;
  const num = (key: string): number | null => {
    const raw = fields[key]?.value;
    if (!raw) return null;
    const parsed = parseFloat(raw);
    return Number.isNaN(parsed) ? null : parsed;
  };
  // Subtypes arrive as a single comma-separated string.
  const subtypesRaw = fields['subtypes']?.value;
  return {
    vendor: text('vendor') || text('shop_name'),
    date: text('date'),
    total: num('total'),
    odometerReading: num('odometer') || num('odometer_reading'),
    gallons: num('gallons'),
    pricePerGallon: num('price_per_gallon'),
    fuelType: text('fuel_type'),
    category: text('category'),
    subtypes: subtypesRaw ? subtypesRaw.split(',').map(s => s.trim()) : null,
    shopName: text('shop_name'),
    description: text('description'),
  };
}
// ========================
// Vehicle Association
// ========================
/**
 * Handle vehicle association based on the user's vehicle count.
 * - No vehicles: notify the user they must add one first.
 * - Single vehicle: auto-associate and create the record immediately.
 * - Multiple vehicles: store a pending association and ask the user to pick.
 */
private async handleVehicleAssociation(
  userId: string,
  userName: string,
  userEmail: string,
  recordType: EmailRecordType,
  extractedData: ExtractedReceiptData
): Promise<EmailProcessingResult> {
  const vehicles = await this.vehiclesRepository.findByUserId(userId);
  // Base result shape; each branch overrides the fields it resolves.
  const baseResult: EmailProcessingResult = {
    recordType,
    vehicleId: null,
    recordId: null,
    documentId: null,
    pendingAssociationId: null,
    extractedData,
  };
  // No vehicles: the user has nothing to attach the record to yet.
  if (vehicles.length === 0) {
    await this.notificationHandler.notifyNoVehicles(userId, userName, userEmail);
    return baseResult;
  }
  // Single vehicle: auto-associate and create the record.
  if (vehicles.length === 1) {
    const [vehicle] = vehicles;
    let recordId: string | null = null;
    try {
      recordId = await this.createRecord(userId, vehicle.id, recordType, extractedData);
    } catch (error) {
      // Record creation failure is logged but we still notify the user.
      logger.error('Failed to create record from email receipt', {
        userId,
        vehicleId: vehicle.id,
        recordType,
        error: error instanceof Error ? error.message : String(error),
      });
    }
    const displayName =
      vehicle.nickname ||
      [vehicle.year, vehicle.make, vehicle.model].filter(Boolean).join(' ') ||
      'your vehicle';
    await this.notificationHandler.notifyReceiptProcessed({
      userId,
      userName,
      userEmail,
      vehicleName: displayName,
      recordType,
      recordId,
      vehicleId: vehicle.id,
      extractedData,
    });
    return { ...baseResult, vehicleId: vehicle.id, recordId };
  }
  // Multiple vehicles: stash the extracted data and let the user choose.
  const pendingAssociation = await this.repository.insertPendingAssociation({
    userId,
    recordType,
    extractedData,
    documentId: null,
  });
  await this.notificationHandler.notifyPendingVehicleSelection({
    userId,
    userName,
    userEmail,
    recordType,
    pendingAssociationId: pendingAssociation.id,
    extractedData,
  });
  return { ...baseResult, pendingAssociationId: pendingAssociation.id };
}
// ========================
// Public Resolution API
// ========================
/**
* Resolve a pending vehicle association by creating the record with the selected vehicle.
* Called from the user-facing API when a multi-vehicle user picks a vehicle.
*/
async resolveAssociation(
associationId: string,
vehicleId: string,
userId: string
): Promise<{ recordId: string; recordType: EmailRecordType }> {
const association = await this.repository.getPendingAssociationById(associationId);
if (!association) {
throw new Error('Pending association not found');
}
if (association.userId !== userId) {
throw new Error('Unauthorized');
}
if (association.status !== 'pending') {
throw new Error('Association already resolved');
}
// Verify vehicle belongs to user
const vehicles = await this.vehiclesRepository.findByUserId(userId);
const vehicle = vehicles.find(v => v.id === vehicleId);
if (!vehicle) {
throw new Error('Vehicle not found');
}
// Create the record
const recordId = await this.createRecord(userId, vehicleId, association.recordType, association.extractedData);
// Mark as resolved
await this.repository.resolvePendingAssociation(associationId, 'resolved');
logger.info('Pending association resolved', { associationId, vehicleId, userId, recordType: association.recordType, recordId });
return { recordId, recordType: association.recordType };
}
/**
* Dismiss a pending vehicle association without creating a record.
*/
async dismissAssociation(associationId: string, userId: string): Promise<void> {
const association = await this.repository.getPendingAssociationById(associationId);
if (!association) {
throw new Error('Pending association not found');
}
if (association.userId !== userId) {
throw new Error('Unauthorized');
}
if (association.status !== 'pending') {
throw new Error('Association already resolved');
}
await this.repository.resolvePendingAssociation(associationId, 'expired');
logger.info('Pending association dismissed', { associationId, userId });
}
// ========================
// Record Creation
// ========================
/**
* Create a fuel log or maintenance record from extracted receipt data.
* Returns the created record ID.
*/
private async createRecord(
userId: string,
vehicleId: string,
recordType: EmailRecordType,
extractedData: ExtractedReceiptData
): Promise<string> {
if (recordType === 'fuel_log') {
return this.createFuelLogRecord(userId, vehicleId, extractedData);
}
return this.createMaintenanceRecord(userId, vehicleId, extractedData);
}
/**
 * Map extracted receipt data to EnhancedCreateFuelLogRequest and create the
 * fuel log. Missing gallons default to 0; a missing price-per-gallon is
 * derived from total / gallons when both are available, otherwise 0.
 */
private async createFuelLogRecord(
  userId: string,
  vehicleId: string,
  data: ExtractedReceiptData
): Promise<string> {
  const fuelUnits = data.gallons ?? 0;
  const costPerUnit =
    data.pricePerGallon ??
    (data.total && fuelUnits > 0 ? data.total / fuelUnits : 0);
  const payload: EnhancedCreateFuelLogRequest = {
    vehicleId,
    dateTime: data.date || new Date().toISOString(),
    fuelType: this.mapFuelType(data.fuelType),
    fuelUnits,
    costPerUnit,
    odometerReading: data.odometerReading ?? undefined,
    locationData: data.vendor ? { stationName: data.vendor } : undefined,
    notes: 'Created from emailed receipt',
  };
  logger.info('Creating fuel log from email receipt', { userId, vehicleId, fuelUnits, costPerUnit });
  const created = await this.fuelLogsService.createFuelLog(payload, userId);
  return created.id;
}
/**
 * Map extracted receipt data to CreateMaintenanceRecordRequest and create
 * the maintenance record. Category/subtypes come from the OCR mappers;
 * the date falls back to today (YYYY-MM-DD) when OCR found none.
 */
private async createMaintenanceRecord(
  userId: string,
  vehicleId: string,
  data: ExtractedReceiptData
): Promise<string> {
  const category = this.mapMaintenanceCategory(data.category);
  const subtypes = this.resolveMaintenanceSubtypes(category, data.subtypes);
  const notes = data.description
    ? `${data.description}\n\nCreated from emailed receipt`
    : 'Created from emailed receipt';
  const record = await this.maintenanceService.createRecord(userId, {
    vehicleId,
    category,
    subtypes,
    date: data.date || new Date().toISOString().split('T')[0],
    odometerReading: data.odometerReading ?? undefined,
    cost: data.total ?? undefined,
    shopName: data.shopName || data.vendor || undefined,
    notes,
  });
  logger.info('Created maintenance record from email receipt', { userId, vehicleId, recordId: record.id, category });
  return record.id;
}
/**
 * Map an OCR fuel type string to the FuelType enum. Defaults to gasoline.
 *
 * Recognizes:
 * - diesel keywords and pump grade labels '#1' / '#2' -> DIESEL
 * - 'electric' anywhere, or the standalone token 'ev' -> ELECTRIC
 *
 * Fix: 'ev' is now matched as a whole word. The previous
 * `normalized.includes('ev')` misfired on any string containing the
 * letters "ev" (e.g. "level", "rev").
 */
private mapFuelType(fuelTypeStr: string | null): FuelType {
  if (!fuelTypeStr) return FuelType.GASOLINE;
  const normalized = fuelTypeStr.toLowerCase().trim();
  if (normalized.includes('diesel') || normalized === '#1' || normalized === '#2') {
    return FuelType.DIESEL;
  }
  if (normalized.includes('electric') || /\bev\b/.test(normalized)) {
    return FuelType.ELECTRIC;
  }
  return FuelType.GASOLINE;
}
/**
 * Map an OCR category string onto a MaintenanceCategory.
 * Keyword match: 'repair' wins first, then 'performance'/'upgrade';
 * anything else (including null) becomes routine_maintenance.
 */
private mapMaintenanceCategory(categoryStr: string | null): MaintenanceCategory {
  const normalized = (categoryStr ?? '').toLowerCase().trim();
  if (normalized.includes('repair')) return 'repair';
  if (normalized.includes('performance') || normalized.includes('upgrade')) {
    return 'performance_upgrade';
  }
  return 'routine_maintenance';
}
/**
 * Validate and resolve maintenance subtypes.
 *
 * Resolution order:
 * 1. OCR subtypes that pass validateSubtypes for the category are used as-is.
 * 2. Otherwise OCR subtypes are matched case-insensitively against the
 *    category's valid options; any matches are used.
 * 3. Otherwise fall back to the category's first valid subtype.
 *
 * Fix: the fallback previously did `[defaults[0] as string]`, which yields
 * `[undefined]` for a category with no subtype options; it now returns an
 * empty array in that case.
 */
private resolveMaintenanceSubtypes(
  category: MaintenanceCategory,
  ocrSubtypes: string[] | null
): string[] {
  if (ocrSubtypes && ocrSubtypes.length > 0) {
    if (validateSubtypes(category, ocrSubtypes)) {
      return ocrSubtypes;
    }
    // Attempt to match OCR subtypes against valid options (case-insensitive).
    const validOptions = getSubtypesForCategory(category);
    const matched = ocrSubtypes
      .map(s => validOptions.find(v => v.toLowerCase() === s.toLowerCase().trim()))
      .filter((v): v is string => v !== undefined);
    if (matched.length > 0) return matched;
  }
  // Default to the first subtype of the category, if any exist.
  const defaults = getSubtypesForCategory(category);
  const first = defaults[0];
  return first !== undefined ? [first] : [];
}
// ========================
// Error Handling & Retries
// ========================
/**
 * Record a pipeline failure for an email: bump the retry count and either
 * leave it eligible for retry or, once MAX_RETRY_COUNT is reached, mark it
 * permanently failed and send a best-effort failure notification.
 */
private async handleProcessingError(
  emailId: string,
  senderEmail: string,
  _subject: string | null,
  error: unknown
): Promise<void> {
  const errorMessage = error instanceof Error ? error.message : String(error);
  logger.error('Email processing pipeline error', { emailId, error: errorMessage });
  // Fetch the queue entry for the current retry count and (optional) userId.
  const queueEntry = await this.repository.getQueueEntry(emailId);
  const newRetryCount = (queueEntry?.retryCount || 0) + 1;
  if (newRetryCount < MAX_RETRY_COUNT) {
    // Still under the retry budget: store the error and bumped count.
    await this.repository.updateQueueStatus(emailId, 'failed', {
      errorMessage,
      retryCount: newRetryCount,
    });
    logger.info('Email queued for retry', {
      emailId,
      retryCount: newRetryCount,
      maxRetries: MAX_RETRY_COUNT,
    });
    return;
  }
  // Retry budget exhausted: record the terminal failure.
  await this.repository.updateQueueStatus(emailId, 'failed', {
    errorMessage: `Max retries (${MAX_RETRY_COUNT}) exceeded. Last error: ${errorMessage}`,
    retryCount: newRetryCount,
  });
  // Best-effort notification (email + in-app when userId is known);
  // notification failure is logged but never rethrown.
  await this.notificationHandler.notifyProcessingFailure({
    userId: queueEntry?.userId,
    userEmail: senderEmail,
    errorReason: errorMessage,
  }).catch(notifyErr => {
    logger.error('Failed to send failure notification', {
      emailId,
      error: notifyErr instanceof Error ? notifyErr.message : String(notifyErr),
    });
  });
}
/**
 * Reject an email whose sender address does not map to a registered user:
 * mark the queue entry failed and send a best-effort rejection notice.
 */
private async handleUnregisteredSender(
  emailId: string,
  senderEmail: string
): Promise<void> {
  logger.info('Unregistered sender rejected', { emailId, senderEmail });
  await this.repository.updateQueueStatus(emailId, 'failed', {
    errorMessage: 'Sender email is not registered with MotoVaultPro',
  });
  // Notification failure is logged but never propagated.
  try {
    await this.notificationHandler.notifyUnregisteredSender(senderEmail);
  } catch (error) {
    logger.error('Failed to send unregistered sender notification', {
      emailId,
      error: error instanceof Error ? error.message : String(error),
    });
  }
}
/**
 * Handle an email that carried no usable attachments: mark the queue entry
 * failed with a message listing supported types, then send a best-effort
 * notification to the user.
 */
private async handleNoValidAttachments(
  emailId: string,
  userId: string,
  userName: string,
  userEmail: string
): Promise<void> {
  logger.info('No valid attachments found', { emailId });
  await this.repository.updateQueueStatus(emailId, 'failed', {
    errorMessage: 'No valid attachments found. Supported types: PDF, PNG, JPG, JPEG, HEIC (max 10MB each)',
  });
  // Notification failure is logged but never propagated.
  try {
    await this.notificationHandler.notifyNoValidAttachments(userId, userName, userEmail);
  } catch (error) {
    logger.error('Failed to send no-attachments notification', {
      emailId,
      error: error instanceof Error ? error.message : String(error),
    });
  }
}
/**
 * Handle the case where OCR extraction failed for every attachment: mark the
 * queue entry failed with the given reason and send a best-effort
 * notification to the user.
 */
private async handleOcrFailure(
  emailId: string,
  userId: string,
  userName: string,
  userEmail: string,
  reason: string
): Promise<void> {
  logger.info('OCR extraction failed for all attachments', { emailId, reason });
  await this.repository.updateQueueStatus(emailId, 'failed', {
    errorMessage: reason,
  });
  // Notification failure is logged but never propagated.
  try {
    await this.notificationHandler.notifyOcrFailure(userId, userName, userEmail, reason);
  } catch (error) {
    logger.error('Failed to send OCR failure notification', {
      emailId,
      error: error instanceof Error ? error.message : String(error),
    });
  }
}
}

View File

@@ -1,114 +0,0 @@
/**
* @ai-summary TypeScript types for the email ingestion feature
* @ai-context Covers database records, status enums, and Resend webhook payloads
*/
// ========================
// Status Enums
// ========================
/** Processing lifecycle of an inbound email in the ingestion queue. */
export type EmailIngestionStatus = 'pending' | 'processing' | 'completed' | 'failed';
/** Lifecycle of a vehicle association awaiting user selection. */
export type PendingAssociationStatus = 'pending' | 'resolved' | 'expired';
/** Kind of record an emailed receipt produces. */
export type EmailRecordType = 'fuel_log' | 'maintenance_record';
// ========================
// Receipt Classification
// ========================
/** Verdict label produced by the email-text and OCR receipt classifiers. */
export type ReceiptClassificationType = 'fuel' | 'maintenance' | 'unclassified';
/** Classifier verdict paired with a numeric confidence score. */
export interface ClassificationResult {
  type: ReceiptClassificationType;
  // Confidence score; scale is not defined here — presumably 0..1, confirm in classifier.
  confidence: number;
}
// ========================
// Database Records
// ========================
/**
 * Row in the email ingestion queue: one entry per inbound email, tracking
 * processing status, retry count, and the final processing result.
 */
export interface EmailIngestionQueueRecord {
  id: string;
  // Email identifier used to address the queue entry — presumably the
  // provider-side (Resend) email_id; confirm against the webhook handler.
  emailId: string;
  senderEmail: string;
  // Owning user, resolved from the sender address.
  userId: string;
  receivedAt: string;
  subject: string | null;
  status: EmailIngestionStatus;
  // Populated on successful processing; null until then.
  processingResult: EmailProcessingResult | null;
  errorMessage: string | null;
  // Number of failed processing attempts so far.
  retryCount: number;
  createdAt: string;
  updatedAt: string;
}
/**
 * A processed receipt for a user with multiple vehicles, held until the
 * user picks which vehicle it belongs to (or dismisses it).
 */
export interface PendingVehicleAssociation {
  id: string;
  userId: string;
  recordType: EmailRecordType;
  extractedData: ExtractedReceiptData;
  // Optional linked document; the ingestion service currently inserts null.
  documentId: string | null;
  status: PendingAssociationStatus;
  createdAt: string;
  resolvedAt: string | null;
}
// ========================
// Processing Results
// ========================
/**
 * Outcome of processing one inbound email. Fields not applicable to the
 * path taken (e.g. recordId when a pending association was created instead)
 * are null.
 */
export interface EmailProcessingResult {
  recordType: EmailRecordType;
  vehicleId: string | null;
  recordId: string | null;
  documentId: string | null;
  pendingAssociationId: string | null;
  extractedData: ExtractedReceiptData;
}
/** Normalized receipt fields extracted by OCR; null when not detected. */
export interface ExtractedReceiptData {
  vendor: string | null;
  // Receipt date string as produced by OCR; format is not normalized here.
  date: string | null;
  total: number | null;
  odometerReading: number | null;
  /** Fuel-specific fields */
  gallons: number | null;
  pricePerGallon: number | null;
  fuelType: string | null;
  /** Maintenance-specific fields */
  category: string | null;
  subtypes: string[] | null;
  shopName: string | null;
  description: string | null;
}
// ========================
// Resend Webhook Payloads
// ========================
/** Top-level Resend webhook event envelope */
export interface ResendWebhookEvent {
  // Event name, e.g. 'email.received'.
  type: string;
  created_at: string;
  data: ResendWebhookEventData;
}
/** Resend email.received webhook event data */
export interface ResendWebhookEventData {
  email_id: string;
  from: string;
  to: string[];
  subject: string;
  text: string | null;
  html: string | null;
  created_at: string;
  attachments: ResendEmailAttachment[];
}
/** Attachment metadata from Resend inbound email */
export interface ResendEmailAttachment {
  filename: string;
  content_type: string;
  // Attachment body as a string — presumably base64-encoded; confirm
  // against the Resend inbound email documentation.
  content: string;
}

Some files were not shown because too many files have changed in this diff Show More