Merge pull request 'fix: add dynamic timeout for document uploads (#33)' (#34) from issue-33-document-upload-timeout into main
All checks were successful
Deploy to Staging / Build Images (push) Successful in 25s
Deploy to Staging / Deploy to Staging (push) Successful in 28s
Deploy to Staging / Verify Staging (push) Successful in 7s
Deploy to Staging / Notify Staging Ready (push) Successful in 6s
Deploy to Staging / Notify Staging Failure (push) Has been skipped

Reviewed-on: #34
This commit was merged in pull request #34.
This commit is contained in:
2026-01-15 02:33:20 +00:00
2 changed files with 20 additions and 18 deletions

View File

@@ -272,20 +272,15 @@ export class DocumentsController {
});
}
// Read first 4100 bytes to detect file type via magic bytes
// Collect ALL file chunks first (breaking early from async iterator corrupts stream state)
const chunks: Buffer[] = [];
let totalBytes = 0;
const targetBytes = 4100;
for await (const chunk of mp.file) {
chunks.push(chunk);
totalBytes += chunk.length;
if (totalBytes >= targetBytes) {
break;
}
}
const fullBuffer = Buffer.concat(chunks);
const headerBuffer = Buffer.concat(chunks);
// Use first 4100 bytes for file type detection via magic bytes
const headerBuffer = fullBuffer.subarray(0, Math.min(4100, fullBuffer.length));
// Validate actual file content using magic bytes
const detectedType = await FileType.fromBuffer(headerBuffer);
@@ -341,15 +336,9 @@ export class DocumentsController {
const counter = new CountingStream();
// Create a new readable stream from the header buffer + remaining file chunks
const headerStream = Readable.from([headerBuffer]);
const remainingStream = mp.file;
// Pipe header first, then remaining content through counter
headerStream.pipe(counter, { end: false });
headerStream.on('end', () => {
remainingStream.pipe(counter);
});
// Create readable stream from the complete buffer and pipe through counter
const fileStream = Readable.from([fullBuffer]);
fileStream.pipe(counter);
const storage = getStorageService();
const bucket = 'documents';

View File

@@ -1,6 +1,17 @@
import { apiClient } from '../../../core/api/client';
import type { CreateDocumentRequest, DocumentRecord, UpdateDocumentRequest } from '../types/documents.types';
/**
 * Compute the request timeout (in milliseconds) for a document upload
 * from the file's size. Every upload gets a 30-second floor, and each
 * megabyte of payload adds 10 more seconds so large files on slow
 * connections are not cut off prematurely.
 */
function calculateUploadTimeout(file: File): number {
  const baseTimeoutMs = 30000; // floor applied to every upload, even empty files
  const msPerMegabyte = 10000; // extra allowance per MB of payload
  const bytesPerMegabyte = 1024 * 1024;
  const sizeInMegabytes = file.size / bytesPerMegabyte;
  return Math.round(baseTimeoutMs + sizeInMegabytes * msPerMegabyte);
}
export const documentsApi = {
async list(params?: { vehicleId?: string; type?: string; expiresBefore?: string }) {
const res = await apiClient.get<DocumentRecord[]>('/documents', { params });
@@ -26,6 +37,7 @@ export const documentsApi = {
form.append('file', file);
const res = await apiClient.post<DocumentRecord>(`/documents/${id}/upload`, form, {
headers: { 'Content-Type': 'multipart/form-data' },
timeout: calculateUploadTimeout(file),
});
return res.data;
},
@@ -34,6 +46,7 @@ export const documentsApi = {
form.append('file', file);
const res = await apiClient.post<DocumentRecord>(`/documents/${id}/upload`, form, {
headers: { 'Content-Type': 'multipart/form-data' },
timeout: calculateUploadTimeout(file),
onUploadProgress: (evt) => {
if (evt.total) {
const pct = Math.round((evt.loaded / evt.total) * 100);