diff --git a/backend/src/features/documents/api/documents.controller.ts b/backend/src/features/documents/api/documents.controller.ts
index 00caa67..df017a8 100644
--- a/backend/src/features/documents/api/documents.controller.ts
+++ b/backend/src/features/documents/api/documents.controller.ts
@@ -272,20 +272,15 @@ export class DocumentsController {
       });
     }
 
-    // Read first 4100 bytes to detect file type via magic bytes
+    // Collect ALL file chunks first (breaking early from async iterator corrupts stream state)
     const chunks: Buffer[] = [];
-    let totalBytes = 0;
-    const targetBytes = 4100;
-
     for await (const chunk of mp.file) {
       chunks.push(chunk);
-      totalBytes += chunk.length;
-      if (totalBytes >= targetBytes) {
-        break;
-      }
     }
+    const fullBuffer = Buffer.concat(chunks);
 
-    const headerBuffer = Buffer.concat(chunks);
+    // Use first 4100 bytes for file type detection via magic bytes
+    const headerBuffer = fullBuffer.subarray(0, Math.min(4100, fullBuffer.length));
 
     // Validate actual file content using magic bytes
     const detectedType = await FileType.fromBuffer(headerBuffer);
@@ -341,15 +336,9 @@ export class DocumentsController {
 
     const counter = new CountingStream();
 
-    // Create a new readable stream from the header buffer + remaining file chunks
-    const headerStream = Readable.from([headerBuffer]);
-    const remainingStream = mp.file;
-
-    // Pipe header first, then remaining content through counter
-    headerStream.pipe(counter, { end: false });
-    headerStream.on('end', () => {
-      remainingStream.pipe(counter);
-    });
+    // Create readable stream from the complete buffer and pipe through counter
+    const fileStream = Readable.from([fullBuffer]);
+    fileStream.pipe(counter);
 
     const storage = getStorageService();
     const bucket = 'documents';