diff --git a/backend/src/routes/import.ts b/backend/src/routes/import.ts index 4727ea7..cc38c89 100644 --- a/backend/src/routes/import.ts +++ b/backend/src/routes/import.ts @@ -28,14 +28,8 @@ function isJsonFile(file: { mimetype: string; originalname: string }): boolean { ); } -const SSE_PAD_TARGET = 4096; - function sseWrite(res: import('express').Response, data: Record<string, unknown>) { - const payload = `data: ${JSON.stringify(data)}\n\n`; - const pad = Math.max(0, SSE_PAD_TARGET - payload.length); - // SSE comment lines (": ...") are ignored by the browser but push - // data past proxy buffer thresholds so each event is delivered immediately. - res.write(pad > 0 ? `: ${' '.repeat(pad)}\n${payload}` : payload); + res.write(`data: ${JSON.stringify(data)}\n\n`); } const router = Router(); diff --git a/backend/src/services/pdfToStatement.ts b/backend/src/services/pdfToStatement.ts index c7fe5a6..2e66b32 100644 --- a/backend/src/services/pdfToStatement.ts +++ b/backend/src/services/pdfToStatement.ts @@ -150,6 +150,10 @@ const LLM_PROGRESS_MAX = 98; const LLM_PROGRESS_RANGE = LLM_PROGRESS_MAX - LLM_PROGRESS_MIN; const THROTTLE_MS = 300; +function yieldToEventLoop(): Promise<void> { + return new Promise<void>(resolve => setImmediate(resolve)); +} + export async function convertPdfToStatementStreaming( buffer: Buffer, onProgress: OnProgress, @@ -163,6 +167,7 @@ export async function convertPdfToStatementStreaming( } onProgress('pdf', 2, 'Извлечение текста из PDF...'); + await yieldToEventLoop(); let text: string; try { @@ -186,6 +191,7 @@ export async function convertPdfToStatementStreaming( } onProgress('pdf', 8, 'Текст извлечён, отправка в LLM...'); + await yieldToEventLoop(); const openai = new OpenAI({ apiKey: config.llmApiKey, @@ -205,7 +211,6 @@ export async function convertPdfToStatementStreaming( stream: true, }); - // Estimate expected output size as ~2x the input PDF text length, clamped const expectedChars = Math.max(2_000, Math.min(text.length * 2, 30_000)); let accumulated = ''; @@ 
-227,11 +232,14 @@ export async function convertPdfToStatementStreaming( ); onProgress('llm', llmProgress, 'Конвертация через LLM...'); lastEmitTime = now; + // Let the event loop flush socket writes to the network + await yieldToEventLoop(); } } } onProgress('llm', LLM_PROGRESS_MAX, 'LLM завершил, обработка результата...'); + await yieldToEventLoop(); const content = accumulated.trim(); if (!content) {