Merge pull request 'fix: yield to event loop after each SSE write to flush socket' (#10) from fix/sse-event-loop-flush into main

Reviewed-on: #10
This commit was merged in pull request #10.
This commit is contained in:
2026-03-14 17:00:51 +00:00
2 changed files with 10 additions and 8 deletions

View File

@@ -28,14 +28,8 @@ function isJsonFile(file: { mimetype: string; originalname: string }): boolean {
);
}
// Historical pad target: events used to be padded with an SSE comment line up
// to this many bytes to defeat proxy buffering. Retained for reference; the
// streaming pipeline now flushes via event-loop yields instead.
const SSE_PAD_TARGET = 4096;

/**
 * Serialize `data` as one Server-Sent Events message ("data: <json>\n\n") and
 * write it to the response exactly once.
 *
 * Fix: the previous body wrote the event twice — once with the comment-padding
 * prefix and once plain — so every progress event was delivered in duplicate.
 * Timely delivery is now the caller's responsibility (it yields to the event
 * loop after each write so the socket flushes).
 */
function sseWrite(res: import('express').Response, data: Record<string, unknown>) {
  res.write(`data: ${JSON.stringify(data)}\n\n`);
}
const router = Router();

View File

@@ -150,6 +150,10 @@ const LLM_PROGRESS_MAX = 98;
const LLM_PROGRESS_RANGE = LLM_PROGRESS_MAX - LLM_PROGRESS_MIN;
const THROTTLE_MS = 300;
/**
 * Defer the caller until the next check phase of the Node event loop,
 * letting pending I/O callbacks (e.g. socket flushes for SSE writes) run
 * before work continues.
 */
function yieldToEventLoop(): Promise<void> {
  return new Promise<void>((resolve) => {
    setImmediate(() => {
      resolve();
    });
  });
}
export async function convertPdfToStatementStreaming(
buffer: Buffer,
onProgress: OnProgress,
@@ -163,6 +167,7 @@ export async function convertPdfToStatementStreaming(
}
onProgress('pdf', 2, 'Извлечение текста из PDF...');
await yieldToEventLoop();
let text: string;
try {
@@ -186,6 +191,7 @@ export async function convertPdfToStatementStreaming(
}
onProgress('pdf', 8, 'Текст извлечён, отправка в LLM...');
await yieldToEventLoop();
const openai = new OpenAI({
apiKey: config.llmApiKey,
@@ -205,7 +211,6 @@ export async function convertPdfToStatementStreaming(
stream: true,
});
// Estimate expected output size as ~2x the input PDF text length, clamped
const expectedChars = Math.max(2_000, Math.min(text.length * 2, 30_000));
let accumulated = '';
@@ -227,11 +232,14 @@ export async function convertPdfToStatementStreaming(
);
onProgress('llm', llmProgress, 'Конвертация через LLM...');
lastEmitTime = now;
// Let the event loop flush socket writes to the network
await yieldToEventLoop();
}
}
}
onProgress('llm', LLM_PROGRESS_MAX, 'LLM завершил, обработка результата...');
await yieldToEventLoop();
const content = accumulated.trim();
if (!content) {