5 Commits

Author SHA1 Message Date
vakabunga
db4d5e4d00 fix: yield to event loop after each SSE write to flush socket
The for-await loop over OpenAI stream chunks runs synchronously when
data is buffered, causing res.write() calls to queue without flushing.
Add setImmediate yield after each progress event so the event loop
reaches its I/O phase and pushes data to the network immediately.
2026-03-14 19:59:22 +03:00
358fcaeff5 Merge pull request 'fix: disable gzip and pad SSE events to prevent proxy buffering' (#9) from fix/sse-gzip-buffering into main
Reviewed-on: #9
2026-03-14 16:46:07 +00:00
vakabunga
67fed57118 fix: disable gzip and pad SSE events to prevent proxy buffering
Add gzip off to Nginx import location — the global gzip on was
buffering text/event-stream responses. Pad each SSE event to 4 KB
with comment lines to push past any remaining proxy buffer threshold.
2026-03-14 19:45:33 +03:00
45a6f3d374 Merge pull request 'fix: eliminate SSE buffering through Nginx proxy' (#8) from fix/sse-proxy-buffering into main
Reviewed-on: #8
2026-03-14 14:31:16 +00:00
vakabunga
aaf8cacf75 fix: eliminate SSE buffering through Nginx proxy
Add 2 KB padding comment after headers to push past proxy buffer
threshold, enable TCP_NODELAY on the socket, and remove erroneous
chunked_transfer_encoding off from Nginx that caused full response
buffering.
2026-03-14 17:30:52 +03:00
3 changed files with 11 additions and 2 deletions

View File

@@ -60,6 +60,7 @@ router.post(
res.setHeader('Cache-Control', 'no-cache');
res.setHeader('Connection', 'keep-alive');
res.setHeader('X-Accel-Buffering', 'no');
res.socket?.setNoDelay(true);
res.flushHeaders();
try {

View File

@@ -150,6 +150,10 @@ const LLM_PROGRESS_MAX = 98;
const LLM_PROGRESS_RANGE = LLM_PROGRESS_MAX - LLM_PROGRESS_MIN;
const THROTTLE_MS = 300;
/**
 * Resolves on the next "check" phase of the Node event loop.
 * Awaiting this between SSE progress writes lets the loop reach its
 * I/O phase so queued socket data is flushed to the network.
 */
function yieldToEventLoop(): Promise<void> {
  return new Promise<void>((resolve) => {
    setImmediate(() => resolve());
  });
}
export async function convertPdfToStatementStreaming(
buffer: Buffer,
onProgress: OnProgress,
@@ -163,6 +167,7 @@ export async function convertPdfToStatementStreaming(
}
onProgress('pdf', 2, 'Извлечение текста из PDF...');
await yieldToEventLoop();
let text: string;
try {
@@ -186,6 +191,7 @@ export async function convertPdfToStatementStreaming(
}
onProgress('pdf', 8, 'Текст извлечён, отправка в LLM...');
await yieldToEventLoop();
const openai = new OpenAI({
apiKey: config.llmApiKey,
@@ -205,7 +211,6 @@ export async function convertPdfToStatementStreaming(
stream: true,
});
// Estimate expected output size as ~2x the input PDF text length, clamped
const expectedChars = Math.max(2_000, Math.min(text.length * 2, 30_000));
let accumulated = '';
@@ -227,11 +232,14 @@ export async function convertPdfToStatementStreaming(
);
onProgress('llm', llmProgress, 'Конвертация через LLM...');
lastEmitTime = now;
// Let the event loop flush socket writes to the network
await yieldToEventLoop();
}
}
}
onProgress('llm', LLM_PROGRESS_MAX, 'LLM завершил, обработка результата...');
await yieldToEventLoop();
const content = accumulated.trim();
if (!content) {

View File

@@ -15,7 +15,7 @@ server {
proxy_connect_timeout 5s;
proxy_read_timeout 600s;
proxy_buffering off;
chunked_transfer_encoding off;
gzip off;
client_max_body_size 15m;
}