From cb619f92866128fa5b1820c4792b28cd7645d990 Mon Sep 17 00:00:00 2001
From: hobokenchicken
Date: Wed, 18 Mar 2026 15:17:56 +0000
Subject: [PATCH] fix(openai): improve Responses API stream robustness and diagnostics

- Implement final buffer flush in streaming path to prevent data loss
- Increase probe response body logging to 500 characters
- Ensure internal metadata is stripped even on final flush
- Fix potential hang when stream ends without explicit [DONE] event
---
 src/providers/openai.rs | 20 ++++++++++++++++++--
 1 file changed, 18 insertions(+), 2 deletions(-)

diff --git a/src/providers/openai.rs b/src/providers/openai.rs
index 88c841a5..9f7708ec 100644
--- a/src/providers/openai.rs
+++ b/src/providers/openai.rs
@@ -907,8 +907,9 @@ impl super::Provider for OpenAIProvider {
                         let status = r.status();
                         let body = r.text().await.unwrap_or_default();
                         if status.is_success() {
-                            tracing::warn!("Responses stream ended prematurely but probe returned 200 OK. Body: {}", body);
-                            Err(AppError::ProviderError(format!("Responses stream ended (server sent 200 OK with body: {})", body)))?;
+                            let preview = if body.len() > 500 { format!("{}...", &body[..500]) } else { body.clone() };
+                            tracing::warn!("Responses stream ended prematurely but probe returned 200 OK. Body: {}", preview);
+                            Err(AppError::ProviderError(format!("Responses stream ended (server sent 200 OK with body: {})", preview)))?;
                         } else {
                             tracing::error!("OpenAI Responses Stream Error Probe ({}): {}", status, body);
                             Err(AppError::ProviderError(format!("OpenAI Responses API error ({}): {}", status, body)))?;
@@ -922,6 +923,21 @@ impl super::Provider for OpenAIProvider {
                     }
                 }
             }
+
+            // Final flush of content_buffer if not empty
+            if !content_buffer.is_empty() {
+                let stripped = Self::strip_internal_metadata(&content_buffer);
+                if !stripped.is_empty() {
+                    yield ProviderStreamChunk {
+                        content: stripped,
+                        reasoning_content: None,
+                        finish_reason: None,
+                        tool_calls: None,
+                        model: model.clone(),
+                        usage: None,
+                    };
+                }
+            }
         };
 
         Ok(Box::pin(stream))