diff --git a/src/providers/openai.rs b/src/providers/openai.rs
index 88c841a5..9f7708ec 100644
--- a/src/providers/openai.rs
+++ b/src/providers/openai.rs
@@ -907,8 +907,9 @@ impl super::Provider for OpenAIProvider {
                         let status = r.status();
                         let body = r.text().await.unwrap_or_default();
                         if status.is_success() {
-                            tracing::warn!("Responses stream ended prematurely but probe returned 200 OK. Body: {}", body);
-                            Err(AppError::ProviderError(format!("Responses stream ended (server sent 200 OK with body: {})", body)))?;
+                            let preview = if body.len() > 500 { format!("{}...", &body[..500]) } else { body.clone() };
+                            tracing::warn!("Responses stream ended prematurely but probe returned 200 OK. Body: {}", preview);
+                            Err(AppError::ProviderError(format!("Responses stream ended (server sent 200 OK with body: {})", preview)))?;
                         } else {
                             tracing::error!("OpenAI Responses Stream Error Probe ({}): {}", status, body);
                             Err(AppError::ProviderError(format!("OpenAI Responses API error ({}): {}", status, body)))?;
@@ -922,6 +923,21 @@ impl super::Provider for OpenAIProvider {
                 }
             }
         }
+
+        // Final flush of content_buffer if not empty
+        if !content_buffer.is_empty() {
+            let stripped = Self::strip_internal_metadata(&content_buffer);
+            if !stripped.is_empty() {
+                yield ProviderStreamChunk {
+                    content: stripped,
+                    reasoning_content: None,
+                    finish_reason: None,
+                    tool_calls: None,
+                    model: model.clone(),
+                    usage: None,
+                };
+            }
+        }
     };
 
     Ok(Box::pin(stream))
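
Aside (illustrative only, not part of the patch): a minimal standalone sketch of the body-preview truncation introduced above. The helper name `preview_body` and the `main` driver are hypothetical. It differs from the inlined `&body[..500]` in one respect: slicing by byte index panics if byte 500 is not a UTF-8 char boundary, so the sketch backs up to the nearest boundary before cutting.

/// Hypothetical helper: truncate a response body to a short preview for logs
/// and error messages. Backs up to the nearest UTF-8 char boundary so the
/// slice cannot panic on multibyte content.
fn preview_body(body: &str, max_bytes: usize) -> String {
    if body.len() <= max_bytes {
        return body.to_string();
    }
    let mut cut = max_bytes;
    while !body.is_char_boundary(cut) {
        cut -= 1;
    }
    format!("{}...", &body[..cut])
}

fn main() {
    // ASCII body longer than the limit: 500 bytes kept, ellipsis appended.
    let ascii = "x".repeat(600);
    assert_eq!(preview_body(&ascii, 500).len(), 503);

    // Multibyte content whose char boundaries do not line up with byte 500:
    // the cut moves back to byte 498 instead of panicking.
    let cjk = "日".repeat(200); // 3 bytes per char, 600 bytes total
    let p = preview_body(&cjk, 500);
    assert!(p.ends_with("..."));
    assert_eq!(p.len(), 501); // 498 bytes of content + 3 bytes of "..."
    println!("previews ok");
}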