fix(openai): improve Responses API stream robustness and diagnostics
Some checks failed
CI / Check (push) Has been cancelled
CI / Clippy (push) Has been cancelled
CI / Formatting (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Release Build (push) Has been cancelled

- Implement final buffer flush in streaming path to prevent data loss
- Limit probe response body logging to the first 500 characters
- Ensure internal metadata is stripped even on final flush
- Fix potential hang when stream ends without explicit [DONE] event
This commit is contained in:
2026-03-18 15:17:56 +00:00
parent 441270317c
commit cb619f9286

View File

@@ -907,8 +907,9 @@ impl super::Provider for OpenAIProvider {
let status = r.status();
let body = r.text().await.unwrap_or_default();
if status.is_success() {
tracing::warn!("Responses stream ended prematurely but probe returned 200 OK. Body: {}", body);
Err(AppError::ProviderError(format!("Responses stream ended (server sent 200 OK with body: {})", body)))?;
let preview = if body.len() > 500 { format!("{}...", &body[..500]) } else { body.clone() };
tracing::warn!("Responses stream ended prematurely but probe returned 200 OK. Body: {}", preview);
Err(AppError::ProviderError(format!("Responses stream ended (server sent 200 OK with body: {})", preview)))?;
} else {
tracing::error!("OpenAI Responses Stream Error Probe ({}): {}", status, body);
Err(AppError::ProviderError(format!("OpenAI Responses API error ({}): {}", status, body)))?;
@@ -922,6 +923,21 @@ impl super::Provider for OpenAIProvider {
}
}
}
// Final flush of content_buffer if not empty
if !content_buffer.is_empty() {
let stripped = Self::strip_internal_metadata(&content_buffer);
if !stripped.is_empty() {
yield ProviderStreamChunk {
content: stripped,
reasoning_content: None,
finish_reason: None,
tool_calls: None,
model: model.clone(),
usage: None,
};
}
}
};
Ok(Box::pin(stream))