fix(openai): improve Responses API stream robustness and diagnostics
- Implement a final flush of the content buffer in the streaming path to prevent data loss when the stream ends
- Truncate probe response body logging to a 500-character preview
- Ensure internal metadata is stripped even on the final flush
- Fix a potential hang when the stream ends without an explicit [DONE] event
This commit is contained in:
@@ -907,8 +907,9 @@ impl super::Provider for OpenAIProvider {
|
||||
let status = r.status();
|
||||
let body = r.text().await.unwrap_or_default();
|
||||
if status.is_success() {
|
||||
tracing::warn!("Responses stream ended prematurely but probe returned 200 OK. Body: {}", body);
|
||||
Err(AppError::ProviderError(format!("Responses stream ended (server sent 200 OK with body: {})", body)))?;
|
||||
let preview = if body.len() > 500 { format!("{}...", &body[..500]) } else { body.clone() };
|
||||
tracing::warn!("Responses stream ended prematurely but probe returned 200 OK. Body: {}", preview);
|
||||
Err(AppError::ProviderError(format!("Responses stream ended (server sent 200 OK with body: {})", preview)))?;
|
||||
} else {
|
||||
tracing::error!("OpenAI Responses Stream Error Probe ({}): {}", status, body);
|
||||
Err(AppError::ProviderError(format!("OpenAI Responses API error ({}): {}", status, body)))?;
|
||||
@@ -922,6 +923,21 @@ impl super::Provider for OpenAIProvider {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Final flush of content_buffer if not empty
|
||||
if !content_buffer.is_empty() {
|
||||
let stripped = Self::strip_internal_metadata(&content_buffer);
|
||||
if !stripped.is_empty() {
|
||||
yield ProviderStreamChunk {
|
||||
content: stripped,
|
||||
reasoning_content: None,
|
||||
finish_reason: None,
|
||||
tool_calls: None,
|
||||
model: model.clone(),
|
||||
usage: None,
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Box::pin(stream))
|
||||
|
||||
Reference in New Issue
Block a user