fix(openai): use structured input and add probe for Responses API
Some checks failed
CI / Check (push) Has been cancelled
CI / Clippy (push) Has been cancelled
CI / Formatting (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Release Build (push) Has been cancelled

Updated the OpenAI Responses API request construction to send a structured `input` (an array of role/content message objects) instead of a concatenated string, for better compatibility. Added a proactive error probe to chat_responses_stream: on a stream failure it re-sends the request non-streaming to capture and log the API's actual error body.
This commit is contained in:
2026-03-06 20:26:14 +00:00
parent 4be23629d8
commit a243a3987d

View File

@@ -197,27 +197,24 @@ impl super::Provider for OpenAIProvider {
}
async fn chat_responses(&self, request: UnifiedRequest) -> Result<ProviderResponse, AppError> {
// Build a simple `input` string by concatenating message parts.
// Build a structured input for the Responses API.
let messages_json = helpers::messages_to_openai_json(&request.messages).await?;
let mut inputs: Vec<String> = Vec::new();
let mut input_parts = Vec::new();
for m in &messages_json {
let role = m["role"].as_str().unwrap_or("");
let parts = m.get("content").and_then(|c| c.as_array()).cloned().unwrap_or_default();
let mut text_parts = Vec::new();
for p in parts {
if let Some(t) = p.get("text").and_then(|v| v.as_str()) {
text_parts.push(t.to_string());
let role = m["role"].as_str().unwrap_or("user");
let content = m.get("content").cloned().unwrap_or(serde_json::json!(""));
input_parts.push(serde_json::json!({
"role": role,
"content": content
}));
}
}
inputs.push(format!("{}: {}", role, text_parts.join("")));
}
let input_text = inputs.join("\n");
let resp = self
.client
.post(format!("{}/responses", self.config.base_url))
.header("Authorization", format!("Bearer {}", self.api_key))
.json(&serde_json::json!({ "model": request.model, "input": input_text }))
.json(&serde_json::json!({ "model": request.model, "input": input_parts }))
.send()
.await
.map_err(|e| AppError::ProviderError(e.to_string()))?;
@@ -400,31 +397,30 @@ impl super::Provider for OpenAIProvider {
&self,
request: UnifiedRequest,
) -> Result<BoxStream<'static, Result<ProviderStreamChunk, AppError>>, AppError> {
// Build a simple `input` string by concatenating message parts.
// Build a structured input for the Responses API.
let messages_json = helpers::messages_to_openai_json(&request.messages).await?;
let mut inputs: Vec<String> = Vec::new();
let mut input_parts = Vec::new();
for m in &messages_json {
let role = m["role"].as_str().unwrap_or("");
let parts = m.get("content").and_then(|c| c.as_array()).cloned().unwrap_or_default();
let mut text_parts = Vec::new();
for p in parts {
if let Some(t) = p.get("text").and_then(|v| v.as_str()) {
text_parts.push(t.to_string());
let role = m["role"].as_str().unwrap_or("user");
let content = m.get("content").cloned().unwrap_or(serde_json::json!(""));
input_parts.push(serde_json::json!({
"role": role,
"content": content
}));
}
}
inputs.push(format!("{}: {}", role, text_parts.join("")));
}
let input_text = inputs.join("\n");
let body = serde_json::json!({
"model": request.model,
"input": input_text,
"input": input_parts,
"stream": true
});
let url = format!("{}/responses", self.config.base_url);
let api_key = self.api_key.clone();
let model = request.model.clone();
let probe_client = self.client.clone();
let probe_body = body.clone();
let es = reqwest_eventsource::EventSource::new(
self.client
@@ -498,7 +494,29 @@ impl super::Provider for OpenAIProvider {
}
Ok(_) => continue,
Err(e) => {
Err(AppError::ProviderError(format!("Responses stream error: {}", e)))?;
// Attempt to probe for the actual error body
let probe_resp = probe_client
.post(&url)
.header("Authorization", format!("Bearer {}", api_key))
.json(&probe_body)
.send()
.await;
match probe_resp {
Ok(r) if !r.status().is_success() => {
let status = r.status();
let error_body = r.text().await.unwrap_or_default();
tracing::error!("OpenAI Responses Stream Error Probe ({}): {}", status, error_body);
Err(AppError::ProviderError(format!("OpenAI Responses API error ({}): {}", status, error_body)))?;
}
Ok(_) => {
Err(AppError::ProviderError(format!("Responses stream error (probe returned 200): {}", e)))?;
}
Err(probe_err) => {
tracing::error!("OpenAI Responses Stream Error Probe failed: {}", probe_err);
Err(AppError::ProviderError(format!("Responses stream error (probe failed: {}): {}", probe_err, e)))?;
}
}
}
}
}