fix(openai): use structured input and add probe for Responses API
Some checks failed
CI / Check (push) Has been cancelled
CI / Clippy (push) Has been cancelled
CI / Formatting (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Release Build (push) Has been cancelled

Updated the OpenAI Responses API requests to use a structured `input` format (an array of role/content message objects) for better compatibility. Added a proactive error probe to `chat_responses_stream` that, when the event stream fails, re-sends the request to capture and log the API's actual error body.
This commit is contained in:
2026-03-06 20:26:14 +00:00
parent 4be23629d8
commit a243a3987d

View File

@@ -197,27 +197,24 @@ impl super::Provider for OpenAIProvider {
} }
async fn chat_responses(&self, request: UnifiedRequest) -> Result<ProviderResponse, AppError> { async fn chat_responses(&self, request: UnifiedRequest) -> Result<ProviderResponse, AppError> {
// Build a simple `input` string by concatenating message parts. // Build a structured input for the Responses API.
let messages_json = helpers::messages_to_openai_json(&request.messages).await?; let messages_json = helpers::messages_to_openai_json(&request.messages).await?;
let mut inputs: Vec<String> = Vec::new(); let mut input_parts = Vec::new();
for m in &messages_json { for m in &messages_json {
let role = m["role"].as_str().unwrap_or(""); let role = m["role"].as_str().unwrap_or("user");
let parts = m.get("content").and_then(|c| c.as_array()).cloned().unwrap_or_default(); let content = m.get("content").cloned().unwrap_or(serde_json::json!(""));
let mut text_parts = Vec::new();
for p in parts { input_parts.push(serde_json::json!({
if let Some(t) = p.get("text").and_then(|v| v.as_str()) { "role": role,
text_parts.push(t.to_string()); "content": content
}));
} }
}
inputs.push(format!("{}: {}", role, text_parts.join("")));
}
let input_text = inputs.join("\n");
let resp = self let resp = self
.client .client
.post(format!("{}/responses", self.config.base_url)) .post(format!("{}/responses", self.config.base_url))
.header("Authorization", format!("Bearer {}", self.api_key)) .header("Authorization", format!("Bearer {}", self.api_key))
.json(&serde_json::json!({ "model": request.model, "input": input_text })) .json(&serde_json::json!({ "model": request.model, "input": input_parts }))
.send() .send()
.await .await
.map_err(|e| AppError::ProviderError(e.to_string()))?; .map_err(|e| AppError::ProviderError(e.to_string()))?;
@@ -400,31 +397,30 @@ impl super::Provider for OpenAIProvider {
&self, &self,
request: UnifiedRequest, request: UnifiedRequest,
) -> Result<BoxStream<'static, Result<ProviderStreamChunk, AppError>>, AppError> { ) -> Result<BoxStream<'static, Result<ProviderStreamChunk, AppError>>, AppError> {
// Build a simple `input` string by concatenating message parts. // Build a structured input for the Responses API.
let messages_json = helpers::messages_to_openai_json(&request.messages).await?; let messages_json = helpers::messages_to_openai_json(&request.messages).await?;
let mut inputs: Vec<String> = Vec::new(); let mut input_parts = Vec::new();
for m in &messages_json { for m in &messages_json {
let role = m["role"].as_str().unwrap_or(""); let role = m["role"].as_str().unwrap_or("user");
let parts = m.get("content").and_then(|c| c.as_array()).cloned().unwrap_or_default(); let content = m.get("content").cloned().unwrap_or(serde_json::json!(""));
let mut text_parts = Vec::new();
for p in parts { input_parts.push(serde_json::json!({
if let Some(t) = p.get("text").and_then(|v| v.as_str()) { "role": role,
text_parts.push(t.to_string()); "content": content
}));
} }
}
inputs.push(format!("{}: {}", role, text_parts.join("")));
}
let input_text = inputs.join("\n");
let body = serde_json::json!({ let body = serde_json::json!({
"model": request.model, "model": request.model,
"input": input_text, "input": input_parts,
"stream": true "stream": true
}); });
let url = format!("{}/responses", self.config.base_url); let url = format!("{}/responses", self.config.base_url);
let api_key = self.api_key.clone(); let api_key = self.api_key.clone();
let model = request.model.clone(); let model = request.model.clone();
let probe_client = self.client.clone();
let probe_body = body.clone();
let es = reqwest_eventsource::EventSource::new( let es = reqwest_eventsource::EventSource::new(
self.client self.client
@@ -498,7 +494,29 @@ impl super::Provider for OpenAIProvider {
} }
Ok(_) => continue, Ok(_) => continue,
Err(e) => { Err(e) => {
Err(AppError::ProviderError(format!("Responses stream error: {}", e)))?; // Attempt to probe for the actual error body
let probe_resp = probe_client
.post(&url)
.header("Authorization", format!("Bearer {}", api_key))
.json(&probe_body)
.send()
.await;
match probe_resp {
Ok(r) if !r.status().is_success() => {
let status = r.status();
let error_body = r.text().await.unwrap_or_default();
tracing::error!("OpenAI Responses Stream Error Probe ({}): {}", status, error_body);
Err(AppError::ProviderError(format!("OpenAI Responses API error ({}): {}", status, error_body)))?;
}
Ok(_) => {
Err(AppError::ProviderError(format!("Responses stream error (probe returned 200): {}", e)))?;
}
Err(probe_err) => {
tracing::error!("OpenAI Responses Stream Error Probe failed: {}", probe_err);
Err(AppError::ProviderError(format!("Responses stream error (probe failed: {}): {}", probe_err, e)))?;
}
}
} }
} }
} }