From a243a3987dc38f7bec7e59c4bb6aeaa590d33c5c Mon Sep 17 00:00:00 2001
From: hobokenchicken
Date: Fri, 6 Mar 2026 20:26:14 +0000
Subject: [PATCH] fix(openai): use structured input and add probe for Responses API

Updated the OpenAI Responses API calls to send `input` as a structured
array of role/content objects rather than a single concatenated string,
so message roles and non-text content parts survive the round trip.
Added an error probe to chat_responses_stream that re-sends the request
when the event stream fails, capturing and logging the API's error body.
---
 src/providers/openai.rs | 72 +++++++++++++++++++++++++----------------
 1 file changed, 45 insertions(+), 27 deletions(-)

diff --git a/src/providers/openai.rs b/src/providers/openai.rs
index d3c5031a..913ca59b 100644
--- a/src/providers/openai.rs
+++ b/src/providers/openai.rs
@@ -197,27 +197,24 @@ impl super::Provider for OpenAIProvider {
     }

     async fn chat_responses(&self, request: UnifiedRequest) -> Result<UnifiedResponse, AppError> {
-        // Build a simple `input` string by concatenating message parts.
+        // Build a structured input for the Responses API.
         let messages_json = helpers::messages_to_openai_json(&request.messages).await?;
-        let mut inputs: Vec<String> = Vec::new();
+        let mut input_parts = Vec::new();
         for m in &messages_json {
-            let role = m["role"].as_str().unwrap_or("");
-            let parts = m.get("content").and_then(|c| c.as_array()).cloned().unwrap_or_default();
-            let mut text_parts = Vec::new();
-            for p in parts {
-                if let Some(t) = p.get("text").and_then(|v| v.as_str()) {
-                    text_parts.push(t.to_string());
-                }
-            }
-            inputs.push(format!("{}: {}", role, text_parts.join("")));
+            let role = m["role"].as_str().unwrap_or("user");
+            let content = m.get("content").cloned().unwrap_or_else(|| serde_json::json!(""));
+
+            input_parts.push(serde_json::json!({
+                "role": role,
+                "content": content
+            }));
         }
-        let input_text = inputs.join("\n");

         let resp = self
             .client
             .post(format!("{}/responses", self.config.base_url))
             .header("Authorization", format!("Bearer {}", self.api_key))
-            .json(&serde_json::json!({ "model": request.model, "input": input_text }))
+            .json(&serde_json::json!({ "model": request.model, "input": input_parts }))
             .send()
             .await
             .map_err(|e| AppError::ProviderError(e.to_string()))?;
@@ -400,31 +397,30 @@ impl super::Provider for OpenAIProvider {
         &self,
         request: UnifiedRequest,
     ) -> Result<BoxStream<'static, Result<UnifiedStreamEvent, AppError>>, AppError> {
-        // Build a simple `input` string by concatenating message parts.
+        // Build a structured input for the Responses API.
         let messages_json = helpers::messages_to_openai_json(&request.messages).await?;
-        let mut inputs: Vec<String> = Vec::new();
+        let mut input_parts = Vec::new();
         for m in &messages_json {
-            let role = m["role"].as_str().unwrap_or("");
-            let parts = m.get("content").and_then(|c| c.as_array()).cloned().unwrap_or_default();
-            let mut text_parts = Vec::new();
-            for p in parts {
-                if let Some(t) = p.get("text").and_then(|v| v.as_str()) {
-                    text_parts.push(t.to_string());
-                }
-            }
-            inputs.push(format!("{}: {}", role, text_parts.join("")));
+            let role = m["role"].as_str().unwrap_or("user");
+            let content = m.get("content").cloned().unwrap_or_else(|| serde_json::json!(""));
+
+            input_parts.push(serde_json::json!({
+                "role": role,
+                "content": content
+            }));
         }
-        let input_text = inputs.join("\n");

         let body = serde_json::json!({
             "model": request.model,
-            "input": input_text,
+            "input": input_parts,
             "stream": true
         });

         let url = format!("{}/responses", self.config.base_url);
         let api_key = self.api_key.clone();
         let model = request.model.clone();
+        let probe_client = self.client.clone();
+        let probe_body = body.clone();

         let es = reqwest_eventsource::EventSource::new(
             self.client
@@ -498,7 +494,29 @@ impl super::Provider for OpenAIProvider {
                     }
                     Ok(_) => continue,
                     Err(e) => {
-                        Err(AppError::ProviderError(format!("Responses stream error: {}", e)))?;
+                        // Re-send the request once to capture the API's actual error body.
+                        let probe_resp = probe_client
+                            .post(&url)
+                            .header("Authorization", format!("Bearer {}", api_key))
+                            .json(&probe_body)
+                            .send()
+                            .await;
+
+                        match probe_resp {
+                            Ok(r) if !r.status().is_success() => {
+                                let status = r.status();
+                                let error_body = r.text().await.unwrap_or_default();
+                                tracing::error!("OpenAI Responses stream error probe ({}): {}", status, error_body);
+                                Err(AppError::ProviderError(format!("OpenAI Responses API error ({}): {}", status, error_body)))?;
+                            }
+                            Ok(_) => {
+                                Err(AppError::ProviderError(format!("Responses stream error (probe returned 2xx): {}", e)))?;
+                            }
+                            Err(probe_err) => {
+                                tracing::error!("OpenAI Responses stream error probe failed: {}", probe_err);
+                                Err(AppError::ProviderError(format!("Responses stream error (probe failed: {}): {}", probe_err, e)))?;
+                            }
+                        }
                     }
                 }
            }
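
Note on the input change: the old code flattened every message into one
"role: text" string, which discarded role boundaries and dropped any
non-text content parts. The new loop forwards each message's role and
content as-is. A minimal standalone sketch of the resulting request body
follows; the model name, the sample messages, and the "input_text" part
shape are illustrative stand-ins here, since the real values come from
helpers::messages_to_openai_json:

    use serde_json::json;

    fn main() {
        // Hypothetical messages standing in for messages_to_openai_json output.
        let messages = vec![
            json!({ "role": "system", "content": "Be brief." }),
            json!({ "role": "user", "content": [{ "type": "input_text", "text": "Hi" }] }),
        ];

        // Same loop shape as the patch: pass role and content through untouched.
        let mut input_parts = Vec::new();
        for m in &messages {
            let role = m["role"].as_str().unwrap_or("user");
            let content = m.get("content").cloned().unwrap_or_else(|| json!(""));
            input_parts.push(json!({ "role": role, "content": content }));
        }

        let body = json!({ "model": "gpt-4o-mini", "input": input_parts });
        println!("{}", serde_json::to_string_pretty(&body).unwrap());
    }

The Responses API accepts both a bare string and this array-of-messages
form for `input`; the array form is what preserves multi-turn structure.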
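
One caveat on the probe: probe_body is a straight clone of the streaming
body, so it still carries "stream": true. If the probe unexpectedly
succeeds, r.text() will block while it buffers an entire SSE stream, and
the re-sent request bills a second completion. A possible refinement,
sketched below with a hypothetical helper (probe_body_from is not part of
this patch), is to flip the flag on the probe copy so a 2xx probe returns
a single JSON document:

    use serde_json::{json, Value};

    // Sketch: derive a non-streaming probe body from the streaming request.
    fn probe_body_from(body: &Value) -> Value {
        let mut probe = body.clone();
        probe["stream"] = json!(false);
        probe
    }

    fn main() {
        let body = json!({ "model": "gpt-4o-mini", "input": [], "stream": true });
        let probe = probe_body_from(&body);
        assert_eq!(probe["stream"], json!(false));
        assert_eq!(body["stream"], json!(true)); // original body is untouched
    }

Putting a short timeout on the probe request would also keep a hung
endpoint from stalling the error path.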