From e123f542f17cce457cc941fa9b75738f308b30ed Mon Sep 17 00:00:00 2001
From: hobokenchicken
Date: Tue, 17 Mar 2026 18:36:05 +0000
Subject: [PATCH] fix(openai): use max_output_tokens for Responses API

---
 src/providers/openai.rs | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/providers/openai.rs b/src/providers/openai.rs
index 08d26053..ad9b74cf 100644
--- a/src/providers/openai.rs
+++ b/src/providers/openai.rs
@@ -159,10 +159,10 @@ impl super::Provider for OpenAIProvider {
             body["temperature"] = serde_json::json!(temp);
         }
 
-        // Newer models (gpt-5, o1) prefer max_completion_tokens
+        // Newer models (gpt-5, o1) in Responses API use max_output_tokens
         if let Some(max_tokens) = request.max_tokens {
             if request.model.contains("gpt-5") || request.model.starts_with("o1-") || request.model.starts_with("o3-") {
-                body["max_completion_tokens"] = serde_json::json!(max_tokens);
+                body["max_output_tokens"] = serde_json::json!(max_tokens);
             } else {
                 body["max_tokens"] = serde_json::json!(max_tokens);
             }
@@ -423,10 +423,10 @@ impl super::Provider for OpenAIProvider {
             body["temperature"] = serde_json::json!(temp);
         }
 
-        // Newer models (gpt-5, o1) prefer max_completion_tokens
+        // Newer models (gpt-5, o1) in Responses API use max_output_tokens
        if let Some(max_tokens) = request.max_tokens {
             if request.model.contains("gpt-5") || request.model.starts_with("o1-") || request.model.starts_with("o3-") {
-                body["max_completion_tokens"] = serde_json::json!(max_tokens);
+                body["max_output_tokens"] = serde_json::json!(max_tokens);
             } else {
                 body["max_tokens"] = serde_json::json!(max_tokens);
             }
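
Note for reviewers: a minimal standalone sketch of the parameter selection this patch converges on, shown outside the diff so it can be compiled and tested in isolation. The set_token_limit helper, the example request body, and the main harness are illustrative only and are not part of src/providers/openai.rs; they assume the same model-name checks used in the hunks above.

// Sketch only: mirrors the patched logic, not the provider's actual API surface.
use serde_json::{json, Value};

/// Hypothetical helper: newer models (gpt-5, o1, o3) going through the
/// Responses API take `max_output_tokens`; older chat-completion models
/// still take `max_tokens`.
fn set_token_limit(body: &mut Value, model: &str, max_tokens: Option<u32>) {
    if let Some(max) = max_tokens {
        if model.contains("gpt-5")
            || model.starts_with("o1-")
            || model.starts_with("o3-")
        {
            body["max_output_tokens"] = json!(max);
        } else {
            body["max_tokens"] = json!(max);
        }
    }
}

fn main() {
    // Hypothetical request body; field names here are for illustration.
    let mut body = json!({ "model": "gpt-5-mini", "input": "hello" });
    set_token_limit(&mut body, "gpt-5-mini", Some(256));
    assert_eq!(body["max_output_tokens"], json!(256));
    println!("{body}");
}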