fix(gemini): ensure conversation starts with user role and add empty message check
Some checks failed
CI / Check (push) Has been cancelled
CI / Clippy (push) Has been cancelled
CI / Formatting (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Release Build (push) Has been cancelled

Gemini API requires the first message to be from the 'user' role.
This commit ensures that:
- If a conversation starts with a 'model' (assistant) role, a placeholder 'user' message is prepended.
- 'tool' results are correctly mapped to 'user' role parts.
- Consecutive messages with the same role are merged into a single content entry.
- Requests with an empty contents list are rejected with an error in both the sync and streaming paths.

This fixes 400 Bad Request errors when clients (like opencode) send
message histories that don't match Gemini's strict role requirements.
This commit is contained in:
2026-03-05 08:48:25 -05:00
parent 5b6583301d
commit b0bd1fd143

View File

@@ -19,6 +19,8 @@ use crate::{
struct GeminiRequest {
contents: Vec<GeminiContent>,
#[serde(skip_serializing_if = "Option::is_none")]
system_instruction: Option<GeminiContent>,
#[serde(skip_serializing_if = "Option::is_none")]
generation_config: Option<GeminiGenerationConfig>,
#[serde(skip_serializing_if = "Option::is_none")]
tools: Option<Vec<GeminiTool>>,
@@ -29,7 +31,8 @@ struct GeminiRequest {
#[derive(Debug, Clone, Serialize, Deserialize)]
struct GeminiContent {
parts: Vec<GeminiPart>,
role: String,
#[serde(skip_serializing_if = "Option::is_none")]
role: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -186,11 +189,37 @@ impl GeminiProvider {
/// Convert unified messages to Gemini content format.
/// Handles text, images, tool calls (assistant), and tool results.
async fn convert_messages(messages: Vec<UnifiedMessage>) -> Result<Vec<GeminiContent>, AppError> {
let mut contents = Vec::with_capacity(messages.len());
/// Returns (contents, system_instruction)
async fn convert_messages(
messages: Vec<UnifiedMessage>,
) -> Result<(Vec<GeminiContent>, Option<GeminiContent>), AppError> {
let mut contents: Vec<GeminiContent> = Vec::new();
let mut system_parts = Vec::new();
for msg in messages {
// Tool-result messages → functionResponse parts under role "user"
if msg.role == "system" {
for part in msg.content {
if let ContentPart::Text { text } = part {
system_parts.push(GeminiPart {
text: Some(text),
inline_data: None,
function_call: None,
function_response: None,
});
}
}
continue;
}
let role = match msg.role.as_str() {
"assistant" => "model".to_string(),
"tool" => "user".to_string(), // Tool results are technically from the user side in Gemini
_ => "user".to_string(),
};
let mut parts = Vec::new();
// Handle tool results (role "tool")
if msg.role == "tool" {
let text_content = msg
.content
@@ -201,14 +230,11 @@ impl GeminiProvider {
})
.unwrap_or_default();
let name = msg.name.unwrap_or_default();
// Parse the content as JSON if possible, otherwise wrap as string
let name = msg.name.clone().unwrap_or_default();
let response_value = serde_json::from_str::<Value>(&text_content)
.unwrap_or_else(|_| serde_json::json!({ "result": text_content }));
contents.push(GeminiContent {
parts: vec![GeminiPart {
parts.push(GeminiPart {
text: None,
inline_data: None,
function_call: None,
@@ -216,17 +242,10 @@ impl GeminiProvider {
name,
response: response_value,
}),
}],
role: "user".to_string(),
});
continue;
}
// Assistant messages with tool_calls → functionCall parts
if msg.role == "assistant" {
} else if msg.role == "assistant" && msg.tool_calls.is_some() {
// Assistant messages with tool_calls
if let Some(tool_calls) = &msg.tool_calls {
let mut parts = Vec::new();
// Include text content if present
for p in &msg.content {
if let ContentPart::Text { text } = p {
@@ -241,7 +260,6 @@ impl GeminiProvider {
}
}
// Convert each tool call to a functionCall part
for tc in tool_calls {
let args = serde_json::from_str::<Value>(&tc.function.arguments)
.unwrap_or_else(|_| serde_json::json!({}));
@@ -255,17 +273,9 @@ impl GeminiProvider {
function_response: None,
});
}
contents.push(GeminiContent {
parts,
role: "model".to_string(),
});
continue;
}
}
} else {
// Regular text/image messages
let mut parts = Vec::with_capacity(msg.content.len());
for part in msg.content {
match part {
ContentPart::Text { text } => {
@@ -294,16 +304,52 @@ impl GeminiProvider {
}
}
}
let role = match msg.role.as_str() {
"assistant" => "model".to_string(),
_ => "user".to_string(),
};
contents.push(GeminiContent { parts, role });
}
Ok(contents)
if parts.is_empty() {
continue;
}
// Merge with previous message if role matches
if let Some(last_content) = contents.last_mut() {
if last_content.role.as_ref() == Some(&role) {
last_content.parts.extend(parts);
continue;
}
}
contents.push(GeminiContent {
parts,
role: Some(role),
});
}
// Gemini requires the first message to be from "user".
// If it starts with "model", we prepend a placeholder user message.
if let Some(first) = contents.first() {
if first.role.as_deref() == Some("model") {
contents.insert(0, GeminiContent {
role: Some("user".to_string()),
parts: vec![GeminiPart {
text: Some("Continue conversation.".to_string()),
inline_data: None,
function_call: None,
function_response: None,
}],
});
}
}
let system_instruction = if !system_parts.is_empty() {
Some(GeminiContent {
parts: system_parts,
role: None,
})
} else {
None
};
Ok((contents, system_instruction))
}
/// Convert OpenAI tools to Gemini function declarations.
@@ -406,7 +452,7 @@ impl super::Provider for GeminiProvider {
let model = request.model.clone();
let tools = Self::convert_tools(&request);
let tool_config = Self::convert_tool_config(&request);
let contents = Self::convert_messages(request.messages.clone()).await?;
let (contents, system_instruction) = Self::convert_messages(request.messages.clone()).await?;
if contents.is_empty() {
return Err(AppError::ProviderError("No valid messages to send".to_string()));
@@ -423,6 +469,7 @@ impl super::Provider for GeminiProvider {
let gemini_request = GeminiRequest {
contents,
system_instruction,
generation_config,
tools,
tool_config,
@@ -530,7 +577,11 @@ impl super::Provider for GeminiProvider {
let model = request.model.clone();
let tools = Self::convert_tools(&request);
let tool_config = Self::convert_tool_config(&request);
let contents = Self::convert_messages(request.messages.clone()).await?;
let (contents, system_instruction) = Self::convert_messages(request.messages.clone()).await?;
if contents.is_empty() {
return Err(AppError::ProviderError("No valid messages to send".to_string()));
}
let generation_config = if request.temperature.is_some() || request.max_tokens.is_some() {
Some(GeminiGenerationConfig {
@@ -543,6 +594,7 @@ impl super::Provider for GeminiProvider {
let gemini_request = GeminiRequest {
contents,
system_instruction,
generation_config,
tools,
tool_config,