diff --git a/src/providers/openai.rs b/src/providers/openai.rs
index ff2de314..ecb6300f 100644
--- a/src/providers/openai.rs
+++ b/src/providers/openai.rs
@@ -819,15 +819,37 @@ impl super::Provider for OpenAIProvider {
             };
         }
 
-        // Yield the tool calls
-        // ... (rest of tool call yielding unchanged)
-        let deltas: Vec<crate::models::ToolCallDelta> = embedded_calls.into_iter().enumerate().map(|(idx, tc)| {
+        // Yield the tool calls in two chunks to mimic standard streaming behavior
+        // Chunk 1: Initialization (id, name)
+        let init_deltas: Vec<crate::models::ToolCallDelta> = embedded_calls.iter().enumerate().map(|(idx, tc)| {
             crate::models::ToolCallDelta {
                 index: idx as u32,
-                id: Some(tc.id),
+                id: Some(tc.id.clone()),
                 call_type: Some("function".to_string()),
                 function: Some(crate::models::FunctionCallDelta {
-                    name: Some(tc.function.name),
+                    name: Some(tc.function.name.clone()),
+                    arguments: Some("".to_string()),
+                }),
+            }
+        }).collect();
+
+        yield ProviderStreamChunk {
+            content: String::new(),
+            reasoning_content: None,
+            finish_reason: None,
+            tool_calls: Some(init_deltas),
+            model: model.clone(),
+            usage: None,
+        };
+
+        // Chunk 2: Payload (arguments)
+        let arg_deltas: Vec<crate::models::ToolCallDelta> = embedded_calls.into_iter().enumerate().map(|(idx, tc)| {
+            crate::models::ToolCallDelta {
+                index: idx as u32,
+                id: None,
+                call_type: None,
+                function: Some(crate::models::FunctionCallDelta {
+                    name: None,
                     arguments: Some(tc.function.arguments),
                 }),
             }
@@ -837,7 +859,7 @@ impl super::Provider for OpenAIProvider {
             content: String::new(),
             reasoning_content: None,
             finish_reason: None,
-            tool_calls: Some(deltas),
+            tool_calls: Some(arg_deltas),
             model: model.clone(),
             usage: None,
         };