fix(openai): unify tool call indexing for both standard and embedded calls
- Sequential `next_tool_index` is now used for both Responses API 'function_call' events and the proxy's 'tool_uses' JSON extraction.
- This ensures `tool_calls` arrays in the stream always start at index 0 and are dense, even if standard and embedded calls are somehow mixed.
- Fixed the `payload_idx` logic so argument chunks align correctly with their initialization chunks.
This commit is contained in:
@@ -838,9 +838,11 @@ impl super::Provider for OpenAIProvider {
|
||||
|
||||
// Yield the tool calls in two chunks to mimic standard streaming behavior
|
||||
// Chunk 1: Initialization (id, name)
|
||||
let init_deltas: Vec<crate::models::ToolCallDelta> = embedded_calls.iter().enumerate().map(|(idx, tc)| {
|
||||
let init_deltas: Vec<crate::models::ToolCallDelta> = embedded_calls.iter().map(|tc| {
|
||||
let tc_idx = next_tool_index;
|
||||
next_tool_index += 1;
|
||||
crate::models::ToolCallDelta {
|
||||
index: idx as u32,
|
||||
index: tc_idx,
|
||||
id: Some(tc.id.clone()),
|
||||
call_type: Some("function".to_string()),
|
||||
function: Some(crate::models::FunctionCallDelta {
|
||||
@@ -860,9 +862,13 @@ impl super::Provider for OpenAIProvider {
|
||||
};
|
||||
|
||||
// Chunk 2: Payload (arguments)
|
||||
let arg_deltas: Vec<crate::models::ToolCallDelta> = embedded_calls.into_iter().enumerate().map(|(idx, tc)| {
|
||||
// Reset temp index for payload chunk
|
||||
let mut payload_idx = next_tool_index - embedded_calls.len() as u32;
|
||||
let arg_deltas: Vec<crate::models::ToolCallDelta> = embedded_calls.into_iter().map(|tc| {
|
||||
let tc_idx = payload_idx;
|
||||
payload_idx += 1;
|
||||
crate::models::ToolCallDelta {
|
||||
index: idx as u32,
|
||||
index: tc_idx,
|
||||
id: None,
|
||||
call_type: None,
|
||||
function: Some(crate::models::FunctionCallDelta {
|
||||
|
||||
Reference in New Issue
Block a user