feat(openai): implement Responses API streaming and proactive routing

This commit adds support for the OpenAI Responses API in both streaming and non-streaming modes, and implements proactive routing for gpt-5 and codex models. It also silences unused-variable warnings for the `session` binding across the dashboard source files by renaming it to `_session` where the value is not used.
This commit is contained in:
2026-03-06 20:16:43 +00:00
parent 633b69a07b
commit dd54c14ff8
8 changed files with 155 additions and 14 deletions

View File

@@ -279,7 +279,7 @@ pub(super) async fn handle_system_backup(
State(state): State<DashboardState>,
headers: axum::http::HeaderMap,
) -> Json<ApiResponse<serde_json::Value>> {
let (session, _) = match super::auth::require_admin(&state, &headers).await {
let (_session, _) = match super::auth::require_admin(&state, &headers).await {
Ok((session, new_token)) => (session, new_token),
Err(e) => return e,
};
@@ -342,7 +342,7 @@ pub(super) async fn handle_update_settings(
State(state): State<DashboardState>,
headers: axum::http::HeaderMap,
) -> Json<ApiResponse<serde_json::Value>> {
let (session, _) = match super::auth::require_admin(&state, &headers).await {
let (_session, _) = match super::auth::require_admin(&state, &headers).await {
Ok((session, new_token)) => (session, new_token),
Err(e) => return e,
};