diff --git a/Cargo.toml b/Cargo.toml
index 2888d4c2..f7160e35 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -2,6 +2,7 @@
 name = "llm-proxy"
 version = "0.1.0"
 edition = "2024"
+rust-version = "1.87"
 description = "Unified LLM proxy gateway supporting OpenAI, Gemini, DeepSeek, and Grok with token tracking and cost calculation"
 authors = ["newkirk"]
 license = "MIT OR Apache-2.0"
diff --git a/src/dashboard/websocket.rs b/src/dashboard/websocket.rs
index 06e231c3..846ced4b 100644
--- a/src/dashboard/websocket.rs
+++ b/src/dashboard/websocket.rs
@@ -65,7 +65,7 @@ pub(super) async fn handle_websocket_connection(mut socket: WebSocket, state: Da
 pub(super) async fn handle_websocket_message(text: &str, state: &DashboardState) {
     // Parse and handle WebSocket messages
     if let Ok(data) = serde_json::from_str::<serde_json::Value>(text)
-        && let Some("ping") = data.get("type").and_then(|v| v.as_str())
+        && data.get("type").and_then(|v| v.as_str()) == Some("ping")
     {
         let _ = state.app_state.dashboard_tx.send(serde_json::json!({
             "type": "pong",