From 246a6d88f0d3ca0922a9d4a19c5875e371f64352 Mon Sep 17 00:00:00 2001 From: hobokenchicken Date: Thu, 19 Mar 2026 11:23:56 -0400 Subject: [PATCH] fix: update grok default model to grok-2 Changed grok-beta to grok-2 across backend config, dashboard tests, and frontend monitoring. Also refreshed the other sample model names in the monitoring page's request simulator (gpt-4 -> gpt-4o, gpt-3.5-turbo -> gpt-4o-mini, gemini-pro -> gemini-2.0-flash) so the demo data matches the models listed in the README. --- README.md | 2 +- internal/config/config.go | 2 +- internal/server/dashboard.go | 2 +- static/js/pages/monitoring.js | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 30fa4cb0..2a3176f8 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ A unified, high-performance LLM proxy gateway built in Go. It provides a single - **OpenAI:** GPT-4o, GPT-4o Mini, o1, o3 reasoning models. - **Google Gemini:** Gemini 2.0 Flash, Pro, and vision models (with native CoT support). - **DeepSeek:** DeepSeek Chat and Reasoner (R1) models. - - **xAI Grok:** Grok-beta models. + - **xAI Grok:** Grok-2 models. - **Ollama:** Local LLMs running on your network. - **Observability & Tracking:** - **Asynchronous Logging:** Non-blocking request logging to SQLite using background workers. 
diff --git a/internal/config/config.go b/internal/config/config.go index 0f08664e..276a1aec 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -99,7 +99,7 @@ func Load() (*Config, error) { v.SetDefault("providers.grok.api_key_env", "GROK_API_KEY") v.SetDefault("providers.grok.base_url", "https://api.x.ai/v1") - v.SetDefault("providers.grok.default_model", "grok-beta") + v.SetDefault("providers.grok.default_model", "grok-2") v.SetDefault("providers.grok.enabled", true) v.SetDefault("providers.ollama.base_url", "http://localhost:11434/v1") diff --git a/internal/server/dashboard.go b/internal/server/dashboard.go index 464438ef..34505f27 100644 --- a/internal/server/dashboard.go +++ b/internal/server/dashboard.go @@ -693,7 +693,7 @@ func (s *Server) handleTestProvider(c *gin.Context) { } else if name == "deepseek" { testReq.Model = "deepseek-chat" } else if name == "grok" { - testReq.Model = "grok-beta" + testReq.Model = "grok-2" } _, err := provider.ChatCompletion(c.Request.Context(), testReq) diff --git a/static/js/pages/monitoring.js b/static/js/pages/monitoring.js index e6f44167..a3b3f0ef 100644 --- a/static/js/pages/monitoring.js +++ b/static/js/pages/monitoring.js @@ -492,7 +492,7 @@ class MonitoringPage { simulateRequest() { const clients = ['client-1', 'client-2', 'client-3', 'client-4', 'client-5']; const providers = ['OpenAI', 'Gemini', 'DeepSeek', 'Grok']; - const models = ['gpt-4', 'gpt-3.5-turbo', 'gemini-pro', 'deepseek-chat', 'grok-beta']; + const models = ['gpt-4o', 'gpt-4o-mini', 'gemini-2.0-flash', 'deepseek-chat', 'grok-2']; const statuses = ['success', 'success', 'success', 'error', 'warning']; // Mostly success const request = {