fix: enable xAI (Grok) by default and improve provider visibility in dashboard

- Set the Grok provider's enabled flag to true by default.
- Updated AppState to carry the raw AppConfig.
- Refactored the dashboard to show all supported providers, including their configured state and initialization status (online, degraded, disabled, or error).
2026-02-26 15:56:29 -05:00
parent c884abe57d
commit c5fb2357ff
5 changed files with 67 additions and 20 deletions
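
Each element of the providers array assembled by the refactored handle_get_providers (diffed below) now carries a configured display name and a computed status. As a sketch of the resulting shape, built with the same serde_json::json! macro the handler uses, with illustrative values:

// Illustrative only: field names mirror the handler's json! block;
// the values shown here are made up.
let entry = serde_json::json!({
    "id": "grok",
    "name": "xAI Grok",
    "enabled": true,
    "status": "online", // one of "online", "degraded", "disabled", "error"
    "models": ["grok-beta"],
    "last_used": null,
});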


@@ -130,7 +130,7 @@ impl AppConfig {
.set_default("providers.grok.api_key_env", "GROK_API_KEY")?
.set_default("providers.grok.base_url", "https://api.x.ai/v1")?
.set_default("providers.grok.default_model", "grok-beta")?
.set_default("providers.grok.enabled", false)?
.set_default("providers.grok.enabled", true)?
.set_default("providers.ollama.base_url", "http://localhost:11434/v1")?
.set_default("providers.ollama.enabled", false)?
.set_default("providers.ollama.models", Vec::<String>::new())?;


@@ -530,37 +530,52 @@ async fn handle_client_usage(
 // Provider handlers
 async fn handle_get_providers(State(state): State<DashboardState>) -> Json<ApiResponse<serde_json::Value>> {
     let registry = &state.app_state.model_registry;
-    let providers = state.app_state.provider_manager.get_all_providers();
+    let config = &state.app_state.config;
     let mut providers_json = Vec::new();
-    for provider in providers {
-        let p_id = provider.name();
+    // Define the list of providers we support
+    let provider_configs = vec![
+        ("openai", "OpenAI", config.providers.openai.enabled),
+        ("gemini", "Google Gemini", config.providers.gemini.enabled),
+        ("deepseek", "DeepSeek", config.providers.deepseek.enabled),
+        ("grok", "xAI Grok", config.providers.grok.enabled),
+        ("ollama", "Ollama", config.providers.ollama.enabled),
+    ];
+    for (id, display_name, enabled) in provider_configs {
         // Find models for this provider in registry
         let mut models = Vec::new();
-        if let Some(p_info) = registry.providers.get(p_id) {
+        if let Some(p_info) = registry.providers.get(id) {
             models = p_info.models.keys().cloned().collect();
-        } else if p_id == "ollama" {
-            // Special handling for Ollama since it's local
-            // We could try to list models via API here
-            models = vec!["llama3".to_string(), "mistral".to_string(), "phi3".to_string()];
+        } else if id == "ollama" {
+            models = config.providers.ollama.models.clone();
         }
-        // Check status via circuit breaker
-        let status = if state.app_state.rate_limit_manager.check_provider_request(p_id).await.unwrap_or(true) {
+        // Determine status
+        let status = if !enabled {
+            "disabled"
+        } else {
+            // Check if it's actually initialized in the provider manager
+            if state.app_state.provider_manager.get_provider(id).is_some() {
+                // Check circuit breaker
+                if state.app_state.rate_limit_manager.check_provider_request(id).await.unwrap_or(true) {
                    "online"
                } else {
                    "degraded"
+                }
+            } else {
+                "error" // Enabled but failed to initialize (e.g. missing API key)
+            }
         };
         providers_json.push(serde_json::json!({
-            "id": p_id,
-            "name": p_id.to_uppercase(),
-            "enabled": true,
+            "id": id,
+            "name": display_name,
+            "enabled": enabled,
             "status": status,
             "models": models,
-            "last_used": None::<String>, // TODO
+            "last_used": None::<String>,
         }));
     }
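
The new status logic applies a strict precedence: a provider disabled in config reports "disabled" regardless of runtime state; an enabled provider missing from the ProviderManager (e.g. no API key at startup) reports "error"; otherwise the circuit breaker decides between "online" and "degraded". Distilled into a standalone function for clarity (this helper is hypothetical; the handler inlines the logic):

// Hypothetical distillation of the status decision inlined above.
fn provider_status(enabled: bool, initialized: bool, breaker_ok: bool) -> &'static str {
    match (enabled, initialized, breaker_ok) {
        (false, _, _) => "disabled",       // config wins over everything
        (true, false, _) => "error",       // enabled but failed to initialize
        (true, true, true) => "online",    // circuit breaker still admitting requests
        (true, true, false) => "degraded", // circuit breaker rejecting requests
    }
}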


@@ -71,7 +71,36 @@ pub mod test_utils {
         let (dashboard_tx, _) = tokio::sync::broadcast::channel(100);
+        let config = Arc::new(crate::config::AppConfig {
+            server: crate::config::ServerConfig {
+                port: 8080,
+                host: "127.0.0.1".to_string(),
+                auth_tokens: vec![],
+            },
+            database: crate::config::DatabaseConfig {
+                path: std::path::PathBuf::from(":memory:"),
+                max_connections: 5,
+            },
+            providers: crate::config::ProviderConfig {
+                openai: crate::config::OpenAIConfig { api_key_env: "OPENAI_API_KEY".to_string(), base_url: "".to_string(), default_model: "".to_string(), enabled: true },
+                gemini: crate::config::GeminiConfig { api_key_env: "GEMINI_API_KEY".to_string(), base_url: "".to_string(), default_model: "".to_string(), enabled: true },
+                deepseek: crate::config::DeepSeekConfig { api_key_env: "DEEPSEEK_API_KEY".to_string(), base_url: "".to_string(), default_model: "".to_string(), enabled: true },
+                grok: crate::config::GrokConfig { api_key_env: "GROK_API_KEY".to_string(), base_url: "".to_string(), default_model: "".to_string(), enabled: true },
+                ollama: crate::config::OllamaConfig { base_url: "".to_string(), enabled: true, models: vec![] },
+            },
+            model_mapping: crate::config::ModelMappingConfig { patterns: vec![] },
+            pricing: crate::config::PricingConfig {
+                openai: vec![],
+                gemini: vec![],
+                deepseek: vec![],
+                grok: vec![],
+                ollama: vec![],
+            },
+            config_path: None,
+        });
         Arc::new(AppState {
+            config,
             provider_manager,
             db_pool: pool.clone(),
             rate_limit_manager: Arc::new(rate_limit_manager),


@@ -113,7 +113,7 @@ async fn main() -> Result<()> {
     };
     // Create application state
-    let state = AppState::new(provider_manager, db_pool, rate_limit_manager, model_registry, config.server.auth_tokens.clone());
+    let state = AppState::new(config.clone(), provider_manager, db_pool, rate_limit_manager, model_registry, config.server.auth_tokens.clone());
     // Create application router
     let app = Router::new()


@@ -4,12 +4,13 @@ use tokio::sync::broadcast;
 use crate::{
     client::ClientManager, database::DbPool, providers::ProviderManager,
     rate_limiting::RateLimitManager, logging::RequestLogger,
-    models::registry::ModelRegistry,
+    models::registry::ModelRegistry, config::AppConfig,
 };
 /// Shared application state
 #[derive(Clone)]
 pub struct AppState {
+    pub config: Arc<AppConfig>,
     pub provider_manager: ProviderManager,
     pub db_pool: DbPool,
     pub rate_limit_manager: Arc<RateLimitManager>,
@@ -22,6 +23,7 @@ pub struct AppState {
 impl AppState {
     pub fn new(
+        config: Arc<AppConfig>,
         provider_manager: ProviderManager,
         db_pool: DbPool,
         rate_limit_manager: RateLimitManager,
@@ -33,6 +35,7 @@ impl AppState {
         let request_logger = Arc::new(RequestLogger::new(db_pool.clone(), dashboard_tx.clone()));
         Self {
+            config,
             provider_manager,
             db_pool,
             rate_limit_manager: Arc::new(rate_limit_manager),
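
Because AppState derives Clone and now holds the config behind an Arc, each per-request clone of the state bumps a reference count rather than copying the config tree, and the raw AppConfig is reachable from any handler. A sketch of the read path (the handler itself is hypothetical):

use axum::{extract::State, Json};

// Hypothetical axum handler: cloning AppState is cheap because `config`
// is an Arc<AppConfig>, and any handler can read the raw config from it.
async fn grok_enabled_flag(State(state): State<AppState>) -> Json<bool> {
    Json(state.config.providers.grok.enabled)
}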