# LLM Proxy Gateway Environment Variables
# Copy to .env and fill in your API keys

# OpenAI
OPENAI_API_KEY=your_openai_api_key_here

# Google Gemini
GEMINI_API_KEY=your_gemini_api_key_here

# DeepSeek
DEEPSEEK_API_KEY=your_deepseek_api_key_here

# xAI Grok (not yet available)
GROK_API_KEY=your_grok_api_key_here

# Ollama (local server)
# LLM_PROXY__PROVIDERS__OLLAMA__BASE_URL=http://your-ollama-host:11434/v1
# LLM_PROXY__PROVIDERS__OLLAMA__ENABLED=true
# LLM_PROXY__PROVIDERS__OLLAMA__MODELS=llama3,mistral,llava

# Authentication tokens (comma-separated list)
LLM_PROXY__SERVER__AUTH_TOKENS=your_bearer_token_here,another_token

# Server port (optional)
LLM_PROXY__SERVER__PORT=8080

# Database path (optional)
LLM_PROXY__DATABASE__PATH=./data/llm_proxy.db

# Session secret for HMAC-signed tokens (hex or base64 encoded, 32 bytes)
SESSION_SECRET=your_session_secret_here_32_bytes