This commit replaces the Axum/Rust backend with a Gin/Go implementation. The original Rust code has been archived in the 'rust' branch.
# LLM Proxy Gateway Environment Variables
# Copy to .env and fill in your API keys

# MANDATORY: Encryption key for sessions and stored API keys
# Must be a 32-byte hex or base64 encoded string
# Example (hex): LLM_PROXY__ENCRYPTION_KEY=0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef
LLM_PROXY__ENCRYPTION_KEY=your_secure_32_byte_key_here

# LLM Provider API Keys (Standard Environment Variables)
OPENAI_API_KEY=your_openai_api_key_here
GEMINI_API_KEY=your_gemini_api_key_here
DEEPSEEK_API_KEY=your_deepseek_api_key_here
GROK_API_KEY=your_grok_api_key_here

# Provider Overrides (Optional)
# LLM_PROXY__PROVIDERS__OPENAI__BASE_URL=https://api.openai.com/v1
# LLM_PROXY__PROVIDERS__GEMINI__ENABLED=true
# LLM_PROXY__PROVIDERS__OLLAMA__BASE_URL=http://localhost:11434/v1
# LLM_PROXY__PROVIDERS__OLLAMA__ENABLED=true
# LLM_PROXY__PROVIDERS__OLLAMA__MODELS=llama3,mistral,llava

# Server Configuration
LLM_PROXY__SERVER__PORT=8080
LLM_PROXY__SERVER__HOST=0.0.0.0

# Database Configuration
LLM_PROXY__DATABASE__PATH=./data/llm_proxy.db
LLM_PROXY__DATABASE__MAX_CONNECTIONS=10