feat: migrate backend from rust to go
This commit replaces the Axum/Rust backend with a Gin/Go implementation. The original Rust code has been archived in the 'rust' branch.
This commit is contained in:
31
.env.example
31
.env.example
@@ -1,31 +1,28 @@
|
||||
# LLM Proxy Gateway Environment Variables
|
||||
# Copy to .env and fill in your API keys
|
||||
|
||||
# OpenAI
|
||||
# MANDATORY: Encryption key for sessions and stored API keys
|
||||
# Must be a 32-byte hex- or base64-encoded string
|
||||
# Example (hex): LLM_PROXY__ENCRYPTION_KEY=0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef
|
||||
LLM_PROXY__ENCRYPTION_KEY=your_secure_32_byte_key_here
|
||||
|
||||
# LLM Provider API Keys (Standard Environment Variables)
|
||||
OPENAI_API_KEY=your_openai_api_key_here
|
||||
|
||||
# Google Gemini
|
||||
GEMINI_API_KEY=your_gemini_api_key_here
|
||||
|
||||
# DeepSeek
|
||||
DEEPSEEK_API_KEY=your_deepseek_api_key_here
|
||||
|
||||
# xAI Grok (not yet available)
|
||||
GROK_API_KEY=your_grok_api_key_here
|
||||
|
||||
# Ollama (local server)
|
||||
# LLM_PROXY__PROVIDERS__OLLAMA__BASE_URL=http://your-ollama-host:11434/v1
|
||||
# Provider Overrides (Optional)
|
||||
# LLM_PROXY__PROVIDERS__OPENAI__BASE_URL=https://api.openai.com/v1
|
||||
# LLM_PROXY__PROVIDERS__GEMINI__ENABLED=true
|
||||
# LLM_PROXY__PROVIDERS__OLLAMA__BASE_URL=http://localhost:11434/v1
|
||||
# LLM_PROXY__PROVIDERS__OLLAMA__ENABLED=true
|
||||
# LLM_PROXY__PROVIDERS__OLLAMA__MODELS=llama3,mistral,llava
|
||||
|
||||
# Authentication tokens (comma-separated list)
|
||||
LLM_PROXY__SERVER__AUTH_TOKENS=your_bearer_token_here,another_token
|
||||
|
||||
# Server port (optional)
|
||||
# Server Configuration
|
||||
LLM_PROXY__SERVER__PORT=8080
|
||||
LLM_PROXY__SERVER__HOST=0.0.0.0
|
||||
|
||||
# Database path (optional)
|
||||
# Database Configuration
|
||||
LLM_PROXY__DATABASE__PATH=./data/llm_proxy.db
|
||||
|
||||
# Session secret for HMAC-signed tokens (hex- or base64-encoded, 32 bytes)
|
||||
SESSION_SECRET=your_session_secret_here_32_bytes
|
||||
LLM_PROXY__DATABASE__MAX_CONNECTIONS=10
|
||||
|
||||
Reference in New Issue
Block a user