This commit introduces:
- AES-256-GCM encryption for LLM provider API keys in the database.
- HMAC-SHA256 signed session tokens with activity-based refresh logic.
- Standardized frontend XSS protection using a global escapeHtml utility.
- Hardened security headers and request body size limits.
- Improved database integrity with foreign key enforcement and atomic transactions.
- Integration tests for the full encrypted key storage and proxy usage lifecycle.
# LLM Proxy Gateway Environment Variables
# Copy to .env and fill in your API keys

# OpenAI
OPENAI_API_KEY=your_openai_api_key_here

# Google Gemini
GEMINI_API_KEY=your_gemini_api_key_here

# DeepSeek
DEEPSEEK_API_KEY=your_deepseek_api_key_here

# xAI Grok (not yet available)
GROK_API_KEY=your_grok_api_key_here

# Ollama (local server)
# LLM_PROXY__PROVIDERS__OLLAMA__BASE_URL=http://your-ollama-host:11434/v1
# LLM_PROXY__PROVIDERS__OLLAMA__ENABLED=true
# LLM_PROXY__PROVIDERS__OLLAMA__MODELS=llama3,mistral,llava

# Authentication tokens (comma-separated list)
LLM_PROXY__SERVER__AUTH_TOKENS=your_bearer_token_here,another_token

# Server port (optional)
LLM_PROXY__SERVER__PORT=8080

# Database path (optional)
LLM_PROXY__DATABASE__PATH=./data/llm_proxy.db

# Session secret for HMAC-signed tokens (hex or base64 encoded, 32 bytes)
SESSION_SECRET=your_session_secret_here_32_bytes