feat: implement system metrics and fix monitoring charts
Some checks failed
CI / Lint (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Build (push) Has been cancelled

Added /api/system/metrics with CPU/Mem/Disk/Load data using gopsutil. Updated Hub to track active WebSocket listeners. Verified log format for monitoring charts.
This commit is contained in:
2026-03-19 13:15:48 -04:00
parent 3f1e6d3407
commit dec4b927dc
5 changed files with 97 additions and 10 deletions

View File

@@ -4,6 +4,7 @@ import (
"database/sql"
"fmt"
"net/http"
"os"
"strings"
"time"
@@ -12,6 +13,12 @@ import (
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"golang.org/x/crypto/bcrypt"
"github.com/shirou/gopsutil/v3/cpu"
"github.com/shirou/gopsutil/v3/mem"
"github.com/shirou/gopsutil/v3/disk"
"github.com/shirou/gopsutil/v3/load"
"github.com/shirou/gopsutil/v3/process"
)
type ApiResponse struct {
@@ -955,7 +962,6 @@ func (s *Server) handleGetModels(c *gin.Context) {
usedOnly := c.Query("used_only") == "true"
// Registry provider normalized name -> Proxy-internal provider ID
// This ensures we only show models from YOUR providers.
allowedRegistryProviders := map[string]string{
"openai": "openai",
"google": "gemini",
@@ -983,7 +989,6 @@ func (s *Server) handleGetModels(c *gin.Context) {
err := s.database.Select(&pairs, "SELECT DISTINCT model, provider FROM llm_requests WHERE status = 'success'")
if err == nil {
for _, p := range pairs {
// Key format: "gpt-4o:openai"
usedPairs[fmt.Sprintf("%s:%s", p.Model, p.Provider)] = true
}
}
@@ -992,14 +997,12 @@ func (s *Server) handleGetModels(c *gin.Context) {
var result []gin.H
if s.registry != nil {
for pID, pInfo := range s.registry.Providers {
// Only include models from the 4 allowed registry IDs
proxyProvider, allowed := allowedRegistryProviders[pID]
if !allowed {
continue
}
for mID, mMeta := range pInfo.Models {
// If usedOnly is true, only include if this exact (model, provider) was logged
if usedOnly && !usedPairs[fmt.Sprintf("%s:%s", mID, proxyProvider)] {
continue
}
@@ -1043,7 +1046,7 @@ func (s *Server) handleGetModels(c *gin.Context) {
result = append(result, gin.H{
"id": mID,
"name": mMeta.Name,
"provider": proxyProvider, // Correctly normalized provider name
"provider": proxyProvider,
"enabled": enabled,
"prompt_cost": promptCost,
"completion_cost": completionCost,
@@ -1218,6 +1221,42 @@ func (s *Server) handleSystemHealth(c *gin.Context) {
}))
}
// handleSystemMetrics reports host and process resource usage for the
// monitoring dashboard: CPU utilization and load average, host and
// process memory, root-filesystem usage, and active connection counts.
//
// Every gopsutil call is best-effort: on error the corresponding metric
// falls back to zero instead of dereferencing the nil result (the
// original code panicked on any sampling failure). The response is
// therefore always 200 with a fully-shaped, possibly zeroed payload.
func (s *Server) handleSystemMetrics(c *gin.Context) {
	// NOTE: cpu.Percent with a 1s interval blocks this handler for a
	// full second while sampling. Acceptable for a low-traffic metrics
	// endpoint; a cached background sampler would be better at scale.
	cpuPercent := 0.0
	if usage, err := cpu.Percent(time.Second, false); err == nil && len(usage) > 0 {
		cpuPercent = usage[0]
	}

	// Load average; zeroes on error (e.g. platforms without loadavg).
	var load1, load5, load15 float64
	if l, err := load.Avg(); err == nil {
		load1, load5, load15 = l.Load1, l.Load5, l.Load15
	}

	// Host virtual memory.
	var memUsed, memTotal uint64
	var memPercent float64
	if v, err := mem.VirtualMemory(); err == nil {
		memUsed, memTotal, memPercent = v.Used, v.Total, v.UsedPercent
	}

	// This process's resident set size.
	var processRSS uint64
	if p, err := process.NewProcess(int32(os.Getpid())); err == nil {
		if info, err := p.MemoryInfo(); err == nil && info != nil {
			processRSS = info.RSS
		}
	}

	// Root filesystem usage.
	var diskUsed, diskTotal uint64
	var diskPercent float64
	if d, err := disk.Usage("/"); err == nil {
		diskUsed, diskTotal, diskPercent = d.Used, d.Total, d.UsedPercent
	}

	c.JSON(http.StatusOK, SuccessResponse(gin.H{
		"cpu": gin.H{
			"usage_percent": fmt.Sprintf("%.1f", cpuPercent),
			"load_average":  []float64{load1, load5, load15},
		},
		"memory": gin.H{
			"used_mb":        memUsed / 1024 / 1024,
			"total_mb":       memTotal / 1024 / 1024,
			"usage_percent":  fmt.Sprintf("%.1f", memPercent),
			"process_rss_mb": processRSS / 1024 / 1024,
		},
		"disk": gin.H{
			"used_gb":       float64(diskUsed) / 1024 / 1024 / 1024,
			"total_gb":      float64(diskTotal) / 1024 / 1024 / 1024,
			"usage_percent": fmt.Sprintf("%.1f", diskPercent),
		},
		"connections": gin.H{
			"db_active":           s.database.Stats().OpenConnections,
			"websocket_listeners": s.hub.GetClientCount(),
		},
	}))
}
func (s *Server) handleGetSettings(c *gin.Context) {
providerCount := 0
modelCount := 0
@@ -1268,6 +1307,7 @@ func (s *Server) handleGetLogs(c *gin.Context) {
Model string `json:"model"`
Tokens int `json:"tokens"`
Status string `json:"status"`
Duration int `json:"duration"`
}
uiLogs := make([]UILog, len(logs))
@@ -1288,6 +1328,10 @@ func (s *Server) handleGetLogs(c *gin.Context) {
if l.TotalTokens != nil {
tokens = *l.TotalTokens
}
duration := 0
if l.DurationMS != nil {
duration = *l.DurationMS
}
uiLogs[i] = UILog{
Timestamp: l.Timestamp.Format(time.RFC3339),
@@ -1296,6 +1340,7 @@ func (s *Server) handleGetLogs(c *gin.Context) {
Model: model,
Tokens: tokens,
Status: l.Status,
Duration: duration,
}
}