refactor: replace GraphQL/REST with MQTT-only for Unraid server data
All server stats (CPU, RAM, Docker, shares, disks, array) now come directly from MQTT topics published by the Unraid MQTT Agent. This eliminates the need for API keys, HTTP polling, and the GraphQL/REST fallback chain.

- Rewrote unraid_service.py to read from the MQTT store (no httpx needed)
- Simplified the servers router (no cache, no enrichment hack)
- Added an mqtt_prefix field to the UnraidServer config
- Updated the DB: both Daddelolymp and Adriahub now carry mqtt_prefix and no api_key
- Data is always fresh (MQTT pushes every ~15 s)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
5d3d4f4015
commit
c6db0ab569
3 changed files with 201 additions and 469 deletions
|
|
@ -1,4 +1,4 @@
|
|||
"""Unraid servers status router."""
|
||||
"""Unraid servers status router — MQTT-only data source."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
|
@ -7,151 +7,45 @@ from typing import Any, Dict, List
|
|||
|
||||
from fastapi import APIRouter
|
||||
|
||||
from server.cache import cache
|
||||
from server.config import get_settings
|
||||
from server.services.mqtt_service import mqtt_service
|
||||
from server.services.unraid_service import ServerConfig, fetch_all_servers
|
||||
from server.services.unraid_service import ServerConfig, fetch_all_servers_mqtt
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api", tags=["servers"])
|
||||
|
||||
CACHE_KEY = "servers"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# MQTT enrichment — overlay live system metrics from MQTT topics
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _enrich_from_mqtt(servers: List[Dict[str, Any]]) -> None:
    """Merge live CPU/RAM data from MQTT ``<prefix>/system`` topics.

    The Unraid MQTT Agent plugin publishes JSON payloads to topics like
    ``unraid-daddelolymp/system`` or ``Adriahub/system`` every ~15 s.
    These contain live ``cpu_usage_percent``, ``ram_usage_percent``, etc.
    that the GraphQL API does not expose.

    Mutates each server dict in *servers* in place. Servers with no
    matching topic (or a non-dict payload) are left untouched.

    Args:
        servers: Server status dicts as produced by the unraid service.
    """
    store = mqtt_service.store

    for srv in servers:
        name = srv.get("name", "")
        if not name:
            continue

        # Try common topic patterns until one yields a dict payload.
        system_data: Dict[str, Any] | None = None
        for pattern in (
            f"{name}/system",                 # "Adriahub/system"
            f"unraid-{name.lower()}/system",  # "unraid-daddelolymp/system"
            f"unraid-{name}/system",          # "unraid-Daddelolymp/system"
        ):
            msg = store.get(pattern)
            if msg is not None and isinstance(msg.payload, dict):
                system_data = msg.payload
                break

        if not system_data:
            continue

        # Guarantee the nested dicts exist so a sparse server record cannot
        # raise KeyError below (the original indexed srv["cpu"] directly).
        cpu = srv.setdefault("cpu", {})
        ram = srv.setdefault("ram", {})

        # --- CPU ---
        cpu_pct = system_data.get("cpu_usage_percent")
        if cpu_pct is not None:
            cpu["usage_pct"] = round(float(cpu_pct), 1)

        cpu_model = system_data.get("cpu_model")
        if cpu_model:
            cpu["brand"] = cpu_model

        cpu_temp = system_data.get("cpu_temp_celsius")
        if cpu_temp is not None:
            cpu["temp_c"] = cpu_temp

        cores = system_data.get("cpu_cores")
        if cores:
            cpu["cores"] = cores

        threads = system_data.get("cpu_threads")
        if threads:
            cpu["threads"] = threads

        # --- RAM ---
        ram_pct = system_data.get("ram_usage_percent")
        if ram_pct is not None:
            ram["pct"] = round(float(ram_pct), 1)

        ram_total = system_data.get("ram_total_bytes")
        if ram_total:
            ram["total_gb"] = round(ram_total / (1024 ** 3), 1)

        ram_used = system_data.get("ram_used_bytes")
        if ram_used:
            ram["used_gb"] = round(ram_used / (1024 ** 3), 1)

        # --- Uptime ---
        uptime_secs = system_data.get("uptime_seconds")
        if uptime_secs:
            # Coerce to int: a float payload (common in JSON) would
            # otherwise render as "1.0d 2.0h".
            uptime_secs = int(uptime_secs)
            days = uptime_secs // 86400
            hours = (uptime_secs % 86400) // 3600
            srv["uptime"] = f"{days}d {hours}h"

        # Receiving a system payload implies the agent (and host) is up.
        srv["online"] = True

        logger.debug(
            "[UNRAID] %s: MQTT enriched — CPU %.1f%% %.0f°C, RAM %.1f%%",
            name,
            cpu.get("usage_pct", 0),
            cpu.get("temp_c", 0) or 0,  # "or 0" guards a None temp
            ram.get("pct", 0),
        )
||||
@router.get("/servers")
async def get_servers() -> Dict[str, Any]:
    """Return status information for all configured Unraid servers.

    Response shape::

        {
            "servers": [ ... server dicts ... ]
        }

    All data comes from the MQTT message store — no HTTP polling,
    no API keys, no cache needed (MQTT data is always fresh).

    On failure the response carries ``"error": True`` plus a ``"message"``
    instead of raising, so the frontend can degrade gracefully.
    """
    settings = get_settings()

    # One ServerConfig per configured server; fall back to the server
    # name when no explicit MQTT topic prefix is configured.
    server_configs: List[ServerConfig] = [
        ServerConfig(
            name=srv.name,
            host=srv.host,
            port=srv.port,
            mqtt_prefix=getattr(srv, "mqtt_prefix", "") or srv.name,
        )
        for srv in settings.unraid_servers
    ]

    if not server_configs:
        return {"servers": []}

    try:
        # Synchronous read from the in-memory MQTT store — no network I/O.
        servers_data: List[Dict[str, Any]] = fetch_all_servers_mqtt(
            server_configs, mqtt_service.store
        )
    except Exception as exc:  # boundary handler: report, don't crash
        logger.exception("Failed to read Unraid server data from MQTT")
        return {
            "servers": [],
            "error": True,
            "message": str(exc),
        }

    # Overlay the freshest per-server system metrics from MQTT.
    _enrich_from_mqtt(servers_data)

    return {"servers": servers_data}
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue