2026-03-02 23:25:57 +01:00
|
|
|
"""Unraid server stats — MQTT-only data source.
|
|
|
|
|
|
|
|
|
|
Reads all server data directly from the MQTT message store, which is
|
|
|
|
|
populated by the Unraid MQTT Agent running on each server. This
|
|
|
|
|
eliminates the need for GraphQL/REST API keys or HTTP polling.
|
|
|
|
|
|
|
|
|
|
MQTT topics used per server (prefix = e.g. "Adriahub" or "unraid-daddelolymp"):
|
|
|
|
|
{prefix}/system — CPU, RAM, uptime, temps, hostname, version
|
|
|
|
|
{prefix}/docker/containers — Docker container list with per-container stats
|
|
|
|
|
{prefix}/shares — Share names, free/used/total bytes
|
|
|
|
|
{prefix}/disks — Disk info with temps, SMART, model
|
|
|
|
|
{prefix}/array — Array state
|
|
|
|
|
{prefix}/availability — "online" / "offline"
|
|
|
|
|
"""
|
|
|
|
|
|
2026-03-02 01:48:51 +01:00
|
|
|
from __future__ import annotations
|
|
|
|
|
|
2026-03-02 17:45:23 +01:00
|
|
|
import logging
|
2026-03-02 23:25:57 +01:00
|
|
|
import time
|
|
|
|
|
from dataclasses import dataclass
|
2026-03-02 01:48:51 +01:00
|
|
|
from typing import Any, Dict, List, Optional
|
|
|
|
|
|
2026-03-02 17:45:23 +01:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
2026-03-02 01:48:51 +01:00
|
|
|
|
|
|
|
|
@dataclass
class ServerConfig:
    """Configuration for a single Unraid server."""

    # Display name; also used as the MQTT topic prefix when mqtt_prefix is empty.
    name: str
    # Hostname/IP, echoed into the stats payload for the API response.
    host: str = ""
    # Topic prefix the server's MQTT agent publishes under (e.g. "Adriahub").
    mqtt_prefix: str = ""
    # Deprecated — kept for backward compat with old DB rows
    api_key: str = ""
    port: int = 80
|
|
|
|
|
|
|
|
|
|
|
2026-03-02 23:25:57 +01:00
|
|
|
def _empty_stats(name: str, host: str = "") -> Dict[str, Any]:
|
|
|
|
|
"""Return a default stats dictionary for a server with no data yet."""
|
2026-03-02 01:48:51 +01:00
|
|
|
return {
|
2026-03-02 23:25:57 +01:00
|
|
|
"name": name,
|
|
|
|
|
"host": host,
|
2026-03-02 01:48:51 +01:00
|
|
|
"online": False,
|
|
|
|
|
"uptime": "",
|
2026-03-02 23:25:57 +01:00
|
|
|
"cpu": {"usage_pct": 0, "cores": 0, "temp_c": None, "threads": 0, "brand": ""},
|
2026-03-02 01:48:51 +01:00
|
|
|
"ram": {"used_gb": 0, "total_gb": 0, "pct": 0},
|
|
|
|
|
"array": {"status": "unknown", "disks": []},
|
|
|
|
|
"docker": {"running": 0, "containers": []},
|
2026-03-02 23:25:57 +01:00
|
|
|
"shares": [],
|
|
|
|
|
"disks": [],
|
2026-03-02 01:48:51 +01:00
|
|
|
"error": None,
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2026-03-02 23:25:57 +01:00
|
|
|
def _format_uptime(seconds: int) -> str:
|
|
|
|
|
"""Convert uptime seconds to a human-readable string."""
|
|
|
|
|
days = seconds // 86400
|
|
|
|
|
hours = (seconds % 86400) // 3600
|
|
|
|
|
if days > 0:
|
|
|
|
|
return f"{days}d {hours}h"
|
|
|
|
|
return f"{hours}h"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _parse_system(data: Dict[str, Any], result: Dict[str, Any]) -> None:
|
|
|
|
|
"""Parse the ``{prefix}/system`` MQTT payload."""
|
|
|
|
|
# CPU
|
|
|
|
|
cpu_pct = data.get("cpu_usage_percent")
|
|
|
|
|
if cpu_pct is not None:
|
|
|
|
|
result["cpu"]["usage_pct"] = round(float(cpu_pct), 1)
|
|
|
|
|
result["cpu"]["cores"] = data.get("cpu_cores", 0)
|
|
|
|
|
result["cpu"]["threads"] = data.get("cpu_threads", 0)
|
|
|
|
|
result["cpu"]["brand"] = data.get("cpu_model", "")
|
|
|
|
|
cpu_temp = data.get("cpu_temp_celsius")
|
|
|
|
|
if cpu_temp is not None:
|
|
|
|
|
result["cpu"]["temp_c"] = cpu_temp
|
|
|
|
|
mb_temp = data.get("motherboard_temp_celsius")
|
|
|
|
|
if mb_temp is not None:
|
|
|
|
|
result["cpu"]["mb_temp_c"] = mb_temp
|
|
|
|
|
|
|
|
|
|
# RAM
|
|
|
|
|
ram_pct = data.get("ram_usage_percent")
|
|
|
|
|
if ram_pct is not None:
|
|
|
|
|
result["ram"]["pct"] = round(float(ram_pct), 1)
|
|
|
|
|
ram_total = data.get("ram_total_bytes")
|
|
|
|
|
if ram_total:
|
|
|
|
|
result["ram"]["total_gb"] = round(ram_total / (1024 ** 3), 1)
|
|
|
|
|
ram_used = data.get("ram_used_bytes")
|
|
|
|
|
if ram_used:
|
|
|
|
|
result["ram"]["used_gb"] = round(ram_used / (1024 ** 3), 1)
|
|
|
|
|
|
|
|
|
|
# Uptime
|
|
|
|
|
uptime_secs = data.get("uptime_seconds")
|
|
|
|
|
if uptime_secs:
|
|
|
|
|
result["uptime"] = _format_uptime(int(uptime_secs))
|
|
|
|
|
|
|
|
|
|
# Extra metadata
|
|
|
|
|
result["version"] = data.get("version", "")
|
|
|
|
|
result["kernel"] = data.get("kernel_version", "")
|
|
|
|
|
result["motherboard"] = data.get("server_model", "")
|
2026-03-02 22:30:05 +01:00
|
|
|
|
2026-03-02 23:25:57 +01:00
|
|
|
result["online"] = True
|
2026-03-02 22:30:05 +01:00
|
|
|
|
|
|
|
|
|
2026-03-02 23:25:57 +01:00
|
|
|
def _parse_docker(containers_list: List[Dict[str, Any]], result: Dict[str, Any]) -> None:
|
|
|
|
|
"""Parse the ``{prefix}/docker/containers`` MQTT payload."""
|
2026-03-02 22:30:05 +01:00
|
|
|
containers: List[Dict[str, Any]] = []
|
|
|
|
|
running_count = 0
|
2026-03-02 23:25:57 +01:00
|
|
|
|
|
|
|
|
for c in containers_list:
|
2026-03-02 22:30:05 +01:00
|
|
|
state = c.get("state", "unknown")
|
2026-03-02 23:25:57 +01:00
|
|
|
is_running = state == "running"
|
2026-03-02 22:30:05 +01:00
|
|
|
if is_running:
|
|
|
|
|
running_count += 1
|
|
|
|
|
containers.append({
|
2026-03-02 23:25:57 +01:00
|
|
|
"name": c.get("name", "unknown"),
|
2026-03-02 22:30:05 +01:00
|
|
|
"status": c.get("status", ""),
|
|
|
|
|
"image": c.get("image", ""),
|
|
|
|
|
"running": is_running,
|
|
|
|
|
})
|
2026-03-02 23:25:57 +01:00
|
|
|
|
2026-03-02 22:30:05 +01:00
|
|
|
result["docker"]["running"] = running_count
|
|
|
|
|
result["docker"]["containers"] = containers
|
|
|
|
|
|
2026-03-02 23:25:57 +01:00
|
|
|
|
|
|
|
|
def _parse_shares(shares_list: List[Dict[str, Any]], result: Dict[str, Any]) -> None:
|
|
|
|
|
"""Parse the ``{prefix}/shares`` MQTT payload."""
|
2026-03-02 22:30:05 +01:00
|
|
|
shares: List[Dict[str, Any]] = []
|
2026-03-02 23:25:57 +01:00
|
|
|
for s in shares_list:
|
|
|
|
|
free_bytes = s.get("free_bytes", 0)
|
|
|
|
|
total_bytes = s.get("total_bytes", 0)
|
2026-03-02 22:30:05 +01:00
|
|
|
shares.append({
|
|
|
|
|
"name": s.get("name", ""),
|
2026-03-02 23:25:57 +01:00
|
|
|
"free_gb": round(free_bytes / (1024 ** 3), 1) if free_bytes else 0,
|
|
|
|
|
"total_gb": round(total_bytes / (1024 ** 3), 1) if total_bytes else 0,
|
2026-03-02 22:30:05 +01:00
|
|
|
})
|
|
|
|
|
result["shares"] = shares
|
|
|
|
|
|
|
|
|
|
|
2026-03-02 23:25:57 +01:00
|
|
|
def _parse_disks(disks_list: List[Dict[str, Any]], result: Dict[str, Any]) -> None:
|
|
|
|
|
"""Parse the ``{prefix}/disks`` MQTT payload."""
|
|
|
|
|
disks: List[Dict[str, Any]] = []
|
|
|
|
|
for d in disks_list:
|
|
|
|
|
# Skip placeholder disks (empty parity slots etc.)
|
|
|
|
|
if d.get("status") == "DISK_NP":
|
|
|
|
|
continue
|
|
|
|
|
disks.append({
|
|
|
|
|
"name": d.get("name", ""),
|
|
|
|
|
"model": d.get("model", ""),
|
|
|
|
|
"temp_c": d.get("temperature_celsius", None),
|
|
|
|
|
"size_gb": round(d.get("size_bytes", 0) / (1024 ** 3), 1) if d.get("size_bytes") else 0,
|
|
|
|
|
"smart_status": d.get("smart_status", ""),
|
|
|
|
|
"role": d.get("role", ""),
|
2026-03-02 01:48:51 +01:00
|
|
|
})
|
2026-03-02 23:25:57 +01:00
|
|
|
result["disks"] = disks
|
2026-03-02 01:48:51 +01:00
|
|
|
|
|
|
|
|
|
2026-03-02 23:25:57 +01:00
|
|
|
def _parse_array(data: Dict[str, Any], result: Dict[str, Any]) -> None:
|
|
|
|
|
"""Parse the ``{prefix}/array`` MQTT payload."""
|
|
|
|
|
result["array"]["status"] = data.get("state", "unknown").lower()
|
|
|
|
|
result["array"]["num_disks"] = data.get("num_disks", 0)
|
2026-03-02 01:48:51 +01:00
|
|
|
|
|
|
|
|
|
2026-03-02 23:25:57 +01:00
|
|
|
def fetch_server_from_mqtt(
    server: ServerConfig,
    store: Dict[str, Any],
) -> Dict[str, Any]:
    """Build complete server stats from the MQTT message store.

    Args:
        server: Server configuration with ``mqtt_prefix``.
        store: The ``mqtt_service.store`` dict (topic → MqttMessage).

    Returns:
        Server stats dictionary ready for the API response.
    """
    prefix = server.mqtt_prefix or server.name
    result = _empty_stats(server.name, server.host)

    def payload_for(topic: str) -> Optional[Any]:
        # Topics that were never published have no store entry at all.
        message = store.get(topic)
        return None if message is None else message.payload

    # Availability (last-will style topic): explicit online/offline strings.
    availability = payload_for(f"{prefix}/availability")
    if availability == "online":
        result["online"] = True
    elif availability == "offline":
        result["online"] = False
        result["error"] = "Server offline (MQTT availability)"

    # System metrics (CPU, RAM, uptime, temps).
    system_payload = payload_for(f"{prefix}/system")
    if system_payload and isinstance(system_payload, dict):
        _parse_system(system_payload, result)
    elif result["online"] is False:
        # No system data means the MQTT agent isn't reporting.
        result["error"] = "No MQTT data available"

    # Docker containers.
    docker_payload = payload_for(f"{prefix}/docker/containers")
    if docker_payload and isinstance(docker_payload, list):
        _parse_docker(docker_payload, result)

    # Shares.
    shares_payload = payload_for(f"{prefix}/shares")
    if shares_payload and isinstance(shares_payload, list):
        _parse_shares(shares_payload, result)

    # Disks.
    disks_payload = payload_for(f"{prefix}/disks")
    if disks_payload and isinstance(disks_payload, list):
        _parse_disks(disks_payload, result)

    # Array state.
    array_payload = payload_for(f"{prefix}/array")
    if array_payload and isinstance(array_payload, dict):
        _parse_array(array_payload, result)

    # Flag stale data using the system topic's receive timestamp.
    system_message = store.get(f"{prefix}/system")
    if system_message:
        age = time.time() - system_message.timestamp
        if age > 120:  # More than 2 minutes old
            result["stale"] = True
            logger.warning(
                "[UNRAID] %s: MQTT data is %.0fs old (stale)",
                server.name, age,
            )

    logger.debug(
        "[UNRAID] %s: MQTT — CPU %.1f%% (%d°C), RAM %.1f%%, Docker %d/%d",
        server.name,
        result["cpu"].get("usage_pct", 0),
        result["cpu"].get("temp_c", 0) or 0,
        result["ram"].get("pct", 0),
        result["docker"]["running"],
        len(result["docker"]["containers"]),
    )

    return result
|
|
|
|
|
|
|
|
|
|
|
2026-03-02 23:25:57 +01:00
|
|
|
def fetch_all_servers_mqtt(
|
|
|
|
|
servers: List[ServerConfig],
|
|
|
|
|
store: Dict[str, Any],
|
|
|
|
|
) -> List[Dict[str, Any]]:
|
|
|
|
|
"""Fetch stats for all configured servers from the MQTT store.
|
2026-03-02 01:48:51 +01:00
|
|
|
|
2026-03-02 23:25:57 +01:00
|
|
|
This is synchronous — no HTTP calls, just reading in-memory data.
|
2026-03-02 01:48:51 +01:00
|
|
|
"""
|
|
|
|
|
if not servers:
|
|
|
|
|
return []
|
2026-03-02 23:25:57 +01:00
|
|
|
return [fetch_server_from_mqtt(srv, store) for srv in servers]
|