refactor: complete rewrite as React+FastAPI dashboard
Replace monolithic Jinja2 template with modern stack: Backend (FastAPI): - Modular router/service architecture - Async PostgreSQL (asyncpg) for news from n8n pipeline - Live Unraid server stats (2 servers via API) - Home Assistant, Vikunja tasks, weather (wttr.in) - WebSocket broadcast for real-time updates (15s) - TTL cache per endpoint, all config via ENV vars Frontend (React + Vite + TypeScript): - Glassmorphism dark theme with Tailwind CSS - Responsive grid: mobile/tablet/desktop/ultrawide - Weather cards, hourly forecast, news with category tabs - Server stats (CPU ring, RAM bar, Docker list) - Home Assistant controls, task management - Live clock, WebSocket connection indicator Infrastructure: - Multi-stage Dockerfile (node:22-alpine + python:3.11-slim) - docker-compose with full ENV configuration - Kaniko CI/CD pipeline for GitLab registry Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
4bbc125a67
commit
9f7330e217
48 changed files with 6390 additions and 1461 deletions
0
server/routers/__init__.py
Normal file
0
server/routers/__init__.py
Normal file
123
server/routers/dashboard.py
Normal file
123
server/routers/dashboard.py
Normal file
|
|
@@ -0,0 +1,123 @@
|
|||
"""Dashboard aggregate router -- combined endpoint and WebSocket push."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
|
||||
|
||||
from server.routers.homeassistant import get_ha
|
||||
from server.routers.news import get_news_articles
|
||||
from server.routers.servers import get_servers
|
||||
from server.routers.tasks import get_tasks
|
||||
from server.routers.weather import get_weather
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(tags=["dashboard"])
|
||||
|
||||
# Connected WebSocket clients
|
||||
clients: List[WebSocket] = []
|
||||
|
||||
|
||||
@router.get("/api/all")
|
||||
async def get_all() -> Dict[str, Any]:
|
||||
"""Fetch every data source in parallel and return a single combined dict.
|
||||
|
||||
Response shape::
|
||||
|
||||
{
|
||||
"weather": { ... },
|
||||
"news": { ... },
|
||||
"servers": { ... },
|
||||
"ha": { ... },
|
||||
"tasks": { ... },
|
||||
"timestamp": "ISO-8601 string"
|
||||
}
|
||||
|
||||
Individual sections that fail will contain ``{"error": true, "message": "..."}``.
|
||||
"""
|
||||
|
||||
results = await asyncio.gather(
|
||||
_safe(get_weather, "weather"),
|
||||
_safe(lambda: get_news_articles(limit=20, offset=0, category=None), "news"),
|
||||
_safe(get_servers, "servers"),
|
||||
_safe(get_ha, "ha"),
|
||||
_safe(get_tasks, "tasks"),
|
||||
)
|
||||
|
||||
weather_data, news_data, servers_data, ha_data, tasks_data = results
|
||||
|
||||
return {
|
||||
"weather": weather_data,
|
||||
"news": news_data,
|
||||
"servers": servers_data,
|
||||
"ha": ha_data,
|
||||
"tasks": tasks_data,
|
||||
"timestamp": datetime.now(timezone.utc).isoformat(),
|
||||
}
|
||||
|
||||
|
||||
@router.websocket("/ws")
|
||||
async def ws_endpoint(ws: WebSocket) -> None:
|
||||
"""WebSocket that pushes fresh dashboard data on every client ping.
|
||||
|
||||
The client should send periodic text messages (e.g. ``"ping"``) to request
|
||||
an update. If no message arrives within 20 seconds the server sends a
|
||||
refresh anyway, keeping the connection alive.
|
||||
"""
|
||||
|
||||
await ws.accept()
|
||||
clients.append(ws)
|
||||
logger.info("WebSocket client connected (%d total)", len(clients))
|
||||
|
||||
try:
|
||||
while True:
|
||||
# Wait for a client ping / keepalive; refresh on timeout too.
|
||||
try:
|
||||
_msg = await asyncio.wait_for(ws.receive_text(), timeout=20.0)
|
||||
except asyncio.TimeoutError:
|
||||
pass
|
||||
|
||||
# Build and push the latest data
|
||||
try:
|
||||
data = await get_all()
|
||||
await ws.send_json(data)
|
||||
except Exception as exc:
|
||||
logger.exception("Error sending WebSocket payload")
|
||||
# Try to send a lightweight error frame; if that also fails the
|
||||
# outer handler will close the connection.
|
||||
try:
|
||||
await ws.send_json({"error": True, "message": str(exc)})
|
||||
except Exception:
|
||||
break
|
||||
|
||||
except WebSocketDisconnect:
|
||||
logger.info("WebSocket client disconnected")
|
||||
except Exception as exc:
|
||||
logger.exception("Unexpected WebSocket error")
|
||||
finally:
|
||||
if ws in clients:
|
||||
clients.remove(ws)
|
||||
logger.info("WebSocket clients remaining: %d", len(clients))
|
||||
|
||||
|
||||
# -- internal helpers ---------------------------------------------------------
|
||||
|
||||
async def _safe(coro_or_callable: Any, label: str) -> Dict[str, Any]:
|
||||
"""Call an async function and return its result, or an error dict."""
|
||||
try:
|
||||
if asyncio.iscoroutinefunction(coro_or_callable):
|
||||
return await coro_or_callable()
|
||||
# Support lambdas that return coroutines
|
||||
result = coro_or_callable()
|
||||
if asyncio.iscoroutine(result):
|
||||
return await result
|
||||
return result
|
||||
except Exception as exc:
|
||||
logger.exception("Failed to fetch %s data for dashboard", label)
|
||||
return {"error": True, "message": str(exc)}
|
||||
47
server/routers/homeassistant.py
Normal file
47
server/routers/homeassistant.py
Normal file
|
|
@@ -0,0 +1,47 @@
|
|||
"""Home Assistant data router."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from fastapi import APIRouter
|
||||
|
||||
from server.cache import cache
|
||||
from server.config import settings
|
||||
from server.services.ha_service import fetch_ha_data
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api", tags=["homeassistant"])
|
||||
|
||||
CACHE_KEY = "ha"
|
||||
|
||||
|
||||
@router.get("/ha")
|
||||
async def get_ha() -> Dict[str, Any]:
|
||||
"""Return Home Assistant entity data.
|
||||
|
||||
The exact shape depends on what ``fetch_ha_data`` returns; on failure an
|
||||
error stub is returned instead::
|
||||
|
||||
{ "error": true, "message": "..." }
|
||||
"""
|
||||
|
||||
# --- cache hit? -----------------------------------------------------------
|
||||
cached = await cache.get(CACHE_KEY)
|
||||
if cached is not None:
|
||||
return cached
|
||||
|
||||
# --- cache miss -----------------------------------------------------------
|
||||
try:
|
||||
data: Dict[str, Any] = await fetch_ha_data(
|
||||
settings.ha_url,
|
||||
settings.ha_token,
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.exception("Failed to fetch Home Assistant data")
|
||||
return {"error": True, "message": str(exc)}
|
||||
|
||||
await cache.set(CACHE_KEY, data, settings.ha_cache_ttl)
|
||||
return data
|
||||
80
server/routers/news.py
Normal file
80
server/routers/news.py
Normal file
|
|
@@ -0,0 +1,80 @@
|
|||
"""News articles router -- paginated, filterable by category."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Query
|
||||
|
||||
from server.cache import cache
|
||||
from server.config import settings
|
||||
from server.services.news_service import get_news, get_news_count
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api", tags=["news"])
|
||||
|
||||
|
||||
def _cache_key(limit: int, offset: int, category: Optional[str]) -> str:
|
||||
return f"news:{limit}:{offset}:{category}"
|
||||
|
||||
|
||||
@router.get("/news")
|
||||
async def get_news_articles(
|
||||
limit: int = Query(default=20, le=50, ge=1),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
category: Optional[str] = Query(default=None),
|
||||
) -> Dict[str, Any]:
|
||||
"""Return a paginated list of news articles.
|
||||
|
||||
Response shape::
|
||||
|
||||
{
|
||||
"articles": [ ... ],
|
||||
"total": int,
|
||||
"limit": int,
|
||||
"offset": int,
|
||||
}
|
||||
"""
|
||||
|
||||
key = _cache_key(limit, offset, category)
|
||||
|
||||
# --- cache hit? -----------------------------------------------------------
|
||||
cached = await cache.get(key)
|
||||
if cached is not None:
|
||||
return cached
|
||||
|
||||
# --- cache miss -----------------------------------------------------------
|
||||
articles: List[Dict[str, Any]] = []
|
||||
total: int = 0
|
||||
|
||||
try:
|
||||
articles = await get_news(limit=limit, offset=offset, category=category, max_age_hours=settings.news_max_age_hours)
|
||||
except Exception as exc:
|
||||
logger.exception("Failed to fetch news articles")
|
||||
return {
|
||||
"articles": [],
|
||||
"total": 0,
|
||||
"limit": limit,
|
||||
"offset": offset,
|
||||
"error": True,
|
||||
"message": str(exc),
|
||||
}
|
||||
|
||||
try:
|
||||
total = await get_news_count(max_age_hours=settings.news_max_age_hours, category=category)
|
||||
except Exception as exc:
|
||||
logger.exception("Failed to fetch news count")
|
||||
# We still have articles -- return them with total = len(articles)
|
||||
total = len(articles)
|
||||
|
||||
payload: Dict[str, Any] = {
|
||||
"articles": articles,
|
||||
"total": total,
|
||||
"limit": limit,
|
||||
"offset": offset,
|
||||
}
|
||||
|
||||
await cache.set(key, payload, settings.news_cache_ttl)
|
||||
return payload
|
||||
64
server/routers/servers.py
Normal file
64
server/routers/servers.py
Normal file
|
|
@@ -0,0 +1,64 @@
|
|||
"""Unraid servers status router."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from fastapi import APIRouter
|
||||
|
||||
from server.cache import cache
|
||||
from server.config import settings
|
||||
from server.services.unraid_service import ServerConfig, fetch_all_servers
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api", tags=["servers"])
|
||||
|
||||
CACHE_KEY = "servers"
|
||||
|
||||
|
||||
@router.get("/servers")
|
||||
async def get_servers() -> Dict[str, Any]:
|
||||
"""Return status information for all configured Unraid servers.
|
||||
|
||||
Response shape::
|
||||
|
||||
{
|
||||
"servers": [ ... server dicts ... ]
|
||||
}
|
||||
"""
|
||||
|
||||
# --- cache hit? -----------------------------------------------------------
|
||||
cached = await cache.get(CACHE_KEY)
|
||||
if cached is not None:
|
||||
return cached
|
||||
|
||||
# --- cache miss -----------------------------------------------------------
|
||||
server_configs: List[ServerConfig] = [
|
||||
ServerConfig(
|
||||
name=srv.name,
|
||||
host=srv.host,
|
||||
api_key=srv.api_key,
|
||||
port=srv.port,
|
||||
)
|
||||
for srv in settings.unraid_servers
|
||||
]
|
||||
|
||||
servers_data: List[Dict[str, Any]] = []
|
||||
try:
|
||||
servers_data = await fetch_all_servers(server_configs)
|
||||
except Exception as exc:
|
||||
logger.exception("Failed to fetch Unraid server data")
|
||||
return {
|
||||
"servers": [],
|
||||
"error": True,
|
||||
"message": str(exc),
|
||||
}
|
||||
|
||||
payload: Dict[str, Any] = {
|
||||
"servers": servers_data,
|
||||
}
|
||||
|
||||
await cache.set(CACHE_KEY, payload, settings.unraid_cache_ttl)
|
||||
return payload
|
||||
47
server/routers/tasks.py
Normal file
47
server/routers/tasks.py
Normal file
|
|
@@ -0,0 +1,47 @@
|
|||
"""Vikunja tasks router."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from fastapi import APIRouter
|
||||
|
||||
from server.cache import cache
|
||||
from server.config import settings
|
||||
from server.services.vikunja_service import fetch_tasks
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api", tags=["tasks"])
|
||||
|
||||
CACHE_KEY = "tasks"
|
||||
|
||||
|
||||
@router.get("/tasks")
|
||||
async def get_tasks() -> Dict[str, Any]:
|
||||
"""Return Vikunja task data.
|
||||
|
||||
The exact shape depends on what ``fetch_tasks`` returns; on failure an
|
||||
error stub is returned instead::
|
||||
|
||||
{ "error": true, "message": "..." }
|
||||
"""
|
||||
|
||||
# --- cache hit? -----------------------------------------------------------
|
||||
cached = await cache.get(CACHE_KEY)
|
||||
if cached is not None:
|
||||
return cached
|
||||
|
||||
# --- cache miss -----------------------------------------------------------
|
||||
try:
|
||||
data: Dict[str, Any] = await fetch_tasks(
|
||||
settings.vikunja_url,
|
||||
settings.vikunja_token,
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.exception("Failed to fetch Vikunja tasks")
|
||||
return {"error": True, "message": str(exc)}
|
||||
|
||||
await cache.set(CACHE_KEY, data, settings.vikunja_cache_ttl)
|
||||
return data
|
||||
85
server/routers/weather.py
Normal file
85
server/routers/weather.py
Normal file
|
|
@@ -0,0 +1,85 @@
|
|||
"""Weather data router -- primary + secondary locations and hourly forecast."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from fastapi import APIRouter
|
||||
|
||||
from server.cache import cache
|
||||
from server.config import settings
|
||||
from server.services.weather_service import fetch_hourly_forecast, fetch_weather
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api", tags=["weather"])
|
||||
|
||||
CACHE_KEY = "weather"
|
||||
|
||||
|
||||
@router.get("/weather")
|
||||
async def get_weather() -> Dict[str, Any]:
|
||||
"""Return weather for both configured locations plus an hourly forecast.
|
||||
|
||||
The response shape is::
|
||||
|
||||
{
|
||||
"primary": { ... weather dict or error stub },
|
||||
"secondary": { ... weather dict or error stub },
|
||||
"hourly": [ ... forecast entries or empty list ],
|
||||
}
|
||||
"""
|
||||
|
||||
# --- cache hit? -----------------------------------------------------------
|
||||
cached = await cache.get(CACHE_KEY)
|
||||
if cached is not None:
|
||||
return cached
|
||||
|
||||
# --- cache miss -- fetch all three in parallel ----------------------------
|
||||
primary_data: Dict[str, Any] = {}
|
||||
secondary_data: Dict[str, Any] = {}
|
||||
hourly_data: List[Dict[str, Any]] = []
|
||||
|
||||
results = await asyncio.gather(
|
||||
_safe_fetch_weather(settings.weather_location),
|
||||
_safe_fetch_weather(settings.weather_location_secondary),
|
||||
_safe_fetch_hourly(settings.weather_location),
|
||||
return_exceptions=False, # we handle errors inside the helpers
|
||||
)
|
||||
|
||||
primary_data = results[0]
|
||||
secondary_data = results[1]
|
||||
hourly_data = results[2]
|
||||
|
||||
payload: Dict[str, Any] = {
|
||||
"primary": primary_data,
|
||||
"secondary": secondary_data,
|
||||
"hourly": hourly_data,
|
||||
}
|
||||
|
||||
await cache.set(CACHE_KEY, payload, settings.weather_cache_ttl)
|
||||
return payload
|
||||
|
||||
|
||||
# -- internal helpers ---------------------------------------------------------
|
||||
|
||||
async def _safe_fetch_weather(location: str) -> Dict[str, Any]:
    """Fetch weather for *location*, returning an error stub on failure."""
    try:
        return await fetch_weather(location)
    except Exception as exc:
        logger.exception("Failed to fetch weather for %s", location)
        # Include the location so the frontend can label the failed card.
        return {"error": True, "message": str(exc), "location": location}
|
||||
|
||||
|
||||
async def _safe_fetch_hourly(location: str) -> List[Dict[str, Any]]:
    """Fetch the hourly forecast for *location*, returning ``[]`` on failure."""
    try:
        return await fetch_hourly_forecast(location)
    except Exception:
        logger.exception("Failed to fetch hourly forecast for %s", location)
        # An empty forecast list keeps the payload shape stable for clients.
        return []
|
||||
Loading…
Add table
Add a link
Reference in a new issue