feat: add Admin Panel with JWT auth, DB settings, and integration management
Complete admin backend with login, where all integrations (weather, news, Home Assistant, Vikunja, Unraid, MQTT) can be configured via web UI instead of ENV variables. Two-layer config: ENV seeds DB on first start, then DB is source of truth. Auto-migration system on startup. Backend: db.py shared pool, auth.py JWT, settings_service CRUD, seed_service, admin router (protected), test_connections per integration, config.py rewrite. Frontend: react-router v6, login page, admin layout with sidebar, 8 settings pages (General, Weather, News, HA, Vikunja, Unraid, MQTT, ChangePassword), shared IntegrationForm + TestButton components. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
89ed0c6d0a
commit
f6a42c2dd2
40 changed files with 3487 additions and 311 deletions
|
|
@ -1,43 +1,11 @@
|
|||
"""News service — queries market_news from PostgreSQL via shared pool."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncpg
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
_pool: Optional[asyncpg.Pool] = None
|
||||
|
||||
|
||||
async def init_pool(
|
||||
host: str,
|
||||
port: int,
|
||||
dbname: str,
|
||||
user: str,
|
||||
password: str,
|
||||
) -> None:
|
||||
"""Initialise the global asyncpg connection pool.
|
||||
|
||||
Call once at application startup.
|
||||
"""
|
||||
global _pool
|
||||
_pool = await asyncpg.create_pool(
|
||||
host=host,
|
||||
port=port,
|
||||
database=dbname,
|
||||
user=user,
|
||||
password=password,
|
||||
min_size=1,
|
||||
max_size=5,
|
||||
)
|
||||
|
||||
|
||||
async def close_pool() -> None:
|
||||
"""Close the global asyncpg connection pool.
|
||||
|
||||
Call once at application shutdown.
|
||||
"""
|
||||
global _pool
|
||||
if _pool is not None:
|
||||
await _pool.close()
|
||||
_pool = None
|
||||
from server.db import get_pool
|
||||
|
||||
|
||||
def _row_to_dict(row: asyncpg.Record) -> Dict[str, Any]:
|
||||
|
|
@ -54,19 +22,8 @@ async def get_news(
|
|||
category: Optional[str] = None,
|
||||
max_age_hours: int = 48,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Fetch recent news articles from the market_news table.
|
||||
|
||||
Args:
|
||||
limit: Maximum number of rows to return.
|
||||
offset: Number of rows to skip (for pagination).
|
||||
category: Optional category filter (exact match).
|
||||
max_age_hours: Only return articles published within this many hours.
|
||||
|
||||
Returns:
|
||||
List of news article dictionaries.
|
||||
"""
|
||||
if _pool is None:
|
||||
raise RuntimeError("Database pool is not initialised. Call init_pool() first.")
|
||||
"""Fetch recent news articles from the market_news table."""
|
||||
pool = await get_pool()
|
||||
|
||||
params: List[Any] = []
|
||||
param_idx = 1
|
||||
|
|
@ -86,7 +43,7 @@ async def get_news(
|
|||
params.append(limit)
|
||||
params.append(offset)
|
||||
|
||||
async with _pool.acquire() as conn:
|
||||
async with pool.acquire() as conn:
|
||||
rows = await conn.fetch(base_query, *params)
|
||||
|
||||
return [_row_to_dict(row) for row in rows]
|
||||
|
|
@ -96,17 +53,8 @@ async def get_news_count(
|
|||
max_age_hours: int = 48,
|
||||
category: Optional[str] = None,
|
||||
) -> int:
|
||||
"""Return the total count of recent news articles.
|
||||
|
||||
Args:
|
||||
max_age_hours: Only count articles published within this many hours.
|
||||
category: Optional category filter.
|
||||
|
||||
Returns:
|
||||
Integer count.
|
||||
"""
|
||||
if _pool is None:
|
||||
raise RuntimeError("Database pool is not initialised. Call init_pool() first.")
|
||||
"""Return the total count of recent news articles."""
|
||||
pool = await get_pool()
|
||||
|
||||
params: List[Any] = []
|
||||
param_idx = 1
|
||||
|
|
@ -121,23 +69,15 @@ async def get_news_count(
|
|||
query += f" AND category = ${param_idx}"
|
||||
params.append(category)
|
||||
|
||||
async with _pool.acquire() as conn:
|
||||
async with pool.acquire() as conn:
|
||||
row = await conn.fetchrow(query, *params)
|
||||
|
||||
return int(row["cnt"]) if row else 0
|
||||
|
||||
|
||||
async def get_categories(max_age_hours: int = 48) -> List[str]:
|
||||
"""Return distinct categories from recent news articles.
|
||||
|
||||
Args:
|
||||
max_age_hours: Only consider articles published within this many hours.
|
||||
|
||||
Returns:
|
||||
Sorted list of category strings.
|
||||
"""
|
||||
if _pool is None:
|
||||
raise RuntimeError("Database pool is not initialised. Call init_pool() first.")
|
||||
"""Return distinct categories from recent news articles."""
|
||||
pool = await get_pool()
|
||||
|
||||
query = (
|
||||
"SELECT DISTINCT category "
|
||||
|
|
@ -147,7 +87,7 @@ async def get_categories(max_age_hours: int = 48) -> List[str]:
|
|||
"ORDER BY category"
|
||||
)
|
||||
|
||||
async with _pool.acquire() as conn:
|
||||
async with pool.acquire() as conn:
|
||||
rows = await conn.fetch(query)
|
||||
|
||||
return [row["category"] for row in rows]
|
||||
|
|
|
|||
150
server/services/seed_service.py
Normal file
150
server/services/seed_service.py
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
"""First-run seeder: populates DB from ENV defaults when tables are empty."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import secrets
|
||||
|
||||
from server.auth import hash_password
|
||||
from server.db import get_pool
|
||||
from server.services import settings_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def seed_if_empty() -> None:
    """Check if admin tables are empty and seed with ENV-derived values.

    Idempotent by construction: each section (admin user, integrations,
    app settings) writes only when no matching rows exist yet, so a
    restart never overwrites values edited later via the admin UI.
    """
    # NOTE(review): the pool handle is never used below — presumably an
    # early warm-up of the shared pool before the settings_service calls;
    # confirm it is intentional.
    pool = await get_pool()

    # ---- Admin User ----
    user = await settings_service.get_admin_user()
    if user is None:
        admin_pw = os.getenv("ADMIN_PASSWORD", "")
        if not admin_pw:
            # No password configured: generate one and log it prominently —
            # this is the operator's only chance to see it.
            admin_pw = secrets.token_urlsafe(16)
            logger.warning(
                "=" * 60 + "\n"
                " No ADMIN_PASSWORD set — generated: %s\n"
                " Set ADMIN_PASSWORD env to use your own.\n" +
                "=" * 60,
                admin_pw,
            )
        await settings_service.create_admin_user("admin", hash_password(admin_pw))
        logger.info("Admin user seeded from ENV")

    # ---- Integrations ----
    # Seed only the integration types that are missing; existing rows
    # (possibly edited in the UI) are left untouched.
    existing = await settings_service.get_integrations()
    existing_types = {i["type"] for i in existing}

    seed_integrations = [
        {
            "type": "weather",
            "name": "Wetter (wttr.in)",
            "config": {
                "location": os.getenv("WEATHER_LOCATION", "Leverkusen"),
                "location_secondary": os.getenv("WEATHER_LOCATION_SECONDARY", "Rab,Croatia"),
            },
            "enabled": True,
            "display_order": 0,
        },
        {
            "type": "news",
            "name": "News (PostgreSQL)",
            "config": {
                "max_age_hours": int(os.getenv("NEWS_MAX_AGE_HOURS", "48")),
            },
            "enabled": True,
            "display_order": 1,
        },
        {
            "type": "ha",
            "name": "Home Assistant",
            "config": {
                "url": os.getenv("HA_URL", ""),
                "token": os.getenv("HA_TOKEN", ""),
            },
            # Enabled only when the corresponding ENV var is actually set.
            "enabled": bool(os.getenv("HA_URL")),
            "display_order": 2,
        },
        {
            "type": "vikunja",
            "name": "Vikunja Tasks",
            "config": {
                "url": os.getenv("VIKUNJA_URL", ""),
                "token": os.getenv("VIKUNJA_TOKEN", ""),
                # NOTE(review): hard-coded project-id defaults — confirm
                # these match the target Vikunja instance.
                "private_projects": [3, 4],
                "sams_projects": [2, 5],
            },
            "enabled": bool(os.getenv("VIKUNJA_URL")),
            "display_order": 3,
        },
        {
            "type": "unraid",
            "name": "Unraid Server",
            "config": {
                "servers": _parse_unraid_env(),
            },
            "enabled": bool(os.getenv("UNRAID_SERVERS")),
            "display_order": 4,
        },
        {
            "type": "mqtt",
            "name": "MQTT Broker",
            "config": {
                "host": os.getenv("MQTT_HOST", ""),
                "port": int(os.getenv("MQTT_PORT", "1883")),
                "username": os.getenv("MQTT_USERNAME", ""),
                "password": os.getenv("MQTT_PASSWORD", ""),
                "client_id": os.getenv("MQTT_CLIENT_ID", "daily-briefing"),
                "topics": _parse_mqtt_topics(),
            },
            "enabled": bool(os.getenv("MQTT_HOST")),
            "display_order": 5,
        },
    ]

    for seed in seed_integrations:
        if seed["type"] not in existing_types:
            await settings_service.upsert_integration(
                type_name=seed["type"],
                name=seed["name"],
                config=seed["config"],
                enabled=seed["enabled"],
                display_order=seed["display_order"],
            )
            logger.info("Seeded integration: %s", seed["type"])

    # ---- App Settings ----
    # Settings are seeded all-or-nothing: only when the table is empty.
    existing_settings = await settings_service.get_all_settings()
    if not existing_settings:
        # (key, value, value_type, category, label, description)
        default_settings = [
            ("weather_cache_ttl", "1800", "int", "cache", "Wetter Cache TTL", "Sekunden"),
            ("ha_cache_ttl", "30", "int", "cache", "HA Cache TTL", "Sekunden"),
            ("vikunja_cache_ttl", "60", "int", "cache", "Vikunja Cache TTL", "Sekunden"),
            ("unraid_cache_ttl", "15", "int", "cache", "Unraid Cache TTL", "Sekunden"),
            ("news_cache_ttl", "300", "int", "cache", "News Cache TTL", "Sekunden"),
            ("ws_interval", "15", "int", "general", "WebSocket Intervall", "Sekunden"),
        ]
        for key, value, vtype, cat, label, desc in default_settings:
            await settings_service.set_setting(key, value, vtype, cat, label, desc)
        logger.info("Seeded %d default settings", len(default_settings))
|
||||
|
||||
|
||||
def _parse_unraid_env() -> list:
|
||||
"""Parse UNRAID_SERVERS env var."""
|
||||
raw = os.getenv("UNRAID_SERVERS", "[]")
|
||||
try:
|
||||
return json.loads(raw)
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
return []
|
||||
|
||||
|
||||
def _parse_mqtt_topics() -> list:
|
||||
"""Parse MQTT_TOPICS env var."""
|
||||
raw = os.getenv("MQTT_TOPICS", "#")
|
||||
try:
|
||||
return json.loads(raw)
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
return [t.strip() for t in raw.split(",") if t.strip()]
|
||||
297
server/services/settings_service.py
Normal file
297
server/services/settings_service.py
Normal file
|
|
@ -0,0 +1,297 @@
|
|||
"""Database-backed settings, integrations, and user management."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from server.db import get_pool
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Admin User
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def get_admin_user() -> Optional[Dict[str, Any]]:
    """Fetch the single admin user record, or None if none exists yet."""
    db = await get_pool()
    async with db.acquire() as conn:
        record = await conn.fetchrow("SELECT * FROM admin_user LIMIT 1")
    if record is None:
        return None
    return dict(record)
|
||||
|
||||
|
||||
async def create_admin_user(username: str, password_hash: str) -> None:
    """Insert the initial admin user."""
    db = await get_pool()
    statement = "INSERT INTO admin_user (username, password_hash) VALUES ($1, $2)"
    async with db.acquire() as conn:
        await conn.execute(statement, username, password_hash)
    logger.info("Admin user '%s' created", username)
|
||||
|
||||
|
||||
async def update_admin_password(user_id: int, password_hash: str) -> None:
    """Persist a new password hash for the given admin user."""
    db = await get_pool()
    statement = "UPDATE admin_user SET password_hash = $1, updated_at = NOW() WHERE id = $2"
    async with db.acquire() as conn:
        await conn.execute(statement, password_hash, user_id)
    logger.info("Admin password updated (user_id=%d)", user_id)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# App Settings (key/value)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def get_all_settings() -> Dict[str, Any]:
    """Return all settings keyed by name, with values cast to their declared type."""
    db = await get_pool()
    async with db.acquire() as conn:
        rows = await conn.fetch("SELECT * FROM app_settings ORDER BY category, key")

    # One entry per key; the stored text value is decoded via _cast_value.
    return {
        row["key"]: {
            "value": _cast_value(row["value"], row["value_type"]),
            "value_type": row["value_type"],
            "category": row["category"],
            "label": row["label"],
            "description": row["description"],
        }
        for row in rows
    }
|
||||
|
||||
|
||||
async def get_setting(key: str) -> Optional[Any]:
    """Look up one setting and return its typed value, or None when absent."""
    db = await get_pool()
    async with db.acquire() as conn:
        record = await conn.fetchrow(
            "SELECT value, value_type FROM app_settings WHERE key = $1", key
        )
    if record is None:
        return None
    return _cast_value(record["value"], record["value_type"])
|
||||
|
||||
|
||||
async def set_setting(
    key: str,
    value: Any,
    value_type: str = "string",
    category: str = "general",
    label: str = "",
    description: str = "",
) -> None:
    """Upsert a single setting.

    Args:
        key: Unique setting key (the upsert's conflict target).
        value: Value to store; serialised with json.dumps when
            ``value_type`` is "json", otherwise via str().
        value_type: Declared type used by _cast_value on read
            ("string", "int", "bool", or "json").
        category: Grouping used when listing settings.
        label: Human-readable name.
        description: Longer help text.
    """
    pool = await get_pool()
    # JSON values get proper serialisation; everything else is stored as text.
    str_value = json.dumps(value) if value_type == "json" else str(value)
    async with pool.acquire() as conn:
        await conn.execute(
            """
            INSERT INTO app_settings (key, value, value_type, category, label, description, updated_at)
            VALUES ($1, $2, $3, $4, $5, $6, NOW())
            ON CONFLICT (key) DO UPDATE SET
                value = EXCLUDED.value,
                value_type = EXCLUDED.value_type,
                category = EXCLUDED.category,
                label = EXCLUDED.label,
                description = EXCLUDED.description,
                updated_at = NOW()
            """,
            key, str_value, value_type, category, label, description,
        )
|
||||
|
||||
|
||||
async def bulk_set_settings(settings_dict: Dict[str, Any]) -> None:
    """Bulk upsert settings from a flat key→value dict.

    All statements run in one transaction, so a failure rolls back the
    whole batch.

    NOTE(review): despite the docstring saying "upsert", this issues only
    UPDATEs — keys not already present in app_settings are silently
    ignored (0 rows affected). Confirm whether that is intended.
    """
    pool = await get_pool()
    async with pool.acquire() as conn:
        async with conn.transaction():
            for key, val in settings_dict.items():
                # Values are stored as text; _cast_value restores the
                # declared type on read.
                str_val = str(val)
                await conn.execute(
                    """
                    UPDATE app_settings SET value = $1, updated_at = NOW()
                    WHERE key = $2
                    """,
                    str_val, key,
                )
|
||||
|
||||
|
||||
def _cast_value(raw: str, value_type: str) -> Any:
|
||||
"""Cast a stored string value to its declared type."""
|
||||
if value_type == "int":
|
||||
try:
|
||||
return int(raw)
|
||||
except (ValueError, TypeError):
|
||||
return 0
|
||||
elif value_type == "bool":
|
||||
return raw.lower() in ("1", "true", "yes")
|
||||
elif value_type == "json":
|
||||
try:
|
||||
return json.loads(raw)
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
return raw
|
||||
return raw
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Integrations
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def get_integrations() -> List[Dict[str, Any]]:
    """Return every integration config, ordered for display."""
    db = await get_pool()
    async with db.acquire() as conn:
        records = await conn.fetch(
            "SELECT * FROM integrations ORDER BY display_order, type"
        )
    return [_integration_to_dict(rec) for rec in records]
|
||||
|
||||
|
||||
async def get_integration(type_name: str) -> Optional[Dict[str, Any]]:
    """Return one integration by its type name, or None when unknown."""
    db = await get_pool()
    async with db.acquire() as conn:
        record = await conn.fetchrow(
            "SELECT * FROM integrations WHERE type = $1", type_name
        )
    if record is None:
        return None
    return _integration_to_dict(record)
|
||||
|
||||
|
||||
async def upsert_integration(
    type_name: str,
    name: str,
    config: Dict[str, Any],
    enabled: bool = True,
    display_order: int = 0,
) -> Dict[str, Any]:
    """Insert or update an integration config.

    Args:
        type_name: Integration type key ("weather", "ha", ...) — the
            conflict target, so exactly one row exists per type.
        name: Display name.
        config: Integration-specific settings, stored as JSONB.
        enabled: Whether the integration is active.
        display_order: Sort position in listings.

    Returns:
        The stored row as a dict (config decoded, timestamps ISO-formatted).
    """
    pool = await get_pool()
    # asyncpg needs the dict serialised for the ::jsonb cast below.
    config_json = json.dumps(config)
    async with pool.acquire() as conn:
        row = await conn.fetchrow(
            """
            INSERT INTO integrations (type, name, config, enabled, display_order, updated_at)
            VALUES ($1, $2, $3::jsonb, $4, $5, NOW())
            ON CONFLICT (type) DO UPDATE SET
                name = EXCLUDED.name,
                config = EXCLUDED.config,
                enabled = EXCLUDED.enabled,
                display_order = EXCLUDED.display_order,
                updated_at = NOW()
            RETURNING *
            """,
            type_name, name, config_json, enabled, display_order,
        )
        return _integration_to_dict(row)
|
||||
|
||||
|
||||
async def toggle_integration(type_name: str, enabled: bool) -> None:
    """Enable or disable an integration."""
    db = await get_pool()
    statement = "UPDATE integrations SET enabled = $1, updated_at = NOW() WHERE type = $2"
    async with db.acquire() as conn:
        await conn.execute(statement, enabled, type_name)
|
||||
|
||||
|
||||
def _integration_to_dict(row: Any) -> Dict[str, Any]:
|
||||
"""Convert an integration row to a dict."""
|
||||
d = dict(row)
|
||||
# Ensure config is a dict (asyncpg returns JSONB as dict already)
|
||||
if isinstance(d.get("config"), str):
|
||||
d["config"] = json.loads(d["config"])
|
||||
# Convert datetimes
|
||||
for k in ("created_at", "updated_at"):
|
||||
if k in d and d[k] is not None:
|
||||
d[k] = d[k].isoformat()
|
||||
return d
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# MQTT Subscriptions
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def get_mqtt_subscriptions() -> List[Dict[str, Any]]:
    """Return every MQTT subscription, ordered for display."""
    db = await get_pool()
    async with db.acquire() as conn:
        records = await conn.fetch(
            "SELECT * FROM mqtt_subscriptions ORDER BY display_order, id"
        )
    return [_sub_to_dict(rec) for rec in records]
|
||||
|
||||
|
||||
async def create_mqtt_subscription(
    topic_pattern: str,
    display_name: str = "",
    category: str = "other",
    unit: str = "",
    widget_type: str = "value",
    enabled: bool = True,
    display_order: int = 0,
) -> Dict[str, Any]:
    """Create a new MQTT subscription.

    Args:
        topic_pattern: MQTT topic filter (may contain wildcards).
        display_name: Human-readable label.
        category: Grouping category.
        unit: Unit string shown next to values.
        widget_type: How the value is rendered.
        enabled: Whether the subscription is active.
        display_order: Sort position in listings.

    Returns:
        The inserted row as a dict with ISO-formatted timestamps.
    """
    pool = await get_pool()
    async with pool.acquire() as conn:
        row = await conn.fetchrow(
            """
            INSERT INTO mqtt_subscriptions
                (topic_pattern, display_name, category, unit, widget_type, enabled, display_order)
            VALUES ($1, $2, $3, $4, $5, $6, $7)
            RETURNING *
            """,
            topic_pattern, display_name, category, unit, widget_type, enabled, display_order,
        )
        return _sub_to_dict(row)
|
||||
|
||||
|
||||
async def update_mqtt_subscription(sub_id: int, **fields: Any) -> Optional[Dict[str, Any]]:
    """Update specific fields of an MQTT subscription.

    Args:
        sub_id: Primary key of the subscription to update.
        **fields: Column→value pairs; anything outside the allow-list is
            dropped, which also keeps the dynamic SQL injection-safe
            (column names come only from ``allowed``, values are bound).

    Returns:
        The updated row as a dict, or None when no valid fields were given
        or no row matched ``sub_id``.
    """
    pool = await get_pool()
    # Allow-list of updatable columns; unknown kwargs are silently ignored.
    allowed = {"topic_pattern", "display_name", "category", "unit", "widget_type", "enabled", "display_order"}
    updates = {k: v for k, v in fields.items() if k in allowed}
    if not updates:
        return None

    # Build "col = $n" pairs for $1..$N; sub_id is appended last, so the
    # WHERE placeholder below is ${len(params)} — the final parameter.
    set_parts = []
    params = []
    for i, (k, v) in enumerate(updates.items(), start=1):
        set_parts.append(f"{k} = ${i}")
        params.append(v)
    params.append(sub_id)
    set_clause = ", ".join(set_parts)

    async with pool.acquire() as conn:
        row = await conn.fetchrow(
            f"UPDATE mqtt_subscriptions SET {set_clause}, updated_at = NOW() "
            f"WHERE id = ${len(params)} RETURNING *",
            *params,
        )
        return _sub_to_dict(row) if row else None
|
||||
|
||||
|
||||
async def delete_mqtt_subscription(sub_id: int) -> bool:
    """Delete an MQTT subscription. Returns True if a row was deleted."""
    db = await get_pool()
    async with db.acquire() as conn:
        status = await conn.execute(
            "DELETE FROM mqtt_subscriptions WHERE id = $1", sub_id
        )
    # asyncpg returns a command tag such as "DELETE 1".
    return status == "DELETE 1"
|
||||
|
||||
|
||||
def _sub_to_dict(row: Any) -> Dict[str, Any]:
|
||||
d = dict(row)
|
||||
for k in ("created_at", "updated_at"):
|
||||
if k in d and d[k] is not None:
|
||||
d[k] = d[k].isoformat()
|
||||
return d
|
||||
147
server/services/test_connections.py
Normal file
147
server/services/test_connections.py
Normal file
|
|
@ -0,0 +1,147 @@
|
|||
"""Integration connection testing functions."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
import httpx
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
TIMEOUT = 10.0
|
||||
|
||||
|
||||
async def test_weather(config: Dict[str, Any]) -> Dict[str, Any]:
    """Test weather service by fetching current conditions from wttr.in."""
    location = config.get("location", "Leverkusen")
    try:
        async with httpx.AsyncClient(timeout=TIMEOUT) as client:
            resp = await client.get(f"https://wttr.in/{location}?format=j1")
            resp.raise_for_status()
            payload = resp.json()
        temp = payload["current_condition"][0]["temp_C"]
    except Exception as exc:
        return {"success": False, "message": str(exc)}
    return {"success": True, "message": f"Verbunden — {location}: {temp}°C"}
|
||||
|
||||
|
||||
async def test_ha(config: Dict[str, Any]) -> Dict[str, Any]:
    """Test the Home Assistant REST API with the configured token."""
    url = config.get("url", "")
    token = config.get("token", "")
    if not url or not token:
        return {"success": False, "message": "URL und Token sind erforderlich"}

    endpoint = f"{url.rstrip('/')}/api/"
    headers = {"Authorization": f"Bearer {token}"}
    try:
        # NOTE(review): verify=False disables TLS certificate checks
        # (common for self-signed HA setups) — confirm this is acceptable.
        async with httpx.AsyncClient(timeout=TIMEOUT, verify=False) as client:
            resp = await client.get(endpoint, headers=headers)
            resp.raise_for_status()
            payload = resp.json()
    except Exception as exc:
        return {"success": False, "message": str(exc)}
    return {"success": True, "message": f"Verbunden — {payload.get('message', 'OK')}"}
|
||||
|
||||
|
||||
async def test_vikunja(config: Dict[str, Any]) -> Dict[str, Any]:
    """Test the Vikunja API by fetching the authenticated user's profile."""
    url = config.get("url", "")
    token = config.get("token", "")
    if not url or not token:
        return {"success": False, "message": "URL und Token sind erforderlich"}

    endpoint = f"{url.rstrip('/')}/user"
    headers = {"Authorization": f"Bearer {token}"}
    try:
        async with httpx.AsyncClient(timeout=TIMEOUT) as client:
            resp = await client.get(endpoint, headers=headers)
            resp.raise_for_status()
            profile = resp.json()
    except Exception as exc:
        return {"success": False, "message": str(exc)}
    return {"success": True, "message": f"Verbunden als {profile.get('username', 'OK')}"}
|
||||
|
||||
|
||||
async def test_unraid(config: Dict[str, Any]) -> Dict[str, Any]:
    """Probe each configured Unraid server via plain HTTP reachability."""
    servers = config.get("servers", [])
    if not servers:
        return {"success": False, "message": "Keine Server konfiguriert"}

    results = []
    for server in servers:
        label = server.get("name", server.get("host", "?"))
        host = server.get("host", "")
        port = server.get("port", 80)
        if not host:
            results.append(f"{label}: Kein Host")
            continue
        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                resp = await client.get(f"http://{host}:{port}/")
            # Any HTTP response at all counts as reachable.
            results.append(f"{label}: Online ({resp.status_code})")
        except Exception as exc:
            results.append(f"{label}: Offline ({exc})")

    statuses = ["Online" in line for line in results]
    return {
        "success": all(statuses),
        "message": " | ".join(results),
    }
|
||||
|
||||
|
||||
async def test_mqtt(config: Dict[str, Any]) -> Dict[str, Any]:
    """Test the MQTT broker by performing a short-lived connect."""
    host = config.get("host", "")
    port = int(config.get("port", 1883))
    username = config.get("username") or None
    password = config.get("password") or None

    if not host:
        return {"success": False, "message": "MQTT Host ist erforderlich"}

    try:
        import aiomqtt

        # Entering the context performs the CONNECT handshake; reaching the
        # body means the broker accepted our credentials.
        async with aiomqtt.Client(
            hostname=host,
            port=port,
            username=username,
            password=password,
            identifier="daily-briefing-test",
        ):
            pass
    except Exception as exc:
        return {"success": False, "message": str(exc)}
    return {"success": True, "message": f"Verbunden mit {host}:{port}"}
|
||||
|
||||
|
||||
async def test_news_db(config: Dict[str, Any]) -> Dict[str, Any]:
    """Verify the market_news table is reachable (config is unused)."""
    try:
        from server.db import get_pool

        pool = await get_pool()
        async with pool.acquire() as conn:
            row = await conn.fetchrow("SELECT COUNT(*) AS cnt FROM market_news")
        count = row["cnt"] if row else 0
    except Exception as exc:
        return {"success": False, "message": str(exc)}
    return {"success": True, "message": f"Verbunden — {count} Artikel in der Datenbank"}
|
||||
|
||||
|
||||
# Map integration type → test function.
# Each value is an async callable taking the integration's config dict and
# returning {"success": bool, "message": str}.
TEST_FUNCTIONS = {
    "weather": test_weather,
    "ha": test_ha,
    "vikunja": test_vikunja,
    "unraid": test_unraid,
    "mqtt": test_mqtt,
    "news": test_news_db,
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue