kuma-strapper/backend/services/database.py
Debian 1fd29e449f
Add local SQLite database and sync with Uptime Kuma
Features:
- SQLite database to track monitors and hosts locally
- Uses Uptime Kuma tags to mark monitors as managed by Kuma Strapper
- Sync on startup, before each scan, and on-demand via API
- Shows existing monitors when re-scanning a host
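
A rough sketch of how the tag-based reconciliation could work is shown below. It relies only on the Database API in database.py further down; the Kuma monitor field names, the tag name, and the import path are illustrative assumptions, not the actual sync.py code.

# Hypothetical reconciliation sketch; kuma_monitors is assumed to be a list of
# monitor dicts already fetched from Uptime Kuma (field names are assumptions).
from services.database import Database, TrackedMonitor

MANAGED_TAG = "kuma-strapper"  # assumed tag marking monitors managed by Kuma Strapper

def reconcile(db: Database, kuma_monitors: list[dict]) -> None:
    for m in kuma_monitors:
        tag_names = [t.get("name") for t in m.get("tags", [])]
        if MANAGED_TAG not in tag_names:
            continue  # not managed by Kuma Strapper, leave it alone
        host = db.get_or_create_host(m.get("hostname") or m["name"])
        existing = db.get_monitor_by_kuma_id(m["id"])
        tracked = TrackedMonitor(
            id=existing.id if existing else None,
            kuma_monitor_id=m["id"],
            host_id=host.id,
            name=m["name"],
            type=m["type"],
            target=m.get("url") or m.get("hostname"),
            port=m.get("port"),
        )
        if existing:
            db.update_monitor(tracked)
        else:
            db.add_monitor(tracked)
        db.update_host_sync_time(host.id)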

New files:
- backend/services/database.py - SQLite database service
- backend/services/sync.py - Sync service for Uptime Kuma reconciliation

API endpoints:
- POST /api/sync - Full sync with Uptime Kuma
- POST /api/sync/host/<hostname> - Sync specific host
- GET /api/hosts - List tracked hosts
- GET /api/hosts/<hostname>/monitors - Get monitors for host
- GET /api/monitors/tracked - Get all tracked monitors

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-06 09:10:26 +00:00
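
The endpoints listed above are plain HTTP calls; a minimal example using the requests library follows. The base URL, port, and hostname are placeholder assumptions, and the response shapes are not shown.

import requests

BASE = "http://localhost:5000"  # assumed Kuma Strapper backend address

requests.post(f"{BASE}/api/sync")                        # full sync with Uptime Kuma
requests.post(f"{BASE}/api/sync/host/web-01")            # sync a single host
hosts = requests.get(f"{BASE}/api/hosts").json()         # list tracked hosts
monitors = requests.get(f"{BASE}/api/hosts/web-01/monitors").json()
tracked = requests.get(f"{BASE}/api/monitors/tracked").json()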


"""SQLite database for tracking monitors and hosts."""
import os
import sqlite3
import logging
from contextlib import contextmanager
from dataclasses import dataclass, asdict
from datetime import datetime
from typing import Optional

from config import get_config

logger = logging.getLogger(__name__)

SCHEMA = """
-- Hosts that have been scanned
CREATE TABLE IF NOT EXISTS hosts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    hostname TEXT UNIQUE NOT NULL,
    last_scan_at TIMESTAMP,
    last_sync_at TIMESTAMP,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Monitors created by Kuma Strapper
CREATE TABLE IF NOT EXISTS monitors (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    kuma_monitor_id INTEGER UNIQUE NOT NULL,
    host_id INTEGER NOT NULL,
    name TEXT NOT NULL,
    type TEXT NOT NULL,
    target TEXT,
    port INTEGER,
    interval_seconds INTEGER DEFAULT 60,
    push_metric TEXT,
    status TEXT DEFAULT 'active',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    synced_at TIMESTAMP,
    FOREIGN KEY (host_id) REFERENCES hosts(id) ON DELETE CASCADE
);

-- Push script deployments
CREATE TABLE IF NOT EXISTS deployments (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    monitor_id INTEGER NOT NULL,
    host_id INTEGER NOT NULL,
    script_path TEXT NOT NULL,
    scheduling_method TEXT,
    scheduling_info TEXT,
    deployed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    status TEXT DEFAULT 'deployed',
    FOREIGN KEY (monitor_id) REFERENCES monitors(id) ON DELETE CASCADE,
    FOREIGN KEY (host_id) REFERENCES hosts(id) ON DELETE CASCADE
);

-- Indexes
CREATE INDEX IF NOT EXISTS idx_monitors_host_id ON monitors(host_id);
CREATE INDEX IF NOT EXISTS idx_monitors_kuma_id ON monitors(kuma_monitor_id);
CREATE INDEX IF NOT EXISTS idx_hosts_hostname ON hosts(hostname);
"""

@dataclass
class Host:
    id: Optional[int]
    hostname: str
    last_scan_at: Optional[datetime] = None
    last_sync_at: Optional[datetime] = None
    created_at: Optional[datetime] = None

    def to_dict(self) -> dict:
        d = asdict(self)
        for k, v in d.items():
            if isinstance(v, datetime):
                d[k] = v.isoformat()
        return d


@dataclass
class TrackedMonitor:
    id: Optional[int]
    kuma_monitor_id: int
    host_id: int
    name: str
    type: str
    target: Optional[str] = None
    port: Optional[int] = None
    interval_seconds: int = 60
    push_metric: Optional[str] = None
    status: str = "active"
    created_at: Optional[datetime] = None
    synced_at: Optional[datetime] = None

    def to_dict(self) -> dict:
        d = asdict(self)
        for k, v in d.items():
            if isinstance(v, datetime):
                d[k] = v.isoformat()
        return d


@dataclass
class Deployment:
    id: Optional[int]
    monitor_id: int
    host_id: int
    script_path: str
    scheduling_method: Optional[str] = None
    scheduling_info: Optional[str] = None
    deployed_at: Optional[datetime] = None
    status: str = "deployed"

    def to_dict(self) -> dict:
        d = asdict(self)
        for k, v in d.items():
            if isinstance(v, datetime):
                d[k] = v.isoformat()
        return d


class Database:
    """Thin wrapper around the local SQLite file that tracks hosts, monitors and deployments."""

    def __init__(self, db_path: Optional[str] = None):
        self.db_path = db_path or get_config().database_path
        self._ensure_directory()
        self._init_schema()

    def _ensure_directory(self):
        dir_path = os.path.dirname(self.db_path)
        if dir_path:
            os.makedirs(dir_path, exist_ok=True)

    @contextmanager
    def get_connection(self):
        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        conn.execute("PRAGMA foreign_keys = ON")
        try:
            yield conn
            conn.commit()
        except Exception:
            conn.rollback()
            raise
        finally:
            conn.close()

    def _init_schema(self):
        with self.get_connection() as conn:
            conn.executescript(SCHEMA)
        logger.info(f"Database initialized at {self.db_path}")

    # Host operations

    def get_or_create_host(self, hostname: str) -> Host:
        with self.get_connection() as conn:
            row = conn.execute(
                "SELECT * FROM hosts WHERE hostname = ?", (hostname,)
            ).fetchone()
            if row:
                return self._row_to_host(row)
            cursor = conn.execute(
                "INSERT INTO hosts (hostname) VALUES (?)", (hostname,)
            )
            return Host(id=cursor.lastrowid, hostname=hostname)

    def get_host_by_hostname(self, hostname: str) -> Optional[Host]:
        with self.get_connection() as conn:
            row = conn.execute(
                "SELECT * FROM hosts WHERE hostname = ?", (hostname,)
            ).fetchone()
            return self._row_to_host(row) if row else None

    def get_all_hosts(self) -> list[Host]:
        with self.get_connection() as conn:
            rows = conn.execute("SELECT * FROM hosts ORDER BY hostname").fetchall()
            return [self._row_to_host(row) for row in rows]

    def update_host_scan_time(self, host_id: int):
        with self.get_connection() as conn:
            conn.execute(
                "UPDATE hosts SET last_scan_at = ? WHERE id = ?",
                (datetime.utcnow(), host_id),
            )

    def update_host_sync_time(self, host_id: int):
        with self.get_connection() as conn:
            conn.execute(
                "UPDATE hosts SET last_sync_at = ? WHERE id = ?",
                (datetime.utcnow(), host_id),
            )

    def _row_to_host(self, row: sqlite3.Row) -> Host:
        return Host(
            id=row["id"],
            hostname=row["hostname"],
            last_scan_at=self._parse_timestamp(row["last_scan_at"]),
            last_sync_at=self._parse_timestamp(row["last_sync_at"]),
            created_at=self._parse_timestamp(row["created_at"]),
        )

    # Monitor operations

    def add_monitor(self, monitor: TrackedMonitor) -> int:
        with self.get_connection() as conn:
            cursor = conn.execute(
                """INSERT INTO monitors
                   (kuma_monitor_id, host_id, name, type, target, port,
                    interval_seconds, push_metric, status, synced_at)
                   VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                (
                    monitor.kuma_monitor_id,
                    monitor.host_id,
                    monitor.name,
                    monitor.type,
                    monitor.target,
                    monitor.port,
                    monitor.interval_seconds,
                    monitor.push_metric,
                    monitor.status,
                    datetime.utcnow(),
                ),
            )
            return cursor.lastrowid

    def get_monitor_by_kuma_id(self, kuma_monitor_id: int) -> Optional[TrackedMonitor]:
        with self.get_connection() as conn:
            row = conn.execute(
                "SELECT * FROM monitors WHERE kuma_monitor_id = ?", (kuma_monitor_id,)
            ).fetchone()
            return self._row_to_monitor(row) if row else None

    def get_monitors_for_host(self, host_id: int) -> list[TrackedMonitor]:
        with self.get_connection() as conn:
            rows = conn.execute(
                "SELECT * FROM monitors WHERE host_id = ? ORDER BY name",
                (host_id,),
            ).fetchall()
            return [self._row_to_monitor(row) for row in rows]

    def get_monitors_for_hostname(self, hostname: str) -> list[TrackedMonitor]:
        with self.get_connection() as conn:
            rows = conn.execute(
                """SELECT m.* FROM monitors m
                   JOIN hosts h ON m.host_id = h.id
                   WHERE h.hostname = ?
                   ORDER BY m.name""",
                (hostname,),
            ).fetchall()
            return [self._row_to_monitor(row) for row in rows]

    def get_all_monitors(self) -> list[TrackedMonitor]:
        with self.get_connection() as conn:
            rows = conn.execute("SELECT * FROM monitors ORDER BY name").fetchall()
            return [self._row_to_monitor(row) for row in rows]

    def update_monitor(self, monitor: TrackedMonitor):
        with self.get_connection() as conn:
            conn.execute(
                """UPDATE monitors SET
                       name = ?, type = ?, target = ?, port = ?,
                       interval_seconds = ?, push_metric = ?, status = ?, synced_at = ?
                   WHERE id = ?""",
                (
                    monitor.name,
                    monitor.type,
                    monitor.target,
                    monitor.port,
                    monitor.interval_seconds,
                    monitor.push_metric,
                    monitor.status,
                    datetime.utcnow(),
                    monitor.id,
                ),
            )

    def update_monitor_status(self, monitor_id: int, status: str):
        with self.get_connection() as conn:
            conn.execute(
                "UPDATE monitors SET status = ?, synced_at = ? WHERE id = ?",
                (status, datetime.utcnow(), monitor_id),
            )

    def mark_monitor_synced(self, monitor_id: int):
        with self.get_connection() as conn:
            conn.execute(
                "UPDATE monitors SET synced_at = ? WHERE id = ?",
                (datetime.utcnow(), monitor_id),
            )

    def delete_monitor(self, monitor_id: int):
        with self.get_connection() as conn:
            conn.execute("DELETE FROM monitors WHERE id = ?", (monitor_id,))

    def _row_to_monitor(self, row: sqlite3.Row) -> TrackedMonitor:
        return TrackedMonitor(
            id=row["id"],
            kuma_monitor_id=row["kuma_monitor_id"],
            host_id=row["host_id"],
            name=row["name"],
            type=row["type"],
            target=row["target"],
            port=row["port"],
            interval_seconds=row["interval_seconds"],
            push_metric=row["push_metric"],
            status=row["status"],
            created_at=self._parse_timestamp(row["created_at"]),
            synced_at=self._parse_timestamp(row["synced_at"]),
        )

    # Deployment operations

    def add_deployment(self, deployment: Deployment) -> int:
        with self.get_connection() as conn:
            cursor = conn.execute(
                """INSERT INTO deployments
                   (monitor_id, host_id, script_path, scheduling_method,
                    scheduling_info, status)
                   VALUES (?, ?, ?, ?, ?, ?)""",
                (
                    deployment.monitor_id,
                    deployment.host_id,
                    deployment.script_path,
                    deployment.scheduling_method,
                    deployment.scheduling_info,
                    deployment.status,
                ),
            )
            return cursor.lastrowid

    def get_deployment_for_monitor(self, monitor_id: int) -> Optional[Deployment]:
        with self.get_connection() as conn:
            row = conn.execute(
                "SELECT * FROM deployments WHERE monitor_id = ?", (monitor_id,)
            ).fetchone()
            return self._row_to_deployment(row) if row else None

    def update_deployment_status(self, deployment_id: int, status: str):
        with self.get_connection() as conn:
            conn.execute(
                "UPDATE deployments SET status = ? WHERE id = ?",
                (status, deployment_id),
            )

    def _row_to_deployment(self, row: sqlite3.Row) -> Deployment:
        return Deployment(
            id=row["id"],
            monitor_id=row["monitor_id"],
            host_id=row["host_id"],
            script_path=row["script_path"],
            scheduling_method=row["scheduling_method"],
            scheduling_info=row["scheduling_info"],
            deployed_at=self._parse_timestamp(row["deployed_at"]),
            status=row["status"],
        )

    def _parse_timestamp(self, value) -> Optional[datetime]:
        if value is None:
            return None
        if isinstance(value, datetime):
            return value
        try:
            return datetime.fromisoformat(value)
        except (ValueError, TypeError):
            return None


# Global database instance
_database: Optional[Database] = None


def get_database() -> Database:
    """Get the global database instance."""
    global _database
    if _database is None:
        _database = Database()
    return _database
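
A quick usage sketch of the service above. The import path assumes backend/ is the import root (matching the from config import above), and all values are illustrative.

from services.database import get_database, TrackedMonitor, Deployment

db = get_database()                     # opens (and creates) the SQLite file from config
host = db.get_or_create_host("web-01")

# Record a monitor that was just created in Uptime Kuma (kuma_monitor_id=42 is made up)
local_id = db.add_monitor(TrackedMonitor(
    id=None,
    kuma_monitor_id=42,
    host_id=host.id,
    name="web-01 - nginx",
    type="port",
    target="web-01",
    port=80,
))

# Record where the push script for that monitor was deployed (paths are illustrative)
db.add_deployment(Deployment(
    id=None,
    monitor_id=local_id,
    host_id=host.id,
    script_path="/usr/local/bin/kuma-push-nginx.sh",
    scheduling_method="cron",
))

print([m.to_dict() for m in db.get_monitors_for_hostname("web-01")])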