Add local SQLite database and sync with Uptime Kuma
All checks were successful
Build and Push Container / build (push) Successful in 1m4s
Features:
- SQLite database to track monitors and hosts locally
- Uses Uptime Kuma tags to mark monitors as managed by Kuma Strapper
- Sync on startup, before each scan, and on-demand via API
- Shows existing monitors when re-scanning a host

New files:
- backend/services/database.py - SQLite database service
- backend/services/sync.py - Sync service for Uptime Kuma reconciliation

API endpoints:
- POST /api/sync - Full sync with Uptime Kuma
- POST /api/sync/host/<hostname> - Sync specific host
- GET /api/hosts - List tracked hosts
- GET /api/hosts/<hostname>/monitors - Get monitors for host
- GET /api/monitors/tracked - Get all tracked monitors

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
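The new sync endpoints can be exercised directly. A minimal sketch (assumes the backend is reachable at http://localhost:5000 and that the `requests` package is installed; the hostname is illustrative):

```python
import requests

BASE = "http://localhost:5000/api"  # assumed local address of the backend

# Full reconciliation with Uptime Kuma
print(requests.post(f"{BASE}/sync").json())
# expected shape: {"added": ..., "updated": ..., "removed": ..., "errors": [...]}

# Per-host sync, then the monitors now tracked for that host
host = "web01.example.com"  # hypothetical hostname
print(requests.post(f"{BASE}/sync/host/{host}").json())
print(requests.get(f"{BASE}/hosts/{host}/monitors").json())
```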
backend/app.py (104 changed lines)
@@ -17,6 +17,8 @@ from services.monitors import (
     parse_docker_containers_from_scan,
 )
 from services.kuma_client import get_kuma_client
+from services.database import get_database
+from services.sync import get_sync_service
 from utils.approval import get_approval_queue, ApprovalStatus
 
 
@@ -90,6 +92,20 @@ def start_scan():
 
     # Start scan in background thread
     def run_scan():
+        # Sync this host's monitors before scanning
+        try:
+            sync = get_sync_service()
+            sync_result = sync.sync_host(hostname)
+            db = get_database()
+            existing_monitors = db.get_monitors_for_hostname(hostname)
+            socketio.emit("host_sync_complete", {
+                "hostname": hostname,
+                "sync_result": sync_result,
+                "existing_monitors": [m.to_dict() for m in existing_monitors],
+            })
+        except Exception as e:
+            print(f"Pre-scan sync failed (non-fatal): {e}")
+
         discovery = get_discovery_service()
 
         def on_progress(cmd_name, status):
@@ -576,6 +592,72 @@ def test_kuma_connection():
         return jsonify({"connected": False, "error": str(e)})
 
 
+# Sync endpoints
+@app.route("/api/sync", methods=["POST"])
+def trigger_sync():
+    """Trigger a full sync with Uptime Kuma."""
+    try:
+        sync = get_sync_service()
+        result = sync.full_sync()
+        return jsonify(result)
+    except Exception as e:
+        return jsonify({"error": str(e)}), 500
+
+
+@app.route("/api/sync/host/<hostname>", methods=["POST"])
+def sync_host(hostname):
+    """Sync monitors for a specific host."""
+    try:
+        sync = get_sync_service()
+        result = sync.sync_host(hostname)
+        return jsonify(result)
+    except Exception as e:
+        return jsonify({"error": str(e)}), 500
+
+
+@app.route("/api/hosts", methods=["GET"])
+def get_hosts():
+    """Get all tracked hosts."""
+    try:
+        db = get_database()
+        hosts = db.get_all_hosts()
+        return jsonify({
+            "hosts": [h.to_dict() for h in hosts],
+            "count": len(hosts)
+        })
+    except Exception as e:
+        return jsonify({"error": str(e)}), 500
+
+
+@app.route("/api/hosts/<hostname>/monitors", methods=["GET"])
+def get_host_monitors(hostname):
+    """Get all tracked monitors for a host."""
+    try:
+        db = get_database()
+        monitors = db.get_monitors_for_hostname(hostname)
+        return jsonify({
+            "hostname": hostname,
+            "monitors": [m.to_dict() for m in monitors],
+            "count": len(monitors)
+        })
+    except Exception as e:
+        return jsonify({"error": str(e)}), 500
+
+
+@app.route("/api/monitors/tracked", methods=["GET"])
+def get_tracked_monitors():
+    """Get all tracked monitors across all hosts."""
+    try:
+        db = get_database()
+        monitors = db.get_all_monitors()
+        return jsonify({
+            "monitors": [m.to_dict() for m in monitors],
+            "count": len(monitors)
+        })
+    except Exception as e:
+        return jsonify({"error": str(e)}), 500
+
+
 # WebSocket events
 @socketio.on("connect")
 def handle_connect():
@@ -587,6 +669,19 @@ def handle_disconnect():
     pass
 
 
+def startup_sync():
+    """Run initial sync with Uptime Kuma on startup."""
+    try:
+        print("Running startup sync with Uptime Kuma...")
+        sync = get_sync_service()
+        result = sync.full_sync()
+        print(f"Startup sync complete: added={result['added']}, updated={result['updated']}, removed={result['removed']}")
+        if result['errors']:
+            print(f"Sync errors: {result['errors']}")
+    except Exception as e:
+        print(f"Startup sync failed (non-fatal): {e}")
+
+
 if __name__ == "__main__":
     # Validate config on startup
     config = get_config()
@@ -600,4 +695,13 @@ if __name__ == "__main__":
     print("Configuration OK")
     print(f"Dev mode: {'enabled' if config.dev_mode else 'disabled'}")
 
+    # Run startup sync in background after short delay
+    def delayed_sync():
+        import time
+        time.sleep(2)  # Wait for app to fully start
+        startup_sync()
+
+    sync_thread = threading.Thread(target=delayed_sync, daemon=True)
+    sync_thread.start()
+
     socketio.run(app, host="0.0.0.0", port=5000, debug=os.environ.get("DEBUG", "false").lower() == "true")

backend/config.py
@@ -73,6 +73,7 @@ class Config:
     uptime_kuma_api_key: str
     claude_api_key: str
     dev_mode: bool = False
+    database_path: str = "/app/data/kuma_strapper.db"
 
     @classmethod
     def from_env(cls) -> "Config":
@@ -91,6 +92,7 @@ class Config:
             uptime_kuma_api_key=resolve_secret("UPTIME_KUMA_API_KEY", ""),
             claude_api_key=resolve_secret("CLAUDE_API_KEY", ""),
             dev_mode=os.environ.get("DEV_MODE", "false").lower() == "true",
+            database_path=os.environ.get("DATABASE_PATH", "/app/data/kuma_strapper.db"),
         )
 
     def validate(self) -> list[str]:
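For illustration, the new field resolves the same way the other settings do: the environment variable wins, otherwise the in-container default applies. A minimal sketch of that lookup:

```python
import os

# Mirrors the from_env() line above; both values come from the diff.
db_path = os.environ.get("DATABASE_PATH", "/app/data/kuma_strapper.db")
print(db_path)
```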

backend/services/database.py (new file, 377 lines)
@@ -0,0 +1,377 @@
+"""SQLite database for tracking monitors and hosts."""
+
+import os
+import sqlite3
+import logging
+from contextlib import contextmanager
+from dataclasses import dataclass, asdict
+from datetime import datetime
+from typing import Optional
+
+from config import get_config
+
+logger = logging.getLogger(__name__)
+
+SCHEMA = """
+-- Hosts that have been scanned
+CREATE TABLE IF NOT EXISTS hosts (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    hostname TEXT UNIQUE NOT NULL,
+    last_scan_at TIMESTAMP,
+    last_sync_at TIMESTAMP,
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+-- Monitors created by Kuma Strapper
+CREATE TABLE IF NOT EXISTS monitors (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    kuma_monitor_id INTEGER UNIQUE NOT NULL,
+    host_id INTEGER NOT NULL,
+    name TEXT NOT NULL,
+    type TEXT NOT NULL,
+    target TEXT,
+    port INTEGER,
+    interval_seconds INTEGER DEFAULT 60,
+    push_metric TEXT,
+    status TEXT DEFAULT 'active',
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    synced_at TIMESTAMP,
+    FOREIGN KEY (host_id) REFERENCES hosts(id) ON DELETE CASCADE
+);
+
+-- Push script deployments
+CREATE TABLE IF NOT EXISTS deployments (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    monitor_id INTEGER NOT NULL,
+    host_id INTEGER NOT NULL,
+    script_path TEXT NOT NULL,
+    scheduling_method TEXT,
+    scheduling_info TEXT,
+    deployed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    status TEXT DEFAULT 'deployed',
+    FOREIGN KEY (monitor_id) REFERENCES monitors(id) ON DELETE CASCADE,
+    FOREIGN KEY (host_id) REFERENCES hosts(id) ON DELETE CASCADE
+);
+
+-- Indexes
+CREATE INDEX IF NOT EXISTS idx_monitors_host_id ON monitors(host_id);
+CREATE INDEX IF NOT EXISTS idx_monitors_kuma_id ON monitors(kuma_monitor_id);
+CREATE INDEX IF NOT EXISTS idx_hosts_hostname ON hosts(hostname);
+"""
+
+
+@dataclass
+class Host:
+    id: Optional[int]
+    hostname: str
+    last_scan_at: Optional[datetime] = None
+    last_sync_at: Optional[datetime] = None
+    created_at: Optional[datetime] = None
+
+    def to_dict(self) -> dict:
+        d = asdict(self)
+        for k, v in d.items():
+            if isinstance(v, datetime):
+                d[k] = v.isoformat()
+        return d
+
+
+@dataclass
+class TrackedMonitor:
+    id: Optional[int]
+    kuma_monitor_id: int
+    host_id: int
+    name: str
+    type: str
+    target: Optional[str] = None
+    port: Optional[int] = None
+    interval_seconds: int = 60
+    push_metric: Optional[str] = None
+    status: str = "active"
+    created_at: Optional[datetime] = None
+    synced_at: Optional[datetime] = None
+
+    def to_dict(self) -> dict:
+        d = asdict(self)
+        for k, v in d.items():
+            if isinstance(v, datetime):
+                d[k] = v.isoformat()
+        return d
+
+
+@dataclass
+class Deployment:
+    id: Optional[int]
+    monitor_id: int
+    host_id: int
+    script_path: str
+    scheduling_method: Optional[str] = None
+    scheduling_info: Optional[str] = None
+    deployed_at: Optional[datetime] = None
+    status: str = "deployed"
+
+    def to_dict(self) -> dict:
+        d = asdict(self)
+        for k, v in d.items():
+            if isinstance(v, datetime):
+                d[k] = v.isoformat()
+        return d
+
+
+class Database:
+    def __init__(self, db_path: Optional[str] = None):
+        self.db_path = db_path or get_config().database_path
+        self._ensure_directory()
+        self._init_schema()
+
+    def _ensure_directory(self):
+        dir_path = os.path.dirname(self.db_path)
+        if dir_path:
+            os.makedirs(dir_path, exist_ok=True)
+
+    @contextmanager
+    def get_connection(self):
+        conn = sqlite3.connect(self.db_path)
+        conn.row_factory = sqlite3.Row
+        conn.execute("PRAGMA foreign_keys = ON")
+        try:
+            yield conn
+            conn.commit()
+        except Exception:
+            conn.rollback()
+            raise
+        finally:
+            conn.close()
+
+    def _init_schema(self):
+        with self.get_connection() as conn:
+            conn.executescript(SCHEMA)
+        logger.info(f"Database initialized at {self.db_path}")
+
+    # Host operations
+
+    def get_or_create_host(self, hostname: str) -> Host:
+        with self.get_connection() as conn:
+            row = conn.execute(
+                "SELECT * FROM hosts WHERE hostname = ?", (hostname,)
+            ).fetchone()
+
+            if row:
+                return self._row_to_host(row)
+
+            cursor = conn.execute(
+                "INSERT INTO hosts (hostname) VALUES (?)", (hostname,)
+            )
+            return Host(id=cursor.lastrowid, hostname=hostname)
+
+    def get_host_by_hostname(self, hostname: str) -> Optional[Host]:
+        with self.get_connection() as conn:
+            row = conn.execute(
+                "SELECT * FROM hosts WHERE hostname = ?", (hostname,)
+            ).fetchone()
+            return self._row_to_host(row) if row else None
+
+    def get_all_hosts(self) -> list[Host]:
+        with self.get_connection() as conn:
+            rows = conn.execute("SELECT * FROM hosts ORDER BY hostname").fetchall()
+            return [self._row_to_host(row) for row in rows]
+
+    def update_host_scan_time(self, host_id: int):
+        with self.get_connection() as conn:
+            conn.execute(
+                "UPDATE hosts SET last_scan_at = ? WHERE id = ?",
+                (datetime.utcnow(), host_id),
+            )
+
+    def update_host_sync_time(self, host_id: int):
+        with self.get_connection() as conn:
+            conn.execute(
+                "UPDATE hosts SET last_sync_at = ? WHERE id = ?",
+                (datetime.utcnow(), host_id),
+            )
+
+    def _row_to_host(self, row: sqlite3.Row) -> Host:
+        return Host(
+            id=row["id"],
+            hostname=row["hostname"],
+            last_scan_at=self._parse_timestamp(row["last_scan_at"]),
+            last_sync_at=self._parse_timestamp(row["last_sync_at"]),
+            created_at=self._parse_timestamp(row["created_at"]),
+        )
+
+    # Monitor operations
+
+    def add_monitor(self, monitor: TrackedMonitor) -> int:
+        with self.get_connection() as conn:
+            cursor = conn.execute(
+                """INSERT INTO monitors
+                   (kuma_monitor_id, host_id, name, type, target, port,
+                    interval_seconds, push_metric, status, synced_at)
+                   VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
+                (
+                    monitor.kuma_monitor_id,
+                    monitor.host_id,
+                    monitor.name,
+                    monitor.type,
+                    monitor.target,
+                    monitor.port,
+                    monitor.interval_seconds,
+                    monitor.push_metric,
+                    monitor.status,
+                    datetime.utcnow(),
+                ),
+            )
+            return cursor.lastrowid
+
+    def get_monitor_by_kuma_id(self, kuma_monitor_id: int) -> Optional[TrackedMonitor]:
+        with self.get_connection() as conn:
+            row = conn.execute(
+                "SELECT * FROM monitors WHERE kuma_monitor_id = ?", (kuma_monitor_id,)
+            ).fetchone()
+            return self._row_to_monitor(row) if row else None
+
+    def get_monitors_for_host(self, host_id: int) -> list[TrackedMonitor]:
+        with self.get_connection() as conn:
+            rows = conn.execute(
+                "SELECT * FROM monitors WHERE host_id = ? ORDER BY name",
+                (host_id,),
+            ).fetchall()
+            return [self._row_to_monitor(row) for row in rows]
+
+    def get_monitors_for_hostname(self, hostname: str) -> list[TrackedMonitor]:
+        with self.get_connection() as conn:
+            rows = conn.execute(
+                """SELECT m.* FROM monitors m
+                   JOIN hosts h ON m.host_id = h.id
+                   WHERE h.hostname = ?
+                   ORDER BY m.name""",
+                (hostname,),
+            ).fetchall()
+            return [self._row_to_monitor(row) for row in rows]
+
+    def get_all_monitors(self) -> list[TrackedMonitor]:
+        with self.get_connection() as conn:
+            rows = conn.execute("SELECT * FROM monitors ORDER BY name").fetchall()
+            return [self._row_to_monitor(row) for row in rows]
+
+    def update_monitor(self, monitor: TrackedMonitor):
+        with self.get_connection() as conn:
+            conn.execute(
+                """UPDATE monitors SET
+                   name = ?, type = ?, target = ?, port = ?,
+                   interval_seconds = ?, push_metric = ?, status = ?, synced_at = ?
+                   WHERE id = ?""",
+                (
+                    monitor.name,
+                    monitor.type,
+                    monitor.target,
+                    monitor.port,
+                    monitor.interval_seconds,
+                    monitor.push_metric,
+                    monitor.status,
+                    datetime.utcnow(),
+                    monitor.id,
+                ),
+            )
+
+    def update_monitor_status(self, monitor_id: int, status: str):
+        with self.get_connection() as conn:
+            conn.execute(
+                "UPDATE monitors SET status = ?, synced_at = ? WHERE id = ?",
+                (status, datetime.utcnow(), monitor_id),
+            )
+
+    def mark_monitor_synced(self, monitor_id: int):
+        with self.get_connection() as conn:
+            conn.execute(
+                "UPDATE monitors SET synced_at = ? WHERE id = ?",
+                (datetime.utcnow(), monitor_id),
+            )
+
+    def delete_monitor(self, monitor_id: int):
+        with self.get_connection() as conn:
+            conn.execute("DELETE FROM monitors WHERE id = ?", (monitor_id,))
+
+    def _row_to_monitor(self, row: sqlite3.Row) -> TrackedMonitor:
+        return TrackedMonitor(
+            id=row["id"],
+            kuma_monitor_id=row["kuma_monitor_id"],
+            host_id=row["host_id"],
+            name=row["name"],
+            type=row["type"],
+            target=row["target"],
+            port=row["port"],
+            interval_seconds=row["interval_seconds"],
+            push_metric=row["push_metric"],
+            status=row["status"],
+            created_at=self._parse_timestamp(row["created_at"]),
+            synced_at=self._parse_timestamp(row["synced_at"]),
+        )
+
+    # Deployment operations
+
+    def add_deployment(self, deployment: Deployment) -> int:
+        with self.get_connection() as conn:
+            cursor = conn.execute(
+                """INSERT INTO deployments
+                   (monitor_id, host_id, script_path, scheduling_method,
+                    scheduling_info, status)
+                   VALUES (?, ?, ?, ?, ?, ?)""",
+                (
+                    deployment.monitor_id,
+                    deployment.host_id,
+                    deployment.script_path,
+                    deployment.scheduling_method,
+                    deployment.scheduling_info,
+                    deployment.status,
+                ),
+            )
+            return cursor.lastrowid
+
+    def get_deployment_for_monitor(self, monitor_id: int) -> Optional[Deployment]:
+        with self.get_connection() as conn:
+            row = conn.execute(
+                "SELECT * FROM deployments WHERE monitor_id = ?", (monitor_id,)
+            ).fetchone()
+            return self._row_to_deployment(row) if row else None
+
+    def update_deployment_status(self, deployment_id: int, status: str):
+        with self.get_connection() as conn:
+            conn.execute(
+                "UPDATE deployments SET status = ? WHERE id = ?",
+                (status, deployment_id),
+            )
+
+    def _row_to_deployment(self, row: sqlite3.Row) -> Deployment:
+        return Deployment(
+            id=row["id"],
+            monitor_id=row["monitor_id"],
+            host_id=row["host_id"],
+            script_path=row["script_path"],
+            scheduling_method=row["scheduling_method"],
+            scheduling_info=row["scheduling_info"],
+            deployed_at=self._parse_timestamp(row["deployed_at"]),
+            status=row["status"],
+        )
+
+    def _parse_timestamp(self, value) -> Optional[datetime]:
+        if value is None:
+            return None
+        if isinstance(value, datetime):
+            return value
+        try:
+            return datetime.fromisoformat(value)
+        except (ValueError, TypeError):
+            return None
+
+
+# Global database instance
+_database: Optional[Database] = None
+
+
+def get_database() -> Database:
+    """Get the global database instance."""
+    global _database
+    if _database is None:
+        _database = Database()
+    return _database
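A minimal usage sketch for the new service (values are hypothetical; `get_database()` returns a process-wide singleton backed by the SQLite file at `database_path`):

```python
from services.database import get_database, TrackedMonitor

db = get_database()
host = db.get_or_create_host("web01.example.com")  # hypothetical host

# Record a monitor that already exists in Uptime Kuma under ID 42 (illustrative)
db.add_monitor(TrackedMonitor(
    id=None,
    kuma_monitor_id=42,
    host_id=host.id,
    name="web01 HTTP",
    type="http",
    target="https://web01.example.com",
))

print([m.to_dict() for m in db.get_monitors_for_hostname("web01.example.com")])
```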

backend/services/kuma_client.py
@@ -283,6 +283,50 @@ class UptimeKumaClient:
             self._disconnect()
             return False
 
+    # Tag management methods
+
+    def get_tags(self) -> list[dict]:
+        """Get all tags."""
+        try:
+            api = self._get_api()
+            return api.get_tags()
+        except Exception as e:
+            self._disconnect()
+            raise Exception(f"Failed to get tags: {str(e)}")
+
+    def add_tag(self, name: str, color: str) -> dict:
+        """Create a new tag."""
+        try:
+            api = self._get_api()
+            return api.add_tag(name=name, color=color)
+        except Exception as e:
+            self._disconnect()
+            raise Exception(f"Failed to add tag: {str(e)}")
+
+    def add_monitor_tag(self, tag_id: int, monitor_id: int, value: str = "") -> dict:
+        """Add a tag to a monitor.
+
+        Args:
+            tag_id: The tag ID
+            monitor_id: The monitor ID
+            value: Optional value for the tag (e.g., hostname)
+        """
+        try:
+            api = self._get_api()
+            return api.add_monitor_tag(tag_id=tag_id, monitor_id=monitor_id, value=value)
+        except Exception as e:
+            self._disconnect()
+            raise Exception(f"Failed to add tag to monitor: {str(e)}")
+
+    def delete_monitor_tag(self, tag_id: int, monitor_id: int, value: str = "") -> dict:
+        """Remove a tag from a monitor."""
+        try:
+            api = self._get_api()
+            return api.delete_monitor_tag(tag_id=tag_id, monitor_id=monitor_id, value=value)
+        except Exception as e:
+            self._disconnect()
+            raise Exception(f"Failed to remove tag from monitor: {str(e)}")
+
     def get_push_url(self, push_token: str) -> str:
         """Build the full push URL for a push monitor.
 
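These wrappers delegate to the corresponding uptime-kuma-api calls. A sketch of the find-or-create flow the sync service builds on top of them (tag name, color, and IDs are illustrative):

```python
from services.kuma_client import get_kuma_client

kuma = get_kuma_client()

# Find the management tag, creating it on first use
tag = next((t for t in kuma.get_tags() if t.get("name") == "kuma-strapper"), None)
if tag is None:
    tag = kuma.add_tag(name="kuma-strapper", color="#5865F2")

# Attach it to a monitor, storing the source hostname as the tag value
kuma.add_monitor_tag(tag_id=tag["id"], monitor_id=42, value="web01.example.com")
```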

backend/services/monitors.py
@@ -5,6 +5,8 @@ import logging
 from services.kuma_client import get_kuma_client, Monitor
 from services.claude_agent import MonitorSuggestion
 from services.ssh_manager import get_ssh_manager
+from services.database import get_database, TrackedMonitor, Deployment
+from services.sync import get_sync_service
 from services import push_scripts
 
 logger = logging.getLogger(__name__)
@@ -74,6 +76,56 @@ class MonitorService:
     def __init__(self):
         self.created_monitors: list[dict] = []
 
+    def _create_and_track_monitor(
+        self,
+        monitor: Monitor,
+        hostname: str,
+        kuma,
+    ) -> dict:
+        """Create a monitor in Uptime Kuma and track it in the database."""
+        try:
+            result = kuma.create_monitor(monitor)
+            kuma_monitor_id = result.get("monitorID")
+
+            # Tag and track in database
+            if kuma_monitor_id:
+                try:
+                    sync = get_sync_service()
+                    sync.add_tag_to_monitor(kuma_monitor_id, hostname)
+
+                    db = get_database()
+                    host = db.get_or_create_host(hostname)
+                    tracked = TrackedMonitor(
+                        id=None,
+                        kuma_monitor_id=kuma_monitor_id,
+                        host_id=host.id,
+                        name=monitor.name,
+                        type=monitor.type,
+                        target=monitor.url or monitor.hostname or monitor.docker_container,
+                        port=monitor.port,
+                        interval_seconds=monitor.interval,
+                        push_metric=None,
+                        status="active",
+                    )
+                    db.add_monitor(tracked)
+                    logger.info(f"Tracked monitor '{monitor.name}' in database")
+                except Exception as e:
+                    logger.warning(f"Failed to tag/track monitor: {e}")
+
+            return {
+                "monitor": monitor.name,
+                "type": monitor.type,
+                "status": "created",
+                "result": result,
+            }
+        except Exception as e:
+            return {
+                "monitor": monitor.name,
+                "type": monitor.type,
+                "status": "failed",
+                "error": str(e),
+            }
+
     def create_default_monitors(
         self,
         hostname: str,
@@ -92,21 +144,8 @@ class MonitorService:
         # Host health monitors
         health_monitors = create_host_health_monitors(hostname, ssh_port)
         for monitor in health_monitors:
-            try:
-                result = kuma.create_monitor(monitor)
-                created.append({
-                    "monitor": monitor.name,
-                    "type": monitor.type,
-                    "status": "created",
-                    "result": result,
-                })
-            except Exception as e:
-                created.append({
-                    "monitor": monitor.name,
-                    "type": monitor.type,
-                    "status": "failed",
-                    "error": str(e),
-                })
+            result = self._create_and_track_monitor(monitor, hostname, kuma)
+            created.append(result)
 
         # Web server monitors
         if web_ports:
@@ -114,41 +153,15 @@ class MonitorService:
                 https = port == 443 or port == 8443
                 web_monitors = create_web_server_monitors(hostname, port, https)
                 for monitor in web_monitors:
-                    try:
-                        result = kuma.create_monitor(monitor)
-                        created.append({
-                            "monitor": monitor.name,
-                            "type": monitor.type,
-                            "status": "created",
-                            "result": result,
-                        })
-                    except Exception as e:
-                        created.append({
-                            "monitor": monitor.name,
-                            "type": monitor.type,
-                            "status": "failed",
-                            "error": str(e),
-                        })
+                    result = self._create_and_track_monitor(monitor, hostname, kuma)
+                    created.append(result)
 
         # Docker container monitors
         if has_docker and containers:
             docker_monitors = create_docker_container_monitors(hostname, containers)
             for monitor in docker_monitors:
-                try:
-                    result = kuma.create_monitor(monitor)
-                    created.append({
-                        "monitor": monitor.name,
-                        "type": monitor.type,
-                        "status": "created",
-                        "result": result,
-                    })
-                except Exception as e:
-                    created.append({
-                        "monitor": monitor.name,
-                        "type": monitor.type,
-                        "status": "failed",
-                        "error": str(e),
-                    })
+                result = self._create_and_track_monitor(monitor, hostname, kuma)
+                created.append(result)
 
         self.created_monitors.extend(created)
         return created
@@ -199,6 +212,34 @@ class MonitorService:
 
         try:
             result = kuma.create_monitor(monitor)
+            kuma_monitor_id = result.get("monitorID")
+
+            # Add kuma-strapper tag and track in database
+            if kuma_monitor_id:
+                try:
+                    sync = get_sync_service()
+                    sync.add_tag_to_monitor(kuma_monitor_id, hostname)
+
+                    # Track in local database
+                    db = get_database()
+                    host = db.get_or_create_host(hostname)
+                    tracked = TrackedMonitor(
+                        id=None,
+                        kuma_monitor_id=kuma_monitor_id,
+                        host_id=host.id,
+                        name=monitor.name,
+                        type=monitor.type,
+                        target=suggestion.target,
+                        port=suggestion.port,
+                        interval_seconds=suggestion.interval,
+                        push_metric=suggestion.push_metric,
+                        status="active",
+                    )
+                    db.add_monitor(tracked)
+                    logger.info(f"Tracked monitor '{monitor.name}' in database")
+                except Exception as e:
+                    logger.warning(f"Failed to tag/track monitor: {e}")
+
             response = {
                 "monitor": monitor.name,
                 "type": monitor.type,
@@ -224,6 +265,26 @@ class MonitorService:
                     port=port,
                 )
                 response["deployment"] = deploy_result
+
+                # Track deployment in database
+                if deploy_result.get("status") == "deployed":
+                    try:
+                        db = get_database()
+                        host = db.get_or_create_host(hostname)
+                        tracked = db.get_monitor_by_kuma_id(monitor_id)
+                        if tracked:
+                            deployment = Deployment(
+                                id=None,
+                                monitor_id=tracked.id,
+                                host_id=host.id,
+                                script_path=deploy_result.get("script_path", ""),
+                                scheduling_method=deploy_result.get("scheduling", {}).get("method"),
+                                scheduling_info=deploy_result.get("scheduling", {}).get("info"),
+                                status="deployed",
+                            )
+                            db.add_deployment(deployment)
+                    except Exception as e:
+                        logger.warning(f"Failed to track deployment: {e}")
             else:
                 response["deployment"] = {
                     "status": "failed",
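The refactor replaces three copies of the try/except block with `_create_and_track_monitor`, which always returns one of two dict shapes. Illustrative values:

```python
# Success: the Kuma response is passed through under "result"
created_entry = {
    "monitor": "web01 HTTP",
    "type": "http",
    "status": "created",
    "result": {"monitorID": 42},
}

# Failure: the exception text is captured instead of aborting the batch
failed_entry = {
    "monitor": "web01 HTTP",
    "type": "http",
    "status": "failed",
    "error": "connection refused",
}
```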

backend/services/sync.py (new file, 325 lines)
@@ -0,0 +1,325 @@
+"""Sync service for reconciling local database with Uptime Kuma."""
+
+import logging
+from datetime import datetime
+from typing import Optional
+
+from services.database import get_database, TrackedMonitor, Host
+from services.kuma_client import get_kuma_client
+
+logger = logging.getLogger(__name__)
+
+TAG_NAME = "kuma-strapper"
+TAG_COLOR = "#5865F2"  # Discord blurple
+
+
+class SyncService:
+    """Service for syncing local database with Uptime Kuma."""
+
+    def __init__(self):
+        self.db = get_database()
+        self.kuma = get_kuma_client()
+        self._tag_id: Optional[int] = None
+
+    def ensure_tag_exists(self) -> int:
+        """Get or create the kuma-strapper tag in Uptime Kuma.
+
+        Returns:
+            The tag ID
+        """
+        if self._tag_id is not None:
+            return self._tag_id
+
+        try:
+            tags = self.kuma.get_tags()
+            for tag in tags:
+                if tag.get("name") == TAG_NAME:
+                    self._tag_id = tag["id"]
+                    logger.info(f"Found existing tag '{TAG_NAME}' with id {self._tag_id}")
+                    return self._tag_id
+
+            # Create tag if it doesn't exist
+            result = self.kuma.add_tag(name=TAG_NAME, color=TAG_COLOR)
+            self._tag_id = result.get("id")
+            logger.info(f"Created tag '{TAG_NAME}' with id {self._tag_id}")
+            return self._tag_id
+        except Exception as e:
+            logger.error(f"Failed to ensure tag exists: {e}")
+            raise
+
+    def get_tag_id(self) -> int:
+        """Get the kuma-strapper tag ID, creating it if needed."""
+        if self._tag_id is None:
+            return self.ensure_tag_exists()
+        return self._tag_id
+
+    def add_tag_to_monitor(self, monitor_id: int, hostname: str):
+        """Add the kuma-strapper tag to a monitor.
+
+        Args:
+            monitor_id: The Uptime Kuma monitor ID
+            hostname: The source hostname (stored as tag value)
+        """
+        tag_id = self.get_tag_id()
+        self.kuma.add_monitor_tag(tag_id=tag_id, monitor_id=monitor_id, value=hostname)
+        logger.info(f"Added tag to monitor {monitor_id} with hostname '{hostname}'")
+
+    def full_sync(self) -> dict:
+        """Sync all monitors with kuma-strapper tag from Uptime Kuma.
+
+        Returns:
+            Summary dict with added, updated, removed counts and any errors
+        """
+        logger.info("Starting full sync with Uptime Kuma")
+        added, updated, removed = 0, 0, 0
+        errors = []
+
+        try:
+            self.ensure_tag_exists()
+            monitors = self.kuma.get_monitors()
+
+            # Track which Kuma IDs we've seen
+            seen_kuma_ids = set()
+
+            for monitor in monitors:
+                tag_info = self._get_kuma_strapper_tag(monitor)
+                if not tag_info:
+                    continue  # Not managed by kuma-strapper
+
+                kuma_id = monitor["id"]
+                seen_kuma_ids.add(kuma_id)
+                hostname = tag_info.get("value", "unknown")
+
+                # Get or create host
+                host = self.db.get_or_create_host(hostname)
+
+                # Check if we already track this monitor
+                existing = self.db.get_monitor_by_kuma_id(kuma_id)
+
+                if existing:
+                    # Update if changed
+                    if self._monitor_changed(existing, monitor):
+                        self._update_tracked_monitor(existing, monitor)
+                        updated += 1
+                    self.db.mark_monitor_synced(existing.id)
+                else:
+                    # New monitor - import it
+                    self._import_monitor(monitor, host, hostname)
+                    added += 1
+
+            # Mark monitors deleted in Kuma
+            all_local = self.db.get_all_monitors()
+            for local in all_local:
+                if local.kuma_monitor_id not in seen_kuma_ids:
+                    if local.status != "deleted_in_kuma":
+                        self.db.update_monitor_status(local.id, "deleted_in_kuma")
+                        removed += 1
+                        logger.info(
+                            f"Marked monitor '{local.name}' (kuma_id={local.kuma_monitor_id}) "
+                            "as deleted_in_kuma"
+                        )
+
+        except Exception as e:
+            logger.error(f"Full sync failed: {e}")
+            errors.append(str(e))
+
+        result = {
+            "added": added,
+            "updated": updated,
+            "removed": removed,
+            "errors": errors,
+        }
+        logger.info(f"Full sync complete: {result}")
+        return result
+
+    def sync_host(self, hostname: str) -> dict:
+        """Sync monitors for a specific host.
+
+        Args:
+            hostname: The hostname to sync
+
+        Returns:
+            Summary dict with added, updated, removed counts and any errors
+        """
+        logger.info(f"Starting host sync for '{hostname}'")
+        added, updated, removed = 0, 0, 0
+        errors = []
+
+        try:
+            self.ensure_tag_exists()
+            monitors = self.kuma.get_monitors()
+
+            host = self.db.get_or_create_host(hostname)
+            seen_kuma_ids = set()
+
+            for monitor in monitors:
+                tag_info = self._get_kuma_strapper_tag(monitor)
+                if not tag_info:
+                    continue
+
+                tag_hostname = tag_info.get("value", "")
+                if tag_hostname != hostname:
+                    continue
+
+                kuma_id = monitor["id"]
+                seen_kuma_ids.add(kuma_id)
+
+                existing = self.db.get_monitor_by_kuma_id(kuma_id)
+
+                if existing:
+                    if self._monitor_changed(existing, monitor):
+                        self._update_tracked_monitor(existing, monitor)
+                        updated += 1
+                    self.db.mark_monitor_synced(existing.id)
+                else:
+                    self._import_monitor(monitor, host, hostname)
+                    added += 1
+
+            # Mark host's monitors as deleted if not in Kuma
+            local_monitors = self.db.get_monitors_for_host(host.id)
+            for local in local_monitors:
+                if local.kuma_monitor_id not in seen_kuma_ids:
+                    if local.status != "deleted_in_kuma":
+                        self.db.update_monitor_status(local.id, "deleted_in_kuma")
+                        removed += 1
+
+            self.db.update_host_sync_time(host.id)
+
+        except Exception as e:
+            logger.error(f"Host sync failed for '{hostname}': {e}")
+            errors.append(str(e))
+
+        result = {
+            "hostname": hostname,
+            "added": added,
+            "updated": updated,
+            "removed": removed,
+            "errors": errors,
+        }
+        logger.info(f"Host sync complete for '{hostname}': {result}")
+        return result
+
+    def _get_kuma_strapper_tag(self, monitor: dict) -> Optional[dict]:
+        """Extract kuma-strapper tag info from monitor if present.
+
+        Args:
+            monitor: Monitor dict from Uptime Kuma
+
+        Returns:
+            Tag dict with id, name, value, color or None if not found
+        """
+        for tag in monitor.get("tags", []):
+            if tag.get("name") == TAG_NAME:
+                return tag
+        return None
+
+    def _import_monitor(self, kuma_monitor: dict, host: Host, hostname: str):
+        """Import a monitor from Uptime Kuma into local DB.
+
+        Args:
+            kuma_monitor: Monitor dict from Uptime Kuma
+            host: Local Host object
+            hostname: The hostname string
+        """
+        monitor = TrackedMonitor(
+            id=None,
+            kuma_monitor_id=kuma_monitor["id"],
+            host_id=host.id,
+            name=kuma_monitor.get("name", "Unknown"),
+            type=self._kuma_type_to_string(kuma_monitor.get("type")),
+            target=self._extract_target(kuma_monitor),
+            port=kuma_monitor.get("port"),
+            interval_seconds=kuma_monitor.get("interval", 60),
+            push_metric=None,  # Can't determine from Kuma API
+            status="active" if kuma_monitor.get("active", True) else "paused",
+            synced_at=datetime.utcnow(),
+        )
+        self.db.add_monitor(monitor)
+        logger.info(f"Imported monitor '{monitor.name}' (kuma_id={monitor.kuma_monitor_id})")
+
+    def _monitor_changed(self, local: TrackedMonitor, kuma: dict) -> bool:
+        """Check if a monitor has changed in Uptime Kuma.
+
+        Args:
+            local: Local TrackedMonitor
+            kuma: Monitor dict from Uptime Kuma
+
+        Returns:
+            True if the monitor has changed
+        """
+        if local.name != kuma.get("name"):
+            return True
+        if local.interval_seconds != kuma.get("interval", 60):
+            return True
+        kuma_active = kuma.get("active", True)
+        local_active = local.status == "active"
+        if local_active != kuma_active:
+            return True
+        return False
+
+    def _update_tracked_monitor(self, local: TrackedMonitor, kuma: dict):
+        """Update a tracked monitor with data from Uptime Kuma.
+
+        Args:
+            local: Local TrackedMonitor to update
+            kuma: Monitor dict from Uptime Kuma
+        """
+        local.name = kuma.get("name", local.name)
+        local.interval_seconds = kuma.get("interval", 60)
+        local.status = "active" if kuma.get("active", True) else "paused"
+        local.target = self._extract_target(kuma)
+        local.port = kuma.get("port")
+        self.db.update_monitor(local)
+        logger.info(f"Updated monitor '{local.name}' (kuma_id={local.kuma_monitor_id})")
+
+    def _kuma_type_to_string(self, kuma_type) -> str:
+        """Convert Uptime Kuma monitor type to string.
+
+        The uptime-kuma-api returns types as MonitorType enum values.
+        """
+        if kuma_type is None:
+            return "unknown"
+
+        # Handle MonitorType enum or string
+        type_str = str(kuma_type)
+
+        # Map common types
+        type_map = {
+            "MonitorType.HTTP": "http",
+            "MonitorType.PORT": "tcp",
+            "MonitorType.PING": "ping",
+            "MonitorType.KEYWORD": "keyword",
+            "MonitorType.DOCKER": "docker",
+            "MonitorType.PUSH": "push",
+        }
+
+        return type_map.get(type_str, type_str.lower().replace("monitortype.", ""))
+
+    def _extract_target(self, monitor: dict) -> Optional[str]:
+        """Extract the target from a monitor based on its type.
+
+        Args:
+            monitor: Monitor dict from Uptime Kuma
+
+        Returns:
+            The target URL, hostname, or container name
+        """
+        if monitor.get("url"):
+            return monitor["url"]
+        if monitor.get("hostname"):
+            return monitor["hostname"]
+        if monitor.get("docker_container"):
+            return monitor["docker_container"]
+        return None
+
+
+# Global sync service instance
+_sync_service: Optional[SyncService] = None
+
+
+def get_sync_service() -> SyncService:
+    """Get the global sync service instance."""
+    global _sync_service
+    if _sync_service is None:
+        _sync_service = SyncService()
+    return _sync_service
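Driving the sync service directly looks like this (a sketch; assumes a working Uptime Kuma connection, and the result dict matches what /api/sync returns):

```python
from services.sync import get_sync_service

sync = get_sync_service()

result = sync.full_sync()
print(f"added={result['added']} updated={result['updated']} "
      f"removed={result['removed']} errors={result['errors']}")

# Per-host reconciliation, as run before each scan
print(sync.sync_host("web01.example.com"))  # hypothetical hostname
```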

docker-compose.yml
@@ -19,6 +19,10 @@ services:
       - DEV_MODE=${DEV_MODE:-false}
       # Optional: Enable debug mode
       - DEBUG=${DEBUG:-false}
+      # Database path (inside container)
+      - DATABASE_PATH=/app/data/kuma_strapper.db
+    volumes:
+      - ./data:/app/data
     restart: unless-stopped
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:5000/api/health"]

frontend/src/App.jsx
@@ -17,6 +17,7 @@ export default function App() {
   const [scanProgress, setScanProgress] = useState({});
   const [scanResults, setScanResults] = useState({});
   const [analysisResults, setAnalysisResults] = useState({});
+  const [existingMonitors, setExistingMonitors] = useState({});
   const [kumaAuth, setKumaAuth] = useState({ authenticated: false, url: '' });
   const [showKumaLogin, setShowKumaLogin] = useState(false);
 
@@ -94,6 +95,13 @@ export default function App() {
       setPendingApprovals(prev => prev.filter(r => r.id !== request.id));
     });
 
+    socket.on('host_sync_complete', (data) => {
+      setExistingMonitors(prev => ({
+        ...prev,
+        [data.hostname]: data.existing_monitors || [],
+      }));
+    });
+
     return () => {
       socket.off('connect');
       socket.off('disconnect');
@@ -105,6 +113,7 @@ export default function App() {
       socket.off('analysis_error');
      socket.off('approval_request');
       socket.off('approval_resolved');
+      socket.off('host_sync_complete');
     };
   }, []);
 
@@ -183,6 +192,7 @@ export default function App() {
         scanProgress={scanProgress}
         scanResults={scanResults}
         analysisResults={analysisResults}
+        existingMonitors={existingMonitors}
         devMode={settings.dev_mode}
       />
     </main>

frontend/src/api/client.js
@@ -81,4 +81,13 @@ export const api = {
     body: JSON.stringify({ username, password, totp }),
   }),
   kumaLogout: () => fetchApi('/kuma/logout', { method: 'POST' }),
+
+  // Sync
+  triggerSync: () => fetchApi('/sync', { method: 'POST' }),
+  syncHost: (hostname) => fetchApi(`/sync/host/${encodeURIComponent(hostname)}`, { method: 'POST' }),
+
+  // Hosts and tracked monitors
+  getHosts: () => fetchApi('/hosts'),
+  getHostMonitors: (hostname) => fetchApi(`/hosts/${encodeURIComponent(hostname)}/monitors`),
+  getTrackedMonitors: () => fetchApi('/monitors/tracked'),
 };

frontend/src/components/Dashboard.jsx
@@ -3,7 +3,7 @@ import { api } from '../api/client';
 import HostCard from './HostCard';
 import DiscoveryResults from './DiscoveryResults';
 
-export default function Dashboard({ scanProgress, scanResults, analysisResults, devMode }) {
+export default function Dashboard({ scanProgress, scanResults, analysisResults, existingMonitors, devMode }) {
   const [hostname, setHostname] = useState('');
   const [username, setUsername] = useState('root');
   const [port, setPort] = useState('22');
@@ -133,6 +133,7 @@ export default function Dashboard({ scanProgress, scanResults, analysisResults,
           scanId={currentScanId}
           scan={currentScan}
           analysis={currentAnalysis}
+          existingMonitors={existingMonitors?.[hostname] || []}
           devMode={devMode}
           onCommandApproved={async (command) => {
             await api.runCommand(currentScanId, command, 'User approved from UI');

frontend/src/components/DiscoveryResults.jsx
@@ -1,7 +1,7 @@
 import { useState } from 'react';
 import { api } from '../api/client';
 
-export default function DiscoveryResults({ scanId, scan, analysis, devMode, onCommandApproved, onQuestionAnswered }) {
+export default function DiscoveryResults({ scanId, scan, analysis, existingMonitors, devMode, onCommandApproved, onQuestionAnswered }) {
   const [selectedMonitors, setSelectedMonitors] = useState([]);
   const [creatingDefaults, setCreatingDefaults] = useState(false);
   const [creatingSuggested, setCreatingSuggested] = useState(false);
@@ -194,6 +194,66 @@ export default function DiscoveryResults({ scanId, scan, analysis, devCo
       </div>
     )}
 
+    {/* Existing Monitors */}
+    {existingMonitors && existingMonitors.length > 0 && (
+      <div className="mb-6">
+        <h4 className="font-medium mb-3 flex items-center gap-2">
+          <svg className="w-4 h-4 text-green-400" fill="none" stroke="currentColor" viewBox="0 0 24 24">
+            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" />
+          </svg>
+          Existing Monitors ({existingMonitors.length})
+          <span className="text-xs text-slate-400 font-normal">
+            Already tracked for this host
+          </span>
+        </h4>
+        <div className="space-y-2">
+          {existingMonitors.map((monitor, index) => (
+            <div
+              key={index}
+              className={`p-3 rounded ${
+                monitor.status === 'deleted_in_kuma'
+                  ? 'bg-red-900/20 border border-red-800'
+                  : 'bg-slate-700/30 border border-slate-600'
+              }`}
+            >
+              <div className="flex items-center gap-3">
+                <span className={`w-2 h-2 rounded-full ${
+                  monitor.status === 'active' ? 'bg-green-400' :
+                  monitor.status === 'paused' ? 'bg-yellow-400' :
+                  monitor.status === 'deleted_in_kuma' ? 'bg-red-400' :
+                  'bg-slate-400'
+                }`}></span>
+                <span className={`px-2 py-0.5 text-xs font-medium rounded ${
+                  monitor.type === 'http' ? 'bg-blue-900 text-blue-200' :
+                  monitor.type === 'tcp' ? 'bg-green-900 text-green-200' :
+                  monitor.type === 'docker' ? 'bg-cyan-900 text-cyan-200' :
+                  monitor.type === 'push' ? 'bg-purple-900 text-purple-200' :
+                  monitor.type === 'ping' ? 'bg-emerald-900 text-emerald-200' :
+                  'bg-slate-600 text-slate-200'
+                }`}>
+                  {monitor.type.toUpperCase()}
+                  {monitor.push_metric && (
+                    <span className="ml-1 opacity-75">({monitor.push_metric})</span>
+                  )}
+                </span>
+                <span className="text-slate-300">{monitor.name}</span>
+                {monitor.status === 'deleted_in_kuma' && (
+                  <span className="ml-auto text-xs text-red-400">
+                    Deleted in Uptime Kuma
+                  </span>
+                )}
+                {monitor.status === 'paused' && (
+                  <span className="ml-auto text-xs text-yellow-400">
+                    Paused
+                  </span>
+                )}
+              </div>
+            </div>
+          ))}
+        </div>
+      </div>
+    )}
+
     {/* Suggested Monitors */}
     {analysis.monitors && analysis.monitors.length > 0 && (
       <div>