From 70eec043271022a89d3e9e501dfe32423c41cb7b Mon Sep 17 00:00:00 2001
From: triggermeelmo
Date: Thu, 6 Nov 2025 20:09:51 +0100
Subject: [PATCH] viele Fixes

---
 Tests/populate_testdata.py | 412 ++++++++++++++++++++++
 Watcher/Views/Container/Overview.cshtml | 2 +-
 Watcher/Views/Server/Details.cshtml | 14 +-
 Watcher/Views/Shared/_Layout.cshtml | 6 +-
 Watcher/wwwroot/css/Login.css | 2 +-
 Watcher/wwwroot/css/server-detail.css | 7 +-
 Watcher/wwwroot/css/services-overview.css | 25 +-
 Watcher/wwwroot/css/site.css | 24 +-
 Watcher/wwwroot/css/user-info.css | 3 +-
 9 files changed, 469 insertions(+), 26 deletions(-)
 create mode 100644 Tests/populate_testdata.py

diff --git a/Tests/populate_testdata.py b/Tests/populate_testdata.py
new file mode 100644
index 0000000..b87941c
--- /dev/null
+++ b/Tests/populate_testdata.py
@@ -0,0 +1,412 @@
+#!/usr/bin/env python3
+"""
+Watcher Database Test Data Population Script
+Populates the SQLite database with realistic test data for local development
+"""
+
+import sqlite3
+import random
+from datetime import datetime, timedelta
+import os
+
+# Database path (os.path.join keeps the path portable and avoids the
+# invalid "\p"/"\w" escape sequences of a backslash-separated string literal)
+DB_PATH = os.path.join("Watcher", "persistence", "watcher.db")
+
+# Check whether the database exists
+if not os.path.exists(DB_PATH):
+    print(f"❌ Database not found: {DB_PATH}")
+    print("Please make sure the application has been started once so that the database gets created.")
+    exit(1)
+
+print(f"📊 Connecting to database: {DB_PATH}")
+conn = sqlite3.connect(DB_PATH)
+cursor = conn.cursor()
+
+# Delete existing data (optional - comment this out to keep existing data)
+print("\n🗑️ Deleting existing test data...")
+cursor.execute("DELETE FROM ContainerMetrics")
+cursor.execute("DELETE FROM Metrics")
+cursor.execute("DELETE FROM LogEvents")
+cursor.execute("DELETE FROM Containers")
+cursor.execute("DELETE FROM Images")
+cursor.execute("DELETE FROM Tags")
+cursor.execute("DELETE FROM Servers")
+# Users are NOT deleted
+conn.commit()
+
+print("✅ Old data deleted (users are kept)\n")
+
+# ============================================================================
+# 2. SERVERS - Create test servers
+# ============================================================================
+print("\n🖥️ Creating servers...")
+
+servers_data = [
+    {
+        "name": "Production-Web-01",
+        "ip": "192.168.1.10",
+        "type": "Ubuntu 22.04",
+        "description": "Haupt-Webserver für Production",
+        "cpu_type": "Intel Core i7-12700K",
+        "cpu_cores": 12,
+        "gpu_type": None,
+        "ram_size": 34359738368,  # 32 GB in bytes
+        "disk_space": "512 GB NVMe SSD",
+        "is_online": True
+    },
+    {
+        "name": "Dev-Server",
+        "ip": "192.168.1.20",
+        "type": "Debian 12",
+        "description": "Entwicklungs- und Testserver",
+        "cpu_type": "AMD Ryzen 9 5900X",
+        "cpu_cores": 12,
+        "gpu_type": None,
+        "ram_size": 68719476736,  # 64 GB in bytes
+        "disk_space": "1 TB NVMe SSD",
+        "is_online": True
+    },
+    {
+        "name": "GPU-Server-ML",
+        "ip": "192.168.1.30",
+        "type": "Ubuntu 22.04 LTS",
+        "description": "Machine Learning Training Server",
+        "cpu_type": "AMD Ryzen Threadripper 3970X",
+        "cpu_cores": 32,
+        "gpu_type": "NVIDIA RTX 4090",
+        "ram_size": 137438953472,  # 128 GB in bytes
+        "disk_space": "2 TB NVMe SSD",
+        "is_online": True
+    },
+    {
+        "name": "Backup-Server",
+        "ip": "192.168.1.40",
+        "type": "Ubuntu 20.04",
+        "description": "Backup und Storage Server",
+        "cpu_type": "Intel Xeon E5-2680 v4",
+        "cpu_cores": 14,
+        "gpu_type": None,
+        "ram_size": 17179869184,  # 16 GB in bytes
+        "disk_space": "10 TB HDD RAID5",
+        "is_online": False
+    },
+    {
+        "name": "Docker-Host-01",
+        "ip": "192.168.1.50",
+        "type": "Ubuntu 22.04",
+        "description": "Docker Container Host",
+        "cpu_type": "Intel Xeon Gold 6248R",
+        "cpu_cores": 24,
+        "gpu_type": None,
+        "ram_size": 68719476736,  # 64 GB in bytes
+        "disk_space": "2 TB NVMe SSD",
+        "is_online": True
+    }
+]
+
+server_ids = []
+for server in servers_data:
+    last_seen = datetime.utcnow() - timedelta(minutes=random.randint(0, 30)) if server["is_online"] else datetime.utcnow() - timedelta(hours=random.randint(2, 48))
+
+    cursor.execute("""
+        INSERT INTO Servers (
+            Name, IPAddress, Type, Description,
+            CpuType, CpuCores, GpuType, RamSize, DiskSpace,
+            CPU_Load_Warning, CPU_Load_Critical,
+            CPU_Temp_Warning, CPU_Temp_Critical,
+            RAM_Load_Warning, RAM_Load_Critical,
+            GPU_Load_Warning, GPU_Load_Critical,
+            GPU_Temp_Warning, GPU_Temp_Critical,
+            Disk_Usage_Warning, Disk_Usage_Critical,
+            DISK_Temp_Warning, DISK_Temp_Critical,
+            CreatedAt, IsOnline, LastSeen, IsVerified
+        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+    """, (
+        server["name"], server["ip"], server["type"], server["description"],
+        server["cpu_type"], server["cpu_cores"], server["gpu_type"], server["ram_size"], server["disk_space"],
+        75.0, 90.0,  # CPU Load
+        80.0, 90.0,  # CPU Temp
+        85.0, 95.0,  # RAM Load
+        75.0, 90.0,  # GPU Load
+        70.0, 80.0,  # GPU Temp
+        75.0, 90.0,  # Disk Usage
+        34.0, 36.0,  # Disk Temp
+        datetime.utcnow() - timedelta(days=random.randint(30, 365)),
+        server["is_online"], last_seen, True
+    ))
+    server_ids.append(cursor.lastrowid)
+    print(f" ✓ Server '{server['name']}' created (ID: {cursor.lastrowid})")
+
+conn.commit()
+
+# ============================================================================
+# 3. METRICS - Create server metrics (last 48 hours)
+# ============================================================================
+print("\n📈 Creating server metrics (last 48 hours)...")
+
+metrics_count = 0
+for server_id in server_ids:
+    # Look up the server's online state
+    cursor.execute("SELECT IsOnline FROM Servers WHERE Id = ?", (server_id,))
+    is_online = cursor.fetchone()[0]
+
+    if not is_online:
+        continue  # No metrics for offline servers
+
+    # Create metrics for the last 48 hours (every 5 minutes)
+    start_time = datetime.utcnow() - timedelta(hours=48)
+    current_time = start_time
+    server_metrics_count = 0
+
+    # Base values for realistic fluctuations
+    base_cpu = random.uniform(20, 40)
+    base_ram = random.uniform(40, 60)
+    base_gpu = random.uniform(10, 30) if server_id == server_ids[2] else 0  # GPU server only
+
+    while current_time <= datetime.utcnow():
+        # Realistic fluctuations
+        cpu_load = max(0, min(100, base_cpu + random.gauss(0, 15)))
+        cpu_temp = 30 + (cpu_load * 0.5) + random.gauss(0, 3)
+
+        ram_load = max(0, min(100, base_ram + random.gauss(0, 10)))
+
+        gpu_load = max(0, min(100, base_gpu + random.gauss(0, 20))) if base_gpu > 0 else 0
+        gpu_temp = 25 + (gpu_load * 0.6) + random.gauss(0, 3) if gpu_load > 0 else 0
+        gpu_vram_usage = gpu_load * 0.8 if gpu_load > 0 else 0
+
+        disk_usage = random.uniform(40, 75)
+        disk_temp = random.uniform(28, 35)
+
+        net_in = random.uniform(1000000, 10000000)  # 1-10 Mbps in bits
+        net_out = random.uniform(500000, 5000000)  # 0.5-5 Mbps in bits
+
+        cursor.execute("""
+            INSERT INTO Metrics (
+                ServerId, Timestamp,
+                CPU_Load, CPU_Temp,
+                GPU_Load, GPU_Temp, GPU_Vram_Size, GPU_Vram_Usage,
+                RAM_Size, RAM_Load,
+                DISK_Size, DISK_Usage, DISK_Temp,
+                NET_In, NET_Out
+            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+        """, (
+            server_id, current_time,
+            cpu_load, cpu_temp,
+            gpu_load, gpu_temp, 24.0, gpu_vram_usage,  # 24 GB VRAM
+            64.0, ram_load,
+            512.0, disk_usage, disk_temp,
+            net_in, net_out
+        ))
+
+        server_metrics_count += 1
+        current_time += timedelta(minutes=5)
+
+    metrics_count += server_metrics_count
+    print(f" ✓ Server {server_id}: {server_metrics_count} metrics created")
+
+conn.commit()
+print(f"✅ Created {metrics_count} metrics in total")
+
+# ============================================================================
+# 4. IMAGES - Docker images
+# ============================================================================
+print("\n🐳 Creating Docker images...")
+
+images_data = [
+    ("nginx", "latest"),
+    ("nginx", "alpine"),
+    ("postgres", "15"),
+    ("postgres", "14-alpine"),
+    ("redis", "7-alpine"),
+    ("node", "18-alpine"),
+    ("python", "3.11-slim"),
+    ("mysql", "8.0"),
+    ("traefik", "v2.10"),
+    ("portainer", "latest")
+]
+
+image_ids = {}
+for name, tag in images_data:
+    cursor.execute("""
+        INSERT INTO Images (Name, Tag)
+        VALUES (?, ?)
+    """, (name, tag))
+    image_ids[f"{name}:{tag}"] = cursor.lastrowid
+    print(f" ✓ Image '{name}:{tag}' created")
+
+conn.commit()
+
+# ============================================================================
+# 5. CONTAINERS - Docker containers
+# ============================================================================
+print("\n📦 Creating Docker containers...")
+
+# The first two servers (both online) host containers
+online_server_ids = server_ids[:2]
+
+containers_data = [
+    # Production-Web-01
+    ("nginx-web", "abc123def456", "nginx:latest", online_server_ids[0], True),
+    ("postgres-db", "def456ghi789", "postgres:15", online_server_ids[0], True),
+    ("redis-cache", "ghi789jkl012", "redis:7-alpine", online_server_ids[0], True),
+    ("traefik-proxy", "jkl012mno345", "traefik:v2.10", online_server_ids[0], True),
+
+    # Dev-Server
+    ("dev-nginx", "mno345pqr678", "nginx:alpine", online_server_ids[1], True),
+    ("dev-postgres", "pqr678stu901", "postgres:14-alpine", online_server_ids[1], False),
+    ("dev-redis", "stu901vwx234", "redis:7-alpine", online_server_ids[1], True),
+    ("test-app", "vwx234yz567", "node:18-alpine", online_server_ids[1], True),
+    ("portainer", "yz567abc890", "portainer:latest", online_server_ids[1], True),
+]
+
+container_ids = []
+for name, container_id, image, server_id, is_running in containers_data:
+    cursor.execute("""
+        INSERT INTO Containers (Name, ContainerId, Image, ServerId, IsRunning)
+        VALUES (?, ?, ?, ?, ?)
+    """, (name, container_id, image, server_id, is_running))
+    container_ids.append(cursor.lastrowid)
+    status = "🟢 Running" if is_running else "🔴 Stopped"
+    print(f" ✓ Container '{name}' created - {status}")
+
+conn.commit()
+
+# ============================================================================
+# 6. CONTAINER METRICS - Create container metrics (last 24 hours)
+# ============================================================================
+print("\n📊 Creating container metrics (last 24 hours)...")
+
+container_metrics_count = 0
+for container_id in container_ids:
+    # Check whether the container is running
+    cursor.execute("SELECT IsRunning FROM Containers WHERE Id = ?", (container_id,))
+    is_running = cursor.fetchone()[0]
+
+    if not is_running:
+        continue  # No metrics for stopped containers
+
+    # Create metrics for the last 24 hours (every 5 minutes)
+    start_time = datetime.utcnow() - timedelta(hours=24)
+    current_time = start_time
+
+    # Base values for containers (usually lower than the host)
+    base_cpu = random.uniform(5, 15)
+    base_ram = random.uniform(10, 30)
+
+    while current_time <= datetime.utcnow():
+        cpu_load = max(0, min(100, base_cpu + random.gauss(0, 8)))
+        cpu_temp = 30 + (cpu_load * 0.5) + random.gauss(0, 2)
+
+        ram_load = max(0, min(100, base_ram + random.gauss(0, 5)))
+        ram_size = random.uniform(0.5, 4.0)  # Containers use less RAM
+
+        cursor.execute("""
+            INSERT INTO ContainerMetrics (
+                ContainerId, Timestamp,
+                CPU_Load, CPU_Temp,
+                RAM_Size, RAM_Load
+            ) VALUES (?, ?, ?, ?, ?, ?)
+        """, (
+            container_id, current_time,
+            cpu_load, cpu_temp,
+            ram_size, ram_load
+        ))
+
+        container_metrics_count += 1
+        current_time += timedelta(minutes=5)
+
+conn.commit()
+print(f"✅ Created {container_metrics_count} container metrics in total")
+
+# ============================================================================
+# 7. LOG EVENTS - Create log entries
+# ============================================================================
+print("\n📝 Creating log events...")
+
+log_messages = [
+    ("Info", "Server erfolgreich gestartet", None, None),
+    ("Info", "Backup abgeschlossen", server_ids[3], None),
+    ("Warning", "CPU-Auslastung über 80%", server_ids[0], None),
+    ("Info", "Container gestartet", server_ids[0], container_ids[0]),
+    ("Error", "Datenbank-Verbindung fehlgeschlagen", server_ids[1], container_ids[5]),
+    ("Warning", "Speicherplatz unter 25%", server_ids[1], None),
+    ("Info", "Update installiert", server_ids[2], None),
+    ("Info", "Container neu gestartet", server_ids[0], container_ids[2]),
+    ("Warning", "GPU-Temperatur über 75°C", server_ids[2], None),
+    ("Info", "Netzwerk-Check erfolgreich", server_ids[0], None),
+]
+
+for level, message, server_id, container_id in log_messages:
+    timestamp = datetime.utcnow() - timedelta(hours=random.randint(0, 48))
+    cursor.execute("""
+        INSERT INTO LogEvents (Timestamp, Message, Level, ServerId, ContainerId)
+        VALUES (?, ?, ?, ?, ?)
+    """, (timestamp, message, level, server_id, container_id))
+    print(f" ✓ Log: [{level}] {message}")
+
+conn.commit()
+
+# ============================================================================
+# 8. TAGS - Tags for servers/containers
+# ============================================================================
+print("\n🏷️ Creating tags...")
+
+tags_data = ["production", "development", "backup", "docker", "monitoring", "critical"]
+
+for tag_name in tags_data:
+    cursor.execute("""
+        INSERT INTO Tags (Name)
+        VALUES (?)
+    """, (tag_name,))
+    print(f" ✓ Tag '{tag_name}' created")
+
+conn.commit()
+
+# ============================================================================
+# Summary
+# ============================================================================
+print("\n" + "="*60)
+print("✅ Test data created successfully!")
+print("="*60)
+
+# Print statistics
+cursor.execute("SELECT COUNT(*) FROM Servers")
+server_count = cursor.fetchone()[0]
+
+cursor.execute("SELECT COUNT(*) FROM Containers")
+container_count = cursor.fetchone()[0]
+
+cursor.execute("SELECT COUNT(*) FROM Metrics")
+metrics_count = cursor.fetchone()[0]
+
+cursor.execute("SELECT COUNT(*) FROM ContainerMetrics")
+container_metrics_count = cursor.fetchone()[0]
+
+cursor.execute("SELECT COUNT(*) FROM LogEvents")
+log_count = cursor.fetchone()[0]
+
+cursor.execute("SELECT COUNT(*) FROM Images")
+image_count = cursor.fetchone()[0]
+
+cursor.execute("SELECT COUNT(*) FROM Tags")
+tag_count = cursor.fetchone()[0]
+
+print(f"""
+📊 STATISTICS:
+   🖥️ Servers: {server_count}
+   📦 Containers: {container_count}
+   📈 Server metrics: {metrics_count}
+   📊 Container metrics: {container_metrics_count}
+   📝 Log events: {log_count}
+   🐳 Images: {image_count}
+   🏷️ Tags: {tag_count}
+
+💡 NOTES:
+   - The Users table was not modified
+   - Server metrics cover the last 48 hours, container metrics the last 24 hours
+   - Server 'Backup-Server' is offline (for testing)
+   - Container 'dev-postgres' is stopped (for testing)
+   - The database is located at: {DB_PATH}
+""")
+
+# Close the connection
+conn.close()
+print("🔒 Database connection closed\n")
diff --git a/Watcher/Views/Container/Overview.cshtml b/Watcher/Views/Container/Overview.cshtml
index 176d793..63694e4 100644
--- a/Watcher/Views/Container/Overview.cshtml
+++ b/Watcher/Views/Container/Overview.cshtml
@@ -90,7 +90,7 @@
-
+
CPU
diff --git a/Watcher/Views/Server/Details.cshtml b/Watcher/Views/Server/Details.cshtml
index a2b9759..ffb2aa1 100644
--- a/Watcher/Views/Server/Details.cshtml
+++ b/Watcher/Views/Server/Details.cshtml
@@ -13,7 +13,7 @@
-
+
@Model.Name
@@ -231,8 +231,8 @@
                 datasets: [{
                     label: 'CPU Last (%)',
                     data: [],
-                    borderColor: 'rgba(54, 162, 235, 1)',
-                    backgroundColor: 'rgba(54, 162, 235, 0.1)',
+                    borderColor: 'rgba(13, 202, 240, 1)',
+                    backgroundColor: 'rgba(13, 202, 240, 0.2)',
                     borderWidth: 2,
                     fill: true,
                     tension: 0.4,
@@ -284,8 +284,8 @@
                 datasets: [{
                     label: 'RAM Last (%)',
                     data: [],
-                    borderColor: 'rgba(75, 192, 192, 1)',
-                    backgroundColor: 'rgba(75, 192, 192, 0.1)',
+                    borderColor: 'rgba(25, 135, 84, 1)',
+                    backgroundColor: 'rgba(25, 135, 84, 0.2)',
                     borderWidth: 2,
                     fill: true,
                     tension: 0.4,
@@ -337,8 +337,8 @@
                 datasets: [{
                     label: 'GPU Last (%)',
                     data: [],
-                    borderColor: 'rgba(153, 102, 255, 1)',
-                    backgroundColor: 'rgba(153, 102, 255, 0.1)',
+                    borderColor: 'rgba(220, 53, 69, 1)',
+                    backgroundColor: 'rgba(220, 53, 69, 0.2)',
                     borderWidth: 2,
                     fill: true,
                     tension: 0.4,
diff --git a/Watcher/Views/Shared/_Layout.cshtml b/Watcher/Views/Shared/_Layout.cshtml
index 23e61bf..226fc88 100644
--- a/Watcher/Views/Shared/_Layout.cshtml
+++ b/Watcher/Views/Shared/_Layout.cshtml
@@ -104,7 +104,7 @@
@User.Identity?.Name
- Profil ansehen
+ Profil ansehen
@@ -115,7 +115,7 @@
 }
-
+
 @{
     var statusColor = UpdateCheckStore.IsUpdateAvailable ? "#ffc107" : "#28a745";
     var statusTitle = UpdateCheckStore.IsUpdateAvailable
@@ -123,7 +123,7 @@
         : "System ist aktuell";
 }
- Version: @VersionService.GetVersion()
+ Version: @VersionService.GetVersion()
diff --git a/Watcher/wwwroot/css/Login.css b/Watcher/wwwroot/css/Login.css
index 4960097..e02fd04 100644
--- a/Watcher/wwwroot/css/Login.css
+++ b/Watcher/wwwroot/css/Login.css
@@ -19,6 +19,6 @@
 }
 
 .form-error {
-    color: #ff6b6b;
+    color: var(--color-danger);
     font-size: 0.875rem;
 }
\ No newline at end of file
diff --git a/Watcher/wwwroot/css/server-detail.css b/Watcher/wwwroot/css/server-detail.css
index 7f9683a..2633530 100644
--- a/Watcher/wwwroot/css/server-detail.css
+++ b/Watcher/wwwroot/css/server-detail.css
@@ -14,14 +14,14 @@
 .info-label {
     font-size: 0.85rem;
     font-weight: 500;
-    color: #6c757d;
+    color: var(--color-muted);
     display: flex;
     align-items: center;
 }
 
 .info-value {
     font-size: 0.95rem;
-    color: var(--color-text, #212529);
+    color: var(--color-text, #f9feff);
     font-weight: 400;
     padding-left: 1.25rem;
 }
@@ -37,6 +37,7 @@
     text-transform: uppercase;
     letter-spacing: 0.5px;
     font-weight: 600;
+    color: var(--color-text) !important;
 }
 
 .card-body h6.text-muted i {
@@ -47,7 +48,7 @@
 .graphcontainer {
     height: 25rem;
     width: 100%;
-    background-color: var(--color-surface, #f8f9fa);
+    background-color: var(--color-surface, #212121);
     border-radius: 0.375rem;
 }
 
diff --git a/Watcher/wwwroot/css/services-overview.css b/Watcher/wwwroot/css/services-overview.css
index cc0779a..c31ebb4 100644
--- a/Watcher/wwwroot/css/services-overview.css
+++ b/Watcher/wwwroot/css/services-overview.css
@@ -2,7 +2,7 @@
 .container-card {
     transition: transform 0.2s ease, box-shadow 0.2s ease;
     border: 1px solid rgba(0, 0, 0, 0.125);
-    background: var(--color-background-secondary, #fff);
+    background: var(--color-surface);
 }
 
 .container-card:hover {
@@ -11,8 +11,8 @@
 }
 
 .container-card .card-header {
-    background-color: rgba(0, 0, 0, 0.03);
-    border-bottom: 1px solid rgba(0, 0, 0, 0.125);
+    background-color: var(--color-bg);
+    border-bottom: 1px solid rgba(255, 255, 255, 0.1);
     padding: 0.75rem 1rem;
 }
 
@@ -37,15 +37,24 @@
 }
 
 .info-label {
-    color: #6c757d;
+    color: var(--color-muted);
     font-weight: 500;
 }
 
 .info-value {
-    color: var(--color-text, #212529);
+    color: var(--color-text, #f9feff);
     word-break: break-all;
 }
 
+.info-value a {
+    color: var(--color-text);
+    transition: color 0.2s ease;
+}
+
+.info-value a:hover {
+    color: var(--color-primary);
+}
+
 /* Action Buttons */
 .action-buttons .btn {
     font-size: 0.85rem;
@@ -65,8 +74,8 @@
 }
 
 .metrics-content {
-    background-color: #f8f9fa !important;
-    border: 1px solid #dee2e6;
+    background-color: var(--color-bg) !important;
+    border: 1px solid rgba(255, 255, 255, 0.1);
 }
 
 .metric-item {
@@ -110,6 +119,6 @@
 /* Server Group Header */
 h5.text-muted {
     font-weight: 600;
-    border-bottom: 2px solid #dee2e6;
+    border-bottom: 2px solid var(--color-accent);
     padding-bottom: 0.5rem;
 }
\ No newline at end of file
diff --git a/Watcher/wwwroot/css/site.css b/Watcher/wwwroot/css/site.css
index 071d263..304ea49 100644
--- a/Watcher/wwwroot/css/site.css
+++ b/Watcher/wwwroot/css/site.css
@@ -6,6 +6,7 @@
     --color-text: #f9feff;
     --color-muted: #c0c0c0;
     --color-success: #14a44d;
+    --color-success-hover: #0f8c3c;
     --color-danger: #ff6b6b;
 }
 
@@ -54,9 +55,30 @@ a {
 }
 
 .btn-pocketid:hover {
-    background-color: #0f8c3c;
+    background-color: var(--color-success-hover);
 }
 
 hr {
     border-top: 1px solid var(--color-accent);
 }
+
+/* Bootstrap overrides for the dark theme */
+.text-muted {
+    color: var(--color-muted) !important;
+}
+
+.bg-light {
+    background-color: var(--color-surface) !important;
+}
+
+.text-text {
+    color: var(--color-text) !important;
+}
+
+.text-primary-emphasis {
+    color: var(--color-primary) !important;
+}
+
+.border-secondary {
+    border-color: rgba(255, 255, 255, 0.2) !important;
+}
diff --git a/Watcher/wwwroot/css/user-info.css b/Watcher/wwwroot/css/user-info.css
index 89c5118..ebbca38 100644
--- a/Watcher/wwwroot/css/user-info.css
+++ b/Watcher/wwwroot/css/user-info.css
@@ -1,6 +1,5 @@
 .table {
-    color: red;
-
+    color: var(--color-text);
 }
 
 .picture {
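
After Tests/populate_testdata.py has been run against a database created by the application, a quick sanity check of the populated tables might look like the minimal sketch below. It assumes the Watcher/persistence/watcher.db path and the table names used by the script above; adjust them if your local layout differs.

import os
import sqlite3

DB_PATH = os.path.join("Watcher", "persistence", "watcher.db")
# Table names are taken from the populate script above (assumed schema).
TABLES = ["Servers", "Containers", "Metrics", "ContainerMetrics", "LogEvents", "Images", "Tags"]

conn = sqlite3.connect(DB_PATH)
try:
    for table in TABLES:
        # Table names cannot be bound as SQL parameters, so they are
        # interpolated from the fixed whitelist above.
        count = conn.execute(f"SELECT COUNT(*) FROM {table}").fetchone()[0]
        print(f"{table}: {count} rows")
finally:
    conn.close()

With the defaults in the script (5-minute samples over 48 hours for four online servers, 24 hours for eight running containers), the Metrics and ContainerMetrics counts should land in the low thousands; zero rows in any table suggests the script aborted early or pointed at a different database file.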