commit a4b71f363126fd1cdae56ff209a4b86cc737867a Author: Schulz Date: Mon Feb 16 21:00:32 2026 +0100 Initial commit: Watchdog Docker v0.1 Complete OPNsense monitoring system with: - DHCP lease monitoring - New device detection (ARP) - Interface and gateway status monitoring - Web dashboard with real-time updates - Email notifications via SMTP - SQLite database for event logging - Docker deployment ready Co-Authored-By: Claude Sonnet 4.5 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..54f4393 --- /dev/null +++ b/.gitignore @@ -0,0 +1,31 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +venv/ +env/ +ENV/ +*.egg-info/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo + +# Data & Logs +data/*.db +data/*.log +data/backup.db + +# Sensitive Config (optional - keep structure but remove secrets) +# config/config.yaml + +# Docker +*.log + +# OS +.DS_Store +Thumbs.db diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..4b24a10 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,134 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +Watchdog Docker v0.1 - OPNsense monitoring system with web dashboard and email notifications. Monitors DHCP leases, new devices (ARP), interface status changes, and gateway status changes. + +## Tech Stack + +- **Backend:** Python 3.11, Flask, Flask-Login +- **Scheduling:** APScheduler (background polling) +- **Database:** SQLite +- **Frontend:** Bootstrap 5, Vanilla JavaScript +- **API:** OPNsense REST API +- **Email:** SMTP with TLS +- **Deployment:** Docker + Docker Compose + +## Commands + +### Docker +```bash +# Build and start +docker-compose up -d --build + +# View logs +docker-compose logs -f + +# Stop +docker-compose down + +# Restart +docker-compose restart +``` + +### Local Development +```bash +# Create virtual environment +python3 -m venv venv +source venv/bin/activate # Windows: venv\Scripts\activate + +# Install dependencies +pip install -r requirements.txt + +# Run app +python app/main.py +``` + +### Testing +```bash +# Test OPNsense API connection +python -c "from app.opnsense_api import OPNsenseAPI; import yaml; config = yaml.safe_load(open('config/config.yaml')); api = OPNsenseAPI(**config['opnsense']); print(api.test_connection())" + +# Generate password hash +python -c "from werkzeug.security import generate_password_hash; print(generate_password_hash('your_password'))" +``` + +## Architecture + +### Application Flow +1. **main.py** - Flask app initialization, routes, APScheduler setup +2. **monitor.py** - OPNsenseMonitor class runs periodic checks (check_all) +3. **opnsense_api.py** - OPNsenseAPI client handles all API communication +4. **database.py** - Database class manages SQLite operations +5. 
**email_handler.py** - EmailHandler sends SMTP notifications + +### Monitoring Cycle +- APScheduler triggers `monitor.check_all()` every N seconds (configurable) +- Each check method compares current state vs previous state +- On changes: log to database + send email (if enabled) +- State stored in memory (resets on restart) + +### Database Schema +- **events** - All monitored events (id, timestamp, type, interface, details, data JSON) +- **known_devices** - MAC addresses of known devices (auto-populated on first detection) + +### Web Interface +- Login required (Flask-Login) +- Dashboard shows stats + filterable event table +- AJAX auto-refresh every 10s via /api/events endpoint + +## Key Files + +- **config/config.yaml** - ALL settings (OPNsense, monitoring, web, email, database) +- **app/main.py** - Flask app entry point +- **app/monitor.py** - Core monitoring logic with state tracking +- **app/database.py** - SQLite operations +- **app/email_handler.py** - Email notifications with HTML templates +- **app/opnsense_api.py** - OPNsense API wrapper +- **app/templates/** - Jinja2 templates (login.html, dashboard.html) + +## Configuration + +All settings in `config/config.yaml`: +- OPNsense host, API credentials, SSL verification +- Monitoring interval, monitored interfaces, event toggles +- Web port, secret key, admin password hash +- SMTP settings, recipients +- Database path, retention days +- Logging level + +## Important Notes + +- **State Tracking:** Monitor uses previous_* dicts to detect changes (DHCP leases, devices, interfaces, gateways) +- **Interface Filtering:** If `monitored_interfaces` is empty, monitor ALL interfaces +- **Known Devices:** New devices are auto-added to known_devices table on first detection +- **Email Logic:** Only send emails for unknown devices (new_device event with known=False) +- **Retention:** Old events auto-cleanup based on retention_days (default 90) +- **Password:** Admin password must be werkzeug scrypt hash in config.yaml + +## Code Style + +- German language in UI/emails (Deutsch) +- Docstrings in English +- Logging: logger.info for important events, logger.debug for routine operations +- Error handling: try/except with logger.error(..., exc_info=True) +- Type hints where applicable (Dict, List, Optional) + +## Common Tasks + +### Add new event type +1. Add check method to `monitor.py` (check_xyz) +2. Add toggle to config.yaml under events +3. Call from check_all() if enabled +4. Update email_handler colors/formatting if needed + +### Modify OPNsense API calls +- All API methods in `opnsense_api.py` +- Use requests with basic auth (key:secret) +- SSL warnings suppressed if verify_ssl=false + +### Change database schema +- Update initialize() in database.py +- Add migration logic if needed (or recreate DB) diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..f1074eb --- /dev/null +++ b/Dockerfile @@ -0,0 +1,30 @@ +FROM python:3.11-slim + +LABEL maintainer="Watchdog Docker" +LABEL version="0.1" +LABEL description="OPNsense Monitoring Container" + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements +COPY requirements.txt . 
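+# Installing dependencies in their own layer lets Docker cache it when only the application code changes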
+RUN pip install --no-cache-dir -r requirements.txt + +# Copy application +COPY app/ ./app/ +COPY config/ ./config/ + +# Create data directory +RUN mkdir -p /app/data + +EXPOSE 5000 + +HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD python -c "import requests; requests.get('http://localhost:5000/health', timeout=5)" || exit 1 + +CMD ["python", "app/main.py"] \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..81ece08 --- /dev/null +++ b/README.md @@ -0,0 +1,382 @@ +# Watchdog Docker v0.1 + +Docker Container für OPNsense Monitoring mit Web-Interface und E-Mail-Benachrichtigungen. + +## Features + +- 🔍 **DHCP Lease Monitoring** - Überwachung neuer IP-Vergaben +- 📱 **Device Detection** - Erkennung neuer Geräte via ARP (mit Abgleich bekannter Geräte) +- 🔌 **Interface Monitoring** - Status-Änderungen von Netzwerk-Interfaces +- 🌐 **Gateway Monitoring** - Überwachung von Gateway-Status +- 📧 **E-Mail Benachrichtigungen** - Automatische Alerts bei Events +- 🖥️ **Web Dashboard** - Übersichtliches Interface mit Echtzeit-Updates +- 🔐 **Passwort-Schutz** - Gesicherter Zugang zum Dashboard + +## Überwachte Events + +| Event-Typ | Beschreibung | Interface-Filter | +|-----------|--------------|------------------| +| DHCP Lease | Neue IP-Adresse vergeben | ✓ | +| New Device | Neues Gerät im Netzwerk erkannt | ✓ | +| Interface Status | Interface up/down Änderungen | ✓ | +| Gateway Status | Gateway Status-Änderungen | - | + +## Voraussetzungen + +- Docker & Docker Compose +- OPNsense Firewall mit aktivierter API +- SMTP Server für E-Mail-Benachrichtigungen (optional) + +## OPNsense API Setup + +### 1. API Key & Secret generieren + +1. In OPNsense: **System → Access → Users** +2. Wähle deinen Admin-User oder erstelle einen neuen +3. Scrolle zu **API keys** und klicke auf **+** (Add) +4. Notiere dir **Key** und **Secret** - sie werden nur einmal angezeigt! + +### 2. API Zugriff testen + +```bash +curl -k -u "YOUR_KEY:YOUR_SECRET" https://192.168.1.1/api/core/menu/search/ +``` + +Falls erfolgreich, erhältst du JSON-Daten zurück. + +## Installation + +### 1. Repository klonen + +```bash +git clone +cd watchdog-docker +``` + +### 2. Konfiguration anpassen + +Bearbeite `config/config.yaml`: + +#### OPNsense Einstellungen + +```yaml +opnsense: + host: "https://192.168.1.1" # Deine OPNsense IP + api_key: "YOUR_API_KEY_HERE" + api_secret: "YOUR_API_SECRET_HERE" + verify_ssl: false # Auf true setzen bei gültigem Zertifikat +``` + +#### Admin-Passwort generieren + +Passwort-Hash für das Web-Interface generieren: + +```python +from werkzeug.security import generate_password_hash + +# Ersetze 'dein_passwort' mit deinem gewünschten Passwort +password = "dein_passwort" +hash = generate_password_hash(password) +print(hash) +``` + +Kopiere den generierten Hash in die `config.yaml`: + +```yaml +web: + admin_password_hash: "scrypt:32768:8:1$..." # Dein generierter Hash +``` + +#### E-Mail Konfiguration (optional) + +```yaml +email: + enabled: true + smtp_server: "mail.yourdomain.com" + smtp_port: 587 + smtp_use_tls: true + smtp_username: "watchdog@yourdomain.com" + smtp_password: "YOUR_PASSWORD" + from_address: "watchdog@yourdomain.com" + to_addresses: + - "admin@yourdomain.com" +``` + +Falls du keine E-Mails versenden möchtest, setze `enabled: false`. 
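+
+Die SMTP-Zugangsdaten lassen sich vorab mit einem kurzen Python-Snippet prüfen (Beispiel-Skizze; Server, Port und Zugangsdaten sind die Platzhalter aus der Beispiel-Konfiguration oben und müssen angepasst werden):
+
+```python
+import smtplib
+
+# Werte aus config.yaml einsetzen
+server = "mail.yourdomain.com"
+port = 587
+
+with smtplib.SMTP(server, port, timeout=10) as smtp:
+    smtp.starttls()
+    smtp.login("watchdog@yourdomain.com", "YOUR_PASSWORD")
+    print("SMTP-Login erfolgreich")
+```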
+ +#### Monitoring-Einstellungen + +```yaml +monitoring: + polling_interval: 60 # Sekunden zwischen Checks + + # Nur bestimmte Interfaces überwachen (leer = alle) + monitored_interfaces: + - lan + - wan + + # Events ein/ausschalten + events: + dhcp_leases: true + new_devices: true + interface_status: true + gateway_status: true +``` + +### 3. Docker Container starten + +```bash +# Container bauen und starten +docker-compose up -d + +# Logs anzeigen +docker-compose logs -f + +# Status prüfen +docker-compose ps +``` + +### 4. Web-Interface öffnen + +Öffne im Browser: `http://localhost:5000` + +- **Benutzername:** `admin` +- **Passwort:** Dein konfiguriertes Passwort + +## Verzeichnisstruktur + +``` +watchdog-docker/ +├── Dockerfile # Docker Image Definition +├── docker-compose.yml # Docker Compose Konfiguration +├── requirements.txt # Python Dependencies +├── README.md # Diese Datei +├── config/ +│ └── config.yaml # Hauptkonfiguration +├── app/ +│ ├── main.py # Flask App & Routes +│ ├── opnsense_api.py # OPNsense API Client +│ ├── monitor.py # Event Monitoring Logik +│ ├── database.py # SQLite Datenbank Handler +│ ├── email_handler.py # E-Mail Versand +│ └── templates/ +│ ├── login.html # Login-Seite +│ └── dashboard.html # Dashboard +└── data/ # Auto-generiert beim ersten Start + ├── watchdog.db # SQLite Datenbank + └── watchdog.log # Log-Datei +``` + +## API Endpoints + +Das Dashboard verwendet folgende REST-API Endpoints: + +- `GET /api/events` - Events abrufen + - Parameter: `limit`, `type`, `interface` +- `GET /api/stats` - Statistiken abrufen +- `GET /health` - Health Check + +Beispiel: +```bash +curl http://localhost:5000/api/events?type=dhcp_lease&limit=10 +``` + +## Datenbank + +Die SQLite-Datenbank wird automatisch beim ersten Start erstellt. + +### Tabellen + +#### events +| Spalte | Typ | Beschreibung | +|--------|-----|--------------| +| id | INTEGER | Primary Key | +| timestamp | DATETIME | Event-Zeitpunkt | +| type | TEXT | Event-Typ | +| interface | TEXT | Interface-Name | +| details | TEXT | Event-Details | +| data | JSON | Vollständige Event-Daten | + +#### known_devices +| Spalte | Typ | Beschreibung | +|--------|-----|--------------| +| mac | TEXT | MAC-Adresse (Primary Key) | +| name | TEXT | Geräte-Name | +| first_seen | DATETIME | Erstmals gesehen | +| last_seen | DATETIME | Zuletzt gesehen | + +### Datenbank-Wartung + +Alte Events werden automatisch nach der konfigurierten Retention-Zeit gelöscht (Standard: 90 Tage). + +## E-Mail Benachrichtigungen + +### Event-Typen + +- **DHCP Lease** 🔵 - Neue IP-Vergabe +- **New Device** 🔴 - Unbekanntes Gerät (nur bei unbekannten Geräten) +- **Interface Status** ⚠️ - Interface up/down +- **Gateway Status** ⚠️ - Gateway-Änderungen + +### E-Mail Beispiel + +![Email Notification](docs/email-example.png) + +Jede E-Mail enthält: +- Event-Typ mit Icon +- Zeitpunkt +- Interface/Gateway +- IP, MAC, Hostname (bei Geräten) +- Detaillierte Beschreibung + +## Troubleshooting + +### Container startet nicht + +```bash +# Logs prüfen +docker-compose logs + +# Container Status +docker-compose ps +``` + +### OPNsense API-Verbindung schlägt fehl + +1. Prüfe ob OPNsense erreichbar ist: + ```bash + ping 192.168.1.1 + ``` + +2. Teste API manuell: + ```bash + curl -k -u "KEY:SECRET" https://192.168.1.1/api/core/menu/search/ + ``` + +3. Prüfe Firewall-Regeln auf OPNsense + +### Keine E-Mails werden versendet + +1. Prüfe SMTP-Einstellungen in `config.yaml` +2. Teste SMTP-Verbindung: + ```bash + telnet mail.yourdomain.com 587 + ``` +3. 
Prüfe Container-Logs auf SMTP-Fehler + +### Login funktioniert nicht + +1. Prüfe ob Passwort-Hash korrekt generiert wurde +2. Versuche neuen Hash zu generieren: + ```python + from werkzeug.security import generate_password_hash + print(generate_password_hash("dein_passwort")) + ``` +3. Hash in `config.yaml` ersetzen und Container neu starten + +### Dashboard zeigt keine Events + +1. Prüfe ob Monitoring läuft: + ```bash + docker-compose logs | grep "Monitoring started" + ``` + +2. Prüfe ob OPNsense API antwortet: + ```bash + docker-compose logs | grep "OPNsense" + ``` + +3. Erhöhe Log-Level auf DEBUG in `config.yaml`: + ```yaml + logging: + level: "DEBUG" + ``` + +## Update + +```bash +# Container stoppen +docker-compose down + +# Neueste Version pullen +git pull + +# Container neu bauen und starten +docker-compose up -d --build +``` + +**Hinweis:** Die Datenbank (`data/watchdog.db`) bleibt bei Updates erhalten. + +## Backup + +### Datenbank sichern + +```bash +# Backup erstellen +docker-compose exec watchdog sqlite3 /app/data/watchdog.db ".backup /app/data/backup.db" + +# Backup aus Container kopieren +docker cp watchdog:/app/data/backup.db ./backup.db +``` + +### Konfiguration sichern + +```bash +# Config sichern +cp config/config.yaml config/config.yaml.backup +``` + +## Sicherheitshinweise + +- ⚠️ Ändere das `secret_key` in der `config.yaml` +- ⚠️ Verwende ein sicheres Admin-Passwort +- ⚠️ Aktiviere SSL-Verifizierung (`verify_ssl: true`) in Produktion +- ⚠️ Verwende HTTPS für das Web-Interface (z.B. mit Reverse Proxy) +- ⚠️ Speichere keine API-Secrets in Git (nutze `.env` Dateien) + +## Entwicklung + +### Lokale Entwicklung ohne Docker + +```bash +# Virtual Environment erstellen +python3 -m venv venv +source venv/bin/activate # Windows: venv\Scripts\activate + +# Dependencies installieren +pip install -r requirements.txt + +# App starten +python app/main.py +``` + +### Tests + +```bash +# OPNsense API Connection testen +python -c "from app.opnsense_api import OPNsenseAPI; import yaml; config = yaml.safe_load(open('config/config.yaml')); api = OPNsenseAPI(**config['opnsense']); print(api.test_connection())" +``` + +## Support + +Bei Problemen oder Fragen: + +1. Prüfe die [Troubleshooting](#troubleshooting) Sektion +2. Aktiviere DEBUG-Logging und prüfe die Logs +3. Erstelle ein Issue mit: + - Container-Logs (`docker-compose logs`) + - Konfiguration (ohne Secrets!) 
+ - Fehlerbeschreibung + +## Lizenz + +MIT License + +## Changelog + +### v0.1 (Initial Release) +- OPNsense Monitoring (DHCP, Devices, Interfaces, Gateways) +- Web Dashboard mit Echtzeit-Updates +- E-Mail Benachrichtigungen +- SQLite Datenbank +- Docker Support diff --git a/app/database.py b/app/database.py new file mode 100644 index 0000000..b6fe5c6 --- /dev/null +++ b/app/database.py @@ -0,0 +1,253 @@ +import sqlite3 +import logging +import json +from datetime import datetime, timedelta +from typing import Dict, List, Optional + +logger = logging.getLogger(__name__) + +class Database: + """SQLite database handler for Watchdog Docker""" + + def __init__(self, db_path: str): + self.db_path = db_path + logger.info(f"Database initialized at {db_path}") + + def _get_connection(self): + """Get database connection""" + conn = sqlite3.connect(self.db_path) + conn.row_factory = sqlite3.Row + return conn + + def initialize(self): + """Create database tables if they don't exist""" + logger.info("Initializing database tables") + + conn = self._get_connection() + cursor = conn.cursor() + + # Events table + cursor.execute(''' + CREATE TABLE IF NOT EXISTS events ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + timestamp DATETIME DEFAULT CURRENT_TIMESTAMP, + type TEXT NOT NULL, + interface TEXT, + details TEXT NOT NULL, + data JSON + ) + ''') + + # Create index on timestamp for faster queries + cursor.execute(''' + CREATE INDEX IF NOT EXISTS idx_events_timestamp + ON events(timestamp DESC) + ''') + + # Create index on type for filtering + cursor.execute(''' + CREATE INDEX IF NOT EXISTS idx_events_type + ON events(type) + ''') + + # Known devices table + cursor.execute(''' + CREATE TABLE IF NOT EXISTS known_devices ( + mac TEXT PRIMARY KEY, + name TEXT, + first_seen DATETIME DEFAULT CURRENT_TIMESTAMP, + last_seen DATETIME DEFAULT CURRENT_TIMESTAMP + ) + ''') + + conn.commit() + conn.close() + logger.info("Database tables initialized successfully") + + def add_event(self, event: Dict): + """Add an event to the database""" + conn = self._get_connection() + cursor = conn.cursor() + + try: + cursor.execute(''' + INSERT INTO events (type, interface, details, data) + VALUES (?, ?, ?, ?) + ''', ( + event.get('type'), + event.get('interface'), + event.get('details'), + json.dumps(event) + )) + + conn.commit() + logger.debug(f"Event added: {event.get('type')} - {event.get('details')}") + + # Auto-add new devices to known_devices if applicable + if event.get('type') == 'new_device' and not event.get('known'): + mac = event.get('mac') + hostname = event.get('hostname', 'Unknown') + if mac: + self.add_known_device(mac, hostname) + + except Exception as e: + logger.error(f"Error adding event: {e}", exc_info=True) + conn.rollback() + finally: + conn.close() + + def get_recent_events(self, limit: int = 100, event_type: Optional[str] = None, + interface: Optional[str] = None) -> List[Dict]: + """Get recent events with optional filtering""" + conn = self._get_connection() + cursor = conn.cursor() + + query = 'SELECT * FROM events WHERE 1=1' + params = [] + + if event_type: + query += ' AND type = ?' + params.append(event_type) + + if interface: + query += ' AND interface = ?' + params.append(interface) + + query += ' ORDER BY timestamp DESC LIMIT ?' 
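+        # Newest events first; the limit value is bound as the final query parameter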
+ params.append(limit) + + try: + cursor.execute(query, params) + rows = cursor.fetchall() + + events = [] + for row in rows: + event = { + 'id': row['id'], + 'timestamp': row['timestamp'], + 'type': row['type'], + 'interface': row['interface'], + 'details': row['details'], + 'data': json.loads(row['data']) if row['data'] else {} + } + events.append(event) + + return events + + except Exception as e: + logger.error(f"Error getting events: {e}", exc_info=True) + return [] + finally: + conn.close() + + def get_statistics(self) -> Dict: + """Get event statistics""" + conn = self._get_connection() + cursor = conn.cursor() + + try: + # Total events + cursor.execute('SELECT COUNT(*) as count FROM events') + total = cursor.fetchone()['count'] + + # Events today + today = datetime.now().strftime('%Y-%m-%d') + cursor.execute( + 'SELECT COUNT(*) as count FROM events WHERE DATE(timestamp) = ?', + (today,) + ) + today_count = cursor.fetchone()['count'] + + # Events last hour + one_hour_ago = (datetime.now() - timedelta(hours=1)).strftime('%Y-%m-%d %H:%M:%S') + cursor.execute( + 'SELECT COUNT(*) as count FROM events WHERE timestamp >= ?', + (one_hour_ago,) + ) + hour_count = cursor.fetchone()['count'] + + # Events by type + cursor.execute(''' + SELECT type, COUNT(*) as count + FROM events + GROUP BY type + ''') + by_type = {row['type']: row['count'] for row in cursor.fetchall()} + + # Known devices count + cursor.execute('SELECT COUNT(*) as count FROM known_devices') + known_devices = cursor.fetchone()['count'] + + return { + 'total_events': total, + 'events_today': today_count, + 'events_last_hour': hour_count, + 'events_by_type': by_type, + 'known_devices': known_devices + } + + except Exception as e: + logger.error(f"Error getting statistics: {e}", exc_info=True) + return { + 'total_events': 0, + 'events_today': 0, + 'events_last_hour': 0, + 'events_by_type': {}, + 'known_devices': 0 + } + finally: + conn.close() + + def is_known_device(self, mac: str) -> bool: + """Check if a device is known""" + conn = self._get_connection() + cursor = conn.cursor() + + try: + cursor.execute('SELECT mac FROM known_devices WHERE mac = ?', (mac,)) + result = cursor.fetchone() + return result is not None + except Exception as e: + logger.error(f"Error checking known device: {e}", exc_info=True) + return False + finally: + conn.close() + + def add_known_device(self, mac: str, name: str = 'Unknown'): + """Add a device to known devices""" + conn = self._get_connection() + cursor = conn.cursor() + + try: + cursor.execute(''' + INSERT OR REPLACE INTO known_devices (mac, name, last_seen) + VALUES (?, ?, CURRENT_TIMESTAMP) + ''', (mac, name)) + + conn.commit() + logger.info(f"Device added to known devices: {mac} ({name})") + except Exception as e: + logger.error(f"Error adding known device: {e}", exc_info=True) + conn.rollback() + finally: + conn.close() + + def cleanup_old_events(self, retention_days: int): + """Delete events older than retention period""" + conn = self._get_connection() + cursor = conn.cursor() + + try: + cutoff_date = (datetime.now() - timedelta(days=retention_days)).strftime('%Y-%m-%d %H:%M:%S') + + cursor.execute('DELETE FROM events WHERE timestamp < ?', (cutoff_date,)) + deleted = cursor.rowcount + + conn.commit() + logger.info(f"Cleaned up {deleted} events older than {retention_days} days") + + except Exception as e: + logger.error(f"Error cleaning up old events: {e}", exc_info=True) + conn.rollback() + finally: + conn.close() diff --git a/app/email_handler.py b/app/email_handler.py new file mode 
100644 index 0000000..b3d6a8c --- /dev/null +++ b/app/email_handler.py @@ -0,0 +1,308 @@ +import smtplib +import logging +from email.mime.text import MIMEText +from email.mime.multipart import MIMEMultipart +from datetime import datetime +from typing import Dict, List + +logger = logging.getLogger(__name__) + +class EmailHandler: + """Handle email notifications for Watchdog events""" + + def __init__(self, config: Dict): + self.config = config + self.smtp_server = config['smtp_server'] + self.smtp_port = config['smtp_port'] + self.smtp_use_tls = config['smtp_use_tls'] + self.smtp_username = config['smtp_username'] + self.smtp_password = config['smtp_password'] + self.from_address = config['from_address'] + self.to_addresses = config['to_addresses'] + + logger.info(f"EmailHandler initialized for {self.smtp_server}:{self.smtp_port}") + + def _send_email(self, subject: str, html_content: str): + """Send an email""" + try: + # Create message + msg = MIMEMultipart('alternative') + msg['Subject'] = subject + msg['From'] = self.from_address + msg['To'] = ', '.join(self.to_addresses) + + # Add HTML content + html_part = MIMEText(html_content, 'html') + msg.attach(html_part) + + # Send email + with smtplib.SMTP(self.smtp_server, self.smtp_port) as server: + if self.smtp_use_tls: + server.starttls() + + server.login(self.smtp_username, self.smtp_password) + server.send_message(msg) + + logger.info(f"Email sent: {subject}") + return True + + except Exception as e: + logger.error(f"Failed to send email: {e}", exc_info=True) + return False + + def send_event_notification(self, event: Dict): + """Send notification for a single event""" + subject = self._format_subject(event) + html_content = self._format_event_email(event) + + self._send_email(subject, html_content) + + def send_startup_notification(self): + """Send notification when Watchdog starts""" + subject = "🟢 Watchdog Docker gestartet" + + html_content = f""" + + + + + +
+        <div style="font-family: Arial, sans-serif; max-width: 600px;">
+            <h2 style="margin-bottom: 0;">🟢 Watchdog Docker</h2>
+            <p style="margin-top: 4px;"><strong>Monitoring gestartet</strong></p>
+            <p>
+                <strong>Status:</strong> Aktiv<br>
+                <strong>Zeitpunkt:</strong> {datetime.now().strftime('%d.%m.%Y %H:%M:%S')}<br>
+                <strong>Version:</strong> 0.1
+            </p>
+            <p>OPNsense Monitoring ist aktiv und überwacht folgende Events:</p>
+            <ul style="list-style: none; padding-left: 0;">
+                <li>• DHCP Leases</li>
+                <li>• Neue Geräte (ARP)</li>
+                <li>• Interface Status</li>
+                <li>• Gateway Status</li>
+            </ul>
+        </div>
+ + + """ + + self._send_email(subject, html_content) + + def _format_subject(self, event: Dict) -> str: + """Format email subject based on event type""" + event_type = event.get('type', 'unknown') + + prefixes = { + 'dhcp_lease': '🔵 Neue DHCP Lease', + 'new_device': '🔴 Neues Gerät erkannt' if not event.get('known') else '🟡 Bekanntes Gerät', + 'interface_status': '⚠️ Interface Status', + 'gateway_status': '⚠️ Gateway Status' + } + + prefix = prefixes.get(event_type, '📢 Event') + interface = event.get('interface', '') + + if interface: + return f"{prefix} - {interface}" + else: + return prefix + + def _format_event_email(self, event: Dict) -> str: + """Format event as HTML email""" + event_type = event.get('type', 'unknown') + timestamp = datetime.now().strftime('%d.%m.%Y %H:%M:%S') + + # Event type specific colors + colors = { + 'dhcp_lease': '#0d6efd', + 'new_device': '#dc3545', + 'interface_status': '#ffc107', + 'gateway_status': '#fd7e14' + } + color = colors.get(event_type, '#6c757d') + + # Build event details HTML + details_html = self._build_event_details_html(event) + + html_content = f""" + + + + + +
+        <div style="font-family: Arial, sans-serif; max-width: 600px;">
+            <div style="background-color: {color}; color: #ffffff; padding: 12px;">
+                <h2 style="margin: 0;">{self._format_subject(event)}</h2>
+                <p style="margin: 4px 0 0 0;">{event.get('details', 'Event detected')}</p>
+            </div>
+            <div style="padding: 12px;">
+                <p><strong>Zeitpunkt:</strong> {timestamp}</p>
+                <p><strong>Event-Typ:</strong> {event_type.upper()}</p>
+                {details_html}
+            </div>
+        </div>
+ + + """ + + return html_content + + def _build_event_details_html(self, event: Dict) -> str: + """Build event-specific details HTML""" + event_type = event.get('type', 'unknown') + html = "" + + # Common fields + if event.get('interface'): + html += f""" +
+            <p><strong>Interface:</strong> {event['interface']}</p>
+ """ + + # Type-specific fields + if event_type in ['dhcp_lease', 'new_device']: + if event.get('ip'): + html += f""" +
+            <p><strong>IP-Adresse:</strong> {event['ip']}</p>
+ """ + if event.get('mac'): + html += f""" +
+            <p><strong>MAC-Adresse:</strong> {event['mac']}</p>
+ """ + if event.get('hostname'): + html += f""" +
+            <p><strong>Hostname:</strong> {event['hostname']}</p>
+ """ + + if event_type == 'new_device': + known = event.get('known', False) + html += f""" +
+            <p><strong>Bekannt:</strong> {'✓ Ja' if known else '✗ Nein (Erstes Mal gesehen!)'}</p>
+ """ + + if event_type in ['interface_status', 'gateway_status']: + if event.get('old_status'): + html += f""" +
+            <p><strong>Vorheriger Status:</strong> {event['old_status']}</p>
+ """ + if event.get('new_status'): + html += f""" +
+            <p><strong>Neuer Status:</strong> {event['new_status']}</p>
+ """ + + if event_type == 'gateway_status' and event.get('gateway'): + html += f""" +
+            <p><strong>Gateway:</strong> {event['gateway']}</p>
+ """ + + return html diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..3b0b76c --- /dev/null +++ b/app/main.py @@ -0,0 +1,161 @@ +from flask import Flask, render_template, redirect, url_for, request, flash, jsonify +from flask_login import LoginManager, UserMixin, login_user, logout_user, login_required, current_user +from werkzeug.security import check_password_hash +import yaml +import logging +from apscheduler.schedulers.background import BackgroundScheduler +from datetime import datetime +import os + +from database import Database +from monitor import OPNsenseMonitor +from email_handler import EmailHandler + +# Load configuration +with open('config/config.yaml', 'r') as f: + config = yaml.safe_load(f) + +# Setup logging +logging.basicConfig( + level=getattr(logging, config['logging']['level']), + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[ + logging.FileHandler(config['logging']['file']), + logging.StreamHandler() + ] +) +logger = logging.getLogger(__name__) + +# Initialize Flask +app = Flask(__name__) +app.config['SECRET_KEY'] = config['web']['secret_key'] + +# Initialize Flask-Login +login_manager = LoginManager() +login_manager.init_app(app) +login_manager.login_view = 'login' + +# Initialize components +db = Database(config['database']['path']) +email_handler = EmailHandler(config['email']) if config['email']['enabled'] else None +monitor = OPNsenseMonitor(config, db, email_handler) + +# Simple User class +class User(UserMixin): + def __init__(self, id): + self.id = id + +@login_manager.user_loader +def load_user(user_id): + return User(user_id) + +# Routes +@app.route('/') +@login_required +def index(): + return redirect(url_for('dashboard')) + +@app.route('/login', methods=['GET', 'POST']) +def login(): + if current_user.is_authenticated: + return redirect(url_for('dashboard')) + + if request.method == 'POST': + username = request.form.get('username') + password = request.form.get('password') + + # Simple authentication (username: admin) + if username == 'admin' and check_password_hash( + config['web']['admin_password_hash'], password + ): + user = User('admin') + login_user(user) + logger.info(f"User {username} logged in successfully") + return redirect(url_for('dashboard')) + else: + logger.warning(f"Failed login attempt for user: {username}") + flash('Invalid username or password', 'danger') + + return render_template('login.html') + +@app.route('/logout') +@login_required +def logout(): + logger.info(f"User {current_user.id} logged out") + logout_user() + return redirect(url_for('login')) + +@app.route('/dashboard') +@login_required +def dashboard(): + # Get recent events + events = db.get_recent_events(limit=100) + stats = db.get_statistics() + + return render_template('dashboard.html', + events=events, + stats=stats, + config=config) + +@app.route('/api/events') +@login_required +def api_events(): + """API endpoint for real-time event updates""" + limit = request.args.get('limit', 50, type=int) + event_type = request.args.get('type', None) + interface = request.args.get('interface', None) + + events = db.get_recent_events(limit=limit, event_type=event_type, interface=interface) + return jsonify(events) + +@app.route('/api/stats') +@login_required +def api_stats(): + """API endpoint for statistics""" + return jsonify(db.get_statistics()) + +@app.route('/health') +def health(): + """Health check endpoint""" + return jsonify({ + 'status': 'healthy', + 'timestamp': datetime.now().isoformat(), + 'version': '0.1' + }) + +# 
Initialize scheduler +def start_monitoring(): + scheduler = BackgroundScheduler() + interval = config['monitoring']['polling_interval'] + + scheduler.add_job( + func=monitor.check_all, + trigger="interval", + seconds=interval, + id='opnsense_monitor', + name='OPNsense Monitor', + replace_existing=True + ) + + scheduler.start() + logger.info(f"Monitoring started with {interval}s interval") + + # Send startup notification + if email_handler and config['email']['send_on_startup']: + email_handler.send_startup_notification() + +if __name__ == '__main__': + logger.info("Starting Watchdog Docker v0.1") + + # Initialize database + db.initialize() + + # Start monitoring + start_monitoring() + + # Run Flask + app.run( + host=config['web']['host'], + port=config['web']['port'], + debug=False + ) \ No newline at end of file diff --git a/app/monitor.py b/app/monitor.py new file mode 100644 index 0000000..7934797 --- /dev/null +++ b/app/monitor.py @@ -0,0 +1,197 @@ +import logging +from datetime import datetime +from typing import Dict, List, Optional +from opnsense_api import OPNsenseAPI +from database import Database +from email_handler import EmailHandler + +logger = logging.getLogger(__name__) + +class OPNsenseMonitor: + """Monitor OPNsense events""" + + def __init__(self, config: Dict, db: Database, email_handler: Optional[EmailHandler]): + self.config = config + self.db = db + self.email_handler = email_handler + + # Initialize API client + opn_config = config['opnsense'] + self.api = OPNsenseAPI( + host=opn_config['host'], + api_key=opn_config['api_key'], + api_secret=opn_config['api_secret'], + verify_ssl=opn_config['verify_ssl'] + ) + + # Previous states + self.previous_leases = {} + self.previous_devices = {} + self.previous_interfaces = {} + self.previous_gateways = {} + + logger.info("OPNsense Monitor initialized") + + def check_all(self): + """Check all monitored events""" + logger.debug("Starting monitoring cycle") + + events_config = self.config['monitoring']['events'] + + try: + if events_config.get('dhcp_leases'): + self.check_dhcp_leases() + + if events_config.get('new_devices'): + self.check_new_devices() + + if events_config.get('interface_status'): + self.check_interface_status() + + if events_config.get('gateway_status'): + self.check_gateway_status() + + except Exception as e: + logger.error(f"Error in monitoring cycle: {e}", exc_info=True) + + def check_dhcp_leases(self): + """Check for new DHCP leases""" + leases = self.api.get_dhcp_leases() + if not leases: + return + + monitored_interfaces = self.config['monitoring'].get('monitored_interfaces', []) + + for lease in leases: + lease_id = lease.get('address', '') + lease.get('mac', '') + interface = lease.get('if', '') + + # Filter by interface if specified + if monitored_interfaces and interface not in monitored_interfaces: + continue + + if lease_id not in self.previous_leases: + # New lease detected + event = { + 'type': 'dhcp_lease', + 'interface': interface, + 'ip': lease.get('address'), + 'mac': lease.get('mac'), + 'hostname': lease.get('hostname', 'Unknown'), + 'details': f"New DHCP lease: {lease.get('address')} ({lease.get('hostname', 'Unknown')})" + } + + self.db.add_event(event) + logger.info(f"New DHCP lease: {event['details']}") + + if self.email_handler: + self.email_handler.send_event_notification(event) + + # Update previous state + self.previous_leases = {lease.get('address', '') + lease.get('mac', ''): lease for lease in leases} + + def check_new_devices(self): + """Check for new devices via ARP table""" + 
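+        # Compare the current ARP table against the MACs seen in the previous cycle:
+        # unseen MACs are logged as events, and devices not in known_devices also trigger an email.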
arp_entries = self.api.get_arp_table() + if not arp_entries: + return + + monitored_interfaces = self.config['monitoring'].get('monitored_interfaces', []) + + for entry in arp_entries: + mac = entry.get('mac', '') + interface = entry.get('intf', '') + + # Filter by interface + if monitored_interfaces and interface not in monitored_interfaces: + continue + + if mac and mac not in self.previous_devices: + # Check if device is in known devices DB + is_known = self.db.is_known_device(mac) + + event = { + 'type': 'new_device', + 'interface': interface, + 'ip': entry.get('ip'), + 'mac': mac, + 'hostname': entry.get('hostname', 'Unknown'), + 'known': is_known, + 'details': f"{'Known' if is_known else 'Unknown'} device detected: {mac} ({entry.get('hostname', 'Unknown')})" + } + + self.db.add_event(event) + logger.info(f"New device: {event['details']}") + + if self.email_handler and not is_known: + # Only send email for unknown devices + self.email_handler.send_event_notification(event) + + # Update previous state + self.previous_devices = {entry.get('mac', ''): entry for entry in arp_entries} + + def check_interface_status(self): + """Check interface status changes""" + interfaces = self.api.get_interfaces() + if not interfaces: + return + + monitored_interfaces = self.config['monitoring'].get('monitored_interfaces', []) + + for if_name, if_data in interfaces.items(): + # Filter by interface + if monitored_interfaces and if_name not in monitored_interfaces: + continue + + current_status = if_data.get('status', 'unknown') + + if if_name in self.previous_interfaces: + previous_status = self.previous_interfaces[if_name].get('status', 'unknown') + + if current_status != previous_status: + event = { + 'type': 'interface_status', + 'interface': if_name, + 'old_status': previous_status, + 'new_status': current_status, + 'details': f"Interface {if_name} changed: {previous_status} → {current_status}" + } + + self.db.add_event(event) + logger.warning(f"Interface status change: {event['details']}") + + if self.email_handler: + self.email_handler.send_event_notification(event) + + # Update previous state + self.previous_interfaces = interfaces + + def check_gateway_status(self): + """Check gateway status changes""" + gateways = self.api.get_gateways() + if not gateways: + return + + for gw_name, gw_data in gateways.items(): + current_status = gw_data.get('status', 'unknown') + + if gw_name in self.previous_gateways: + previous_status = self.previous_gateways[gw_name].get('status', 'unknown') + + if current_status != previous_status: + event = { + 'type': 'gateway_status', + 'gateway': gw_name, + 'old_status': previous_status, + 'new_status': current_status, + 'details': f"Gateway {gw_name} changed: {previous_status} → {current_status}" + } + + self.db.add_event(event) + logger.warning(f"Gateway status change: {event['details']}") + + if self.email_handler: + self.email_handler.send_event_notification(event) + + # Update previous state + self.previous_gateways = gateways \ No newline at end of file diff --git a/app/opnsense_api.py b/app/opnsense_api.py new file mode 100644 index 0000000..86adcb7 --- /dev/null +++ b/app/opnsense_api.py @@ -0,0 +1,70 @@ +import requests +import logging +from typing import Dict, List, Optional +import urllib3 + +# Disable SSL warnings (only if verify_ssl is false) +urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + +logger = logging.getLogger(__name__) + +class OPNsenseAPI: + """Client for OPNsense API""" + + def __init__(self, host: str, api_key: str, 
api_secret: str, verify_ssl: bool = True): + self.host = host.rstrip('/') + self.api_key = api_key + self.api_secret = api_secret + self.verify_ssl = verify_ssl + self.session = requests.Session() + self.session.auth = (api_key, api_secret) + self.session.verify = verify_ssl + + logger.info(f"OPNsense API client initialized for {self.host}") + + def _request(self, method: str, endpoint: str, **kwargs) -> Optional[Dict]: + """Make API request""" + url = f"{self.host}/api/{endpoint}" + + try: + response = self.session.request(method, url, **kwargs) + response.raise_for_status() + return response.json() + except requests.exceptions.RequestException as e: + logger.error(f"API request failed: {e}") + return None + + def get_dhcp_leases(self) -> Optional[List[Dict]]: + """Get current DHCP leases""" + data = self._request('GET', 'dhcpv4/leases/searchLease') + if data and 'rows' in data: + return data['rows'] + return [] + + def get_interfaces(self) -> Optional[Dict]: + """Get interface status""" + return self._request('GET', 'diagnostics/interface/getInterfaceConfig') + + def get_interface_statistics(self) -> Optional[Dict]: + """Get interface statistics""" + return self._request('GET', 'diagnostics/traffic/interface') + + def get_gateways(self) -> Optional[Dict]: + """Get gateway status""" + return self._request('GET', 'routes/gateway/status') + + def get_arp_table(self) -> Optional[List[Dict]]: + """Get ARP table for device detection""" + data = self._request('GET', 'diagnostics/interface/search_arp') + if data and 'rows' in data: + return data['rows'] + return [] + + def test_connection(self) -> bool: + """Test API connection""" + try: + result = self._request('GET', 'core/firmware/status') + return result is not None + except Exception as e: + logger.error(f"Connection test failed: {e}") + return False \ No newline at end of file diff --git a/app/templates/dashboard.html b/app/templates/dashboard.html new file mode 100644 index 0000000..77a9a7c --- /dev/null +++ b/app/templates/dashboard.html @@ -0,0 +1,363 @@ + + + + + + Dashboard - Watchdog Docker + + + + + + + + +
+  <div class="container mt-4">
+
+    <!-- Statistik-Karten -->
+    <div class="row">
+      <div class="col-md-3">
+        <div class="card mb-3">
+          <div class="card-body">
+            <h6 class="card-subtitle text-muted">Gesamt Events</h6>
+            <h3 class="card-title">{{ stats.total_events }}</h3>
+          </div>
+        </div>
+      </div>
+      <div class="col-md-3">
+        <div class="card mb-3">
+          <div class="card-body">
+            <h6 class="card-subtitle text-muted">Heute</h6>
+            <h3 class="card-title">{{ stats.events_today }}</h3>
+          </div>
+        </div>
+      </div>
+      <div class="col-md-3">
+        <div class="card mb-3">
+          <div class="card-body">
+            <h6 class="card-subtitle text-muted">Letzte Stunde</h6>
+            <h3 class="card-title">{{ stats.events_last_hour }}</h3>
+          </div>
+        </div>
+      </div>
+      <div class="col-md-3">
+        <div class="card mb-3">
+          <div class="card-body">
+            <h6 class="card-subtitle text-muted">Bekannte Geräte</h6>
+            <h3 class="card-title">{{ stats.known_devices }}</h3>
+          </div>
+        </div>
+      </div>
+    </div>
+
+    <!-- Event-Tabelle -->
+    <div class="card">
+      <div class="card-header d-flex justify-content-between align-items-center">
+        <span>Aktuelle Events</span>
+        <small class="text-muted">Zuletzt aktualisiert: {{ stats.last_updated if stats.last_updated else 'Nie' }}</small>
+      </div>
+      <div class="card-body table-responsive">
+        <table class="table table-striped table-hover">
+          <thead>
+            <tr>
+              <th>Zeitpunkt</th>
+              <th>Typ</th>
+              <th>Interface</th>
+              <th>Details</th>
+            </tr>
+          </thead>
+          <tbody>
+            {% for event in events %}
+            <tr>
+              <td>{{ event.timestamp }}</td>
+              <td>{{ event.type.replace('_', ' ').upper() }}</td>
+              <td>{{ event.interface or 'N/A' }}</td>
+              <td>{{ event.details }}</td>
+            </tr>
+            {% else %}
+            <tr>
+              <td colspan="4" class="text-center text-muted">Keine Events vorhanden</td>
+            </tr>
+            {% endfor %}
+          </tbody>
+        </table>
+      </div>
+    </div>
+
+  </div>
+ + + + + diff --git a/app/templates/login.html b/app/templates/login.html new file mode 100644 index 0000000..be9ae80 --- /dev/null +++ b/app/templates/login.html @@ -0,0 +1,116 @@ + + + + + + Login - Watchdog Docker + + + + + + + + + diff --git a/config/config.yaml b/config/config.yaml new file mode 100644 index 0000000..5d26483 --- /dev/null +++ b/config/config.yaml @@ -0,0 +1,62 @@ +# OPNsense Configuration +opnsense: + host: "https://192.168.1.1" # Your OPNsense IP/Hostname + api_key: "YOUR_API_KEY_HERE" + api_secret: "YOUR_API_SECRET_HERE" + verify_ssl: false # Set to true in production with valid cert + +# Monitoring Configuration +monitoring: + polling_interval: 60 # Seconds between checks + + # Interfaces to monitor (leave empty to monitor all) + monitored_interfaces: + - lan + - wan + # - opt1 + # - opt2 + + # Events to monitor + events: + dhcp_leases: true # New IP assigned + new_devices: true # New device detected + interface_status: true # Interface up/down + gateway_status: true # Gateway status changes + +# Web Interface Configuration +web: + host: "0.0.0.0" + port: 5000 + secret_key: "CHANGE_THIS_SECRET_KEY_IN_PRODUCTION" # Change this! + + # Login credentials (username: admin) + admin_password_hash: "scrypt:32768:8:1$CHANGEME$hash" # See README for generation + +# Email Notification Configuration +email: + enabled: true + smtp_server: "mail.yourdomain.com" + smtp_port: 587 + smtp_use_tls: true + smtp_username: "watchdog@yourdomain.com" + smtp_password: "YOUR_SMTP_PASSWORD" + + from_address: "watchdog@yourdomain.com" + to_addresses: + - "admin@yourdomain.com" + - "security@yourdomain.com" + + # Email settings + send_on_startup: true + batch_notifications: false # Group multiple events in one email + batch_interval: 300 # Seconds to wait before sending batch + +# Database Configuration +database: + path: "/app/data/watchdog.db" + retention_days: 90 # Keep events for 90 days + +# Logging +logging: + level: "INFO" # DEBUG, INFO, WARNING, ERROR + file: "/app/data/watchdog.log" \ No newline at end of file diff --git a/data/.gitkeep b/data/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..804ce08 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,27 @@ +version: '3.8' + +services: + watchdog: + build: . + container_name: watchdog-docker + restart: unless-stopped + ports: + - "5000:5000" + volumes: + - ./config/config.yaml:/app/config/config.yaml:ro + - ./data:/app/data + environment: + - TZ=Europe/Berlin + - PYTHONUNBUFFERED=1 + healthcheck: + test: ["CMD", "python", "-c", "import requests; requests.get('http://localhost:5000/health')"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 10s + networks: + - watchdog-net + +networks: + watchdog-net: + driver: bridge \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..6f17833 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,7 @@ +Flask==3.0.0 +Flask-Login==0.6.3 +APScheduler==3.10.4 +requests==2.31.0 +PyYAML==6.0.1 +Werkzeug==3.0.1 +python-dotenv==1.0.0 \ No newline at end of file