ipwatch
This commit is contained in:
7
backend/app/services/__init__.py
Executable file
7
backend/app/services/__init__.py
Executable file
@@ -0,0 +1,7 @@
|
||||
"""
|
||||
Services réseau pour IPWatch
|
||||
"""
|
||||
from .network import NetworkScanner
|
||||
from .scheduler import ScanScheduler
|
||||
|
||||
__all__ = ["NetworkScanner", "ScanScheduler"]
|
||||
80
backend/app/services/mqtt_client.py
Normal file
80
backend/app/services/mqtt_client.py
Normal file
@@ -0,0 +1,80 @@
|
||||
"""
|
||||
Service MQTT pour IPWatch Backend
|
||||
Envoie des commandes MQTT aux agents installés sur les machines
|
||||
"""
|
||||
import paho.mqtt.client as mqtt
|
||||
import json
|
||||
import logging
|
||||
from typing import Optional
|
||||
import os
|
||||
|
||||
logger = logging.getLogger(__name__)

# MQTT connection settings, read from environment variables with local
# defaults.  TODO(review): original note says these should also be loadable
# from config.yaml — confirm before relying on env-only configuration.
MQTT_BROKER = os.getenv('MQTT_BROKER', 'localhost')
MQTT_PORT = int(os.getenv('MQTT_PORT', '1883'))
MQTT_USERNAME = os.getenv('MQTT_USERNAME', None)
MQTT_PASSWORD = os.getenv('MQTT_PASSWORD', None)
|
||||
|
||||
|
||||
def send_mqtt_command(ip_address: str, command: str) -> bool:
    """Send an MQTT command to the agent installed on a device.

    Publishes a JSON payload on ``ipwatch/device/<ip>/command`` with QoS 1.

    Args:
        ip_address: target device IP address
        command: command to send (shutdown, reboot, status)

    Returns:
        bool: True if the message was confirmed published, False on any error.
    """
    # Local import: this module has no top-level datetime import.
    from datetime import datetime

    client = mqtt.Client(client_id=f"ipwatch-backend-{os.getpid()}")
    try:
        # Optional authentication.
        if MQTT_USERNAME and MQTT_PASSWORD:
            client.username_pw_set(MQTT_USERNAME, MQTT_PASSWORD)

        client.connect(MQTT_BROKER, MQTT_PORT, keepalive=10)

        # wait_for_publish() only makes progress when the network loop is
        # running; without loop_start() a QoS-1 publish may never complete.
        client.loop_start()

        topic = f"ipwatch/device/{ip_address}/command"
        payload = json.dumps({
            "command": command,
            "timestamp": datetime.now().isoformat()
        })

        result = client.publish(topic, payload, qos=1)
        result.wait_for_publish(timeout=5)

        logger.info(f"✓ Commande '{command}' envoyée à {ip_address} via MQTT")
        return result.is_published()

    except Exception as e:
        logger.error(f"✗ Erreur envoi commande MQTT à {ip_address}: {e}")
        return False
    finally:
        # Always tear down the loop and connection, even on error paths
        # (the previous version leaked the connection when publish failed).
        try:
            client.loop_stop()
            client.disconnect()
        except Exception:
            pass
|
||||
|
||||
|
||||
def get_device_status(ip_address: str) -> Optional[dict]:
    """Fetch a device's status over MQTT (not implemented yet).

    Args:
        ip_address: device IP address

    Returns:
        dict: device status, or None while this remains unimplemented.
    """
    # TODO: implement status retrieval — requires subscribing to a response
    # topic and awaiting the agent's reply.
    return None
|
||||
365
backend/app/services/network.py
Executable file
365
backend/app/services/network.py
Executable file
@@ -0,0 +1,365 @@
|
||||
"""
|
||||
Modules réseau pour scan d'IP, ping, ARP et port scan
|
||||
Implémente le workflow de scan selon workflow-scan.md
|
||||
"""
|
||||
import asyncio
|
||||
import ipaddress
|
||||
import platform
|
||||
import subprocess
|
||||
import socket
|
||||
from typing import List, Dict, Optional, Tuple
|
||||
from datetime import datetime
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
# Scapy is optional: when importable it enables active ARP scanning;
# otherwise NetworkScanner falls back to parsing the system ARP cache.
try:
    from scapy.all import ARP, Ether, srp
    SCAPY_AVAILABLE = True
except ImportError:
    SCAPY_AVAILABLE = False
|
||||
|
||||
|
||||
class NetworkScanner:
    """Network scanner: ping sweep, ARP/MAC vendor lookup, TCP port scan.

    Implements the scan workflow described in workflow-scan.md.
    """

    def __init__(self, cidr: str, timeout: float = 1.0, ping_count: int = 1):
        """Initialise the scanner.

        Args:
            cidr: network in CIDR notation (e.g. "192.168.1.0/24")
            timeout: per-host timeout for pings and TCP connects (seconds)
            ping_count: echo requests per IP (clamped to at least 1)
        """
        self.cidr = cidr
        self.timeout = timeout
        self.ping_count = max(1, int(ping_count))
        # strict=False accepts host addresses such as "192.168.1.5/24".
        self.network = ipaddress.ip_network(cidr, strict=False)

    def generate_ip_list(self) -> List[str]:
        """Return every usable host address of the CIDR as strings."""
        return [str(ip) for ip in self.network.hosts()]

    async def ping(self, ip: str) -> bool:
        """Ping a single IP asynchronously via the system ping binary.

        Args:
            ip: address to probe

        Returns:
            True if the host replied, False otherwise.
        """
        is_windows = platform.system().lower() == 'windows'
        count_flag = '-n' if is_windows else '-c'
        timeout_flag = '-w' if is_windows else '-W'
        # Windows -w expects milliseconds; Unix -W expects whole seconds.
        # Clamp to >= 1 so sub-second timeouts don't become an invalid "-W 0".
        timeout_value = int(self.timeout * 1000) if is_windows else max(1, int(self.timeout))

        command = [
            'ping',
            count_flag, str(self.ping_count),
            timeout_flag, str(timeout_value),
            ip,
        ]

        process = None
        try:
            process = await asyncio.create_subprocess_exec(
                *command,
                stdout=asyncio.subprocess.DEVNULL,
                stderr=asyncio.subprocess.DEVNULL,
            )
            await asyncio.wait_for(process.wait(), timeout=self.timeout + 1)
            return process.returncode == 0
        except asyncio.TimeoutError:
            # Kill the stuck ping so we don't accumulate zombie subprocesses
            # (the previous version just abandoned it).
            if process is not None:
                process.kill()
                await process.wait()
            return False
        except Exception:
            # e.g. ping binary missing — treat as unreachable.
            return False

    async def ping_parallel(self, ip_list: List[str], max_concurrent: int = 50) -> Dict[str, bool]:
        """Ping many IPs concurrently with a concurrency cap.

        Args:
            ip_list: addresses to probe
            max_concurrent: maximum simultaneous ping subprocesses

        Returns:
            Mapping of ip -> reachable flag.
        """
        results: Dict[str, bool] = {}
        semaphore = asyncio.Semaphore(max_concurrent)

        async def bounded_ping(ip: str):
            async with semaphore:
                results[ip] = await self.ping(ip)

        await asyncio.gather(*(bounded_ping(ip) for ip in ip_list))
        return results

    def get_arp_table(self) -> Dict[str, Tuple[str, str]]:
        """Collect MAC addresses for hosts of the scanned network.

        Uses an active Scapy ARP scan when available; otherwise parses the
        operating system's ARP cache (best effort, errors are printed).

        Returns:
            Mapping of ip -> (mac, vendor).
        """
        arp_data: Dict[str, Tuple[str, str]] = {}

        if SCAPY_AVAILABLE:
            try:
                # Broadcast ARP who-has for the whole CIDR.
                answered, _ = srp(
                    Ether(dst="ff:ff:ff:ff:ff:ff") / ARP(pdst=self.cidr),
                    timeout=2,
                    verbose=False,
                )
                for _sent, received in answered:
                    ip = received.psrc
                    mac = received.hwsrc
                    arp_data[ip] = (mac, self._get_mac_vendor(mac))
            except Exception as e:
                print(f"Erreur ARP scan avec Scapy: {e}")
        else:
            # Fallback: parse the system ARP table.
            try:
                if platform.system().lower() == 'windows':
                    output = subprocess.check_output(['arp', '-a'], text=True)
                    pattern = r'(\d+\.\d+\.\d+\.\d+)\s+([0-9a-fA-F-:]+)'
                else:
                    output = subprocess.check_output(['arp', '-n'], text=True)
                    pattern = r'(\d+\.\d+\.\d+\.\d+)\s+\w+\s+([0-9a-fA-F:]+)'

                # Build the membership set once instead of re-materialising
                # the host list for every match (was accidentally O(n^2)).
                network_ips = {str(h) for h in self.network.hosts()}
                for ip, mac in re.findall(pattern, output):
                    if ip in network_ips:
                        arp_data[ip] = (mac, self._get_mac_vendor(mac))
            except Exception as e:
                print(f"Erreur lecture table ARP: {e}")

        return arp_data

    def _get_mac_vendor(self, mac: str) -> str:
        """Best-effort vendor name for a MAC address.

        Tries the local OUI database first, then a tiny built-in table of
        common prefixes.

        Args:
            mac: MAC address in any common notation

        Returns:
            Vendor name, or "Unknown".
        """
        mac_norm = re.sub(r"[^0-9A-Fa-f]", "", mac).upper()
        if not mac_norm:
            return "Unknown"

        vendor = OuiLookup.lookup(mac_norm)
        if vendor:
            return vendor

        # Minimal fallback table (prefixes pre-normalised for O(1) lookup).
        known_prefixes = {
            "000C29": "VMware",
            "005056": "VMware",
            "080027": "VirtualBox",
            "DCA632": "Raspberry Pi",
            "B827EB": "Raspberry Pi",
        }
        return known_prefixes.get(mac_norm[:6], "Unknown")

    async def scan_ports(self, ip: str, ports: List[int]) -> List[int]:
        """TCP-connect scan of the given ports.

        Args:
            ip: target address
            ports: ports to probe

        Returns:
            List of ports that accepted a connection.
        """
        async def check_port(port: int) -> Optional[int]:
            try:
                reader, writer = await asyncio.wait_for(
                    asyncio.open_connection(ip, port),
                    timeout=self.timeout,
                )
                writer.close()
                await writer.wait_closed()
                return port
            except (OSError, asyncio.TimeoutError):
                # Closed/filtered port or unreachable host.  The previous
                # bare `except:` also swallowed CancelledError, which breaks
                # task cancellation in async code.
                return None

        results = await asyncio.gather(*[check_port(p) for p in ports])
        return [p for p in results if p is not None]

    def get_hostname(self, ip: str) -> Optional[str]:
        """Reverse-DNS lookup for an IP.

        Args:
            ip: address to resolve

        Returns:
            Hostname, or None when resolution fails.
        """
        try:
            hostname, _, _ = socket.gethostbyaddr(ip)
            return hostname
        except OSError:
            # socket.herror / gaierror are OSError subclasses.
            return None

    def classify_ip_status(self, is_online: bool, is_known: bool) -> str:
        """Classify an IP's state.

        Args:
            is_online: the host answered the ping sweep
            is_known: the IP exists in the configuration (currently unused;
                kept for a future "unknown device" classification)

        Returns:
            "online" or "offline".
        """
        return "online" if is_online else "offline"

    async def full_scan(self, known_ips: Dict[str, Dict], port_list: List[int], max_concurrent: int = 50, progress_callback=None) -> Dict[str, Dict]:
        """Run the complete scan workflow (see workflow-scan.md).

        Args:
            known_ips: known IPs from configuration, keyed by address
            port_list: TCP ports to scan on online hosts
            max_concurrent: maximum simultaneous pings
            progress_callback: optional async callable invoked per IP as
                (index, total, ip, status, is_online)

        Returns:
            Per-IP scan result dictionary, keyed by address.
        """
        results: Dict[str, Dict] = {}

        # 1. Expand the CIDR into individual host addresses.
        ip_list = self.generate_ip_list()
        total_ips = len(ip_list)

        # 2. Parallel ping sweep.
        ping_results = await self.ping_parallel(ip_list, max_concurrent)

        # 3. ARP + MAC vendor resolution.
        arp_table = self.get_arp_table()

        # 4. Aggregate per IP.
        for index, ip in enumerate(ip_list, start=1):
            is_online = ping_results.get(ip, False)
            is_known = ip in known_ips

            ip_data = {
                "ip": ip,
                "known": is_known,
                "last_status": self.classify_ip_status(is_online, is_known),
                "last_seen": datetime.now() if is_online else None,
                "mac": None,
                "vendor": None,
                "hostname": None,
                "open_ports": [],
            }

            # Merge configured metadata for known IPs.
            if is_known:
                ip_data.update(known_ips[ip])

            # ARP info, when the host answered an ARP probe.
            if ip in arp_table:
                mac, vendor = arp_table[ip]
                ip_data["mac"] = mac
                ip_data["vendor"] = vendor

            # Reverse DNS only for reachable hosts.
            if is_online:
                hostname = self.get_hostname(ip)
                if hostname:
                    ip_data["hostname"] = hostname

            # 5. Port scan only for reachable hosts.
            if is_online and port_list:
                ip_data["open_ports"] = await self.scan_ports(ip, port_list)

            results[ip] = ip_data

            # Report progress after each IP.
            if progress_callback:
                await progress_callback(index, total_ips, ip, ip_data["last_status"], is_online)

        return results
|
||||
|
||||
|
||||
class OuiLookup:
    """MAC vendor lookup backed by a local IEEE OUI file (oui.txt)."""

    # Cached prefix -> vendor mapping, rebuilt when the file's mtime changes.
    _cache = {}
    _mtime = None
    _path = Path("./data/oui/oui.txt")

    @classmethod
    def _load(cls):
        """(Re)load the OUI database if the file changed since the last load."""
        if not cls._path.exists():
            cls._cache = {}
            cls._mtime = None
            return

        mtime = cls._path.stat().st_mtime
        if cls._mtime == mtime and cls._cache:
            return  # cache is already up to date

        entries = {}
        try:
            with cls._path.open("r", encoding="utf-8", errors="ignore") as handle:
                for line in handle:
                    stripped = line.strip()
                    if "(hex)" not in stripped:
                        continue
                    left, right = stripped.split("(hex)", 1)
                    prefix = re.sub(r"[^0-9A-Fa-f]", "", left).upper()[:6]
                    vendor = right.strip()
                    if len(prefix) == 6 and vendor:
                        entries[prefix] = vendor
        except Exception:
            # Best effort: an unreadable file yields an empty database.
            entries = {}

        cls._cache = entries
        cls._mtime = mtime
        print(f"[OUI] Base chargée: {len(cls._cache)} entrées depuis {cls._path}")

    @classmethod
    def lookup(cls, mac: str) -> Optional[str]:
        """Return the vendor name for *mac*, or None when unknown/unavailable."""
        if not mac:
            return None
        cls._load()
        if not cls._cache:
            return None
        prefix = re.sub(r"[^0-9A-Fa-f]", "", mac).upper()[:6]
        return cls._cache.get(prefix) if len(prefix) == 6 else None
|
||||
194
backend/app/services/opnsense_client.py
Normal file
194
backend/app/services/opnsense_client.py
Normal file
@@ -0,0 +1,194 @@
|
||||
"""
|
||||
Client API OPNsense pour IPWatch
|
||||
Gère les communications avec l'API REST OPNsense (Kea DHCP)
|
||||
"""
|
||||
import httpx
|
||||
import ipaddress
|
||||
from typing import Optional, Dict, Any, List
|
||||
from backend.app.core.config import config_manager
|
||||
|
||||
|
||||
class OPNsenseAPIError(Exception):
    """Error returned by the OPNsense API (validation failures, etc.).

    Attributes:
        validations: field -> message mapping reported by the API, always a
            dict (empty when the API supplied none).
    """

    def __init__(self, message: str, validations: Optional[dict] = None):
        # Fixed annotation: the parameter is optional (`dict = None` lied
        # about the type).  Normalise None to a fresh dict per instance.
        self.validations = validations or {}
        super().__init__(message)
|
||||
|
||||
|
||||
class OPNsenseClient:
    """Client for the OPNsense REST API using Basic auth (api_key:api_secret)."""

    def __init__(self):
        """Read connection settings from the application configuration."""
        cfg = config_manager.config.opnsense
        self.base_url = f"{cfg.protocol}://{cfg.host}"
        self.auth = (cfg.api_key, cfg.api_secret)
        self.verify_ssl = cfg.verify_ssl
        self.enabled = cfg.enabled
        print(f"[OPNsense] Client initialisé: {self.base_url} (ssl_verify={self.verify_ssl})")

    def _get_client(self) -> httpx.AsyncClient:
        """Build a configured async HTTP client."""
        return httpx.AsyncClient(
            base_url=self.base_url,
            auth=self.auth,
            verify=self.verify_ssl,
            timeout=30.0,
        )

    def _check_result(self, data: Dict[str, Any], action: str):
        """Raise OPNsenseAPIError when the API reports result == 'failed'."""
        if data.get("result") != "failed":
            return
        validations = data.get("validations", {})
        if validations:
            details = "; ".join(f"{k}: {v}" for k, v in validations.items())
            msg = f"{action} échoué: {details}"
        else:
            msg = f"{action} échoué"
        print(f"[OPNsense] VALIDATION ERREUR: {msg}")
        raise OPNsenseAPIError(msg, validations)

    async def test_connection(self) -> Dict[str, Any]:
        """Check that the OPNsense API is reachable and authenticated."""
        print(f"[OPNsense] Test connexion: GET {self.base_url}/api/core/firmware/status")
        async with self._get_client() as http:
            resp = await http.get("/api/core/firmware/status")
            print(f"[OPNsense] Réponse test: {resp.status_code}")
            resp.raise_for_status()
            return resp.json()

    async def search_subnets(self) -> Dict[str, Any]:
        """List the Kea DHCPv4 subnets."""
        print(f"[OPNsense] Recherche subnets: GET {self.base_url}/api/kea/dhcpv4/search_subnet")
        async with self._get_client() as http:
            resp = await http.get("/api/kea/dhcpv4/search_subnet")
            print(f"[OPNsense] Réponse search_subnet: {resp.status_code}")
            if resp.status_code != 200:
                print(f"[OPNsense] Corps réponse erreur: {resp.text[:500]}")
            resp.raise_for_status()
            payload = resp.json()
            subnets = payload.get("rows", [])
            print(f"[OPNsense] {len(subnets)} subnet(s) trouvé(s)")
            for entry in subnets:
                print(f"[OPNsense] - {entry.get('subnet')}: uuid={entry.get('uuid')}")
            return payload

    async def find_subnet_for_ip(self, ip_address: str) -> Optional[str]:
        """Return the UUID of the subnet containing *ip_address*, if any."""
        print(f"[OPNsense] Recherche subnet pour IP {ip_address}")
        target = ipaddress.ip_address(ip_address)
        listing = await self.search_subnets()
        for entry in listing.get("rows", []):
            cidr = entry.get("subnet", "")
            try:
                net = ipaddress.ip_network(cidr, strict=False)
            except ValueError:
                continue  # malformed subnet entry — skip it
            if target in net:
                uuid = entry.get("uuid")
                print(f"[OPNsense] Subnet trouvé: {cidr} -> uuid={uuid}")
                return uuid
        print(f"[OPNsense] Aucun subnet trouvé pour {ip_address}")
        return None

    async def search_reservations(self) -> Dict[str, Any]:
        """List every Kea DHCP reservation."""
        print(f"[OPNsense] Recherche réservations: GET {self.base_url}/api/kea/dhcpv4/search_reservation")
        async with self._get_client() as http:
            resp = await http.get("/api/kea/dhcpv4/search_reservation")
            print(f"[OPNsense] Réponse search_reservation: {resp.status_code}")
            if resp.status_code != 200:
                print(f"[OPNsense] Corps réponse erreur: {resp.text[:500]}")
            resp.raise_for_status()
            payload = resp.json()
            print(f"[OPNsense] {len(payload.get('rows', []))} réservation(s) trouvée(s)")
            return payload

    async def get_reservation(self, uuid: str) -> Dict[str, Any]:
        """Fetch one reservation by UUID."""
        print(f"[OPNsense] Get réservation: {uuid}")
        async with self._get_client() as http:
            resp = await http.get(f"/api/kea/dhcpv4/get_reservation/{uuid}")
            print(f"[OPNsense] Réponse get_reservation: {resp.status_code}")
            resp.raise_for_status()
            return resp.json()

    async def add_reservation(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """Create a new Kea DHCP reservation."""
        body = {"reservation": data}
        print(f"[OPNsense] Ajout réservation: POST {self.base_url}/api/kea/dhcpv4/add_reservation")
        print(f"[OPNsense] Payload: {body}")
        async with self._get_client() as http:
            resp = await http.post("/api/kea/dhcpv4/add_reservation", json=body)
            print(f"[OPNsense] Réponse add_reservation: {resp.status_code}")
            print(f"[OPNsense] Corps réponse: {resp.text[:500]}")
            resp.raise_for_status()
            outcome = resp.json()
            self._check_result(outcome, "Ajout réservation")
            return outcome

    async def set_reservation(self, uuid: str, data: Dict[str, Any]) -> Dict[str, Any]:
        """Update an existing reservation."""
        body = {"reservation": data}
        print(f"[OPNsense] Mise à jour réservation {uuid}: POST {self.base_url}/api/kea/dhcpv4/set_reservation/{uuid}")
        print(f"[OPNsense] Payload: {body}")
        async with self._get_client() as http:
            resp = await http.post(f"/api/kea/dhcpv4/set_reservation/{uuid}", json=body)
            print(f"[OPNsense] Réponse set_reservation: {resp.status_code}")
            print(f"[OPNsense] Corps réponse: {resp.text[:500]}")
            resp.raise_for_status()
            outcome = resp.json()
            self._check_result(outcome, "Mise à jour réservation")
            return outcome

    async def del_reservation(self, uuid: str) -> Dict[str, Any]:
        """Delete a reservation."""
        print(f"[OPNsense] Suppression réservation: {uuid}")
        async with self._get_client() as http:
            resp = await http.post(f"/api/kea/dhcpv4/del_reservation/{uuid}")
            print(f"[OPNsense] Réponse del_reservation: {resp.status_code}")
            resp.raise_for_status()
            return resp.json()

    async def reconfigure_kea(self) -> Dict[str, Any]:
        """Apply pending Kea changes by reconfiguring the service."""
        print(f"[OPNsense] Reconfiguration Kea: POST {self.base_url}/api/kea/service/reconfigure")
        async with self._get_client() as http:
            resp = await http.post("/api/kea/service/reconfigure")
            print(f"[OPNsense] Réponse reconfigure: {resp.status_code}")
            if resp.status_code != 200:
                print(f"[OPNsense] Corps réponse erreur: {resp.text[:500]}")
            resp.raise_for_status()
            return resp.json()

    async def find_reservation_by_ip(self, ip_address: str) -> Optional[Dict[str, Any]]:
        """Look up an existing reservation by IP address."""
        print(f"[OPNsense] Recherche réservation par IP: {ip_address}")
        listing = await self.search_reservations()
        for entry in listing.get("rows", []):
            if entry.get("ip_address") == ip_address:
                print(f"[OPNsense] Réservation trouvée: uuid={entry.get('uuid')}")
                return entry
        print(f"[OPNsense] Aucune réservation existante pour {ip_address}")
        return None

    async def find_reservation_by_mac(self, mac_address: str) -> Optional[Dict[str, Any]]:
        """Look up an existing reservation by MAC address."""
        wanted = mac_address.lower().replace("-", ":")
        print(f"[OPNsense] Recherche réservation par MAC: {wanted}")
        listing = await self.search_reservations()
        for entry in listing.get("rows", []):
            candidate = (entry.get("hw_address") or "").lower().replace("-", ":")
            if candidate == wanted:
                print(f"[OPNsense] Réservation trouvée par MAC: uuid={entry.get('uuid')}")
                return entry
        print(f"[OPNsense] Aucune réservation pour MAC {wanted}")
        return None
|
||||
103
backend/app/services/scheduler.py
Executable file
103
backend/app/services/scheduler.py
Executable file
@@ -0,0 +1,103 @@
|
||||
"""
|
||||
Scheduler APScheduler pour les scans réseau périodiques
|
||||
"""
|
||||
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||
from apscheduler.triggers.interval import IntervalTrigger
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional, Callable
|
||||
import asyncio
|
||||
|
||||
|
||||
class ScanScheduler:
    """Wrapper around APScheduler for the periodic network-scan jobs."""

    def __init__(self):
        """Create the (not yet started) asyncio scheduler."""
        self.scheduler = AsyncIOScheduler()
        self.is_running = False

    def start(self):
        """Start the scheduler (no-op if already running)."""
        if not self.is_running:
            self.scheduler.start()
            self.is_running = True
            print(f"[{datetime.now()}] Scheduler démarré")

    def stop(self):
        """Shut the scheduler down (no-op if not running)."""
        if self.is_running:
            self.scheduler.shutdown()
            self.is_running = False
            print(f"[{datetime.now()}] Scheduler arrêté")

    def _add_interval_job(self, func: Callable, job_id: str, name: str, trigger: IntervalTrigger, announce: str):
        """Register (or replace) an interval job and print a confirmation.

        Shared by the three public add_*_job methods, which previously
        duplicated this code.

        Args:
            func: async callable to schedule
            job_id: unique job identifier
            name: human-readable job name
            trigger: interval trigger for the job
            announce: confirmation line printed after registration
        """
        self.scheduler.add_job(
            func,
            trigger=trigger,
            id=job_id,
            name=name,
            replace_existing=True,
        )
        print(announce)

    def add_ping_scan_job(self, scan_function: Callable, interval_seconds: int = 60):
        """Schedule the periodic ping scan.

        Args:
            scan_function: async function to execute
            interval_seconds: period in seconds
        """
        self._add_interval_job(
            scan_function,
            'ping_scan',
            'Scan Ping périodique',
            IntervalTrigger(seconds=interval_seconds),
            f"Tâche ping_scan configurée: toutes les {interval_seconds}s",
        )

    def add_port_scan_job(self, scan_function: Callable, interval_seconds: int = 300):
        """Schedule the periodic port scan.

        Args:
            scan_function: async function to execute
            interval_seconds: period in seconds
        """
        self._add_interval_job(
            scan_function,
            'port_scan',
            'Scan ports périodique',
            IntervalTrigger(seconds=interval_seconds),
            f"Tâche port_scan configurée: toutes les {interval_seconds}s",
        )

    def add_cleanup_job(self, cleanup_function: Callable, interval_hours: int = 1):
        """Schedule the periodic history cleanup.

        Args:
            cleanup_function: async cleanup function
            interval_hours: period in hours
        """
        self._add_interval_job(
            cleanup_function,
            'history_cleanup',
            'Nettoyage historique',
            IntervalTrigger(hours=interval_hours),
            f"Tâche cleanup configurée: toutes les {interval_hours}h",
        )

    def remove_job(self, job_id: str):
        """Remove a scheduled job by id; failures are printed, not raised.

        Args:
            job_id: identifier of the job to remove
        """
        try:
            self.scheduler.remove_job(job_id)
            print(f"Tâche {job_id} supprimée")
        except Exception as e:
            print(f"Erreur suppression tâche {job_id}: {e}")

    def get_jobs(self):
        """Return the list of scheduled jobs."""
        return self.scheduler.get_jobs()
|
||||
|
||||
|
||||
# Module-level scheduler singleton shared by the application.
scan_scheduler = ScanScheduler()
|
||||
146
backend/app/services/websocket.py
Executable file
146
backend/app/services/websocket.py
Executable file
@@ -0,0 +1,146 @@
|
||||
"""
|
||||
Gestionnaire WebSocket pour notifications temps réel
|
||||
"""
|
||||
from fastapi import WebSocket
|
||||
from typing import List, Dict, Any
|
||||
import json
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class WebSocketManager:
    """Tracks active WebSocket connections and broadcasts realtime events."""

    def __init__(self):
        """Start with no connected clients."""
        self.active_connections: List[WebSocket] = []

    async def connect(self, websocket: WebSocket):
        """Accept a new WebSocket connection and register it.

        Args:
            websocket: connection to accept
        """
        await websocket.accept()
        self.active_connections.append(websocket)
        print(f"[{datetime.now()}] Nouvelle connexion WebSocket. Total: {len(self.active_connections)}")

    def disconnect(self, websocket: WebSocket):
        """Unregister a client (no-op if it is not registered).

        Args:
            websocket: connection to remove
        """
        if websocket in self.active_connections:
            self.active_connections.remove(websocket)
            print(f"[{datetime.now()}] Déconnexion WebSocket. Total: {len(self.active_connections)}")

    async def send_personal_message(self, message: str, websocket: WebSocket):
        """Send a text message to one client; errors are printed, not raised.

        Args:
            message: text to send
            websocket: recipient connection
        """
        try:
            await websocket.send_text(message)
        except Exception as e:
            print(f"Erreur envoi message personnel: {e}")

    async def broadcast(self, message: Dict[str, Any]):
        """Serialize *message* to JSON and send it to every connected client.

        A "timestamp" field is added to the serialized copy; the caller's
        dict is left untouched (the previous version mutated the argument
        in place).  Clients that fail to receive are pruned.

        Args:
            message: payload dictionary (converted to JSON)
        """
        stamped = {**message, "timestamp": datetime.now().isoformat()}
        json_message = json.dumps(stamped)

        # Connections that error out are collected and dropped afterwards.
        dead = []
        for connection in self.active_connections:
            try:
                await connection.send_text(json_message)
            except Exception as e:
                print(f"Erreur broadcast: {e}")
                dead.append(connection)

        for connection in dead:
            self.disconnect(connection)

    async def broadcast_scan_start(self):
        """Announce that a network scan has started."""
        await self.broadcast({
            "type": "scan_start",
            "message": "Scan réseau démarré"
        })

    async def broadcast_scan_complete(self, stats: Dict[str, int]):
        """Announce scan completion with its statistics.

        Args:
            stats: scan statistics (total, online, offline, etc.)
        """
        await self.broadcast({
            "type": "scan_complete",
            "message": "Scan réseau terminé",
            "stats": stats
        })

    async def broadcast_ip_update(self, ip_data: Dict[str, Any]):
        """Announce an IP state change.

        Args:
            ip_data: updated IP record
        """
        await self.broadcast({
            "type": "ip_update",
            "data": ip_data
        })

    async def broadcast_new_ip(self, ip_data: Dict[str, Any]):
        """Announce a newly detected IP.

        Args:
            ip_data: record of the new IP
        """
        await self.broadcast({
            "type": "new_ip",
            "data": ip_data,
            "message": f"Nouvelle IP détectée: {ip_data.get('ip')}"
        })

    async def broadcast_scan_progress(self, progress_data: Dict[str, Any]):
        """Report progress of a running scan.

        Args:
            progress_data: progress info (current, total, ip)
        """
        await self.broadcast({
            "type": "scan_progress",
            "current": progress_data.get("current"),
            "total": progress_data.get("total"),
            "ip": progress_data.get("ip")
        })

    async def broadcast_scan_log(self, message: str):
        """Broadcast one scan log line.

        Args:
            message: log line text
        """
        await self.broadcast({
            "type": "scan_log",
            "message": message
        })
|
||||
|
||||
|
||||
# Module-level WebSocket manager singleton shared by the application.
ws_manager = WebSocketManager()
|
||||
Reference in New Issue
Block a user