LANMap/backend/app/routers/scan.py
"""
Endpoints API pour le contrôle des scans réseau
"""
from fastapi import APIRouter, Depends, BackgroundTasks
from sqlalchemy.orm import Session
from datetime import datetime, timedelta
from typing import Dict, Any
from backend.app.core.database import get_db
from backend.app.core.config import config_manager
from backend.app.models.ip import IP, IPHistory
from backend.app.services.network import NetworkScanner
from backend.app.services.websocket import ws_manager
router = APIRouter(prefix="/api/scan", tags=["Scan"])


async def perform_scan(db: Session):
    """
    Runs a full network scan.

    Asynchronous function meant to be used as a background task.

    Args:
        db: Database session
    """
    try:
        print(f"[{datetime.now()}] Starting network scan...")

        # Notify that the scan has started
        try:
            await ws_manager.broadcast_scan_start()
        except Exception as e:
            print(f"Broadcast start error (ignored): {e}")

        # Load the configuration
        config = config_manager.config
        print(f"[{datetime.now()}] Config loaded: {config.network.cidr}")

        # Initialize the scanner
        scanner = NetworkScanner(
            cidr=config.network.cidr,
            timeout=config.scan.timeout
        )

        # Convert the configured port ranges into a list of integers
        port_list = []
        for port_range in config.ports.ranges:
            if '-' in port_range:
                start, end = map(int, port_range.split('-'))
                port_list.extend(range(start, end + 1))
            else:
                port_list.append(int(port_range))
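        # e.g. ranges ["22", "80-82", "8080"] -> [22, 80, 81, 82, 8080]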
print(f"[{datetime.now()}] Ports à scanner: {port_list}")
# Récupérer les IPs connues
known_ips = config.ip_classes
print(f"[{datetime.now()}] IPs connues: {len(known_ips)}")
# Lancer le scan
print(f"[{datetime.now()}] Lancement du scan (parallélisme: {config.scan.parallel_pings})...")
scan_results = await scanner.full_scan(
known_ips=known_ips,
port_list=port_list,
max_concurrent=config.scan.parallel_pings
)
print(f"[{datetime.now()}] Scan terminé: {len(scan_results)} IPs trouvées")
# Mettre à jour la base de données
stats = {
"total": 0,
"online": 0,
"offline": 0,
"new": 0,
"updated": 0
}
for ip_address, ip_data in scan_results.items():
stats["total"] += 1
if ip_data["last_status"] == "online":
stats["online"] += 1
else:
stats["offline"] += 1
# Vérifier si l'IP existe déjà
existing_ip = db.query(IP).filter(IP.ip == ip_address).first()
if existing_ip:
# Mettre à jour l'IP existante
old_status = existing_ip.last_status
existing_ip.last_status = ip_data["last_status"]
if ip_data["last_seen"]:
existing_ip.last_seen = ip_data["last_seen"]
existing_ip.mac = ip_data.get("mac") or existing_ip.mac
existing_ip.vendor = ip_data.get("vendor") or existing_ip.vendor
existing_ip.hostname = ip_data.get("hostname") or existing_ip.hostname
existing_ip.open_ports = ip_data.get("open_ports", [])
# Si l'état a changé, notifier via WebSocket
if old_status != ip_data["last_status"]:
await ws_manager.broadcast_ip_update({
"ip": ip_address,
"old_status": old_status,
"new_status": ip_data["last_status"]
})
stats["updated"] += 1
else:
# Créer une nouvelle IP
new_ip = IP(
ip=ip_address,
name=ip_data.get("name"),
known=ip_data.get("known", False),
location=ip_data.get("location"),
host=ip_data.get("host"),
first_seen=datetime.utcnow(),
last_seen=ip_data.get("last_seen") or datetime.utcnow(),
last_status=ip_data["last_status"],
mac=ip_data.get("mac"),
vendor=ip_data.get("vendor"),
hostname=ip_data.get("hostname"),
open_ports=ip_data.get("open_ports", [])
)
db.add(new_ip)
# Notifier nouvelle IP
await ws_manager.broadcast_new_ip({
"ip": ip_address,
"status": ip_data["last_status"],
"known": ip_data.get("known", False)
})
stats["new"] += 1
# Ajouter à l'historique
history_entry = IPHistory(
ip=ip_address,
timestamp=datetime.utcnow(),
status=ip_data["last_status"],
open_ports=ip_data.get("open_ports", [])
)
db.add(history_entry)
# Commit les changements
db.commit()
# Notifier fin du scan avec stats
await ws_manager.broadcast_scan_complete(stats)
print(f"[{datetime.now()}] Scan terminé: {stats}")
except Exception as e:
print(f"Erreur lors du scan: {e}")
db.rollback()
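
# Note (illustrative): depending on the FastAPI version, the session yielded by
# get_db may already be closed by the time a background task runs. A safer
# sketch, assuming the project exposes a session factory (hypothetical name
# SessionLocal), would open a dedicated session owned by the task:
#
#     async def perform_scan_standalone():
#         db = SessionLocal()      # session created for this task only
#         try:
#             await perform_scan(db)
#         finally:
#             db.close()           # always release the connection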
@router.post("/start")
async def start_scan(background_tasks: BackgroundTasks, db: Session = Depends(get_db)):
"""
Déclenche un scan réseau immédiat
Returns:
Message de confirmation
"""
# Lancer le scan en arrière-plan
background_tasks.add_task(perform_scan, db)
return {
"message": "Scan réseau démarré",
"timestamp": datetime.utcnow()
}
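
# Example client call (illustrative; assumes the app is served on localhost:8000):
#
#     import httpx
#     resp = httpx.post("http://localhost:8000/api/scan/start")
#     print(resp.json())  # {"message": "Network scan started", "timestamp": "..."}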
@router.post("/ports/{ip_address}")
async def scan_ip_ports(ip_address: str, db: Session = Depends(get_db)):
"""
Scanne les ports d'une IP spécifique
Args:
ip_address: Adresse IP à scanner
db: Session de base de données
Returns:
Liste des ports ouverts
"""
try:
# Récupérer la config
config = config_manager.config
# Convertir les ports en liste d'entiers
port_list = []
for port_range in config.ports.ranges:
if '-' in port_range:
start, end = map(int, port_range.split('-'))
port_list.extend(range(start, end + 1))
else:
port_list.append(int(port_range))
# Initialiser le scanner
scanner = NetworkScanner(
cidr=config.network.cidr,
timeout=config.scan.timeout
)
# Scanner les ports de cette IP
print(f"[{datetime.now()}] Scan ports pour {ip_address}...")
open_ports = await scanner.scan_ports(ip_address, port_list)
print(f"[{datetime.now()}] Ports ouverts pour {ip_address}: {open_ports}")
# Mettre à jour la base de données
ip_record = db.query(IP).filter(IP.ip == ip_address).first()
if ip_record:
ip_record.open_ports = open_ports
ip_record.last_seen = datetime.utcnow()
db.commit()
# Notifier via WebSocket
await ws_manager.broadcast_ip_update({
"ip": ip_address,
"open_ports": open_ports
})
return {
"message": "Scan de ports terminé",
"ip": ip_address,
"open_ports": open_ports,
"timestamp": datetime.utcnow()
}
except Exception as e:
print(f"Erreur scan ports {ip_address}: {e}")
return {
"message": f"Erreur: {str(e)}",
"ip": ip_address,
"open_ports": [],
"timestamp": datetime.utcnow()
}
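
# Example client call (illustrative; assumes the app is served on localhost:8000
# and that 192.168.1.10 is a host inside the configured CIDR):
#
#     import httpx
#     resp = httpx.post("http://localhost:8000/api/scan/ports/192.168.1.10")
#     print(resp.json()["open_ports"])  # e.g. [22, 80, 443]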
@router.post("/cleanup-history")
async def cleanup_history(hours: int = 24, db: Session = Depends(get_db)):
"""
Nettoie l'historique plus ancien que X heures
Args:
hours: Nombre d'heures à conserver (défaut: 24h)
db: Session de base de données
Returns:
Nombre d'entrées supprimées
"""
cutoff_date = datetime.utcnow() - timedelta(hours=hours)
deleted = db.query(IPHistory).filter(
IPHistory.timestamp < cutoff_date
).delete()
db.commit()
return {
"message": f"Historique nettoyé",
"deleted_entries": deleted,
"older_than_hours": hours
}
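
# Example client call (illustrative; assumes the app is served on localhost:8000):
#
#     import httpx
#     resp = httpx.post("http://localhost:8000/api/scan/cleanup-history", params={"hours": 48})
#     print(resp.json())  # {"message": "History cleaned up", "deleted_entries": <count>, "older_than_hours": 48}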