ipwatch
This commit is contained in:
8
backend/app/routers/__init__.py
Executable file
8
backend/app/routers/__init__.py
Executable file
@@ -0,0 +1,8 @@
|
||||
"""
|
||||
Routers API pour IPWatch
|
||||
"""
|
||||
from .ips import router as ips_router
|
||||
from .scan import router as scan_router
|
||||
from .websocket import router as websocket_router
|
||||
|
||||
__all__ = ["ips_router", "scan_router", "websocket_router"]
|
||||
132
backend/app/routers/architecture.py
Normal file
132
backend/app/routers/architecture.py
Normal file
@@ -0,0 +1,132 @@
|
||||
"""
|
||||
Endpoints API pour l'éditeur d'architecture
|
||||
"""
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.orm import Session
|
||||
from pydantic import BaseModel
|
||||
from typing import List, Optional, Dict, Any
|
||||
from uuid import uuid4
|
||||
from datetime import datetime
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
from backend.app.core.database import get_arch_db
|
||||
from backend.app.models.architecture import ArchitectureNode
|
||||
|
||||
router = APIRouter(prefix="/api/architecture", tags=["Architecture"])
|
||||
DATA_DIR = Path(__file__).resolve().parents[3] / "data"
|
||||
WORLD_FILE = DATA_DIR / "architecture.json"
|
||||
|
||||
|
||||
class ArchitectureNodeCreate(BaseModel):
    """Request body for creating an architecture node."""

    # Optional client-supplied id; the server generates a UUID when omitted.
    id: Optional[str] = None
    type: str
    # Position and dimensions on the canvas.
    x: int
    y: int
    width: int
    height: int
    rotation: int = 0
    # Free-form node data, persisted as JSON.
    payload: Dict[str, Any]
|
||||
|
||||
|
||||
class ArchitectureNodeResponse(BaseModel):
    """Serialized architecture node as returned by the API."""

    id: str
    type: str
    x: int
    y: int
    width: int
    height: int
    rotation: int
    # Decoded JSON payload (empty dict when the stored value is unreadable).
    payload: Dict[str, Any]
    created_at: datetime

    class Config:
        # Allow building the model directly from ORM attributes.
        from_attributes = True
|
||||
|
||||
|
||||
class ArchitectureWorldPayload(BaseModel):
    """World document: drawable items plus optional connecting splines."""

    items: List[Dict[str, Any]]
    splines: Optional[List[Dict[str, Any]]] = None
|
||||
|
||||
|
||||
@router.get("/nodes", response_model=List[ArchitectureNodeResponse])
async def list_nodes(db: Session = Depends(get_arch_db)):
    """Return every stored architecture node, oldest first.

    Each node's JSON payload column is decoded individually; a corrupt
    payload is replaced by an empty dict instead of failing the request.
    """
    stored = (
        db.query(ArchitectureNode)
        .order_by(ArchitectureNode.created_at.asc())
        .all()
    )

    responses = []
    for record in stored:
        try:
            decoded = json.loads(record.payload or "{}")
        except json.JSONDecodeError:
            decoded = {}
        responses.append(
            ArchitectureNodeResponse(
                id=record.id,
                type=record.type,
                x=record.x,
                y=record.y,
                width=record.width,
                height=record.height,
                rotation=record.rotation,
                payload=decoded,
                created_at=record.created_at,
            )
        )
    return responses
|
||||
|
||||
|
||||
@router.post("/nodes", response_model=ArchitectureNodeResponse)
async def create_node(payload: ArchitectureNodeCreate, db: Session = Depends(get_arch_db)):
    """Persist a new architecture node and echo it back.

    The node id comes from the request when provided, otherwise a fresh
    UUID is generated. The payload dict is stored JSON-encoded.
    """
    node_id = payload.id if payload.id else str(uuid4())
    record = ArchitectureNode(
        id=node_id,
        type=payload.type,
        x=payload.x,
        y=payload.y,
        width=payload.width,
        height=payload.height,
        rotation=payload.rotation,
        payload=json.dumps(payload.payload),
    )
    db.add(record)
    db.commit()
    db.refresh(record)

    # Respond with the original dict (not the JSON string stored in the DB).
    return ArchitectureNodeResponse(
        id=record.id,
        type=record.type,
        x=record.x,
        y=record.y,
        width=record.width,
        height=record.height,
        rotation=record.rotation,
        payload=payload.payload,
        created_at=record.created_at,
    )
|
||||
|
||||
|
||||
def ensure_world_file() -> None:
    """Create the data directory and an empty world file when missing."""
    DATA_DIR.mkdir(parents=True, exist_ok=True)
    if WORLD_FILE.exists():
        return
    empty_world = {"items": [], "splines": []}
    WORLD_FILE.write_text(json.dumps(empty_world, indent=2), encoding="utf-8")
|
||||
|
||||
|
||||
@router.get("/world")
async def get_world():
    """Load architecture.json (creating it when absent).

    A corrupt file yields an empty world rather than an error.
    """
    ensure_world_file()
    raw = WORLD_FILE.read_text(encoding="utf-8")
    try:
        return json.loads(raw)
    except json.JSONDecodeError:
        return {"items": [], "splines": []}
|
||||
|
||||
|
||||
@router.post("/world")
async def save_world(payload: ArchitectureWorldPayload):
    """Persist the world items and splines into architecture.json."""
    ensure_world_file()
    spline_list = payload.splines if payload.splines else []
    document = {"items": payload.items, "splines": spline_list}
    WORLD_FILE.write_text(json.dumps(document, indent=2), encoding="utf-8")
    return {"status": "ok", "count": len(payload.items), "splines": len(spline_list)}
|
||||
73
backend/app/routers/config.py
Executable file
73
backend/app/routers/config.py
Executable file
@@ -0,0 +1,73 @@
|
||||
"""
|
||||
Routes pour la configuration
|
||||
"""
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import BaseModel
|
||||
import yaml
|
||||
from backend.app.core.config import config_manager
|
||||
|
||||
router = APIRouter(prefix="/api/config", tags=["config"])
|
||||
|
||||
@router.get("/ui")
async def get_ui_config():
    """Expose the UI section of the current configuration."""
    ui = config_manager.config.ui
    return {
        "cell_size": ui.cell_size,
        "font_size": ui.font_size,
        "cell_gap": ui.cell_gap,
        "offline_transparency": ui.offline_transparency,
        "show_mac": ui.show_mac,
        "show_vendor": ui.show_vendor,
        "architecture_title_font_size": ui.architecture_title_font_size,
    }
|
||||
|
||||
@router.post("/reload")
async def reload_config():
    """Reload configuration from config.yaml and echo the UI section.

    Raises:
        HTTPException: 500 when the reload fails for any reason.
    """
    try:
        config = config_manager.reload_config()
        ui = config.ui
        return {
            "success": True,
            "message": "Configuration rechargée avec succès",
            "ui": {
                "cell_size": ui.cell_size,
                "font_size": ui.font_size,
                "cell_gap": ui.cell_gap,
                "offline_transparency": ui.offline_transparency,
                "show_mac": ui.show_mac,
                "show_vendor": ui.show_vendor,
                "architecture_title_font_size": ui.architecture_title_font_size,
            },
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur rechargement config: {str(e)}")
|
||||
|
||||
|
||||
class UIConfigUpdate(BaseModel):
    """Body accepted by POST /api/config/ui."""

    # Font size applied to architecture block titles.
    architecture_title_font_size: int
|
||||
|
||||
|
||||
@router.post("/ui")
async def update_ui_config(payload: UIConfigUpdate):
    """Write a UI setting into config.yaml, then reload the configuration.

    Raises:
        HTTPException: 500 on any read/write/reload failure.
    """
    config_path = "./config.yaml"
    try:
        with open(config_path, "r", encoding="utf-8") as f:
            data = yaml.safe_load(f) or {}

        # Make sure the "ui" mapping exists before writing into it.
        if data.get("ui") is None:
            data["ui"] = {}
        data["ui"]["architecture_title_font_size"] = int(payload.architecture_title_font_size)

        with open(config_path, "w", encoding="utf-8") as f:
            yaml.safe_dump(data, f, allow_unicode=True, sort_keys=False)

        config = config_manager.reload_config()
        return {
            "message": "Configuration UI mise à jour",
            "architecture_title_font_size": config.ui.architecture_title_font_size,
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur mise à jour config UI: {str(e)}")
|
||||
665
backend/app/routers/ips.py
Executable file
665
backend/app/routers/ips.py
Executable file
@@ -0,0 +1,665 @@
|
||||
"""
|
||||
Endpoints API pour la gestion des IPs
|
||||
"""
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import desc
|
||||
from typing import List, Optional
|
||||
from datetime import datetime, timedelta
|
||||
import xml.etree.ElementTree as ET
|
||||
import yaml
|
||||
from pathlib import Path
|
||||
import re
|
||||
import time
|
||||
import urllib.request
|
||||
|
||||
from backend.app.core.database import get_db
|
||||
from backend.app.models.ip import IP, IPHistory
|
||||
from backend.app.core.config import config_manager
|
||||
from pydantic import BaseModel
|
||||
|
||||
router = APIRouter(prefix="/api/ips", tags=["IPs"])
|
||||
|
||||
ICONS_DIR = Path("./data/icons")
|
||||
ALLOWED_ICON_EXTENSIONS = {".png", ".jpg", ".jpeg", ".webp", ".svg"}
|
||||
OUI_URL = "https://standards-oui.ieee.org/oui/oui.txt"
|
||||
OUI_PATH = Path("./data/oui/oui.txt")
|
||||
|
||||
|
||||
def _sanitize_filename(filename: str) -> str:
|
||||
name = Path(filename).name
|
||||
name = re.sub(r"[^A-Za-z0-9._-]+", "_", name)
|
||||
if not name or name in {".", ".."}:
|
||||
return f"icon_{int(time.time())}.png"
|
||||
if "." not in name:
|
||||
return f"{name}.png"
|
||||
return name
|
||||
|
||||
|
||||
@router.get("/oui/status")
async def oui_status():
    """Report whether the local OUI file exists and when it last changed."""
    if not OUI_PATH.exists():
        return {"exists": False, "updated_at": None}
    mtime = OUI_PATH.stat().st_mtime
    return {"exists": True, "updated_at": datetime.fromtimestamp(mtime).isoformat()}
|
||||
|
||||
|
||||
@router.post("/oui/update")
async def update_oui(db: Session = Depends(get_db)):
    """Download the IEEE OUI list, then backfill unknown vendors in the DB.

    Raises:
        HTTPException: 500 when the download fails.
    """
    OUI_PATH.parent.mkdir(parents=True, exist_ok=True)
    try:
        request = urllib.request.Request(
            OUI_URL,
            headers={
                "User-Agent": "IPWatch/1.0 (+https://ipwatch.local)"
            },
        )
        with urllib.request.urlopen(request) as response:
            OUI_PATH.write_bytes(response.read())
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur téléchargement OUI: {str(e)}")

    # Resolve vendors only for rows that have a MAC but no useful vendor yet.
    from backend.app.services.network import OuiLookup

    updated = 0
    for entry in db.query(IP).filter(IP.mac.isnot(None)).all():
        if entry.vendor and entry.vendor not in {"Unknown", ""}:
            continue
        resolved = OuiLookup.lookup(entry.mac)
        if resolved:
            entry.vendor = resolved
            updated += 1
    db.commit()

    return {"message": "Liste OUI mise à jour", "updated_vendors": updated}
|
||||
|
||||
|
||||
@router.get("/icons")
async def list_icons():
    """List the icon files available in the shared icons directory."""
    ICONS_DIR.mkdir(parents=True, exist_ok=True)
    names = sorted(
        entry.name
        for entry in ICONS_DIR.iterdir()
        if entry.is_file() and entry.suffix.lower() in ALLOWED_ICON_EXTENSIONS
    )
    return {"icons": names}
|
||||
|
||||
|
||||
@router.post("/icons/upload")
async def upload_icon(file: UploadFile = File(...)):
    """Store an uploaded icon in the shared icons directory.

    Raises:
        HTTPException: 400 for an unsupported extension, 500 on write failure.
    """
    ICONS_DIR.mkdir(parents=True, exist_ok=True)

    filename = _sanitize_filename(file.filename or "")
    if Path(filename).suffix.lower() not in ALLOWED_ICON_EXTENSIONS:
        raise HTTPException(status_code=400, detail="Format d'image non supporté")

    target = ICONS_DIR / filename
    try:
        target.write_bytes(await file.read())
        return {
            "filename": target.name,
            "url": f"/icons/{target.name}",
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur upload: {str(e)}")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Schémas Pydantic pour validation
|
||||
class IPUpdate(BaseModel):
    """Partial-update schema for an IP record.

    Every field is optional; only the fields the client sets are applied
    (the route reads them via `dict(exclude_unset=True)`).
    """

    # Identity / classification flags
    name: Optional[str] = None
    known: Optional[bool] = None
    tracked: Optional[bool] = None
    vm: Optional[bool] = None
    hardware_bench: Optional[bool] = None
    network_device: Optional[bool] = None
    # Placement
    location: Optional[str] = None
    host: Optional[str] = None
    link: Optional[str] = None
    # Network facts
    last_status: Optional[str] = None
    mac: Optional[str] = None
    vendor: Optional[str] = None
    hostname: Optional[str] = None
    mac_changed: Optional[bool] = None
    open_ports: Optional[List[int]] = None
    # Timestamps
    first_seen: Optional[datetime] = None
    last_seen: Optional[datetime] = None
    # Presentation
    icon_filename: Optional[str] = None
    icon_url: Optional[str] = None
    # Topology links
    ip_parent: Optional[str] = None
    ip_enfant: Optional[List[str]] = None
    # DHCP integration
    dhcp_synced: Optional[bool] = None
|
||||
|
||||
|
||||
class IPResponse(BaseModel):
    """Full IP record as returned by the API."""

    ip: str
    name: Optional[str]
    known: bool
    tracked: Optional[bool] = False
    vm: Optional[bool] = False
    hardware_bench: Optional[bool] = False
    network_device: Optional[bool] = False
    location: Optional[str]
    host: Optional[str]
    first_seen: Optional[datetime]
    last_seen: Optional[datetime]
    last_status: Optional[str]
    mac: Optional[str]
    vendor: Optional[str]
    hostname: Optional[str]
    link: Optional[str]
    mac_changed: Optional[bool] = False
    open_ports: List[int]
    icon_filename: Optional[str]
    icon_url: Optional[str]
    ip_parent: Optional[str]
    ip_enfant: List[str] = []
    dhcp_synced: Optional[bool] = False

    class Config:
        # Allow building the model directly from ORM attributes.
        from_attributes = True
|
||||
|
||||
|
||||
class IPHistoryResponse(BaseModel):
    """One historical status sample for an IP."""

    id: int
    ip: str
    timestamp: datetime
    status: str
    open_ports: List[int]

    class Config:
        # Allow building the model directly from ORM attributes.
        from_attributes = True
|
||||
|
||||
|
||||
@router.get("/", response_model=List[IPResponse])
async def get_all_ips(
    status: Optional[str] = None,
    known: Optional[bool] = None,
    db: Session = Depends(get_db)
):
    """List every IP, with optional filters.

    Args:
        status: keep only IPs whose last status matches (online/offline).
        known: keep only known (True) or unknown (False) IPs.
        db: database session.

    Returns:
        The matching IP rows.
    """
    query = db.query(IP)
    if status:
        query = query.filter(IP.last_status == status)
    if known is not None:
        query = query.filter(IP.known == known)
    return query.all()
|
||||
|
||||
|
||||
@router.get("/{ip_address}", response_model=IPResponse)
async def get_ip(ip_address: str, db: Session = Depends(get_db)):
    """Return one IP record.

    Raises:
        HTTPException: 404 when the address is unknown.
    """
    record = db.query(IP).filter(IP.ip == ip_address).first()
    if record is None:
        raise HTTPException(status_code=404, detail="IP non trouvée")
    return record
|
||||
|
||||
|
||||
@router.put("/{ip_address}", response_model=IPResponse)
async def update_ip(
    ip_address: str,
    ip_update: IPUpdate,
    db: Session = Depends(get_db)
):
    """Apply a partial update to an IP and keep parent/child links coherent.

    Only fields explicitly set in the request are written. When the parent
    changes, the old parent's child list drops this IP and the new parent's
    list gains it.

    Raises:
        HTTPException: 404 when the address is unknown.
    """
    record = db.query(IP).filter(IP.ip == ip_address).first()
    if not record:
        raise HTTPException(status_code=404, detail="IP non trouvée")

    changes = ip_update.dict(exclude_unset=True)
    previous_parent = record.ip_parent
    next_parent = changes.get("ip_parent", previous_parent)

    for attr, new_value in changes.items():
        setattr(record, attr, new_value)

    # Host "Network" implies a network device; keep the flag in sync.
    if 'host' in changes:
        record.network_device = (changes['host'] == 'Network')

    if changes.get("ip_enfant") is not None:
        record.ip_enfant = changes["ip_enfant"]

    if next_parent != previous_parent:
        # Detach from the former parent's child list...
        if previous_parent:
            former = db.query(IP).filter(IP.ip == previous_parent).first()
            if former and former.ip_enfant:
                former.ip_enfant = [c for c in former.ip_enfant if c != record.ip]
        # ...and attach to the new parent's list (no duplicates).
        if next_parent:
            adopter = db.query(IP).filter(IP.ip == next_parent).first()
            if adopter:
                siblings = adopter.ip_enfant or []
                if record.ip not in siblings:
                    adopter.ip_enfant = siblings + [record.ip]

    db.commit()
    db.refresh(record)
    return record
|
||||
|
||||
|
||||
@router.delete("/{ip_address}")
async def delete_ip(ip_address: str, db: Session = Depends(get_db)):
    """Delete an IP record (its history is expected to go with it — see model).

    Raises:
        HTTPException: 404 when the address is unknown.
    """
    target = db.query(IP).filter(IP.ip == ip_address).first()
    if target is None:
        raise HTTPException(status_code=404, detail="IP non trouvée")

    db.delete(target)
    db.commit()
    return {"message": f"IP {ip_address} supprimée"}
|
||||
|
||||
|
||||
@router.get("/{ip_address}/history", response_model=List[IPHistoryResponse])
async def get_ip_history(
    ip_address: str,
    hours: int = 24,
    db: Session = Depends(get_db)
):
    """Return the history of an IP over the last `hours` hours, newest first.

    Args:
        ip_address: the IP to look up.
        hours: lookback window (default 24).
        db: database session.

    Raises:
        HTTPException: 404 when the address is unknown.
    """
    if db.query(IP).filter(IP.ip == ip_address).first() is None:
        raise HTTPException(status_code=404, detail="IP non trouvée")

    cutoff = datetime.now() - timedelta(hours=hours)
    return (
        db.query(IPHistory)
        .filter(IPHistory.ip == ip_address, IPHistory.timestamp >= cutoff)
        .order_by(desc(IPHistory.timestamp))
        .all()
    )
|
||||
|
||||
|
||||
@router.delete("/{ip_address}/history")
async def delete_ip_history(ip_address: str, db: Session = Depends(get_db)):
    """Purge the stored history of an IP without deleting the IP itself.

    Raises:
        HTTPException: 404 when the address is unknown.

    Returns:
        Confirmation message with the number of rows removed.
    """
    if db.query(IP).filter(IP.ip == ip_address).first() is None:
        raise HTTPException(status_code=404, detail="IP non trouvée")

    removed = db.query(IPHistory).filter(IPHistory.ip == ip_address).delete()
    db.commit()
    return {"message": f"Historique de {ip_address} supprimé", "deleted_count": removed}
|
||||
|
||||
|
||||
@router.get("/stats/summary")
async def get_stats(db: Session = Depends(get_db)):
    """Return global network statistics.

    Returns:
        Dict with total, online, offline, known and unknown counts.
    """
    base = db.query(IP)
    # `.is_(True)` is the idiomatic SQLAlchemy boolean comparison; it emits
    # the same SQL as the original `IP.known == True` but is lint-clean (E712).
    return {
        "total": base.count(),
        "online": base.filter(IP.last_status == "online").count(),
        "offline": base.filter(IP.last_status == "offline").count(),
        "known": base.filter(IP.known.is_(True)).count(),
        "unknown": base.filter(IP.known.is_(False)).count(),
    }
|
||||
|
||||
|
||||
@router.get("/config/options")
async def get_config_options():
    """Expose configuration-derived options for the frontend.

    Returns:
        Dict with locations, hosts, port_protocols, subnets, app version,
        hardware-bench URL and the force-vendor-update flag.
    """
    config = config_manager.config

    # Port->protocol map, when declared in the config.
    port_protocols = {}
    if hasattr(config.ports, 'protocols') and config.ports.protocols:
        port_protocols = config.ports.protocols

    # Subnet definitions, when declared in the config.
    subnets = []
    if hasattr(config, 'subnets') and config.subnets:
        subnets = [
            {
                "name": s.name,
                "cidr": s.cidr,
                "start": s.start,
                "end": s.end,
                "description": s.description,
            }
            for s in config.subnets
        ]

    host_entries = [{"name": h.name, "location": h.location} for h in config.hosts]

    return {
        "locations": config.locations,
        "hosts": host_entries,
        "port_protocols": port_protocols,
        "subnets": subnets,
        "version": config.app.version,
        "hardware_bench_url": getattr(config.links, "hardware_bench_url", None),
        "force_vendor_update": getattr(config.scan, "force_vendor_update", False),
    }
|
||||
|
||||
|
||||
class HardwareBenchConfig(BaseModel):
    """Body for updating the hardware-bench link."""

    # New URL; blank/None clears the link.
    url: Optional[str] = None
|
||||
|
||||
|
||||
class ForceVendorConfig(BaseModel):
    """Body for toggling the force-vendor-update mode."""

    enabled: bool = False
|
||||
|
||||
|
||||
@router.post("/config/hardware-bench")
async def update_hardware_bench(config_update: HardwareBenchConfig):
    """Persist the hardware-bench URL into config.yaml and reload.

    Raises:
        HTTPException: 500 on any read/write/reload failure.
    """
    config_path = "./config.yaml"
    try:
        with open(config_path, "r", encoding="utf-8") as f:
            data = yaml.safe_load(f) or {}

        # Make sure the "links" mapping exists before writing into it.
        if data.get("links") is None:
            data["links"] = {}

        # A blank URL clears the link (stored as None).
        cleaned = (config_update.url or "").strip()
        data["links"]["hardware_bench_url"] = cleaned if cleaned else None

        with open(config_path, "w", encoding="utf-8") as f:
            yaml.safe_dump(data, f, allow_unicode=True, sort_keys=False)

        config_manager.reload_config()
        return {"message": "Lien hardware bench mis à jour"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur mise à jour config: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/config/force-vendor")
async def update_force_vendor(config_update: ForceVendorConfig):
    """Enable/disable the force-vendor-update mode in config.yaml.

    Raises:
        HTTPException: 500 on any read/write/reload failure.
    """
    config_path = "./config.yaml"
    try:
        with open(config_path, "r", encoding="utf-8") as f:
            data = yaml.safe_load(f) or {}

        # Make sure the "scan" mapping exists before writing into it.
        if data.get("scan") is None:
            data["scan"] = {}
        data["scan"]["force_vendor_update"] = bool(config_update.enabled)

        with open(config_path, "w", encoding="utf-8") as f:
            yaml.safe_dump(data, f, allow_unicode=True, sort_keys=False)

        config_manager.reload_config()
        return {"message": "Mode force fabricant mis à jour"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur mise à jour config: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/config/content")
async def get_config_content():
    """Return the raw text of config.yaml.

    Raises:
        HTTPException: 500 when the file cannot be read.
    """
    try:
        raw = Path("./config.yaml").read_text(encoding="utf-8")
        return {"content": raw}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur lecture config: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/config/reload")
async def reload_config():
    """Reload the application configuration from config.yaml.

    Raises:
        HTTPException: 500 when the reload fails.
    """
    try:
        config_manager.reload_config()
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur rechargement config: {str(e)}")
    return {"message": "Configuration rechargée avec succès"}
|
||||
|
||||
|
||||
@router.post("/import/ipscan")
async def import_ipscan(file: UploadFile = File(...), db: Session = Depends(get_db)):
    """
    Import hosts from an Angry IP Scanner XML export.

    Args:
        file: uploaded XML file
        db: database session

    Returns:
        Import statistics: counts of created/updated rows plus the first
        ten per-host errors.

    Raises:
        HTTPException: 400 for a non-XML file or unparseable XML,
        500 for unexpected failures.
    """
    if not file.filename.endswith('.xml'):
        raise HTTPException(status_code=400, detail="Le fichier doit être un XML")

    try:
        content = await file.read()

        # Parse the XML; on failure, strip invalid control characters
        # (everything below 0x20 except tab/CR/LF) and retry once.
        try:
            root = ET.fromstring(content)
        except ET.ParseError as e:
            content_str = content.decode('utf-8', errors='ignore')
            content_str = ''.join(
                char for char in content_str
                if ord(char) >= 32 or char in '\t\r\n'
            )
            try:
                root = ET.fromstring(content_str.encode('utf-8'))
            except ET.ParseError:
                raise HTTPException(status_code=400, detail=f"Fichier XML invalide même après nettoyage: {str(e)}")

        imported = 0
        updated = 0
        errors = []

        for host in root.findall('.//host'):
            # Bound before the try so the error handler can always reference
            # it (fix: previously it could be unbound in the error message).
            ip_address = host.get('address')
            try:
                if not ip_address:
                    continue

                hostname = None
                mac = None
                vendor = None
                ports = []

                for result in host.findall('result'):
                    name = result.get('name')
                    value = result.text.strip() if result.text else ""

                    # Angry IP Scanner uses "[n/a]" for missing values.
                    if value == "[n/a]":
                        value = None

                    if name == "Nom d'hôte" and value:
                        hostname = value
                    elif name == "Adresse MAC" and value:
                        mac = value
                    elif name == "Constructeur MAC" and value:
                        vendor = value
                    elif name == "Ports" and value:
                        # Port list formatted as "22,80,443".
                        # Fix: narrow except (no shadowing of the outer `e`);
                        # the isdigit() filter makes failures unlikely anyway.
                        try:
                            ports = [int(p.strip()) for p in value.split(',') if p.strip().isdigit()]
                        except ValueError:
                            ports = []

                existing_ip = db.query(IP).filter(IP.ip == ip_address).first()

                if existing_ip:
                    # Fill in missing identity fields; vendor/ports from the
                    # scan are considered authoritative and always overwrite.
                    if hostname:
                        if not existing_ip.hostname:
                            existing_ip.hostname = hostname
                        if not existing_ip.name:
                            existing_ip.name = hostname
                    if mac and not existing_ip.mac:
                        existing_ip.mac = mac
                    if vendor:
                        existing_ip.vendor = vendor
                    if ports:
                        existing_ip.open_ports = ports
                    existing_ip.last_status = "online"
                    existing_ip.last_seen = datetime.now()
                    updated += 1
                else:
                    new_ip = IP(
                        ip=ip_address,
                        name=hostname,
                        hostname=hostname,
                        mac=mac,
                        vendor=vendor,
                        open_ports=ports or [],
                        last_status="online",
                        known=False,
                        first_seen=datetime.now(),
                        last_seen=datetime.now()
                    )
                    db.add(new_ip)
                    imported += 1

            except Exception as e:
                errors.append(f"Erreur pour {ip_address}: {str(e)}")
                continue

        db.commit()

        return {
            "message": "Import terminé",
            "imported": imported,
            "updated": updated,
            "errors": errors[:10]  # cap the error list
        }

    except HTTPException:
        # Fix: let the intentional 400 above propagate instead of being
        # swallowed by the generic handler and re-raised as a 500.
        raise
    except ET.ParseError as e:
        raise HTTPException(status_code=400, detail=f"Fichier XML invalide: {str(e)}")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur import: {str(e)}")
|
||||
164
backend/app/routers/opnsense.py
Normal file
164
backend/app/routers/opnsense.py
Normal file
@@ -0,0 +1,164 @@
|
||||
"""
|
||||
Endpoints API pour l'intégration OPNsense (Kea DHCP)
|
||||
"""
|
||||
import traceback
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.orm import Session
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional
|
||||
|
||||
from backend.app.core.database import get_db
|
||||
from backend.app.core.config import config_manager
|
||||
from backend.app.models.ip import IP
|
||||
from backend.app.services.opnsense_client import OPNsenseClient, OPNsenseAPIError
|
||||
|
||||
router = APIRouter(prefix="/api/opnsense", tags=["OPNsense"])
|
||||
|
||||
|
||||
class DHCPReservationRequest(BaseModel):
    """Body for creating or updating a Kea DHCP reservation."""

    ip_address: str
    # MAC address of the host being reserved.
    hw_address: str
    hostname: str = ""
    description: str = "Ajouté par IPWatch"
|
||||
|
||||
|
||||
def get_opnsense_client() -> OPNsenseClient:
    """Return a configured OPNsense client.

    Raises:
        HTTPException: 503 when the integration is disabled or the
        host/API key are missing.
    """
    settings = config_manager.config.opnsense
    # NOTE(review): logging even the API key suffix is questionable — consider
    # dropping it from this debug line.
    masked = '***' + settings.api_key[-8:] if settings.api_key else 'VIDE'
    print(f"[OPNsense Router] Config: enabled={settings.enabled}, host={settings.host}, api_key={masked}")
    if not settings.enabled:
        raise HTTPException(status_code=503, detail="Intégration OPNsense désactivée")
    if not settings.host or not settings.api_key:
        raise HTTPException(status_code=503, detail="Configuration OPNsense incomplète")
    return OPNsenseClient()
|
||||
|
||||
|
||||
@router.get("/status")
async def opnsense_status():
    """Probe the OPNsense API and report connectivity.

    Raises:
        HTTPException: 502 when the connection test fails.
    """
    client = get_opnsense_client()
    try:
        data = await client.test_connection()
    except Exception as e:
        print(f"[OPNsense Router] Erreur status: {type(e).__name__}: {e}")
        traceback.print_exc()
        raise HTTPException(status_code=502, detail=f"Connexion OPNsense échouée: {type(e).__name__}: {str(e)}")
    return {"status": "connected", "data": data}
|
||||
|
||||
|
||||
@router.get("/dhcp/reservations")
async def list_reservations():
    """Return every Kea DHCP reservation known to OPNsense.

    Raises:
        HTTPException: 502 when the upstream query fails.
    """
    client = get_opnsense_client()
    try:
        return await client.search_reservations()
    except Exception as e:
        print(f"[OPNsense Router] Erreur list_reservations: {type(e).__name__}: {e}")
        traceback.print_exc()
        raise HTTPException(status_code=502, detail=f"Erreur récupération réservations: {type(e).__name__}: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/dhcp/reservation/{ip_address}")
async def get_reservation_by_ip(ip_address: str):
    """Look up a Kea DHCP reservation by IP address.

    Raises:
        HTTPException: 502 when the upstream lookup fails.
    """
    client = get_opnsense_client()
    try:
        match = await client.find_reservation_by_ip(ip_address)
        if match:
            return {"found": True, "reservation": match}
        return {"found": False, "reservation": None}
    except Exception as e:
        print(f"[OPNsense Router] Erreur get_reservation_by_ip: {type(e).__name__}: {e}")
        traceback.print_exc()
        raise HTTPException(status_code=502, detail=f"Erreur recherche réservation: {type(e).__name__}: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/dhcp/reservation")
async def upsert_reservation(
    request: DHCPReservationRequest,
    db: Session = Depends(get_db)
):
    """
    Create or update a Kea DHCP reservation for an IP.

    Workflow (order matters — Kea must be reconfigured before the local DB
    is marked as synced):
      0. resolve the Kea subnet UUID for the requested IP (400 if none),
      1. search for an existing reservation on that IP,
      2. update it (by UUID) or create a new one,
      3. apply the change by reconfiguring Kea,
      4. set dhcp_synced=True on the matching IP row in the local DB.

    Raises:
        HTTPException 400: no Kea subnet covers the IP.
        HTTPException 500: existing reservation has no UUID.
        HTTPException 422: OPNsense rejected the payload (validation errors).
        HTTPException 502: any other OPNsense/transport failure.
    """
    print(f"[OPNsense Router] === UPSERT RESERVATION ===")
    print(f"[OPNsense Router] IP: {request.ip_address}, MAC: {request.hw_address}, Hostname: {request.hostname}")

    client = get_opnsense_client()

    try:
        # Step 0: resolve the subnet UUID — Kea reservations are scoped to a subnet.
        print(f"[OPNsense Router] Étape 0: Résolution du subnet pour {request.ip_address}...")
        subnet_uuid = await client.find_subnet_for_ip(request.ip_address)
        if not subnet_uuid:
            raise HTTPException(status_code=400, detail=f"Aucun subnet Kea trouvé pour l'IP {request.ip_address}")

        reservation_data = {
            "subnet": subnet_uuid,
            "ip_address": request.ip_address,
            "hw_address": request.hw_address,
            "hostname": request.hostname,
            "description": request.description
        }
        print(f"[OPNsense Router] Données réservation: {reservation_data}")

        # Step 1: check whether a reservation already exists for this IP.
        print(f"[OPNsense Router] Étape 1: Recherche réservation existante...")
        existing = await client.find_reservation_by_ip(request.ip_address)

        if existing:
            # Step 2a: update the existing reservation in place via its UUID.
            uuid = existing.get("uuid")
            print(f"[OPNsense Router] Étape 2: Mise à jour réservation existante uuid={uuid}")
            if not uuid:
                raise HTTPException(status_code=500, detail="UUID de réservation introuvable")
            result = await client.set_reservation(uuid, reservation_data)
            action = "updated"
        else:
            # Step 2b: no match — create a brand-new reservation.
            print(f"[OPNsense Router] Étape 2: Création nouvelle réservation")
            result = await client.add_reservation(reservation_data)
            action = "created"

        print(f"[OPNsense Router] Étape 2 terminée: action={action}, result={result}")

        # Step 3: reconfigure Kea so the new/updated reservation takes effect.
        print(f"[OPNsense Router] Étape 3: Reconfiguration Kea...")
        await client.reconfigure_kea()
        print(f"[OPNsense Router] Étape 3 terminée: Kea reconfiguré")

        # Step 4: mirror the success into the local DB (dhcp_synced flag).
        print(f"[OPNsense Router] Étape 4: Mise à jour BDD dhcp_synced=True")
        ip_record = db.query(IP).filter(IP.ip == request.ip_address).first()
        if ip_record:
            ip_record.dhcp_synced = True
            db.commit()
            db.refresh(ip_record)
            print(f"[OPNsense Router] Étape 4 terminée: BDD mise à jour")
        else:
            # Not fatal: the reservation exists in Kea even if the IP is
            # unknown locally — just log the discrepancy.
            print(f"[OPNsense Router] ATTENTION: IP {request.ip_address} non trouvée en BDD")

        print(f"[OPNsense Router] === SUCCÈS: {action} ===")
        return {
            "status": "success",
            "action": action,
            "ip_address": request.ip_address,
            "result": result
        }

    except HTTPException:
        # Re-raise our own HTTP errors untouched so the generic handlers
        # below do not re-wrap them.
        raise
    except OPNsenseAPIError as e:
        # OPNsense rejected the payload: surface its validation messages as 422.
        print(f"[OPNsense Router] === ERREUR VALIDATION ===")
        print(f"[OPNsense Router] Message: {str(e)}")
        print(f"[OPNsense Router] Validations: {e.validations}")
        raise HTTPException(status_code=422, detail=str(e))
    except Exception as e:
        # Anything else (network, auth, parsing...) becomes a 502 upstream error.
        print(f"[OPNsense Router] === ERREUR ===")
        print(f"[OPNsense Router] Type: {type(e).__name__}")
        print(f"[OPNsense Router] Message: {str(e)}")
        traceback.print_exc()
        raise HTTPException(status_code=502, detail=f"Erreur OPNsense: {type(e).__name__}: {str(e)}")
|
||||
362
backend/app/routers/scan.py
Executable file
362
backend/app/routers/scan.py
Executable file
@@ -0,0 +1,362 @@
|
||||
"""
|
||||
Endpoints API pour le contrôle des scans réseau
|
||||
"""
|
||||
from fastapi import APIRouter, Depends, BackgroundTasks
|
||||
from sqlalchemy.orm import Session
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, Any, Optional, List
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.app.core.database import get_db
|
||||
from backend.app.core.config import config_manager
|
||||
from backend.app.models.ip import IP, IPHistory
|
||||
from backend.app.models.scan_log import ScanLog
|
||||
from backend.app.services.network import NetworkScanner, OuiLookup
|
||||
from backend.app.services.websocket import ws_manager
|
||||
|
||||
router = APIRouter(prefix="/api/scan", tags=["Scan"])
|
||||
|
||||
|
||||
class ScanLogResponse(BaseModel):
    """Response schema for one scan log entry."""
    id: int                    # primary key of the log row
    ip: Optional[str]          # IP this log line refers to (may be absent)
    status: Optional[str]      # "online"/"offline" as written by perform_scan
    message: str               # human-readable log line
    created_at: datetime       # when the entry was recorded

    class Config:
        # Allow building the model directly from SQLAlchemy ORM objects.
        from_attributes = True
|
||||
|
||||
|
||||
async def perform_scan(db: Session):
    """
    Run a full network scan and persist the results.

    Designed to run as a FastAPI background task. Pings every host in the
    configured CIDR, scans configured ports, then upserts each result into
    the IP table, appends an IPHistory row per IP, and streams progress /
    state-change notifications over WebSocket. All WebSocket errors are
    deliberately swallowed so they never abort the scan; any other failure
    rolls back the DB session.

    Args:
        db: SQLAlchemy session used for all reads/writes (committed once at
            the end, rolled back on error).
    """
    try:
        async def scan_log(message: str):
            # Log to stdout and mirror to WebSocket clients; WS failures are
            # non-fatal (best-effort broadcast).
            print(message)
            try:
                await ws_manager.broadcast_scan_log(message)
            except Exception:
                pass

        await scan_log(f"[{datetime.now()}] Début du scan réseau...")

        # Notify clients that a scan is starting (best-effort).
        try:
            await ws_manager.broadcast_scan_start()
        except Exception as e:
            print(f"Erreur broadcast start (ignorée): {e}")

        # Load current configuration.
        config = config_manager.config
        await scan_log(f"[{datetime.now()}] Config chargée: {config.network.cidr}")

        # Build the scanner from config.
        scanner = NetworkScanner(
            cidr=config.network.cidr,
            timeout=config.scan.timeout,
            ping_count=config.scan.ping_count
        )

        # Expand configured port specs ("80", "8000-8010") into a flat int list;
        # ranges are inclusive on both ends.
        port_list = []
        for port_range in config.ports.ranges:
            if '-' in port_range:
                start, end = map(int, port_range.split('-'))
                port_list.extend(range(start, end + 1))
            else:
                port_list.append(int(port_range))

        await scan_log(f"[{datetime.now()}] Ports à scanner: {len(port_list)}")

        # Known IPs declared in configuration.
        known_ips = config.ip_classes
        await scan_log(f"[{datetime.now()}] IPs connues: {len(known_ips)}")

        # Per-host progress callback pushed over WebSocket; errors ignored so
        # a dead client never blocks the scan.
        async def progress_callback(current: int, total: int, current_ip: str, status: str, ping_ok: bool):
            try:
                ping_label = "ok" if ping_ok else "fail"
                await ws_manager.broadcast_scan_progress({
                    "current": current,
                    "total": total,
                    "ip": current_ip
                })
                await ws_manager.broadcast_scan_log(
                    f"[{current}/{total}] {current_ip} -> ping:{ping_label} ({status})"
                )
            except Exception:
                # Ignore WebSocket errors so they do not block the scan.
                pass

        # Run the full scan.
        await scan_log(f"[{datetime.now()}] Lancement du scan (parallélisme: {config.scan.parallel_pings})...")
        scan_results = await scanner.full_scan(
            known_ips=known_ips,
            port_list=port_list,
            max_concurrent=config.scan.parallel_pings,
            progress_callback=progress_callback
        )
        await scan_log(f"[{datetime.now()}] Scan terminé: {len(scan_results)} IPs trouvées")

        # Persist results; stats are broadcast at the end.
        stats = {
            "total": 0,
            "online": 0,
            "offline": 0,
            "new": 0,
            "updated": 0
        }

        for ip_address, ip_data in scan_results.items():
            stats["total"] += 1

            if ip_data["last_status"] == "online":
                stats["online"] += 1
            else:
                stats["offline"] += 1

            # Per-IP scan log entry (history of scans).
            ping_label = "ok" if ip_data["last_status"] == "online" else "fail"
            log_message = f"Scan {ip_address} -> ping:{ping_label} ({ip_data['last_status']})"
            db.add(ScanLog(
                ip=ip_address,
                status=ip_data["last_status"],
                message=log_message
            ))

            # Upsert: does this IP already exist?
            existing_ip = db.query(IP).filter(IP.ip == ip_address).first()

            if existing_ip:
                # --- update path ---
                old_status = existing_ip.last_status

                # An unknown IP coming back online counts as a fresh detection:
                # reset first_seen so it reappears in "Nouvelles Détections".
                if (old_status == "offline" and ip_data["last_status"] == "online" and not existing_ip.known):
                    existing_ip.first_seen = datetime.now()

                # Flag MAC changes (possible spoofing / device swap); the flag
                # is cleared again whenever the MAC is stable or unknown.
                new_mac = ip_data.get("mac")
                if new_mac and existing_ip.mac and new_mac != existing_ip.mac:
                    existing_ip.mac_changed = True
                    print(f"[ALERTE] MAC changée pour {ip_address}: {existing_ip.mac} -> {new_mac}")
                else:
                    existing_ip.mac_changed = False

                existing_ip.last_status = ip_data["last_status"]
                if ip_data["last_seen"]:
                    existing_ip.last_seen = ip_data["last_seen"]
                # Keep the previous MAC when the scan did not resolve one.
                existing_ip.mac = ip_data.get("mac") or existing_ip.mac

                # Vendor resolution: fall back to a local OUI lookup when the
                # scan returned nothing useful; only overwrite an existing
                # vendor when force_vendor_update is enabled.
                vendor = ip_data.get("vendor")
                if (not vendor or vendor == "Unknown") and existing_ip.mac:
                    vendor = OuiLookup.lookup(existing_ip.mac) or vendor
                if config.scan.force_vendor_update:
                    if vendor and vendor != "Unknown":
                        existing_ip.vendor = vendor
                else:
                    if (not existing_ip.vendor or existing_ip.vendor == "Unknown") and vendor and vendor != "Unknown":
                        existing_ip.vendor = vendor
                existing_ip.hostname = ip_data.get("hostname") or existing_ip.hostname
                existing_ip.open_ports = ip_data.get("open_ports", [])

                # Only overwrite host when the scan result carries one
                # (i.e. it came from configuration).
                if "host" in ip_data:
                    existing_ip.host = ip_data["host"]

                # network_device is derived from host == "Network"; fall back
                # to the stored host when the scan result has none.
                current_host = ip_data.get("host") or existing_ip.host
                existing_ip.network_device = (current_host == "Network")

                # Notify clients only on actual status transitions.
                if old_status != ip_data["last_status"]:
                    await ws_manager.broadcast_ip_update({
                        "ip": ip_address,
                        "old_status": old_status,
                        "new_status": ip_data["last_status"]
                    })

                stats["updated"] += 1

            else:
                # --- insert path: first time we see this IP ---
                vendor = ip_data.get("vendor")
                if (not vendor or vendor == "Unknown") and ip_data.get("mac"):
                    vendor = OuiLookup.lookup(ip_data.get("mac")) or vendor
                new_ip = IP(
                    ip=ip_address,
                    name=ip_data.get("name"),
                    known=ip_data.get("known", False),
                    network_device=ip_data.get("host") == "Network",
                    location=ip_data.get("location"),
                    host=ip_data.get("host"),
                    first_seen=datetime.now(),
                    last_seen=ip_data.get("last_seen") or datetime.now(),
                    last_status=ip_data["last_status"],
                    mac=ip_data.get("mac"),
                    vendor=vendor,
                    hostname=ip_data.get("hostname"),
                    open_ports=ip_data.get("open_ports", [])
                )
                db.add(new_ip)

                # Notify clients about the newly discovered IP.
                await ws_manager.broadcast_new_ip({
                    "ip": ip_address,
                    "status": ip_data["last_status"],
                    "known": ip_data.get("known", False)
                })

                stats["new"] += 1

            # One history row per IP per scan, regardless of insert/update.
            history_entry = IPHistory(
                ip=ip_address,
                timestamp=datetime.now(),
                status=ip_data["last_status"],
                open_ports=ip_data.get("open_ports", [])
            )
            db.add(history_entry)

        # Single commit for the whole scan batch.
        db.commit()

        # Notify clients the scan is finished, with aggregate stats.
        await ws_manager.broadcast_scan_complete(stats)

        print(f"[{datetime.now()}] Scan terminé: {stats}")

    except Exception as e:
        # Background task: nothing to propagate to — log and roll back.
        # NOTE(review): clients are not sent a scan_complete/failure event on
        # this path, so a UI waiting on it may hang — confirm intended.
        print(f"Erreur lors du scan: {e}")
        db.rollback()
|
||||
|
||||
|
||||
@router.post("/start")
async def start_scan(background_tasks: BackgroundTasks, db: Session = Depends(get_db)):
    """Schedule an immediate network scan as a background task.

    Returns:
        Confirmation message with the scheduling timestamp.
    """
    # The scan runs after the response is sent; results arrive via WebSocket.
    background_tasks.add_task(perform_scan, db)
    return {"message": "Scan réseau démarré", "timestamp": datetime.now()}
|
||||
|
||||
|
||||
@router.get("/logs", response_model=List[ScanLogResponse])
async def get_scan_logs(limit: int = 200, db: Session = Depends(get_db)):
    """Return the latest scan log entries, oldest first."""
    # Fetch the newest rows, then flip so the client reads chronologically.
    newest_first = (
        db.query(ScanLog)
        .order_by(ScanLog.created_at.desc())
        .limit(limit)
        .all()
    )
    return newest_first[::-1]
|
||||
|
||||
|
||||
def _parse_port_list(port_ranges) -> List[int]:
    """Expand configured port specs ("80", "8000-8010") into a flat list.

    Ranges are inclusive on both ends. Mirrors the parsing done inline in
    perform_scan; extracted here so this route does not duplicate it.
    """
    ports: List[int] = []
    for spec in port_ranges:
        if '-' in spec:
            start, end = map(int, spec.split('-'))
            ports.extend(range(start, end + 1))
        else:
            ports.append(int(spec))
    return ports


@router.post("/ports/{ip_address}")
async def scan_ip_ports(ip_address: str, db: Session = Depends(get_db)):
    """
    Scan the configured ports of a single IP and persist the result.

    Args:
        ip_address: IP address to scan.
        db: database session.

    Returns:
        dict with the open ports found; on failure, the same shape with an
        error message and an empty port list (never raises — the UI treats
        failures as "no ports found").
    """
    try:
        config = config_manager.config

        # Expand the configured port specs into a concrete list of ints.
        port_list = _parse_port_list(config.ports.ranges)

        scanner = NetworkScanner(
            cidr=config.network.cidr,
            timeout=config.scan.timeout,
            ping_count=config.scan.ping_count
        )

        # Scan this single host's ports.
        print(f"[{datetime.now()}] Scan ports pour {ip_address}...")
        open_ports = await scanner.scan_ports(ip_address, port_list)
        print(f"[{datetime.now()}] Ports ouverts pour {ip_address}: {open_ports}")

        # Persist the result if the IP is known to the database.
        ip_record = db.query(IP).filter(IP.ip == ip_address).first()
        if ip_record:
            ip_record.open_ports = open_ports
            ip_record.last_seen = datetime.now()
            db.commit()

        # Push the fresh port list to connected clients.
        await ws_manager.broadcast_ip_update({
            "ip": ip_address,
            "open_ports": open_ports
        })

        return {
            "message": "Scan de ports terminé",
            "ip": ip_address,
            "open_ports": open_ports,
            "timestamp": datetime.now()
        }

    except Exception as e:
        # Deliberate best-effort: report the error in the payload instead of
        # failing the request.
        print(f"Erreur scan ports {ip_address}: {e}")
        return {
            "message": f"Erreur: {str(e)}",
            "ip": ip_address,
            "open_ports": [],
            "timestamp": datetime.now()
        }
|
||||
|
||||
|
||||
@router.post("/cleanup-history")
async def cleanup_history(hours: int = 24, db: Session = Depends(get_db)):
    """Delete IP history entries older than the given number of hours.

    Args:
        hours: retention window to keep (default 24h).
        db: database session.

    Returns:
        Count of deleted entries and the retention window used.
    """
    threshold = datetime.now() - timedelta(hours=hours)

    removed = (
        db.query(IPHistory)
        .filter(IPHistory.timestamp < threshold)
        .delete()
    )
    db.commit()

    return {
        "message": f"Historique nettoyé",
        "deleted_entries": removed,
        "older_than_hours": hours,
    }
|
||||
73
backend/app/routers/system.py
Executable file
73
backend/app/routers/system.py
Executable file
@@ -0,0 +1,73 @@
|
||||
"""
|
||||
Router pour les statistiques système
|
||||
Fournit les métriques RAM et CPU du serveur IPWatch
|
||||
"""
|
||||
from fastapi import APIRouter
|
||||
import psutil
|
||||
from datetime import datetime
|
||||
|
||||
router = APIRouter(prefix="/api/system", tags=["system"])
|
||||
|
||||
|
||||
@router.get("/stats")
async def get_system_stats():
    """
    Collect RAM and CPU statistics for the IPWatch server.

    Returns:
        dict: system metrics
            - ram_percent: system RAM usage (%)
            - ram_used / ram_total / ram_available: system RAM in MB
            - cpu_percent: system CPU usage (%), sampled over 1 s
            - cpu_count: number of CPU cores
            - process_ram_mb: RSS of the IPWatch process in MB
            - process_cpu_percent: IPWatch process CPU (%), sampled over 0.1 s
            - timestamp: ISO timestamp of the measurement
    """
    import asyncio

    def _collect():
        # psutil.cpu_percent(interval=...) sleeps while sampling (~1.1 s
        # total here); keep all blocking psutil calls in this worker.
        memory = psutil.virtual_memory()
        cpu_percent = psutil.cpu_percent(interval=1)
        process = psutil.Process()
        process_memory = process.memory_info()
        process_cpu = process.cpu_percent(interval=0.1)
        return memory, cpu_percent, process_memory, process_cpu

    # BUGFIX: the samplers above block for ~1.1 s; running them directly in
    # this async endpoint stalled the whole event loop (WebSocket included).
    # Offload to a thread so other requests keep being served.
    memory, cpu_percent, process_memory, process_cpu = await asyncio.to_thread(_collect)

    return {
        # System RAM
        "ram_percent": round(memory.percent, 1),
        "ram_used": round(memory.used / (1024 * 1024), 1),  # MB
        "ram_total": round(memory.total / (1024 * 1024), 1),  # MB
        "ram_available": round(memory.available / (1024 * 1024), 1),  # MB

        # System CPU
        "cpu_percent": round(cpu_percent, 1),
        "cpu_count": psutil.cpu_count(),

        # IPWatch process
        "process_ram_mb": round(process_memory.rss / (1024 * 1024), 1),  # MB
        "process_cpu_percent": round(process_cpu, 1),

        # Timestamp
        "timestamp": datetime.now().isoformat()
    }
|
||||
|
||||
|
||||
@router.get("/uptime")
async def get_uptime():
    """Report how long the host system has been up.

    Returns:
        dict: uptime in seconds and hours, plus the boot timestamp (ISO).
    """
    import time

    booted_at = psutil.boot_time()
    elapsed = time.time() - booted_at

    return {
        "uptime_seconds": int(elapsed),
        "uptime_hours": round(elapsed / 3600, 1),
        "boot_time": datetime.fromtimestamp(booted_at).isoformat(),
    }
|
||||
227
backend/app/routers/tracking.py
Normal file
227
backend/app/routers/tracking.py
Normal file
@@ -0,0 +1,227 @@
|
||||
"""
|
||||
Endpoints API pour le suivi d'équipements (Wake-on-LAN, shutdown, etc.)
|
||||
"""
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.orm import Session
|
||||
from typing import List, Optional
|
||||
from datetime import datetime
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.app.core.database import get_db
|
||||
from backend.app.models.ip import IP
|
||||
|
||||
router = APIRouter(prefix="/api/tracking", tags=["Tracking"])
|
||||
|
||||
|
||||
# Schémas Pydantic
|
||||
class IPTrackingResponse(BaseModel):
    """Response schema for a tracked IP (device) record."""
    ip: str                        # IP address (primary identifier)
    name: Optional[str]            # user-assigned display name
    known: bool                    # declared in configuration
    tracked: bool                  # included in the tracking view
    location: Optional[str]        # physical location label
    host: Optional[str]            # host/category label from config
    last_status: Optional[str]     # "online"/"offline" from the last scan
    mac: Optional[str]             # MAC address, if resolved
    vendor: Optional[str]          # NIC vendor, if resolved
    hostname: Optional[str]        # reverse-DNS hostname, if resolved
    link: Optional[str]            # associated URL, if any
    last_seen: Optional[datetime]  # last time the device answered
    open_ports: List[int]          # open ports from the last port scan

    class Config:
        # Allow building the model directly from SQLAlchemy ORM objects.
        from_attributes = True
|
||||
|
||||
|
||||
class WOLResponse(BaseModel):
    """Response returned after sending a Wake-on-LAN packet."""
    ip: str        # target IP address
    mac: str       # MAC the magic packet was sent to
    message: str   # human-readable outcome
    success: bool  # whether the packet was sent
|
||||
|
||||
|
||||
class ShutdownResponse(BaseModel):
    """Response returned after a shutdown/reboot command."""
    message: str   # human-readable outcome
    ip: str        # target IP address
    success: bool  # whether the command was dispatched
|
||||
|
||||
|
||||
@router.get("/", response_model=List[IPTrackingResponse])
async def get_tracked_ips(db: Session = Depends(get_db)):
    """List every IP flagged as tracked, with its current state.

    Ordered by name, then by IP address.
    """
    query = db.query(IP).filter(IP.tracked == True).order_by(IP.name, IP.ip)
    return query.all()
|
||||
|
||||
|
||||
@router.post("/wol/{ip_address}", response_model=WOLResponse)
async def wake_on_lan(ip_address: str, db: Session = Depends(get_db)):
    """Send a Wake-on-LAN magic packet to the device at *ip_address*.

    Requires the IP to exist in the database with a recorded MAC address.

    Raises:
        HTTPException 404: IP unknown.
        HTTPException 400: no MAC recorded for the IP.
        HTTPException 500: wakeonlan library missing, or send failure.
    """
    target = db.query(IP).filter(IP.ip == ip_address).first()

    # Guard clauses: unknown IP, then missing MAC.
    if not target:
        raise HTTPException(
            status_code=404,
            detail=f"IP {ip_address} non trouvée dans la base de données"
        )
    if not target.mac:
        raise HTTPException(
            status_code=400,
            detail=f"Adresse MAC manquante pour {ip_address}. Impossible d'envoyer le paquet WOL."
        )

    try:
        # Imported lazily: wakeonlan is an optional dependency.
        from wakeonlan import send_magic_packet
        send_magic_packet(target.mac)
    except ImportError:
        raise HTTPException(
            status_code=500,
            detail="La bibliothèque 'wakeonlan' n'est pas installée. Exécutez: pip install wakeonlan"
        )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Erreur lors de l'envoi du paquet WOL: {str(e)}"
        )

    return WOLResponse(
        message=f"Paquet Wake-on-LAN envoyé avec succès",
        ip=ip_address,
        mac=target.mac,
        success=True
    )
|
||||
|
||||
|
||||
@router.post("/shutdown/{ip_address}", response_model=ShutdownResponse)
async def shutdown_device(ip_address: str, db: Session = Depends(get_db)):
    """
    Send a shutdown command to the device via MQTT.

    Raises:
        HTTPException 404: IP unknown.
        HTTPException 400: device already offline.
        HTTPException 500: MQTT send failed or MQTT service not configured.
    """
    # Look up the IP record.
    ip_obj = db.query(IP).filter(IP.ip == ip_address).first()

    if not ip_obj:
        raise HTTPException(
            status_code=404,
            detail=f"IP {ip_address} non trouvée dans la base de données"
        )

    if ip_obj.last_status != "online":
        raise HTTPException(
            status_code=400,
            detail=f"L'équipement {ip_address} est déjà hors ligne"
        )

    try:
        from backend.app.services.mqtt_client import send_mqtt_command

        # Send the shutdown command via MQTT.
        success = send_mqtt_command(ip_address, "shutdown")

        if success:
            return ShutdownResponse(
                message=f"Commande shutdown envoyée à {ip_address} via MQTT",
                ip=ip_address,
                success=True
            )
        else:
            raise HTTPException(
                status_code=500,
                detail="Échec de l'envoi de la commande MQTT"
            )

    except HTTPException:
        # BUGFIX: HTTPException is an Exception subclass, so without this
        # clause the 500 "Échec..." raised above was caught by the generic
        # handler below and re-wrapped with a different detail. Same pattern
        # as upsert_reservation.
        raise
    except ImportError:
        raise HTTPException(
            status_code=500,
            detail="Le service MQTT n'est pas configuré. Consultez mqtt/docs/README.md"
        )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Erreur lors de l'envoi de la commande: {str(e)}"
        )
|
||||
|
||||
|
||||
@router.post("/reboot/{ip_address}", response_model=ShutdownResponse)
async def reboot_device(ip_address: str, db: Session = Depends(get_db)):
    """
    Send a reboot command to the device via MQTT.

    Raises:
        HTTPException 404: IP unknown.
        HTTPException 400: device offline.
        HTTPException 500: MQTT send failed or MQTT service not configured.
    """
    # Look up the IP record.
    ip_obj = db.query(IP).filter(IP.ip == ip_address).first()

    if not ip_obj:
        raise HTTPException(
            status_code=404,
            detail=f"IP {ip_address} non trouvée"
        )

    if ip_obj.last_status != "online":
        raise HTTPException(
            status_code=400,
            detail=f"L'équipement {ip_address} est hors ligne"
        )

    try:
        from backend.app.services.mqtt_client import send_mqtt_command

        # Send the reboot command via MQTT.
        success = send_mqtt_command(ip_address, "reboot")

        if success:
            return ShutdownResponse(
                message=f"Commande reboot envoyée à {ip_address} via MQTT",
                ip=ip_address,
                success=True
            )
        else:
            raise HTTPException(
                status_code=500,
                detail="Échec de l'envoi de la commande MQTT"
            )

    except HTTPException:
        # BUGFIX: HTTPException is an Exception subclass, so without this
        # clause the 500 "Échec..." raised above was caught by the generic
        # handler below and re-wrapped with a different detail. Same pattern
        # as upsert_reservation.
        raise
    except ImportError:
        raise HTTPException(
            status_code=500,
            detail="Le service MQTT n'est pas configuré"
        )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Erreur: {str(e)}"
        )
|
||||
|
||||
|
||||
@router.patch("/{ip_address}/toggle", response_model=IPTrackingResponse)
async def toggle_tracking(ip_address: str, db: Session = Depends(get_db)):
    """Flip the tracked flag of an IP and return the updated record.

    Raises:
        HTTPException 404: IP unknown.
    """
    record = db.query(IP).filter(IP.ip == ip_address).first()
    if not record:
        raise HTTPException(
            status_code=404,
            detail=f"IP {ip_address} non trouvée"
        )

    # Invert the flag and persist.
    record.tracked = not record.tracked
    db.commit()
    db.refresh(record)
    return record
|
||||
35
backend/app/routers/websocket.py
Executable file
35
backend/app/routers/websocket.py
Executable file
@@ -0,0 +1,35 @@
|
||||
"""
|
||||
Endpoint WebSocket pour notifications temps réel
|
||||
"""
|
||||
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
|
||||
from backend.app.services.websocket import ws_manager
|
||||
|
||||
router = APIRouter(tags=["WebSocket"])
|
||||
|
||||
|
||||
@router.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
    """
    WebSocket endpoint for realtime notifications.

    Registers the connection with ws_manager, then loops on incoming
    messages purely as a keep-alive: a "ping" frame is answered with
    "pong"; everything else is ignored. The connection is deregistered
    on disconnect or on any error.
    """
    await ws_manager.connect(websocket)
    try:
        while True:
            incoming = await websocket.receive_text()
            # Client heartbeat; other commands could be dispatched here later.
            if incoming == "ping":
                await ws_manager.send_personal_message("pong", websocket)
    except WebSocketDisconnect:
        ws_manager.disconnect(websocket)
    except Exception as e:
        print(f"Erreur WebSocket: {e}")
        ws_manager.disconnect(websocket)
|
||||
Reference in New Issue
Block a user