first
This commit is contained in:
1
backend/app/__init__.py
Normal file
1
backend/app/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# IPWatch Backend Application
|
||||
1
backend/app/core/__init__.py
Normal file
1
backend/app/core/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Core configuration modules
|
||||
111
backend/app/core/config.py
Normal file
111
backend/app/core/config.py
Normal file
@@ -0,0 +1,111 @@
|
||||
"""
|
||||
Configuration management pour IPWatch
|
||||
Charge et valide le fichier config.yaml
|
||||
"""
|
||||
import yaml
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, List, Optional
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class AppConfig(BaseModel):
    """Application-level settings (name, version, debug flag)."""
    name: str = "IPWatch"    # display name of the application
    version: str = "1.0.0"   # application version string
    debug: bool = False      # enables debug behaviour when True
||||
|
||||
|
||||
class NetworkConfig(BaseModel):
    """Network settings; the CIDR to scan is the only required field."""
    cidr: str                        # network range to scan, e.g. "192.168.1.0/24"
    gateway: Optional[str] = None    # gateway IP (optional)
    dns: Optional[List[str]] = None  # DNS server IPs (optional)
|
||||
|
||||
|
||||
class ScanConfig(BaseModel):
    """Scan timing and parallelism settings."""
    ping_interval: int = 60        # seconds between ping sweeps
    port_scan_interval: int = 300  # seconds between port scans
    parallel_pings: int = 50       # maximum concurrent pings
    timeout: float = 1.0           # per-ping / per-connection timeout, seconds
|
||||
|
||||
|
||||
class PortsConfig(BaseModel):
    """Ports to scan; each entry is a single port ("22") or a range ("8000-8010")."""
    ranges: List[str] = ["22", "80", "443", "3389", "8080"]
|
||||
|
||||
|
||||
class HistoryConfig(BaseModel):
    """History retention settings."""
    retention_hours: int = 24  # keep history entries for this many hours
|
||||
|
||||
|
||||
class UIConfig(BaseModel):
    """Frontend display settings."""
    offline_transparency: float = 0.5  # opacity applied to offline hosts
    show_mac: bool = True              # display MAC addresses
    show_vendor: bool = True           # display vendor names
|
||||
|
||||
|
||||
class ColorsConfig(BaseModel):
    """UI status colors (hex), one per IP state."""
    free: str = "#75715E"
    online_known: str = "#A6E22E"
    online_unknown: str = "#66D9EF"
    offline_known: str = "#F92672"
    offline_unknown: str = "#AE81FF"
|
||||
|
||||
|
||||
class DatabaseConfig(BaseModel):
    """Database settings."""
    path: str = "./data/db.sqlite"  # SQLite file location
|
||||
|
||||
|
||||
class IPWatchConfig(BaseModel):
    """Complete IPWatch configuration; only `network` has no default."""
    app: AppConfig = Field(default_factory=AppConfig)
    network: NetworkConfig  # required
    # Per-IP metadata keyed by address — passed to the scanner as known_ips
    # (presumably name/location/host entries; confirm against config.yaml)
    ip_classes: Dict[str, Any] = Field(default_factory=dict)
    scan: ScanConfig = Field(default_factory=ScanConfig)
    ports: PortsConfig = Field(default_factory=PortsConfig)
    locations: List[str] = Field(default_factory=list)  # allowed location labels
    hosts: List[str] = Field(default_factory=list)      # allowed host-type labels
    history: HistoryConfig = Field(default_factory=HistoryConfig)
    ui: UIConfig = Field(default_factory=UIConfig)
    colors: ColorsConfig = Field(default_factory=ColorsConfig)
    database: DatabaseConfig = Field(default_factory=DatabaseConfig)
|
||||
|
||||
|
||||
class ConfigManager:
    """Singleton holding the loaded IPWatch configuration.

    Use the module-level ``config_manager`` instance: call ``load_config()``
    once at startup, then read ``.config`` anywhere afterwards.
    """
    _instance: Optional['ConfigManager'] = None
    _config: Optional[IPWatchConfig] = None

    def __new__(cls):
        # Classic singleton: always hand back the same instance.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def load_config(self, config_path: str = "./config.yaml") -> IPWatchConfig:
        """Load and validate the YAML configuration file.

        Args:
            config_path: Path to the YAML file.

        Returns:
            The validated configuration.

        Raises:
            FileNotFoundError: If the file does not exist.
        """
        path = Path(config_path)

        if not path.exists():
            raise FileNotFoundError(f"Fichier de configuration non trouvé: {config_path}")

        with open(path, 'r', encoding='utf-8') as f:
            yaml_data = yaml.safe_load(f)

        # safe_load returns None for an empty file; fall back to an empty
        # mapping so pydantic reports missing fields instead of crashing
        # with "TypeError: argument after ** must be a mapping".
        self._config = IPWatchConfig(**(yaml_data or {}))
        return self._config

    @property
    def config(self) -> IPWatchConfig:
        """Return the current configuration (load_config() must run first)."""
        if self._config is None:
            raise RuntimeError("Configuration non chargée. Appelez load_config() d'abord.")
        return self._config


# Global singleton instance used across the application
config_manager = ConfigManager()
|
||||
47
backend/app/core/database.py
Normal file
47
backend/app/core/database.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""
|
||||
Configuration de la base de données SQLAlchemy
|
||||
"""
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from pathlib import Path
|
||||
|
||||
# Declarative base class for all ORM models
Base = declarative_base()

# Bound lazily by init_database(); both stay None until it runs
engine = None
SessionLocal = None
|
||||
|
||||
|
||||
def init_database(db_path: str = "./data/db.sqlite"):
    """Initialise the SQLite engine, session factory and schema.

    Creates the parent directory of *db_path* if needed, binds the module
    globals ``engine`` and ``SessionLocal``, and creates all tables
    declared on ``Base``.

    Args:
        db_path: Path to the SQLite database file.

    Returns:
        The created SQLAlchemy engine.
    """
    global engine, SessionLocal

    # Make sure the data directory exists before SQLite opens the file.
    Path(db_path).parent.mkdir(parents=True, exist_ok=True)

    # check_same_thread=False: the engine is shared across request threads.
    engine = create_engine(
        f"sqlite:///{db_path}",
        connect_args={"check_same_thread": False},
        echo=False,
    )

    # Session factory used by get_db() and the scheduler.
    SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

    # Create any missing tables declared on Base.
    Base.metadata.create_all(bind=engine)

    return engine
|
||||
|
||||
|
||||
def get_db():
    """FastAPI dependency yielding a DB session, closed after the request.

    Yields:
        A SQLAlchemy session bound to the global engine.

    Raises:
        RuntimeError: If init_database() has not been called yet. Without
            this guard the original failed with an opaque
            "TypeError: 'NoneType' object is not callable".
    """
    if SessionLocal is None:
        raise RuntimeError("Base de données non initialisée. Appelez init_database() d'abord.")
    db = SessionLocal()
    try:
        yield db
    finally:
        # Always release the connection, even if the request handler raised.
        db.close()
|
||||
187
backend/app/main.py
Normal file
187
backend/app/main.py
Normal file
@@ -0,0 +1,187 @@
|
||||
"""
|
||||
Application FastAPI principale pour IPWatch
|
||||
Point d'entrée du backend
|
||||
"""
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.responses import FileResponse
|
||||
from contextlib import asynccontextmanager
|
||||
from pathlib import Path
|
||||
|
||||
from backend.app.core.config import config_manager
|
||||
from backend.app.core.database import init_database, get_db
|
||||
from backend.app.routers import ips_router, scan_router, websocket_router
|
||||
from backend.app.services.scheduler import scan_scheduler
|
||||
from backend.app.routers.scan import perform_scan
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """
    Application lifecycle manager (FastAPI lifespan).

    Startup order matters: config must load before the database (the DB
    path comes from config) and before the scheduler (scan intervals come
    from config). Config/DB failures abort startup; scheduler failures are
    logged but tolerated. Shutdown only stops the scheduler.
    """
    # Startup
    print("=== Démarrage IPWatch ===")

    # 1. Load the configuration (fatal on failure)
    try:
        config = config_manager.load_config("./config.yaml")
        print(f"✓ Configuration chargée: {config.network.cidr}")
    except Exception as e:
        print(f"✗ Erreur chargement config: {e}")
        raise

    # 2. Initialise the database (fatal on failure)
    try:
        init_database(config.database.path)
        print(f"✓ Base de données initialisée: {config.database.path}")
    except Exception as e:
        print(f"✗ Erreur initialisation DB: {e}")
        raise

    # 3. Start the scheduler (non-fatal: the API still works without it)
    try:
        scan_scheduler.start()

        # Imported here because SessionLocal is only bound after init_database()
        from backend.app.core.database import SessionLocal

        async def scheduled_scan():
            """Run one scan with its own short-lived DB session."""
            db = SessionLocal()
            try:
                await perform_scan(db)
            finally:
                db.close()

        # Periodic jobs — both reuse the same wrapper
        scan_scheduler.add_ping_scan_job(
            scheduled_scan,
            interval_seconds=config.scan.ping_interval
        )

        scan_scheduler.add_port_scan_job(
            scheduled_scan,
            interval_seconds=config.scan.port_scan_interval
        )

        # Hourly history-retention cleanup
        async def cleanup_history():
            """Delete history rows older than the configured retention."""
            from backend.app.models.ip import IPHistory
            from datetime import datetime, timedelta

            db = SessionLocal()
            try:
                cutoff = datetime.utcnow() - timedelta(hours=config.history.retention_hours)
                deleted = db.query(IPHistory).filter(IPHistory.timestamp < cutoff).delete()
                db.commit()
                print(f"Nettoyage historique: {deleted} entrées supprimées")
            finally:
                db.close()

        scan_scheduler.add_cleanup_job(cleanup_history, interval_hours=1)

        print("✓ Scheduler démarré")
    except Exception as e:
        # NOTE(review): deliberately swallowed — a broken scheduler should
        # not prevent the API from serving requests.
        print(f"✗ Erreur démarrage scheduler: {e}")

    print("=== IPWatch prêt ===\n")

    yield

    # Shutdown
    print("\n=== Arrêt IPWatch ===")
    scan_scheduler.stop()
    print("✓ Scheduler arrêté")
|
||||
|
||||
|
||||
# Create the FastAPI application; `lifespan` wires startup/shutdown.
app = FastAPI(
    title="IPWatch API",
    description="API backend pour IPWatch - Scanner réseau temps réel",
    version="1.0.0",
    lifespan=lifespan
)

# CORS so a separately-served frontend can call the API.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # NOTE(review): restrict origins in production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# API routers (must be registered before the SPA catch-all below)
app.include_router(ips_router)
app.include_router(scan_router)
app.include_router(websocket_router)
|
||||
|
||||
|
||||
@app.get("/health")
async def health_check():
    """Liveness probe: reports API health and whether the scheduler runs."""
    return {"status": "healthy", "scheduler": scan_scheduler.is_running}
|
||||
|
||||
|
||||
# Serve the built frontend (frontend/dist) when it exists; otherwise fall
# back to a JSON root endpoint so the API is still usable in dev mode.
frontend_dist = Path(__file__).parent.parent.parent / "frontend" / "dist"

if frontend_dist.exists():
    # Static assets (JS/CSS bundles built by the frontend)
    app.mount("/assets", StaticFiles(directory=str(frontend_dist / "assets")), name="assets")

    # Root route serving the SPA entry point
    @app.get("/")
    async def serve_frontend():
        """Serve the Vue frontend's index.html."""
        index_file = frontend_dist / "index.html"
        if index_file.exists():
            return FileResponse(index_file)
        return {
            "name": "IPWatch API",
            "version": "1.0.0",
            "status": "running",
            "error": "Frontend non trouvé"
        }

    # SPA catch-all: every non-API path gets index.html so Vue Router can
    # resolve client-side routes. Registered last so API routes win.
    @app.get("/{full_path:path}")
    async def catch_all(full_path: str):
        """Catch-all for Vue Router (SPA)."""
        # Never shadow API or WebSocket routes
        if full_path.startswith("api/") or full_path.startswith("ws"):
            return {"error": "Not found"}

        # Serve index.html for every other route
        index_file = frontend_dist / "index.html"
        if index_file.exists():
            return FileResponse(index_file)
        return {"error": "Frontend non trouvé"}
else:
    @app.get("/")
    async def root():
        """Root endpoint (development mode, frontend not built)."""
        return {
            "name": "IPWatch API",
            "version": "1.0.0",
            "status": "running",
            "note": "Frontend non buildé - utilisez le mode dev"
        }
|
||||
|
||||
|
||||
if __name__ == "__main__":
    import uvicorn

    # Development entry point: auto-reloading server on port 8080.
    uvicorn.run(
        "backend.app.main:app",
        host="0.0.0.0",
        port=8080,
        reload=True
    )
|
||||
6
backend/app/models/__init__.py
Normal file
6
backend/app/models/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""
|
||||
Modèles SQLAlchemy pour IPWatch
|
||||
"""
|
||||
from .ip import IP, IPHistory
|
||||
|
||||
__all__ = ["IP", "IPHistory"]
|
||||
82
backend/app/models/ip.py
Normal file
82
backend/app/models/ip.py
Normal file
@@ -0,0 +1,82 @@
|
||||
"""
|
||||
Modèles de données pour les adresses IP et leur historique
|
||||
Basé sur modele-donnees.md
|
||||
"""
|
||||
from sqlalchemy import Column, String, Boolean, DateTime, Integer, ForeignKey, Index, JSON
|
||||
from sqlalchemy.orm import relationship
|
||||
from datetime import datetime
|
||||
from backend.app.core.database import Base
|
||||
|
||||
|
||||
class IP(Base):
    """
    Main table of IP addresses.

    One row per address: user-assigned metadata (name/known/location/host),
    network facts from the last scan (status, MAC, vendor, hostname, open
    ports) and first/last-seen timestamps.
    """
    __tablename__ = "ip"

    # Primary key: the address itself, in string form
    ip = Column(String, primary_key=True, index=True)

    # User-assigned metadata
    name = Column(String, nullable=True)                # friendly name given to the IP
    known = Column(Boolean, default=False, index=True)  # flagged as a known device
    location = Column(String, nullable=True)            # location label (e.g. "Bureau")
    host = Column(String, nullable=True)                # host type (e.g. "PC", "Imprimante")

    # Timestamps
    first_seen = Column(DateTime, default=datetime.utcnow)  # first detection
    last_seen = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)  # refreshed on update

    # Last observed state: "online", "offline" or "unknown"
    last_status = Column(String, index=True)

    # Network facts gathered by scans
    mac = Column(String, nullable=True)       # MAC address
    vendor = Column(String, nullable=True)    # manufacturer (MAC lookup)
    hostname = Column(String, nullable=True)  # network hostname

    # Open ports stored as a JSON list
    open_ports = Column(JSON, default=list)

    # History rows are deleted together with the IP (delete-orphan cascade)
    history = relationship("IPHistory", back_populates="ip_ref", cascade="all, delete-orphan")

    def __repr__(self):
        return f"<IP {self.ip} - {self.last_status} - {self.name or 'unnamed'}>"
|
||||
|
||||
|
||||
class IPHistory(Base):
    """
    IP state history table.

    One row per IP per scan, pruned to the configured retention window
    (24h by default) by the cleanup job.
    """
    __tablename__ = "ip_history"

    # Auto-incremented surrogate key
    id = Column(Integer, primary_key=True, autoincrement=True)

    # FK to the parent IP; rows are removed when the IP is deleted
    ip = Column(String, ForeignKey("ip.ip", ondelete="CASCADE"), nullable=False, index=True)

    # When this state was recorded
    timestamp = Column(DateTime, default=datetime.utcnow, index=True, nullable=False)

    # State at that moment: "online" or "offline"
    status = Column(String, nullable=False)

    # Open ports at that moment (JSON list)
    open_ports = Column(JSON, default=list)

    # Back-reference to the parent IP row
    ip_ref = relationship("IP", back_populates="history")

    def __repr__(self):
        return f"<IPHistory {self.ip} - {self.timestamp} - {self.status}>"
|
||||
|
||||
|
||||
# Explicit named indexes. The columns involved already carry index=True
# above, so these duplicate those indexes under stable, explicit names.
Index('idx_ip_last_status', IP.last_status)
Index('idx_ip_history_timestamp', IPHistory.timestamp)
Index('idx_ip_history_ip', IPHistory.ip)
|
||||
8
backend/app/routers/__init__.py
Normal file
8
backend/app/routers/__init__.py
Normal file
@@ -0,0 +1,8 @@
|
||||
"""
|
||||
Routers API pour IPWatch
|
||||
"""
|
||||
from .ips import router as ips_router
|
||||
from .scan import router as scan_router
|
||||
from .websocket import router as websocket_router
|
||||
|
||||
__all__ = ["ips_router", "scan_router", "websocket_router"]
|
||||
216
backend/app/routers/ips.py
Normal file
216
backend/app/routers/ips.py
Normal file
@@ -0,0 +1,216 @@
|
||||
"""
|
||||
Endpoints API pour la gestion des IPs
|
||||
"""
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import desc
|
||||
from typing import List, Optional
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from backend.app.core.database import get_db
|
||||
from backend.app.models.ip import IP, IPHistory
|
||||
from pydantic import BaseModel
|
||||
|
||||
router = APIRouter(prefix="/api/ips", tags=["IPs"])
|
||||
|
||||
|
||||
# Schémas Pydantic pour validation
|
||||
class IPUpdate(BaseModel):
    """Request body for PUT /api/ips/{ip}; all fields optional (partial update)."""
    name: Optional[str] = None
    known: Optional[bool] = None
    location: Optional[str] = None
    host: Optional[str] = None
|
||||
|
||||
|
||||
class IPResponse(BaseModel):
    """Response schema mirroring the IP ORM model."""
    ip: str
    name: Optional[str]
    known: bool
    location: Optional[str]
    host: Optional[str]
    first_seen: Optional[datetime]
    last_seen: Optional[datetime]
    last_status: Optional[str]
    mac: Optional[str]
    vendor: Optional[str]
    hostname: Optional[str]
    open_ports: List[int]

    class Config:
        # Allow building the schema from ORM objects (pydantic v2 name)
        from_attributes = True
|
||||
|
||||
|
||||
class IPHistoryResponse(BaseModel):
    """Response schema mirroring the IPHistory ORM model."""
    id: int
    ip: str
    timestamp: datetime
    status: str
    open_ports: List[int]

    class Config:
        # Allow building the schema from ORM objects (pydantic v2 name)
        from_attributes = True
|
||||
|
||||
|
||||
@router.get("/", response_model=List[IPResponse])
async def get_all_ips(
    status: Optional[str] = None,
    known: Optional[bool] = None,
    db: Session = Depends(get_db)
):
    """Return every IP row, optionally filtered.

    Args:
        status: Keep only rows with this last_status ("online"/"offline").
        known: Keep only known (True) or unknown (False) IPs.
        db: Database session.

    Returns:
        The matching IP rows.
    """
    q = db.query(IP)

    # Apply each filter only when the caller provided it. Note `known`
    # is tested against None so an explicit False still filters.
    if status:
        q = q.filter(IP.last_status == status)
    if known is not None:
        q = q.filter(IP.known == known)

    return q.all()
|
||||
|
||||
|
||||
@router.get("/{ip_address}", response_model=IPResponse)
async def get_ip(ip_address: str, db: Session = Depends(get_db)):
    """Return the details of a single IP.

    Args:
        ip_address: The address to look up.
        db: Database session.

    Returns:
        The IP row.

    Raises:
        HTTPException: 404 when the address is not in the database.
    """
    record = db.query(IP).filter(IP.ip == ip_address).first()
    if record is None:
        raise HTTPException(status_code=404, detail="IP non trouvée")
    return record
|
||||
|
||||
|
||||
@router.put("/{ip_address}", response_model=IPResponse)
async def update_ip(
    ip_address: str,
    ip_update: IPUpdate,
    db: Session = Depends(get_db)
):
    """
    Update the editable fields of an IP (partial update).

    Args:
        ip_address: The address to update.
        ip_update: Fields to change; unset fields are left untouched.
        db: Database session.

    Returns:
        The updated IP row.

    Raises:
        HTTPException: 404 when the address is not in the database.
    """
    ip = db.query(IP).filter(IP.ip == ip_address).first()

    if not ip:
        raise HTTPException(status_code=404, detail="IP non trouvée")

    # pydantic v2 renamed .dict() to .model_dump(); the file's schemas use
    # the v2 `from_attributes` config, so prefer the v2 API but keep the
    # v1 fallback for compatibility.
    if hasattr(ip_update, "model_dump"):
        update_data = ip_update.model_dump(exclude_unset=True)
    else:
        update_data = ip_update.dict(exclude_unset=True)

    # Only fields the caller actually sent are applied.
    for field, value in update_data.items():
        setattr(ip, field, value)

    db.commit()
    db.refresh(ip)

    return ip
|
||||
|
||||
|
||||
@router.delete("/{ip_address}")
async def delete_ip(ip_address: str, db: Session = Depends(get_db)):
    """Delete an IP row (its history goes with it via the ORM cascade).

    Args:
        ip_address: The address to delete.
        db: Database session.

    Returns:
        A confirmation message.

    Raises:
        HTTPException: 404 when the address is not in the database.
    """
    record = db.query(IP).filter(IP.ip == ip_address).first()
    if record is None:
        raise HTTPException(status_code=404, detail="IP non trouvée")

    db.delete(record)
    db.commit()

    return {"message": f"IP {ip_address} supprimée"}
|
||||
|
||||
|
||||
@router.get("/{ip_address}/history", response_model=List[IPHistoryResponse])
async def get_ip_history(
    ip_address: str,
    hours: int = 24,
    db: Session = Depends(get_db)
):
    """Return an IP's history from the last *hours* hours, newest first.

    Args:
        ip_address: The address whose history is requested.
        hours: Size of the time window (default 24h).
        db: Database session.

    Returns:
        History rows within the window, most recent first.

    Raises:
        HTTPException: 404 when the IP itself does not exist.
    """
    # The IP must exist before we query its history.
    if db.query(IP).filter(IP.ip == ip_address).first() is None:
        raise HTTPException(status_code=404, detail="IP non trouvée")

    cutoff = datetime.utcnow() - timedelta(hours=hours)

    return (
        db.query(IPHistory)
        .filter(IPHistory.ip == ip_address, IPHistory.timestamp >= cutoff)
        .order_by(desc(IPHistory.timestamp))
        .all()
    )
|
||||
|
||||
|
||||
@router.get("/stats/summary")
async def get_stats(db: Session = Depends(get_db)):
    """
    Return global network statistics.

    Returns:
        Counts: total, online, offline, known, unknown.
    """
    total = db.query(IP).count()
    online = db.query(IP).filter(IP.last_status == "online").count()
    offline = db.query(IP).filter(IP.last_status == "offline").count()

    # .is_(True) is the SQLAlchemy idiom for boolean comparison. Computing
    # "unknown" as the complement guarantees known + unknown == total even
    # for rows where `known` is NULL, which the original pair of
    # `== True` / `== False` counts would both have excluded.
    known = db.query(IP).filter(IP.known.is_(True)).count()
    unknown = total - known

    return {
        "total": total,
        "online": online,
        "offline": offline,
        "known": known,
        "unknown": unknown
    }
|
||||
201
backend/app/routers/scan.py
Normal file
201
backend/app/routers/scan.py
Normal file
@@ -0,0 +1,201 @@
|
||||
"""
|
||||
Endpoints API pour le contrôle des scans réseau
|
||||
"""
|
||||
from fastapi import APIRouter, Depends, BackgroundTasks
|
||||
from sqlalchemy.orm import Session
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, Any
|
||||
|
||||
from backend.app.core.database import get_db
|
||||
from backend.app.core.config import config_manager
|
||||
from backend.app.models.ip import IP, IPHistory
|
||||
from backend.app.services.network import NetworkScanner
|
||||
from backend.app.services.websocket import ws_manager
|
||||
|
||||
router = APIRouter(prefix="/api/scan", tags=["Scan"])
|
||||
|
||||
|
||||
async def perform_scan(db: Session):
    """
    Run one full network scan and persist the results.

    Steps: broadcast scan start → read config → expand port list →
    ping/ARP/port scan via NetworkScanner → upsert every discovered IP →
    append one history row per IP → single commit → broadcast completion
    stats. On any error the transaction is rolled back and the error is
    printed (the scan is best-effort; the scheduler retries later).

    Args:
        db: SQLAlchemy session (the caller owns its lifecycle).
    """
    try:
        print(f"[{datetime.now()}] Début du scan réseau...")

        # Notify clients a scan started; WS errors must not abort the scan.
        try:
            await ws_manager.broadcast_scan_start()
        except Exception as e:
            print(f"Erreur broadcast start (ignorée): {e}")

        # Current configuration (must have been loaded at startup)
        config = config_manager.config
        print(f"[{datetime.now()}] Config chargée: {config.network.cidr}")

        scanner = NetworkScanner(
            cidr=config.network.cidr,
            timeout=config.scan.timeout
        )

        # Expand configured entries ("80" or "8000-8010") into plain ints
        port_list = []
        for port_range in config.ports.ranges:
            if '-' in port_range:
                start, end = map(int, port_range.split('-'))
                port_list.extend(range(start, end + 1))
            else:
                port_list.append(int(port_range))

        print(f"[{datetime.now()}] Ports à scanner: {port_list}")

        # Known-IP metadata from config, forwarded to the scanner
        known_ips = config.ip_classes
        print(f"[{datetime.now()}] IPs connues: {len(known_ips)}")

        print(f"[{datetime.now()}] Lancement du scan (parallélisme: {config.scan.parallel_pings})...")
        scan_results = await scanner.full_scan(
            known_ips=known_ips,
            port_list=port_list,
            max_concurrent=config.scan.parallel_pings
        )
        print(f"[{datetime.now()}] Scan terminé: {len(scan_results)} IPs trouvées")

        # Upsert results while collecting stats for the completion broadcast
        stats = {
            "total": 0,
            "online": 0,
            "offline": 0,
            "new": 0,
            "updated": 0
        }

        for ip_address, ip_data in scan_results.items():
            stats["total"] += 1

            if ip_data["last_status"] == "online":
                stats["online"] += 1
            else:
                stats["offline"] += 1

            # Does this address already have a row?
            existing_ip = db.query(IP).filter(IP.ip == ip_address).first()

            if existing_ip:
                # Update in place; the "or" fallbacks keep previously known
                # MAC/vendor/hostname when this scan returned nothing fresh.
                old_status = existing_ip.last_status

                existing_ip.last_status = ip_data["last_status"]
                if ip_data["last_seen"]:
                    existing_ip.last_seen = ip_data["last_seen"]
                existing_ip.mac = ip_data.get("mac") or existing_ip.mac
                existing_ip.vendor = ip_data.get("vendor") or existing_ip.vendor
                existing_ip.hostname = ip_data.get("hostname") or existing_ip.hostname
                existing_ip.open_ports = ip_data.get("open_ports", [])

                # Only broadcast when the online/offline state flipped
                if old_status != ip_data["last_status"]:
                    await ws_manager.broadcast_ip_update({
                        "ip": ip_address,
                        "old_status": old_status,
                        "new_status": ip_data["last_status"]
                    })

                stats["updated"] += 1

            else:
                # First sighting: create the row
                new_ip = IP(
                    ip=ip_address,
                    name=ip_data.get("name"),
                    known=ip_data.get("known", False),
                    location=ip_data.get("location"),
                    host=ip_data.get("host"),
                    first_seen=datetime.utcnow(),
                    last_seen=ip_data.get("last_seen") or datetime.utcnow(),
                    last_status=ip_data["last_status"],
                    mac=ip_data.get("mac"),
                    vendor=ip_data.get("vendor"),
                    hostname=ip_data.get("hostname"),
                    open_ports=ip_data.get("open_ports", [])
                )
                db.add(new_ip)

                # Notify clients of the newly discovered address
                await ws_manager.broadcast_new_ip({
                    "ip": ip_address,
                    "status": ip_data["last_status"],
                    "known": ip_data.get("known", False)
                })

                stats["new"] += 1

            # One history row per IP per scan
            history_entry = IPHistory(
                ip=ip_address,
                timestamp=datetime.utcnow(),
                status=ip_data["last_status"],
                open_ports=ip_data.get("open_ports", [])
            )
            db.add(history_entry)

        # Single commit for the whole scan (all-or-nothing)
        db.commit()

        # Completion broadcast carries the collected stats
        await ws_manager.broadcast_scan_complete(stats)

        print(f"[{datetime.now()}] Scan terminé: {stats}")

    except Exception as e:
        # Best-effort: log and roll back; the next scheduled run retries.
        print(f"Erreur lors du scan: {e}")
        db.rollback()
|
||||
|
||||
|
||||
@router.post("/start")
async def start_scan(background_tasks: BackgroundTasks, db: Session = Depends(get_db)):
    """Trigger an immediate network scan.

    The scan runs as a background task after the response is sent.

    Returns:
        A confirmation message with the request timestamp.
    """
    # Queue the scan; FastAPI runs it once the response has gone out.
    background_tasks.add_task(perform_scan, db)

    return {"message": "Scan réseau démarré", "timestamp": datetime.utcnow()}
|
||||
|
||||
|
||||
@router.post("/cleanup-history")
async def cleanup_history(hours: int = 24, db: Session = Depends(get_db)):
    """
    Delete history entries older than *hours* hours.

    Args:
        hours: Retention window to keep (default: 24h).
        db: Database session.

    Returns:
        Confirmation with the number of deleted rows.
    """
    cutoff_date = datetime.utcnow() - timedelta(hours=hours)

    # Bulk delete of everything before the cutoff
    deleted = db.query(IPHistory).filter(
        IPHistory.timestamp < cutoff_date
    ).delete()

    db.commit()

    return {
        # fixed: was an f-string with no placeholder (lint F541)
        "message": "Historique nettoyé",
        "deleted_entries": deleted,
        "older_than_hours": hours
    }
|
||||
35
backend/app/routers/websocket.py
Normal file
35
backend/app/routers/websocket.py
Normal file
@@ -0,0 +1,35 @@
|
||||
"""
|
||||
Endpoint WebSocket pour notifications temps réel
|
||||
"""
|
||||
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
|
||||
from backend.app.services.websocket import ws_manager
|
||||
|
||||
router = APIRouter(tags=["WebSocket"])
|
||||
|
||||
|
||||
@router.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
    """
    WebSocket endpoint for real-time notifications.

    Registers the connection with ws_manager (which pushes broadcasts),
    then loops reading client messages purely as a keep-alive: a "ping"
    text gets a "pong" reply. The connection is deregistered on normal
    disconnect or on any error.

    Args:
        websocket: The client WebSocket connection.
    """
    await ws_manager.connect(websocket)

    try:
        # Receive loop (keep-alive); server-to-client data flows through
        # ws_manager broadcasts, not through this handler.
        while True:
            data = await websocket.receive_text()

            # Minimal heartbeat protocol: answer "ping" with "pong".
            # Other client messages are currently ignored.
            if data == "ping":
                await ws_manager.send_personal_message("pong", websocket)

    except WebSocketDisconnect:
        ws_manager.disconnect(websocket)
    except Exception as e:
        print(f"Erreur WebSocket: {e}")
        ws_manager.disconnect(websocket)
|
||||
7
backend/app/services/__init__.py
Normal file
7
backend/app/services/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
"""
|
||||
Services réseau pour IPWatch
|
||||
"""
|
||||
from .network import NetworkScanner
|
||||
from .scheduler import ScanScheduler
|
||||
|
||||
__all__ = ["NetworkScanner", "ScanScheduler"]
|
||||
295
backend/app/services/network.py
Normal file
295
backend/app/services/network.py
Normal file
@@ -0,0 +1,295 @@
|
||||
"""
|
||||
Modules réseau pour scan d'IP, ping, ARP et port scan
|
||||
Implémente le workflow de scan selon workflow-scan.md
|
||||
"""
|
||||
import asyncio
|
||||
import ipaddress
|
||||
import platform
|
||||
import subprocess
|
||||
import socket
|
||||
from typing import List, Dict, Optional, Tuple
|
||||
from datetime import datetime
|
||||
import re
|
||||
|
||||
# Scapy pour ARP
|
||||
try:
|
||||
from scapy.all import ARP, Ether, srp
|
||||
SCAPY_AVAILABLE = True
|
||||
except ImportError:
|
||||
SCAPY_AVAILABLE = False
|
||||
|
||||
|
||||
class NetworkScanner:
|
||||
"""Scanner réseau principal"""
|
||||
|
||||
def __init__(self, cidr: str, timeout: float = 1.0):
    """
    Initialise the network scanner.

    Args:
        cidr: Network in CIDR notation (e.g. "192.168.1.0/24").
        timeout: Per-ping / per-connection timeout in seconds.
    """
    self.cidr = cidr
    self.timeout = timeout
    # strict=False tolerates host bits set in the CIDR (e.g. 192.168.1.5/24)
    self.network = ipaddress.ip_network(cidr, strict=False)
|
||||
|
||||
def generate_ip_list(self) -> List[str]:
    """
    Build the full host-address list for the configured network.

    Returns:
        Every usable host address of the CIDR, as strings.
    """
    # .hosts() already excludes the network and broadcast addresses.
    return list(map(str, self.network.hosts()))
|
||||
|
||||
async def ping(self, ip: str) -> bool:
    """
    Ping an IP address once (async).

    Args:
        ip: Address to ping.

    Returns:
        True when the host replies within ``self.timeout`` seconds; False
        on timeout or any failure (e.g. no ping binary available).
    """
    # OS-specific flags: Windows uses -n/-w (ms), Unix uses -c/-W (s).
    is_windows = platform.system().lower() == 'windows'
    count_flag = '-n' if is_windows else '-c'
    timeout_flag = '-w' if is_windows else '-W'

    # Windows expects milliseconds; Unix ping takes whole seconds — clamp
    # to at least 1 so a sub-second timeout does not become "-W 0".
    timeout_value = int(self.timeout * 1000) if is_windows else max(1, int(self.timeout))

    command = ['ping', count_flag, '1', timeout_flag, str(timeout_value), ip]

    process = None
    try:
        # Run ping asynchronously, discarding all output.
        process = await asyncio.create_subprocess_exec(
            *command,
            stdout=asyncio.subprocess.DEVNULL,
            stderr=asyncio.subprocess.DEVNULL
        )
        await asyncio.wait_for(process.wait(), timeout=self.timeout + 1)
        return process.returncode == 0
    except asyncio.TimeoutError:
        # Don't leak the child process: the original left it running
        # whenever wait_for() timed out.
        if process is not None:
            process.kill()
            await process.wait()
        return False
    except Exception:
        # e.g. FileNotFoundError when no ping binary is installed
        return False
|
||||
|
||||
async def ping_parallel(self, ip_list: List[str], max_concurrent: int = 50) -> Dict[str, bool]:
|
||||
"""
|
||||
Ping multiple IPs en parallèle
|
||||
|
||||
Args:
|
||||
ip_list: Liste des IPs à pinger
|
||||
max_concurrent: Nombre maximum de pings simultanés
|
||||
|
||||
Returns:
|
||||
Dictionnaire {ip: online_status}
|
||||
"""
|
||||
results = {}
|
||||
semaphore = asyncio.Semaphore(max_concurrent)
|
||||
|
||||
async def ping_with_semaphore(ip: str):
|
||||
async with semaphore:
|
||||
results[ip] = await self.ping(ip)
|
||||
|
||||
# Lancer tous les pings en parallèle avec limite
|
||||
await asyncio.gather(*[ping_with_semaphore(ip) for ip in ip_list])
|
||||
|
||||
return results
|
||||
|
||||
def get_arp_table(self) -> Dict[str, Tuple[str, str]]:
|
||||
"""
|
||||
Récupère la table ARP du système
|
||||
|
||||
Returns:
|
||||
Dictionnaire {ip: (mac, vendor)}
|
||||
"""
|
||||
arp_data = {}
|
||||
|
||||
if SCAPY_AVAILABLE:
|
||||
try:
|
||||
# Utiliser Scapy pour ARP scan
|
||||
answered, _ = srp(
|
||||
Ether(dst="ff:ff:ff:ff:ff:ff") / ARP(pdst=self.cidr),
|
||||
timeout=2,
|
||||
verbose=False
|
||||
)
|
||||
|
||||
for sent, received in answered:
|
||||
ip = received.psrc
|
||||
mac = received.hwsrc
|
||||
vendor = self._get_mac_vendor(mac)
|
||||
arp_data[ip] = (mac, vendor)
|
||||
except Exception as e:
|
||||
print(f"Erreur ARP scan avec Scapy: {e}")
|
||||
else:
|
||||
# Fallback: parser la table ARP système
|
||||
try:
|
||||
if platform.system().lower() == 'windows':
|
||||
output = subprocess.check_output(['arp', '-a'], text=True)
|
||||
pattern = r'(\d+\.\d+\.\d+\.\d+)\s+([0-9a-fA-F-:]+)'
|
||||
else:
|
||||
output = subprocess.check_output(['arp', '-n'], text=True)
|
||||
pattern = r'(\d+\.\d+\.\d+\.\d+)\s+\w+\s+([0-9a-fA-F:]+)'
|
||||
|
||||
matches = re.findall(pattern, output)
|
||||
for ip, mac in matches:
|
||||
if ip in [str(h) for h in self.network.hosts()]:
|
||||
vendor = self._get_mac_vendor(mac)
|
||||
arp_data[ip] = (mac, vendor)
|
||||
except Exception as e:
|
||||
print(f"Erreur lecture table ARP: {e}")
|
||||
|
||||
return arp_data
|
||||
|
||||
def _get_mac_vendor(self, mac: str) -> str:
|
||||
"""
|
||||
Lookup du fabricant depuis l'adresse MAC
|
||||
Simplifié pour l'instant - peut être étendu avec une vraie DB OUI
|
||||
|
||||
Args:
|
||||
mac: Adresse MAC
|
||||
|
||||
Returns:
|
||||
Nom du fabricant ou "Unknown"
|
||||
"""
|
||||
# TODO: Implémenter lookup OUI complet
|
||||
# Pour l'instant, retourne un placeholder
|
||||
mac_prefix = mac[:8].upper().replace(':', '').replace('-', '')
|
||||
|
||||
# Mini DB des fabricants courants
|
||||
vendors = {
|
||||
"00:0C:29": "VMware",
|
||||
"00:50:56": "VMware",
|
||||
"08:00:27": "VirtualBox",
|
||||
"DC:A6:32": "Raspberry Pi",
|
||||
"B8:27:EB": "Raspberry Pi",
|
||||
}
|
||||
|
||||
for prefix, vendor in vendors.items():
|
||||
if mac.upper().startswith(prefix.replace(':', '')):
|
||||
return vendor
|
||||
|
||||
return "Unknown"
|
||||
|
||||
async def scan_ports(self, ip: str, ports: List[int]) -> List[int]:
|
||||
"""
|
||||
Scan des ports TCP sur une IP
|
||||
|
||||
Args:
|
||||
ip: Adresse IP cible
|
||||
ports: Liste des ports à scanner
|
||||
|
||||
Returns:
|
||||
Liste des ports ouverts
|
||||
"""
|
||||
open_ports = []
|
||||
|
||||
async def check_port(port: int) -> Optional[int]:
|
||||
try:
|
||||
# Tentative de connexion TCP
|
||||
reader, writer = await asyncio.wait_for(
|
||||
asyncio.open_connection(ip, port),
|
||||
timeout=self.timeout
|
||||
)
|
||||
writer.close()
|
||||
await writer.wait_closed()
|
||||
return port
|
||||
except:
|
||||
return None
|
||||
|
||||
# Scanner tous les ports en parallèle
|
||||
results = await asyncio.gather(*[check_port(p) for p in ports])
|
||||
open_ports = [p for p in results if p is not None]
|
||||
|
||||
return open_ports
|
||||
|
||||
def get_hostname(self, ip: str) -> Optional[str]:
|
||||
"""
|
||||
Résolution DNS inversée pour obtenir le hostname
|
||||
|
||||
Args:
|
||||
ip: Adresse IP
|
||||
|
||||
Returns:
|
||||
Hostname ou None
|
||||
"""
|
||||
try:
|
||||
hostname, _, _ = socket.gethostbyaddr(ip)
|
||||
return hostname
|
||||
except:
|
||||
return None
|
||||
|
||||
def classify_ip_status(self, is_online: bool, is_known: bool) -> str:
|
||||
"""
|
||||
Classification de l'état d'une IP
|
||||
|
||||
Args:
|
||||
is_online: IP en ligne
|
||||
is_known: IP connue dans la config
|
||||
|
||||
Returns:
|
||||
État: "online", "offline"
|
||||
"""
|
||||
return "online" if is_online else "offline"
|
||||
|
||||
async def full_scan(self, known_ips: Dict[str, Dict], port_list: List[int], max_concurrent: int = 50) -> Dict[str, Dict]:
|
||||
"""
|
||||
Scan complet du réseau selon workflow-scan.md
|
||||
|
||||
Args:
|
||||
known_ips: Dictionnaire des IPs connues depuis config
|
||||
port_list: Liste des ports à scanner
|
||||
max_concurrent: Pings simultanés max
|
||||
|
||||
Returns:
|
||||
Dictionnaire des résultats de scan pour chaque IP
|
||||
"""
|
||||
results = {}
|
||||
|
||||
# 1. Générer liste IP du CIDR
|
||||
ip_list = self.generate_ip_list()
|
||||
|
||||
# 2. Ping parallélisé
|
||||
ping_results = await self.ping_parallel(ip_list, max_concurrent)
|
||||
|
||||
# 3. ARP + MAC vendor
|
||||
arp_table = self.get_arp_table()
|
||||
|
||||
# 4. Pour chaque IP
|
||||
for ip in ip_list:
|
||||
is_online = ping_results.get(ip, False)
|
||||
is_known = ip in known_ips
|
||||
|
||||
ip_data = {
|
||||
"ip": ip,
|
||||
"known": is_known,
|
||||
"last_status": self.classify_ip_status(is_online, is_known),
|
||||
"last_seen": datetime.utcnow() if is_online else None,
|
||||
"mac": None,
|
||||
"vendor": None,
|
||||
"hostname": None,
|
||||
"open_ports": [],
|
||||
}
|
||||
|
||||
# Ajouter infos connues
|
||||
if is_known:
|
||||
ip_data.update(known_ips[ip])
|
||||
|
||||
# Infos ARP
|
||||
if ip in arp_table:
|
||||
mac, vendor = arp_table[ip]
|
||||
ip_data["mac"] = mac
|
||||
ip_data["vendor"] = vendor
|
||||
|
||||
# Hostname
|
||||
if is_online:
|
||||
hostname = self.get_hostname(ip)
|
||||
if hostname:
|
||||
ip_data["hostname"] = hostname
|
||||
|
||||
# 5. Port scan (uniquement si online)
|
||||
if is_online and port_list:
|
||||
open_ports = await self.scan_ports(ip, port_list)
|
||||
ip_data["open_ports"] = open_ports
|
||||
|
||||
results[ip] = ip_data
|
||||
|
||||
return results
|
||||
103
backend/app/services/scheduler.py
Normal file
103
backend/app/services/scheduler.py
Normal file
@@ -0,0 +1,103 @@
|
||||
"""
|
||||
Scheduler APScheduler pour les scans réseau périodiques
|
||||
"""
|
||||
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||
from apscheduler.triggers.interval import IntervalTrigger
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional, Callable
|
||||
import asyncio
|
||||
|
||||
|
||||
class ScanScheduler:
    """Wrapper around APScheduler for the periodic network-scan jobs."""

    def __init__(self):
        """Create the AsyncIO scheduler (idle until start() is called)."""
        self.scheduler = AsyncIOScheduler()
        self.is_running = False

    def start(self):
        """Start the scheduler (no-op if it is already running)."""
        if self.is_running:
            return
        self.scheduler.start()
        self.is_running = True
        print(f"[{datetime.now()}] Scheduler démarré")

    def stop(self):
        """Shut the scheduler down (no-op if it is not running)."""
        if not self.is_running:
            return
        self.scheduler.shutdown()
        self.is_running = False
        print(f"[{datetime.now()}] Scheduler arrêté")

    def add_ping_scan_job(self, scan_function: Callable, interval_seconds: int = 60):
        """
        Register the periodic ping-scan job.

        Args:
            scan_function: async function to run on each tick
            interval_seconds: interval between runs, in seconds
        """
        trigger = IntervalTrigger(seconds=interval_seconds)
        # replace_existing lets the interval be reconfigured at runtime.
        self.scheduler.add_job(
            scan_function,
            trigger=trigger,
            id='ping_scan',
            name='Scan Ping périodique',
            replace_existing=True,
        )
        print(f"Tâche ping_scan configurée: toutes les {interval_seconds}s")

    def add_port_scan_job(self, scan_function: Callable, interval_seconds: int = 300):
        """
        Register the periodic port-scan job.

        Args:
            scan_function: async function to run on each tick
            interval_seconds: interval between runs, in seconds
        """
        trigger = IntervalTrigger(seconds=interval_seconds)
        # replace_existing lets the interval be reconfigured at runtime.
        self.scheduler.add_job(
            scan_function,
            trigger=trigger,
            id='port_scan',
            name='Scan ports périodique',
            replace_existing=True,
        )
        print(f"Tâche port_scan configurée: toutes les {interval_seconds}s")

    def add_cleanup_job(self, cleanup_function: Callable, interval_hours: int = 1):
        """
        Register the periodic history-cleanup job.

        Args:
            cleanup_function: async cleanup function to run on each tick
            interval_hours: interval between runs, in hours
        """
        trigger = IntervalTrigger(hours=interval_hours)
        # replace_existing lets the interval be reconfigured at runtime.
        self.scheduler.add_job(
            cleanup_function,
            trigger=trigger,
            id='history_cleanup',
            name='Nettoyage historique',
            replace_existing=True,
        )
        print(f"Tâche cleanup configurée: toutes les {interval_hours}h")

    def remove_job(self, job_id: str):
        """
        Remove a scheduled job by id, logging (not raising) on failure.

        Args:
            job_id: id of the job to remove
        """
        try:
            self.scheduler.remove_job(job_id)
        except Exception as e:
            print(f"Erreur suppression tâche {job_id}: {e}")
        else:
            print(f"Tâche {job_id} supprimée")

    def get_jobs(self):
        """Return the currently scheduled jobs."""
        return self.scheduler.get_jobs()
|
||||
|
||||
|
||||
# Global scheduler instance shared by the application
scan_scheduler = ScanScheduler()
|
||||
125
backend/app/services/websocket.py
Normal file
125
backend/app/services/websocket.py
Normal file
@@ -0,0 +1,125 @@
|
||||
"""
|
||||
Gestionnaire WebSocket pour notifications temps réel
|
||||
"""
|
||||
from fastapi import WebSocket
|
||||
from typing import List, Dict, Any
|
||||
import json
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class WebSocketManager:
    """Manages WebSocket connections and real-time broadcast notifications."""

    def __init__(self):
        """Initialize the manager with no active connections."""
        # Currently connected clients.
        self.active_connections: List[WebSocket] = []

    async def connect(self, websocket: WebSocket):
        """
        Accept a new WebSocket connection and start tracking it.

        Args:
            websocket: WebSocket instance
        """
        await websocket.accept()
        self.active_connections.append(websocket)
        print(f"[{datetime.now()}] Nouvelle connexion WebSocket. Total: {len(self.active_connections)}")

    def disconnect(self, websocket: WebSocket):
        """
        Stop tracking a WebSocket client.

        Args:
            websocket: WebSocket instance to disconnect
        """
        if websocket in self.active_connections:
            self.active_connections.remove(websocket)
            print(f"[{datetime.now()}] Déconnexion WebSocket. Total: {len(self.active_connections)}")

    async def send_personal_message(self, message: str, websocket: WebSocket):
        """
        Send a message to one specific client; send errors are logged,
        not raised.

        Args:
            message: text message to send
            websocket: recipient client
        """
        try:
            await websocket.send_text(message)
        except Exception as e:
            print(f"Erreur envoi message personnel: {e}")

    async def broadcast(self, message: Dict[str, Any]):
        """
        Broadcast a message to every connected client.

        Args:
            message: message payload (serialized to JSON; a "timestamp"
                field is added to the outgoing copy)
        """
        # BUG FIX: build a copy before injecting the timestamp so the
        # caller's dict is not mutated as a side effect.
        payload = {**message, "timestamp": datetime.utcnow().isoformat()}
        json_message = json.dumps(payload)

        # Failed connections are collected and pruned after the loop;
        # removing while iterating would skip entries.
        disconnected = []

        for connection in self.active_connections:
            try:
                await connection.send_text(json_message)
            except Exception as e:
                print(f"Erreur broadcast: {e}")
                disconnected.append(connection)

        # Clean up dead connections.
        for conn in disconnected:
            self.disconnect(conn)

    async def broadcast_scan_start(self):
        """Notify all clients that a scan has started."""
        await self.broadcast({
            "type": "scan_start",
            "message": "Scan réseau démarré"
        })

    async def broadcast_scan_complete(self, stats: Dict[str, int]):
        """
        Notify all clients that a scan finished, with statistics.

        Args:
            stats: scan statistics (total, online, offline, etc.)
        """
        await self.broadcast({
            "type": "scan_complete",
            "message": "Scan réseau terminé",
            "stats": stats
        })

    async def broadcast_ip_update(self, ip_data: Dict[str, Any]):
        """
        Notify all clients of an IP state change.

        Args:
            ip_data: updated IP data
        """
        await self.broadcast({
            "type": "ip_update",
            "data": ip_data
        })

    async def broadcast_new_ip(self, ip_data: Dict[str, Any]):
        """
        Notify all clients that a new IP was detected.

        Args:
            ip_data: data of the new IP
        """
        await self.broadcast({
            "type": "new_ip",
            "data": ip_data,
            "message": f"Nouvelle IP détectée: {ip_data.get('ip')}"
        })
|
||||
|
||||
|
||||
# Global WebSocket manager instance shared by the application
ws_manager = WebSocketManager()
|
||||
Reference in New Issue
Block a user