last
This commit is contained in:
@@ -1,16 +1,34 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import APIRouter, Body, HTTPException
|
||||
from fastapi import APIRouter, Body, HTTPException, UploadFile, File
|
||||
from fastapi.responses import FileResponse
|
||||
|
||||
from backend.app.core.config import BackendConfig, CONFIG_PATH, load_config
|
||||
from backend.app.db.database import DEFAULT_DATABASE_PATH
|
||||
|
||||
router = APIRouter(prefix="/config", tags=["config"])

# Candidate locations of the frontend config file (dev checkout vs docker image).
# The public/ copy is the one served to the browser in development; the backend
# copy is the fallback used inside the container.
FRONTEND_CONFIG_PATH = (
    Path(__file__).resolve().parent.parent.parent.parent / "frontend" / "public" / "config_frontend.json"
)
FRONTEND_CONFIG_FALLBACK_PATH = (
    Path(__file__).resolve().parent.parent.parent.parent / "backend" / "config_frontend.json"
)
|
||||
|
||||
|
||||
def _get_frontend_config_path() -> Path | None:
    """Return the first existing frontend config path, or None if neither exists."""
    for candidate in (FRONTEND_CONFIG_PATH, FRONTEND_CONFIG_FALLBACK_PATH):
        if candidate.exists():
            return candidate
    return None
|
||||
|
||||
|
||||
@router.get("/backend", response_model=BackendConfig)
|
||||
@@ -23,8 +41,21 @@ def read_backend_config() -> BackendConfig:
|
||||
def update_backend_config(payload: dict = Body(...)) -> BackendConfig:
|
||||
current = load_config()
|
||||
try:
|
||||
# validation via Pydantic avant écriture
|
||||
updated = current.model_copy(update=payload)
|
||||
# Fusion profonde des configs (nécessaire pour les modèles imbriqués Pydantic v2)
|
||||
def deep_merge(base: dict, update: dict) -> dict:
|
||||
result = base.copy()
|
||||
for key, value in update.items():
|
||||
if key in result and isinstance(result[key], dict) and isinstance(value, dict):
|
||||
result[key] = deep_merge(result[key], value)
|
||||
else:
|
||||
result[key] = value
|
||||
return result
|
||||
|
||||
# Convertir en dict, fusionner, puis revalider
|
||||
current_dict = current.model_dump()
|
||||
merged = deep_merge(current_dict, payload)
|
||||
updated = BackendConfig.model_validate(merged)
|
||||
|
||||
CONFIG_PATH.write_text(updated.model_dump_json(indent=2), encoding="utf-8")
|
||||
load_config.cache_clear()
|
||||
return load_config()
|
||||
@@ -35,9 +66,10 @@ def update_backend_config(payload: dict = Body(...)) -> BackendConfig:
|
||||
@router.get("/frontend")
def read_frontend_config() -> dict:
    """Return the frontend configuration as a plain dict.

    Raises:
        HTTPException: 404 when no frontend config file exists at any known path.
    """
    config_path = _get_frontend_config_path()
    if not config_path:
        raise HTTPException(status_code=404, detail="Config frontend introuvable")
    return json.loads(config_path.read_text(encoding="utf-8"))
|
||||
|
||||
|
||||
@router.put("/frontend")
|
||||
@@ -46,8 +78,9 @@ def update_frontend_config(payload: dict = Body(...)) -> dict:
|
||||
try:
|
||||
# Charger la config actuelle
|
||||
current = {}
|
||||
if FRONTEND_CONFIG_PATH.exists():
|
||||
current = json.loads(FRONTEND_CONFIG_PATH.read_text(encoding="utf-8"))
|
||||
config_path = _get_frontend_config_path()
|
||||
if config_path and config_path.exists():
|
||||
current = json.loads(config_path.read_text(encoding="utf-8"))
|
||||
|
||||
# Fusion profonde des configs
|
||||
def deep_merge(base: dict, update: dict) -> dict:
|
||||
@@ -60,19 +93,87 @@ def update_frontend_config(payload: dict = Body(...)) -> dict:
|
||||
return result
|
||||
|
||||
updated = deep_merge(current, payload)
|
||||
FRONTEND_CONFIG_PATH.write_text(
|
||||
json.dumps(updated, indent=2, ensure_ascii=False),
|
||||
encoding="utf-8"
|
||||
)
|
||||
target_paths = []
|
||||
if FRONTEND_CONFIG_PATH.parent.exists():
|
||||
target_paths.append(FRONTEND_CONFIG_PATH)
|
||||
if FRONTEND_CONFIG_FALLBACK_PATH.parent.exists():
|
||||
target_paths.append(FRONTEND_CONFIG_FALLBACK_PATH)
|
||||
if not target_paths:
|
||||
target_paths.append(FRONTEND_CONFIG_FALLBACK_PATH)
|
||||
|
||||
# Mettre à jour aussi dans public/ pour le frontend dev
|
||||
public_config = FRONTEND_CONFIG_PATH.parent / "public" / "config_frontend.json"
|
||||
if public_config.parent.exists():
|
||||
public_config.write_text(
|
||||
for target in target_paths:
|
||||
target.write_text(
|
||||
json.dumps(updated, indent=2, ensure_ascii=False),
|
||||
encoding="utf-8"
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
return updated
|
||||
except Exception as exc:
|
||||
raise HTTPException(status_code=400, detail=str(exc))
|
||||
|
||||
|
||||
# ==================== Database Backup ====================
|
||||
|
||||
|
||||
@router.get("/database/info")
def database_info() -> dict:
    """Return metadata about the database file: path, name, size, last modified.

    Raises:
        HTTPException: 404 when the database file does not exist.
    """
    if not DEFAULT_DATABASE_PATH.exists():
        raise HTTPException(status_code=404, detail="Base de données introuvable")

    file_stat = DEFAULT_DATABASE_PATH.stat()
    size_bytes = file_stat.st_size
    return {
        "path": str(DEFAULT_DATABASE_PATH),
        "filename": DEFAULT_DATABASE_PATH.name,
        "size_bytes": size_bytes,
        "size_mb": round(size_bytes / (1024 * 1024), 2),
        "modified_at": datetime.fromtimestamp(file_stat.st_mtime).isoformat(),
    }
|
||||
|
||||
|
||||
@router.get("/database/backup")
def download_database():
    """Serve a point-in-time copy of the database file for download.

    A timestamped copy is made first so the download is not affected by
    locks or concurrent writes on the live database file.

    Raises:
        HTTPException: 404 when the database file does not exist.
    """
    if not DEFAULT_DATABASE_PATH.exists():
        raise HTTPException(status_code=404, detail="Base de données introuvable")

    # FileResponse cannot delete its source file after sending without a
    # background task, so clean up copies left by previous downloads here to
    # keep the temporary backup files from accumulating indefinitely.
    for stale in DEFAULT_DATABASE_PATH.parent.glob("backup_*.db"):
        try:
            stale.unlink()
        except OSError:
            # Best effort: an undeletable stale copy is not fatal.
            pass

    # Compute the timestamp once so the on-disk copy and the download
    # filename always match (the original called strftime twice and could
    # produce different names across a second boundary).
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    backup_path = DEFAULT_DATABASE_PATH.parent / f"backup_{timestamp}.db"
    shutil.copy2(DEFAULT_DATABASE_PATH, backup_path)

    return FileResponse(
        path=backup_path,
        filename=f"suivi_produit_backup_{timestamp}.db",
        media_type="application/x-sqlite3",
    )
|
||||
|
||||
|
||||
@router.post("/database/restore")
async def restore_database(file: UploadFile = File(...)) -> dict:
    """Restore the database from an uploaded .db file.

    A safety copy of the current database is written before overwriting it;
    if the overwrite fails, the previous file is put back from that copy.

    Raises:
        HTTPException: 400 for a non-.db or oversized (>100MB) upload,
            500 when the restore could not be completed.
    """
    # file.filename can be None depending on the client; guard before endswith
    # so a missing filename yields a clean 400 instead of an AttributeError.
    if not file.filename or not file.filename.endswith(".db"):
        raise HTTPException(status_code=400, detail="Le fichier doit être un .db")

    # Size cap: 100MB.
    content = await file.read()
    if len(content) > 100 * 1024 * 1024:
        raise HTTPException(status_code=400, detail="Fichier trop volumineux (max 100MB)")

    # Safety backup before touching the live file.
    backup_before: Path | None = None
    if DEFAULT_DATABASE_PATH.exists():
        backup_before = (
            DEFAULT_DATABASE_PATH.parent
            / f"before_restore_{datetime.now().strftime('%Y%m%d_%H%M%S')}.db"
        )
        shutil.copy2(DEFAULT_DATABASE_PATH, backup_before)

    try:
        DEFAULT_DATABASE_PATH.write_bytes(content)
        return {
            "success": True,
            "message": "Base de données restaurée avec succès",
            "size_bytes": len(content),
        }
    except Exception as exc:
        # Roll back to the pre-restore snapshot when we have one, so a failed
        # write does not leave a truncated/corrupt database behind.
        if backup_before is not None and backup_before.exists():
            shutil.copy2(backup_before, DEFAULT_DATABASE_PATH)
        raise HTTPException(status_code=500, detail=f"Erreur lors de la restauration: {exc}")
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi import APIRouter, Depends, HTTPException, Response, status
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from backend.app.api.deps import get_db
|
||||
@@ -46,22 +46,25 @@ def update_product(product_id: int, payload: schemas.ProductUpdate, db: Session
|
||||
return crud.update_product(db, product, payload)
|
||||
|
||||
|
||||
@router.delete("/{product_id}", status_code=status.HTTP_204_NO_CONTENT, response_model=None)
def delete_product(product_id: int, db: Session = Depends(get_db)) -> Response:
    """Permanently delete a product and return an empty 204 response.

    Raises:
        HTTPException: 404 when the product does not exist.
    """
    product = crud.get_product(db, product_id)
    if not product:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Produit introuvable")
    # Hard delete from the database (no soft-delete flag).
    crud.remove_product(db, product)
    return Response(status_code=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
@router.get("/{product_id}/snapshots", response_model=list[schemas.ProductSnapshotRead])
def list_snapshots(
    product_id: int,
    days: int | None = None,
    limit: int = 1000,
    db: Session = Depends(get_db),
) -> list[schemas.ProductSnapshotRead]:
    """Return a product's snapshots, optionally restricted to the last *days* days.

    Raises:
        HTTPException: 404 when the product does not exist.
    """
    product = crud.get_product(db, product_id)
    if not product:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Produit introuvable")
    return crud.list_snapshots(db, product_id, days=days, limit=limit)
|
||||
|
||||
@@ -3,6 +3,7 @@ from __future__ import annotations
|
||||
from fastapi import APIRouter, BackgroundTasks, HTTPException
|
||||
from pydantic import BaseModel, HttpUrl
|
||||
|
||||
from backend.app.core.scheduler import get_scheduler_status, trigger_next_run
|
||||
from backend.app.scraper.runner import scrape_all, scrape_preview, scrape_product
|
||||
|
||||
router = APIRouter(prefix="/scrape", tags=["scrape"])
|
||||
@@ -35,3 +36,15 @@ def trigger_single(product_id: int, background_tasks: BackgroundTasks):
|
||||
def trigger_all(background_tasks: BackgroundTasks):
    """Schedule a background scrape of all products and acknowledge immediately."""
    background_tasks.add_task(scrape_all)
    return {"statut": "planifie_tout"}
|
||||
|
||||
|
||||
@router.get("/scheduler/status")
def scheduler_status():
    """Return the current state of the automatic scraping scheduler."""
    return get_scheduler_status()
|
||||
|
||||
|
||||
@router.post("/scheduler/trigger")
def scheduler_trigger():
    """Force the next scheduled scrape to run immediately."""
    return trigger_next_run()
|
||||
|
||||
66
backend/app/api/routes_stats.py
Normal file
66
backend/app/api/routes_stats.py
Normal file
@@ -0,0 +1,66 @@
|
||||
"""Endpoint pour les statistiques système."""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import psutil
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
|
||||
router = APIRouter(prefix="/stats", tags=["stats"])
|
||||
|
||||
|
||||
class SystemStats(BaseModel):
    """System resource statistics reported by the backend process."""

    cpu_percent: float  # process CPU usage in percent (sampled over 0.1s)
    memory_mb: float  # process resident memory (RSS) in MiB
    memory_percent: float  # process RSS as a share of total system memory
    data_size_mb: float  # combined size of the data/ and logs/ directories, MiB
|
||||
|
||||
|
||||
def get_directory_size(path: Path) -> int:
    """Return the total size in bytes of all files under *path*.

    Returns 0 when *path* does not exist or is not a directory. Files that
    disappear or are unreadable during the walk are skipped silently.
    """
    total = 0
    if path.exists() and path.is_dir():
        for entry in path.rglob("*"):
            if entry.is_file():
                try:
                    total += entry.stat().st_size
                except OSError:
                    # PermissionError is a subclass of OSError, so one
                    # clause covers both (original listed both redundantly).
                    pass
    return total
|
||||
|
||||
|
||||
@router.get("", response_model=SystemStats)
def get_stats() -> SystemStats:
    """Return CPU, memory and on-disk data statistics for the backend process."""
    # Per-process CPU and memory, with CPU sampled over a short 0.1s window.
    proc = psutil.Process(os.getpid())
    cpu = proc.cpu_percent(interval=0.1)
    rss = proc.memory_info().rss

    # Express the process RSS as a share of total physical memory.
    pct = (rss / psutil.virtual_memory().total) * 100

    # Docker layout first; fall back to the local source tree in development.
    root = Path("/app/backend")
    if not root.exists():
        root = Path(__file__).parent.parent.parent

    # Disk footprint of the data and log directories combined.
    disk_bytes = get_directory_size(root / "data") + get_directory_size(root / "logs")

    return SystemStats(
        cpu_percent=round(cpu, 1),
        memory_mb=round(rss / (1024 * 1024), 1),
        memory_percent=round(pct, 1),
        data_size_mb=round(disk_bytes / (1024 * 1024), 1),
    )
|
||||
Reference in New Issue
Block a user