before gemini

This commit is contained in:
2026-02-22 22:18:32 +01:00
parent fb33540bb0
commit 9db5cbf236
147 changed files with 7948 additions and 531 deletions

View File

@@ -1,18 +1,28 @@
import os
import shutil
import time
import json
import tempfile
import zipfile
from datetime import datetime, timezone
from pathlib import Path
from typing import Any
from fastapi import APIRouter, Depends
from fastapi.responses import FileResponse
from starlette.background import BackgroundTask
from sqlmodel import Session, select
from app.database import get_session
from app.models.settings import UserSettings
from app.config import UPLOAD_DIR
from app.config import DATABASE_URL, UPLOAD_DIR
# Router for user settings, debug stats and backup endpoints.
router = APIRouter(tags=["réglages"])
# NOTE(review): these appear to cache the previous CPU usage sample
# (microseconds) and its timestamp so a later call can compute a usage
# delta — confirm against the CPU-stats helper, which is out of view here.
_PREV_CPU_USAGE_USEC: int | None = None
_PREV_CPU_TS: float | None = None
# File extensions considered "text-like" when selecting loose data files
# for inclusion in the backup archive (see _zip_data_text_files).
_TEXT_EXTENSIONS = {
".txt", ".md", ".markdown", ".json", ".csv", ".log", ".ini", ".yaml", ".yml", ".xml"
}
def _read_int_from_paths(paths: list[str]) -> int | None:
@@ -113,6 +123,68 @@ def _disk_stats() -> dict[str, Any]:
}
def _safe_remove(path: str) -> None:
try:
os.remove(path)
except OSError:
pass
def _resolve_sqlite_db_path() -> Path | None:
    """Extract the on-disk database file path from DATABASE_URL.

    Returns None when the URL is not a file-based ``sqlite:///`` URL or
    names no file at all. A relative path is resolved against the
    current working directory; an absolute one is returned unchanged.
    """
    scheme = "sqlite:///"
    if not DATABASE_URL.startswith(scheme):
        return None
    location = DATABASE_URL[len(scheme):]
    if not location:
        return None
    candidate = Path(location)
    return candidate if candidate.is_absolute() else (Path.cwd() / candidate).resolve()
def _zip_directory(zipf: zipfile.ZipFile, source_dir: Path, arc_prefix: str) -> int:
count = 0
if not source_dir.is_dir():
return count
for root, _, files in os.walk(source_dir):
root_path = Path(root)
for name in files:
file_path = root_path / name
if not file_path.is_file():
continue
rel = file_path.relative_to(source_dir)
arcname = str(Path(arc_prefix) / rel)
zipf.write(file_path, arcname=arcname)
count += 1
return count
def _zip_data_text_files(
    zipf: zipfile.ZipFile,
    data_root: Path,
    db_path: Path | None,
    uploads_dir: Path,
) -> int:
    """Archive loose text-like files under *data_root* into the ZIP.

    Skips the database file itself, anything located inside
    *uploads_dir* (already archived separately), and any file whose
    extension is not in _TEXT_EXTENSIONS. Entries are stored under
    ``data_text/``. Returns the number of files added; a missing or
    non-directory *data_root* yields 0.
    """
    added = 0
    if not data_root.is_dir():
        return added
    for current_dir, _subdirs, filenames in os.walk(data_root):
        base = Path(current_dir)
        for filename in filenames:
            candidate = base / filename
            excluded = (
                (db_path is not None and candidate == db_path)
                or uploads_dir in candidate.parents
                or candidate.suffix.lower() not in _TEXT_EXTENSIONS
            )
            if excluded:
                continue
            relative = candidate.relative_to(data_root)
            zipf.write(candidate, arcname=str(Path("data_text") / relative))
            added += 1
    return added
@router.get("/settings")
def get_settings(session: Session = Depends(get_session)):
rows = session.exec(select(UserSettings)).all()
@@ -161,3 +233,51 @@ def get_debug_system_stats() -> dict[str, Any]:
"memory": _memory_stats(),
"disk": _disk_stats(),
}
@router.get("/settings/backup/download")
def download_backup_zip() -> FileResponse:
    """Build and stream a ZIP backup of the database, uploads and text data.

    The archive contains:
      - ``db/<name>``      : the SQLite database file (when resolvable)
      - ``uploads/...``    : every file under UPLOAD_DIR
      - ``data_text/...``  : text-like files under the data root
      - ``manifest.json``  : metadata describing what was included

    The ZIP is written to a temporary file that a background task removes
    once the response has been streamed. If archiving fails midway, the
    temporary file is removed immediately and the exception re-raised
    (previously it leaked on the failure path).
    """
    now = datetime.now(timezone.utc)
    ts = now.strftime("%Y%m%d_%H%M%S")
    db_path = _resolve_sqlite_db_path()
    uploads_dir = Path(UPLOAD_DIR).resolve()
    # Fall back to the uploads parent when DATABASE_URL is not sqlite-based.
    data_root = db_path.parent if db_path else uploads_dir.parent
    fd, tmp_zip_path = tempfile.mkstemp(prefix=f"jardin_backup_{ts}_", suffix=".zip")
    os.close(fd)  # ZipFile reopens the path itself; we only need the name.
    tmp_zip = Path(tmp_zip_path)
    stats = {
        "database_files": 0,
        "upload_files": 0,
        "text_files": 0,
    }
    try:
        with zipfile.ZipFile(tmp_zip, mode="w", compression=zipfile.ZIP_DEFLATED, compresslevel=6) as zipf:
            if db_path and db_path.is_file():
                zipf.write(db_path, arcname=f"db/{db_path.name}")
                stats["database_files"] = 1
            stats["upload_files"] = _zip_directory(zipf, uploads_dir, "uploads")
            stats["text_files"] = _zip_data_text_files(zipf, data_root, db_path, uploads_dir)
            manifest = {
                "generated_at_utc": now.isoformat(),
                "database_url": DATABASE_URL,
                "paths": {
                    "database_path": str(db_path) if db_path else None,
                    "uploads_path": str(uploads_dir),
                    "data_root": str(data_root),
                },
                "included": stats,
                "text_extensions": sorted(_TEXT_EXTENSIONS),
            }
            zipf.writestr("manifest.json", json.dumps(manifest, ensure_ascii=False, indent=2))
    except Exception:
        # Don't leak the temp file if archiving fails before the response.
        _safe_remove(str(tmp_zip))
        raise
    download_name = f"jardin_backup_{ts}.zip"
    return FileResponse(
        path=str(tmp_zip),
        media_type="application/zip",
        filename=download_name,
        # Deletes the temp file after the response body has been sent.
        background=BackgroundTask(_safe_remove, str(tmp_zip)),
    )