aorus
This commit is contained in:
@@ -8,9 +8,10 @@ from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile
|
||||
from fastapi.responses import FileResponse
|
||||
from starlette.background import BackgroundTask
|
||||
from sqlalchemy import text
|
||||
from sqlmodel import Session, select
|
||||
from app.database import get_session
|
||||
from app.models.settings import UserSettings
|
||||
@@ -283,6 +284,185 @@ def download_backup_zip() -> FileResponse:
|
||||
)
|
||||
|
||||
|
||||
def _merge_db_add_only(backup_db_path: Path, current_db_path: Path) -> dict[str, int]:
|
||||
"""Insère dans la BDD courante les lignes absentes de la BDD de sauvegarde (INSERT OR IGNORE)."""
|
||||
import sqlite3
|
||||
|
||||
stats = {"rows_added": 0, "rows_skipped": 0}
|
||||
backup_conn = sqlite3.connect(str(backup_db_path))
|
||||
current_conn = sqlite3.connect(str(current_db_path))
|
||||
current_conn.execute("PRAGMA foreign_keys=OFF")
|
||||
|
||||
try:
|
||||
tables = backup_conn.execute(
|
||||
"SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
|
||||
).fetchall()
|
||||
|
||||
for (table,) in tables:
|
||||
try:
|
||||
cur = backup_conn.execute(f'SELECT * FROM "{table}"')
|
||||
cols = [d[0] for d in cur.description]
|
||||
rows = cur.fetchall()
|
||||
if not rows:
|
||||
continue
|
||||
col_names = ", ".join(f'"{c}"' for c in cols)
|
||||
placeholders = ", ".join(["?"] * len(cols))
|
||||
before = current_conn.execute(f'SELECT COUNT(*) FROM "{table}"').fetchone()[0]
|
||||
current_conn.executemany(
|
||||
f'INSERT OR IGNORE INTO "{table}" ({col_names}) VALUES ({placeholders})',
|
||||
rows,
|
||||
)
|
||||
after = current_conn.execute(f'SELECT COUNT(*) FROM "{table}"').fetchone()[0]
|
||||
added = after - before
|
||||
stats["rows_added"] += added
|
||||
stats["rows_skipped"] += len(rows) - added
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
current_conn.commit()
|
||||
finally:
|
||||
backup_conn.close()
|
||||
current_conn.close()
|
||||
|
||||
return stats
|
||||
|
||||
|
||||
@router.post("/settings/backup/restore")
async def restore_backup(
    file: UploadFile = File(...),
    overwrite: bool = Form(default=True),
) -> dict[str, Any]:
    """Restore a backup ZIP archive (database + uploads).

    With ``overwrite=True`` the current database file is replaced wholesale
    and upload files are overwritten; with ``overwrite=False`` only missing
    upload files are copied and database rows are merged add-only via
    ``_merge_db_add_only``.

    Returns a dict with ``ok`` plus French-keyed counters
    (``uploads_copies``, ``uploads_ignores``, ``db_restauree``,
    ``db_lignes_ajoutees``, ``erreurs``).

    Raises:
        HTTPException: 400 if the payload is not a ZIP archive, 500 on any
            other failure during restore.
    """
    import shutil

    db_path = _resolve_sqlite_db_path()
    uploads_dir = Path(UPLOAD_DIR).resolve()

    # NOTE(review): the whole upload is read into memory — no size limit is
    # enforced here; confirm an upstream limit exists for large backups.
    data = await file.read()
    # Quick magic-number check: every ZIP starts with the bytes "PK".
    if len(data) < 4 or data[:2] != b'PK':
        raise HTTPException(400, "Le fichier n'est pas une archive ZIP valide.")

    # mkstemp returns an open fd we do not need — close it immediately and
    # reuse only the path.
    fd, tmp_zip_path = tempfile.mkstemp(suffix=".zip")
    os.close(fd)
    tmp_zip = Path(tmp_zip_path)
    tmp_extract = Path(tempfile.mkdtemp(prefix="jardin_restore_"))

    try:
        tmp_zip.write_bytes(data)

        # NOTE(review): extraction of an untrusted archive — CPython's
        # extractall sanitizes absolute paths and ".." members, but confirm
        # the deployment runs on CPython before relying on that.
        with zipfile.ZipFile(tmp_zip, "r") as zipf:
            zipf.extractall(str(tmp_extract))

        stats: dict[str, Any] = {
            "uploads_copies": 0,
            "uploads_ignores": 0,
            "db_restauree": False,
            "db_lignes_ajoutees": 0,
            "erreurs": 0,
        }

        # --- Uploads: copy files from the archive's uploads/ tree ---
        backup_uploads = tmp_extract / "uploads"
        if backup_uploads.is_dir():
            uploads_dir.mkdir(parents=True, exist_ok=True)
            for src in backup_uploads.rglob("*"):
                if not src.is_file():
                    continue
                # Preserve the relative directory layout under uploads_dir.
                dst = uploads_dir / src.relative_to(backup_uploads)
                dst.parent.mkdir(parents=True, exist_ok=True)
                if overwrite or not dst.exists():
                    try:
                        shutil.copy2(str(src), str(dst))
                        stats["uploads_copies"] += 1
                    except Exception:
                        # Per-file failures are counted, not fatal.
                        stats["erreurs"] += 1
                else:
                    stats["uploads_ignores"] += 1

        # --- Database: archive is expected to contain db/*.db ---
        backup_db_dir = tmp_extract / "db"
        db_files = sorted(backup_db_dir.glob("*.db")) if backup_db_dir.is_dir() else []

        if db_files and db_path:
            # sorted() makes the pick deterministic when several .db files exist.
            backup_db_file = db_files[0]

            if overwrite:
                from app.database import engine
                # Best-effort WAL checkpoint so pending writes land in the
                # main .db file before it is replaced on disk.
                try:
                    with engine.connect() as conn:
                        conn.execute(text("PRAGMA wal_checkpoint(TRUNCATE)"))
                except Exception:
                    pass
                # Drop pooled connections so no handle points at the old file.
                engine.dispose()
                shutil.copy2(str(backup_db_file), str(db_path))
                stats["db_restauree"] = True
            else:
                # Add-only merge: existing rows win, backup rows fill gaps.
                merge = _merge_db_add_only(backup_db_file, db_path)
                stats["db_lignes_ajoutees"] = merge["rows_added"]
                stats["db_restauree"] = True

        return {"ok": True, **stats}

    except HTTPException:
        # Re-raise our own 400 untouched instead of wrapping it in a 500.
        raise
    except Exception as exc:
        raise HTTPException(500, f"Erreur lors de la restauration : {exc}") from exc
    finally:
        # Always clean up the temp ZIP and extraction directory.
        _safe_remove(str(tmp_zip))
        shutil.rmtree(str(tmp_extract), ignore_errors=True)
|
||||
|
||||
|
||||
@router.post("/settings/images/resize-all")
def resize_all_images(session: Session = Depends(get_session)) -> dict[str, Any]:
    """Downscale every library image that exceeds the configured maximum size.

    Reads the ``image_max_width`` user setting (default 1200 px; a value of 0
    or less means "keep original size" and is a no-op). Each media file whose
    width or height exceeds the limit is shrunk in place, preserving aspect
    ratio, and re-encoded as WEBP at quality 85.

    Args:
        session: Database session injected by FastAPI.

    Returns:
        Counters: ``redimensionnees`` (resized), ``ignorees`` (already small
        enough or missing on disk) and ``erreurs`` (files that failed).
    """
    from PIL import Image

    # Resolve the configured maximum dimension; fall back to 1200 px when the
    # setting is absent or not a valid integer.
    setting = session.exec(select(UserSettings).where(UserSettings.cle == "image_max_width")).first()
    max_px = 1200
    if setting:
        try:
            max_px = int(setting.valeur)
        except (ValueError, TypeError):
            pass

    if max_px <= 0:
        return {"ok": True, "redimensionnees": 0, "ignorees": 0, "erreurs": 0,
                "message": "Taille originale configurée — aucune modification."}

    from app.models.media import Media as MediaModel
    urls = session.exec(select(MediaModel.url)).all()

    uploads_dir = Path(UPLOAD_DIR).resolve()
    redimensionnees = 0
    ignorees = 0
    erreurs = 0

    for url in urls:
        if not url:
            continue
        # /uploads/filename.webp → data/uploads/filename.webp
        filename = url.lstrip("/").removeprefix("uploads/")
        file_path = uploads_dir / filename
        if not file_path.is_file():
            # DB row without a file on disk — count as ignored, not an error.
            ignorees += 1
            continue
        try:
            with Image.open(file_path) as img:
                w, h = img.size
                # Both dimensions already within bounds: nothing to do.
                if w <= max_px and h <= max_px:
                    ignorees += 1
                    continue
                # Work on a copy so the context-managed source image stays
                # untouched, then overwrite the file in place as WEBP.
                img_copy = img.copy()
                img_copy.thumbnail((max_px, max_px), Image.LANCZOS)
                img_copy.save(file_path, "WEBP", quality=85)
                redimensionnees += 1
        except Exception:
            # Corrupt or unreadable image: count and keep going.
            erreurs += 1

    return {"ok": True, "redimensionnees": redimensionnees, "ignorees": ignorees, "erreurs": erreurs}
|
||||
|
||||
|
||||
@router.post("/settings/backup/samba")
|
||||
def backup_to_samba(session: Session = Depends(get_session)) -> dict[str, Any]:
|
||||
"""Envoie une sauvegarde ZIP vers un partage Samba/CIFS."""
|
||||
|
||||
Reference in New Issue
Block a user