import ali

This commit is contained in:
2026-02-01 01:45:51 +01:00
parent bdbfa4e25a
commit 46d6d88ce5
48 changed files with 6714 additions and 185 deletions
+6
View File
@@ -1,11 +1,17 @@
"""Package des routers API."""
from app.routers.categories import router as categories_router
from app.routers.documents import router as documents_router
from app.routers.import_csv import router as import_router
from app.routers.items import router as items_router
from app.routers.locations import router as locations_router
from app.routers.shops import router as shops_router
__all__ = [
"categories_router",
"documents_router",
"import_router",
"locations_router",
"items_router",
"shops_router",
]
+249
View File
@@ -0,0 +1,249 @@
"""Router pour les documents (upload, téléchargement, suppression)."""
import os
import uuid
from pathlib import Path
from typing import Annotated
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
from fastapi.responses import FileResponse
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.config import settings
from app.core.database import get_db
from app.models.document import Document, DocumentType
from app.models.item import Item
from app.schemas.document import DocumentResponse, DocumentUpdate, DocumentUploadResponse
# Router for document upload/download/delete endpoints, mounted under /documents.
router = APIRouter(prefix="/documents", tags=["documents"])

# Allowed MIME types (client-declared content types accepted for upload)
ALLOWED_IMAGE_TYPES = {"image/jpeg", "image/png", "image/gif", "image/webp"}
ALLOWED_PDF_TYPES = {"application/pdf"}
ALLOWED_TYPES = ALLOWED_IMAGE_TYPES | ALLOWED_PDF_TYPES

# Maximum upload size: 10 MB
MAX_FILE_SIZE = 10 * 1024 * 1024
def get_upload_path(doc_type: DocumentType, filename: str) -> Path:
    """Build the on-disk storage path for an uploaded file.

    Photos go into the 'photos' subdirectory of the upload root,
    every other document type into 'documents'.
    """
    if doc_type == DocumentType.PHOTO:
        subdirectory = "photos"
    else:
        subdirectory = "documents"
    return settings.upload_dir_path / subdirectory / filename
def generate_unique_filename(original_name: str) -> str:
    """Return a collision-free filename: a random UUID4 plus the
    original file's extension, lowercased."""
    extension = Path(original_name).suffix
    return "{}{}".format(uuid.uuid4(), extension.lower())
async def validate_item_exists(session: AsyncSession, item_id: int) -> Item:
    """Fetch the item by primary key, raising a 404 when it does not exist."""
    item = await session.get(Item, item_id)
    if item is not None:
        return item
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail=f"Item {item_id} non trouvé"
    )
@router.post("/upload", response_model=DocumentUploadResponse, status_code=status.HTTP_201_CREATED)
async def upload_document(
    file: Annotated[UploadFile, File(description="Fichier à uploader")],
    item_id: Annotated[int, Form(description="ID de l'objet associé")],
    doc_type: Annotated[DocumentType, Form(description="Type de document")],
    description: Annotated[str | None, Form(description="Description optionnelle")] = None,
    session: AsyncSession = Depends(get_db),
):
    """Upload a document and attach it to an item.

    - Accepts images (JPEG, PNG, GIF, WebP) and PDF
    - Max size: 10 MB
    - Generates a unique filename to avoid collisions

    Raises:
        404 if the item does not exist; 400 for a disallowed MIME type,
        a 'photo' that is not an image, or an oversized file.
    """
    # Ensure the target item exists (404 otherwise)
    await validate_item_exists(session, item_id)
    # Validate the MIME type.
    # NOTE(review): content_type is client-declared and is not verified
    # against the actual file bytes — confirm whether sniffing is needed.
    if file.content_type not in ALLOWED_TYPES:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Type de fichier non autorisé : {file.content_type}. "
            f"Types acceptés : images (JPEG, PNG, GIF, WebP) et PDF"
        )
    # A 'photo' document must carry an image MIME type
    if doc_type == DocumentType.PHOTO and file.content_type not in ALLOWED_IMAGE_TYPES:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Le type 'photo' nécessite un fichier image"
        )
    # Read the whole file into memory (bounded by the size check just below)
    content = await file.read()
    file_size = len(content)
    # Enforce the 10 MB limit
    if file_size > MAX_FILE_SIZE:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Fichier trop volumineux ({file_size / 1024 / 1024:.1f} Mo). "
            f"Taille max : {MAX_FILE_SIZE / 1024 / 1024:.0f} Mo"
        )
    # Generate a unique name (UUID + original extension)
    unique_filename = generate_unique_filename(file.filename or "document")
    # Resolve the storage path ('photos' or 'documents' subdirectory)
    file_path = get_upload_path(doc_type, unique_filename)
    # Create the directory if needed
    file_path.parent.mkdir(parents=True, exist_ok=True)
    # Save the file to disk.
    # NOTE(review): the file is written before the DB commit; a commit
    # failure would leave an orphan file on disk.
    with open(file_path, "wb") as f:
        f.write(content)
    # Persist the DB row; file_path is stored relative to the upload root's parent
    relative_path = str(file_path.relative_to(settings.upload_dir_path.parent))
    document = Document(
        filename=unique_filename,
        original_name=file.filename or "document",
        type=doc_type,
        mime_type=file.content_type or "application/octet-stream",
        size_bytes=file_size,
        file_path=relative_path,
        description=description,
        item_id=item_id,
    )
    session.add(document)
    await session.commit()
    await session.refresh(document)
    return DocumentUploadResponse(
        id=document.id,
        filename=document.filename,
        original_name=document.original_name,
        type=document.type,
        mime_type=document.mime_type,
        size_bytes=document.size_bytes,
        message="Document uploadé avec succès"
    )
@router.get("/item/{item_id}", response_model=list[DocumentResponse])
async def get_item_documents(
    item_id: int,
    doc_type: DocumentType | None = None,
    session: AsyncSession = Depends(get_db),
):
    """Return all documents attached to an item, newest first.

    Args:
        item_id: ID of the owning item (404 if it does not exist).
        doc_type: optional filter on the document type.
    """
    from sqlalchemy import select

    await validate_item_exists(session, item_id)
    query = select(Document).where(Document.item_id == item_id)
    # Explicit `is not None` rather than truthiness: an enum member with a
    # falsy value would otherwise silently disable the filter.
    if doc_type is not None:
        query = query.where(Document.type == doc_type)
    query = query.order_by(Document.created_at.desc())
    result = await session.execute(query)
    documents = result.scalars().all()
    return [DocumentResponse.model_validate(doc) for doc in documents]
@router.get("/{document_id}", response_model=DocumentResponse)
async def get_document(
    document_id: int,
    session: AsyncSession = Depends(get_db),
):
    """Return the metadata of a single document, or 404 if unknown."""
    document = await session.get(Document, document_id)
    if document is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Document non trouvé"
        )
    return DocumentResponse.model_validate(document)
@router.get("/{document_id}/download")
async def download_document(
    document_id: int,
    session: AsyncSession = Depends(get_db),
):
    """Stream the stored file back to the client under its original name.

    404 when either the DB row or the file on disk is missing.
    """
    document = await session.get(Document, document_id)
    if document is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Document non trouvé"
        )
    stored_file = settings.upload_dir_path.parent / document.file_path
    if not stored_file.exists():
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Fichier non trouvé sur le disque"
        )
    return FileResponse(
        path=stored_file,
        filename=document.original_name,
        media_type=document.mime_type,
    )
@router.patch("/{document_id}", response_model=DocumentResponse)
async def update_document(
    document_id: int,
    data: DocumentUpdate,
    session: AsyncSession = Depends(get_db),
):
    """Apply a partial metadata update (only the fields the client sent)."""
    document = await session.get(Document, document_id)
    if document is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Document non trouvé"
        )
    # Only the explicitly-provided fields are written back
    changes = data.model_dump(exclude_unset=True)
    for attr_name, attr_value in changes.items():
        setattr(document, attr_name, attr_value)
    await session.commit()
    await session.refresh(document)
    return DocumentResponse.model_validate(document)
@router.delete("/{document_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_document(
    document_id: int,
    session: AsyncSession = Depends(get_db),
):
    """Delete a document: remove the file from disk, then the DB row.

    A missing file on disk is ignored so that a stale DB row can still
    be cleaned up. Raises 404 when the document does not exist.
    """
    document = await session.get(Document, document_id)
    if not document:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Document non trouvé"
        )
    # Delete the file. unlink(missing_ok=True) avoids the exists()/remove()
    # race of the check-then-delete pattern.
    file_path = settings.upload_dir_path.parent / document.file_path
    file_path.unlink(missing_ok=True)
    # Delete the DB row
    await session.delete(document)
    await session.commit()
+344
View File
@@ -0,0 +1,344 @@
"""Router pour l'import de fichiers CSV (AliExpress)."""
import csv
import io
import re
from datetime import date
from typing import Annotated
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
from app.models.item import Item, ItemStatus
from app.models.location import Location, LocationType
from app.repositories.item import ItemRepository
from app.repositories.location import LocationRepository
from app.repositories.shop import ShopRepository
# Router for CSV import endpoints, mounted under /import.
router = APIRouter(prefix="/import", tags=["Import"])
# Mapping of French month names to month numbers
MOIS_FR: dict[str, int] = {
"janv.": 1, "févr.": 2, "mars": 3, "avr.": 4,
"mai": 5, "juin": 6, "juil.": 7, "août": 8,
"sept.": 9, "oct.": 10, "nov.": 11, "déc.": 12,
}
def parse_date_fr(date_str: str) -> str | None:
"""Parse une date au format français AliExpress ('5 sept. 2025') → '2025-09-05'."""
if not date_str or not date_str.strip():
return None
date_str = date_str.strip()
# Format attendu : "5 sept. 2025"
match = re.match(r"(\d{1,2})\s+(\S+)\s+(\d{4})", date_str)
if not match:
return None
day, month_str, year = match.groups()
month = MOIS_FR.get(month_str.lower())
if month is None:
# Essai avec le mois tel quel (ex: "mars" sans point)
month = MOIS_FR.get(month_str.lower().rstrip("."))
if month is None:
return None
return f"{year}-{month:02d}-{int(day):02d}"
def parse_price(price_str: str) -> float | None:
"""Parse un prix depuis le CSV ('37.19' ou '4,59') → float."""
if not price_str or not price_str.strip():
return None
cleaned = price_str.strip().replace(",", ".")
try:
val = float(cleaned)
return val if val > 0 else None
except ValueError:
return None
def parse_quantity(qty_str: str) -> int:
    """Parse a CSV quantity string ('1.00' or '2') into an int, floored at 1."""
    text = (qty_str or "").strip()
    if not text:
        return 1
    try:
        parsed = int(float(text))
    except ValueError:
        return 1
    return parsed if parsed > 1 else 1
def fix_url(url: str) -> str | None:
"""Corrige les URLs AliExpress (ajoute https: si nécessaire)."""
if not url or not url.strip():
return None
url = url.strip()
if url.startswith("//"):
return f"https:{url}"
return url
def parse_attributes(attr_str: str) -> dict[str, str] | None:
"""Parse les attributs AliExpress en dictionnaire."""
if not attr_str or not attr_str.strip():
return None
parts = [p.strip() for p in attr_str.split(",") if p.strip()]
if not parts:
return None
result: dict[str, str] = {}
for i, part in enumerate(parts):
result[f"attribut_{i + 1}"] = part
return result
# Response schemas
class ImportPreviewItem(BaseModel):
    """One item parsed from the CSV, ready for the preview screen."""
    # Position in the parsed CSV; used by the import endpoint to select items
    index: int
    name: str
    price: float | None = None
    quantity: int = 1
    # ISO date string 'YYYY-MM-DD' produced by parse_date_fr
    purchase_date: str | None = None
    seller_name: str | None = None
    url: str | None = None
    image_url: str | None = None
    attributes: dict[str, str] | None = None
    order_id: str | None = None
    order_status: str | None = None
    # Total paid for the whole order, recovered from the CSV's totals row
    total_price: float | None = None
    # True when an Item with the same URL already exists in the database
    is_duplicate: bool = False
class ImportPreviewResponse(BaseModel):
    """Response of the import preview endpoint."""
    items: list[ImportPreviewItem]
    total_items: int
    # Human-readable parse errors, one per failed CSV row
    errors: list[str]
class ImportResultResponse(BaseModel):
    """Response after the import has actually been performed."""
    items_created: int
    shops_created: int
    # Human-readable errors for items that could not be created
    errors: list[str]
def parse_aliexpress_csv(content: str) -> tuple[list[ImportPreviewItem], list[str]]:
    """Parse the AliExpress CSV export and return (items, error messages).

    The export mixes two kinds of rows under one header: item rows (with an
    'Item title') and per-order total rows (no title but a 'Total price').
    A first pass collects the order totals, a second pass builds the items
    and attaches the matching total.
    """
    items: list[ImportPreviewItem] = []
    errors: list[str] = []
    # Strip the UTF-8 BOM if present (defensive: callers may already decode
    # with utf-8-sig, in which case this is a no-op)
    if content.startswith("\ufeff"):
        content = content[1:]
    reader = csv.DictReader(io.StringIO(content))
    # First pass: collect totals per order id to recover the real paid price
    totals_by_order: dict[str, float] = {}
    all_rows: list[dict[str, str]] = []
    for row in reader:
        all_rows.append(row)
        order_id = row.get("Order Id", "").strip()
        item_title = row.get("Item title", "").strip()
        total_price_str = row.get("Total price", "").strip()
        # Totals row: no item title but a total price
        if not item_title and total_price_str:
            price = parse_price(total_price_str)
            if price and order_id:
                totals_by_order[order_id] = price
    # Second pass: extract the items
    index = 0
    for row in all_rows:
        item_title = row.get("Item title", "").strip()
        if not item_title:
            continue  # Skip totals rows
        order_id = row.get("Order Id", "").strip()
        try:
            item = ImportPreviewItem(
                index=index,
                name=item_title,
                price=parse_price(row.get("Item price", "")),
                quantity=parse_quantity(row.get("Item quantity", "")),
                purchase_date=parse_date_fr(row.get("Order date", "")),
                seller_name=row.get("Store Name", "").strip() or None,
                url=fix_url(row.get("Item product link", "")),
                image_url=fix_url(row.get("Item image url", "")),
                attributes=parse_attributes(row.get("Item attributes", "")),
                order_id=order_id or None,
                order_status=row.get("Order Status", "").strip() or None,
                total_price=totals_by_order.get(order_id),
            )
            items.append(item)
            index += 1
        except Exception as e:
            # NOTE: the reported number is the item index, not the CSV line number
            errors.append(f"Ligne {index}: {e}")
            index += 1
    return items, errors
@router.post(
    "/csv/aliexpress/preview",
    response_model=ImportPreviewResponse,
    summary="Prévisualiser un import CSV AliExpress",
)
async def preview_aliexpress_csv(
    file: Annotated[UploadFile, File(description="Fichier CSV AliExpress")],
    session: AsyncSession = Depends(get_db),
) -> ImportPreviewResponse:
    """Parse the CSV and return a preview of the items to import.

    Items whose URL already exists on an Item row are flagged
    `is_duplicate` so the UI can warn before importing.
    Raises 400 when the uploaded file is not a .csv.
    """
    if not file.filename or not file.filename.lower().endswith(".csv"):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Le fichier doit être un CSV (.csv)",
        )
    content = await file.read()
    try:
        text = content.decode("utf-8-sig")
    except UnicodeDecodeError:
        # Fallback for non-UTF-8 exports
        text = content.decode("latin-1")
    items, errors = parse_aliexpress_csv(text)
    # Detect duplicates by URL with a single IN query instead of one
    # round-trip per item.
    urls = [item.url for item in items if item.url]
    if urls:
        result = await session.execute(
            select(Item.url).where(Item.url.in_(urls))
        )
        existing_urls = set(result.scalars().all())
        for item in items:
            if item.url in existing_urls:
                item.is_duplicate = True
    return ImportPreviewResponse(
        items=items,
        total_items=len(items),
        errors=errors,
    )
@router.post(
    "/csv/aliexpress/import",
    response_model=ImportResultResponse,
    status_code=status.HTTP_201_CREATED,
    summary="Importer les items depuis un CSV AliExpress",
)
async def import_aliexpress_csv(
    file: Annotated[UploadFile, File(description="Fichier CSV AliExpress")],
    category_id: Annotated[int, Form(description="Catégorie par défaut")],
    item_status: Annotated[str, Form(description="Statut par défaut")] = "in_stock",
    selected_indices: Annotated[str, Form(description="Indices des items à importer (virgules)")] = "",
    session: AsyncSession = Depends(get_db),
) -> ImportResultResponse:
    """Import the selected items from the CSV into the inventory.

    All imported items share the given category, the provided default status,
    a single "AliExpress" shop and a "Non assigné" location (both created on
    first use). `selected_indices` is a comma-separated list of preview
    indices; when empty, every parsed item is imported.

    Raises 400 for a non-CSV file, malformed indices, or an unknown status.
    """
    if not file.filename or not file.filename.lower().endswith(".csv"):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Le fichier doit être un CSV (.csv)",
        )
    content = await file.read()
    try:
        text = content.decode("utf-8-sig")
    except UnicodeDecodeError:
        # Fallback for non-UTF-8 exports
        text = content.decode("latin-1")
    items, parse_errors = parse_aliexpress_csv(text)
    # Keep only the items whose preview index was selected
    if selected_indices.strip():
        try:
            selected = set(int(i.strip()) for i in selected_indices.split(",") if i.strip())
            items = [item for item in items if item.index in selected]
        except ValueError:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Format d'indices invalide",
            )
    # Validate the default status against the ItemStatus enum
    try:
        status_enum = ItemStatus(item_status)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Statut invalide : {item_status}",
        )
    item_repo = ItemRepository(session)
    shop_repo = ShopRepository(session)
    # Surface the CSV parse errors too — previously they were collected
    # but silently dropped from the response.
    errors: list[str] = list(parse_errors)
    items_created = 0
    shop_created = False
    # Resolve or create the single "AliExpress" shop
    aliexpress_shop = await shop_repo.get_by_name("AliExpress")
    if not aliexpress_shop:
        aliexpress_shop = await shop_repo.create(
            name="AliExpress",
            url="https://www.aliexpress.com",
        )
        shop_created = True
    shop_id = aliexpress_shop.id
    # Resolve or create the "Non assigné" (unassigned) location
    loc_result = await session.execute(
        select(Location).where(Location.name == "Non assigné")
    )
    non_assigne_loc = loc_result.scalar_one_or_none()
    if not non_assigne_loc:
        loc_repo = LocationRepository(session)
        non_assigne_loc = await loc_repo.create_with_path(
            name="Non assigné",
            type=LocationType.ROOM,
        )
    location_id = non_assigne_loc.id
    for item in items:
        try:
            # Merge the seller name into the item characteristics
            characteristics = item.attributes or {}
            if item.seller_name:
                characteristics["vendeur"] = item.seller_name
            # Convert the ISO date string into a Python date object
            purchase_date_obj = None
            if item.purchase_date:
                try:
                    purchase_date_obj = date.fromisoformat(item.purchase_date)
                except ValueError:
                    pass
            # Create the item row
            item_data = {
                "name": item.name,
                "quantity": item.quantity,
                "status": status_enum,
                "price": item.price,
                "purchase_date": purchase_date_obj,
                "url": item.url,
                "characteristics": characteristics or None,
                "notes": f"Commande AliExpress #{item.order_id}" if item.order_id else None,
                "category_id": category_id,
                "location_id": location_id,
                "shop_id": shop_id,
            }
            await item_repo.create(**item_data)
            items_created += 1
        except Exception as e:
            # Best-effort import: record the failure and continue
            errors.append(f"{item.name}: {e}")
    await session.commit()
    return ImportResultResponse(
        items_created=items_created,
        shops_created=1 if shop_created else 0,
        errors=errors,
    )
+163
View File
@@ -0,0 +1,163 @@
"""Router API pour les boutiques."""
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
from app.repositories.shop import ShopRepository
from app.schemas.common import PaginatedResponse, SuccessResponse
from app.schemas.shop import (
ShopCreate,
ShopResponse,
ShopUpdate,
ShopWithItemCount,
)
# Router for shop CRUD endpoints, mounted under /shops.
router = APIRouter(prefix="/shops", tags=["Shops"])
@router.get("", response_model=PaginatedResponse[ShopWithItemCount])
async def list_shops(
    page: int = 1,
    page_size: int = 20,
    db: AsyncSession = Depends(get_db),
) -> PaginatedResponse[ShopWithItemCount]:
    """List every shop together with its item count, paginated."""
    repo = ShopRepository(db)
    offset = (page - 1) * page_size
    rows = await repo.get_all_with_item_count(skip=offset, limit=page_size)
    total = await repo.count()

    def to_entry(shop, count) -> ShopWithItemCount:
        # Flatten one (shop, count) row into the response schema
        return ShopWithItemCount(
            id=shop.id,
            name=shop.name,
            description=shop.description,
            url=shop.url,
            address=shop.address,
            created_at=shop.created_at,
            updated_at=shop.updated_at,
            item_count=count,
        )

    entries = [to_entry(shop, count) for shop, count in rows]
    return PaginatedResponse.create(items=entries, total=total, page=page, page_size=page_size)
@router.get("/{shop_id}", response_model=ShopWithItemCount)
async def get_shop(
    shop_id: int,
    db: AsyncSession = Depends(get_db),
) -> ShopWithItemCount:
    """Fetch one shop with its item count, or 404 if unknown."""
    repo = ShopRepository(db)
    found = await repo.get_with_item_count(shop_id)
    if found is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Boutique {shop_id} non trouvée",
        )
    shop, count = found
    return ShopWithItemCount(
        id=shop.id,
        name=shop.name,
        description=shop.description,
        url=shop.url,
        address=shop.address,
        created_at=shop.created_at,
        updated_at=shop.updated_at,
        item_count=count,
    )
@router.post("", response_model=ShopResponse, status_code=status.HTTP_201_CREATED)
async def create_shop(
    data: ShopCreate,
    db: AsyncSession = Depends(get_db),
) -> ShopResponse:
    """Create a shop; names must be unique (409 on conflict)."""
    repo = ShopRepository(db)
    name_taken = await repo.name_exists(data.name)
    if name_taken:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"Une boutique avec le nom '{data.name}' existe déjà",
        )
    created = await repo.create(
        name=data.name,
        description=data.description,
        url=data.url,
        address=data.address,
    )
    await db.commit()
    return ShopResponse.model_validate(created)
@router.put("/{shop_id}", response_model=ShopResponse)
async def update_shop(
    shop_id: int,
    data: ShopUpdate,
    db: AsyncSession = Depends(get_db),
) -> ShopResponse:
    """Update a shop (PUT semantics).

    Raises 404 if the shop does not exist and 409 if the new name collides
    with another shop.
    """
    repo = ShopRepository(db)
    existing = await repo.get(shop_id)
    if existing is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Boutique {shop_id} non trouvée",
        )
    # Name uniqueness is only re-checked when the name actually changes
    if data.name and data.name != existing.name:
        if await repo.name_exists(data.name, exclude_id=shop_id):
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail=f"Une boutique avec le nom '{data.name}' existe déjà",
            )
    # NOTE(review): every field is forwarded even when the client omitted it
    # (no exclude_unset, unlike the documents PATCH route). If repo.update
    # writes None values, omitted fields are cleared — confirm this
    # PUT-replace behavior is intended.
    shop = await repo.update(
        shop_id,
        name=data.name,
        description=data.description,
        url=data.url,
        address=data.address,
    )
    await db.commit()
    return ShopResponse.model_validate(shop)
@router.delete("/{shop_id}", response_model=SuccessResponse)
async def delete_shop(
    shop_id: int,
    db: AsyncSession = Depends(get_db),
) -> SuccessResponse:
    """Delete a shop, refusing (409) while items still reference it."""
    repo = ShopRepository(db)
    found = await repo.get_with_item_count(shop_id)
    if found is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Boutique {shop_id} non trouvée",
        )
    _, item_count = found
    if item_count > 0:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"Impossible de supprimer : {item_count} objet(s) sont associés à cette boutique",
        )
    await repo.delete(shop_id)
    await db.commit()
    return SuccessResponse(message="Boutique supprimée avec succès", id=shop_id)