import ali

This commit is contained in:
2026-02-01 01:45:51 +01:00
parent bdbfa4e25a
commit 46d6d88ce5
48 changed files with 6714 additions and 185 deletions

View File

@@ -5,6 +5,7 @@ Documentation : https://docs.pydantic.dev/latest/concepts/pydantic_settings/
"""
from functools import lru_cache
from pathlib import Path
from typing import Literal
from pydantic import Field, field_validator
@@ -72,7 +73,10 @@ class Settings(BaseSettings):
return [origin.strip() for origin in self.CORS_ORIGINS.split(",")]
# === Stockage fichiers ===
UPLOAD_DIR: str = Field(default="./uploads", description="Répertoire des uploads")
UPLOAD_DIR: str = Field(
default="./uploads",
description="Répertoire des uploads",
)
MAX_UPLOAD_SIZE_MB: int = Field(
default=50, description="Taille max des uploads en Mo"
)
@@ -91,6 +95,11 @@ class Settings(BaseSettings):
"""Retourne la taille max en octets."""
return self.MAX_UPLOAD_SIZE_MB * 1024 * 1024
@property
def upload_dir_path(self) -> Path:
"""Retourne le chemin du répertoire d'uploads comme Path."""
return Path(self.UPLOAD_DIR).resolve()
# === Recherche ===
SEARCH_MIN_QUERY_LENGTH: int = Field(
default=2, description="Longueur minimale des requêtes de recherche"

View File

@@ -83,13 +83,85 @@ async def get_db() -> AsyncGenerator[AsyncSession, None]:
async def init_db() -> None:
    """Initialize the database schema.

    Creates every table declared on the models' metadata, then builds the
    FTS5 full-text index via init_fts5().  Development/test use only; in
    production, schema changes go through Alembic migrations.
    """
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    # Full-text search (FTS5 table + sync triggers) is set up separately.
    await init_fts5()
async def init_fts5() -> None:
    """Create the FTS5 virtual table and its synchronization triggers.

    FTS5 provides fast full-text search on items; three triggers keep the
    index in sync on INSERT/UPDATE/DELETE.  External-content mode
    (content='items') means the text itself is stored only once, in items.
    """
    from sqlalchemy import text

    async with engine.begin() as conn:
        # Drop previous FTS artifacts so the index is rebuilt cleanly.
        await conn.execute(text("DROP TRIGGER IF EXISTS fts_items_insert"))
        await conn.execute(text("DROP TRIGGER IF EXISTS fts_items_update"))
        await conn.execute(text("DROP TRIGGER IF EXISTS fts_items_delete"))
        await conn.execute(text("DROP TABLE IF EXISTS fts_items"))
        # Virtual table indexing name, description, brand, model, notes and
        # serial number; unicode61 tokenizer with diacritics stripped so
        # accented French text matches unaccented queries.
        await conn.execute(text("""
            CREATE VIRTUAL TABLE fts_items USING fts5(
                name,
                description,
                brand,
                model,
                notes,
                serial_number,
                content='items',
                content_rowid='id',
                tokenize='unicode61 remove_diacritics 2'
            )
        """))
        # INSERT trigger: index a newly created item.
        await conn.execute(text("""
            CREATE TRIGGER fts_items_insert
            AFTER INSERT ON items
            BEGIN
                INSERT INTO fts_items(rowid, name, description, brand, model, notes, serial_number)
                VALUES (NEW.id, NEW.name, NEW.description, NEW.brand, NEW.model, NEW.notes, NEW.serial_number);
            END
        """))
        # UPDATE trigger: external-content FTS5 requires a 'delete' command
        # with the OLD values, then a re-insert of the NEW values.
        await conn.execute(text("""
            CREATE TRIGGER fts_items_update
            AFTER UPDATE ON items
            BEGIN
                INSERT INTO fts_items(fts_items, rowid, name, description, brand, model, notes, serial_number)
                VALUES ('delete', OLD.id, OLD.name, OLD.description, OLD.brand, OLD.model, OLD.notes, OLD.serial_number);
                INSERT INTO fts_items(rowid, name, description, brand, model, notes, serial_number)
                VALUES (NEW.id, NEW.name, NEW.description, NEW.brand, NEW.model, NEW.notes, NEW.serial_number);
            END
        """))
        # DELETE trigger: remove the row from the index.
        await conn.execute(text("""
            CREATE TRIGGER fts_items_delete
            AFTER DELETE ON items
            BEGIN
                INSERT INTO fts_items(fts_items, rowid, name, description, brand, model, notes, serial_number)
                VALUES ('delete', OLD.id, OLD.name, OLD.description, OLD.brand, OLD.model, OLD.notes, OLD.serial_number);
            END
        """))
        # Backfill the index with whatever items already exist.
        await conn.execute(text("""
            INSERT INTO fts_items(rowid, name, description, brand, model, notes, serial_number)
            SELECT id, name, description, brand, model, notes, serial_number FROM items
        """))
async def close_db() -> None:
"""Ferme proprement les connexions à la base de données.

View File

@@ -107,11 +107,14 @@ async def global_exception_handler(request: Any, exc: Exception) -> JSONResponse
# === Enregistrement des routers ===
from app.routers import categories_router, items_router, locations_router
from app.routers import categories_router, documents_router, import_router, items_router, locations_router, shops_router
app.include_router(categories_router, prefix="/api/v1")
app.include_router(locations_router, prefix="/api/v1")
app.include_router(items_router, prefix="/api/v1")
app.include_router(documents_router, prefix="/api/v1")
app.include_router(shops_router, prefix="/api/v1")
app.include_router(import_router, prefix="/api/v1")
if __name__ == "__main__":

View File

@@ -7,6 +7,7 @@ from app.models.category import Category
from app.models.document import Document, DocumentType
from app.models.item import Item, ItemStatus
from app.models.location import Location, LocationType
from app.models.shop import Shop
__all__ = [
"Category",
@@ -16,4 +17,5 @@ __all__ = [
"ItemStatus",
"Document",
"DocumentType",
"Shop",
]

View File

@@ -8,7 +8,7 @@ from datetime import date, datetime
from decimal import Decimal
from typing import TYPE_CHECKING
from sqlalchemy import Date, DateTime, Enum, ForeignKey, Integer, Numeric, String, Text
from sqlalchemy import Date, DateTime, Enum, ForeignKey, Integer, JSON, Numeric, String, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func
@@ -18,6 +18,7 @@ if TYPE_CHECKING:
from app.models.category import Category
from app.models.document import Document
from app.models.location import Location
from app.models.shop import Shop
import enum
@@ -27,6 +28,7 @@ class ItemStatus(str, enum.Enum):
IN_STOCK = "in_stock" # En stock (non utilisé)
IN_USE = "in_use" # En cours d'utilisation
INTEGRATED = "integrated" # Intégré dans un autre objet
BROKEN = "broken" # Cassé/HS
SOLD = "sold" # Vendu
LENT = "lent" # Prêté
@@ -79,6 +81,9 @@ class Item(Base):
price: Mapped[Decimal | None] = mapped_column(Numeric(10, 2), nullable=True)
purchase_date: Mapped[date | None] = mapped_column(Date, nullable=True)
# Caractéristiques techniques (clé-valeur, ex: {"RAM": "16 Go", "CPU": "i7"})
characteristics: Mapped[dict | None] = mapped_column(JSON, nullable=True, default=None)
# Notes
notes: Mapped[str | None] = mapped_column(Text, nullable=True)
@@ -89,6 +94,12 @@ class Item(Base):
location_id: Mapped[int] = mapped_column(
Integer, ForeignKey("locations.id", ondelete="RESTRICT"), nullable=False, index=True
)
parent_item_id: Mapped[int | None] = mapped_column(
Integer, ForeignKey("items.id", ondelete="SET NULL"), nullable=True, index=True
)
shop_id: Mapped[int | None] = mapped_column(
Integer, ForeignKey("shops.id", ondelete="SET NULL"), nullable=True, index=True
)
# Timestamps
created_at: Mapped[datetime] = mapped_column(
@@ -107,6 +118,13 @@ class Item(Base):
documents: Mapped[list["Document"]] = relationship(
"Document", back_populates="item", cascade="all, delete-orphan"
)
shop: Mapped["Shop | None"] = relationship("Shop", back_populates="items")
parent_item: Mapped["Item | None"] = relationship(
"Item", remote_side=[id], foreign_keys=[parent_item_id], back_populates="children"
)
children: Mapped[list["Item"]] = relationship(
"Item", back_populates="parent_item", foreign_keys=[parent_item_id]
)
def __repr__(self) -> str:
"""Représentation string de l'objet."""

View File

@@ -0,0 +1,56 @@
"""Modèle SQLAlchemy pour les boutiques/magasins.
Les boutiques représentent les lieux d'achat des objets.
"""
from datetime import datetime
from typing import TYPE_CHECKING
from sqlalchemy import DateTime, String, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.sql import func
from app.core.database import Base
if TYPE_CHECKING:
from app.models.item import Item
class Shop(Base):
    """A shop or store where items were purchased.

    Attributes:
        id: Auto-incremented primary key
        name: Shop name (e.g. "Amazon", "Leroy Merlin"), unique
        description: Optional free-text description
        url: Optional shop website URL
        address: Optional physical address
        created_at: Creation timestamp (set server-side)
        updated_at: Last-modification timestamp (set server-side)
        items: Items purchased from this shop
    """

    __tablename__ = "shops"

    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    # Unique + indexed: the name doubles as a natural key for lookups
    # (e.g. the CSV importer resolves the "AliExpress" shop by name).
    name: Mapped[str] = mapped_column(String(200), unique=True, nullable=False, index=True)
    description: Mapped[str | None] = mapped_column(Text, nullable=True)
    url: Mapped[str | None] = mapped_column(String(500), nullable=True)
    address: Mapped[str | None] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )
    # One-to-many inverse of Item.shop.
    items: Mapped[list["Item"]] = relationship(
        "Item", back_populates="shop"
    )

    def __repr__(self) -> str:
        """Debug representation showing id and name."""
        return f"<Shop(id={self.id}, name='{self.name}')>"

View File

@@ -3,7 +3,7 @@
from decimal import Decimal
from typing import Any
from sqlalchemy import or_, select
from sqlalchemy import or_, select, text
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
@@ -33,6 +33,7 @@ class ItemRepository(BaseRepository[Item]):
selectinload(Item.category),
selectinload(Item.location),
selectinload(Item.documents),
selectinload(Item.parent_item),
)
.where(Item.id == id)
)
@@ -55,6 +56,8 @@ class ItemRepository(BaseRepository[Item]):
.options(
selectinload(Item.category),
selectinload(Item.location),
selectinload(Item.documents),
selectinload(Item.parent_item),
)
.offset(skip)
.limit(limit)
@@ -91,20 +94,24 @@ class ItemRepository(BaseRepository[Item]):
stmt = select(Item).options(
selectinload(Item.category),
selectinload(Item.location),
selectinload(Item.documents),
selectinload(Item.parent_item),
)
# Recherche textuelle
# Recherche full-text via FTS5
if query:
search_term = f"%{query}%"
stmt = stmt.where(
or_(
Item.name.ilike(search_term),
Item.description.ilike(search_term),
Item.brand.ilike(search_term),
Item.model.ilike(search_term),
Item.notes.ilike(search_term),
)
)
# Échapper les caractères spéciaux FTS5 et ajouter le préfixe *
safe_query = query.replace('"', '""').strip()
if safe_query:
# Recherche par préfixe pour résultats en temps réel
fts_terms = " ".join(f'"{word}"*' for word in safe_query.split() if word)
stmt = stmt.where(
Item.id.in_(
select(text("rowid")).select_from(text("fts_items")).where(
text("fts_items MATCH :fts_query")
)
)
).params(fts_query=fts_terms)
# Filtres
if category_id is not None:
@@ -141,16 +148,16 @@ class ItemRepository(BaseRepository[Item]):
stmt = select(func.count(Item.id))
if query:
search_term = f"%{query}%"
stmt = stmt.where(
or_(
Item.name.ilike(search_term),
Item.description.ilike(search_term),
Item.brand.ilike(search_term),
Item.model.ilike(search_term),
Item.notes.ilike(search_term),
)
)
safe_query = query.replace('"', '""').strip()
if safe_query:
fts_terms = " ".join(f'"{word}"*' for word in safe_query.split() if word)
stmt = stmt.where(
Item.id.in_(
select(text("rowid")).select_from(text("fts_items")).where(
text("fts_items MATCH :fts_query")
)
)
).params(fts_query=fts_terms)
if category_id is not None:
stmt = stmt.where(Item.category_id == category_id)

View File

@@ -0,0 +1,50 @@
"""Repository pour les boutiques."""
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from app.models.shop import Shop
from app.repositories.base import BaseRepository
class ShopRepository(BaseRepository[Shop]):
    """Database-access layer for Shop entities."""

    def __init__(self, db: AsyncSession) -> None:
        """Bind this repository to *db* for the Shop model."""
        super().__init__(Shop, db)

    async def get_by_name(self, name: str) -> Shop | None:
        """Return the shop whose name matches exactly, or None."""
        stmt = select(Shop).where(Shop.name == name)
        found = await self.db.execute(stmt)
        return found.scalar_one_or_none()

    async def get_with_item_count(self, id: int) -> tuple[Shop, int] | None:
        """Fetch one shop together with how many items reference it.

        Returns None when no shop has this id.
        """
        stmt = select(Shop).options(selectinload(Shop.items)).where(Shop.id == id)
        shop = (await self.db.execute(stmt)).scalar_one_or_none()
        return None if shop is None else (shop, len(shop.items))

    async def get_all_with_item_count(
        self, skip: int = 0, limit: int = 100
    ) -> list[tuple[Shop, int]]:
        """Page through shops ordered by name, pairing each with its item count."""
        stmt = (
            select(Shop)
            .options(selectinload(Shop.items))
            .order_by(Shop.name)
            .offset(skip)
            .limit(limit)
        )
        rows = (await self.db.execute(stmt)).scalars().all()
        return [(row, len(row.items)) for row in rows]

    async def name_exists(self, name: str, exclude_id: int | None = None) -> bool:
        """True when a shop already uses *name*, optionally ignoring one id."""
        stmt = select(func.count(Shop.id)).where(Shop.name == name)
        if exclude_id is not None:
            stmt = stmt.where(Shop.id != exclude_id)
        total = (await self.db.execute(stmt)).scalar_one()
        return total > 0

View File

@@ -1,11 +1,17 @@
"""Package des routers API."""
from app.routers.categories import router as categories_router
from app.routers.documents import router as documents_router
from app.routers.import_csv import router as import_router
from app.routers.items import router as items_router
from app.routers.locations import router as locations_router
from app.routers.shops import router as shops_router
__all__ = [
"categories_router",
"documents_router",
"import_router",
"locations_router",
"items_router",
"shops_router",
]

View File

@@ -0,0 +1,249 @@
"""Router pour les documents (upload, téléchargement, suppression)."""
import os
import uuid
from pathlib import Path
from typing import Annotated
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
from fastapi.responses import FileResponse
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.config import settings
from app.core.database import get_db
from app.models.document import Document, DocumentType
from app.models.item import Item
from app.schemas.document import DocumentResponse, DocumentUpdate, DocumentUploadResponse
router = APIRouter(prefix="/documents", tags=["documents"])
# Types MIME autorisés
ALLOWED_IMAGE_TYPES = {"image/jpeg", "image/png", "image/gif", "image/webp"}
ALLOWED_PDF_TYPES = {"application/pdf"}
ALLOWED_TYPES = ALLOWED_IMAGE_TYPES | ALLOWED_PDF_TYPES
# Taille max : 10 Mo
MAX_FILE_SIZE = 10 * 1024 * 1024
def get_upload_path(doc_type: DocumentType, filename: str) -> Path:
    """Resolve the on-disk destination for an uploaded file.

    Photos live under 'photos/', everything else under 'documents/'.
    """
    if doc_type == DocumentType.PHOTO:
        subdir = "photos"
    else:
        subdir = "documents"
    return settings.upload_dir_path / subdir / filename
def generate_unique_filename(original_name: str) -> str:
    """Build a collision-free filename: a random UUID4 plus the original
    extension, lower-cased (empty extension is preserved as-is)."""
    suffix = Path(original_name).suffix.lower()
    return "".join((str(uuid.uuid4()), suffix))
async def validate_item_exists(session: AsyncSession, item_id: int) -> Item:
    """Fetch the item by primary key, or abort the request with a 404."""
    found = await session.get(Item, item_id)
    if found is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Item {item_id} non trouvé",
        )
    return found
@router.post("/upload", response_model=DocumentUploadResponse, status_code=status.HTTP_201_CREATED)
async def upload_document(
    file: Annotated[UploadFile, File(description="Fichier à uploader")],
    item_id: Annotated[int, Form(description="ID de l'objet associé")],
    doc_type: Annotated[DocumentType, Form(description="Type de document")],
    description: Annotated[str | None, Form(description="Description optionnelle")] = None,
    session: AsyncSession = Depends(get_db),
):
    """Upload a file and attach it to an item.

    Accepts images (JPEG, PNG, GIF, WebP) and PDF, capped at 10 MB; stores
    the file under a UUID-based name, then records a Document row.
    """
    # The target item must exist before anything is accepted.
    await validate_item_exists(session, item_id)
    # Reject unsupported MIME types up front (based on the client-declared type).
    if file.content_type not in ALLOWED_TYPES:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Type de fichier non autorisé : {file.content_type}. "
            f"Types acceptés : images (JPEG, PNG, GIF, WebP) et PDF"
        )
    # A "photo" document must actually be an image.
    if doc_type == DocumentType.PHOTO and file.content_type not in ALLOWED_IMAGE_TYPES:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Le type 'photo' nécessite un fichier image"
        )
    # Whole file is buffered in memory; acceptable given the size cap below.
    content = await file.read()
    file_size = len(content)
    # NOTE(review): this hardcoded 10 MB cap (MAX_FILE_SIZE) ignores the
    # configurable settings.MAX_UPLOAD_SIZE_MB (default 50) — confirm which
    # limit is intended.
    if file_size > MAX_FILE_SIZE:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Fichier trop volumineux ({file_size / 1024 / 1024:.1f} Mo). "
            f"Taille max : {MAX_FILE_SIZE / 1024 / 1024:.0f} Mo"
        )
    # UUID-based name avoids collisions and path injection via the client name.
    unique_filename = generate_unique_filename(file.filename or "document")
    file_path = get_upload_path(doc_type, unique_filename)
    # Create the photos/documents subdirectory on first use.
    file_path.parent.mkdir(parents=True, exist_ok=True)
    # Persist to disk before creating the DB row.
    with open(file_path, "wb") as f:
        f.write(content)
    # Path is stored relative to the parent of the upload directory.
    relative_path = str(file_path.relative_to(settings.upload_dir_path.parent))
    document = Document(
        filename=unique_filename,
        original_name=file.filename or "document",
        type=doc_type,
        mime_type=file.content_type or "application/octet-stream",
        size_bytes=file_size,
        file_path=relative_path,
        description=description,
        item_id=item_id,
    )
    session.add(document)
    await session.commit()
    # Refresh to populate server-generated fields (id, timestamps).
    await session.refresh(document)
    return DocumentUploadResponse(
        id=document.id,
        filename=document.filename,
        original_name=document.original_name,
        type=document.type,
        mime_type=document.mime_type,
        size_bytes=document.size_bytes,
        message="Document uploadé avec succès"
    )
@router.get("/item/{item_id}", response_model=list[DocumentResponse])
async def get_item_documents(
    item_id: int,
    doc_type: DocumentType | None = None,
    session: AsyncSession = Depends(get_db),
):
    """List every document attached to an item, newest first.

    An optional doc_type query parameter narrows the result to one type.
    """
    from sqlalchemy import select

    await validate_item_exists(session, item_id)
    stmt = select(Document).where(Document.item_id == item_id)
    if doc_type:
        stmt = stmt.where(Document.type == doc_type)
    stmt = stmt.order_by(Document.created_at.desc())
    rows = (await session.execute(stmt)).scalars().all()
    return [DocumentResponse.model_validate(row) for row in rows]
@router.get("/{document_id}", response_model=DocumentResponse)
async def get_document(
    document_id: int,
    session: AsyncSession = Depends(get_db),
):
    """Return a single document's metadata; 404 when the id is unknown."""
    doc = await session.get(Document, document_id)
    if not doc:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Document non trouvé",
        )
    return DocumentResponse.model_validate(doc)
@router.get("/{document_id}/download")
async def download_document(
    document_id: int,
    session: AsyncSession = Depends(get_db),
):
    """Stream the stored file back to the client under its original name."""
    doc = await session.get(Document, document_id)
    if not doc:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Document non trouvé",
        )
    disk_path = settings.upload_dir_path.parent / doc.file_path
    if not disk_path.exists():
        # DB row exists but the file is gone (deleted or moved externally).
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Fichier non trouvé sur le disque",
        )
    return FileResponse(
        path=disk_path,
        filename=doc.original_name,
        media_type=doc.mime_type,
    )
@router.patch("/{document_id}", response_model=DocumentResponse)
async def update_document(
    document_id: int,
    data: DocumentUpdate,
    session: AsyncSession = Depends(get_db),
):
    """Apply a partial metadata update (only fields the client actually set)."""
    doc = await session.get(Document, document_id)
    if not doc:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Document non trouvé",
        )
    # exclude_unset keeps omitted fields untouched (true PATCH semantics).
    for field_name, new_value in data.model_dump(exclude_unset=True).items():
        setattr(doc, field_name, new_value)
    await session.commit()
    await session.refresh(doc)
    return DocumentResponse.model_validate(doc)
@router.delete("/{document_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_document(
    document_id: int,
    session: AsyncSession = Depends(get_db),
):
    """Delete a document: DB row first, then the file on disk.

    The row is deleted and committed *before* the file is unlinked, so a
    failed commit can no longer leave a dangling DB entry pointing at an
    already-removed file (the previous order deleted the file first).  If
    the unlink itself fails, the worst case is an orphaned file, which is
    recoverable — unlike a broken DB reference.
    """
    document = await session.get(Document, document_id)
    if not document:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Document non trouvé"
        )
    # Resolve the path before the commit expires the ORM object.
    file_path = settings.upload_dir_path.parent / document.file_path
    await session.delete(document)
    await session.commit()
    # Best-effort removal; missing_ok covers a file already gone from disk.
    try:
        file_path.unlink(missing_ok=True)
    except OSError:
        # Row is gone; a leftover file is harmless and can be swept later.
        pass

View File

@@ -0,0 +1,344 @@
"""Router pour l'import de fichiers CSV (AliExpress)."""
import csv
import io
import re
from datetime import date
from typing import Annotated
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
from app.models.item import Item, ItemStatus
from app.models.location import Location, LocationType
from app.repositories.item import ItemRepository
from app.repositories.location import LocationRepository
from app.repositories.shop import ShopRepository
router = APIRouter(prefix="/import", tags=["Import"])
# Mapping des mois français → numéro
MOIS_FR: dict[str, int] = {
"janv.": 1, "févr.": 2, "mars": 3, "avr.": 4,
"mai": 5, "juin": 6, "juil.": 7, "août": 8,
"sept.": 9, "oct.": 10, "nov.": 11, "déc.": 12,
}
def parse_date_fr(date_str: str) -> str | None:
"""Parse une date au format français AliExpress ('5 sept. 2025') → '2025-09-05'."""
if not date_str or not date_str.strip():
return None
date_str = date_str.strip()
# Format attendu : "5 sept. 2025"
match = re.match(r"(\d{1,2})\s+(\S+)\s+(\d{4})", date_str)
if not match:
return None
day, month_str, year = match.groups()
month = MOIS_FR.get(month_str.lower())
if month is None:
# Essai avec le mois tel quel (ex: "mars" sans point)
month = MOIS_FR.get(month_str.lower().rstrip("."))
if month is None:
return None
return f"{year}-{month:02d}-{int(day):02d}"
def parse_price(price_str: str) -> float | None:
"""Parse un prix depuis le CSV ('37.19' ou '4,59') → float."""
if not price_str or not price_str.strip():
return None
cleaned = price_str.strip().replace(",", ".")
try:
val = float(cleaned)
return val if val > 0 else None
except ValueError:
return None
def parse_quantity(qty_str: str) -> int:
    """Parse a quantity ('1.00' or '2') into an int.

    Blank or invalid input defaults to 1; results are floored at 1.
    """
    if not qty_str:
        return 1
    stripped = qty_str.strip()
    if not stripped:
        return 1
    try:
        qty = int(float(stripped))
    except ValueError:
        return 1
    return qty if qty > 1 else 1
def fix_url(url: str) -> str | None:
"""Corrige les URLs AliExpress (ajoute https: si nécessaire)."""
if not url or not url.strip():
return None
url = url.strip()
if url.startswith("//"):
return f"https:{url}"
return url
def parse_attributes(attr_str: str) -> dict[str, str] | None:
"""Parse les attributs AliExpress en dictionnaire."""
if not attr_str or not attr_str.strip():
return None
parts = [p.strip() for p in attr_str.split(",") if p.strip()]
if not parts:
return None
result: dict[str, str] = {}
for i, part in enumerate(parts):
result[f"attribut_{i + 1}"] = part
return result
# Schémas de réponse
class ImportPreviewItem(BaseModel):
    """One product row parsed from the CSV, ready for the import preview."""

    index: int  # position among product rows; referenced by selected_indices
    name: str
    price: float | None = None  # unit price; None when missing or non-positive
    quantity: int = 1
    purchase_date: str | None = None  # ISO "YYYY-MM-DD" string, or None
    seller_name: str | None = None
    url: str | None = None  # product page link (https-normalized)
    image_url: str | None = None
    attributes: dict[str, str] | None = None  # {"attribut_N": value}
    order_id: str | None = None
    order_status: str | None = None
    total_price: float | None = None  # order total taken from the total row
    is_duplicate: bool = False  # set by the preview endpoint via URL match
class ImportPreviewResponse(BaseModel):
    """Preview payload: parsed items, their count, and any parse errors."""

    items: list[ImportPreviewItem]
    total_items: int
    errors: list[str]
class ImportResultResponse(BaseModel):
    """Result of an actual import: creation counters and per-row errors."""

    items_created: int
    shops_created: int
    errors: list[str]
def parse_aliexpress_csv(content: str) -> tuple[list[ImportPreviewItem], list[str]]:
    """Parse an AliExpress CSV export into preview items plus parse errors.

    The export interleaves product rows with order-total rows (no item title
    but a total price).  Totals are collected in a first pass so each product
    row can be annotated with its order's real total in the second pass.
    """
    items: list[ImportPreviewItem] = []
    errors: list[str] = []

    # Drop a UTF-8 BOM if the exporter left one at the start.
    payload = content[1:] if content.startswith("\ufeff") else content
    rows = list(csv.DictReader(io.StringIO(payload)))

    # Pass 1: order id -> order total, taken from the title-less total rows.
    totals_by_order: dict[str, float] = {}
    for row in rows:
        title = row.get("Item title", "").strip()
        total_str = row.get("Total price", "").strip()
        if title or not total_str:
            continue
        order_ref = row.get("Order Id", "").strip()
        total = parse_price(total_str)
        if total and order_ref:
            totals_by_order[order_ref] = total

    # Pass 2: build one preview item per product row.
    position = 0
    for row in rows:
        title = row.get("Item title", "").strip()
        if not title:
            continue  # total row, already handled above
        order_ref = row.get("Order Id", "").strip()
        try:
            items.append(
                ImportPreviewItem(
                    index=position,
                    name=title,
                    price=parse_price(row.get("Item price", "")),
                    quantity=parse_quantity(row.get("Item quantity", "")),
                    purchase_date=parse_date_fr(row.get("Order date", "")),
                    seller_name=row.get("Store Name", "").strip() or None,
                    url=fix_url(row.get("Item product link", "")),
                    image_url=fix_url(row.get("Item image url", "")),
                    attributes=parse_attributes(row.get("Item attributes", "")),
                    order_id=order_ref or None,
                    order_status=row.get("Order Status", "").strip() or None,
                    total_price=totals_by_order.get(order_ref),
                )
            )
        except Exception as e:
            errors.append(f"Ligne {position}: {e}")
        position += 1
    return items, errors
@router.post(
    "/csv/aliexpress/preview",
    response_model=ImportPreviewResponse,
    summary="Prévisualiser un import CSV AliExpress",
)
async def preview_aliexpress_csv(
    file: Annotated[UploadFile, File(description="Fichier CSV AliExpress")],
    session: AsyncSession = Depends(get_db),
) -> ImportPreviewResponse:
    """Parse the uploaded CSV and return a preview of importable items.

    Duplicate detection marks items whose URL already exists in the
    database.  It runs as a single batched ``IN`` query over all URLs in the
    file instead of one SELECT per item (the previous version issued N+1
    queries on large exports).
    """
    if not file.filename or not file.filename.lower().endswith(".csv"):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Le fichier doit être un CSV (.csv)",
        )
    content = await file.read()
    try:
        text = content.decode("utf-8-sig")
    except UnicodeDecodeError:
        # latin-1 decoding never fails, so this is a safe last resort.
        text = content.decode("latin-1")
    items, errors = parse_aliexpress_csv(text)

    # Single batched duplicate check against every URL present in the file.
    urls = {item.url for item in items if item.url}
    if urls:
        result = await session.execute(select(Item.url).where(Item.url.in_(urls)))
        existing = set(result.scalars().all())
        for item in items:
            if item.url and item.url in existing:
                item.is_duplicate = True
    return ImportPreviewResponse(
        items=items,
        total_items=len(items),
        errors=errors,
    )
@router.post(
    "/csv/aliexpress/import",
    response_model=ImportResultResponse,
    status_code=status.HTTP_201_CREATED,
    summary="Importer les items depuis un CSV AliExpress",
)
async def import_aliexpress_csv(
    file: Annotated[UploadFile, File(description="Fichier CSV AliExpress")],
    category_id: Annotated[int, Form(description="Catégorie par défaut")],
    item_status: Annotated[str, Form(description="Statut par défaut")] = "in_stock",
    selected_indices: Annotated[str, Form(description="Indices des items à importer (virgules)")] = "",
    session: AsyncSession = Depends(get_db),
) -> ImportResultResponse:
    """Import the selected CSV rows as items.

    Workflow: validate the file, parse it, keep only rows whose index is in
    ``selected_indices`` (all rows when empty), resolve or create the single
    "AliExpress" shop and the "Non assigné" fallback location, then create
    one item per row.  Per-row failures are collected into the response
    instead of aborting the whole import.
    """
    if not file.filename or not file.filename.lower().endswith(".csv"):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Le fichier doit être un CSV (.csv)",
        )
    content = await file.read()
    try:
        text = content.decode("utf-8-sig")
    except UnicodeDecodeError:
        # latin-1 decoding never fails, so this is a safe last resort.
        text = content.decode("latin-1")
    # NOTE(review): parse_errors is never merged into the response's errors
    # list below — confirm whether parse failures should be surfaced too.
    items, parse_errors = parse_aliexpress_csv(text)
    # Restrict to the rows the user ticked in the preview, when provided.
    if selected_indices.strip():
        try:
            selected = set(int(i.strip()) for i in selected_indices.split(",") if i.strip())
            items = [item for item in items if item.index in selected]
        except ValueError:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Format d'indices invalide",
            )
    # Validate the default status against the ItemStatus enum.
    try:
        status_enum = ItemStatus(item_status)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Statut invalide : {item_status}",
        )
    item_repo = ItemRepository(session)
    shop_repo = ShopRepository(session)
    errors: list[str] = []
    items_created = 0
    shop_created = False
    # Resolve or create the single "AliExpress" shop.
    # NOTE(review): aliexpress_shop.id is read right after create(); assumes
    # BaseRepository.create flushes so the PK is populated — confirm.
    aliexpress_shop = await shop_repo.get_by_name("AliExpress")
    if not aliexpress_shop:
        aliexpress_shop = await shop_repo.create(
            name="AliExpress",
            url="https://www.aliexpress.com",
        )
        shop_created = True
    shop_id = aliexpress_shop.id
    # Resolve or create the fallback "Non assigné" location.
    loc_result = await session.execute(
        select(Location).where(Location.name == "Non assigné")
    )
    non_assigne_loc = loc_result.scalar_one_or_none()
    if not non_assigne_loc:
        loc_repo = LocationRepository(session)
        non_assigne_loc = await loc_repo.create_with_path(
            name="Non assigné",
            type=LocationType.ROOM,
        )
    location_id = non_assigne_loc.id
    for item in items:
        try:
            # Fold the seller name into the characteristics dict.
            characteristics = item.attributes or {}
            if item.seller_name:
                characteristics["vendeur"] = item.seller_name
            # Convert the ISO date string into a Python date object.
            purchase_date_obj = None
            if item.purchase_date:
                try:
                    purchase_date_obj = date.fromisoformat(item.purchase_date)
                except ValueError:
                    pass
            # Create the item.
            item_data = {
                "name": item.name,
                "quantity": item.quantity,
                "status": status_enum,
                "price": item.price,
                "purchase_date": purchase_date_obj,
                "url": item.url,
                "characteristics": characteristics or None,
                "notes": f"Commande AliExpress #{item.order_id}" if item.order_id else None,
                "category_id": category_id,
                "location_id": location_id,
                "shop_id": shop_id,
            }
            await item_repo.create(**item_data)
            items_created += 1
        except Exception as e:
            # NOTE(review): after a DB error the session may be in a failed
            # state, making every subsequent create() fail too — confirm
            # whether a rollback/savepoint is needed here.
            errors.append(f"{item.name}: {e}")
    await session.commit()
    return ImportResultResponse(
        items_created=items_created,
        shops_created=1 if shop_created else 0,
        errors=errors,
    )

View File

@@ -0,0 +1,163 @@
"""Router API pour les boutiques."""
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
from app.repositories.shop import ShopRepository
from app.schemas.common import PaginatedResponse, SuccessResponse
from app.schemas.shop import (
ShopCreate,
ShopResponse,
ShopUpdate,
ShopWithItemCount,
)
router = APIRouter(prefix="/shops", tags=["Shops"])
@router.get("", response_model=PaginatedResponse[ShopWithItemCount])
async def list_shops(
    page: int = 1,
    page_size: int = 20,
    db: AsyncSession = Depends(get_db),
) -> PaginatedResponse[ShopWithItemCount]:
    """Paginated listing of shops, each annotated with its item count."""
    repo = ShopRepository(db)
    offset = (page - 1) * page_size
    pairs = await repo.get_all_with_item_count(skip=offset, limit=page_size)
    total = await repo.count()
    entries = []
    for shop, count in pairs:
        entries.append(
            ShopWithItemCount(
                id=shop.id,
                name=shop.name,
                description=shop.description,
                url=shop.url,
                address=shop.address,
                created_at=shop.created_at,
                updated_at=shop.updated_at,
                item_count=count,
            )
        )
    return PaginatedResponse.create(items=entries, total=total, page=page, page_size=page_size)
@router.get("/{shop_id}", response_model=ShopWithItemCount)
async def get_shop(
    shop_id: int,
    db: AsyncSession = Depends(get_db),
) -> ShopWithItemCount:
    """Fetch a single shop with its item count; 404 when unknown."""
    found = await ShopRepository(db).get_with_item_count(shop_id)
    if found is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Boutique {shop_id} non trouvée",
        )
    shop, n_items = found
    return ShopWithItemCount(
        id=shop.id,
        name=shop.name,
        description=shop.description,
        url=shop.url,
        address=shop.address,
        created_at=shop.created_at,
        updated_at=shop.updated_at,
        item_count=n_items,
    )
@router.post("", response_model=ShopResponse, status_code=status.HTTP_201_CREATED)
async def create_shop(
    data: ShopCreate,
    db: AsyncSession = Depends(get_db),
) -> ShopResponse:
    """Create a new shop; duplicate names are rejected with 409."""
    repo = ShopRepository(db)
    duplicate = await repo.name_exists(data.name)
    if duplicate:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"Une boutique avec le nom '{data.name}' existe déjà",
        )
    created = await repo.create(
        name=data.name,
        description=data.description,
        url=data.url,
        address=data.address,
    )
    await db.commit()
    return ShopResponse.model_validate(created)
@router.put("/{shop_id}", response_model=ShopResponse)
async def update_shop(
    shop_id: int,
    data: ShopUpdate,
    db: AsyncSession = Depends(get_db),
) -> ShopResponse:
    """Update a shop: 404 if unknown, 409 if the new name is already taken."""
    repo = ShopRepository(db)
    current = await repo.get(shop_id)
    if current is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Boutique {shop_id} non trouvée",
        )
    # Only run the uniqueness check when the name actually changes;
    # short-circuiting keeps the extra query off the common path.
    renaming = bool(data.name) and data.name != current.name
    if renaming and await repo.name_exists(data.name, exclude_id=shop_id):
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"Une boutique avec le nom '{data.name}' existe déjà",
        )
    # NOTE(review): every field is forwarded, including ones the client did
    # not send (None) — confirm the repository treats None as "leave
    # unchanged", otherwise a partial payload could blank existing fields.
    updated = await repo.update(
        shop_id,
        name=data.name,
        description=data.description,
        url=data.url,
        address=data.address,
    )
    await db.commit()
    return ShopResponse.model_validate(updated)
@router.delete("/{shop_id}", response_model=SuccessResponse)
async def delete_shop(
    shop_id: int,
    db: AsyncSession = Depends(get_db),
) -> SuccessResponse:
    """Delete a shop; refused (409) while items still reference it."""
    repo = ShopRepository(db)
    found = await repo.get_with_item_count(shop_id)
    if found is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Boutique {shop_id} non trouvée",
        )
    _shop, item_count = found
    # Deleting a shop that items still point at would orphan those rows.
    if item_count > 0:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail=f"Impossible de supprimer : {item_count} objet(s) sont associés à cette boutique",
        )
    await repo.delete(shop_id)
    await db.commit()
    return SuccessResponse(message="Boutique supprimée avec succès", id=shop_id)

View File

@@ -5,8 +5,9 @@ Définit les schémas de validation pour les requêtes et réponses API.
from datetime import date, datetime
from decimal import Decimal
from typing import Any
from pydantic import BaseModel, ConfigDict, Field
from pydantic import BaseModel, ConfigDict, Field, model_validator
from app.models.item import ItemStatus
from app.schemas.category import CategoryResponse
@@ -26,6 +27,7 @@ class ItemBase(BaseModel):
url: str | None = Field(None, max_length=500, description="Lien vers page produit")
price: Decimal | None = Field(None, ge=0, decimal_places=2, description="Prix d'achat")
purchase_date: date | None = Field(None, description="Date d'achat")
characteristics: dict[str, str] | None = Field(None, description="Caractéristiques techniques (clé-valeur)")
notes: str | None = Field(None, description="Notes libres")
@@ -34,6 +36,8 @@ class ItemCreate(ItemBase):
category_id: int = Field(..., description="ID de la catégorie")
location_id: int = Field(..., description="ID de l'emplacement")
parent_item_id: int | None = Field(None, description="ID de l'objet parent (si intégré)")
shop_id: int | None = Field(None, description="ID de la boutique d'achat")
class ItemUpdate(BaseModel):
@@ -49,9 +53,12 @@ class ItemUpdate(BaseModel):
url: str | None = Field(None, max_length=500)
price: Decimal | None = Field(None, ge=0)
purchase_date: date | None = None
characteristics: dict[str, str] | None = None
notes: str | None = None
category_id: int | None = None
location_id: int | None = None
parent_item_id: int | None = None
shop_id: int | None = None
class ItemResponse(ItemBase):
@@ -62,6 +69,8 @@ class ItemResponse(ItemBase):
id: int
category_id: int
location_id: int
parent_item_id: int | None = None
shop_id: int | None = None
created_at: datetime
updated_at: datetime
@@ -71,6 +80,57 @@ class ItemWithRelations(ItemResponse):
category: CategoryResponse
location: LocationResponse
thumbnail_id: int | None = None
parent_item_name: str | None = None
@model_validator(mode="before")
@classmethod
def extract_computed_fields(cls, data: Any) -> Any:
    """Extract computed fields: thumbnail id and parent item name.

    Runs before validation. ``data`` may be a SQLAlchemy ORM instance or a
    plain dict; computed values are injected so the declared
    ``thumbnail_id`` / ``parent_item_name`` fields can be populated.
    """
    from sqlalchemy.orm import InstanceState
    thumbnail_id = None
    parent_item_name = None
    # Check which relations are actually loaded (avoids triggering a
    # lazy load, which would fail under an async session).
    loaded_relations: set[str] = set()
    if hasattr(data, "_sa_instance_state"):
        state: InstanceState = data._sa_instance_state
        # state.dict holds only attributes already materialized in memory.
        loaded_relations = set(state.dict.keys())
    if "documents" in loaded_relations:
        # The first document of type "photo" becomes the thumbnail.
        for doc in data.documents:
            if doc.type.value == "photo":
                thumbnail_id = doc.id
                break
    if "parent_item" in loaded_relations and data.parent_item is not None:
        parent_item_name = data.parent_item.name
    if isinstance(data, dict):
        # NOTE(review): truthiness checks skip an id of 0 — presumably ids
        # start at 1; confirm.
        if thumbnail_id:
            data["thumbnail_id"] = thumbnail_id
        if parent_item_name:
            data["parent_item_name"] = parent_item_name
    elif thumbnail_id or parent_item_name:
        # ORM object with computed values: build a plain-dict snapshot so the
        # extra keys can be attached without mutating the instance.
        result = {}
        for k in dir(data):
            if k.startswith("_"):
                continue
            # Do not touch relations that may be unloaded (lazy-load hazard).
            if k in ("documents", "parent_item", "children", "shop"):
                continue
            try:
                result[k] = getattr(data, k)
            except Exception:
                # Skip any attribute whose access raises (best-effort copy).
                pass
        if thumbnail_id:
            result["thumbnail_id"] = thumbnail_id
        if parent_item_name:
            result["parent_item_name"] = parent_item_name
        return result
    return data
class ItemSummary(BaseModel):

View File

@@ -0,0 +1,45 @@
"""Schémas Pydantic pour les boutiques."""
from datetime import datetime
from pydantic import BaseModel, ConfigDict, Field
class ShopBase(BaseModel):
    """Shared base schema for shops (field descriptions are user-facing, in French)."""
    # Required display name, 1-200 characters.
    name: str = Field(..., min_length=1, max_length=200, description="Nom de la boutique")
    # Optional free-text description, capped at 1000 characters.
    description: str | None = Field(None, max_length=1000, description="Description optionnelle")
    # Optional website URL, stored as a plain string (max 500 chars).
    url: str | None = Field(None, max_length=500, description="URL du site web")
    # Optional physical address (unbounded text).
    address: str | None = Field(None, description="Adresse physique")
class ShopCreate(ShopBase):
    """Schema for creating a shop; identical to ShopBase (no extra fields)."""
    pass
class ShopUpdate(BaseModel):
    """Schema for updating a shop (all fields optional; same limits as ShopBase)."""
    name: str | None = Field(None, min_length=1, max_length=200)
    description: str | None = Field(None, max_length=1000)
    url: str | None = Field(None, max_length=500)
    address: str | None = None
class ShopResponse(ShopBase):
    """Response schema for a shop, hydrated from an ORM object."""
    # from_attributes lets model_validate() read SQLAlchemy instances.
    model_config = ConfigDict(from_attributes=True)
    id: int
    created_at: datetime
    updated_at: datetime
class ShopWithItemCount(ShopResponse):
    """Response schema enriched with the number of items bought from the shop."""
    item_count: int = Field(default=0, description="Nombre d'objets achetés dans cette boutique")