This commit is contained in:
2026-01-18 12:23:01 +01:00
parent ef3d0ed970
commit bb1263edb8
86 changed files with 90289 additions and 0 deletions

45
backend/app/db/crud.py Normal file
View File

@@ -0,0 +1,45 @@
from __future__ import annotations
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session
from backend.app.db import models, schemas
"""
CRUD minimal pour manipuler les produits dans la base SQLite.
"""
def get_product(db: Session, product_id: int) -> models.Product | None:
    """Return the product with primary key *product_id*, or ``None`` if absent."""
    query = db.query(models.Product).filter(models.Product.id == product_id)
    return query.first()
def list_products(db: Session, skip: int = 0, limit: int = 100) -> list[models.Product]:
    """Return a page of products using offset/limit pagination."""
    return (
        db.query(models.Product)
        .offset(skip)
        .limit(limit)
        .all()
    )
def create_product(db: Session, data: schemas.ProductCreate) -> models.Product:
    """Insert a new product built from the *data* payload and return it.

    On a constraint violation the transaction is rolled back and the
    ``IntegrityError`` is re-raised so the caller can report it.
    """
    new_row = models.Product(**data.dict())
    db.add(new_row)
    try:
        db.commit()
    except IntegrityError:
        # Leave the session usable for subsequent operations.
        db.rollback()
        raise
    # Reload server-generated values (id, timestamp defaults).
    db.refresh(new_row)
    return new_row
def update_product(db: Session, product: models.Product, changes: schemas.ProductUpdate) -> models.Product:
    """Apply the fields set on *changes* to *product* and persist them.

    Only fields explicitly provided in the payload are written
    (``exclude_unset=True``), so partial updates never clobber other
    columns. Mirrors ``create_product``: on a constraint violation the
    transaction is rolled back and the ``IntegrityError`` re-raised,
    instead of leaving the session stuck in a failed transaction.
    """
    for field, value in changes.dict(exclude_unset=True).items():
        setattr(product, field, value)
    db.add(product)
    try:
        db.commit()
    except IntegrityError:
        db.rollback()
        raise
    db.refresh(product)
    return product
def remove_product(db: Session, product: models.Product) -> None:
    """Delete *product* and commit the deletion immediately."""
    db.delete(product)
    db.commit()

View File

@@ -0,0 +1,30 @@
from __future__ import annotations
from os import getenv
from pathlib import Path
from sqlalchemy import create_engine
from sqlalchemy.orm import declarative_base, sessionmaker
# backend/data holds both the SQLite database file and raw JSON payloads.
DATA_DIR = Path(__file__).resolve().parent.parent.parent / "data"
DATA_DIR.mkdir(parents=True, exist_ok=True)

DEFAULT_DATABASE_PATH = DATA_DIR / "suivi.db"
DEFAULT_DATABASE_URL = f"sqlite:///{DEFAULT_DATABASE_PATH}"

# The target database can be overridden via the environment.
DATABASE_URL = getenv("DATABASE_URL", DEFAULT_DATABASE_URL)

# check_same_thread=False is a SQLite-only connect argument (the engine is
# shared across threads); passing it unconditionally would break
# create_engine() for any non-SQLite DATABASE_URL, so build it per-backend.
_connect_args: dict = {}
if DATABASE_URL.startswith("sqlite:///"):
    # SQLite requires the parent directory of the db file to exist.
    sqlite_path = Path(DATABASE_URL.replace("sqlite:///", "", 1))
    sqlite_path.parent.mkdir(parents=True, exist_ok=True)
    _connect_args["check_same_thread"] = False
else:
    DEFAULT_DATABASE_PATH.parent.mkdir(parents=True, exist_ok=True)

engine = create_engine(
    DATABASE_URL,
    connect_args=_connect_args,
    future=True,
)

# Session factory; sessions are created per request/unit of work.
SessionLocal = sessionmaker(bind=engine, autoflush=False, autocommit=False, future=True)

# Declarative base class shared by all ORM models.
Base = declarative_base()

70
backend/app/db/models.py Normal file
View File

@@ -0,0 +1,70 @@
from __future__ import annotations
from datetime import datetime
from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Integer, String, Text
from sqlalchemy.orm import relationship
from backend.app.db.database import Base
class Product(Base):
    """Main table of tracked products.

    One row per product being monitored; per-scrape history lives in
    ``ProductSnapshot`` rows reachable via ``snapshots``.
    """
    __tablename__ = "products"
    id = Column(Integer, primary_key=True, index=True)
    # Store/marketplace the product belongs to — presumably a short shop code.
    boutique = Column(String(32), nullable=False)
    # Product page URL.
    url = Column(Text, nullable=False)
    # Amazon-style product identifier; indexed for lookups.
    asin = Column(String(20), nullable=False, index=True)
    titre = Column(Text, nullable=True)
    url_image = Column(Text, nullable=True)
    categorie = Column(String(64), nullable=True)
    type = Column(String(64), nullable=True)
    # Whether the product is still actively tracked.
    actif = Column(Boolean, default=True)
    # NOTE(review): naive UTC timestamps (datetime.utcnow); onupdate keeps
    # modifie_le current on every flush that changes the row.
    cree_le = Column(DateTime, default=datetime.utcnow)
    modifie_le = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    snapshots = relationship("ProductSnapshot", back_populates="product")
class ScrapeRun(Base):
    """Log of scraping runs, used to monitor the success rate."""
    __tablename__ = "scrape_runs"
    id = Column(Integer, primary_key=True, index=True)
    # Run lifecycle: started at creation, termine_le filled in when done.
    demarre_le = Column(DateTime, default=datetime.utcnow)
    termine_le = Column(DateTime, nullable=True)
    # Free-form status string; starts as "pending".
    statut = Column(String(32), default="pending")
    # Counters: total products attempted, successes, failures.
    nb_total = Column(Integer, default=0)
    nb_ok = Column(Integer, default=0)
    nb_echec = Column(Integer, default=0)
    # Path to the run's log file, if one was written.
    chemin_log = Column(Text, nullable=True)
    snapshots = relationship("ProductSnapshot", back_populates="scrape_run")
class ProductSnapshot(Base):
    """History of snapshots captured for each product.

    One row per (product, scrape) observation; links back to the owning
    ``Product`` and, optionally, to the ``ScrapeRun`` that produced it.
    """
    __tablename__ = "product_snapshots"
    id = Column(Integer, primary_key=True, index=True)
    produit_id = Column(Integer, ForeignKey("products.id"), nullable=False)
    # Nullable: a snapshot may be captured outside any recorded run.
    run_scrap_id = Column(Integer, ForeignKey("scrape_runs.id"), nullable=True)
    scrape_le = Column(DateTime, default=datetime.utcnow)
    # Observed prices; all nullable since any field may be missing on a page.
    prix_actuel = Column(Float, nullable=True)
    prix_conseille = Column(Float, nullable=True)
    prix_min_30j = Column(Float, nullable=True)
    # Raw stock text plus a derived boolean.
    etat_stock = Column(String(256), nullable=True)
    en_stock = Column(Boolean, nullable=True)
    note = Column(Float, nullable=True)
    nombre_avis = Column(Integer, nullable=True)
    # Marketplace badge flags (Prime, Amazon's Choice, limited offer, exclusive).
    prime = Column(Boolean, nullable=True)
    choix_amazon = Column(Boolean, nullable=True)
    offre_limitee = Column(Boolean, nullable=True)
    exclusivite_amazon = Column(Boolean, nullable=True)
    # Path to the raw JSON payload captured for this snapshot, if any.
    chemin_json_brut = Column(Text, nullable=True)
    # Per-snapshot scrape outcome; "ok" by default, with error detail below.
    statut_scrap = Column(String(32), default="ok")
    message_erreur = Column(Text, nullable=True)
    product = relationship("Product", back_populates="snapshots")
    scrape_run = relationship("ScrapeRun", back_populates="snapshots")

63
backend/app/db/schemas.py Normal file
View File

@@ -0,0 +1,63 @@
from __future__ import annotations
from datetime import datetime
from typing import Optional
from pydantic import BaseModel, HttpUrl
class ProductBase(BaseModel):
    """Fields shared by all product payloads."""
    boutique: str
    url: HttpUrl
    asin: str
    # Explicit ``= None`` defaults: pydantic v1 already treats a bare
    # ``Optional[X]`` as defaulting to None, so behavior is unchanged,
    # but v2 would make these fields *required* — being explicit keeps
    # the schema stable across versions.
    titre: Optional[str] = None
    url_image: Optional[HttpUrl] = None
    categorie: Optional[str] = None
    type: Optional[str] = None
    actif: Optional[bool] = True
class ProductCreate(ProductBase):
    """Payload for creating a product; identical to ``ProductBase``."""
class ProductUpdate(BaseModel):
    """Partial-update payload; only fields the client sets are applied.

    Consumed with ``dict(exclude_unset=True)`` by the CRUD layer, so an
    omitted field is left untouched on the row.
    """
    titre: Optional[str] = None
    url_image: Optional[HttpUrl] = None
    categorie: Optional[str] = None
    type: Optional[str] = None
    actif: Optional[bool] = None
class ProductRead(ProductBase):
    """Product as returned by the API, including server-managed fields."""
    id: int
    cree_le: datetime
    modifie_le: datetime
    class Config:
        # pydantic v1: allow building the model from ORM objects.
        orm_mode = True
class ProductSnapshotBase(BaseModel):
    """Observed fields of a single scrape snapshot; everything optional.

    Explicit ``= None`` defaults match pydantic v1's implicit handling of
    bare ``Optional[X]`` annotations (no behavior change) and stay
    optional under pydantic v2, where an un-defaulted Optional field
    would become required.
    """
    prix_actuel: Optional[float] = None
    prix_conseille: Optional[float] = None
    prix_min_30j: Optional[float] = None
    etat_stock: Optional[str] = None
    en_stock: Optional[bool] = None
    note: Optional[float] = None
    nombre_avis: Optional[int] = None
    prime: Optional[bool] = None
    choix_amazon: Optional[bool] = None
    offre_limitee: Optional[bool] = None
    exclusivite_amazon: Optional[bool] = None
    statut_scrap: Optional[str] = None
    message_erreur: Optional[str] = None
class ProductSnapshotRead(ProductSnapshotBase):
    """Snapshot as returned by the API, including identifiers and timestamp."""
    id: int
    produit_id: int
    scrape_le: datetime
    class Config:
        # pydantic v1: allow building the model from ORM objects.
        orm_mode = True