From 1af5f668558d993dc7ed93a70c9f68fe965893ff Mon Sep 17 00:00:00 2001
From: gilles
Date: Sun, 22 Feb 2026 12:11:58 +0100
Subject: [PATCH] fix(ai-service): gestion erreurs, health check, limite taille, run_in_executor

Co-Authored-By: Claude Sonnet 4.6
---
 ai-service/Dockerfile |  2 +-
 ai-service/main.py    | 49 ++++++++++++++++++++++++++++++-------------
 2 files changed, 36 insertions(+), 15 deletions(-)

diff --git a/ai-service/Dockerfile b/ai-service/Dockerfile
index 8f11fb8..b853bb5 100644
--- a/ai-service/Dockerfile
+++ b/ai-service/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.11-slim
+FROM python:3.11.11-slim
 
 WORKDIR /app
 COPY requirements.txt .
diff --git a/ai-service/main.py b/ai-service/main.py
index 0cd96e8..3ec3d88 100644
--- a/ai-service/main.py
+++ b/ai-service/main.py
@@ -1,14 +1,16 @@
+import asyncio
 import io
 import os
 from typing import List
 
-from fastapi import FastAPI, File, UploadFile
-from PIL import Image
+from fastapi import FastAPI, File, HTTPException, UploadFile
+from PIL import Image, UnidentifiedImageError
 
 app = FastAPI(title="AI Plant Detection Service")
 
 _model = None
 MODEL_CACHE_DIR = os.environ.get("MODEL_CACHE_DIR", "/models")
+MAX_FILE_SIZE = 10 * 1024 * 1024  # 10 MB
 
 
 def get_model():
@@ -16,23 +18,16 @@ def get_model():
     if _model is None:
         from ultralytics import YOLO
         os.makedirs(MODEL_CACHE_DIR, exist_ok=True)
-        _model = YOLO("foduucom/plant-leaf-detection-and-classification")
+        try:
+            _model = YOLO("foduucom/plant-leaf-detection-and-classification")
+        except Exception as e:
+            raise RuntimeError(f"Impossible de charger le modèle YOLO: {e}") from e
     return _model
 
 
-@app.get("/health")
-def health():
-    return {"status": "ok"}
-
-
-@app.post("/detect")
-async def detect(file: UploadFile = File(...)):
-    data = await file.read()
-    img = Image.open(io.BytesIO(data)).convert("RGB")
-
+def _run_inference(img: Image.Image) -> list:
     model = get_model()
     results = model.predict(img, conf=0.25, iou=0.45, verbose=False)
-
     detections = []
     if results and results[0].boxes:
         boxes = results[0].boxes
@@ -45,3 +40,29 @@ async def detect(file: UploadFile = File(...)):
             "confidence": round(conf, 3),
         })
     return detections
+
+
+@app.get("/health")
+def health():
+    return {"status": "ok", "model_loaded": _model is not None}
+
+
+@app.post("/detect")
+async def detect(file: UploadFile = File(...)):
+    data = await file.read()
+
+    if len(data) > MAX_FILE_SIZE:
+        raise HTTPException(status_code=413, detail="Fichier trop volumineux (max 10 MB)")
+
+    try:
+        img = Image.open(io.BytesIO(data)).convert("RGB")
+    except (UnidentifiedImageError, OSError) as e:
+        raise HTTPException(status_code=400, detail=f"Image invalide: {e}")
+
+    try:
+        loop = asyncio.get_event_loop()
+        detections = await loop.run_in_executor(None, _run_inference, img)
+    except RuntimeError as e:
+        raise HTTPException(status_code=503, detail=str(e))
+
+    return detections