From ad1c1044c769ae50ade92fda79c5d5c92f710580 Mon Sep 17 00:00:00 2001
From: gilles
Date: Sun, 22 Feb 2026 12:09:56 +0100
Subject: [PATCH] =?UTF-8?q?feat(ai-service):=20container=20YOLO=20FastAPI?=
 =?UTF-8?q?=20pour=20d=C3=A9tection=20plantes?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-Authored-By: Claude Sonnet 4.6
---
 ai-service/Dockerfile       |  9 +++++++
 ai-service/main.py          | 47 ++++++++++++++++++++++++++++++++++++++++
 ai-service/requirements.txt |  5 ++++
 docker-compose.yml          | 31 ++++++++++++++++++++++--
 4 files changed, 90 insertions(+), 2 deletions(-)
 create mode 100644 ai-service/Dockerfile
 create mode 100644 ai-service/main.py
 create mode 100644 ai-service/requirements.txt

diff --git a/ai-service/Dockerfile b/ai-service/Dockerfile
new file mode 100644
index 0000000..8f11fb8
--- /dev/null
+++ b/ai-service/Dockerfile
@@ -0,0 +1,9 @@
+FROM python:3.11-slim
+
+WORKDIR /app
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY main.py .
+
+CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8070"]
diff --git a/ai-service/main.py b/ai-service/main.py
new file mode 100644
index 0000000..0cd96e8
--- /dev/null
+++ b/ai-service/main.py
@@ -0,0 +1,47 @@
+import io
+import os
+from typing import List
+
+from fastapi import FastAPI, File, UploadFile
+from PIL import Image
+
+app = FastAPI(title="AI Plant Detection Service")
+
+_model = None
+MODEL_CACHE_DIR = os.environ.get("MODEL_CACHE_DIR", "/models")
+
+
+def get_model():
+    global _model
+    if _model is None:
+        from ultralytics import YOLO
+        os.makedirs(MODEL_CACHE_DIR, exist_ok=True)
+        _model = YOLO("foduucom/plant-leaf-detection-and-classification")
+    return _model
+
+
+@app.get("/health")
+def health():
+    return {"status": "ok"}
+
+
+@app.post("/detect")
+async def detect(file: UploadFile = File(...)):
+    data = await file.read()
+    img = Image.open(io.BytesIO(data)).convert("RGB")
+
+    model = get_model()
+    results = model.predict(img, conf=0.25, iou=0.45, verbose=False)
+
+    detections = []
+    if results and results[0].boxes:
+        boxes = results[0].boxes
+        names = model.names
+        for i in range(min(3, len(boxes))):
+            cls_id = int(boxes.cls[i].item())
+            conf = float(boxes.conf[i].item())
+            detections.append({
+                "class_name": names[cls_id],
+                "confidence": round(conf, 3),
+            })
+    return detections
diff --git a/ai-service/requirements.txt b/ai-service/requirements.txt
new file mode 100644
index 0000000..578ce09
--- /dev/null
+++ b/ai-service/requirements.txt
@@ -0,0 +1,5 @@
+fastapi==0.115.5
+uvicorn[standard]==0.32.1
+ultralytics==8.3.0
+Pillow==11.1.0
+python-multipart==0.0.12
diff --git a/docker-compose.yml b/docker-compose.yml
index 5f870b3..2c43ffa 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -4,15 +4,42 @@ services:
     volumes:
       - ./data:/data
     ports:
-      - "8000:8000"
+      - "8060:8060"
     env_file:
       - .env
+    environment:
+      - TZ=Europe/Paris
+      - AI_SERVICE_URL=http://ai-service:8070
+      - REDIS_URL=redis://redis:6379
+    restart: unless-stopped
+    depends_on:
+      - redis
+      - ai-service
+
+  ai-service:
+    build: ./ai-service
+    volumes:
+      - yolo_models:/models
+    environment:
+      - MODEL_CACHE_DIR=/models
+    restart: unless-stopped
+
+  redis:
+    image: redis:7-alpine
+    volumes:
+      - redis_data:/data
     restart: unless-stopped
 
   frontend:
     build: ./frontend
     ports:
-      - "80:80"
+      - "8061:8061"
     depends_on:
       - backend
+    environment:
+      - TZ=Europe/Paris
     restart: unless-stopped
+
+volumes:
+  yolo_models:
+  redis_data: