feat(ai-service): container YOLO FastAPI pour détection plantes
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
9
ai-service/Dockerfile
Normal file
9
ai-service/Dockerfile
Normal file
# Slim Python base keeps the image small; ultralytics pulls its own deps via pip.
FROM python:3.11-slim

WORKDIR /app

# Copy and install requirements first so this layer is cached across
# code-only changes to main.py.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY main.py .

# Serve the FastAPI app; port 8070 matches AI_SERVICE_URL in docker-compose.
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8070"]
47
ai-service/main.py
Normal file
47
ai-service/main.py
Normal file
@@ -0,0 +1,47 @@
|
||||
import io
|
||||
import os
|
||||
from typing import List
|
||||
|
||||
from fastapi import FastAPI, File, UploadFile
|
||||
from PIL import Image
|
||||
# FastAPI application exposing the plant-detection HTTP API.
app = FastAPI(title="AI Plant Detection Service")

# Lazily-initialised YOLO model singleton; populated on first use by get_model().
_model = None
# Directory for cached model weights; compose mounts the yolo_models volume here.
MODEL_CACHE_DIR = os.environ.get("MODEL_CACHE_DIR", "/models")
||||
def get_model():
    """Return the process-wide YOLO detector, loading it on first call.

    The heavy ultralytics import and model download are deferred so the
    service can start (and answer /health) before the model is ready.
    """
    global _model
    if _model is not None:
        return _model

    from ultralytics import YOLO

    # Make sure the cache directory exists before any weights are written.
    # NOTE(review): MODEL_CACHE_DIR is created but never passed to YOLO —
    # confirm ultralytics actually stores its weights in this directory.
    os.makedirs(MODEL_CACHE_DIR, exist_ok=True)
    _model = YOLO("foduucom/plant-leaf-detection-and-classification")
    return _model
||||
|
||||
@app.get("/health")
def health():
    """Liveness probe; always reports the service as up."""
    return dict(status="ok")
||||
@app.post("/detect")
async def detect(file: UploadFile = File(...)):
    """Run YOLO plant detection on an uploaded image.

    Returns at most the first three detections as a list of
    ``{"class_name": ..., "confidence": ...}`` dicts, with confidence
    rounded to 3 decimals; an empty list when nothing is detected.
    """
    payload = await file.read()
    image = Image.open(io.BytesIO(payload)).convert("RGB")

    model = get_model()
    results = model.predict(image, conf=0.25, iou=0.45, verbose=False)

    detections: List[dict] = []
    if results and results[0].boxes:
        boxes = results[0].boxes
        labels = model.names
        # Cap the response at the first three boxes, as the original did.
        for idx in range(min(3, len(boxes))):
            cls_id = int(boxes.cls[idx].item())
            detections.append(
                {
                    "class_name": labels[cls_id],
                    "confidence": round(float(boxes.conf[idx].item()), 3),
                }
            )
    return detections
||||
5
ai-service/requirements.txt
Normal file
5
ai-service/requirements.txt
Normal file
# Pinned runtime dependencies for the AI plant-detection service.
fastapi==0.115.5
uvicorn[standard]==0.32.1
ultralytics==8.3.0          # YOLO inference
Pillow==11.1.0              # image decoding
python-multipart==0.0.12    # required by FastAPI for UploadFile form parsing
||||
@@ -4,15 +4,42 @@ services:
|
||||
    # NOTE(review): this hunk starts inside an existing service whose name is
    # above this chunk (presumably the backend); indentation reconstructed
    # from compose semantics — confirm against the full file.
    volumes:
      - ./data:/data
    ports:
      - "8000:8000"
      - "8060:8060"
    env_file:
      - .env
    environment:
      - TZ=Europe/Paris
      # Internal URL of the YOLO sidecar service defined below.
      - AI_SERVICE_URL=http://ai-service:8070
      - REDIS_URL=redis://redis:6379
    restart: unless-stopped
    depends_on:
      - redis
      - ai-service

  # YOLO detection sidecar, built from ./ai-service (FastAPI + ultralytics).
  ai-service:
    build: ./ai-service
    volumes:
      # Persist downloaded model weights across container rebuilds.
      - yolo_models:/models
    environment:
      - MODEL_CACHE_DIR=/models
    restart: unless-stopped

  redis:
    image: redis:7-alpine
    volumes:
      # Persist redis data across container restarts.
      - redis_data:/data
    restart: unless-stopped

  frontend:
    build: ./frontend
    ports:
      - "80:80"
      - "8061:8061"
    depends_on:
      - backend
    environment:
      - TZ=Europe/Paris
    restart: unless-stopped

# Named volumes: model weight cache and redis persistence.
volumes:
  yolo_models:
  redis_data:
||||
Reference in New Issue
Block a user