Files
mes_docker/ollama/docker-compose.yaml
gilles soulier 2982269d83 Actualiser ollama/docker-compose.yaml
ajout du docker compose pour ollama
2025-09-28 09:26:26 +02:00

81 lines
1.9 KiB
YAML

# https://gist.githubusercontent.com/usrbinkat/de44facc683f954bf0cca6c87e2f9f88/raw/0402e8441de57ccd8b00fe0db8ad40cae7d5fdb8/docker-compose.yaml
---
# Ollama + Open WebUI stack with GPU passthrough, plus Watchtower to
# auto-update the open-webui container. All services share one bridge
# network; model data is persisted in named volumes.
services:
  open-webui:
    container_name: open-webui
    image: ghcr.io/open-webui/open-webui:main
    environment:
      - MODEL_DOWNLOAD_DIR=/models
      # Open WebUI reaches Ollama by service name over the shared network.
      - OLLAMA_API_BASE_URL=http://ollama:11434
      - OLLAMA_API_URL=http://ollama:11434
      - LOG_LEVEL=debug
      # Stable secret prevents session logouts after container updates.
      # NOTE(review): placeholder committed to VCS — override via an env
      # file or secret store rather than editing this literal in place.
      - WEBUI_SECRET_KEY=your_secret_key_here
    volumes:
      - data:/data
      - models:/models
      - open-webui:/app/backend/data  # Corrected path based on documentation
    ports:
      - "8080:8080"
    logging:
      driver: json-file
      options:
        max-size: "5m"
        max-file: "2"
    depends_on:
      - ollama
    extra_hosts:
      - "host.docker.internal:host-gateway"
    networks:
      - ollama-net
    restart: unless-stopped

  ollama:
    container_name: ollama
    image: ollama/ollama:latest
    # GPU access is requested twice on purpose: `runtime: nvidia` covers
    # older engines/nvidia-docker2 setups, while the `deploy` reservation
    # below is the Compose-spec way for current Docker versions.
    runtime: nvidia
    environment:
      - NVIDIA_VISIBLE_DEVICES=all
      - NVIDIA_DRIVER_CAPABILITIES=compute,utility
      - CUDA_VISIBLE_DEVICES=0
      - LOG_LEVEL=debug
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              capabilities: [gpu]
              count: all
    volumes:
      - ollama:/root/.ollama  # model store + runtime state
      - models:/models
    ports:
      - "11434:11434"
    logging:
      driver: json-file
      options:
        max-size: "5m"
        max-file: "2"
    networks:
      - ollama-net
    restart: unless-stopped

  watchtower:
    image: containrrr/watchtower
    container_name: watchtower
    volumes:
      # Watchtower drives the Docker API through the host socket.
      - /var/run/docker.sock:/var/run/docker.sock
    # Only the open-webui container is watched; ollama is left alone.
    command: --interval 300 open-webui  # Check for updates every 5 minutes
    depends_on:
      - open-webui
    networks:
      - ollama-net
    restart: unless-stopped

# Named volumes so data survives container recreation.
volumes:
  data:
  models:
  ollama:
  open-webui:

networks:
  ollama-net:
    driver: bridge