thedocks/ollama/docker-compose.yml
2025-04-27 15:55:00 +01:00

57 lines
1.3 KiB
YAML

services:
  ollama:
    volumes:
      - ollama:/root/.ollama
    container_name: ollama
    pull_policy: always
    tty: true
    restart: unless-stopped
    image: ollama/ollama:${OLLAMA_DOCKER_TAG-latest}
    ports:
      # Quoted: plain digit-and-colon scalars can hit YAML 1.1
      # sexagesimal parsing; the Compose spec recommends quoting ports.
      - "11434:11434"
    environment:
      - OLLAMA_MAX_LOADED_MODELS=1
      - OLLAMA_NO_CUDA=1  # CPU-only inference; pairs with the cpus/memory limits below
    deploy:
      resources:
        limits:
          cpus: '4'     # Limit CPU cores
          memory: 16G   # Limit RAM (adjust based on your system)
    # Uncomment to pass a host GPU render node into the container:
    # devices:
    #   - /dev/dri:/dev/dri

  open-webui:
    build:
      context: .
      args:
        OLLAMA_BASE_URL: '/ollama'
      dockerfile: Dockerfile
    image: ghcr.io/open-webui/open-webui:${WEBUI_DOCKER_TAG-latest}
    container_name: open-webui
    volumes:
      - open-webui:/app/backend/data
    depends_on:
      - ollama
    ports:
      # Quoted for the same sexagesimal/implicit-typing reason as above.
      - "${OPEN_WEBUI_PORT-3001}:8080"
    environment:
      - WEBUI_CONCURRENCY=1
      - LOG_LEVEL=debug
      # Service-to-service URL: resolves via the default Compose network.
      - 'OLLAMA_BASE_URL=http://ollama:11434'
      # NOTE(review): empty secret key — presumably auto-generated at startup;
      # confirm, and consider pinning a value so sessions survive recreation.
      - 'WEBUI_SECRET_KEY='
      - 'RAG_EMBEDDING_ENGINE=ollama'
      - 'AUDIO_STT_ENGINE=openai'
    deploy:
      resources:
        limits:
          cpus: '2'
          memory: 4G
    extra_hosts:
      - host.docker.internal:host-gateway
    restart: unless-stopped

# Named volumes persist model and app data across container recreation.
volumes:
  ollama: {}
  open-webui: {}