[mod] ollama

Andy Bunce 2025-04-27 15:37:19 +01:00
parent 896e82db86
commit 32724525b3


@@ -10,10 +10,9 @@ services:
    ports:
      - 11434:11434
    environment:
      - OLLAMA_NUM_GPU=99
      - OLLAMA_MAX_LOADED_MODELS=1
    devices:
      - /dev/dri:/dev/dri
    # devices:
    #   - /dev/dri:/dev/dri
  open-webui:
    build:
@@ -30,12 +29,19 @@ services:
    ports:
      - ${OPEN_WEBUI_PORT-3001}:8080
    environment:
      - WEBUI_CONCURRENCY=1
      - LOG_LEVEL=debug
      - 'OLLAMA_BASE_URL=http://ollama:11434'
      - 'WEBUI_SECRET_KEY='
      - 'RAG_EMBEDDING_ENGINE=ollama'
      - 'AUDIO_STT_ENGINE=openai'
    devices:
      - /dev/dri:/dev/dri
    deploy:
      resources:
        limits:
          cpus: '2'
          memory: 4G
    extra_hosts:
      - host.docker.internal:host-gateway
    restart: unless-stopped
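
For reference, the two services likely read roughly as follows after this commit. This is a sketch reconstructed from the hunks above, assuming standard docker-compose indentation: the image/build lines and anything outside the hunks are assumptions, and because the diff's +/- markers were stripped it is not certain which devices variant survives on the ollama service; the commented form is shown here.

services:
  ollama:
    image: ollama/ollama            # assumed; this line is not part of the diff hunks
    ports:
      - 11434:11434
    environment:
      - OLLAMA_NUM_GPU=99
      - OLLAMA_MAX_LOADED_MODELS=1
    # devices:
    #   - /dev/dri:/dev/dri
  open-webui:
    build: .                        # assumed; the build context is not shown in the hunks
    ports:
      - ${OPEN_WEBUI_PORT-3001}:8080
    environment:
      - WEBUI_CONCURRENCY=1
      - LOG_LEVEL=debug
      - 'OLLAMA_BASE_URL=http://ollama:11434'
      - 'WEBUI_SECRET_KEY='
      - 'RAG_EMBEDDING_ENGINE=ollama'
      - 'AUDIO_STT_ENGINE=openai'
    devices:
      - /dev/dri:/dev/dri
    deploy:
      resources:
        limits:
          cpus: '2'
          memory: 4G
    extra_hosts:
      - host.docker.internal:host-gateway
    restart: unless-stopped

With OLLAMA_BASE_URL pointed at http://ollama:11434, Open WebUI reaches the ollama container over the compose network, while the host.docker.internal:host-gateway entry lets the container call services running on the host.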