[mod] ollama

This commit is contained in:
Andy Bunce 2025-04-27 15:37:19 +01:00
parent 896e82db86
commit 32724525b3

View file

@@ -10,10 +10,9 @@ services:
     ports:
       - 11434:11434
     environment:
-      - OLLAMA_NUM_GPU=99
       - OLLAMA_MAX_LOADED_MODELS=1
-    devices:
-      - /dev/dri:/dev/dri
+    # devices:
+    #   - /dev/dri:/dev/dri
   open-webui:
     build:
@@ -30,12 +29,19 @@ services:
     ports:
       - ${OPEN_WEBUI_PORT-3001}:8080
     environment:
+      - WEBUI_CONCURRENCY=1
+      - LOG_LEVEL=debug
       - 'OLLAMA_BASE_URL=http://ollama:11434'
       - 'WEBUI_SECRET_KEY='
       - 'RAG_EMBEDDING_ENGINE=ollama'
       - 'AUDIO_STT_ENGINE=openai'
-    devices:
-      - /dev/dri:/dev/dri
+    deploy:
+      resources:
+        limits:
+          cpus: '2'
+          memory: 4G
     extra_hosts:
       - host.docker.internal:host-gateway
     restart: unless-stopped