[add] ollama

This commit is contained in:
Andy Bunce 2026-02-14 21:57:08 +00:00
parent dbdfe6d052
commit 8e5ba0b73a
3 changed files with 17 additions and 0 deletions

View file

@ -0,0 +1,58 @@
services:
  # Ollama model server — exposes the API on 11434, persists models in the
  # named volume `ollama`.
  ollama:
    volumes:
      - ollama:/root/.ollama
    container_name: ollama
    pull_policy: always
    tty: true
    restart: unless-stopped
    image: ollama/ollama:${OLLAMA_DOCKER_TAG-latest}
    ports:
      # Quoted: "digits:digits" is a YAML 1.1 sexagesimal trap for bare scalars.
      - "11434:11434"
    environment:
      - OLLAMA_MAX_LOADED_MODELS=1
      - OLLAMA_NO_CUDA=1
      - OLLAMA_FLASH_ATTENTION=1
    deploy:
      resources:
        limits:
          cpus: '6.0'  # Limit CPU cores
          memory: 16G  # Limit RAM (adjust based on your system)
    # devices:
    #   - /dev/dri:/dev/dri

  # Open WebUI front-end — talks to the ollama service over the compose network.
  open-webui:
    build:
      context: .
      args:
        OLLAMA_BASE_URL: '/ollama'
      dockerfile: Dockerfile
    image: ghcr.io/open-webui/open-webui:${WEBUI_DOCKER_TAG-latest}
    container_name: open-webui
    volumes:
      - open-webui:/app/backend/data
    depends_on:
      - ollama
    ports:
      # Host port defaults to 3001 when OPEN_WEBUI_PORT is unset; quoted so the
      # mapping is always read as a string.
      - "${OPEN_WEBUI_PORT-3001}:8080"
    environment:
      - WEBUI_CONCURRENCY=1
      - LOG_LEVEL=debug
      - 'OLLAMA_BASE_URL=http://ollama:11434'
      - 'WEBUI_SECRET_KEY='
      - 'RAG_EMBEDDING_ENGINE=ollama'
      - 'AUDIO_STT_ENGINE=openai'
    deploy:
      resources:
        limits:
          cpus: '2'
          memory: 4G
    extra_hosts:
      - host.docker.internal:host-gateway
    restart: unless-stopped

volumes:
  ollama: {}
  open-webui: {}

9
ollama2/readme.md Normal file
View file

@ -0,0 +1,9 @@
Setup guide: [How to setup Open WebUI with Ollama and Docker Desktop](https://dev.to/ajeetraina/how-to-setup-open-webui-with-ollama-and-docker-desktop-24f0)
```mermaid
graph LR
A[Start] --> B[Process]
B --> C[Decision]
C -->|Yes| D[End]
C -->|No| E[Loop Back to B]
```