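# AI stack for Docker Swarm: one Ollama (ROCm) backend pinned to the eGPU node,
# plus three chat UIs (Open WebUI, AnythingLLM, Lobe Chat) pinned to the
# manager. Deploy with `docker stack deploy`; example commands at the bottom.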

version: '3.8'

services:
  # --- THE BRAIN (Backend Only) ---
  ollama:
    image: ollama/ollama:rocm
    networks:
      - ai_internal
    deploy:
      replicas: 1
      placement:
        constraints:
          - node.labels.gpu == true
      restart_policy:
        condition: on-failure
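
    # Swarm only schedules the backend on nodes labeled gpu=true; tag the eGPU
    # node once from a manager (hostname is a placeholder):
    #   docker node update --label-add gpu=true <gpu-node-hostname>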

    # Root plus these capabilities let the ROCm runtime open /dev/kfd and pin
    # (mlock) model memory.
    user: root

    cap_add:
      - IPC_LOCK
      - SYS_PTRACE
      - SYS_ADMIN
      - SYS_RESOURCE
      - SYS_NICE

    devices:
      # Map ALL device nodes
      - /dev/dri:/dev/dri
      - /dev/kfd:/dev/kfd
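
    # NOTE: `docker stack deploy` has historically ignored `devices:` for Swarm
    # services; if /dev/kfd never appears inside the container, check whether
    # your engine version honors this key.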

    volumes:
      - /mnt/local-ssd/docker/ollama:/root/.ollama

      # --- THE FIX: Hardware Topology Maps ---
      # Allow ROCm to identify the card model via the sysfs tree
      - /sys/class/drm:/sys/class/drm:ro
      - /sys/class/kfd:/sys/class/kfd:ro
      - /sys/devices:/sys/devices:ro

    environment:
      # Force the runtime to treat the card as gfx10.3.0 (Navi 21 / 6900 XT)
      - HSA_OVERRIDE_GFX_VERSION=10.3.0

      # ROCR_VISIBLE_DEVICES is intentionally unset: we want ROCm to scan the
      # PCI tree naturally now that it can see /sys.

      # eGPU stability: disabling the SDMA engines is a common workaround for
      # flaky transfers over external GPU links
      - HSA_ENABLE_SDMA=0

      - OLLAMA_HOST=0.0.0.0
      - OLLAMA_KEEP_ALIVE=5m
      - OLLAMA_DEBUG=1
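
  # Sanity check once running: with OLLAMA_DEBUG=1 the logs should report the
  # discovered GPU (service name assumes the stack was deployed as "ai"):
  #   docker service logs ai_ollama 2>&1 | grep -i amdgpu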

  # --- UI 1: Open WebUI (HP Mini 1) ---
  openwebui:
    image: ghcr.io/open-webui/open-webui:main
    networks:
      - ai_internal
      - proxy
    ports:
      - "3000:8080"
    environment:
      # Reaches the backend by its service name over the ai_internal overlay
      - OLLAMA_BASE_URL=http://ollama:11434
    volumes:
      # BIND MOUNT: Easy to back up config/users
      - /home/phunter/docker-data/openwebui:/app/backend/data
    deploy:
      placement:
        constraints:
          - node.role == manager
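
  # Port 3000 is reachable on every Swarm node via the ingress routing mesh,
  # not only on the manager this service is pinned to (same for the UIs below).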

  # --- UI 2: AnythingLLM (HP Mini 1) ---
  anythingllm:
    image: mintplexlabs/anythingllm
    networks:
      - ai_internal
      - proxy
    ports:
      - "3001:3001"
    environment:
      - STORAGE_DIR=/app/server/storage
      - LLM_PROVIDER=ollama
      - OLLAMA_BASE_PATH=http://ollama:11434
      - OLLAMA_MODEL_PREF=llama3
    volumes:
      # BIND MOUNT: Easy to back up PDFs/workspaces
      - /home/phunter/docker-data/anythingllm:/app/server/storage
    deploy:
      placement:
        constraints:
          - node.role == manager
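
  # The preferred model must already exist in Ollama; pull it once the stack
  # is up (container name is a placeholder):
  #   docker exec -it <ollama-container> ollama pull llama3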

  # --- UI 3: Lobe Chat (HP Mini 1) ---
  lobechat:
    image: lobehub/lobe-chat
    networks:
      - ai_internal
      - proxy
    ports:
      - "3210:3210"
    environment:
      # Ollama's OpenAI-compatible endpoint lives under /v1
      - OLLAMA_PROXY_URL=http://ollama:11434/v1
    deploy:
      placement:
        constraints:
          - node.role == manager

networks:
  ai_internal:
    driver: overlay
    attachable: true
  proxy:
    external: true
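
# Deploy / update (stack name "ai" and the file name are examples; the external
# proxy overlay must exist before the first deploy):
#   docker network create --driver overlay --attachable proxy
#   docker stack deploy -c ai-stack.yml ai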