Add web UIs
@@ -1,8 +1,11 @@
version: '3.8'

services:
  # --- THE BRAIN (Backend Only - P52) ---
  ollama:
    image: ollama/ollama:rocm
    networks:
      - ai_internal
      - proxy
    deploy:
      replicas: 1
      placement:
@@ -10,28 +13,75 @@ services:
        constraints:
          - node.labels.gpu == true
      restart_policy:
        condition: on-failure
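      # The gpu constraint above assumes the P52 node has already been labelled.
      # A sketch of that one-time step (run on a manager; the node name is a
      # placeholder, not taken from this commit):
      #   docker node update --label-add gpu=true <p52-node-name>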

    # Swarm services can't use the devices: key, so we bind-mount the GPU device
    # nodes below and run as root to avoid permission headaches.
    # This lets the container see the /dev/kfd and /dev/dri paths we mount below.
    user: root

    volumes:
      # P52 bind mount for model storage (already working; supersedes the old
      # /mnt/local-ssd/docker/ollama path)
      - /mnt/local-ssd/ollama:/root/.ollama
      # Mount the Kernel Fusion Driver (required for ROCm)
      - /dev/kfd:/dev/kfd
      # Mount the Direct Rendering Interface (the actual cards)
      - /dev/dri:/dev/dri

    environment:
      # Force support for Navi 21 (6900 XT)
      - HSA_OVERRIDE_GFX_VERSION=10.3.0
      # Make every GPU on the node visible to ROCm
      - ROCR_VISIBLE_DEVICES=all
      - OLLAMA_HOST=0.0.0.0
      - OLLAMA_KEEP_ALIVE=5m
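
    # Optional healthcheck (a sketch, not part of this commit): would let Swarm
    # restart the backend if the API stops responding. Assumes the `ollama` CLI
    # is on PATH inside the ollama/ollama:rocm image.
    # healthcheck:
    #   test: ["CMD", "ollama", "list"]
    #   interval: 30s
    #   timeout: 10s
    #   retries: 3
    #
    # After deploying, `docker service logs <stack>_ollama` should show the
    # ROCm/amdgpu detection lines if the GFX override above was picked up.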

  # --- UI 1: Open WebUI (HP Mini 1) ---
  openwebui:
    image: ghcr.io/open-webui/open-webui:main
    networks:
      - ai_internal
      - proxy
    ports:
      - "3000:8080"
    environment:
      - OLLAMA_BASE_URL=http://ollama:11434
    volumes:
      # BIND MOUNT: easy to back up config/users
      - /home/phunter/docker-data/openwebui:/app/backend/data
    deploy:
      placement:
        constraints:
          - node.role == manager
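
      # Reverse-proxy sketch (not in this commit): if the external "proxy"
      # network belongs to a Traefik instance, labels along these lines on the
      # deploy block would publish the UI by hostname; the router name and
      # hostname are placeholders.
      # labels:
      #   - traefik.enable=true
      #   - traefik.http.routers.openwebui.rule=Host(`chat.example.lan`)
      #   - traefik.http.services.openwebui.loadbalancer.server.port=8080
      #   - traefik.docker.network=proxy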

  # --- UI 2: AnythingLLM (HP Mini 1) ---
  anythingllm:
    image: mintplexlabs/anythingllm
    networks:
      - ai_internal
      - proxy
    ports:
      - "3001:3001"
    environment:
      - STORAGE_DIR=/app/server/storage
      - LLM_PROVIDER=ollama
      - OLLAMA_BASE_PATH=http://ollama:11434
      - OLLAMA_MODEL_PREF=llama3
    volumes:
      # BIND MOUNT: easy to back up PDFs/workspaces
      - /home/phunter/docker-data/anythingllm:/app/server/storage
    deploy:
      placement:
        constraints:
          - node.role == manager

  # --- UI 3: Lobe Chat (HP Mini 1) ---
  lobechat:
    image: lobehub/lobe-chat
    networks:
      - ai_internal
      - proxy
    ports:
      - "3210:3210"
    environment:
      - OLLAMA_PROXY_URL=http://ollama:11434/v1
    deploy:
      placement:
        constraints:
          - node.role == manager

networks:
  ai_internal:
    driver: overlay
    attachable: true
  proxy:
    external: true
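
# Deployment sketch (not part of the committed file): from a manager node,
# something like
#   docker network create --driver overlay --attachable proxy   # only if it does not exist yet
#   docker stack deploy -c docker-compose.yml ai
# would bring the stack up; the stack name "ai" and the filename are assumptions.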