Files
docker-compose/Local Homelab/ollama/compose/compose.yml
Preston Hunter 00f4bd9e38 feat(compose): add AMD-optimized ComfyUI service
Add a new ComfyUI service to the Docker Compose configuration, specifically tailored for AMD GPUs using ROCm.

- Uses the rocm/pytorch base image for hardware acceleration
- Includes an initialization script to clone and install ComfyUI on first boot
- Configures necessary hardware device mappings (/dev/dri, /dev/kfd) and environment variables for AMD compatibility
- Sets up persistent storage and network integration with the existing proxy and AI internal networks
2025-12-24 21:16:16 -05:00

109 lines
2.6 KiB
YAML

# NOTE(review): the top-level `version` key is obsolete in the Compose v2
# spec (ignored with a warning) but is still read by legacy
# `docker stack deploy`; kept for compatibility with the Swarm-style
# `deploy:` sections below.
version: '3.8'
services:
# --- UI 1: Open WebUI (HP Mini 1) ---
openwebui:
image: ghcr.io/open-webui/open-webui:main
networks:
- ai_internal
- proxy
ports:
- "3000:8080"
environment:
- OLLAMA_BASE_URL=http://ollama:11434
volumes:
# BIND MOUNT: Easy to backup config/users
- /home/phunter/docker-data/openwebui:/app/backend/data
deploy:
placement:
constraints:
- node.role == manager
# --- UI 2: AnythingLLM (HP Mini 1) ---
anythingllm:
image: mintplexlabs/anythingllm
networks:
- ai_internal
- proxy
ports:
- "3001:3001"
environment:
- STORAGE_DIR=/app/server/storage
- LLM_PROVIDER=ollama
- OLLAMA_BASE_PATH=http://ollama:11434
- OLLAMA_MODEL_PREF=llama3
volumes:
# BIND MOUNT: Easy to backup PDFs/Workspaces
- /home/phunter/docker-data/anythingllm:/app/server/storage
deploy:
placement:
constraints:
- node.role == manager
# --- UI 3: Lobe Chat (HP Mini 1) ---
lobechat:
image: lobehub/lobe-chat
networks:
- ai_internal
- proxy
ports:
- "3210:3210"
environment:
- OLLAMA_PROXY_URL=http://ollama:11434/v1
deploy:
placement:
constraints:
- node.role == manager
# --- IMAGE ENGINE: ComfyUI (AMD Optimized) ---
comfyui:
image: rocm/pytorch:rocm6.0_ubuntu22.04_py3.10_pytorch_2.1.2
# Command to auto-install ComfyUI on first boot
command: >
bash -c "
if [ ! -d /workspace/ComfyUI ]; then
git clone https://github.com/comfyanonymous/ComfyUI /workspace/ComfyUI;
cd /workspace/ComfyUI;
pip install -r requirements.txt;
fi;
cd /workspace/ComfyUI;
python3 main.py --listen 0.0.0.0 --port 8188 --preview-method auto
"
networks:
- ai_internal
- proxy
deploy:
replicas: 1
placement:
constraints:
- node.labels.gpu == true
# Hardware Access
devices:
- /dev/dri:/dev/dri
- /dev/kfd:/dev/kfd
cap_add:
- IPC_LOCK
- SYS_PTRACE
group_add:
- video
- render
volumes:
# --- UPDATED PATH ---
- /mnt/local-ssd/docker/comfyui:/workspace/ComfyUI
# System Hardware Maps
- /sys/class/drm:/sys/class/drm:ro
- /sys/class/kfd:/sys/class/kfd:ro
- /sys/devices:/sys/devices:ro
environment:
- HSA_OVERRIDE_GFX_VERSION=10.3.0
- CLI_ARGS=--listen 0.0.0.0 --port 8188
# Pre-existing overlay networks shared with the rest of the homelab stack;
# both must already exist (`external: true`) before this file is deployed.
networks:
  ai_internal:
    external: true
  proxy:
    external: true