docker-compose/Local Homelab/ollama/compose/compose.yml
Preston Hunter fc44fccc10 chore(ollama): remove ComfyUI service from compose configuration
Removed the AMD-optimized ComfyUI service definition from the Docker Compose file, including the ROCm hardware mappings, volume mounts, and network configuration associated with the image-generation engine.
2025-12-24 21:32:30 -05:00
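
For context, an AMD-targeted ComfyUI service in a compose file of this shape would have looked roughly like the sketch below. This is a hypothetical reconstruction, not the deleted block: the image tag, port, and host path are placeholders, while the /dev/kfd and /dev/dri device mappings are the standard ROCm hardware hooks the commit message refers to.

# Hypothetical reconstruction (would sit under `services:`; image and paths are placeholders):
  comfyui:
    image: example/comfyui:rocm        # placeholder; actual image unknown
    networks:
      - ai_internal
    ports:
      - "8188:8188"                    # ComfyUI's default web port
    devices:
      - /dev/kfd                       # ROCm compute interface
      - /dev/dri                       # GPU render nodes
    group_add:
      - video                          # group typically required for /dev/dri access
    volumes:
      - /home/phunter/docker-data/comfyui:/root/comfyui   # placeholder bind mount

The current compose.yml, now reduced to the three chat UIs, follows.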

version: '3.8'

services:
  # --- UI 1: Open WebUI (HP Mini 1) ---
  openwebui:
    image: ghcr.io/open-webui/open-webui:main
    networks:
      - ai_internal
      - proxy
    ports:
      - "3000:8080"
    environment:
      - OLLAMA_BASE_URL=http://ollama:11434
    volumes:
      # BIND MOUNT: Easy to backup config/users
      - /home/phunter/docker-data/openwebui:/app/backend/data
    deploy:
      placement:
        constraints:
          - node.role == manager
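
  # NOTE: the deploy.placement constraints in this file take effect only under
  # Docker Swarm (docker stack deploy); plain `docker compose up` ignores them.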

  # --- UI 2: AnythingLLM (HP Mini 1) ---
  anythingllm:
    image: mintplexlabs/anythingllm
    networks:
      - ai_internal
      - proxy
    ports:
      - "3001:3001"
    environment:
      - STORAGE_DIR=/app/server/storage
      - LLM_PROVIDER=ollama
      - OLLAMA_BASE_PATH=http://ollama:11434
      - OLLAMA_MODEL_PREF=llama3
    volumes:
      # BIND MOUNT: Easy to backup PDFs/Workspaces
      - /home/phunter/docker-data/anythingllm:/app/server/storage
    deploy:
      placement:
        constraints:
          - node.role == manager

  # --- UI 3: Lobe Chat (HP Mini 1) ---
  lobechat:
    image: lobehub/lobe-chat
    networks:
      - ai_internal
      - proxy
    ports:
      - "3210:3210"
    environment:
      - OLLAMA_PROXY_URL=http://ollama:11434/v1
    deploy:
      placement:
        constraints:
          - node.role == manager

networks:
  ai_internal:
    external: true
  proxy:
    external: true
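
None of the three UIs defines the `ollama` backend they point at; the hostname resolves because an Ollama stack is expected to join the same external `ai_internal` network. A minimal sketch of such a backend stack follows, assuming the official ollama/ollama image and a bind-mount path in the style of the UIs above; the exact path and placement are assumptions, not part of this file.

# Minimal sketch of the expected backend stack (assumed; not part of compose.yml):
version: '3.8'
services:
  ollama:
    image: ollama/ollama               # official Ollama image, serves on 11434
    networks:
      - ai_internal                    # same external network the UIs join
    volumes:
      # Placeholder path in the style of the UIs above; actual location unknown
      - /home/phunter/docker-data/ollama:/root/.ollama
networks:
  ai_internal:
    external: true

No port needs publishing here: the UIs reach ollama:11434 over the shared `ai_internal` network, while the `proxy` network stays reserved for whatever reverse proxy fronts the UIs.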