Remove Ollama

2025-12-24 01:51:23 -05:00
parent bfbf308d05
commit 2c5b4d0394


@@ -1,54 +1,6 @@
 version: '3.8'
 services:
-  # --- THE BRAIN (Backend Only) ---
-  ollama:
-    image: ollama/ollama:rocm
-    networks:
-      - ai_internal
-    deploy:
-      replicas: 1
-      placement:
-        constraints:
-          - node.labels.gpu == true
-      restart_policy:
-        condition: on-failure
-    user: "0:993"
-    cap_add:
-      - IPC_LOCK
-      - SYS_PTRACE
-      - SYS_ADMIN
-      - SYS_RESOURCE
-      - SYS_NICE
-    devices:
-      # Map ALL device nodes
-      - /dev/dri:/dev/dri
-      - /dev/kfd:/dev/kfd
-    volumes:
-      - /mnt/local-ssd/docker/ollama:/root/.ollama
-      # --- THE FIX: Hardware Topology Maps ---
-      # Allow ROCm to identify the card model via System Tree
-      - /sys/class/drm:/sys/class/drm:ro
-      - /sys/class/kfd:/sys/class/kfd:ro
-      - /sys/devices:/sys/devices:ro
-    environment:
-      # Force support for Navi 21 (6900 XT)
-      - HSA_OVERRIDE_GFX_VERSION=10.3.0
-      # REMOVE ROCR_VISIBLE_DEVICES
-      # (We want it to scan the PCI tree naturally now that it can see /sys)
-      # eGPU Stability
-      - HSA_ENABLE_SDMA=0
-      - OLLAMA_HOST=0.0.0.0
-      - OLLAMA_KEEP_ALIVE=5m
-      - OLLAMA_DEBUG=1
   # --- UI 1: Open WebUI (HP Mini 1) ---
   openwebui: