Adds a new `lobechat` group and task to the `ai-frontend.nomad` job file. This change introduces the LobeChat AI frontend application, running in a Podman container, alongside the existing OpenWebUI service. It's configured to connect to an Ollama instance via Consul service discovery. The new `lobechat` service is registered in Consul but does not include Traefik tags, meaning it's not immediately exposed via the Traefik ingress controller. It's constrained to run on the `hp1-home` host.
123 lines · 2.3 KiB · HCL
# Nomad job: AI chat frontends (OpenWebUI + LobeChat), pinned to hp1-home.
# Both groups resolve DNS through Consul (192.168.1.133) so containers can
# discover the Ollama backend via service.consul names.
job "ai-frontend" {
  datacenters = ["Homelab-PTECH-DC"]
  region      = "home"
  type        = "service"

  # --- OpenWebUI (completely unchanged) ---
  group "openwebui" {
    count = 1

    # Pin to the hp1-home node (local SSD volume lives there).
    constraint {
      attribute = "${attr.unique.hostname}"
      value     = "hp1-home"
    }

    network {
      # Host port 8080 mapped straight through to the container.
      port "http" {
        static = 8080
        to     = 8080
      }

      # Consul DNS so "ollama" and *.service.consul names resolve in-container.
      dns {
        servers  = ["192.168.1.133"]
        searches = ["service.consul"]
      }
    }

    # Registered in Consul and exposed through Traefik ingress.
    service {
      name = "openwebui"
      port = "http"
      tags = ["traefik.enable=true"]

      check {
        type     = "http"
        path     = "/health"
        interval = "20s"
        timeout  = "2s"
      }
    }

    task "server" {
      driver = "podman"

      env {
        # Short name resolves via the service.consul search domain above.
        OLLAMA_BASE_URL = "http://ollama:11434"
      }

      config {
        image = "ghcr.io/open-webui/open-webui:main"
        ports = ["http"]

        # Persist chat data on the node-local SSD.
        volumes = [
          "/mnt/local-ssd/nomad/stacks/ai/ai-frontend/openwebui:/app/backend/data"
        ]
      }

      resources {
        cpu    = 1000
        memory = 4000
      }
    }
  }

  # --- LobeChat (new, no Traefik tags) ---
  group "lobechat" {
    count = 1

    # Same node pinning as openwebui — both need hp1-home's local SSD.
    constraint {
      attribute = "${attr.unique.hostname}"
      value     = "hp1-home"
    }

    network {
      # LobeChat's default port, mapped 1:1 on the host.
      port "http" {
        static = 3210
        to     = 3210
      }

      dns {
        servers  = ["192.168.1.133"]
        searches = ["service.consul"]
      }
    }

    service {
      name = "lobechat"
      port = "http"

      # No Traefik tags added
      # Only registers the service in Consul for discovery/health checks

      check {
        type     = "tcp"
        port     = "http"
        interval = "20s"
        timeout  = "2s"
      }
    }

    task "server" {
      driver = "podman"

      env {
        # Fully-qualified Consul name for the Ollama backend.
        OLLAMA_PROXY_URL = "http://ollama.service.consul:11434"

        # Optional: Add a password if you expose this port externally
        # ACCESS_CODE = "your-strong-password-here"
      }

      config {
        image = "lobehub/lobe-chat:latest"
        ports = ["http"]

        # Persist LobeChat state on the node-local SSD.
        volumes = [
          "/mnt/local-ssd/nomad/stacks/ai/ai-frontend/lobechat/data:/data"
        ]
      }

      resources {
        cpu    = 1000
        memory = 2000
      }
    }
  }
}