feat(nomad): Add LobeChat service to ai-frontend job

Adds a new `lobechat` group and task to the `ai-frontend.nomad` job file.

This change introduces the LobeChat AI frontend application, running in a Podman container, alongside the existing OpenWebUI service. It's configured to connect to an Ollama instance via Consul service discovery.
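
For reference, the discovery wiring boils down to two stanzas in the new group (shown in full in the diff below): the group-level `dns` block points the container at a Consul-aware resolver, and the task's environment refers to Ollama by its Consul DNS name.

```hcl
dns {
  servers = [ "192.168.1.133" ]
  searches = [ "service.consul" ]
}

env {
  OLLAMA_PROXY_URL = "http://ollama.service.consul:11434"
}
```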

The new `lobechat` service is registered in Consul but does not include Traefik tags, meaning it's not immediately exposed via the Traefik ingress controller. It's constrained to run on the `hp1-home` host.
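
If the service should be exposed later, tags along the following lines could be added to the `service "lobechat"` block so that Traefik's Consul catalog provider picks it up. This is only a sketch, not part of this commit; the hostname and entrypoint name below are placeholders, not values taken from this job.

```hcl
service {
  name = "lobechat"
  port = "http"

  # Hypothetical Traefik exposure (not part of this commit).
  # "lobechat.home.example" and "websecure" are illustrative placeholders.
  tags = [
    "traefik.enable=true",
    "traefik.http.routers.lobechat.rule=Host(`lobechat.home.example`)",
    "traefik.http.routers.lobechat.entrypoints=websecure"
  ]

  # existing tcp check stays unchanged
}
```
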
Date:   2025-12-29 02:06:11 -05:00
Parent: 69dd29b3a1
Commit: b23a0ab9af

View File

@@ -1,9 +1,9 @@
 job "ai-frontend" {
-  datacenters = ["Homelab-PTECH-DC"]
+  datacenters = [ "Homelab-PTECH-DC" ]
   region = "home"
   type = "service"

-  # --- OpenWebUI ---
+  # --- OpenWebUI (completely unchanged) ---
   group "openwebui" {
     count = 1
@@ -18,10 +18,9 @@ job "ai-frontend" {
         to = 8080
       }

-      # ✅ DNS Moved Here
       dns {
-        servers = ["192.168.1.133"]
-        searches = ["service.consul"]
+        servers = [ "192.168.1.133" ]
+        searches = [ "service.consul" ]
       }
     }
@@ -47,7 +46,7 @@ job "ai-frontend" {
       config {
         image = "ghcr.io/open-webui/open-webui:main"
-        ports = ["http"]
+        ports = [ "http" ]
         volumes = [
           "/mnt/local-ssd/nomad/stacks/ai/ai-frontend/openwebui:/app/backend/data"
         ]
@@ -59,4 +58,66 @@ job "ai-frontend" {
       }
     }
   }
+
+  # --- LobeChat (new, no Traefik tags) ---
+  group "lobechat" {
+    count = 1
+
+    constraint {
+      attribute = "${attr.unique.hostname}"
+      value = "hp1-home"
+    }
+
+    network {
+      port "http" {
+        static = 3210
+        to = 3210
+      }
+
+      dns {
+        servers = [ "192.168.1.133" ]
+        searches = [ "service.consul" ]
+      }
+    }
+
+    service {
+      name = "lobechat"
+      port = "http"
+
+      # No Traefik tags added
+      # Only registers the service in Consul for discovery/health checks
+      check {
+        type = "tcp"
+        port = "http"
+        interval = "20s"
+        timeout = "2s"
+      }
+    }
+
+    task "server" {
+      driver = "podman"
+
+      env {
+        OLLAMA_PROXY_URL = "http://ollama.service.consul:11434"
+        # Optional: Add a password if you expose this port externally
+        # ACCESS_CODE = "your-strong-password-here"
+      }
+
+      config {
+        image = "lobehub/lobe-chat:latest"
+        ports = [ "http" ]
+        volumes = [
+          "/mnt/local-ssd/nomad/stacks/ai/ai-frontend/lobechat/data:/data"
+        ]
+      }
+
+      resources {
+        cpu = 1000
+        memory = 2000
+      }
+    }
+  }
 }