This commit adds the missing closing brace to the ai-frontend.nomad job definition. Without it the file was incomplete: the outermost job block was left unclosed, so Nomad could not parse the job specification. With the brace in place, the job definition is syntactically valid and parses correctly.
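For context, the brace in question is presumably the final, job-level one: HCL requires every job, group, and task block to be closed, and an unterminated outer block makes the whole file fail to parse. A quick way to catch this before submitting the job, assuming the nomad CLI is available on the host, is to validate the file locally:

    nomad job validate ai-frontend.nomad

This parses the specification and reports syntax errors such as an unclosed block, without scheduling anything.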
124 lines · 2.3 KiB · HCL
job "ai-frontend" {
|
|
datacenters = [ "Homelab-PTECH-DC" ]
|
|
region = "home"
|
|
type = "service"
|
|
|
|
# --- OpenWebUI (completely unchanged) ---
|
|
group "openwebui" {
|
|
count = 1
|
|
|
|
constraint {
|
|
attribute = "${attr.unique.hostname}"
|
|
value = "hp1-home"
|
|
}
|
|
|
|
network {
|
|
port "http" {
|
|
static = 8080
|
|
to = 8080
|
|
}
|
|
|
|
dns {
|
|
servers = [ "192.168.1.133" ]
|
|
searches = [ "service.consul" ]
|
|
}
|
|
}
|
|
|
|
service {
|
|
name = "openwebui"
|
|
port = "http"
|
|
tags = ["traefik.enable=true"]
|
|
|
|
check {
|
|
type = "http"
|
|
path = "/health"
|
|
interval = "20s"
|
|
timeout = "2s"
|
|
}
|
|
}
|
|
|
|
task "server" {
|
|
driver = "podman"
|
|
|
|
env {
|
|
OLLAMA_BASE_URL = "http://ollama:11434"
|
|
}
|
|
|
|
config {
|
|
image = "ghcr.io/open-webui/open-webui:main"
|
|
ports = [ "http" ]
|
|
volumes = [
|
|
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/openwebui:/app/backend/data"
|
|
]
|
|
}
|
|
|
|
resources {
|
|
cpu = 1000
|
|
memory = 4000
|
|
}
|
|
}
|
|
}
|
|
|
|
# --- LobeChat (new, no Traefik tags) ---
|
|
group "lobechat" {
|
|
count = 1
|
|
|
|
constraint {
|
|
attribute = "${attr.unique.hostname}"
|
|
value = "hp1-home"
|
|
}
|
|
|
|
network {
|
|
port "http" {
|
|
static = 3210
|
|
to = 3210
|
|
}
|
|
|
|
dns {
|
|
servers = [ "192.168.1.133" ]
|
|
searches = [ "service.consul" ]
|
|
}
|
|
}
|
|
|
|
service {
|
|
name = "lobechat"
|
|
port = "http"
|
|
|
|
# No Traefik tags added
|
|
# Only registers the service in Consul for discovery/health checks
|
|
|
|
check {
|
|
type = "tcp"
|
|
port = "http"
|
|
interval = "20s"
|
|
timeout = "2s"
|
|
}
|
|
}
|
|
|
|
task "server" {
|
|
driver = "podman"
|
|
|
|
env {
|
|
OLLAMA_PROXY_URL = "http://ollama.service.consul:11434"
|
|
|
|
# Optional: Add a password if you expose this port externally
|
|
# ACCESS_CODE = "your-strong-password-here"
|
|
}
|
|
|
|
config {
|
|
image = "lobehub/lobe-chat:latest"
|
|
ports = [ "http" ]
|
|
|
|
volumes = [
|
|
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/lobechat/data:/data"
|
|
]
|
|
}
|
|
|
|
resources {
|
|
cpu = 1000
|
|
memory = 2000
|
|
}
|
|
}
|
|
}
|
|
}
|