```
Update agent config with new models

Add two new models to the agent config: my gpt-5 and ollama/qwen2.5-coder-7b. Also update the MCP server configuration to include anthropic/claude-4-sonnet.
```

This commit message clearly describes the changes made to both files: the new models added and the updated MCP server configuration. It also follows the common convention of a short imperative subject line separated from a more detailed body by a blank line.
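For context, the edited config might look roughly like the sketch below. The file layout and key names are assumptions (the actual agent config isn't shown here); only the model identifiers come from the commit message:

```jsonc
// Hypothetical layout — the schema and key names are assumed;
// only the model identifiers are taken from the commit message.
{
  "models": [
    "my gpt-5",
    "ollama/qwen2.5-coder-7b"
  ],
  "mcp": {
    "servers": {
      "anthropic": { "model": "anthropic/claude-4-sonnet" }
    }
  }
}
```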
job "ai-frontend" {
|
|
datacenters = [ "Homelab-PTECH-DC" ]
|
|
region = "home"
|
|
type = "service"
|
|
|
|
# --- OpenWebUI (unchanged) ---
|
|
group "openwebui" {
|
|
count = 1
|
|
|
|
constraint {
|
|
attribute = "${attr.unique.hostname}"
|
|
value = "hp1-home"
|
|
}
|
|
|
|
network {
|
|
port "http" {
|
|
static = 8080
|
|
to = 8080
|
|
}
|
|
|
|
dns {
|
|
servers = [ "192.168.1.133" ]
|
|
searches = [ "service.consul" ]
|
|
}
|
|
}
|
|
|
|
service {
|
|
name = "openwebui"
|
|
port = "http"
|
|
tags = ["traefik.enable=true"]
|
|
|
|
check {
|
|
type = "http"
|
|
path = "/health"
|
|
interval = "20s"
|
|
timeout = "2s"
|
|
}
|
|
}
|
|
|
|
task "server" {
|
|
driver = "podman"
|
|
|
|
env {
|
|
OLLAMA_BASE_URL = "http://ollama:11434"
|
|
}
|
|
|
|
config {
|
|
image = "ghcr.io/open-webui/open-webui:main"
|
|
ports = [ "http" ]
|
|
volumes = [
|
|
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/openwebui:/app/backend/data"
|
|
]
|
|
}
|
|
|
|
resources {
|
|
cpu = 1000
|
|
memory = 4000
|
|
}
|
|
}
|
|
}
|
|
|
|
# --- LobeChat (unchanged) ---
|
|
group "lobechat" {
|
|
count = 1
|
|
|
|
constraint {
|
|
attribute = "${attr.unique.hostname}"
|
|
value = "hp1-home"
|
|
}
|
|
|
|
network {
|
|
port "http" {
|
|
static = 3210
|
|
to = 3210
|
|
}
|
|
|
|
dns {
|
|
servers = [ "192.168.1.133" ]
|
|
searches = [ "service.consul" ]
|
|
}
|
|
}
|
|
|
|
service {
|
|
name = "lobechat"
|
|
port = "http"
|
|
|
|
check {
|
|
type = "tcp"
|
|
port = "http"
|
|
interval = "20s"
|
|
timeout = "2s"
|
|
}
|
|
}
|
|
|
|
task "server" {
|
|
driver = "podman"
|
|
|
|
env {
|
|
OLLAMA_PROXY_URL = "http://ollama.service.consul:11434"
|
|
}
|
|
|
|
config {
|
|
image = "lobehub/lobe-chat:latest"
|
|
ports = [ "http" ]
|
|
|
|
volumes = [
|
|
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/lobechat/data:/data"
|
|
]
|
|
}
|
|
|
|
resources {
|
|
cpu = 1000
|
|
memory = 2000
|
|
}
|
|
}
|
|
}
|
|
|
|
# --- SillyTavern (NEW) ---
|
|
group "sillytavern" {
|
|
count = 1
|
|
|
|
constraint {
|
|
attribute = "${attr.unique.hostname}"
|
|
value = "hp1-home"
|
|
}
|
|
|
|
network {
|
|
port "http" {
|
|
static = 8000
|
|
to = 8000
|
|
}
|
|
|
|
dns {
|
|
servers = [ "192.168.1.133" ]
|
|
searches = [ "service.consul" ]
|
|
}
|
|
}
|
|
|
|
service {
|
|
name = "sillytavern"
|
|
port = "http"
|
|
# Added Traefik tags to match OpenWebUI since this is a frontend
|
|
tags = ["traefik.enable=true"]
|
|
|
|
check {
|
|
type = "tcp"
|
|
port = "http"
|
|
interval = "20s"
|
|
timeout = "2s"
|
|
}
|
|
}
|
|
|
|
task "server" {
|
|
driver = "podman"
|
|
|
|
env {
|
|
# Ensures SillyTavern listens on all interfaces (0.0.0.0) so Nomad/Traefik can reach it
|
|
SILLYTAVERN_LISTEN = "true"
|
|
}
|
|
|
|
config {
|
|
image = "ghcr.io/sillytavern/sillytavern:latest"
|
|
ports = [ "http" ]
|
|
|
|
# Mounting standard data/config paths to your local SSD structure
|
|
volumes = [
|
|
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/sillytavern/config:/home/node/app/config",
|
|
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/sillytavern/data:/home/node/app/data",
|
|
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/sillytavern/plugins:/home/node/app/plugins"
|
|
]
|
|
}
|
|
|
|
resources {
|
|
cpu = 1000
|
|
memory = 2000
|
|
}
|
|
}
|
|
}
|
|
} |
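To roll this out, something like the following should work — a sketch assuming the file is saved as `ai-frontend.nomad` and the Nomad CLI is already pointed at the cluster:

```sh
# Preview the scheduling diff; since the first two groups are unchanged,
# only the new sillytavern group should show up as an addition
nomad job plan ai-frontend.nomad

# Submit the updated job
nomad job run ai-frontend.nomad

# Watch the allocations come up
nomad job status ai-frontend

# Once the allocation is healthy, the service should resolve through
# Consul DNS (192.168.1.133 is the DNS server the groups already use)
dig @192.168.1.133 sillytavern.service.consul
```

If Traefik picks up the new `traefik.enable=true` tag, SillyTavern should then be reachable through the same ingress path as OpenWebUI.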