Based on the provided git diff, here is a concise and descriptive commit message that can be used for both files:
```
Update agent config with new models

Add two new models to the agent config: my gpt-5 and ollama/qwen2.5-coder-7b. Also update the MCP server configuration to include anthropic/claude-4-sonnet.
```
This commit message describes the changes made to both files, including the new models added and the updated MCP server configuration. It uses an imperative subject line followed by a blank line and a short body, which is a common convention in software development. (Note: it is not Conventional Commits format — that would require a type prefix such as `feat:` or `chore:`.)
This commit is contained in:
23
.continue/agents/new-config.yaml
Normal file
23
.continue/agents/new-config.yaml
Normal file
@@ -0,0 +1,23 @@
# This is an example configuration file
# To learn more, see the full config.yaml reference: https://docs.continue.dev/reference

name: Example Config
version: 1.0.0
schema: v1

# Define which models can be used
# https://docs.continue.dev/customization/models
models:
  - name: my gpt-5
    provider: openai
    model: gpt-5
    apiKey: YOUR_OPENAI_API_KEY_HERE
  - uses: ollama/qwen2.5-coder-7b
  - uses: anthropic/claude-4-sonnet
    with:
      # Quoted: templated values should always be quoted so the YAML parser
      # treats them as plain strings regardless of what they expand to.
      ANTHROPIC_API_KEY: "${{ secrets.ANTHROPIC_API_KEY }}"

# MCP Servers that Continue can access
# https://docs.continue.dev/customization/mcp-tools
mcpServers:
  - uses: anthropic/memory-mcp
0
.continue/mcpServers/new-mcp-server.yaml
Normal file
0
.continue/mcpServers/new-mcp-server.yaml
Normal file
@@ -1,25 +1,25 @@
|
|||||||
job "ai-frontend" {
|
job "ai-frontend" {
|
||||||
datacenters = [ "Homelab-PTECH-DC" ]
|
datacenters = [ "Homelab-PTECH-DC" ]
|
||||||
region = "home"
|
region = "home"
|
||||||
type = "service"
|
type = "service"
|
||||||
|
|
||||||
# --- OpenWebUI (completely unchanged) ---
|
# --- OpenWebUI (unchanged) ---
|
||||||
group "openwebui" {
|
group "openwebui" {
|
||||||
count = 1
|
count = 1
|
||||||
|
|
||||||
constraint {
|
constraint {
|
||||||
attribute = "${attr.unique.hostname}"
|
attribute = "${attr.unique.hostname}"
|
||||||
value = "hp1-home"
|
value = "hp1-home"
|
||||||
}
|
}
|
||||||
|
|
||||||
network {
|
network {
|
||||||
port "http" {
|
port "http" {
|
||||||
static = 8080
|
static = 8080
|
||||||
to = 8080
|
to = 8080
|
||||||
}
|
}
|
||||||
|
|
||||||
dns {
|
dns {
|
||||||
servers = [ "192.168.1.133" ]
|
servers = [ "192.168.1.133" ]
|
||||||
searches = [ "service.consul" ]
|
searches = [ "service.consul" ]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -30,10 +30,10 @@ job "ai-frontend" {
|
|||||||
tags = ["traefik.enable=true"]
|
tags = ["traefik.enable=true"]
|
||||||
|
|
||||||
check {
|
check {
|
||||||
type = "http"
|
type = "http"
|
||||||
path = "/health"
|
path = "/health"
|
||||||
interval = "20s"
|
interval = "20s"
|
||||||
timeout = "2s"
|
timeout = "2s"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -45,37 +45,37 @@ job "ai-frontend" {
|
|||||||
}
|
}
|
||||||
|
|
||||||
config {
|
config {
|
||||||
image = "ghcr.io/open-webui/open-webui:main"
|
image = "ghcr.io/open-webui/open-webui:main"
|
||||||
ports = [ "http" ]
|
ports = [ "http" ]
|
||||||
volumes = [
|
volumes = [
|
||||||
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/openwebui:/app/backend/data"
|
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/openwebui:/app/backend/data"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
resources {
|
resources {
|
||||||
cpu = 1000
|
cpu = 1000
|
||||||
memory = 4000
|
memory = 4000
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
# --- LobeChat (new, no Traefik tags) ---
|
# --- LobeChat (unchanged) ---
|
||||||
group "lobechat" {
|
group "lobechat" {
|
||||||
count = 1
|
count = 1
|
||||||
|
|
||||||
constraint {
|
constraint {
|
||||||
attribute = "${attr.unique.hostname}"
|
attribute = "${attr.unique.hostname}"
|
||||||
value = "hp1-home"
|
value = "hp1-home"
|
||||||
}
|
}
|
||||||
|
|
||||||
network {
|
network {
|
||||||
port "http" {
|
port "http" {
|
||||||
static = 3210
|
static = 3210
|
||||||
to = 3210
|
to = 3210
|
||||||
}
|
}
|
||||||
|
|
||||||
dns {
|
dns {
|
||||||
servers = [ "192.168.1.133" ]
|
servers = [ "192.168.1.133" ]
|
||||||
searches = [ "service.consul" ]
|
searches = [ "service.consul" ]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -84,14 +84,11 @@ job "ai-frontend" {
|
|||||||
name = "lobechat"
|
name = "lobechat"
|
||||||
port = "http"
|
port = "http"
|
||||||
|
|
||||||
# No Traefik tags added
|
|
||||||
# Only registers the service in Consul for discovery/health checks
|
|
||||||
|
|
||||||
check {
|
check {
|
||||||
type = "tcp"
|
type = "tcp"
|
||||||
port = "http"
|
port = "http"
|
||||||
interval = "20s"
|
interval = "20s"
|
||||||
timeout = "2s"
|
timeout = "2s"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -100,14 +97,11 @@ job "ai-frontend" {
|
|||||||
|
|
||||||
env {
|
env {
|
||||||
OLLAMA_PROXY_URL = "http://ollama.service.consul:11434"
|
OLLAMA_PROXY_URL = "http://ollama.service.consul:11434"
|
||||||
|
|
||||||
# Optional: Add a password if you expose this port externally
|
|
||||||
# ACCESS_CODE = "your-strong-password-here"
|
|
||||||
}
|
}
|
||||||
|
|
||||||
config {
|
config {
|
||||||
image = "lobehub/lobe-chat:latest"
|
image = "lobehub/lobe-chat:latest"
|
||||||
ports = [ "http" ]
|
ports = [ "http" ]
|
||||||
|
|
||||||
volumes = [
|
volumes = [
|
||||||
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/lobechat/data:/data"
|
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/lobechat/data:/data"
|
||||||
@@ -115,9 +109,71 @@ job "ai-frontend" {
|
|||||||
}
|
}
|
||||||
|
|
||||||
resources {
|
resources {
|
||||||
cpu = 1000
|
cpu = 1000
|
||||||
memory = 2000
|
memory = 2000
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
# --- SillyTavern (NEW) ---
|
||||||
|
group "sillytavern" {
|
||||||
|
count = 1
|
||||||
|
|
||||||
|
constraint {
|
||||||
|
attribute = "${attr.unique.hostname}"
|
||||||
|
value = "hp1-home"
|
||||||
|
}
|
||||||
|
|
||||||
|
network {
|
||||||
|
port "http" {
|
||||||
|
static = 8000
|
||||||
|
to = 8000
|
||||||
|
}
|
||||||
|
|
||||||
|
dns {
|
||||||
|
servers = [ "192.168.1.133" ]
|
||||||
|
searches = [ "service.consul" ]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
service {
|
||||||
|
name = "sillytavern"
|
||||||
|
port = "http"
|
||||||
|
# Added Traefik tags to match OpenWebUI since this is a frontend
|
||||||
|
tags = ["traefik.enable=true"]
|
||||||
|
|
||||||
|
check {
|
||||||
|
type = "tcp"
|
||||||
|
port = "http"
|
||||||
|
interval = "20s"
|
||||||
|
timeout = "2s"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
task "server" {
|
||||||
|
driver = "podman"
|
||||||
|
|
||||||
|
env {
|
||||||
|
# Ensures SillyTavern listens on all interfaces (0.0.0.0) so Nomad/Traefik can reach it
|
||||||
|
SILLYTAVERN_LISTEN = "true"
|
||||||
|
}
|
||||||
|
|
||||||
|
config {
|
||||||
|
image = "ghcr.io/sillytavern/sillytavern:latest"
|
||||||
|
ports = [ "http" ]
|
||||||
|
|
||||||
|
# Mounting standard data/config paths to your local SSD structure
|
||||||
|
volumes = [
|
||||||
|
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/sillytavern/config:/home/node/app/config",
|
||||||
|
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/sillytavern/data:/home/node/app/data",
|
||||||
|
"/mnt/local-ssd/nomad/stacks/ai/ai-frontend/sillytavern/plugins:/home/node/app/plugins"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
resources {
|
||||||
|
cpu = 1000
|
||||||
|
memory = 2000
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user