diff --git a/.continue/agents/new-config.yaml b/.continue/agents/new-config.yaml
new file mode 100644
index 0000000..d55cdcf
--- /dev/null
+++ b/.continue/agents/new-config.yaml
@@ -0,0 +1,23 @@
+# This is an example configuration file
+# To learn more, see the full config.yaml reference: https://docs.continue.dev/reference
+
+name: Example Config
+version: 1.0.0
+schema: v1
+
+# Define which models can be used
+# https://docs.continue.dev/customization/models
+models:
+  - name: my gpt-5
+    provider: openai
+    model: gpt-5
+    apiKey: YOUR_OPENAI_API_KEY_HERE
+  - uses: ollama/qwen2.5-coder-7b
+  - uses: anthropic/claude-4-sonnet
+    with:
+      ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+
+# MCP Servers that Continue can access
+# https://docs.continue.dev/customization/mcp-tools
+mcpServers:
+  - uses: anthropic/memory-mcp
diff --git a/.continue/mcpServers/new-mcp-server.yaml b/.continue/mcpServers/new-mcp-server.yaml
new file mode 100644
index 0000000..e69de29
diff --git a/stacks/ai/ai-frontend.nomad b/stacks/ai/ai-frontend.nomad
index 3114ef7..45e046a 100644
--- a/stacks/ai/ai-frontend.nomad
+++ b/stacks/ai/ai-frontend.nomad
@@ -1,25 +1,25 @@
 job "ai-frontend" {
   datacenters = [ "Homelab-PTECH-DC" ]
-  region = "home"
-  type = "service"
+  region      = "home"
+  type        = "service"
 
-  # --- OpenWebUI (completely unchanged) ---
+  # --- OpenWebUI (unchanged) ---
   group "openwebui" {
     count = 1
 
     constraint {
       attribute = "${attr.unique.hostname}"
-      value = "hp1-home"
+      value     = "hp1-home"
     }
 
     network {
       port "http" {
         static = 8080
-        to = 8080
+        to     = 8080
       }
 
       dns {
-        servers = [ "192.168.1.133" ]
+        servers  = [ "192.168.1.133" ]
         searches = [ "service.consul" ]
       }
     }
@@ -30,10 +30,10 @@ job "ai-frontend" {
       tags = ["traefik.enable=true"]
 
       check {
-        type = "http"
-        path = "/health"
+        type     = "http"
+        path     = "/health"
         interval = "20s"
-        timeout = "2s"
+        timeout  = "2s"
       }
     }
 
@@ -45,37 +45,37 @@ job "ai-frontend" {
       }
 
       config {
-        image = "ghcr.io/open-webui/open-webui:main"
-        ports = [ "http" ]
+        image   = "ghcr.io/open-webui/open-webui:main"
+        ports   = [ "http" ]
         volumes = [
           "/mnt/local-ssd/nomad/stacks/ai/ai-frontend/openwebui:/app/backend/data"
         ]
       }
 
       resources {
-        cpu = 1000
+        cpu    = 1000
         memory = 4000
       }
     }
   }
 
-  # --- LobeChat (new, no Traefik tags) ---
+  # --- LobeChat (unchanged) ---
   group "lobechat" {
     count = 1
 
     constraint {
       attribute = "${attr.unique.hostname}"
-      value = "hp1-home"
+      value     = "hp1-home"
     }
 
     network {
       port "http" {
         static = 3210
-        to = 3210
+        to     = 3210
       }
 
      dns {
-        servers = [ "192.168.1.133" ]
+        servers  = [ "192.168.1.133" ]
         searches = [ "service.consul" ]
       }
     }
@@ -84,14 +84,11 @@ job "ai-frontend" {
     service {
       name = "lobechat"
       port = "http"
-      # No Traefik tags added
-      # Only registers the service in Consul for discovery/health checks
-
       check {
-        type = "tcp"
-        port = "http"
+        type     = "tcp"
+        port     = "http"
         interval = "20s"
-        timeout = "2s"
+        timeout  = "2s"
       }
     }
 
@@ -100,14 +97,11 @@ job "ai-frontend" {
 
       env {
         OLLAMA_PROXY_URL = "http://ollama.service.consul:11434"
-
-        # Optional: Add a password if you expose this port externally
-        # ACCESS_CODE = "your-strong-password-here"
       }
 
       config {
-        image = "lobehub/lobe-chat:latest"
-        ports = [ "http" ]
+        image   = "lobehub/lobe-chat:latest"
+        ports   = [ "http" ]
 
         volumes = [
           "/mnt/local-ssd/nomad/stacks/ai/ai-frontend/lobechat/data:/data"
@@ -115,9 +109,71 @@ job "ai-frontend" {
       }
 
       resources {
-        cpu = 1000
+        cpu    = 1000
         memory = 2000
       }
     }
   }
-}
+
+  # --- SillyTavern (NEW) ---
+  group "sillytavern" {
+    count = 1
+
+    constraint {
+      attribute = "${attr.unique.hostname}"
+      value     = "hp1-home"
+    }
+
+    network {
+      port "http" {
+        static = 8000
+        to     = 8000
+      }
+
+      dns {
+        servers  = [ "192.168.1.133" ]
+        searches = [ "service.consul" ]
+      }
+    }
+
+    service {
+      name = "sillytavern"
+      port = "http"
+      # Added Traefik tags to match OpenWebUI since this is a frontend
+      tags = ["traefik.enable=true"]
+
+      check {
+        type     = "tcp"
+        port     = "http"
+        interval = "20s"
+        timeout  = "2s"
+      }
+    }
+
+    task "server" {
+      driver = "podman"
+
+      env {
+        # Ensures SillyTavern listens on all interfaces (0.0.0.0) so Nomad/Traefik can reach it
+        SILLYTAVERN_LISTEN = "true"
+      }
+
+      config {
+        image = "ghcr.io/sillytavern/sillytavern:latest"
+        ports = [ "http" ]
+
+        # Mounting standard data/config paths to your local SSD structure
+        volumes = [
+          "/mnt/local-ssd/nomad/stacks/ai/ai-frontend/sillytavern/config:/home/node/app/config",
+          "/mnt/local-ssd/nomad/stacks/ai/ai-frontend/sillytavern/data:/home/node/app/data",
+          "/mnt/local-ssd/nomad/stacks/ai/ai-frontend/sillytavern/plugins:/home/node/app/plugins"
+        ]
+      }
+
+      resources {
+        cpu    = 1000
+        memory = 2000
+      }
+    }
+  }
+}
\ No newline at end of file
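
Note on the new service registration: like OpenWebUI, SillyTavern is registered with only `traefik.enable=true`, so it relies on whatever default rule the homelab's Traefik instance applies to Consul catalog services. If an explicit hostname route were wanted instead, the service tags could be extended roughly as in this minimal sketch — the router name, hostname, and entrypoint are placeholders, not values taken from this repo:

      tags = [
        "traefik.enable=true",
        # Hypothetical explicit router rule via Traefik's Consul Catalog provider;
        # replace the hostname and entrypoint with ones that exist in this homelab.
        "traefik.http.routers.sillytavern.rule=Host(`sillytavern.example.home`)",
        "traefik.http.routers.sillytavern.entrypoints=web"
      ]

The TCP check on port 8000 only confirms the container is listening; it does not depend on these extra tags either way.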