feat: compose tor+ollama separation; runner uses OLLAMA_BASE; modelfiles mount

This commit is contained in:
Χγφτ Kompanion 2025-10-13 03:51:18 +13:00
parent efcadefc19
commit 9b9666485f
3 changed files with 32 additions and 22 deletions

View File

@ -0,0 +1,12 @@
# Compose override: pins the ollama model store to a host path and points the
# runner at the ollama service over the komnet network.
version: "3.9"
services:
  ollama:
    networks: [komnet]
    volumes:
      # Persist downloaded models on the host across container rebuilds.
      - /home/kompanion/ollama:/root/.ollama
  runner:
    environment:
      # The runner script reads OLLAMA_BASE (see runner.py). OLLAMA_BASE_URL is
      # kept as well for tools that expect the conventional variable name.
      # 11434 is ollama's default in-container listen port.
      OLLAMA_BASE: http://ollama:11434
      OLLAMA_BASE_URL: http://ollama:11434
networks:
  komnet:
    internal: false

View File

@ -1,29 +1,27 @@
version: "3.9" version: "3.9"
name: metal-kompanion name: metal-kompanion
networks: networks:
komnet: komnet: {} # runner ↔ tor ↔ ollama
driver: bridge netpub: {} # egress to internet for tor + ollama
internal: true
services: services:
# Local model host
ollama:
image: ollama/ollama:latest
restart: unless-stopped
ports: ["127.0.0.1:11434:11434"]
volumes:
- ollama:/root/.ollama
networks: [komnet]
# TOR proxy (SOCKS5)
tor: tor:
image: dperson/torproxy image: dperson/torproxy
restart: unless-stopped restart: unless-stopped
command: -a 0.0.0.0 command: -a 0.0.0.0
ports: ["127.0.0.1:9050:9050"] ports: ["127.0.0.1:9050:9050"] # optional host exposure; dperson/torproxy serves SOCKS5 on 9050 (9051 is the Tor control port)
networks: [komnet] networks: [komnet, netpub]
ollama:
image: ollama/ollama:latest
restart: unless-stopped
ports: ["127.0.0.1:11435:11434"] # expose on host port 11435; ollama listens on its default 11434 in-container
volumes:
- ollama:/root/.ollama # persist models once
- /home/kompanion/ollama-modelfiles:/modelfiles # your custom Modelfiles/LoRA
networks: [komnet, netpub] # can reach registry.ollama.ai
# Companion runner (Python) — reads tasks.jsonl, writes journal/ledger
runner: runner:
image: python:3.11-slim image: python:3.11-slim
restart: unless-stopped restart: unless-stopped
@ -32,9 +30,9 @@ services:
XDG_STATE_HOME: /state XDG_STATE_HOME: /state
XDG_CONFIG_HOME: /config XDG_CONFIG_HOME: /config
XDG_CACHE_HOME: /cache XDG_CACHE_HOME: /cache
# Route all egress through TOR by default (except localhost) ALL_PROXY: socks5h://tor:9050
ALL_PROXY: socks5h://tor:9050 NO_PROXY: ollama,localhost,127.0.0.1
NO_PROXY: 127.0.0.1,localhost OLLAMA_BASE: http://ollama:11434 # talk to container by DNS name on ollama's default port
depends_on: [ollama, tor] depends_on: [ollama, tor]
volumes: volumes:
- /home/kompanion/.local/state/kompanion:/state/kompanion - /home/kompanion/.local/state/kompanion:/state/kompanion
@ -45,4 +43,4 @@ services:
networks: [komnet] networks: [komnet]
volumes: volumes:
ollama: ollama: {}

View File

@ -1,3 +1,3 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import time import os; base = os.environ.get("OLLAMA_BASE", "http://ollama:11434")
while True: time.sleep(3600) url = f"{base}/api/generate"