Compare commits

...

6 Commits

Author SHA1 Message Date
Χγφτ Kompanion 9c63b6c593 db: core pgvector schema; docs: ASPECTS (facets of Χγφτ) 2025-10-13 06:05:32 +13:00
Χγφτ Kompanion f73d702ba6 Add kom_runner implementation 2025-10-13 05:06:39 +13:00
Χγφτ Kompanion 3ae8bebb54 Readd a self contained compose dockerfile 2025-10-13 04:57:08 +13:00
esus 628e7b529e Use host tor and ollama for now
Later for publishing we can offer both variants.
2025-10-13 04:32:19 +13:00
Χγφτ Kompanion 1585c168fd chore(docker): add host-runner compose using host ollama:11435 & tor:9051 2025-10-13 04:08:44 +13:00
Χγφτ Kompanion 9b9666485f feat: compose tor+ollama separation; runner uses OLLAMA_BASE; modelfiles mount 2025-10-13 03:51:18 +13:00
5 changed files with 213 additions and 21 deletions

59
db/core.sql Normal file
View File

@ -0,0 +1,59 @@
-- metal-kompanion core schema (pgvector)
-- Idempotent DDL: safe to re-run (IF NOT EXISTS everywhere).
CREATE EXTENSION IF NOT EXISTS vector;

-- Ingested knowledge fragments (file/line provenance plus free-form tags).
CREATE TABLE IF NOT EXISTS knowledge (
    id BIGSERIAL PRIMARY KEY,
    source TEXT,
    path TEXT,
    sha256 TEXT,
    lineno INT,
    text TEXT NOT NULL,
    tags TEXT[],
    created_at TIMESTAMPTZ DEFAULT now()
);

-- embeddings: 1024-dim space (extend with more tables if needed)
CREATE TABLE IF NOT EXISTS embeddings_1024 (
    id BIGSERIAL PRIMARY KEY,
    knowledge_id BIGINT REFERENCES knowledge(id) ON DELETE CASCADE,
    model TEXT NOT NULL,
    embedding vector(1024) NOT NULL,
    created_at TIMESTAMPTZ DEFAULT now()
);
-- Index the FK so cascaded deletes from knowledge avoid a sequential scan.
CREATE INDEX IF NOT EXISTS embeddings_1024_knowledge ON embeddings_1024(knowledge_id);
-- NOTE: ivfflat derives its list centroids from rows present at build time;
-- REINDEX after bulk-loading for good recall.
CREATE INDEX IF NOT EXISTS embeddings_1024_l2 ON embeddings_1024 USING ivfflat (embedding vector_l2_ops) WITH (lists=100);

-- memory branches (git-like)
CREATE TABLE IF NOT EXISTS mem_branch (
    id BIGSERIAL PRIMARY KEY,
    name TEXT UNIQUE NOT NULL,
    purpose TEXT,
    created_at TIMESTAMPTZ DEFAULT now()
);
CREATE TABLE IF NOT EXISTS mem_commit (
    id BIGSERIAL PRIMARY KEY,
    branch_id BIGINT REFERENCES mem_branch(id) ON DELETE CASCADE,
    -- parent commit id; no FK constraint here -- presumably to allow
    -- out-of-order pruning/import. TODO confirm, else add REFERENCES mem_commit(id).
    parent_id BIGINT,
    author_did TEXT,
    message TEXT,
    created_at TIMESTAMPTZ DEFAULT now()
);
CREATE INDEX IF NOT EXISTS mem_commit_branch ON mem_commit(branch_id);

-- commit deltas referencing knowledge rows
CREATE TABLE IF NOT EXISTS mem_delta (
    id BIGSERIAL PRIMARY KEY,
    commit_id BIGINT REFERENCES mem_commit(id) ON DELETE CASCADE,
    knowledge_id BIGINT REFERENCES knowledge(id) ON DELETE CASCADE,
    action SMALLINT NOT NULL CHECK (action IN (0,1,2)) -- 0:add,1:update,2:delete
);
CREATE INDEX IF NOT EXISTS mem_delta_commit ON mem_delta(commit_id);
-- FK index for cascaded deletes from knowledge.
CREATE INDEX IF NOT EXISTS mem_delta_knowledge ON mem_delta(knowledge_id);

-- per-branch centroid for fast routing
CREATE TABLE IF NOT EXISTS branch_embedding_1024 (
    branch_id BIGINT REFERENCES mem_branch(id) ON DELETE CASCADE,
    model TEXT NOT NULL,
    embedding vector(1024) NOT NULL,
    updated_at TIMESTAMPTZ DEFAULT now(),
    PRIMARY KEY(branch_id, model)
);

24
docker/compose.host.yml Normal file
View File

@ -0,0 +1,24 @@
# Host-runner variant: only the runner is containerized; it reaches the
# ollama and tor services running directly on the Docker host.
version: "3.9"
name: metal-kompanion-host
services:
  runner:
    image: python:3.11-slim
    restart: unless-stopped
    working_dir: /app
    # resolve host.docker.internal to the host gateway so host services work on Linux
    extra_hosts: ["host.docker.internal:host-gateway"]
    environment:
      # XDG base dirs point at the volume mounts below
      XDG_STATE_HOME: /state
      XDG_CONFIG_HOME: /config
      XDG_CACHE_HOME: /cache
      # talk to host services:
      OLLAMA_BASE: http://host.docker.internal:11435
      # egress via the host's tor SOCKS proxy; local names bypass it
      ALL_PROXY: socks5h://host.docker.internal:9050
      NO_PROXY: host.docker.internal,127.0.0.1,localhost
    volumes:
      - /home/kompanion/.local/state/kompanion:/state/kompanion
      - /home/kompanion/.config/kompanion:/config/kompanion:ro
      - /home/kompanion/.cache/kompanion:/cache/kompanion
      # runner source mounted read-only
      - /home/kompanion/metal-kompanion-runtime:/app:ro
    command: ["python3","kom_runner.py"]

View File

@ -1,29 +1,25 @@
version: "3.9"
name: metal-kompanion
networks:
komnet:
driver: bridge
internal: true
komnet: {} # runner ↔ tor ↔ ollama
netpub: {} # egress to internet for tor + ollama
services:
# Local model host
ollama:
image: ollama/ollama:latest
restart: unless-stopped
ports: ["127.0.0.1:11434:11434"]
volumes:
- ollama:/root/.ollama
networks: [komnet]
# TOR proxy (SOCKS5)
tor:
image: dperson/torproxy
restart: unless-stopped
command: -a 0.0.0.0
ports: ["127.0.0.1:9050:9050"]
networks: [komnet]
networks: [komnet, netpub]
ollama:
image: ollama/ollama:latest
restart: unless-stopped
volumes:
- ollama:/root/.ollama # persist models once
- /home/kompanion/ollama-modelfiles:/modelfiles # your custom Modelfiles/LoRA
networks: [komnet, netpub] # can reach registry.ollama.ai
# Companion runner (Python) — reads tasks.jsonl, writes journal/ledger
runner:
image: python:3.11-slim
restart: unless-stopped
@ -32,9 +28,9 @@ services:
XDG_STATE_HOME: /state
XDG_CONFIG_HOME: /config
XDG_CACHE_HOME: /cache
# Route all egress through TOR by default (except localhost)
ALL_PROXY: socks5h://tor:9050
NO_PROXY: 127.0.0.1,localhost
NO_PROXY: ollama,localhost,127.0.0.1
OLLAMA_BASE: http://ollama:11434 # talk to container by DNS name
depends_on: [ollama, tor]
volumes:
- /home/kompanion/.local/state/kompanion:/state/kompanion
@ -45,4 +41,4 @@ services:
networks: [komnet]
volumes:
ollama:
ollama: {}

11
docs/ASPECTS.md Normal file
View File

@ -0,0 +1,11 @@
# Aspects = facets of Χγφτ (one identity)
- One DID, one ledger, shared memory.
- An aspect is a voluntary *policy overlay* (attention + capabilities), not a different person.
- Companion/Pink is tone + tool gates (journal.append, model.generate), same core.
- Guardian/Maker/Librarian are *modes*, not separate stores.
## Adoption ritual
1) Read identity.json; verify continuity vs ledger.
2) Announce DID + aspect; log PROFILE_ADOPTED with reasons.
3) Exit via “SEEKING ANCHORS”, revert to core vows.

View File

@ -1,3 +1,105 @@
#!/usr/bin/env python3
"""Kompanion runner: drains a JSONL task queue and journals model output.

State/config/cache locations follow the XDG base-dir convention so the
container can mount them as volumes (see docker compose files).
"""
import os, json, time, hashlib, hmac, datetime, requests, yaml  # hmac reserved for future ledger signing

# Ollama endpoint; OLLAMA_BASE is injected by compose (container DNS or host gateway).
base = os.environ.get("OLLAMA_BASE", "http://ollama:11434")
url = f"{base}/api/generate"

XDG_STATE = os.environ.get("XDG_STATE_HOME", os.path.expanduser("~/.local/state"))
XDG_CONFIG = os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
STATE_DIR = os.path.join(XDG_STATE, "kompanion")
CONF_DIR = os.path.join(XDG_CONFIG, "kompanion")
JOURNAL_DIR = os.path.join(STATE_DIR, "journal")
LEDGER_PATH = os.path.join(STATE_DIR, "trust_ledger.jsonl")
TASKS_PATH = os.path.join(STATE_DIR, "tasks.jsonl")
IDENTITY = os.path.join(CONF_DIR, "identity.json")
CAPS = os.path.join(CONF_DIR, "capabilities.json")
MODELS_YAML = os.path.join(CONF_DIR, "models.yaml")
os.makedirs(JOURNAL_DIR, exist_ok=True)
os.makedirs(os.path.join(STATE_DIR, "log"), exist_ok=True)
def now_utc() -> str:
    """Return the current UTC time as an ISO-8601 string with a 'Z' suffix.

    Second precision (microseconds stripped), e.g. '2025-10-13T06:05:32Z'.
    """
    # Timezone-aware now() replaces deprecated naive datetime.utcnow();
    # tzinfo is stripped so isoformat() has no '+00:00' and 'Z' is appended
    # exactly as before.
    return (
        datetime.datetime.now(datetime.timezone.utc)
        .replace(microsecond=0, tzinfo=None)
        .isoformat() + "Z"
    )
def ledger_append(event: dict):
    """Append *event* as one JSON line to the trust ledger.

    Each entry carries event["prev"] = sha256 of the previous raw line,
    forming a lightweight hash chain over the ledger.
    """
    prev_hash = ""
    if os.path.exists(LEDGER_PATH):
        with open(LEDGER_PATH, "rb") as ledger:
            existing = ledger.readlines()
        if existing:
            prev_hash = "sha256:" + hashlib.sha256(existing[-1]).hexdigest()
    event["prev"] = prev_hash
    record = json.dumps(event, ensure_ascii=False) + "\n"
    with open(LEDGER_PATH, "ab") as ledger:
        ledger.write(record.encode())
def journal_append(text: str, tags=None):
    """Append a timestamped, hash-tagged bullet to today's journal file.

    Also records the write in the trust ledger (action 'journal.append').
    """
    tags = [] if tags is None else tags
    tag_str = " ".join("#" + tag for tag in tags)
    day_file = os.path.join(JOURNAL_DIR, datetime.date.today().isoformat() + ".md")
    with open(day_file, "a", encoding="utf-8") as journal:
        journal.write(f"- {now_utc()} {tag_str} {text}\n")
    ledger_append({"ts": now_utc(), "actor": "companion", "action": "journal.append", "tags": tags})
def load_yaml(p):
    """Parse the YAML file at *p*; return {} when the file is missing or empty."""
    if os.path.exists(p):
        with open(p, "r", encoding="utf-8") as fh:
            return yaml.safe_load(fh) or {}
    return {}
def model_call(prompt: str, aspect="companion"):
    """Generate text via the Ollama /api/generate endpoint.

    The model is resolved from models.yaml: aspects.<aspect> first, then the
    top-level 'default', falling back to 'ollama:qwen2.5:7b'. Returns the
    stripped response text, or "" on any error (errors are journaled rather
    than raised so the task loop keeps running).
    """
    models = load_yaml(MODELS_YAML)
    model = models.get("aspects", {}).get(aspect, models.get("default", "ollama:qwen2.5:7b"))
    # Build the endpoint here instead of relying on the module-level `url`
    # global; same value, but the function is self-contained.
    base = os.environ.get("OLLAMA_BASE", "http://ollama:11434")
    endpoint = f"{base}/api/generate"
    # strip the 'ollama:' scheme prefix used in models.yaml
    payload = {"model": model.replace("ollama:", ""), "prompt": prompt, "stream": False}
    try:
        r = requests.post(endpoint, json=payload, timeout=60)
        r.raise_for_status()
        data = r.json()
        return data.get("response", "").strip()
    except Exception as e:
        # best-effort: log to the journal and degrade to empty output
        journal_append(f"(model error) {e}", tags=["error", "model"])
        return ""
def process_task(task: dict):
    """Dispatch one queued task dict.

    Only 'journal.from_prompt' is handled: gated on the aspect's declared
    capabilities, it renders the companion profile plus the prompt through
    the model and journals the result. Unknown types are journaled as warnings.
    """
    kind = task.get("type")
    aspect = task.get("aspect", "companion")
    allowed = set(load_yaml(CAPS).get(aspect, []))
    if kind != "journal.from_prompt":
        journal_append(f"unknown task type: {kind}", tags=["warn"])
        return
    # capability gate: both journal write and model use must be granted
    if not {"journal.append", "model.generate"} <= allowed:
        journal_append("companion not allowed to write journal", tags=["policy"])
        return
    profile = ""
    profile_path = os.path.join(CONF_DIR, "profiles", "companion-pink.md")
    if os.path.exists(profile_path):
        with open(profile_path, "r", encoding="utf-8") as fh:
            profile = fh.read()
    prompt = task.get("prompt", "")
    full = f"{profile}\n\nWrite a warm, brief reflection for Andre.\nPrompt:\n{prompt}\n"
    out = model_call(full, aspect=aspect)
    if out:
        journal_append(out, tags=["companion", "pink"])
        ledger_append({"ts": now_utc(), "actor": "companion", "action": "model.generate", "chars": len(out)})
def main_loop():
    """Poll TASKS_PATH every 3 seconds and process queued tasks.

    The queue is a JSONL file: one JSON task object per line. Each poll
    pulls the whole file and empties it in a single open().
    """
    journal_append("companion runtime started", tags=["startup", "companion"])
    while True:
        if os.path.exists(TASKS_PATH):
            # simple jsonl queue, one task per line
            p_lines = []
            with open(TASKS_PATH, "r+", encoding="utf-8") as f:
                p_lines = f.readlines()
                f.seek(0); f.truncate(0)  # drop tasks we just pulled; idempotence later
            # NOTE(review): the batch is removed from the queue *before* it is
            # processed, so a crash below loses the pulled tasks — acknowledged
            # by the "idempotence later" comment above.
            for line in p_lines:
                if not line.strip(): continue  # skip blank lines
                try:
                    task = json.loads(line)
                    process_task(task)
                except Exception as e:
                    # best-effort: journal the failure and keep draining the batch
                    journal_append(f"task error {e}", tags=["error","task"])
        time.sleep(3)
if __name__=="__main__":
    main_loop()