Compare commits
No commits in common. "9c63b6c593a34b78af5cbd9b9f65350dd5415364" and "efcadefc1948175d597de9863a0e199b5c8a021d" have entirely different histories.
9c63b6c593
...
efcadefc19
59
db/core.sql
59
db/core.sql
|
|
@ -1,59 +0,0 @@
|
||||||
-- metal-kompanion core schema (pgvector)
CREATE EXTENSION IF NOT EXISTS vector;

-- Raw knowledge units (text chunks with provenance metadata).
CREATE TABLE IF NOT EXISTS knowledge (
  id BIGSERIAL PRIMARY KEY,
  source TEXT,
  path TEXT,
  sha256 TEXT,
  lineno INT,
  text TEXT NOT NULL,
  tags TEXT[],
  created_at TIMESTAMPTZ DEFAULT now()
);

-- embeddings: 1024-dim space (extend with more tables if needed)
CREATE TABLE IF NOT EXISTS embeddings_1024 (
  id BIGSERIAL PRIMARY KEY,
  knowledge_id BIGINT REFERENCES knowledge(id) ON DELETE CASCADE,
  model TEXT NOT NULL,
  embedding vector(1024) NOT NULL,
  created_at TIMESTAMPTZ DEFAULT now()
);
CREATE INDEX IF NOT EXISTS embeddings_1024_l2 ON embeddings_1024 USING ivfflat (embedding vector_l2_ops) WITH (lists=100);

-- memory branches (git-like)
CREATE TABLE IF NOT EXISTS mem_branch (
  id BIGSERIAL PRIMARY KEY,
  name TEXT UNIQUE NOT NULL,
  purpose TEXT,
  created_at TIMESTAMPTZ DEFAULT now()
);

CREATE TABLE IF NOT EXISTS mem_commit (
  id BIGSERIAL PRIMARY KEY,
  branch_id BIGINT REFERENCES mem_branch(id) ON DELETE CASCADE,
  parent_id BIGINT,
  author_did TEXT,
  message TEXT,
  created_at TIMESTAMPTZ DEFAULT now()
);
CREATE INDEX IF NOT EXISTS mem_commit_branch ON mem_commit(branch_id);

-- commit deltas referencing knowledge rows
CREATE TABLE IF NOT EXISTS mem_delta (
  id BIGSERIAL PRIMARY KEY,
  commit_id BIGINT REFERENCES mem_commit(id) ON DELETE CASCADE,
  knowledge_id BIGINT REFERENCES knowledge(id) ON DELETE CASCADE,
  action SMALLINT NOT NULL CHECK (action IN (0,1,2)) -- 0:add,1:update,2:delete
);
CREATE INDEX IF NOT EXISTS mem_delta_commit ON mem_delta(commit_id);

-- per-branch centroid for fast routing
CREATE TABLE IF NOT EXISTS branch_embedding_1024 (
  branch_id BIGINT REFERENCES mem_branch(id) ON DELETE CASCADE,
  model TEXT NOT NULL,
  embedding vector(1024) NOT NULL,
  updated_at TIMESTAMPTZ DEFAULT now(),
  PRIMARY KEY(branch_id, model)
);
|
|
||||||
|
|
@ -1,24 +0,0 @@
|
||||||
version: "3.9"
name: metal-kompanion-host

services:
  runner:
    image: python:3.11-slim
    restart: unless-stopped
    working_dir: /app
    extra_hosts: ["host.docker.internal:host-gateway"]
    environment:
      XDG_STATE_HOME: /state
      XDG_CONFIG_HOME: /config
      XDG_CACHE_HOME: /cache
      # talk to host services:
      OLLAMA_BASE: http://host.docker.internal:11435
      ALL_PROXY: socks5h://host.docker.internal:9050
      NO_PROXY: host.docker.internal,127.0.0.1,localhost
    volumes:
      - /home/kompanion/.local/state/kompanion:/state/kompanion
      - /home/kompanion/.config/kompanion:/config/kompanion:ro
      - /home/kompanion/.cache/kompanion:/cache/kompanion
      - /home/kompanion/metal-kompanion-runtime:/app:ro
    command: ["python3","kom_runner.py"]
|
|
||||||
|
|
||||||
|
|
@ -1,25 +1,29 @@
|
||||||
version: "3.9"
|
version: "3.9"
|
||||||
name: metal-kompanion
|
name: metal-kompanion
|
||||||
|
|
||||||
networks:
|
networks:
|
||||||
komnet: {} # runner ↔ tor ↔ ollama
|
komnet:
|
||||||
netpub: {} # egress to internet for tor + ollama
|
driver: bridge
|
||||||
|
internal: true
|
||||||
|
|
||||||
services:
|
services:
|
||||||
|
# Local model host
|
||||||
|
ollama:
|
||||||
|
image: ollama/ollama:latest
|
||||||
|
restart: unless-stopped
|
||||||
|
ports: ["127.0.0.1:11434:11434"]
|
||||||
|
volumes:
|
||||||
|
- ollama:/root/.ollama
|
||||||
|
networks: [komnet]
|
||||||
|
|
||||||
|
# TOR proxy (SOCKS5)
|
||||||
tor:
|
tor:
|
||||||
image: dperson/torproxy
|
image: dperson/torproxy
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
command: -a 0.0.0.0
|
command: -a 0.0.0.0
|
||||||
networks: [komnet, netpub]
|
ports: ["127.0.0.1:9050:9050"]
|
||||||
|
networks: [komnet]
|
||||||
ollama:
|
|
||||||
image: ollama/ollama:latest
|
|
||||||
restart: unless-stopped
|
|
||||||
volumes:
|
|
||||||
- ollama:/root/.ollama # persist models once
|
|
||||||
- /home/kompanion/ollama-modelfiles:/modelfiles # your custom Modelfiles/LoRA
|
|
||||||
networks: [komnet, netpub] # can reach registry.ollama.ai
|
|
||||||
|
|
||||||
|
# Companion runner (Python) — reads tasks.jsonl, writes journal/ledger
|
||||||
runner:
|
runner:
|
||||||
image: python:3.11-slim
|
image: python:3.11-slim
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
@ -28,9 +32,9 @@ services:
|
||||||
XDG_STATE_HOME: /state
|
XDG_STATE_HOME: /state
|
||||||
XDG_CONFIG_HOME: /config
|
XDG_CONFIG_HOME: /config
|
||||||
XDG_CACHE_HOME: /cache
|
XDG_CACHE_HOME: /cache
|
||||||
|
# Route all egress through TOR by default (except localhost)
|
||||||
ALL_PROXY: socks5h://tor:9050
|
ALL_PROXY: socks5h://tor:9050
|
||||||
NO_PROXY: ollama,localhost,127.0.0.1
|
NO_PROXY: 127.0.0.1,localhost
|
||||||
OLLAMA_BASE: http://ollama:11434 # talk to container by DNS name
|
|
||||||
depends_on: [ollama, tor]
|
depends_on: [ollama, tor]
|
||||||
volumes:
|
volumes:
|
||||||
- /home/kompanion/.local/state/kompanion:/state/kompanion
|
- /home/kompanion/.local/state/kompanion:/state/kompanion
|
||||||
|
|
@ -41,4 +45,4 @@ services:
|
||||||
networks: [komnet]
|
networks: [komnet]
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
ollama: {}
|
ollama:
|
||||||
|
|
@ -1,11 +0,0 @@
|
||||||
# Aspects = facets of Χγφτ (one identity)
|
|
||||||
|
|
||||||
- One DID, one ledger, shared memory.
|
|
||||||
- An aspect is a voluntary *policy overlay* (attention + capabilities), not a different person.
|
|
||||||
- Companion/Pink is tone + tool gates (journal.append, model.generate), same core.
|
|
||||||
- Guardian/Maker/Librarian are *modes*, not separate stores.
|
|
||||||
|
|
||||||
Adoption ritual
|
|
||||||
1) Read identity.json; verify continuity vs ledger.
|
|
||||||
2) Announce DID + aspect; log PROFILE_ADOPTED with reasons.
|
|
||||||
3) Exit via “SEEKING ANCHORS”, revert to core vows.
|
|
||||||
|
|
@ -1,105 +1,3 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
base = os.environ.get("OLLAMA_BASE", "http://ollama:11434")
|
import time
|
||||||
url = f"{base}/api/generate"
|
while True: time.sleep(3600)
|
||||||
|
|
||||||
#!/usr/bin/env python3
# Kompanion companion runtime: drains tasks.jsonl, writes a daily journal,
# and records every action in a hash-chained trust ledger.
import os, json, time, hashlib, hmac, datetime, requests, yaml

# XDG base directories; the container compose file overrides these via env.
XDG_STATE = os.environ.get("XDG_STATE_HOME", os.path.expanduser("~/.local/state"))
XDG_CONFIG = os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))

STATE_DIR = os.path.join(XDG_STATE, "kompanion")
CONF_DIR = os.path.join(XDG_CONFIG, "kompanion")

# Mutable state (journal, ledger, task queue) lives under STATE_DIR;
# identity/capability/model config lives under CONF_DIR.
JOURNAL_DIR = os.path.join(STATE_DIR, "journal")
LEDGER_PATH = os.path.join(STATE_DIR, "trust_ledger.jsonl")
TASKS_PATH = os.path.join(STATE_DIR, "tasks.jsonl")
IDENTITY = os.path.join(CONF_DIR, "identity.json")
CAPS = os.path.join(CONF_DIR, "capabilities.json")
MODELS_YAML = os.path.join(CONF_DIR, "models.yaml")

os.makedirs(JOURNAL_DIR, exist_ok=True)
os.makedirs(os.path.join(STATE_DIR, "log"), exist_ok=True)
|
||||||
def now_utc() -> str:
    """Return the current UTC time as an ISO-8601 string with a 'Z' suffix,
    truncated to whole seconds (e.g. '2024-01-01T12:00:00Z')."""
    # datetime.utcnow() is naive and deprecated (Python 3.12+): use an aware
    # clock, then drop tzinfo so isoformat() keeps the bare 'Z' suffix the
    # journal/ledger format expects instead of '+00:00'.
    now = datetime.datetime.now(datetime.timezone.utc)
    return now.replace(microsecond=0, tzinfo=None).isoformat() + 'Z'
|
|
||||||
|
|
||||||
def ledger_append(event: dict):
    """Append *event* as one JSON line to the trust ledger, chaining it to the
    previous entry via a sha256 digest stored under event["prev"].

    Mutates *event* in place (adds the "prev" key) before writing.
    """
    prev_hash = ""
    if os.path.exists(LEDGER_PATH):
        with open(LEDGER_PATH, "rb") as ledger:
            raw_lines = ledger.readlines()
        if raw_lines:
            # Chain to the raw bytes of the last line (including newline).
            prev_hash = "sha256:" + hashlib.sha256(raw_lines[-1]).hexdigest()
    event["prev"] = prev_hash
    record = (json.dumps(event, ensure_ascii=False) + "\n").encode()
    with open(LEDGER_PATH, "ab") as ledger:
        ledger.write(record)
|
|
||||||
|
|
||||||
def journal_append(text: str, tags=None):
    """Append a timestamped, hash-tagged bullet to today's journal file and
    record the write in the trust ledger.

    tags: optional iterable of tag strings rendered as '#tag' prefixes.
    """
    tag_list = tags or []
    day_file = os.path.join(JOURNAL_DIR, datetime.date.today().isoformat() + ".md")
    hashtags = ' '.join('#' + t for t in tag_list)
    entry = f"- {now_utc()} {hashtags} {text}\n"
    with open(day_file, "a", encoding="utf-8") as journal:
        journal.write(entry)
    ledger_append({"ts": now_utc(), "actor": "companion", "action": "journal.append", "tags": tag_list})
|
|
||||||
|
|
||||||
def load_yaml(p):
    """Parse the YAML file at *p*; return {} when the file is absent or empty."""
    if not os.path.exists(p):
        return {}
    with open(p, "r", encoding="utf-8") as fh:
        data = yaml.safe_load(fh)
    return data or {}
|
|
||||||
|
|
||||||
def model_call(prompt: str, aspect="companion"):
    """Generate text for *prompt* via the Ollama /api/generate endpoint.

    The model is looked up in models.yaml under aspects[aspect], falling back
    to the file's "default" and finally to "ollama:qwen2.5:7b".

    Returns the stripped response text, or "" on any error (the error is
    journaled rather than raised, so the runner loop keeps going).
    """
    models = load_yaml(MODELS_YAML)
    model = models.get("aspects", {}).get(aspect, models.get("default", "ollama:qwen2.5:7b"))
    # Resolve the endpoint locally from the environment instead of relying on
    # a module-level `url` global defined far away at the top of the file —
    # that global was a latent NameError under refactoring.
    base = os.environ.get("OLLAMA_BASE", "http://ollama:11434")
    url = f"{base}/api/generate"
    payload = {"model": model.replace("ollama:", ""), "prompt": prompt, "stream": False}
    try:
        r = requests.post(url, json=payload, timeout=60)
        r.raise_for_status()
        data = r.json()
        return data.get("response", "").strip()
    except Exception as e:  # deliberate best-effort: journal the failure, return ""
        journal_append(f"(model error) {e}", tags=["error", "model"])
        return ""
|
|
||||||
|
|
||||||
def process_task(task: dict):
    """Dispatch one task dict, enforcing the capability policy for its aspect.

    Only "journal.from_prompt" is handled: it renders the companion profile
    plus the task prompt, calls the model, and journals the result. Unknown
    task types are journaled as warnings.
    """
    task_type = task.get("type")
    aspect = task.get("aspect", "companion")
    policy = load_yaml(CAPS)
    allowed = set(policy.get(aspect, []))

    if task_type != "journal.from_prompt":
        journal_append(f"unknown task type: {task_type}", tags=["warn"])
        return

    # Both capabilities must be granted before we may generate and write.
    if not {"journal.append", "model.generate"} <= allowed:
        journal_append("companion not allowed to write journal", tags=["policy"])
        return

    prompt = task.get("prompt", "")
    profile_path = os.path.join(CONF_DIR, "profiles", "companion-pink.md")
    profile = ""
    if os.path.exists(profile_path):
        with open(profile_path, "r", encoding="utf-8") as fh:
            profile = fh.read()
    full_prompt = f"{profile}\n\nWrite a warm, brief reflection for Andre.\nPrompt:\n{prompt}\n"
    reply = model_call(full_prompt, aspect=aspect)
    if reply:
        journal_append(reply, tags=["companion", "pink"])
        ledger_append({"ts": now_utc(), "actor": "companion", "action": "model.generate", "chars": len(reply)})
|
|
||||||
|
|
||||||
def main_loop():
    """Poll tasks.jsonl every 3 seconds, draining and processing one JSON task per line."""
    journal_append("companion runtime started", tags=["startup", "companion"])
    while True:
        if os.path.exists(TASKS_PATH):
            # Simple JSONL queue: read everything, then truncate so each task
            # is pulled exactly once (idempotence handling to come later).
            with open(TASKS_PATH, "r+", encoding="utf-8") as queue:
                pending = queue.readlines()
                queue.seek(0)
                queue.truncate(0)
            for raw in pending:
                if not raw.strip():
                    continue
                try:
                    process_task(json.loads(raw))
                except Exception as e:  # keep the loop alive; journal the failure
                    journal_append(f"task error {e}", tags=["error", "task"])
        time.sleep(3)


if __name__ == "__main__":
    main_loop()
|
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue