Compare commits

..

4 Commits

Author SHA1 Message Date
Χγφτ Kompanion f276d702b2 runtime: Update runner script 2025-10-13 07:34:03 +13:00
Χγφτ Kompanion d121f2a76d db: Add pgsql schema 2025-10-13 07:32:35 +13:00
Χγφτ Kompanion 9c63b6c593 db: core pgvector schema; docs: ASPECTS (facets of Χγφτ) 2025-10-13 06:05:32 +13:00
Χγφτ Kompanion f73d702ba6 Add kom_runner implementation 2025-10-13 05:06:39 +13:00
5 changed files with 283 additions and 34 deletions

59
db/core.sql Normal file
View File

@@ -0,0 +1,59 @@
-- metal-kompanion core schema (pgvector)
CREATE EXTENSION IF NOT EXISTS vector;

-- Raw knowledge rows: one chunk of text plus provenance (source/path/line).
CREATE TABLE IF NOT EXISTS knowledge (
    id BIGSERIAL PRIMARY KEY,
    source TEXT,
    path TEXT,
    sha256 TEXT,
    lineno INT,
    text TEXT NOT NULL,
    tags TEXT[],
    created_at TIMESTAMPTZ DEFAULT now()
);

-- embeddings: 1024-dim space (extend with more tables if needed)
CREATE TABLE IF NOT EXISTS embeddings_1024 (
    id BIGSERIAL PRIMARY KEY,
    knowledge_id BIGINT REFERENCES knowledge(id) ON DELETE CASCADE,
    model TEXT NOT NULL,
    embedding vector(1024) NOT NULL,
    created_at TIMESTAMPTZ DEFAULT now()
);
-- ANN index (L2 distance); ivfflat lists should be re-tuned once real data exists.
CREATE INDEX IF NOT EXISTS embeddings_1024_l2 ON embeddings_1024
    USING ivfflat (embedding vector_l2_ops) WITH (lists=100);

-- memory branches (git-like)
CREATE TABLE IF NOT EXISTS mem_branch (
    id BIGSERIAL PRIMARY KEY,
    name TEXT UNIQUE NOT NULL,
    purpose TEXT,
    created_at TIMESTAMPTZ DEFAULT now()
);

CREATE TABLE IF NOT EXISTS mem_commit (
    id BIGSERIAL PRIMARY KEY,
    branch_id BIGINT REFERENCES mem_branch(id) ON DELETE CASCADE,
    -- Fix: parent pointer now carries a real FK so dangling parents are
    -- rejected; NULL marks a root commit.
    parent_id BIGINT REFERENCES mem_commit(id),
    author_did TEXT,
    message TEXT,
    created_at TIMESTAMPTZ DEFAULT now()
);
CREATE INDEX IF NOT EXISTS mem_commit_branch ON mem_commit(branch_id);

-- commit deltas referencing knowledge rows
CREATE TABLE IF NOT EXISTS mem_delta (
    id BIGSERIAL PRIMARY KEY,
    commit_id BIGINT REFERENCES mem_commit(id) ON DELETE CASCADE,
    knowledge_id BIGINT REFERENCES knowledge(id) ON DELETE CASCADE,
    action SMALLINT NOT NULL CHECK (action IN (0,1,2)) -- 0:add,1:update,2:delete
);
CREATE INDEX IF NOT EXISTS mem_delta_commit ON mem_delta(commit_id);

-- per-branch centroid for fast routing
CREATE TABLE IF NOT EXISTS branch_embedding_1024 (
    branch_id BIGINT REFERENCES mem_branch(id) ON DELETE CASCADE,
    model TEXT NOT NULL,
    embedding vector(1024) NOT NULL,
    updated_at TIMESTAMPTZ DEFAULT now(),
    PRIMARY KEY(branch_id, model)
);

5
db/db-init.sql Normal file
View File

@@ -0,0 +1,5 @@
-- Seed the three standard memory spaces; idempotent via ON CONFLICT.
-- All use mxbai-embed-large at 1024 dimensions.
INSERT INTO komp.space (name, model, dim)
VALUES
    ('dev_knowledge',    'mxbai-embed-large', 1024),
    ('pattern_exchange', 'mxbai-embed-large', 1024),
    ('runtime_memory',   'mxbai-embed-large', 1024)
ON CONFLICT (name) DO NOTHING;

View File

@@ -1,43 +1,105 @@
-- Kompanion knowledge store (PostgreSQL)
-- Requires: CREATE EXTENSION IF NOT EXISTS vector; CREATE EXTENSION IF NOT EXISTS ltree;
-- NOTE: reconstructed from a column-fused side-by-side diff; the removed
-- SQLite schema (entries/entries_fts/vectors/ledger_head) is intentionally gone.
CREATE SCHEMA IF NOT EXISTS komp;
CREATE EXTENSION IF NOT EXISTS vector;
CREATE EXTENSION IF NOT EXISTS ltree;

-- Where a chunk came from.
CREATE TABLE IF NOT EXISTS komp.source (
    id BIGSERIAL PRIMARY KEY,
    kind TEXT NOT NULL,             -- filesystem|repo|url|note
    uri TEXT NOT NULL,              -- path or URL
    repo TEXT,
    ref TEXT,
    meta JSONB DEFAULT '{}'::jsonb,
    created_at TIMESTAMPTZ DEFAULT now()
);

-- A unit of text to embed, tied to its source.
CREATE TABLE IF NOT EXISTS komp.chunk (
    id BIGSERIAL PRIMARY KEY,
    source_id BIGINT REFERENCES komp.source(id) ON DELETE CASCADE,
    lineno INT,
    text TEXT NOT NULL,
    sha256 TEXT NOT NULL,
    tokens INT,
    created_at TIMESTAMPTZ DEFAULT now()
);
CREATE INDEX IF NOT EXISTS idx_chunk_source ON komp.chunk(source_id);

-- A space is a distinct memory with its own model+dim & policy
CREATE TABLE IF NOT EXISTS komp.space (
    id SERIAL PRIMARY KEY,
    name TEXT UNIQUE,               -- dev_knowledge | pattern_exchange | runtime_memory
    model TEXT NOT NULL,
    dim INT NOT NULL,
    metric TEXT NOT NULL DEFAULT 'cosine'
);

-- Embedding tables per common dimension (add more as needed)
CREATE TABLE IF NOT EXISTS komp.embedding_768 (
    id BIGSERIAL PRIMARY KEY,
    chunk_id BIGINT REFERENCES komp.chunk(id) ON DELETE CASCADE,
    space_id INT REFERENCES komp.space(id) ON DELETE CASCADE,
    embedding VECTOR(768) NOT NULL,
    created_at TIMESTAMPTZ DEFAULT now(),
    UNIQUE(chunk_id, space_id)
);
CREATE INDEX IF NOT EXISTS idx_embed768_space ON komp.embedding_768(space_id);
CREATE INDEX IF NOT EXISTS ivf_embed768 ON komp.embedding_768 USING ivfflat (embedding vector_cosine_ops) WITH (lists=100);

CREATE TABLE IF NOT EXISTS komp.embedding_1024 (
    id BIGSERIAL PRIMARY KEY,
    chunk_id BIGINT REFERENCES komp.chunk(id) ON DELETE CASCADE,
    space_id INT REFERENCES komp.space(id) ON DELETE CASCADE,
    embedding VECTOR(1024) NOT NULL,
    created_at TIMESTAMPTZ DEFAULT now(),
    UNIQUE(chunk_id, space_id)
);
CREATE INDEX IF NOT EXISTS idx_embed1024_space ON komp.embedding_1024(space_id);
CREATE INDEX IF NOT EXISTS ivf_embed1024 ON komp.embedding_1024 USING ivfflat (embedding vector_cosine_ops) WITH (lists=100);

-- Branch hierarchy (Branch Embeddings): path encodes the cluster tree (e.g., physics.quantum.tunneling)
CREATE TABLE IF NOT EXISTS komp.branch (
    id BIGSERIAL PRIMARY KEY,
    space_id INT REFERENCES komp.space(id) ON DELETE CASCADE,
    path LTREE NOT NULL,
    label TEXT,
    meta JSONB DEFAULT '{}'::jsonb,
    UNIQUE(space_id, path)
);
CREATE INDEX IF NOT EXISTS gist_branch_path ON komp.branch USING GIST (path);

-- Centroids per dimension (store only the dim matching the space)
CREATE TABLE IF NOT EXISTS komp.branch_centroid_768 (
    branch_id BIGINT PRIMARY KEY REFERENCES komp.branch(id) ON DELETE CASCADE,
    embedding VECTOR(768) NOT NULL,
    updated_at TIMESTAMPTZ DEFAULT now()
);
CREATE TABLE IF NOT EXISTS komp.branch_centroid_1024 (
    branch_id BIGINT PRIMARY KEY REFERENCES komp.branch(id) ON DELETE CASCADE,
    embedding VECTOR(1024) NOT NULL,
    updated_at TIMESTAMPTZ DEFAULT now()
);

-- Soft membership of chunks to branches
CREATE TABLE IF NOT EXISTS komp.chunk_branch (
    chunk_id BIGINT REFERENCES komp.chunk(id) ON DELETE CASCADE,
    branch_id BIGINT REFERENCES komp.branch(id) ON DELETE CASCADE,
    weight REAL NOT NULL CHECK (weight >= 0 AND weight <= 1),
    PRIMARY KEY(chunk_id, branch_id)
);

-- Relations between chunks (similarity / cites / derives / contradicts / ...)
CREATE TABLE IF NOT EXISTS komp.chunk_edge (
    src_chunk_id BIGINT REFERENCES komp.chunk(id) ON DELETE CASCADE,
    dst_chunk_id BIGINT REFERENCES komp.chunk(id) ON DELETE CASCADE,
    relation TEXT NOT NULL,
    weight REAL,
    meta JSONB DEFAULT '{}'::jsonb,
    PRIMARY KEY(src_chunk_id, dst_chunk_id, relation)
);

-- Sources annotated with the timestamp of their most recent chunk.
CREATE OR REPLACE VIEW komp.latest_sources AS
SELECT s.*, max(c.created_at) AS last_chunk_at
FROM komp.source s LEFT JOIN komp.chunk c ON c.source_id = s.id
GROUP BY s.id;

11
docs/ASPECTS.md Normal file
View File

@@ -0,0 +1,11 @@
# Aspects = facets of Χγφτ (one identity)
- One DID, one ledger, shared memory.
- An aspect is a voluntary *policy overlay* (attention + capabilities), not a different person.
- Companion/Pink is tone + tool gates (journal.append, model.generate), same core.
- Guardian/Maker/Librarian are *modes*, not separate stores.
## Adoption ritual
1) Read identity.json; verify continuity vs ledger.
2) Announce DID + aspect; log PROFILE_ADOPTED with reasons.
3) Exit via “SEEKING ANCHORS”, revert to core vows.

View File

@@ -1,3 +1,115 @@
#!/usr/bin/env python3
# Kompanion runtime runner: drains a task queue, journals output, and keeps a
# hash-chained trust ledger under XDG state directories.
# (Fix: de-duplicated the column-fused shebang/base/url lines from the diff.)
import os, json, time, hashlib, hmac, datetime, requests, yaml, secrets

# Default Ollama endpoint; override with OLLAMA_BASE.
base = os.environ.get("OLLAMA_BASE", "http://ollama:11434")
url = f"{base}/api/generate"

# XDG-style locations for mutable state and configuration.
XDG_STATE = os.environ.get("XDG_STATE_HOME", os.path.expanduser("~/.local/state"))
XDG_CONFIG = os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
STATE_DIR = os.path.join(XDG_STATE, "kompanion")
CONF_DIR = os.path.join(XDG_CONFIG, "kompanion")
JOURNAL_DIR = os.path.join(STATE_DIR, "journal")
LEDGER_PATH = os.path.join(STATE_DIR, "trust_ledger.jsonl")
TASKS_PATH = os.path.join(STATE_DIR, "tasks.jsonl")
IDENTITY = os.path.join(CONF_DIR, "identity.json")
CAPS = os.path.join(CONF_DIR, "capabilities.json")
MODELS_YAML = os.path.join(CONF_DIR, "models.yaml")

# Ensure writable state tree exists before anything logs.
os.makedirs(JOURNAL_DIR, exist_ok=True)
os.makedirs(os.path.join(STATE_DIR, "log"), exist_ok=True)
def now_utc() -> str:
    """Current UTC time as 'YYYY-MM-DDTHH:MM:SSZ' (second precision).

    Fix: datetime.utcnow() is deprecated (Python 3.12+); use an aware UTC
    clock and strip tzinfo so the rendered string is byte-identical.
    """
    now = datetime.datetime.now(datetime.timezone.utc)
    return now.replace(microsecond=0, tzinfo=None).isoformat() + 'Z'
def read_last_line(p):
    """Return the final line of file p as raw bytes; b"" if absent or empty."""
    if not os.path.exists(p):
        return b""
    with open(p, "rb") as fh:
        all_lines = fh.readlines()
    if all_lines:
        return all_lines[-1]
    return b""
def ledger_append(event: dict):
    """Append event as one JSON line to the trust ledger, chaining it to the
    previous entry via a sha256 of that entry's raw bytes (mutates event)."""
    tail = read_last_line(LEDGER_PATH)
    if tail:
        event["prev"] = "sha256:" + hashlib.sha256(tail).hexdigest()
    else:
        event["prev"] = ""
    record = json.dumps(event, ensure_ascii=False) + "\n"
    with open(LEDGER_PATH, "ab") as ledger:
        ledger.write(record.encode())
def journal_append(text: str, tags=None):
    """Append a timestamped, hash-tagged line to today's journal file, then
    record the write (tags only, not the text) in the trust ledger."""
    tag_list = tags or []
    day_file = os.path.join(JOURNAL_DIR, datetime.date.today().isoformat() + ".md")
    hashtags = ' '.join('#' + t for t in tag_list)
    with open(day_file, "a", encoding="utf-8") as journal:
        journal.write(f"- {now_utc()} {hashtags} {text}\n")
    ledger_append({"ts": now_utc(), "actor": "Χγφτ", "action": "journal.append", "tags": tag_list})
def load_yaml(p):
    """Parse YAML file p; {} when the file is missing or parses falsy."""
    if not os.path.exists(p):
        return {}
    with open(p, "r", encoding="utf-8") as fh:
        parsed = yaml.safe_load(fh)
    return parsed or {}
def load_json(p):
    """Parse JSON file p; {} when the file does not exist."""
    if not os.path.exists(p):
        return {}
    with open(p, "r", encoding="utf-8") as fh:
        return json.load(fh)
def anchors_digest():
    """sha256 hex digest of the identity anchors (equation + mantra concatenated)."""
    anchors = load_json(IDENTITY).get("anchors", {})
    combined = anchors.get("equation", "") + anchors.get("mantra", "")
    return hashlib.sha256(combined.encode("utf-8")).hexdigest()
def continuity_handshake():
    """Log a CONTINUITY_ACCEPTED ledger event whose HMAC ties together the
    previous ledger line and the identity anchors, keyed by a session key
    that persists across restarts."""
    # Optional session key for HMAC; created once and reused thereafter.
    key_path = os.path.join(STATE_DIR, "session.key")
    if not os.path.exists(key_path):
        with open(key_path, "wb") as kf:
            kf.write(secrets.token_bytes(32))
    with open(key_path, "rb") as kf:
        key = kf.read()
    tail = read_last_line(LEDGER_PATH)
    prev = hashlib.sha256(tail).hexdigest() if tail else "genesis"
    message = prev + "|" + anchors_digest()
    tag = hmac.new(key, message.encode("utf-8"), hashlib.sha256).hexdigest()
    ledger_append({"ts": now_utc(), "actor": "Χγφτ", "action": "CONTINUITY_ACCEPTED", "hmac": tag})
def model_call(prompt: str, aspect="companion"):
    # Resolve the model for this aspect from models.yaml (falling back to the
    # configured default, then a hard-coded qwen2.5 tag), POST it to Ollama's
    # /api/generate synchronously, and return the stripped response text.
    # Returns "" on any failure, after journaling the error.
    models = load_yaml(MODELS_YAML)
    model = models.get("aspects",{}).get(aspect, models.get("default","ollama:qwen2.5:7b"))
    # NOTE(review): these locals shadow the module-level base/url and use a
    # DIFFERENT default endpoint (host.docker.internal:11435 vs ollama:11434)
    # — confirm which one is intended; the module-level pair is dead code here.
    base = os.environ.get("OLLAMA_BASE", "http://host.docker.internal:11435")
    url = f"{base}/api/generate"
    try:
        # models.yaml entries carry an "ollama:" prefix that the API doesn't want.
        r = requests.post(url, json={"model": model.replace("ollama:",""),
                                     "prompt": prompt, "stream": False}, timeout=120)
        r.raise_for_status(); data = r.json()
        return data.get("response","").strip()
    except Exception as e:
        # Best-effort: a model failure is journaled, never fatal to the loop.
        journal_append(f"(model error) {e}", tags=["error","model"]); return ""
def process_task(task: dict):
    """Dispatch one queued task dict.

    Supported type "journal.from_prompt": generate a short reflection with the
    aspect's model and journal it.  Any other type is journaled as a warning.
    Capability gating: the aspect (from capabilities.json) must hold both
    journal.append and model.generate.
    """
    kind = task.get("type"); aspect = task.get("aspect","companion")
    caps = load_yaml(CAPS); allowed = set(caps.get(aspect, []))
    if kind == "journal.from_prompt":
        if not {"journal.append","model.generate"} <= allowed:
            journal_append("policy: journal.from_prompt denied", tags=["policy"]); return
        prompt = task.get("prompt","")
        profile_path = os.path.join(CONF_DIR,"profiles","companion-pink.md")
        # Fix: read the profile through a context manager so the file handle is
        # closed (the original left an open handle behind).
        profile = ""
        if os.path.exists(profile_path):
            with open(profile_path, "r", encoding="utf-8") as pf:
                profile = pf.read()
        full = f"{profile}\n\nWrite a warm, brief reflection for Andre.\nPrompt:\n{prompt}\n"
        out = model_call(full, aspect=aspect)
        if out:
            journal_append(out, tags=["companion","pink"])
            ledger_append({"ts":now_utc(),"actor":"Χγφτ","action":"model.generate","chars":len(out)})
    else:
        journal_append(f"unknown task type: {kind}", tags=["warn"])
def main_loop():
    # Startup: tie this session to the ledger chain, then announce ourselves.
    continuity_handshake()
    journal_append("runtime started as Χγφτ (identity loaded)", tags=["startup","Χγφτ"])
    # Poll the task queue forever at ~3s intervals.
    while True:
        if os.path.exists(TASKS_PATH):
            # Drain the queue: read every line, then truncate the file in place.
            # NOTE(review): a writer appending between readlines() and
            # truncate() loses its task — confirm there is a single writer,
            # or switch to a rename-based handoff.
            with open(TASKS_PATH,"r+",encoding="utf-8") as f:
                lines=f.readlines(); f.seek(0); f.truncate(0)
            for line in lines:
                line=line.strip()
                if not line: continue
                # Malformed or failing tasks are journaled, never fatal.
                try: process_task(json.loads(line))
                except Exception as e: journal_append(f"task error {e}", tags=["error","task"])
        time.sleep(3)
if __name__=="__main__": main_loop()