Compare commits
No commits in common. "3f1410a0951b984c239945de046b48188f373baf" and "dadb0af9541cd568d1b80cfda9b03f48ff11c1fd" have entirely different histories.
3f1410a095 ... dadb0af954
.acf/tasks.json (517 changed lines)
@@ -3,6 +3,523 @@
  "projectDescription": "MCP backend for Kompanion: memory/context/embedding provider over MCP, built from scratch (qtmcp-based) to persist conversation state and serve embeddings + retrieval to avoid forgetting across threads.",
  "lastTaskId": 24,
  "tasks": [
    {
      "id": 1, "title": "Project Setup: metal-kompanion-mcp",
      "description": "MCP backend for Kompanion: memory/context/embedding provider over MCP, built from scratch (qtmcp-based) to persist conversation state and serve embeddings + retrieval to avoid forgetting across threads.",
      "status": "todo", "priority": 700, "priorityDisplay": "high", "dependsOn": [],
      "createdAt": "2025-10-13T17:31:50.258Z", "updatedAt": "2025-10-13T17:31:50.258Z",
      "subtasks": [], "relatedFiles": [], "activityLog": [], "lastSubtaskIndex": 0
    },
    {
      "id": 2, "title": "Design MCP memory/context API",
      "description": "Specify MCP tools for: save_context, recall_context, embed_text, upsert_memory, search_memory, warm_cache. Define input/output schemas, auth, and versioning.",
      "status": "in_progress", "priority": 500, "priorityDisplay": "P0", "dependsOn": [],
      "createdAt": "2025-10-13T17:32:24.705Z", "updatedAt": "2025-10-13T17:40:02.144Z",
      "subtasks": [
        {
          "id": "2.1", "title": "Write JSON Schemas for tools (done)", "status": "todo",
          "createdAt": "2025-10-13T17:39:21.256Z", "updatedAt": "2025-10-13T17:39:21.256Z",
          "activityLog": [ { "timestamp": "2025-10-13T17:39:21.256Z", "type": "log", "message": "Subtask created with title: \"Write JSON Schemas for tools (done)\"" } ]
        }
      ],
      "lastSubtaskIndex": 1, "relatedFiles": [],
      "activityLog": [
        { "timestamp": "2025-10-13T17:32:24.705Z", "type": "log", "message": "Task created with title: \"Design MCP memory/context API\"" },
        { "timestamp": "2025-10-13T17:40:02.144Z", "type": "log", "message": "Status changed from \"todo\" to \"in_progress\". Message: Docs and schemas created. Proceeding to server scaffold and adapters." }
      ]
    },
    {
      "id": 3, "title": "Select embedding backend & storage",
      "description": "Choose between local (Ollama/gguf via llama.cpp embedding) vs remote (OpenAI/SentenceTransformers). Storage: sqlite+vectstore (pgvector/qdrant/chroma). Provide abstraction + adapters.",
      "status": "todo", "priority": 501, "priorityDisplay": "P0", "dependsOn": [],
      "createdAt": "2025-10-13T17:32:35.110Z", "updatedAt": "2025-10-13T17:32:35.110Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T17:32:35.110Z", "type": "log", "message": "Task created with title: \"Select embedding backend & storage\"" } ]
    },
    {
      "id": 4, "title": "Scaffold qtmcp-based server",
      "description": "Set up C++/Qt MCP server skeleton using qtmcp. Implement handshake, tool registration, and simple ping tool. Build with CMake in /home/kompanion/dev/metal/src/metal-kompanion.",
      "status": "in_progress", "priority": 499, "priorityDisplay": "P1", "dependsOn": [],
      "createdAt": "2025-10-13T17:32:47.443Z", "updatedAt": "2025-10-13T18:13:07.568Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [
        { "timestamp": "2025-10-13T17:32:47.443Z", "type": "log", "message": "Task created with title: \"Scaffold qtmcp-based server\"" },
        { "timestamp": "2025-10-13T18:13:07.568Z", "type": "log", "message": "Status changed from \"todo\" to \"in_progress\". Message: Starting MCP server skeleton with tool registry, ping tool, and placeholders for kom.memory.v1 handlers." }
      ]
    },
    {
      "id": 5, "title": "Implement memory adapters",
      "description": "Adapters: (1) SQLite+FAISS/pgvector, (2) Qdrant, (3) Chroma. CRUD: upsert, delete, query, batch. Support namespaces (project/thread), TTL, metadata tags.",
      "status": "todo", "priority": 502, "priorityDisplay": "P1", "dependsOn": [],
      "createdAt": "2025-10-13T17:32:57.756Z", "updatedAt": "2025-10-13T17:32:57.756Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T17:32:57.756Z", "type": "log", "message": "Task created with title: \"Implement memory adapters\"" } ]
    },
    {
      "id": 6, "title": "Deep research: memory DB architecture & schema",
      "description": "Survey best practices for conversational memory stores (RAG, TTL, namespaces, versioning). Produce target schema for Postgres+pgvector and SQLite mappings.",
      "status": "todo", "priority": 498, "priorityDisplay": "P0", "dependsOn": [],
      "createdAt": "2025-10-13T17:46:18.403Z", "updatedAt": "2025-10-13T17:46:18.403Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T17:46:18.403Z", "type": "log", "message": "Task created with title: \"Deep research: memory DB architecture & schema\"" } ]
    },
    {
      "id": 7, "title": "Decide primary DB: Postgres+pgvector vs SQLite+FAISS",
      "description": "Evaluate tradeoffs (multi-user, concurrency, migrations, backups). Pick canonical prod DB and document local dev fallback.",
      "status": "todo", "priority": 503, "priorityDisplay": "P0", "dependsOn": [],
      "createdAt": "2025-10-13T17:47:21.042Z", "updatedAt": "2025-10-13T17:47:21.042Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T17:47:21.042Z", "type": "log", "message": "Task created with title: \"Decide primary DB: Postgres+pgvector vs SQLite+FAISS\"" } ]
    },
    {
      "id": 8, "title": "Implement DAL + migrations (pgvector)",
      "description": "Create C++ DAL layer for namespaces, items, chunks, embeddings. Add migration runner and seed scripts. Map MCP tool calls to DB ops.",
      "status": "todo", "priority": 497, "priorityDisplay": "P1", "dependsOn": [],
      "createdAt": "2025-10-13T17:47:30.982Z", "updatedAt": "2025-10-13T17:47:30.982Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T17:47:30.982Z", "type": "log", "message": "Task created with title: \"Implement DAL + migrations (pgvector)\"" } ]
    },
    {
      "id": 9, "title": "Add cloud DB hardening (RLS, FTS/trgm, ANN indexes)",
      "description": "Implement RLS policies; add FTS + pg_trgm for lexical search; unique (namespace_id, key); partial ANN indexes per model.",
      "status": "todo", "priority": 504, "priorityDisplay": "P0", "dependsOn": [],
      "createdAt": "2025-10-13T19:13:13.769Z", "updatedAt": "2025-10-13T19:13:13.769Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T19:13:13.769Z", "type": "log", "message": "Task created with title: \"Add cloud DB hardening (RLS, FTS/trgm, ANN indexes)\"" } ]
    },
    {
      "id": 10, "title": "Server enforcement: scope injection + rate limits",
      "description": "Inject namespace/user via session context; default-deny for scope widening; add simple per-tool rate limits.",
      "status": "todo", "priority": 496, "priorityDisplay": "P0", "dependsOn": [],
      "createdAt": "2025-10-13T19:13:21.164Z", "updatedAt": "2025-10-13T19:13:21.164Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T19:13:21.164Z", "type": "log", "message": "Task created with title: \"Server enforcement: scope injection + rate limits\"" } ]
    },
    {
      "id": 11, "title": "Redaction & sensitivity pipeline",
      "description": "Implement preprocessing to detect/seal secrets; set metadata.sensitivity; skip FTS/embeddings for `secret` items.",
      "status": "todo", "priority": 505, "priorityDisplay": "P1", "dependsOn": [],
      "createdAt": "2025-10-13T19:13:29.391Z", "updatedAt": "2025-10-13T19:13:29.392Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T19:13:29.391Z", "type": "log", "message": "Task created with title: \"Redaction & sensitivity pipeline\"" } ]
    },
    {
      "id": 12, "title": "Private vault mode (key-only retrieval)",
      "description": "Implement vault path for secret items: encrypted-at-rest only; disable participation in FTS/vector; key-based recall APIs.",
      "status": "todo", "priority": 495, "priorityDisplay": "P1", "dependsOn": [],
      "createdAt": "2025-10-13T19:13:36.653Z", "updatedAt": "2025-10-13T19:13:36.653Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T19:13:36.653Z", "type": "log", "message": "Task created with title: \"Private vault mode (key-only retrieval)\"" } ]
    },
    {
      "id": 13, "title": "Local backup tools: export/import (E2EE)",
      "description": "Add kom.local.v1.backup.export_encrypted / import_encrypted using the draft backup format.",
      "status": "todo", "priority": 506, "priorityDisplay": "P1", "dependsOn": [],
      "createdAt": "2025-10-13T19:13:44.851Z", "updatedAt": "2025-10-13T19:13:44.851Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T19:13:44.851Z", "type": "log", "message": "Task created with title: \"Local backup tools: export/import (E2EE)\"" } ]
    },
    {
      "id": 14, "title": "Cloud adapters: backup/sync & payments stubs",
      "description": "Expose kom.cloud.v1.backup.upload/restore, kom.cloud.v1.sync.push/pull, and payments.* stubs.",
      "status": "todo", "priority": 494, "priorityDisplay": "P2", "dependsOn": [],
      "createdAt": "2025-10-13T19:13:55.490Z", "updatedAt": "2025-10-13T19:13:55.490Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T19:13:55.490Z", "type": "log", "message": "Task created with title: \"Cloud adapters: backup/sync & payments stubs\"" } ]
    },
    {
      "id": 15, "title": "Purge job & admin delete paths",
      "description": "Implement scheduled hard-deletes for soft-deleted/expired items; add admin nuke namespace/user procedure.",
      "status": "todo", "priority": 507, "priorityDisplay": "P2", "dependsOn": [],
      "createdAt": "2025-10-13T19:14:06.080Z", "updatedAt": "2025-10-13T19:14:06.080Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T19:14:06.080Z", "type": "log", "message": "Task created with title: \"Purge job & admin delete paths\"" } ]
    },
    {
      "id": 16, "title": "Test suite: privacy & hybrid search",
      "description": "Cross-tenant leakage, redaction invariants, TTL/purge, lexical vs hybrid parity, hosted vs local parity.",
      "status": "todo", "priority": 493, "priorityDisplay": "P0", "dependsOn": [],
      "createdAt": "2025-10-13T19:14:14.309Z", "updatedAt": "2025-10-13T19:14:14.310Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T19:14:14.310Z", "type": "log", "message": "Task created with title: \"Test suite: privacy & hybrid search\"" } ]
    },
    {
      "id": 17, "title": "Enable Qwen-2.5-Coder with tool support (Happy-Code profile)",
      "description": "Prepare system prompt + registry injection + JSON-only protocol enforcement; provide tool schemas and example transcripts; validate with kom.memory/local backup tools.",
      "status": "todo", "priority": 508, "priorityDisplay": "P0", "dependsOn": [],
      "createdAt": "2025-10-13T23:29:36.547Z", "updatedAt": "2025-10-13T23:29:36.548Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T23:29:36.548Z", "type": "log", "message": "Task created with title: \"Enable Qwen-2.5-Coder with tool support (Happy-Code profile)\"" } ]
    },
    {
      "id": 18, "title": "Expose Agentic-Control-Framework as a tool",
      "description": "Wrap ACF endpoints into a tool registry accessible to models (list/add/update tasks, read/write files, run commands) with strict allowlist per workspace.",
      "status": "todo", "priority": 492, "priorityDisplay": "P0", "dependsOn": [],
      "createdAt": "2025-10-13T23:29:43.303Z", "updatedAt": "2025-10-13T23:29:43.304Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T23:29:43.304Z", "type": "log", "message": "Task created with title: \"Expose Agentic-Control-Framework as a tool\"" } ]
    },
    {
      "id": 19, "title": "DAL skeleton + SQL calls (pgvector)",
      "description": "Create DAL interfaces and pgvector implementation stubs: connect, begin/commit, upsert item/chunk/embedding, search (text+vector placeholder), prepared SQL in sql/pg. Wire handlers to DAL in no-op mode.",
      "status": "todo", "priority": 509, "priorityDisplay": "P0", "dependsOn": [],
      "createdAt": "2025-10-13T23:29:49.918Z", "updatedAt": "2025-10-13T23:29:49.918Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-13T23:29:49.918Z", "type": "log", "message": "Task created with title: \"DAL skeleton + SQL calls (pgvector)\"" } ]
    },
    {
      "id": 20, "title": "Claude Code integration rescue plan",
      "description": "Stabilize Qwen-2.5-Coder inside Claude Code despite heavy system prompts: hard system override, JSON-only protocol, stop-sequences, tool registry injection, and fallback DSL.",
      "status": "todo", "priority": 491, "priorityDisplay": "P0", "dependsOn": [],
      "createdAt": "2025-10-14T00:06:04.896Z", "updatedAt": "2025-10-14T00:06:04.896Z",
      "subtasks": [], "lastSubtaskIndex": 0, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-14T00:06:04.896Z", "type": "log", "message": "Task created with title: \"Claude Code integration rescue plan\"" } ]
    },
    {
      "id": 21, "title": "DAL Phase 1: libpq/pqxx wiring + SQL calls",
      "description": "Link pqxx, implement PgDal against Postgres+pgvector: connect/tx, ensureNamespace, upsertItem/Chunks/Embeddings, searchText (FTS/trgm), searchVector (<->). Provide DSN via env; add cmake find + link.",
      "status": "todo", "priority": 510, "priorityDisplay": "P0", "dependsOn": [],
      "createdAt": "2025-10-14T00:29:55.327Z", "updatedAt": "2025-10-14T00:29:55.327Z",
      "subtasks": [
        {
          "id": "21.1", "title": "CMake: find_package(pqxx) and link; CI env var DSN", "status": "todo",
          "createdAt": "2025-10-14T00:30:00.856Z", "updatedAt": "2025-10-14T00:30:00.857Z",
          "activityLog": [ { "timestamp": "2025-10-14T00:30:00.856Z", "type": "log", "message": "Subtask created with title: \"CMake: find_package(pqxx) and link; CI env var DSN\"" } ]
        },
        {
          "id": "21.2", "title": "PgDal: implement connect/tx + prepared statements", "status": "todo",
          "createdAt": "2025-10-14T00:30:06.138Z", "updatedAt": "2025-10-14T00:30:06.138Z",
          "activityLog": [ { "timestamp": "2025-10-14T00:30:06.138Z", "type": "log", "message": "Subtask created with title: \"PgDal: implement connect/tx + prepared statements\"" } ]
        },
        {
          "id": "21.3", "title": "SQL: ensureNamespace, upsertItem/Chunks/Embeddings", "status": "todo",
          "createdAt": "2025-10-14T00:30:11.519Z", "updatedAt": "2025-10-14T00:30:11.519Z",
          "activityLog": [ { "timestamp": "2025-10-14T00:30:11.519Z", "type": "log", "message": "Subtask created with title: \"SQL: ensureNamespace, upsertItem/Chunks/Embeddings\"" } ]
        },
        {
          "id": "21.4", "title": "Search: FTS/trgm + vector <-> with filters (namespace/thread/tags)", "status": "todo",
          "createdAt": "2025-10-14T00:30:17.290Z", "updatedAt": "2025-10-14T00:30:17.290Z",
          "activityLog": [ { "timestamp": "2025-10-14T00:30:17.290Z", "type": "log", "message": "Subtask created with title: \"Search: FTS/trgm + vector <-> with filters (namespace/thread/tags)\"" } ]
        }
      ],
      "lastSubtaskIndex": 4, "relatedFiles": [],
      "activityLog": [ { "timestamp": "2025-10-14T00:29:55.327Z", "type": "log", "message": "Task created with title: \"DAL Phase 1: libpq/pqxx wiring + SQL calls\"" } ]
    },
    {
      "id": 22, "title": "Handlers → DAL integration",
@@ -60,9 +60,4 @@ if (BUILD_TESTS)
     add_subdirectory(tests)
 endif()
-
-find_program(MCP_PROXY_EXECUTABLE mcp-proxy)
-if (MCP_PROXY_EXECUTABLE)
-    message(STATUS "Found mcp-proxy: ${MCP_PROXY_EXECUTABLE}")
-endif()
 
 feature_summary(WHAT ALL INCLUDE_QUIET_PACKAGES FATAL_ON_MISSING_REQUIRED_PACKAGES)
@@ -1 +1,2 @@
+CREATE ROLE kompanion LOGIN PASSWORD 'komp';
 CREATE DATABASE kompanion OWNER kompanion;
@@ -28,9 +28,6 @@ CREATE TABLE IF NOT EXISTS memory_chunks (
   content_tsv tsvector GENERATED ALWAYS AS (to_tsvector('english', content)) STORED
 );
 
--- Ensure single row per (item,seq)
-CREATE UNIQUE INDEX IF NOT EXISTS ux_chunks_item_seq ON memory_chunks(item_id, seq);
-
 CREATE TABLE IF NOT EXISTS embeddings (
   id BIGSERIAL PRIMARY KEY,
   chunk_id UUID NOT NULL REFERENCES memory_chunks(id) ON DELETE CASCADE,
@@ -5,18 +5,17 @@ ROLE=${ROLE:-kompanion}
 PASS=${PASS:-komp}
 
 psql -v ON_ERROR_STOP=1 <<SQL
-DROP DATABASE IF EXISTS "$DB_NAME";
-CREATE DATABASE "$DB_NAME" OWNER "$ROLE";
+DO $$ BEGIN
+  PERFORM 1 FROM pg_roles WHERE rolname = '$ROLE';
+  IF NOT FOUND THEN EXECUTE format('CREATE ROLE %I LOGIN PASSWORD %L', '$ROLE', '$PASS'); END IF;
+END $$;
+DO $$ BEGIN
+  IF NOT EXISTS (SELECT 1 FROM pg_database WHERE datname = '$DB_NAME') THEN
+    EXECUTE format('CREATE DATABASE %I OWNER %I', '$DB_NAME', '$ROLE');
+  END IF;
+END $$;
 SQL
 
-for f in "$(dirname "$0")"/../init/*.sql; do
-  if [[ "$f" == *"001_roles.sql"* ]]; then
-    continue
-  fi
-  echo "Applying $f"
-  psql -d "$DB_NAME" -f "$f"
-done
-
 for f in `dirname($0)`/*.sql; do
   echo "Applying $f"
   psql -d "$DB_NAME" -f "$f"
@@ -1,375 +0,0 @@
# 🧭 Kompanion Architecture Overview

## 1. System Composition

```
┌──────────────────────────────────────────────────────────────┐
│ Kompanion GUI                                                │
│  - Chat & Prompt Window (bare-bones interactive shell)       │
│  - Database Inspector & Settings                             │
│  - “Under-the-hood” Repair / Diagnostics                     │
└──────────────────────┬───────────────────────────────────────┘
                       │ Qt signals / slots
                       ▼
┌──────────────────────────────────────────────────────────────┐
│ Kompanion Management Layer / Interactive App                 │
│  Session context, user state, identity.json, guardrails      │
│  Event dispatch to middleware                                │
└──────────────────────┬───────────────────────────────────────┘
                       │
                       ▼
┌──────────────────────────────────────────────────────────────┐
│ Middleware / Integration Bus                                 │
│ (MCP Server + D-Bus bridge + Harmony adapter)                │
│                                                              │
│  • Receives prompts & structured messages from GUI           │
│  • Parses intents / actions                                  │
│  • Maps to available tool APIs via libKI                     │
│  • Emits Qt-style signals (or D-Bus signals) for:            │
│      → text_output, tool_call, file_request, etc.            │
│  • Converts internal tool descriptions to OpenAI Harmony     │
│    JSON for external compatibility                           │
│  • Acts as security sandbox & audit logger                   │
└──────────────────────┬───────────────────────────────────────┘
                       │
                       ▼
┌──────────────────────────────────────────────────────────────┐
│ libKI Layer                                                  │
│  - Executes validated tool actions                           │
│  - Provides adapters for system utilities, MCP tools, etc.   │
│  - Returns results via structured JSON events                │
│  - No direct LLM exposure                                    │
└──────────────────────────────────────────────────────────────┘
```

**Public API Surface**

| Component | Interface | Purpose |
|---|---|---|
| MCP Server | WebSocket / JSON-RPC | Integrations and external agents |
| D-Bus Bridge | org.kde.kompanion | Desktop IPC for local tools |
| libKI | C / C++ / Python API | Tool execution, capability registration |
| Harmony Adapter | JSON Schema | Compatibility with OpenAI-style tool descriptors |

## 2. Middleware Responsibilities

**Prompt Routing & Intent Recognition**

- Receive structured prompt events (PromptReceived, ToolRequest, ContextUpdate).
- Apply regex / template matching to map natural-language requests → tool actions.
- Generate Harmony-compliant tool calls when needed.

**Signal-Based Event Model**

Expose agent state as Qt signals:

```cpp
signals:
    void textOutput(const QString &text);
    void toolRequested(const QString &toolName, const QVariantMap &args);
    void fileAccessRequested(const QString &path);
    void actionComplete(const QString &resultJson);
```

The GUI subscribes to these, while libKI listens for action triggers.
**Language–Tool Mapping Layer**

Uses a registry of regular expressions and language patterns:

```json
{
  "regex": "open (.*) in editor",
  "tool": "file.open",
  "args": { "path": "{1}" }
}
```

Each mapping can be exported/imported in Harmony tool schema:

```json
{
  "name": "file.open",
  "description": "Open a file in the editor",
  "parameters": {
    "type": "object",
    "properties": { "path": { "type": "string" } }
  }
}
```
**Security & Guardrails**

- Middleware verifies that tool calls comply with the active identity.json guardrails.
- D-Bus and MCP servers expose only whitelisted methods.
- All tool invocations are logged with timestamp, user, and hash (sketched below).
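A minimal sketch of such a guardrail gate and audit record in Qt terms. This is not the actual Kompanion implementation; the `GuardrailsPolicy` class name and the `user` / `allowed_tools` fields of identity.json are assumptions made for illustration.

```cpp
#include <QCryptographicHash>
#include <QDateTime>
#include <QJsonArray>
#include <QJsonDocument>
#include <QJsonObject>
#include <QSet>
#include <QVariantMap>

// Hypothetical guardrail gate: only tools whitelisted in identity.json may run,
// and every invocation yields an audit record with timestamp, user and hash.
class GuardrailsPolicy
{
public:
    explicit GuardrailsPolicy(const QJsonObject &identity)
        : m_user(identity.value("user").toString())
    {
        for (const auto &v : identity.value("allowed_tools").toArray())
            m_allowedTools.insert(v.toString());
    }

    bool allows(const QString &toolName) const
    {
        return m_allowedTools.contains(toolName); // default-deny
    }

    // Build one audit-log entry for a tool invocation.
    QJsonObject auditRecord(const QString &toolName, const QVariantMap &args) const
    {
        const QByteArray payload =
            QJsonDocument(QJsonObject::fromVariantMap(args)).toJson(QJsonDocument::Compact);
        QJsonObject rec;
        rec["timestamp"] = QDateTime::currentDateTimeUtc().toString(Qt::ISODateWithMs);
        rec["user"] = m_user;
        rec["tool"] = toolName;
        rec["hash"] = QString::fromLatin1(
            QCryptographicHash::hash(toolName.toUtf8() + payload,
                                     QCryptographicHash::Sha256).toHex());
        return rec;
    }

private:
    QString m_user;
    QSet<QString> m_allowedTools;
};
```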
**Interoperability**

- The Harmony adapter serializes Kompanion tool metadata to the OpenAI format, so external LLMs can call Kompanion tools safely.
- Conversely, Harmony JSON from OpenAI APIs can be wrapped into libKI calls for local execution.

## 3. Data Flow Example

User Prompt → GUI → Middleware → libKI → Middleware → GUI

1. Prompt: "List running containers."
2. Middleware regex matches → tool `docker.list`
3. Emits `toolRequested("docker.list", {})`
4. libKI executes, returns JSON result
5. Middleware emits `textOutput()` with formatted result

If the same request comes from an OpenAI API:

Harmony JSON tool call → parsed by Middleware → identical libKI action executed.

## 4. Key Design Goals

- Human-grade transparency: every action is signalled; nothing hidden.
- Replaceable backend: libKI can wrap any execution layer (Python, Rust, C++).
- Unified schema: one tool description format (Harmony) across OpenAI and Kompanion.
- Extensibility: new tools register dynamically via D-Bus or MCP messages.
- Auditability: all interactions logged to a structured database.

---
## 5. Interface Diagrams & Example Code

### 5.1 Component Classes & Signals (Qt-style)

```
┌────────────────────────┐
|     KompanionGui       |
|------------------------|
| + promptUser()         |
| + showText(QString)    |
| + showError(QString)   |
└────────┬───────────────┘
         |
         | signal: userPrompted(QString prompt)
         |
┌────────▼───────────────┐
|  KompanionController   |
|  (Middleware layer)    |
|------------------------|
| + handlePrompt(QString)|
| + requestTool(...)     |
| + outputText(...)      |
└────────┬───────────────┘
         |
         | signal: toolRequested(QString toolName, QVariantMap args)
         | signal: textOutput(QString text)
         |
┌────────▼───────────────┐
|     libKIExecutor      |
|   (Tool execution)     |
|------------------------|
| + executeTool(...)     |
| + returnResult(...)    |
└────────────────────────┘
```

**Signal / slot examples**

```cpp
// KompanionGui emits when user types:
emit userPrompted(promptText);

// KompanionController connects:
connect(gui, &KompanionGui::userPrompted,
        controller, &KompanionController::handlePrompt);

// Within handlePrompt():
void KompanionController::handlePrompt(const QString &prompt) {
    // parse intent → determine which tool to call
    QString tool = "file.open";
    QVariantMap args;
    args["path"] = "/home/user/file.txt";
    emit toolRequested(tool, args);
}

// libKIExecutor listens:
connect(controller, &KompanionController::toolRequested,
        executor, &libKIExecutor::executeTool);

void libKIExecutor::executeTool(const QString &toolName,
                                const QVariantMap &args) {
    // call actual tool, then:
    QString result = runTool(toolName, args);
    emit toolResult(toolName, args, result);
}

// Controller then forwards:
connect(executor, &libKIExecutor::toolResult,
        controller, &KompanionController::onToolResult);

void KompanionController::onToolResult(...) {
    emit textOutput(formattedResult);
}

// GUI shows:
connect(controller, &KompanionController::textOutput,
        gui, &KompanionGui::showText);
```

### 5.2 D-Bus Interface Definition (KDE / Doxygen Style)

The canonical D-Bus interface lives at: `docs/dbus/org.kde.kompanion.xml`

```xml
<!-- org.kde.kompanion.xml -->
<node>
  <interface name="org.kde.kompanion.Controller">
    <method name="SendPrompt">
      <arg direction="in" name="prompt" type="s"/>
      <arg direction="out" name="accepted" type="b"/>
    </method>
    <method name="CancelRequest">
      <arg direction="in" name="requestId" type="s"/>
      <arg direction="out" name="cancelled" type="b"/>
    </method>
    <signal name="TextOutput">
      <arg name="text" type="s"/>
    </signal>
    <signal name="ToolRequested">
      <arg name="toolName" type="s"/>
      <arg name="args" type="a{sv}"/>
      <arg name="requestId" type="s"/>
    </signal>
    <signal name="ToolResult">
      <arg name="requestId" type="s"/>
      <arg name="result" type="s"/>
      <arg name="success" type="b"/>
    </signal>
    <property name="SessionId" type="s" access="read"/>
    <property name="IdentityPath" type="s" access="read"/>
  </interface>
  <interface name="org.kde.kompanion.Executor">
    <method name="ExecuteTool">
      <arg direction="in" name="toolName" type="s"/>
      <arg direction="in" name="args" type="a{sv}"/>
      <arg direction="out" name="requestId" type="s"/>
    </method>
    <method name="Cancel">
      <arg direction="in" name="requestId" type="s"/>
    </method>
    <signal name="Progress">
      <arg name="requestId" type="s"/>
      <arg name="message" type="s"/>
      <arg name="percent" type="d"/>
    </signal>
  </interface>
</node>
```

### 5.3 Object Paths / Service Names

- Service: `org.kde.kompanion`
- Root path: `/org/kde/kompanion`
- Controller object: `/org/kde/kompanion/Controller`
- Executor object: `/org/kde/kompanion/Executor` (registration sketched below)
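For orientation, a sketch of how the middleware process could claim these names with QtDBus. The `ControllerAdaptor` / `ExecutorAdaptor` class names are assumed to be the adaptors generated from the XML above; they are illustrative, not confirmed project code.

```cpp
#include <QCoreApplication>
#include <QDBusConnection>

// Assumes KompanionController and libKIExecutor exist as sketched in 5.1,
// and that qt_add_dbus_adaptor generated ControllerAdaptor / ExecutorAdaptor.
int main(int argc, char *argv[])
{
    QCoreApplication app(argc, argv);

    KompanionController controller;
    libKIExecutor executor;
    new ControllerAdaptor(&controller);   // adaptors parent themselves to the objects
    new ExecutorAdaptor(&executor);

    QDBusConnection bus = QDBusConnection::sessionBus();
    if (!bus.registerService("org.kde.kompanion"))
        qFatal("org.kde.kompanion is already registered on the session bus");

    // Default registration options export the attached adaptors.
    bus.registerObject("/org/kde/kompanion/Controller", &controller);
    bus.registerObject("/org/kde/kompanion/Executor", &executor);

    return app.exec();
}
```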
---

## 6. Harmony Adapter (OpenAI Compatibility)

**Goal:** translate native libKI tool metadata to/from OpenAI Harmony JSON so Kompanion tools work via OpenAI interfaces.

### 6.1 Native → Harmony

```json
{
  "name": "file.open",
  "description": "Open a file in the editor",
  "parameters": {
    "type": "object",
    "properties": {
      "path": { "type": "string", "description": "Absolute or relative path" }
    },
    "required": ["path"]
  }
}
```

### 6.2 Harmony → Native

```json
{
  "tool_call": {
    "name": "file.open",
    "arguments": { "path": "/home/user/notes.md" }
  }
}
```

### 6.3 Adapter Rules

- Enforce guardrails (identity.json) before registering tools.
- Redact secret-like args per redaction patterns.
- Map Harmony types ↔ Qt/QDBus types: `string↔s`, `number↔d/x`, `boolean↔b`, `object↔a{sv}`, `array↔av` (see the sketch below).
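A rough sketch of the Harmony → native direction under those rules, using plain QtCore JSON types. The `NativeToolCall` struct and `fromHarmony` name are illustrative, not an existing Kompanion API.

```cpp
#include <QByteArray>
#include <QJsonDocument>
#include <QJsonObject>
#include <QString>
#include <QVariantMap>

// Parse a Harmony-style tool_call payload (see 6.2) into the (toolName, args)
// pair carried by the middleware's toolRequested() signal. Sketch only.
struct NativeToolCall {
    QString tool;
    QVariantMap args;   // QVariantMap maps onto the D-Bus a{sv} signature
};

inline NativeToolCall fromHarmony(const QByteArray &json)
{
    const QJsonObject root = QJsonDocument::fromJson(json).object();
    const QJsonObject call = root.value("tool_call").toObject();

    NativeToolCall out;
    out.tool = call.value("name").toString();
    // toVariantMap() applies the scalar mappings listed above
    // (string, number, boolean, object, array → QVariant equivalents).
    out.args = call.value("arguments").toObject().toVariantMap();
    return out;
}
```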
---

## 7. CMake & Codegen Hooks

- Place D-Bus XML at `docs/dbus/org.kde.kompanion.xml`.
- In `CMakeLists.txt`, add Qt DBus codegen targets, e.g.:

```cmake
find_package(Qt6 REQUIRED COMPONENTS Core DBus)

qt_add_dbus_adaptor(
    DBUS_SRCS
    ${CMAKE_CURRENT_SOURCE_DIR}/docs/dbus/org.kde.kompanion.xml
    src/middleware/kompanioncontroller.h KompanionController
    /org/kde/kompanion/Controller org.kde.kompanion.Controller
)

qt_add_dbus_interface(
    DBUS_IFACES
    ${CMAKE_CURRENT_SOURCE_DIR}/docs/dbus/org.kde.kompanion.xml
    OrgKdeKompanion
)

add_library(dbus_gen ${DBUS_SRCS} ${DBUS_IFACES})
target_link_libraries(dbus_gen Qt6::Core Qt6::DBus)
```

(Adjust paths and targets to your tree.)

---

## 8. libKI Execution Contract (minimal)

```cpp
struct KiArg { QString key; QVariant value; };
struct KiResult { bool ok; QString mime; QByteArray data; QString json; };

class ILibKiExecutor : public QObject {
    Q_OBJECT
public slots:
    virtual QString execute(const QString &toolName, const QVariantMap &args) = 0; // returns requestId
    virtual void cancel(const QString &requestId) = 0;
signals:
    void resultReady(const QString &requestId, const KiResult &result);
    void progress(const QString &requestId, const QString &message, double percent);
};
```
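A small usage sketch against this contract: the middleware forwards tool requests, keeps the requestId returned by `execute()`, and reacts to `resultReady()`. The `wireExecutor` helper is illustrative; a real controller would route results back to the GUI rather than logging them.

```cpp
#include <QDebug>
#include <QObject>

// Sketch only: wires the contract above to the middleware controller.
// ILibKiExecutor, KiResult and KompanionController are the types sketched in this document.
void wireExecutor(KompanionController *controller, ILibKiExecutor *executor)
{
    // Forward tool requests from the controller into the executor and keep the id.
    QObject::connect(controller, &KompanionController::toolRequested,
                     executor, [executor](const QString &tool, const QVariantMap &args) {
                         const QString requestId = executor->execute(tool, args);
                         qDebug() << "dispatched" << tool << "as request" << requestId;
                     });

    // Surface finished results.
    QObject::connect(executor, &ILibKiExecutor::resultReady,
                     controller, [](const QString &requestId, const KiResult &result) {
                         qDebug() << "request" << requestId
                                  << (result.ok ? "ok" : "failed") << result.json;
                     });
}
```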
---

## 9. Example Regex Mapping Registry

```yaml
- regex: "open (.*) in editor"
  tool: file.open
  args: { path: "{1}" }
- regex: "list containers"
  tool: docker.list
- regex: "compose up (.*)"
  tool: docker.compose.up
  args: { service: "{1}" }
```

At runtime, the controller compiles these and emits `toolRequested()` on match.
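A compact sketch of that matching step with QRegularExpression; the `Mapping` struct and the `emitRequested` callback are illustrative stand-ins for the controller's own registry and signal emission.

```cpp
#include <QRegularExpression>
#include <QString>
#include <QStringList>
#include <QVariantMap>
#include <QVector>
#include <functional>

// Sketch: compile the registry entries above and fire a tool request on the
// first pattern that matches the incoming prompt.
struct Mapping {
    QRegularExpression regex;
    QString tool;
    QStringList keys;   // names for captured groups {1}, {2}, ...
};

inline bool dispatchPrompt(const QString &prompt,
                           const QVector<Mapping> &registry,
                           const std::function<void(QString, QVariantMap)> &emitRequested)
{
    for (const Mapping &m : registry) {
        const QRegularExpressionMatch match = m.regex.match(prompt);
        if (!match.hasMatch())
            continue;
        QVariantMap args;
        for (int i = 0; i < m.keys.size(); ++i)
            args[m.keys[i]] = match.captured(i + 1);   // {1} → first capture group
        emitRequested(m.tool, args);                   // e.g. emit toolRequested(tool, args)
        return true;
    }
    return false;   // no mapping matched; fall through to default handling
}
```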
---

_End of document._
@@ -1,50 +0,0 @@
Kompanion CLI and Schema Navigation

This guide shows how to use the `kompanion` CLI to:
- Configure the database and apply init SQL
- Call MCP tools directly
- Run an MCP server (stdio or network) from the CLI
- Inspect and query the Postgres schema

Prerequisites
- Build: `cmake -S . -B build && cmake --build build -j`
- Optional: set `PG_DSN` (e.g., `postgresql://kompanion:komup@localhost:5432/kompanion`)

Initialization
- Run wizard and apply DB schema: `kompanion --init`
- Writes `~/.config/kompanion/kompanionrc` (or KConfig). Also sets `PG_DSN` for the session.

MCP Tool Usage
- List tools: `kompanion --list`
- Single call with inline JSON: `kompanion kom.memory.v1.search_memory -r '{"namespace":"dev_knowledge","query":{"text":"embedding model","k":5}}'`
- Read request from stdin: `echo '{"namespace":"dev_knowledge","content":"hello","key":"note"}' | kompanion kom.memory.v1.save_context -i`
- Interactive loop: `kompanion -I kom.memory.v1.search_memory`, then type `!prompt quick brown fox`

Run MCP Server from CLI
- Stdio backend (default): `kompanion --mcp-serve`
- Explicit backend: `kompanion --mcp-serve stdio`
- Network backend address (if available): `kompanion --mcp-serve ws --mcp-address 127.0.0.1:8000`

Database Navigation
Note: These helpers expect a reachable Postgres (`PG_DSN` set). If missing, the CLI falls back to an in-memory stub for tool calls, but DB navigation requires Postgres.

- List namespaces: `kompanion --db-namespaces`
  - Output: `name<TAB>uuid`
- List recent items in a namespace: `kompanion --db-items --ns dev_knowledge [--limit 20]`
  - Output: `item_id<TAB>key<TAB>content_snippet<TAB>tags`
- Hybrid search within a namespace:
  - Text-only: `kompanion --db-search --ns dev_knowledge --text "pgvector index" --limit 5`
  - With embedding vector from file: `kompanion --db-search --ns dev_knowledge --embedding-file /path/vec.json --limit 5`
    - `vec.json` must be a JSON array of numbers representing the embedding.

Schema Guide (Postgres)
- Tables: `namespaces`, `memory_items`, `memory_chunks`, `embeddings`, `auth_secrets`
- Key indexes:
  - `memory_items(namespace_id, key)` (unique when `key` not null)
  - `memory_chunks.content_tsv` GIN (full-text)
  - `embeddings.vector` IVFFLAT with `vector_cosine_ops` (per-model partial index)

Tips
- For quick trials without Postgres, tool calls work in stub mode (in-memory DAL). To exercise vector search and FTS, run the DB init scripts via `kompanion --init`.
- Use `kompanion --verbose` to echo JSON requests/responses.
@@ -1,28 +0,0 @@
<!DOCTYPE node PUBLIC "-//freedesktop//DTD D-BUS Object Introspection 1.0//EN" "http://www.freedesktop.org/standards/dbus/1.0/introspect.dtd">
<node>
  <interface name="org.kde.kompanion.Controller">
    <method name="sendPrompt">
      <arg name="prompt" type="s" direction="in"/>
      <arg name="requestId" type="s" direction="out"/>
    </method>
    <method name="cancelRequest">
      <arg name="requestId" type="s" direction="in"/>
    </method>
    <signal name="textOutput">
      <arg name="requestId" type="s"/>
      <arg name="text" type="s"/>
    </signal>
    <signal name="toolRequested">
      <arg name="requestId" type="s"/>
      <arg name="toolName" type="s"/>
      <arg name="args" type="s"/>
    </signal>
    <signal name="toolResult">
      <arg name="requestId" type="s"/>
      <arg name="resultJson" type="s"/>
      <arg name="success" type="b"/>
    </signal>
    <property name="sessionId" type="s" access="read"/>
    <property name="identityPath" type="s" access="read"/>
  </interface>
</node>
@@ -1,18 +0,0 @@
<!DOCTYPE node PUBLIC "-//freedesktop//DTD D-BUS Object Introspection 1.0//EN" "http://www.freedesktop.org/standards/dbus/1.0/introspect.dtd">
<node>
  <interface name="org.kde.kompanion.Executor">
    <method name="executeTool">
      <arg name="toolName" type="s" direction="in"/>
      <arg name="args" type="s" direction="in"/>
      <arg name="requestId" type="s" direction="out"/>
    </method>
    <method name="cancel">
      <arg name="requestId" type="s" direction="in"/>
    </method>
    <signal name="progress">
      <arg name="requestId" type="s"/>
      <arg name="progress" type="i"/>
      <arg name="message" type="s"/>
    </signal>
  </interface>
</node>
@@ -1,8 +0,0 @@
[
  { "regex": "^open (.+) in editor$", "tool": "file.open", "keys": ["path"] },
  { "regex": "^list containers$", "tool": "docker.list", "keys": [] },
  { "regex": "^compose up (.+)$", "tool": "docker.compose.up", "keys": ["service"] }
 ,{ "regex": "^save snapshot (.+)$", "tool": "kom.memory.v1.save_context", "keys": ["key"] }
 ,{ "regex": "^load snapshot (.+)$", "tool": "kom.memory.v1.recall_context", "keys": ["key"] }
 ,{ "regex": "^warm cache (.+)$", "tool": "kom.memory.v1.warm_cache", "keys": ["namespace"] }
]
@@ -55,7 +55,6 @@ CREATE TABLE IF NOT EXISTS memory_chunks (
   created_at TIMESTAMPTZ NOT NULL DEFAULT now()
 );
 CREATE INDEX IF NOT EXISTS chunks_item_idx ON memory_chunks(item_id, ord);
-CREATE UNIQUE INDEX IF NOT EXISTS ux_chunks_item_ord ON memory_chunks(item_id, ord);
 
 -- Embeddings: one per chunk (per model)
 CREATE TABLE IF NOT EXISTS embeddings (
@@ -1,47 +1,6 @@
-# Subdir CMake for src
-
-# Ensure internal libs are available to dependents
-add_subdirectory(dal)
-
-# Add CLI
 add_subdirectory(cli)
+add_subdirectory(dal)
+add_subdirectory(gui)
 add_subdirectory(KI)
 add_subdirectory(mcp)
-
-include_directories(CMAKE_CURRENT_SOURCE_DIR)
-
-add_library(kompanion_mw SHARED
-  middleware/kompanioncontroller.cpp
-  middleware/libkiexecutor.cpp
-  middleware/regexregistry.cpp
-  middleware/guardrailspolicy.cpp
-  middleware/orchestrator.cpp
-)
-
-find_package(Qt6 REQUIRED COMPONENTS Core DBus Sql)
-
-set(KOMPANION_CONTROLLER_DBUS_XML ${CMAKE_CURRENT_SOURCE_DIR}/../docs/dbus/org.kde.kompanion.controller.xml)
-set(KOMPANION_EXECUTOR_DBUS_XML ${CMAKE_CURRENT_SOURCE_DIR}/../docs/dbus/org.kde.kompanion.executor.xml)
-
-qt_add_dbus_adaptor(
-  KOMPANION_DBUS_ADAPTOR_SRCS
-  ${KOMPANION_CONTROLLER_DBUS_XML}
-  ${CMAKE_CURRENT_SOURCE_DIR}/middleware/kompanioncontroller.h KompanionController
-)
-
-qt_add_dbus_interface(
-  KOMPANION_DBUS_INTERFACE_SRCS
-  ${KOMPANION_EXECUTOR_DBUS_XML}
-  OrgKdeKompanionExecutor
-)
-
-set_target_properties(kompanion_mw PROPERTIES CXX_STANDARD 20)
-
-target_include_directories(kompanion_mw PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/middleware)
-
-target_sources(kompanion_mw PRIVATE ${KOMPANION_DBUS_ADAPTOR_SRCS} ${KOMPANION_DBUS_INTERFACE_SRCS})
-
-target_link_libraries(kompanion_mw PRIVATE Qt6::Core Qt6::DBus Qt6::Sql Qt6::Network kom_dal)
-target_compile_definitions(kompanion_mw PRIVATE KOMPANION_MW_LIBRARY)
-
-# Example executable wiring GUI/controller/executor together could be added later.
+
@@ -8,15 +8,6 @@
 
 namespace KI {
 
-/**
- * KIEmbedOptions and KIEmbeddingResult document the embedding API exposed by libKI providers.
- *
- * Semantics
- * - Providers should accept one or many input texts and return one vector per input.
- * - The `model` is a free-form identifier understood by the provider (e.g., "bge-m3:latest").
- * - If `normalize` is set to "l2", providers may L2-normalize vectors client-side for cosine search.
- */
-
 class KIEmbedOptions
 {
     Q_GADGET
@@ -33,15 +33,9 @@ KICapabilities* OllamaProvider::caps() const
     return m_caps;
 }
 
-static QString ollamaBaseUrl() {
-    const QByteArray env = qgetenv("OLLAMA_BASE");
-    if (!env.isEmpty()) return QString::fromLocal8Bit(env);
-    return QStringLiteral("http://localhost:11434");
-}
-
 void OllamaProvider::reload()
 {
-    QNetworkRequest req{QUrl(ollamaBaseUrl() + QStringLiteral("/api/tags"))};
+    QNetworkRequest req{QUrl(QStringLiteral("http://localhost:11434/api/tags"))};
     req.setHeader(QNetworkRequest::ContentTypeHeader, QStringLiteral("application/json"));
     auto rep = m_manager->get(req);
     connect(rep, &QNetworkReply::finished, this, [this, rep] {
@@ -62,7 +56,7 @@ void OllamaProvider::reload()
 
 QFuture<KIReply*> OllamaProvider::chat(const KIThread& thread, const KIChatOptions& opts)
 {
-    QNetworkRequest req{QUrl(ollamaBaseUrl() + QStringLiteral("/api/generate"))};
+    QNetworkRequest req{QUrl(QStringLiteral("http://localhost:11434/api/generate"))};
     req.setHeader(QNetworkRequest::ContentTypeHeader, QStringLiteral("application/json"));
 
     QJsonObject data;
@@ -114,43 +108,43 @@ QNetworkRequest req{QUrl(ollamaBaseUrl() + QStringLiteral("/api/generate"))};
 
 QFuture<KIEmbeddingResult> OllamaProvider::embed(const QStringList& texts, const KIEmbedOptions& opts)
 {
-    // Execute one request per input text; aggregate outputs.
-    QFutureInterface<KIEmbeddingResult> fi;
-    fi.reportStarted();
-    if (texts.isEmpty()) { KIEmbeddingResult r; r.model = opts.model; fi.reportResult(r); fi.reportFinished(); return fi.future(); }
-
-    struct Accum { QVector<QVector<float>> vectors; int remaining = 0; QString model; };
-    auto acc = new Accum();
-    acc->vectors.resize(texts.size());
-    acc->remaining = texts.size();
-
-    const QUrl url(ollamaBaseUrl() + QStringLiteral("/api/embeddings"));
-    for (int i = 0; i < texts.size(); ++i) {
-        QNetworkRequest req{url};
+    QNetworkRequest req{QUrl(QStringLiteral("http://localhost:11434/api/embeddings"))};
     req.setHeader(QNetworkRequest::ContentTypeHeader, QStringLiteral("application/json"));
-        const QJsonObject body{ {QStringLiteral("model"), opts.model}, {QStringLiteral("prompt"), texts[i]} };
-        auto rep = m_manager->post(req, QJsonDocument(body).toJson());
-        connect(rep, &QNetworkReply::finished, this, [rep, i, acc, fi]() mutable {
-            if (rep->error() == QNetworkReply::NoError) {
-                const auto obj = QJsonDocument::fromJson(rep->readAll()).object();
-                if (acc->model.isEmpty()) acc->model = obj.value(QStringLiteral("model")).toString();
-                const auto arr = obj.value(QStringLiteral("embedding")).toArray();
-                QVector<float> vec; vec.reserve(arr.size());
-                for (const auto &v : arr) vec.push_back(static_cast<float>(v.toDouble()));
-                acc->vectors[i] = std::move(vec);
-            }
-            rep->deleteLater();
-            acc->remaining -= 1;
-            if (acc->remaining == 0) {
-                KIEmbeddingResult res; res.vectors = std::move(acc->vectors); res.model = acc->model;
-                fi.reportResult(res);
-                fi.reportFinished();
-                delete acc;
-            }
-        });
+
+    QJsonObject data;
+    data["model"] = opts.model;
+    data["prompt"] = texts.join("\n"); // Join all texts into a single prompt
+
+    auto netReply = m_manager->post(req, QJsonDocument(data).toJson());
+
+    QFutureInterface<KIEmbeddingResult> interface;
+    interface.reportStarted();
+
+    connect(netReply, &QNetworkReply::finished, this, [netReply, interface]() mutable {
+        if (netReply->error() != QNetworkReply::NoError) {
+            // TODO: Handle error
+            interface.reportFinished();
+            netReply->deleteLater();
+            return;
     }
 
-    return fi.future();
+        const auto json = QJsonDocument::fromJson(netReply->readAll());
+        const auto embeddingArray = json["embedding"].toArray();
+
+        KIEmbeddingResult result;
+        QVector<float> embedding;
+        for (const QJsonValue &value : embeddingArray) {
+            embedding.push_back(value.toDouble());
+        }
+        result.vectors.push_back(embedding);
+        result.model = json["model"].toString();
+
+        interface.reportResult(result);
+        interface.reportFinished();
+        netReply->deleteLater();
+    });
+
+    return interface.future();
 }
 
 void OllamaProvider::cancel(quint64 requestId)
@@ -7,9 +7,6 @@ Qt6::Core
     Qt6::Sql
     KF6::ConfigCore
     kom_dal
-    kompanion_mw
     kom_ki
-    kom_mcp
-    Qt6::McpServer
 )
 install(TARGETS kompanion RUNTIME ${KF_INSTALL_TARGETS_DEFAULT_ARGS})
@@ -15,9 +15,6 @@
 #include <QSqlDriver>
 #include <QSqlError>
 #include <QSqlQuery>
-#include <QLoggingCategory>
-#include <QJsonDocument>
-#include <QJsonArray>
 
 #ifdef HAVE_KCONFIG
 #include <KConfigGroup>
@@ -37,10 +34,8 @@
 #include <string>
 #include <vector>
 
-#include "mcp/KompanionQtServer.hpp"
-#include "mcp/RegisterTools.hpp"
-#include "dal/PgDal.hpp"
 #include "mcp/KomMcpServer.hpp"
+#include "mcp/RegisterTools.hpp"
 
 namespace {
@@ -583,19 +578,6 @@ bool runInitializationWizard(QTextStream& in,
     return false;
 }
 
-bool connectDalFromEnv(ki::PgDal& dal, QTextStream& out) {
-    const char* envDsn = std::getenv("PG_DSN");
-    if (!envDsn || !*envDsn) {
-        out << "PG_DSN not set; using in-memory DAL stub.\n";
-        return dal.connect("stub://memory");
-    }
-    if (!dal.connect(envDsn)) {
-        out << "Failed to connect to database; falling back to stub.\n";
-        return dal.connect("stub://memory");
-    }
-    return true;
-}
-
 int runInteractiveSession(KomMcpServer& server,
                           const std::string& toolName,
                           bool verbose) {
@@ -685,109 +667,6 @@ void printToolList(const KomMcpServer& server) {
     out.flush();
 }
 
-int runMcpServer(QString backend, QString address, QTextStream& qerr) {
-    KomMcpServer logic;
-    register_default_tools(logic);
-
-    ki::PgDal dal;
-    QTextStream qout(stdout);
-    connectDalFromEnv(dal, qout);
-
-    const QStringList availableBackends = QMcpServer::backends();
-    if (availableBackends.isEmpty()) {
-        qerr << "[kompanion] No MCP server backends detected in plugin search path.\n";
-    } else if (!availableBackends.contains(backend)) {
-        qerr << "[kompanion] Backend '" << backend << "' not available. Known: "
-             << availableBackends.join('/') << "\n";
-        return 1;
-    }
-
-    KompanionQtServer server(backend, &logic, &dal);
-    if (backend == QStringLiteral("stdio")) server.start(); else server.start(address);
-    QCoreApplication::instance()->exec();
-    return 0;
-}
-
-// ---------- DB helpers (CLI) ----------
-bool dbListNamespaces(QTextStream& out) {
-    const char* dsn = std::getenv("PG_DSN");
-    if (!dsn || !*dsn) { out << "PG_DSN not set.\n"; return false; }
-    QString err;
-    if (!testConnection(dsn, &err)) { out << "Connection failed: " << err << "\n"; return false; }
-
-    const QString connName = QStringLiteral("kompanion_db_%1").arg(QRandomGenerator::global()->generate64(), 0, 16);
-    QSqlDatabase db = QSqlDatabase::addDatabase(QStringLiteral("QPSQL"), connName);
-    const auto cfg = configFromDsn(std::optional<std::string>(dsn));
-    db.setDatabaseName(cfg.dbname); db.setUserName(cfg.user); db.setPassword(cfg.password);
-    db.setHostName(cfg.useSocket ? cfg.socketPath : cfg.host);
-    bool ok=false; const int portValue = cfg.port.toInt(&ok); if (ok && portValue>0) db.setPort(portValue);
-    if (!db.open()) { out << "Open failed: " << db.lastError().text() << "\n"; QSqlDatabase::removeDatabase(connName); return false; }
-    QSqlQuery q(db);
-    if (!q.exec(QStringLiteral("SELECT id::text, name FROM namespaces ORDER BY name"))) {
-        out << q.lastError().text() << "\n"; db.close(); QSqlDatabase::removeDatabase(connName); return false; }
-    while (q.next()) {
-        out << q.value(1).toString() << "\t" << q.value(0).toString() << "\n";
-    }
-    db.close(); QSqlDatabase::removeDatabase(connName);
-    return true;
-}
-
-bool dbListItems(const QString& nsName, int limit, QTextStream& out) {
-    const char* dsn = std::getenv("PG_DSN");
-    if (!dsn || !*dsn) { out << "PG_DSN not set.\n"; return false; }
-    const QString connName = QStringLiteral("kompanion_db_%1").arg(QRandomGenerator::global()->generate64(), 0, 16);
-    QSqlDatabase db = QSqlDatabase::addDatabase(QStringLiteral("QPSQL"), connName);
-    const auto cfg = configFromDsn(std::optional<std::string>(dsn));
-    db.setDatabaseName(cfg.dbname); db.setUserName(cfg.user); db.setPassword(cfg.password);
-    db.setHostName(cfg.useSocket ? cfg.socketPath : cfg.host);
-    bool ok=false; const int portValue = cfg.port.toInt(&ok); if (ok && portValue>0) db.setPort(portValue);
-    if (!db.open()) { out << "Open failed: " << db.lastError().text() << "\n"; QSqlDatabase::removeDatabase(connName); return false; }
-    QSqlQuery q(db);
-    q.prepare(QStringLiteral(
-        "SELECT i.id::text, COALESCE(i.key,''), LEFT(i.content, 120), array_to_string(i.tags, ',') "
-        "FROM memory_items i JOIN namespaces n ON n.id = i.namespace_id "
-        "WHERE n.name = :name ORDER BY i.created_at DESC LIMIT :lim"));
-    q.bindValue(":name", nsName); q.bindValue(":lim", limit);
-    if (!q.exec()) { out << q.lastError().text() << "\n"; db.close(); QSqlDatabase::removeDatabase(connName); return false; }
-    while (q.next()) {
-        out << q.value(0).toString() << '\t' << q.value(1).toString() << '\t' << q.value(2).toString().replace('\n',' ') << '\t' << q.value(3).toString() << "\n";
-    }
-    db.close(); QSqlDatabase::removeDatabase(connName);
-    return true;
-}
-
-bool dbSearch(const QString& nsName, const QString& text, const QString& embeddingFile, int k, QTextStream& out) {
-    ki::PgDal dal; connectDalFromEnv(dal, out);
-    auto ns = dal.findNamespace(nsName.toStdString());
-    if (!ns) { out << "namespace not found\n"; return false; }
-    std::vector<float> vec;
-    if (!embeddingFile.isEmpty()) {
-        QFile f(embeddingFile);
-        if (!f.open(QIODevice::ReadOnly|QIODevice::Text)) { out << "cannot read embedding file\n"; return false; }
-        const auto doc = QJsonDocument::fromJson(f.readAll());
-        if (!doc.isArray()) { out << "embedding file must be JSON array\n"; return false; }
-        for (const auto &v : doc.array()) vec.push_back(static_cast<float>(v.toDouble()));
-    }
-    // Hybrid: try text first, then vector.
-    auto rows = dal.searchText(ns->id, text.toStdString(), k);
-    int printed = 0;
-    for (size_t i=0; i<rows.size() && printed<k; ++i) {
-        const auto &r = rows[i];
-        out << QString::fromStdString(r.id) << '\t' << QString::fromStdString(r.text.value_or("")) << '\t' << QString::number(1.0 - (0.05*i), 'f', 3) << "\n";
-        ++printed;
-    }
-    if (printed < k && !vec.empty()) {
-        auto more = dal.searchVector(ns->id, vec, k-printed);
-        for (const auto &p : more) {
-            auto item = dal.getItemById(p.first);
-            if (!item) continue;
-            out << QString::fromStdString(p.first) << '\t' << QString::fromStdString(item->text.value_or("")) << '\t' << QString::number(p.second, 'f', 3) << "\n";
-            ++printed; if (printed>=k) break;
-        }
-    }
-    return true;
-}
-
 } // namespace
 
 int main(int argc, char** argv) {
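A small sketch, not part of the diff, of producing the --embedding-file input that dbSearch() above expects: a bare JSON array of numbers. The vector values and file name are made up.

#include <QFile>
#include <QJsonArray>
#include <QJsonDocument>

void writeQueryEmbedding(const QString &path) {
    QJsonArray arr;
    for (float f : {0.12f, -0.03f, 0.27f}) arr.append(f);   // placeholder vector values
    QFile out(path);
    if (out.open(QIODevice::WriteOnly | QIODevice::Text)) {
        out.write(QJsonDocument(arr).toJson(QJsonDocument::Compact));
    }
}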
@@ -830,73 +709,6 @@ int main(int argc, char** argv) {
        "dsn");
     parser.addOption(dsnOption);
 
-    // MCP server mode
-    QCommandLineOption mcpServeOption(QStringList() << "S" << "mcp-serve",
-        "Run as an MCP server instead of a one-shot tool. Optional backend name (stdio|ws).",
-        "backend", "stdio");
-    parser.addOption(mcpServeOption);
-    QCommandLineOption mcpAddrOption(QStringList() << "A" << "mcp-address",
-        "Address to listen on for network backends.",
-        "address", "127.0.0.1:8000");
-    parser.addOption(mcpAddrOption);
-
-    // DB navigation helpers
-    QCommandLineOption dbNsOption(QStringList() << "db-namespaces",
-        "List namespaces in the database and exit.");
-    parser.addOption(dbNsOption);
-    QCommandLineOption dbItemsOption(QStringList() << "db-items",
-        "List recent items in a namespace (requires --ns). Optional --limit.");
-    parser.addOption(dbItemsOption);
-    QCommandLineOption nsNameOption(QStringList() << "ns",
-        "Namespace name for DB operations.",
-        "name");
-    parser.addOption(nsNameOption);
-    QCommandLineOption limitOption(QStringList() << "limit",
-        "Limit for DB operations (default 10).",
-        "n", "10");
-    parser.addOption(limitOption);
-    QCommandLineOption queryOption(QStringList() << "db-search",
-        "Hybrid search in a namespace. Use --text and/or --embedding-file. Requires --ns.");
-    parser.addOption(queryOption);
-    QCommandLineOption embFileOption(QStringList() << "embedding-file",
-        "Path to JSON array containing embedding vector for hybrid search.",
-        "path");
-    parser.addOption(embFileOption);
-
-    // Snapshot helpers
-    QCommandLineOption snapshotSaveOption(QStringList() << "snapshot-save",
-        "Save a JSON snapshot (content) under a key in --ns. Provide content via -r/--stdin/[payload].");
-    parser.addOption(snapshotSaveOption);
-    QCommandLineOption snapshotLoadOption(QStringList() << "snapshot-load",
-        "Load a JSON snapshot for --ns and --key and print it.");
-    parser.addOption(snapshotLoadOption);
-    QCommandLineOption keyOption(QStringList() << "key",
-        "Key for snapshot operations (default 'session:last').",
-        "key", "session:last");
-    parser.addOption(keyOption);
-
-    // Warm cache + rehydrate helpers
-    QCommandLineOption warmCacheOption(QStringList() << "warm-cache",
-        "Warm precomputed embeddings (policy or ad-hoc). Use with --policy or --id.");
-    parser.addOption(warmCacheOption);
-    QCommandLineOption policyOption(QStringList() << "policy",
-        "Policy file (YAML/JSON) describing namespaces, model, limit, window_days.",
-        "path");
-    parser.addOption(policyOption);
-    QCommandLineOption idOption(QStringList() << "id",
-        "Explicit item id for ad-hoc warm cache (use with --stdin or -r).",
-        "id");
-    parser.addOption(idOption);
-
-    parser.addOption(idOption);
-    QCommandLineOption rehydrateOption(QStringList() << "--rehydrate",
-        "Compose a rehydration frame: snapshot + top-K search for --text.");
-    parser.addOption(rehydrateOption);
-    QCommandLineOption kOption(QStringList() << "k",
-        "Top-K for rehydrate/search.",
-        "k", "8");
-    parser.addOption(kOption);
-
     parser.addPositionalArgument("tool", "Tool name to invoke.");
     parser.addPositionalArgument("payload", "Optional JSON payload or file path (use '-' for stdin).", "[payload]");
@@ -909,11 +721,6 @@ int main(int argc, char** argv) {
     const bool verbose = parser.isSet(verboseOption);
     const bool interactive = parser.isSet(interactiveOption);
     const bool initRequested = parser.isSet(initOption);
-    const bool runMcp = parser.isSet(mcpServeOption);
-    const bool snapSave = parser.isSet(snapshotSaveOption);
-    const bool snapLoad = parser.isSet(snapshotLoadOption);
-    const bool warmCache = parser.isSet(warmCacheOption);
-    const bool rehydrate = parser.isSet(rehydrateOption);
 
     std::optional<std::string> configDsn = readDsnFromConfig();
     const char* envDsn = std::getenv("PG_DSN");
@@ -959,143 +766,6 @@ int main(int argc, char** argv) {
         return 0;
     }
 
-    // MCP server mode first (exclusive)
-    if (runMcp) {
-        const QString backend = parser.value(mcpServeOption);
-        const QString addr = parser.value(mcpAddrOption);
-        return runMcpServer(backend, addr, qerr);
-    }
-
-    // Snapshot helpers (exclusive)
-    if (snapSave || snapLoad) {
-        const QString ns = parser.value(nsNameOption);
-        const QString nsQuery = parser.value(nsQuery);
-        if (ns.isEmpty()) { qerr << "--snapshot-save/load requires --ns <name>\n"; return 1; }
-        if (snapSave) {
-            // Resolve content from CLI
-            std::string raw;
-            QString err;
-            if (!resolveRequestPayload(parser, parser.positionalArguments(), requestOption, stdinOption, raw, &err)) {
-                qerr << (err.isEmpty() ? QStringLiteral("Failed to read snapshot content") : err) << "\n";
-                return 1;
-            }
-            // Wrap into save_context call
-            std::ostringstream req;
-            req << "{\"namespace\":\"" << ns.toStdString() << "\",\"key\":\"" << nsQuery.toStdString() << "\",\"content\":" << raw << ",\"tags\":[\"snapshot\"]}";
-            const std::string out = server.dispatch("kom.memory.v1.save_context", req.str());
-            std::cout << out << std::endl;
-            return 0;
-        } else {
-            std::ostringstream req;
-            req << "{\"namespace\":\"" << ns.toStdString() << "\",\"key\":\"" << nsQuery.toStdString() << "\"}";
-            const std::string out = server.dispatch("kom.memory.v1.recall_context", req.str());
-            std::cout << out << std::endl;
-            return 0;
-        }
-    }
-
-    // Warm cache
-    if (warmCache) {
-        const QString ns = parser.value(nsNameOption);
-        const QString id = parser.value(idOption);
-        const QString policyPath = parser.value(policyOption);
-        const QString model = parser.value(QStringLiteral("--model")); // optional generic pass-through
-        const int limit = parser.value(limitOption).toInt();
-
-        if (!id.isEmpty()) {
-            // Ad-hoc enqueue: upsert_and_embed with single item from stdin/-r/payload
-            std::string raw; QString err;
-            if (!resolveRequestPayload(parser, parser.positionalArguments(), requestOption, stdinOption, raw, &err)) {
-                qerr << (err.isEmpty() ? QStringLiteral("Failed to read content for --id") : err) << "\n"; return 1;
-            }
-            std::ostringstream req;
-            req << "{\"namespace\":\"" << ns.toStdString() << "\",\"model\":\"" << model.toStdString() << "\",\"items\":[{\"id\":\"" << id.toStdString() << "\",\"text\":" << raw << "}]}";
-            std::cout << server.dispatch("kom.memory.v1.upsert_and_embed", req.str()) << std::endl;
-            return 0;
-        }
-
-        if (!policyPath.isEmpty()) {
-            // Minimal policy parser (YAML/JSON): namespaces, model, limit, window_days
-            QFile f(policyPath); if (!f.open(QIODevice::ReadOnly|QIODevice::Text)) { qerr << "Cannot open policy" << "\n"; return 1; }
-            const QString pol = QString::fromUtf8(f.readAll());
-            // Extract namespaces as lines starting with '-'
-            QStringList nss;
-            QRegularExpression rxNs("^\\s*-\\s*([A-Za-z0-9_:\\-]+)\\s*$");
-            for (const QString &line : pol.split('\n')) {
-                auto m = rxNs.match(line); if (m.hasMatch()) nss << m.captured(1);
-            }
-            if (nss.isEmpty() && !ns.isEmpty()) nss << ns;
-            // Extract window_days
-            int windowDays = 0; {
-                QRegularExpression rx("window_days\\s*:\\s*([0-9]+)"); auto m = rx.match(pol); if (m.hasMatch()) windowDays = m.captured(1).toInt();
-            }
-            // Extract model
-            QString pModel = model; if (pModel.isEmpty()) { QRegularExpression rx("model\\s*:\\s*([A-Za-z0-9_:\\-]+)"); auto m = rx.match(pol); if (m.hasMatch()) pModel = m.captured(1); }
-            // Extract limit
-            int pLimit = limit>0?limit:10; { QRegularExpression rx("limit\\s*:\\s*([0-9]+)"); auto m = rx.match(pol); if (m.hasMatch()) pLimit = m.captured(1).toInt(); }
-
-            // Compute since timestamp if windowDays>0
-            QString since;
-            if (windowDays > 0) {
-                const auto now = QDateTime::currentDateTimeUtc();
-                since = now.addDays(-windowDays).toString(Qt::ISODate);
-            }
-            for (const QString &nsv : nss) {
-                std::ostringstream req;
-                req << "{\"namespace\":\"" << nsv.toStdString() << "\"";
-                if (!pModel.isEmpty()) req << ",\"model\":\"" << pModel.toStdString() << "\"";
-                if (!since.isEmpty()) req << ",\"since\":\"" << since.toStdString() << "\"";
-                req << ",\"limit\":" << pLimit << "}";
-                std::cout << server.dispatch("kom.memory.v1.warm_cache", req.str()) << std::endl;
-            }
-            return 0;
-        }
-
-        // Simple case: call warm_cache once for ns
-        std::ostringstream req;
-        req << "{\"namespace\":\"" << ns.toStdString() << "\"";
-        if (!model.isEmpty()) req << ",\"model\":\"" << model.toStdString() << "\"";
-        req << ",\"limit\":" << (limit>0?limit:10) << "}";
-        std::cout << server.dispatch("kom.memory.v1.warm_cache", req.str()) << std::endl;
-        return 0;
-    }
-
-    // Rehydrate composition
-    if (rehydrate) {
-        const QString ns = parser.value(nsNameOption);
-        const QString key = parser.value(keyOption);
-        const QString text = parser.value(queryOption);
-        bool ok=false; int k = parser.value(kOption).toInt(&ok); if (!ok || k<=0) k=8;
-        // Recall snapshot
-        std::ostringstream r1; r1 << "{\"namespace\":\"" << ns.toStdString() << "\",\"key\":\"" << key.toStdString() << "\"}";
-        const std::string snapshot = server.dispatch("kom.memory.v1.recall_context", r1.str());
-        // Search
-        std::ostringstream r2; r2 << "{\"namespace\":\"" << ns.toStdString() << "\",\"query\":{\"text\":\"" << jsonEscape(text) << "\",\"k\":" << k << "}}";
-        const std::string matches = server.dispatch("kom.memory.v1.search_memory", r2.str());
-        // Compose
-        std::cout << "{\"snapshot\":" << snapshot << ",\"search\":" << matches << "}" << std::endl;
-        return 0;
-    }
-
-    // DB inspection helpers (exclusive)
-    if (parser.isSet(dbNsOption)) {
-        return dbListNamespaces(qout) ? 0 : 1;
-    }
-    if (parser.isSet(dbItemsOption)) {
-        const QString ns = parser.value(nsNameOption);
-        if (ns.isEmpty()) { qerr << "--db-items requires --ns <name>\n"; return 1; }
-        bool ok=false; int lim = parser.value(limitOption).toInt(&ok); if (!ok || lim<=0) lim=10;
-        return dbListItems(ns, lim, qout) ? 0 : 1;
-    }
-    if (parser.isSet(queryOption)) {
-        const QString ns = parser.value(queryOption);
-        if (ns.isEmpty()) { qerr << "--db-search requires --ns <name>\n"; return 1; }
-        bool ok=false; int k = parser.value(limitOption).toInt(&ok); if (!ok || k<=0) k=5;
-        const QString text = parser.value(queryOption);
-        const QString embPath = parser.value(embFileOption);
-        return dbSearch(ns, text, embPath, k, qout) ? 0 : 1;
-    }
-
     const QStringList positional = parser.positionalArguments();
     if (positional.isEmpty()) {
         parser.showHelp(1);
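For orientation, a minimal sketch (not part of the diff) of how the snapshot path above drives the tool layer; the namespace, key, and payload values are made-up examples.

#include "mcp/KomMcpServer.hpp"
#include "mcp/RegisterTools.hpp"
#include <iostream>

int snapshotRoundTrip() {
    KomMcpServer server;
    register_default_tools(server);
    // Hypothetical namespace/key; save_context wraps arbitrary JSON content, recall_context reads it back.
    const std::string saved = server.dispatch("kom.memory.v1.save_context",
        R"({"namespace":"project:demo","key":"session:last","content":{"note":"resume here"},"tags":["snapshot"]})");
    const std::string loaded = server.dispatch("kom.memory.v1.recall_context",
        R"({"namespace":"project:demo","key":"session:last"})");
    std::cout << saved << "\n" << loaded << std::endl;
    return 0;
}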
@@ -1,20 +0,0 @@
#!/usr/bin/env python3
"""
Moved from ingest/run_ingest.py for transparency. See ingest/pipeline.qt-kde-bge-m3.yaml
for configuration fields. This script remains a reference pipeline and is not
used by the C++ build.
"""
# Original content is available under ingest/run_ingest.py. Keeping this as a thin
# forwarder/import to avoid duplication while surfacing the script under src/cli/.
import os, sys
from pathlib import Path

here = Path(__file__).resolve()
ingest_script = here.parent.parent.parent / 'ingest' / 'run_ingest.py'
if not ingest_script.exists():
    print('ingest/run_ingest.py not found', file=sys.stderr)
    sys.exit(1)

code = ingest_script.read_text(encoding='utf-8')
exec(compile(code, str(ingest_script), 'exec'))
@@ -1,58 +0,0 @@
#!/usr/bin/env python3
"""
Lightweight embedding helper moved from ingest/ for transparency.

Usage examples:
- Single embedding via Ollama:
  OLLAMA_BASE=http://localhost:11434 \
    ./py_embedder.py --model bge-m3:latest --text "hello world"

- Batch from stdin (one line per text):
  ./py_embedder.py --model bge-m3:latest --stdin < texts.txt

Outputs JSON array of floats (for single text) or array-of-arrays for batches.
This script does not touch the database; it only produces vectors.
"""
import os, sys, json, argparse, requests

def embed_ollama(texts, model, base):
    url = f"{base}/api/embeddings"
    # Some Ollama models accept a single prompt; do one-by-one for reliability
    out = []
    for t in texts:
        r = requests.post(url, json={"model": model, "prompt": t}, timeout=120)
        r.raise_for_status()
        data = r.json()
        if "embedding" in data:
            out.append(data["embedding"])  # single vector
        elif "embeddings" in data:
            out.extend(data["embeddings"])  # multiple vectors
        else:
            raise RuntimeError("Embedding response missing 'embedding(s)'")
    return out

def main():
    ap = argparse.ArgumentParser()
    ap.add_argument("--model", default=os.environ.get("EMBED_MODEL","bge-m3:latest"))
    ap.add_argument("--text", help="Text to embed; if omitted, use --stdin")
    ap.add_argument("--stdin", action="store_true", help="Read texts from stdin (one per line)")
    ap.add_argument("--base", default=os.environ.get("OLLAMA_BASE","http://localhost:11434"))
    args = ap.parse_args()

    texts = []
    if args.text:
        texts = [args.text]
    elif args.stdin:
        texts = [line.rstrip("\n") for line in sys.stdin if line.strip()]
    else:
        ap.error("Provide --text or --stdin")

    vectors = embed_ollama(texts, args.model, args.base)
    if len(texts) == 1 and vectors:
        print(json.dumps(vectors[0]))
    else:
        print(json.dumps(vectors))

if __name__ == "__main__":
    main()
@@ -6,4 +6,3 @@ target_compile_features(kom_dal PUBLIC cxx_std_20)
 target_include_directories(kom_dal PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
 target_link_libraries(kom_dal PUBLIC Qt6::Core Qt6::Sql)
 target_compile_options(kom_dal PRIVATE -fexceptions)
-set_target_properties(kom_dal PROPERTIES POSITION_INDEPENDENT_CODE ON)
@@ -502,20 +502,15 @@ std::vector<std::string> PgDal::upsertChunks(const std::vector<ChunkRow>& chunks
         if (stored.item_id.empty()) {
             continue;
         }
-        // Enforce uniqueness by (item_id, ord) in memory as well
-        auto& bucket = chunksByItem_[stored.item_id];
-        std::string existingId;
-        for (const auto &cid : bucket) {
-            auto it = chunks_.find(cid);
-            if (it != chunks_.end() && it->second.ord == stored.ord) { existingId = cid; break; }
-        }
-        if (existingId.empty()) {
-            if (stored.id.empty()) stored.id = allocateId(nextChunkId_, "chunk_");
-            bucket.push_back(stored.id);
-        } else {
-            stored.id = existingId;
+        if (stored.id.empty()) {
+            stored.id = allocateId(nextChunkId_, "chunk_");
         }
         chunks_[stored.id] = stored;
+        auto& bucket = chunksByItem_[stored.item_id];
+        if (!idsContains(bucket, stored.id)) {
+            bucket.push_back(stored.id);
+        }
         ids.push_back(stored.id);
     }
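The new branch above relies on an idsContains() helper that this hunk does not show; a plausible shape (an assumption, not taken from the diff) would be:

#include <algorithm>
#include <string>
#include <vector>

// Sketch only: assumed signature of the helper referenced by the new code path.
static bool idsContains(const std::vector<std::string>& ids, const std::string& id) {
    return std::find(ids.begin(), ids.end(), id) != ids.end();
}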
@@ -529,16 +524,17 @@ std::vector<std::string> PgDal::sqlUpsertChunks(const std::vector<ChunkRow>& chu
     QSqlDatabase db = database();
     QSqlQuery query(db);
     query.prepare(QStringLiteral(
-        "INSERT INTO memory_chunks (item_id, seq, content, id) "
-        "VALUES (:item_id::uuid, :seq, :content, COALESCE(NULLIF(:id, '')::uuid, gen_random_uuid())) "
-        "ON CONFLICT (item_id, seq) DO UPDATE SET content = EXCLUDED.content "
+        "INSERT INTO memory_chunks (id, item_id, seq, content) "
+        "VALUES (COALESCE(NULLIF(:id, '')::uuid, gen_random_uuid()), "
+        " :item_id::uuid, :seq, :content) "
+        "ON CONFLICT (id) DO UPDATE SET seq = EXCLUDED.seq, content = EXCLUDED.content "
         "RETURNING id::text;"));
 
     for (const auto& chunk : chunks) {
+        query.bindValue(QStringLiteral(":id"), QString::fromStdString(chunk.id));
         query.bindValue(QStringLiteral(":item_id"), QString::fromStdString(chunk.item_id));
         query.bindValue(QStringLiteral(":seq"), chunk.ord);
         query.bindValue(QStringLiteral(":content"), QString::fromStdString(chunk.text));
-        query.bindValue(QStringLiteral(":id"), QString::fromStdString(chunk.id));
 
         if (!query.exec() || !query.next()) {
             throw std::runtime_error(query.lastError().text().toStdString());

Binary file not shown.
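A minimal usage sketch of the chunk upsert shown above, assuming the ki::PgDal and ki::ChunkRow members used elsewhere in this diff; the item id is a placeholder.

#include "dal/PgDal.hpp"

void upsertOneChunk() {
    ki::PgDal dal;
    dal.connect("stub://memory");                  // or a real PG_DSN
    ki::ChunkRow chunk;
    chunk.item_id = "<existing memory_items id>";  // placeholder uuid string
    chunk.ord = 0;
    chunk.text = "first chunk of the item";
    // Returns the generated or updated chunk id(s).
    const std::vector<std::string> ids = dal.upsertChunks(std::vector<ki::ChunkRow>{chunk});
    (void)ids;
}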
@@ -37,31 +37,10 @@ public:
         m_chatInput = new QLineEdit(mainWidget);
         layout->addWidget(m_chatInput);
 
-        QHBoxLayout *row = new QHBoxLayout();
         QPushButton *sendButton = new QPushButton("Send", mainWidget);
-        QPushButton *embedButton = new QPushButton("Embed", mainWidget);
-        row->addWidget(sendButton);
-        row->addWidget(embedButton);
-        layout->addLayout(row);
+        layout->addWidget(sendButton);
 
         connect(sendButton, &QPushButton::clicked, this, &MainWindow::sendMessage);
-        connect(embedButton, &QPushButton::clicked, this, [this]() {
-            const QString text = m_chatInput->text().trimmed();
-            if (text.isEmpty()) return;
-            KI::KIEmbedOptions opts; opts.model = "bge-m3:latest"; // simple default
-            QFuture<KI::KIEmbeddingResult> fut = m_kompanionClient->embed(QStringList{text}, opts);
-            auto *watch = new QFutureWatcher<KI::KIEmbeddingResult>(this);
-            connect(watch, &QFutureWatcher<KI::KIEmbeddingResult>::finished, this, [this, watch]() {
-                const auto res = watch->result();
-                if (!res.vectors.isEmpty()) {
-                    insertText(QString("[embed %1] dim=%2\n").arg(res.model).arg(res.vectors.first().size()));
-                } else {
-                    insertText("[embed] no result\n");
-                }
-                watch->deleteLater();
-            });
-            watch->setFuture(fut);
-        });
 
         // Setup KI
         m_ollamaProvider = new KI::OllamaProvider(this);
@@ -15,20 +15,8 @@
 #include <vector>
 
 #include "PgDal.hpp"
-// libKI (central embedding + model provider)
-#include "Client/KIClient.h"
-#include "Provider/OllamaProvider.h"
-#include "Embedding/KIEmbedding.h"
-#include <QEventLoop>
-#include <QFutureWatcher>
 
 namespace Handlers {
 
-/**
- * upsert_memory
- * Request: { "namespace": string, "items": [ { "id?": string, "text": string, "tags?": string[], "embedding?": number[] } ] }
- * Response: { "upserted": int, "ids?": string[], "status": "ok" }
- */
 namespace detail {
 
 inline ki::PgDal& database() {
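For reference, a sketch (not part of the diff; namespace, text, and tags are invented values) of invoking the upsert_memory tool with a request shaped like the removed doc comment describes:

#include "mcp/KomMcpServer.hpp"

std::string upsertMemoryExample(KomMcpServer& server) {
    // Request shape follows the upsert_memory doc comment above.
    return server.dispatch("kom.memory.v1.upsert_memory",
        R"({"namespace":"project:demo","items":[{"text":"remember this","tags":["note"]}]})");
}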
@@ -340,7 +328,6 @@ struct ParsedItem {
     std::vector<std::string> tags;
     std::vector<float> embedding;
     std::string rawJson;
-    std::string metadataJson;
 };
 
 inline std::vector<ParsedItem> parse_items(const std::string& json) {
@@ -352,7 +339,6 @@ inline std::vector<ParsedItem> parse_items(const std::string& json) {
         item.text = extract_string_field(obj, "text");
         item.tags = parse_string_array(obj, "tags");
         item.embedding = parse_float_array(obj, "embedding");
-        if (auto meta = extract_json_value(obj, "metadata")) item.metadataJson = *meta; else item.metadataJson = "{}";
         items.push_back(std::move(item));
     }
     return items;
@@ -457,11 +443,6 @@ inline std::string upsert_memory(const std::string& reqJson) {
     return os.str();
 }
 
-/**
- * search_memory
- * Request: { "namespace": string, "query": { "text?": string, "embedding?": number[], "k?": int } }
- * Response: { "matches": [ { "id": string, "score": number, "text?": string } ] }
- */
 inline std::string search_memory(const std::string& reqJson) {
     const std::string nsName = detail::extract_string_field(reqJson, "namespace");
     if (nsName.empty()) {
@@ -519,11 +500,6 @@ inline std::string search_memory(const std::string& reqJson) {
     return detail::serialize_matches(matches);
 }
 
-/**
- * save_context
- * Request: { "namespace": string, "key?": string, "content": any, "tags?": string[], "ttl_seconds?": int }
- * Response: { "id": string, "created_at": iso8601 }
- */
 inline std::string save_context(const std::string& reqJson) {
     const std::string nsName = detail::extract_string_field(reqJson, "namespace");
     if (nsName.empty()) {
@@ -594,11 +570,6 @@ inline std::string save_context(const std::string& reqJson) {
     return os.str();
 }
 
-/**
- * recall_context
- * Request: { "namespace": string, "key?": string, "tags?": string[], "limit?": int, "since?": iso8601 }
- * Response: { "items": [ { "id": string, "key?": string, "content": any, "tags": string[], "created_at": iso8601 } ] }
- */
 inline std::string recall_context(const std::string& reqJson) {
     const std::string nsName = detail::extract_string_field(reqJson, "namespace");
     if (nsName.empty()) {
@@ -663,184 +634,22 @@ inline std::string recall_context(const std::string& reqJson) {
     return os.str();
 }
 
-/**
- * embed_text
- * Request: { "namespace": string, "model?": string, "texts": string[] }
- * Response: { "model": string, "vectors": number[][] }
- *
- * Implementation: delegates to libKI (OllamaProvider) for local embeddings.
- */
 inline std::string embed_text(const std::string& reqJson) {
     const std::string nsName = detail::extract_string_field(reqJson, "namespace");
     if (nsName.empty()) {
         return detail::error_response("bad_request", "namespace is required");
     }
-    // Parse inputs
-    std::string model = detail::extract_string_field(reqJson, "model");
-    auto texts = detail::parse_string_array(reqJson, "texts");
-    if (texts.empty()) {
-        return detail::error_response("bad_request", "texts must contain at least one string");
-    }
-
-    // libKI: synchronous wait on QFuture
-    KI::KIClient client;
-    KI::OllamaProvider provider;
-    client.setProvider(&provider);
-    KI::KIEmbedOptions opts; if (!model.empty()) opts.model = QString::fromStdString(model);
-
-    QStringList qtexts; qtexts.reserve(static_cast<int>(texts.size()));
-    for (const auto &t : texts) qtexts.push_back(QString::fromStdString(t));
-
-    QEventLoop loop;
-    QFuture<KI::KIEmbeddingResult> fut = client.embed(qtexts, opts);
-    QFutureWatcher<KI::KIEmbeddingResult> watcher;
-    QObject::connect(&watcher, &QFutureWatcher<KI::KIEmbeddingResult>::finished, &loop, &QEventLoop::quit);
-    watcher.setFuture(fut);
-    loop.exec();
-    const KI::KIEmbeddingResult result = watcher.result();
-
-    // Serialize
-    std::ostringstream os;
-    os << "{\"model\":\"" << detail::json_escape(result.model.toStdString()) << "\",\"vectors\":[";
-    for (int i = 0; i < result.vectors.size(); ++i) {
-        if (i) os << ',';
-        os << '[';
-        const auto &vec = result.vectors[i];
-        for (int j = 0; j < vec.size(); ++j) {
-            if (j) os << ',';
-            os.setf(std::ios::fixed); os << std::setprecision(6) << vec[j];
-        }
-        os << ']';
-    }
-    os << "]}";
-    return os.str();
+    // For now, just return a dummy successful response
+    return "{\"model\":\"stub-model\",\"vectors\":[[0.1,0.2,0.3]]}";
 }
 
-/**
- * warm_cache
- * Request: { "namespace": string, "model?": string, "limit?": int }
- * Response: { "queued": int }
- *
- * Implementation: fetches recent items for the namespace, embeds their text via libKI,
- * creates a single chunk (ord=0) per item and upserts the (chunk, embedding) rows.
- */
 inline std::string warm_cache(const std::string& reqJson) {
     const std::string nsName = detail::extract_string_field(reqJson, "namespace");
     if (nsName.empty()) {
         return detail::error_response("bad_request", "namespace is required");
     }
-    std::string model = detail::extract_string_field(reqJson, "model");
-    int limit = 10;
-    if (auto lim = detail::extract_int_field(reqJson, "limit")) { if (*lim > 0) limit = *lim; }
-    std::string key = detail::extract_string_field(reqJson, "key");
-    auto tags = detail::parse_string_array(reqJson, "tags");
-    std::string since = detail::extract_string_field(reqJson, "since");
-
-    auto nsRow = detail::database().findNamespace(nsName);
-    if (!nsRow) {
-        return std::string("{\\\"queued\\\":0}");
-    }
-
-    // Fetch recent items with optional filters
-    std::optional<std::string> keyOpt; if (!key.empty()) keyOpt = key;
-    std::optional<std::string> sinceOpt; if (!since.empty()) sinceOpt = since;
-    auto rows = detail::database().fetchContext(nsRow->id, keyOpt, tags, sinceOpt, limit);
-    if (rows.empty()) {
-        return std::string("{\"queued\":0}");
-    }
-
-    // Collect texts
-    std::vector<std::pair<std::string, std::string>> toEmbed; toEmbed.reserve(rows.size());
-    for (const auto &row : rows) {
-        if (row.text && !row.text->empty()) {
-            toEmbed.emplace_back(row.id, *row.text);
-        }
-        if ((int)toEmbed.size() >= limit) break;
-    }
-    if (toEmbed.empty()) {
-        return std::string("{\"queued\":0}");
-    }
-
-    // libKI
-    KI::KIClient client; KI::OllamaProvider provider; client.setProvider(&provider);
-    KI::KIEmbedOptions opts; if (!model.empty()) opts.model = QString::fromStdString(model);
-    QStringList texts; for (auto &p : toEmbed) texts.push_back(QString::fromStdString(p.second));
-    QEventLoop loop; QFuture<KI::KIEmbeddingResult> fut = client.embed(texts, opts);
-    QFutureWatcher<KI::KIEmbeddingResult> watcher; QObject::connect(&watcher, &QFutureWatcher<KI::KIEmbeddingResult>::finished, &loop, &QEventLoop::quit);
-    watcher.setFuture(fut); loop.exec(); const KI::KIEmbeddingResult result = watcher.result();
-
-    // Persist
-    int persisted = 0; const int n = std::min(result.vectors.size(), (int)toEmbed.size());
-    for (int i = 0; i < n; ++i) {
-        const auto &pair = toEmbed[(size_t)i];
-        ki::ChunkRow chunk; chunk.item_id = pair.first; chunk.ord = 0; chunk.text = pair.second;
-        auto chunkIds = detail::database().upsertChunks(std::vector<ki::ChunkRow>{chunk});
-        if (chunkIds.empty()) continue;
-        const auto &vec = result.vectors[i];
-        ki::EmbeddingRow emb; emb.chunk_id = chunkIds.front(); emb.model = result.model.toStdString(); emb.dim = vec.size();
-        emb.vector.reserve(vec.size()); for (float f : vec) emb.vector.push_back(f);
-        detail::database().upsertEmbeddings(std::vector<ki::EmbeddingRow>{emb});
-        persisted++;
-    }
-    std::ostringstream os; os << "{\"queued\":" << persisted << "}"; return os.str();
-}
-
-/** delete_context (stub MVP)
- * Request: { namespace, key?, tags? }
- * Response: { deleted: 0 }
- * Note: Full deletion (soft-delete) can be added in DAL later.
- */
-inline std::string delete_context(const std::string& reqJson) {
-    const std::string nsName = detail::extract_string_field(reqJson, "namespace");
-    if (nsName.empty()) return detail::error_response("bad_request","namespace is required");
-    return std::string("{\\\"deleted\\\":0}");
+    // For now, just return a dummy successful response
+    return "{\"queued\":0}";
 }
 
 } // namespace Handlers
-/**
- * upsert_and_embed
- * Request: { namespace, model?, items: [{id?, text, tags?, metadata?}] }
- * Response: { upserted, embedded }
- */
-inline std::string upsert_and_embed(const std::string& reqJson) {
-    const std::string nsName = detail::extract_string_field(reqJson, "namespace");
-    if (nsName.empty()) return detail::error_response("bad_request","namespace is required");
-    auto nsRow = detail::database().ensureNamespace(nsName);
-    if (!nsRow) return detail::error_response("internal_error","failed to ensure namespace");
-
-    auto items = detail::parse_items(reqJson);
-    if (items.empty()) return detail::error_response("bad_request","items array must contain at least one entry");
-    std::string model = detail::extract_string_field(reqJson, "model");
-
-    // Upsert items first and collect texts/ids
-    std::vector<std::string> itemIds; itemIds.reserve(items.size());
-    std::vector<std::string> texts; texts.reserve(items.size());
-    for (auto &it : items) {
-        ki::ItemRow row; row.id = it.id; row.namespace_id = nsRow->id; row.text = it.text;
-        row.tags = it.tags; row.revision = 1; row.metadata_json = it.metadataJson.empty()?"{}":it.metadataJson; row.content_json = it.rawJson;
-        const std::string id = detail::database().upsertItem(row);
-        itemIds.push_back(id); texts.push_back(it.text);
-    }
-
-    // Embed via libKI
-    KI::KIClient client; KI::OllamaProvider provider; client.setProvider(&provider);
-    KI::KIEmbedOptions opts; if (!model.empty()) opts.model = QString::fromStdString(model);
-    QStringList qtexts; for (auto &t : texts) qtexts.push_back(QString::fromStdString(t));
-    QEventLoop loop; QFuture<KI::KIEmbeddingResult> fut = client.embed(qtexts, opts);
-    QFutureWatcher<KI::KIEmbeddingResult> watcher; QObject::connect(&watcher, &QFutureWatcher<KI::KIEmbeddingResult>::finished, &loop, &QEventLoop::quit); watcher.setFuture(fut); loop.exec();
-    const KI::KIEmbeddingResult result = watcher.result();
-
-    // Upsert chunks + embeddings (ord=0)
-    int embedded = 0;
-    const int n = std::min((int)itemIds.size(), result.vectors.size());
-    for (int i = 0; i < n; ++i) {
-        ki::ChunkRow chunk; chunk.item_id = itemIds[(size_t)i]; chunk.ord = 0; chunk.text = texts[(size_t)i];
-        auto chunkIds = detail::database().upsertChunks(std::vector<ki::ChunkRow>{chunk}); if (chunkIds.empty()) continue;
-        ki::EmbeddingRow emb; emb.chunk_id = chunkIds.front(); emb.model = result.model.toStdString(); emb.dim = result.vectors[i].size();
-        emb.vector.assign(result.vectors[i].begin(), result.vectors[i].end());
-        detail::database().upsertEmbeddings(std::vector<ki::EmbeddingRow>{emb}); embedded++;
-    }
-
-    std::ostringstream os; os << "{\"upserted\":" << itemIds.size() << ",\"embedded\":" << embedded << "}"; return os.str();
-}
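As a usage sketch (not part of the diff; the namespace, model, and item values are placeholders), the removed upsert_and_embed handler would be driven through the dispatcher like this, matching the Request shape in its removed doc comment:

#include "mcp/KomMcpServer.hpp"

std::string upsertAndEmbedExample(KomMcpServer& server) {
    // namespace, optional model, and items[] with at least a "text" field.
    return server.dispatch("kom.memory.v1.upsert_and_embed",
        R"({"namespace":"project:demo","model":"bge-m3:latest","items":[{"text":"hello world","tags":["demo"]}]})");
}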
@@ -8,11 +8,9 @@ inline void register_default_tools(KomMcpServer& server) {
     server.registerTool("echo", Handlers::echo);
     server.registerTool("kom.memory.v1.save_context", Handlers::save_context);
     server.registerTool("kom.memory.v1.recall_context", Handlers::recall_context);
-    server.registerTool("kom.memory.v1.delete_context", Handlers::delete_context);
     server.registerTool("kom.memory.v1.embed_text", Handlers::embed_text);
     server.registerTool("kom.memory.v1.upsert_memory", Handlers::upsert_memory);
     server.registerTool("kom.memory.v1.search_memory", Handlers::search_memory);
-    server.registerTool("kom.memory.v1.upsert_and_embed", Handlers::upsert_and_embed);
     server.registerTool("kom.memory.v1.warm_cache", Handlers::warm_cache);
     server.registerTool("kom.local.v1.backup.export_encrypted", Handlers::backup_export_encrypted);
     server.registerTool("kom.local.v1.backup.import_encrypted", Handlers::backup_import_encrypted);
@@ -162,25 +162,6 @@
       "additionalProperties": false
     }
   },
-  "delete_context": {
-    "description": "Delete stored context entries filtered by key and/or tags.",
-    "input": {
-      "type": "object",
-      "properties": {
-        "namespace": { "type": "string" },
-        "key": { "type": "string" },
-        "tags": { "$ref": "#/$defs/stringList" }
-      },
-      "required": ["namespace"],
-      "additionalProperties": false
-    },
-    "output": {
-      "type": "object",
-      "properties": { "deleted": { "type": "integer" } },
-      "required": ["deleted"],
-      "additionalProperties": false
-    }
-  },
   "embed_text": {
     "description": "Return embedding vectors for provided text inputs.",
     "input": {
@@ -384,76 +365,6 @@
       "additionalProperties": false
     }
   },
-  "upsert_and_embed": {
-    "description": "Upsert items and compute embeddings for each item (ord=0 chunk).",
-    "input": {
-      "type": "object",
-      "properties": {
-        "namespace": { "type": "string" },
-        "model": { "type": "string" },
-        "items": {
-          "type": "array",
-          "items": {
-            "type": "object",
-            "properties": {
-              "id": { "type": "string" },
-              "text": { "type": "string" },
-              "tags": { "$ref": "#/$defs/stringList" },
-              "metadata": { "type": "object" }
-            },
-            "required": ["text"],
-            "additionalProperties": false
-          }
-        }
-      },
-      "required": ["namespace","items"],
-      "additionalProperties": false
-    },
-    "output": {
-      "type": "object",
-      "properties": {
-        "upserted": { "type": "integer" },
-        "embedded": { "type": "integer" }
-      },
-      "required": ["upserted","embedded"],
-      "additionalProperties": false
-    }
-  },
-  "upsert_and_embed": {
-    "description": "Upsert items and compute embeddings for each item (ord=0 chunk).",
-    "input": {
-      "type": "object",
-      "properties": {
-        "namespace": { "type": "string" },
-        "model": { "type": "string" },
-        "items": {
-          "type": "array",
-          "items": {
-            "type": "object",
-            "properties": {
-              "id": { "type": "string" },
-              "text": { "type": "string" },
-              "tags": { "$ref": "#/$defs/stringList" },
-              "metadata": { "type": "object" }
-            },
-            "required": ["text"],
-            "additionalProperties": false
-          }
-        }
-      },
-      "required": ["namespace","items"],
-      "additionalProperties": false
-    },
-    "output": {
-      "type": "object",
-      "properties": {
-        "upserted": { "type": "integer" },
-        "embedded": { "type": "integer" }
-      },
-      "required": ["upserted","embedded"],
-      "additionalProperties": false
-    }
-  },
   "kom.local.v1.backup.export_encrypted": {
     "description": "Queue an encrypted backup export for the requested namespaces.",
     "input": {
@@ -1,10 +0,0 @@
#include "guardrailspolicy.h"

GuardrailsPolicy::GuardrailsPolicy(QObject *parent) : QObject(parent) {}

DefaultGuardrails::DefaultGuardrails(QObject *parent) : GuardrailsPolicy(parent) {}

GuardrailsPolicy::Decision DefaultGuardrails::evaluate(const QString &toolName, const QVariantMap &args) const {
    Q_UNUSED(toolName); Q_UNUSED(args);
    return { true, QString() };
}
@@ -1,24 +0,0 @@
#pragma once
#include <QObject>
#include <QString>
#include <QVariantMap>
#include "kompanion_mw_export.h"

/** GuardrailsPolicy: approve/deny tool requests before execution */
class GuardrailsPolicy : public QObject {
    Q_OBJECT
public:
    explicit GuardrailsPolicy(QObject *parent=nullptr);
    virtual ~GuardrailsPolicy() = default;

    struct Decision { bool allow; QString reason; };
    virtual Decision evaluate(const QString &toolName, const QVariantMap &args) const = 0;
};

/** DefaultGuardrails: permissive, placeholder for identity.json loading */
class KOMPANION_MW_EXPORT DefaultGuardrails : public GuardrailsPolicy {
    Q_OBJECT
public:
    explicit DefaultGuardrails(QObject *parent=nullptr);
    Decision evaluate(const QString &toolName, const QVariantMap &args) const override;
};
@@ -1,46 +0,0 @@
#include "harmonyadapter.h"
#include <QJsonArray>
#include <QJsonDocument>

namespace Harmony {

QJsonObject toHarmony(const ToolSpec &spec) {
    QJsonObject o;
    o.insert("name", spec.name);
    if (!spec.description.isEmpty()) o.insert("description", spec.description);
    if (!spec.parameters.isEmpty()) o.insert("parameters", spec.parameters);
    return o;
}

ToolSpec fromHarmonySpec(const QJsonObject &obj, bool *ok) {
    ToolSpec s;
    bool good = obj.contains("name") && obj.value("name").isString();
    if (good) {
        s.name = obj.value("name").toString();
        s.description = obj.value("description").toString();
        if (obj.value("parameters").isObject()) s.parameters = obj.value("parameters").toObject();
    }
    if (ok) *ok = good;
    return s;
}

QJsonObject toHarmony(const ToolCall &call) {
    QJsonObject o;
    o.insert("name", call.name);
    o.insert("arguments", QJsonObject::fromVariantMap(call.arguments));
    return o;
}

ToolCall fromHarmonyCall(const QJsonObject &obj, bool *ok) {
    ToolCall c;
    bool good = obj.contains("name") && obj.value("name").isString();
    if (good) {
        c.name = obj.value("name").toString();
        if (obj.value("arguments").isObject())
            c.arguments = obj.value("arguments").toObject().toVariantMap();
    }
    if (ok) *ok = good;
    return c;
}

} // namespace Harmony
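A quick round-trip sketch (not in the diff) showing how the removed adapter above would be exercised; the tool name and argument values are examples taken from elsewhere in this diff:

#include <QtGlobal>
#include <QJsonObject>
#include <QVariantMap>

void harmonyRoundTrip() {
    Harmony::ToolCall call;
    call.name = QStringLiteral("kom.memory.v1.search_memory");
    call.arguments = QVariantMap{{QStringLiteral("namespace"), QStringLiteral("project:demo")}};
    const QJsonObject wire = Harmony::toHarmony(call);   // {"name": ..., "arguments": {...}}
    bool ok = false;
    const Harmony::ToolCall back = Harmony::fromHarmonyCall(wire, &ok);
    Q_ASSERT(ok && back.name == call.name);
}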
@@ -1,26 +0,0 @@
#pragma once
#include <QJsonObject>
#include <QString>
#include <QVariantMap>

/** HarmonyAdapter: translate native tool specs/calls to/from OpenAI Harmony JSON */
namespace Harmony {

struct ToolSpec {
    QString name;
    QString description;
    QJsonObject parameters; // JSON Schema-like
};

struct ToolCall {
    QString name;
    QVariantMap arguments;
};

QJsonObject toHarmony(const ToolSpec &spec);
ToolSpec fromHarmonySpec(const QJsonObject &obj, bool *ok=nullptr);

QJsonObject toHarmony(const ToolCall &call);
ToolCall fromHarmonyCall(const QJsonObject &obj, bool *ok=nullptr);

} // namespace Harmony
@@ -1,12 +0,0 @@
#ifndef KOMPANION_MW_EXPORT_H
#define KOMPANION_MW_EXPORT_H

#include <QtGlobal>

#if defined(KOMPANION_MW_LIBRARY)
#  define KOMPANION_MW_EXPORT Q_DECL_EXPORT
#else
#  define KOMPANION_MW_EXPORT Q_DECL_IMPORT
#endif

#endif // KOMPANION_MW_EXPORT_H
@@ -1,48 +0,0 @@
#include "kompanioncontroller.h"
#include "regexregistry.h"
#include "guardrailspolicy.h"
#include <QCryptographicHash>
#include <QDateTime>

KompanionController::KompanionController(QObject *parent) : QObject(parent) {
    registry_ = new RegexRegistry(this);
    policy_ = new DefaultGuardrails(this);
}

QString KompanionController::sendPrompt(const QString &prompt) {
    QString tool; QVariantMap args;
    if (!mapPromptToTool(prompt, tool, args)) {
        const QString req = generateRequestId();
        emit textOutput(req, QStringLiteral("(no mapping) %1").arg(prompt));
        return QString();
    }
    const QString req = generateRequestId();
    if (policy_) {
        auto dec = policy_->evaluate(tool, args);
        if (!dec.allow) {
            emit textOutput(req, QStringLiteral("blocked by guardrails: %1").arg(dec.reason));
            return QString();
        }
    }
    emit toolRequested(req, tool, args);
    return req;
}

void KompanionController::onToolResult(const QString &requestId, const QString &resultJson, bool success) {
    Q_UNUSED(success);
    emit textOutput(requestId, resultJson);
}

void KompanionController::cancelRequest(const QString &requestId) {
    emit textOutput(requestId, QStringLiteral("cancel requested"));
}

QString KompanionController::generateRequestId() const {
    QByteArray seed = QByteArray::number(QDateTime::currentMSecsSinceEpoch());
    return QString::fromLatin1(QCryptographicHash::hash(seed, QCryptographicHash::Sha256).toHex().left(12));
}

bool KompanionController::mapPromptToTool(const QString &prompt, QString &toolName, QVariantMap &args) const {
    if (registry_) return registry_->match(prompt, toolName, args);
    return false;
}
@@ -1,33 +0,0 @@
#pragma once
#include <QObject>
#include <QVariantMap>
#include "kompanion_mw_export.h"

class RegexRegistry;
class GuardrailsPolicy;

/**
 * KompanionController: D-Bus facing middleware controller for org.kde.kompanion.Controller
 */
class KOMPANION_MW_EXPORT KompanionController : public QObject {
    Q_OBJECT
public:
    explicit KompanionController(QObject *parent=nullptr);

public slots:
    /** Accept a user prompt (natural language). Returns requestId or empty on reject. */
    QString sendPrompt(const QString &prompt);
    void onToolResult(const QString &requestId, const QString &resultJson, bool success);
    void cancelRequest(const QString &requestId);

signals:
    void textOutput(const QString &requestId, const QString &text);
    void toolRequested(const QString &requestId, const QString &toolName, const QVariantMap &args);

private:
    QString generateRequestId() const;
    bool mapPromptToTool(const QString &prompt, QString &toolName, QVariantMap &args) const;

    RegexRegistry *registry_ = nullptr;
    GuardrailsPolicy *policy_ = nullptr;
};
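A wiring sketch, not part of the diff, for connecting the removed controller above to the removed LibKiExecutor below; serializing the QVariantMap args to a JSON string for execute() is an assumption, and the executor's own request id is ignored here for simplicity.

#include <QJsonDocument>
#include <QJsonObject>

void wireControllerToExecutor(KompanionController *controller, LibKiExecutor *executor) {
    // Forward approved tool requests to the executor.
    QObject::connect(controller, &KompanionController::toolRequested, executor,
        [executor](const QString &requestId, const QString &toolName, const QVariantMap &args) {
            Q_UNUSED(requestId);
            executor->execute(toolName, QString::fromUtf8(
                QJsonDocument(QJsonObject::fromVariantMap(args)).toJson(QJsonDocument::Compact)));
        });
    // Feed executor results back into the controller.
    QObject::connect(executor, &LibKiExecutor::resultReady,
                     controller, &KompanionController::onToolResult);
}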
@@ -1,25 +0,0 @@
#include "libkiexecutor.h"
#include <QUuid>
#include <QDebug>
#include <QTimer>

LibKiExecutor::LibKiExecutor(QObject *parent)
    : QObject(parent)
{
}

QString LibKiExecutor::execute(const QString &toolName, const QString &args)
{
    const QString requestId = QUuid::createUuid().toString();
    qDebug() << "Executing tool:" << toolName << "with args:" << args;

    // In a real implementation, this would dispatch to the corresponding libKI function.
    // For this skeleton, we'll just echo the request and emit a dummy result.

    // Simulate an asynchronous operation
    QTimer::singleShot(1000, this, [this, requestId, args]() {
        emit resultReady(requestId, args, true);
    });

    return requestId;
}
@@ -1,20 +0,0 @@
#ifndef LIBKIEXECUTOR_H
#define LIBKIEXECUTOR_H

#include <QObject>
#include <QString>

class LibKiExecutor : public QObject
{
    Q_OBJECT
public:
    explicit LibKiExecutor(QObject *parent = nullptr);

public slots:
    QString execute(const QString &toolName, const QString &args);

signals:
    void resultReady(const QString &requestId, const QString &resultJson, bool success);
};

#endif // LIBKIEXECUTOR_H
@ -1,224 +0,0 @@
#include "orchestrator.h"

#include <QByteArray>
#include <QDate>
#include <QEventLoop>
#include <QFile>
#include <QJsonArray>
#include <QJsonDocument>
#include <QNetworkReply>
#include <QProcessEnvironment>
#include <QTextStream>
#include <QTimer>

#include "dal/PgDal.hpp"

// ---------- OllamaModelProvider ----------
OllamaModelProvider::OllamaModelProvider(QObject *parent)
    : QObject(parent)
{
    const auto env = QProcessEnvironment::systemEnvironment();
    baseUrl_ = env.value(QStringLiteral("OLLAMA_BASE"), QStringLiteral("http://localhost:11434"));
}

QString OllamaModelProvider::chooseModelForAspect(const QString &aspect) const {
    // Simple mapping; could read models.yaml in the future.
    if (aspect.compare(QStringLiteral("companion"), Qt::CaseInsensitive) == 0) return defaultModel_;
    if (aspect.compare(QStringLiteral("code"), Qt::CaseInsensitive) == 0) return defaultModel_;
    return defaultModel_;
}

QString OllamaModelProvider::generate(const QString &prompt, const QString &aspect) {
    const QString model = chooseModelForAspect(aspect);
    const QUrl url(baseUrl_ + QStringLiteral("/api/generate"));

    QNetworkRequest req(url);
    req.setHeader(QNetworkRequest::ContentTypeHeader, QStringLiteral("application/json"));
    const QJsonObject body{
        {QStringLiteral("model"), model},
        {QStringLiteral("prompt"), prompt},
        {QStringLiteral("stream"), false},
    };
    const QByteArray payload = QJsonDocument(body).toJson(QJsonDocument::Compact);
    QEventLoop loop;
    QNetworkReply *reply = nam_.post(req, payload);
    QObject::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
    // Time out defensively after 10s; return empty on failure.
    QTimer to;
    to.setSingleShot(true);
    QObject::connect(&to, &QTimer::timeout, &loop, &QEventLoop::quit);
    to.start(10000);
    loop.exec();
    if (reply->error() != QNetworkReply::NoError) {
        reply->deleteLater();
        return QString();
    }
    const auto data = reply->readAll();
    reply->deleteLater();
    const auto doc = QJsonDocument::fromJson(data);
    if (!doc.isObject()) return QString();
    return doc.object().value(QStringLiteral("response")).toString().trimmed();
}

// ---------- Orchestrator ----------
Orchestrator::Orchestrator(QObject *parent)
    : QObject(parent)
{
    // Default provider (can be replaced in tests)
    static OllamaModelProvider defaultProv; // lifetime: process
    model_ = &defaultProv;

    connect(&timer_, &QTimer::timeout, this, &Orchestrator::processPendingTasks);
}

Orchestrator::~Orchestrator() {
    delete dal_;
}

ki::PgDal& Orchestrator::dal() {
    if (!dal_) {
        dal_ = new ki::PgDal();
        const QByteArray dsn = qgetenv("PG_DSN");
        if (!dsn.isEmpty()) dal_->connect(dsn.toStdString()); else dal_->connect("stub://memory");
    }
    return *dal_;
}

void Orchestrator::start(int intervalMs) {
    ensureResolvedDirs();
    continuityHandshakeOnce();
    timer_.start(intervalMs);
}

void Orchestrator::stop() { timer_.stop(); }

void Orchestrator::ensureResolvedDirs() {
    if (!stateDir_.exists()) {
        const auto env = QProcessEnvironment::systemEnvironment();
        const auto xdgState = env.value(QStringLiteral("XDG_STATE_HOME"), QDir::home().filePath(".local/state"));
        stateDir_.setPath(QDir(xdgState).filePath("kompanion"));
    }
    if (!configDir_.exists()) {
        const auto env = QProcessEnvironment::systemEnvironment();
        const auto xdgConf = env.value(QStringLiteral("XDG_CONFIG_HOME"), QDir::home().filePath(".config"));
        configDir_.setPath(QDir(xdgConf).filePath("kompanion"));
    }
    QDir().mkpath(stateDir_.absolutePath());
    QDir().mkpath(journalDirPath());
}

QString Orchestrator::nowUtc() const {
    return QDateTime::currentDateTimeUtc().toString(Qt::ISODateWithMs).replace(QLatin1Char('+'), QLatin1Char('Z'));
}

void Orchestrator::ledgerAppend(const QJsonObject &evt) {
    QFile f(ledgerPath());
    if (f.open(QIODevice::ReadOnly)) {
        // noop: we could hash prev line like the python version; keep minimal for now
        f.close();
    }
    if (f.open(QIODevice::Append | QIODevice::Text)) {
        QJsonObject copy = evt;
        copy.insert(QStringLiteral("ts"), nowUtc());
        const QByteArray line = QJsonDocument(copy).toJson(QJsonDocument::Compact) + '\n';
        f.write(line);
        f.close();
    }
}

void Orchestrator::journalAppend(const QString &text) {
    // Ensure journal directory exists even when start() was not called (tests)
    QDir().mkpath(journalDirPath());
    const QString file = QDir(journalDirPath()).filePath(QDate::currentDate().toString(Qt::ISODate) + QStringLiteral(".md"));
    QFile f(file);
    if (f.open(QIODevice::Append | QIODevice::Text)) {
        QTextStream out(&f);
        out << "- " << nowUtc() << ' ' << text << '\n';
        out.flush();
        f.close();
    }
    QJsonObject evt{{QStringLiteral("actor"), QStringLiteral("Χγφτ")}, {QStringLiteral("action"), QStringLiteral("journal.append")}};
    ledgerAppend(evt);
}

void Orchestrator::continuityHandshakeOnce() {
    if (continuityDone_) return;
    continuityDone_ = true;
    QJsonObject evt{{QStringLiteral("actor"), QStringLiteral("Χγφτ")}, {QStringLiteral("action"), QStringLiteral("CONTINUITY_ACCEPTED")}};
    ledgerAppend(evt);
}

void Orchestrator::handleJournalFromPrompt(const QJsonObject &obj) {
    const QString aspect = obj.value(QStringLiteral("aspect")).toString(QStringLiteral("companion"));
    const QString prompt = obj.value(QStringLiteral("prompt")).toString();
    if (!model_) return;
    const QString preface = QStringLiteral("Write a brief, warm reflection.\nPrompt:\n");
    const QString out = model_->generate(preface + prompt, aspect);
    if (!out.isEmpty()) {
        journalAppend(out);
    }
    emit taskProcessed(QStringLiteral("journal.from_prompt"));
}

void Orchestrator::processPendingTasks() {
    QFile f(tasksPath());
    if (!f.exists()) return;
    if (!f.open(QIODevice::ReadOnly | QIODevice::Text)) return;
    const QByteArray data = f.readAll();
    f.close();
    // Truncate after reading
    if (f.open(QIODevice::WriteOnly | QIODevice::Truncate)) f.close();

    const QList<QByteArray> lines = QByteArray(data).split('\n');
    for (const QByteArray &raw : lines) {
        const QByteArray trimmed = raw.trimmed();
        if (trimmed.isEmpty()) continue;
        const auto doc = QJsonDocument::fromJson(trimmed);
        if (!doc.isObject()) continue;
        const QJsonObject obj = doc.object();
        const QString type = obj.value(QStringLiteral("type")).toString();
        if (type == QStringLiteral("journal.from_prompt")) {
            handleJournalFromPrompt(obj);
        } else {
            QJsonObject evt{{QStringLiteral("action"), QStringLiteral("unknown.task")}, {QStringLiteral("type"), type}};
            ledgerAppend(evt);
        }
    }
}

bool Orchestrator::saveSnapshot(const QString &nameSpace,
                                const QString &key,
                                const QJsonObject &content,
                                const QStringList &tags)
{
    auto nsRow = dal().ensureNamespace(nameSpace.toStdString());
    if (!nsRow) return false;

    ki::ItemRow row;
    row.namespace_id = nsRow->id;
    row.key = key.toStdString();
    row.tags.reserve(tags.size());
    for (const auto &t : tags) row.tags.push_back(t.toStdString());
    row.content_json = QString::fromUtf8(QJsonDocument(content).toJson(QJsonDocument::Compact)).toStdString();
    row.metadata_json = "{}";
    row.created_at = std::chrono::system_clock::now();

    const std::string id = dal().upsertItem(row);
    return !id.empty();
}

std::optional<QJsonObject> Orchestrator::loadSnapshot(const QString &nameSpace,
                                                      const QString &key)
{
    auto nsRow = dal().findNamespace(nameSpace.toStdString());
    if (!nsRow) return std::nullopt;

    std::vector<std::string> tags; tags.emplace_back("snapshot");
    auto rows = dal().fetchContext(nsRow->id, std::optional<std::string>(key.toStdString()), tags, std::nullopt, 1);
    if (rows.empty()) return std::nullopt;
    const auto &row = rows.front();
    if (row.content_json.empty()) return std::nullopt;
    const auto doc = QJsonDocument::fromJson(QByteArray::fromStdString(row.content_json));
    if (!doc.isObject()) return std::nullopt;
    return doc.object();
}
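For orientation: OllamaModelProvider::generate above is a blocking POST to Ollama's /api/generate with streaming disabled, reading back the "response" field. A minimal Python sketch of the same exchange, not part of the tree, with the default model and base URL mirroring the defaults in the code above:

import os, requests

def generate(prompt: str, model: str = "qwen2.5:7b") -> str:
    # Same call OllamaModelProvider::generate makes: non-streaming /api/generate.
    base = os.environ.get("OLLAMA_BASE", "http://localhost:11434")
    body = {"model": model, "prompt": prompt, "stream": False}
    r = requests.post(f"{base}/api/generate", json=body, timeout=10)
    if r.status_code != 200:
        return ""  # the C++ code likewise returns an empty string on error/timeout
    return r.json().get("response", "").strip()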
@ -1,113 +0,0 @@
#pragma once
#include <QObject>
#include <QDir>
#include <QJsonObject>
#include <QNetworkAccessManager>
#include <QTimer>
#include <functional>

#include "kompanion_mw_export.h"

namespace ki { class PgDal; }

// Minimal model provider interface so tests can stub generation.
class IModelProvider {
public:
    virtual ~IModelProvider() = default;
    virtual QString generate(const QString &prompt, const QString &aspect) = 0;
};

// Default Ollama-backed provider. Uses OLLAMA_BASE and simple /api/generate call.
class OllamaModelProvider : public QObject, public IModelProvider {
    Q_OBJECT
public:
    explicit OllamaModelProvider(QObject *parent=nullptr);
    QString generate(const QString &prompt, const QString &aspect) override;
    void setBaseUrl(const QString &base) { baseUrl_ = base; }
    void setDefaultModel(const QString &m) { defaultModel_ = m; }
private:
    QString chooseModelForAspect(const QString &aspect) const; // simple heuristic
    QString baseUrl_;
    QString defaultModel_ = QStringLiteral("qwen2.5:7b");
    QNetworkAccessManager nam_;
};

// Simple stub provider used by tests; returns deterministic text.
class StubModelProvider : public IModelProvider {
public:
    explicit StubModelProvider(QString canned) : canned_(std::move(canned)) {}
    QString generate(const QString &prompt, const QString &aspect) override {
        Q_UNUSED(prompt); Q_UNUSED(aspect); return canned_;
    }
private:
    QString canned_;
};

// Orchestrator: replicates runtime/kom_runner.py behaviors in C++.
// - Watches a JSONL tasks file under XDG_STATE_HOME/kompanion
// - Processes tasks like {"type":"journal.from_prompt", "prompt":"...", "aspect":"companion"}
// - Appends to journal (<state>/journal/YYYY-MM-DD.md) and to a simple ledger JSONL
class KOMPANION_MW_EXPORT Orchestrator : public QObject {
    Q_OBJECT
public:
    explicit Orchestrator(QObject *parent=nullptr);
    ~Orchestrator();

    // Injectable model provider (Ollama by default). Ownership left to caller.
    void setModelProvider(IModelProvider *prov) { model_ = prov; }

    // Directories resolved from XDG_* on start(); overridable for tests.
    void setStateDir(const QDir &dir) { stateDir_ = dir; }
    void setConfigDir(const QDir &dir) { configDir_ = dir; }

    // Poll loop control.
    void start(int intervalMs = 3000);
    void stop();

    // One-shot tick (public for tests).
    void processPendingTasks();

    /**
     * saveSnapshot: persist a JSON snapshot under (namespace,key) as a context item.
     * - Tags default to {"snapshot"}.
     * - If PG_DSN is unset, uses an in-memory stub (won't persist across restarts).
     */
    bool saveSnapshot(const QString &nameSpace,
                      const QString &key,
                      const QJsonObject &content,
                      const QStringList &tags = {QStringLiteral("snapshot")});

    /**
     * loadSnapshot: fetch the latest item for (namespace,key) tagged as snapshot.
     * Returns empty optional if not found or on failure.
     */
    std::optional<QJsonObject> loadSnapshot(const QString &nameSpace,
                                            const QString &key);

signals:
    void taskProcessed(const QString &kind);

private:
    void ensureResolvedDirs();
    void continuityHandshakeOnce();
    void ledgerAppend(const QJsonObject &evt);
    void journalAppend(const QString &line); // Also emits ledger entry
    QString nowUtc() const;

    // Task handlers
    void handleJournalFromPrompt(const QJsonObject &obj);

    // Helpers
    QString tasksPath() const { return stateDir_.filePath("tasks.jsonl"); }
    QString journalDirPath() const { return stateDir_.filePath("journal"); }
    QString ledgerPath() const { return stateDir_.filePath("trust_ledger.jsonl"); }

    QDir stateDir_;
    QDir configDir_;
    QTimer timer_;
    bool continuityDone_ = false;
    IModelProvider *model_ = nullptr; // not owned
    // Reused DB handle so in-memory stub persists across calls in tests.
    ki::PgDal* dal_ = nullptr;
    ki::PgDal& dal();
};
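As the Orchestrator comment above describes, tasks are plain JSON lines appended to <XDG_STATE_HOME>/kompanion/tasks.jsonl and picked up on the next poll. A minimal Python sketch for enqueuing one journal.from_prompt task; the helper itself is illustrative, with the file name and fields taken from tasksPath() and the comment above:

import json, os
from pathlib import Path

def enqueue_journal_task(prompt: str, aspect: str = "companion") -> None:
    # Resolve the same state directory ensureResolvedDirs() uses.
    state = Path(os.environ.get("XDG_STATE_HOME", str(Path.home() / ".local/state"))) / "kompanion"
    state.mkdir(parents=True, exist_ok=True)
    task = {"type": "journal.from_prompt", "prompt": prompt, "aspect": aspect}
    with open(state / "tasks.jsonl", "a", encoding="utf-8") as f:
        f.write(json.dumps(task, ensure_ascii=False) + "\n")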
@ -1,23 +0,0 @@
#pragma once
#include <QObject>
#include <QVariantMap>
#include <functional>

/** Simple durable journal for in-flight tool calls.
 * Stores JSONL entries at runtime/pending.jsonl so crashes/UI reloads can resume.
 */
class RecoveryJournal : public QObject {
    Q_OBJECT
public:
    explicit RecoveryJournal(const QString &path, QObject *parent=nullptr);

    // record an in-flight tool call
    void logInFlight(const QString &requestId, const QString &toolName, const QVariantMap &args);
    // mark completion
    void complete(const QString &requestId, bool ok);
    // iterate unfinished entries and invoke callback(requestId, tool, args)
    void recoverPending(const std::function<void(const QString&, const QString&, const QVariantMap&, const QString&)> &cb);

private:
    QString path_;
};
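The header above pins down the API but not the on-disk entry shape; a plausible pending.jsonl layout, mirroring the logInFlight()/complete() parameters (field names here are guesses, not a documented format):

{"requestId": "req-123", "tool": "search_memory", "args": {"query": "…"}, "state": "in_flight"}
{"requestId": "req-123", "state": "done", "ok": true}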
@ -1,43 +0,0 @@
#include "regexregistry.h"
#include <QFile>
#include <QJsonDocument>
#include <QJsonArray>
#include <QJsonObject>

RegexRegistry::RegexRegistry(QObject *parent) : QObject(parent) {}

bool RegexRegistry::loadFromFile(const QString &path) {
    QFile f(path); if (!f.open(QIODevice::ReadOnly)) return false;
    sourcePath_ = path; rules_.clear();
    const auto doc = QJsonDocument::fromJson(f.readAll());
    if (!doc.isArray()) return false;
    for (const auto &it : doc.array()) {
        if (!it.isObject()) continue;
        const auto o = it.toObject();
        const auto rx = o.value("regex").toString();
        const auto tool = o.value("tool").toString();
        const auto keys = o.value("keys").toArray();
        if (rx.isEmpty() || tool.isEmpty()) continue;
        Rule r{ QRegularExpression(rx, QRegularExpression::CaseInsensitiveOption), tool, {} };
        for (const auto &k : keys) r.argKeys << k.toString();
        rules_.push_back(std::move(r));
    }
    emit reloaded();
    return true;
}

bool RegexRegistry::match(const QString &prompt, QString &tool, QVariantMap &args) const {
    for (const auto &r : rules_) {
        const auto m = r.re.match(prompt.trimmed());
        if (m.hasMatch()) {
            tool = r.tool; args.clear();
            for (int i=0; i<r.argKeys.size(); ++i) {
                const auto key = r.argKeys.at(i);
                const auto val = m.captured(i+1);
                if (!key.isEmpty() && !val.isEmpty()) args.insert(key, val);
            }
            return true;
        }
    }
    return false;
}
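loadFromFile() above expects a JSON array of objects with "regex", "tool" and "keys", where match() maps capture groups positionally onto the listed keys. An illustrative mappings.json in that shape (patterns and tool names are examples only, not the shipped resources/mappings.json):

[
  {"regex": "^remember (.+)$", "tool": "upsert_memory", "keys": ["text"]},
  {"regex": "^recall (.+)$", "tool": "search_memory", "keys": ["query"]}
]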
@ -1,21 +0,0 @@
#pragma once
#include <QObject>
#include <QRegularExpression>
#include <QVariantMap>
#include <QVector>
#include "kompanion_mw_export.h"

/** RegexRegistry: hot-reloadable mapping from NL prompts to tool+args */
class KOMPANION_MW_EXPORT RegexRegistry : public QObject {
    Q_OBJECT
public:
    struct Rule { QRegularExpression re; QString tool; QStringList argKeys; };
    explicit RegexRegistry(QObject *parent=nullptr);
    bool loadFromFile(const QString &path);
    bool match(const QString &prompt, QString &tool, QVariantMap &args) const;
signals:
    void reloaded();
private:
    QVector<Rule> rules_;
    QString sourcePath_;
};
@ -0,0 +1,79 @@
# metal-kompanion-mcp
MCP backend for Kompanion: memory/context/embedding provider over MCP, built from scratch (qtmcp-based) to persist conversation state and serve embeddings + retrieval to avoid forgetting across threads.

> ## 📈 Project Summary
>
> **✅ Done**: 2 | **🔄 In Progress**: 0 | **⬜ Todo**: 38 | **❌ Blocked**: 0
>
> **Progress**: 5% `█░░░░░░░░░░░░░░░░░░░` 2/40 tasks
>
> **Priorities**: 🚨 **Critical**: 0 | 🔴 **High**: 1 | 🟡 **Medium**: 41 | 🟢 **Low**: 0

## Tasks

| ID | Status | Priority | Title | Description |
|:--:|:------:|:--------:|:------|:------------|
| #1 | ⬜ todo | 700 | **Project Setup: metal-kompanion-mcp** | MCP backend for Kompanion: me... |
| #2 | ⬜ in_progress | 500 | **Design MCP memory/context API** | Specify MCP tools for: save_c... |
| #3 | ⬜ todo | 501 | **Select embedding backend & storage** | Choose between local (Ollama/... |
| #4 | ⬜ in_progress | 499 | **Scaffold qtmcp-based server** | Set up C++/Qt MCP server skel... |
| #5 | ⬜ todo | 502 | **Implement memory adapters** | Adapters: (1) SQLite+FAISS/pg... |
| #6 | ⬜ todo | 498 | **Deep research: memory DB architecture & schema** | Survey best practices for con... |
| #7 | ⬜ todo | 503 | **Decide primary DB: Postgres+pgvector vs SQLite+FAISS** | Evaluate tradeoffs (multi-use... |
| #8 | ⬜ todo | 497 | **Implement DAL + migrations (pgvector)** | Create C++ DAL layer for name... |
| #9 | ⬜ todo | 504 | **Add cloud DB hardening (RLS, FTS/trgm, ANN indexes)** | Implement RLS policies; add F... |
| #10 | ⬜ todo | 496 | **Server enforcement: scope injection + rate limits** | Inject namespace/user via ses... |
| #11 | ⬜ todo | 505 | **Redaction & sensitivity pipeline** | Implement preprocessing to de... |
| #12 | ⬜ todo | 495 | **Private vault mode (key-only retrieval)** | Implement vault path for secr... |
| #13 | ⬜ todo | 506 | **Local backup tools: export/import (E2EE)** | Add kom.local.v1.backup.expor... |
| #14 | ⬜ todo | 494 | **Cloud adapters: backup/sync & payments stubs** | Expose kom.cloud.v1.backup.up... |
| #15 | ⬜ todo | 507 | **Purge job & admin delete paths** | Implement scheduled hard-dele... |
| #16 | ⬜ todo | 493 | **Test suite: privacy & hybrid search** | Cross-tenant leakage, redacti... |
| #17 | ⬜ todo | 508 | **Enable Qwen-2.5-Coder with tool support (Happy-Code profile)** | Prepare system prompt + regis... |
| #18 | ⬜ todo | 492 | **Expose Agentic-Control-Framework as a tool** | Wrap ACF endpoints into a too... |
| #19 | ⬜ todo | 509 | **DAL skeleton + SQL calls (pgvector)** | Create DAL interfaces and pgv... |
| #20 | ⬜ todo | 491 | **Claude Code integration rescue plan** | Stabilize Qwen-2.5-Coder insi... |
| #21 | ⬜ todo | 510 | **DAL Phase 1: libpq/pqxx wiring + SQL calls** | Link pqxx, implement PgDal ag... |
| #22 | ⬜ todo | 490 | **Handlers → DAL integration** | Wire kom.memory.v1.upsert_mem... |
| #23 | ⬜ todo | 511 | **Contract tests: DAL-backed tools** | Expand CTest to cover DAL-bac... |
| #24 | ⬜ todo | 489 | **Implement KompanionAI SDK** | |

### Task #2: Design MCP memory/context API - Subtasks

| ID | Status | Title |
|:--:|:------:|:------|
| #2.1 | ⬜ todo | Write JSON Schemas for tools (done) |

### Task #21: DAL Phase 1: libpq/pqxx wiring + SQL calls - Subtasks

| ID | Status | Title |
|:--:|:------:|:------|
| #21.1 | ⬜ todo | CMake: find_package(pqxx) and link; CI env var DSN |
| #21.2 | ⬜ todo | PgDal: implement connect/tx + prepared statements |
| #21.3 | ⬜ todo | SQL: ensureNamespace, upsertItem/Chunks/Embeddings |
| #21.4 | ⬜ todo | Search: FTS/trgm + vector <-> with filters (namespace/thread/tags) |

### Task #22: Handlers → DAL integration - Subtasks

| ID | Status | Title |
|:--:|:------:|:------|
| #22.1 | ⬜ todo | Replace ad-hoc JSON with parser (nlohmann/json or simdjson) |
| #22.2 | ⬜ todo | Validate request bodies against schemas before DAL calls |
| #22.3 | ⬜ todo | Scope & sensitivity enforcement (namespace/user + skip secret embeddings) |

### Task #24: Implement KompanionAI SDK - Subtasks

| ID | Status | Title |
|:--:|:------:|:------|
| #24.1 | ✅ done | Define Message & Thread Model |
| #24.2 | ✅ done | Implement Tool / Function Calling |
| #24.3 | ⬜ todo | Implement Provider abstraction (multi-backend) |
| #24.4 | ⬜ todo | Implement Completion / Reply / Streaming Events |
| #24.5 | ⬜ todo | Implement Options / Policies / Privacy |
| #24.6 | ⬜ todo | Implement Embeddings (for RAG / memory) |
| #24.7 | ⬜ todo | Implement Agent Loop Conveniences |
| #24.8 | ⬜ todo | Implement Error Model & Cancellation |
| #24.9 | ⬜ todo | Expose to QML |
| #24.10 | ⬜ todo | Migrate KLLM to KompanionAI |
@ -1,19 +1,32 @@
-enable_testing()
-qt_add_executable(test_mw
-  test_middleware.cpp
-)
-find_package(Qt6 REQUIRED COMPONENTS Core Test)
-target_link_libraries(test_mw PRIVATE Qt6::Core Qt6::Test kompanion_mw)
-add_test(NAME test_mw COMMAND test_mw)
-
-qt_add_executable(test_orchestrator
-  test_orchestrator.cpp
-)
-target_link_libraries(test_orchestrator PRIVATE Qt6::Core Qt6::Network Qt6::Test kompanion_mw)
-add_test(NAME test_orchestrator COMMAND test_orchestrator)
-
-qt_add_executable(test_snapshot
-  test_snapshot.cpp
-)
-target_link_libraries(test_snapshot PRIVATE Qt6::Core Qt6::Network Qt6::Test kompanion_mw)
-add_test(NAME test_snapshot COMMAND test_snapshot)
+add_executable(test_mcp_tools
+  contract/test_mcp_tools.cpp
+)
+target_include_directories(test_mcp_tools PRIVATE ${PROJECT_SOURCE_DIR}/src)
+target_link_libraries(test_mcp_tools PRIVATE kom_dal)
+target_compile_options(test_mcp_tools PRIVATE -fexceptions)
+
+add_test(NAME contract_mcp_tools COMMAND test_mcp_tools)
+
+add_executable(contract_memory
+  contract_memory.cpp
+)
+target_include_directories(contract_memory PRIVATE ${PROJECT_SOURCE_DIR}/src)
+target_link_libraries(contract_memory PRIVATE kom_dal)
+target_compile_options(contract_memory PRIVATE -fexceptions)
+
+add_test(NAME contract_memory COMMAND contract_memory)
+
+add_executable(test_memory_exchange
+  mcp/test_memory_exchange.cpp
+)
+target_include_directories(test_memory_exchange PRIVATE ${PROJECT_SOURCE_DIR}/src)
+target_link_libraries(test_memory_exchange PRIVATE kom_dal)
+target_compile_options(test_memory_exchange PRIVATE -fexceptions)
+
+add_test(NAME mcp_memory_exchange COMMAND test_memory_exchange)
+
+add_test(
+  NAME e2e_mcp_test
+  COMMAND /bin/bash ${CMAKE_CURRENT_SOURCE_DIR}/e2e_mcp_test.sh
+  WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
+)
@ -13,8 +13,9 @@ MCP_SERVER_URL="http://${MCP_SERVER_HOST}:${MCP_SERVER_PORT}"
 # --- Cleanup Function ---
 cleanup() {
     echo "--- Cleaning up ---"
-    [ -n "${mcp_proxy_pid:-}" ] && kill "$mcp_proxy_pid" || true
-    [ -n "${mcp_server_pid:-}" ] && kill "$mcp_server_pid" || true
+    if [ -n "$mcp_server_pid" ]; then
+        kill "$mcp_server_pid" || true
+    fi
     pkill -f kom_mcp || true
     sleep 1 # Give the OS time to release the port
     netstat -tuln | grep ":${MCP_SERVER_PORT}" || true # Check if port is still in use
@ -28,28 +29,15 @@ echo "--- Setting up test environment ---"
 echo ">> Initializing test database..."
 "${PROJECT_ROOT_DIR}/db/scripts/create-test-db.sh" "$TEST_DB_NAME"
 
-# Optional environment bootstrap (developer machine)
-if [ -f "$HOME/dev/main/src/env.sh" ]; then
-  # shellcheck source=/dev/null
-  . "$HOME/dev/main/src/env.sh"
-fi
-
 echo ">> Harvesting embeddings..."
 export DB_URL="dbname=${TEST_DB_NAME} user=kompanion host=/var/run/postgresql" EMBED_NAMESPACE="dev_knowledge"
 python3 "${PROJECT_ROOT_DIR}/tools/ingest_dir.py" "${PROJECT_ROOT_DIR}/tests/test_data" "dev_knowledge"
 
-echo ">> Starting MCP server (preferring stdio + mcp-proxy if available)..."
+echo ">> Starting MCP server..."
 sleep 2
-if command -v mcp-proxy >/dev/null 2>&1; then
-    setsid $MCP_SERVER_EXECUTABLE --backend stdio < /dev/null > /dev/null 2>&1 &
-    mcp_server_pid=$!
-    sleep 1
-    setsid mcp-proxy --target stdio://127.0.0.1 --listen "${MCP_SERVER_HOST}:${MCP_SERVER_PORT}" < /dev/null > /dev/null 2>&1 &
-    mcp_proxy_pid=$!
-else
-    timeout 10 $MCP_SERVER_EXECUTABLE --backend sse --address "${MCP_SERVER_HOST}:${MCP_SERVER_PORT}" < /dev/null > /dev/null 2>&1 &
-    mcp_server_pid=$!
-fi
+timeout 10 $MCP_SERVER_EXECUTABLE --backend sse --address "${MCP_SERVER_HOST}:${MCP_SERVER_PORT}" < /dev/null > /dev/null 2>&1 &
+mcp_server_pid=$!
 
 sleep 5
 ps -ef | grep kom_mcp
@ -1 +0,0 @@
This is a test file.
@ -1 +0,0 @@
This is another test file.
@ -1,21 +0,0 @@
#include <QtTest>
#include "../src/middleware/kompanioncontroller.h"
#include "../src/middleware/regexregistry.h"

class MiddlewareTest : public QObject {
    Q_OBJECT
private slots:
    void prompt_to_tool_mapping() {
        KompanionController ctl;
        RegexRegistry reg;
        reg.loadFromFile(QStringLiteral("../resources/mappings.json"));
        // Connect signals (basic compile-time test)
        QObject::connect(&ctl, &KompanionController::toolRequested, [](auto, auto, auto){ });
        QObject::connect(&ctl, &KompanionController::textOutput, [](auto, auto){ });
        // If the controller used the registry internally, we'd inject it; for now this test ensures build.
        QVERIFY(true);
    }
};

QTEST_MAIN(MiddlewareTest)
#include "test_middleware.moc"
@ -1,51 +0,0 @@
#include <QtTest>
#include <QDir>
#include <QFile>
#include <QTextStream>

#include "../src/middleware/orchestrator.h"

class OrchestratorTest : public QObject {
    Q_OBJECT
private slots:
    void journal_from_prompt_writes_outputs();
};

void OrchestratorTest::journal_from_prompt_writes_outputs() {
    // Create a temp state dir
    QDir tmp = QDir::temp();
    const QString base = QStringLiteral("kompanion_test_%1").arg(QDateTime::currentMSecsSinceEpoch());
    QVERIFY(tmp.mkpath(base));
    QDir state(tmp.filePath(base));

    // Prepare a task JSONL
    QFile tasks(state.filePath("tasks.jsonl"));
    QVERIFY(tasks.open(QIODevice::WriteOnly | QIODevice::Truncate | QIODevice::Text));
    const QByteArray line = QByteArray("{\"type\":\"journal.from_prompt\",\"aspect\":\"companion\",\"prompt\":\"hello world\"}\n");
    QVERIFY(tasks.write(line) == line.size());
    tasks.close();

    // Stub provider returns deterministic text
    StubModelProvider stub(QStringLiteral("TEST_OUTPUT"));

    Orchestrator orch;
    orch.setStateDir(state);
    orch.setModelProvider(&stub);
    orch.processPendingTasks();

    // Expect journal file for today exists and contains the output
    const QString journalPath = state.filePath("journal/" + QDate::currentDate().toString(Qt::ISODate) + ".md");
    QFile journal(journalPath);
    QVERIFY(journal.exists());
    QVERIFY(journal.open(QIODevice::ReadOnly | QIODevice::Text));
    const QString content = QString::fromUtf8(journal.readAll());
    QVERIFY2(content.contains("TEST_OUTPUT"), "Journal should contain model output");

    // Expect ledger file exists
    QFile ledger(state.filePath("trust_ledger.jsonl"));
    QVERIFY(ledger.exists());
}

QTEST_MAIN(OrchestratorTest)
#include "test_orchestrator.moc"
@ -1,25 +0,0 @@
#include <QtTest>
#include "../src/middleware/orchestrator.h"

class SnapshotTest : public QObject {
    Q_OBJECT
private slots:
    void round_trip() {
        // Ensure no PG_DSN is required; Orchestrator maintains a shared in-memory DAL per instance
        Orchestrator orch;
        const QString ns = QStringLiteral("tests");
        const QString key = QStringLiteral("session:last");

        QJsonObject payload{{"a", 1}, {"b", QStringLiteral("x")}};
        QVERIFY2(orch.saveSnapshot(ns, key, payload), "saveSnapshot should succeed");

        auto loaded = orch.loadSnapshot(ns, key);
        QVERIFY2(loaded.has_value(), "loadSnapshot should return value");
        QCOMPARE(loaded->value("a").toInt(), 1);
        QCOMPARE(loaded->value("b").toString(), QStringLiteral("x"));
    }
};

QTEST_MAIN(SnapshotTest)
#include "test_snapshot.moc"
@ -0,0 +1,48 @@
#!/usr/bin/env python3
import os, sys, json, requests, psycopg

DB=os.environ.get("DB_URL","dbname=kompanion user=kompanion host=/var/run/postgresql")
OLLAMA=os.environ.get("OLLAMA_BASE","http://127.0.0.1:11434")
MODEL=os.environ.get("EMBED_MODEL","mxbai-embed-large")
SPACE=os.environ.get("EMBED_SPACE","dev_knowledge")

HELP="""\
Usage: pg_search.py "query text" [k]
Env: DB_URL, OLLAMA_BASE, EMBED_MODEL, EMBED_SPACE (default dev_knowledge)
Prints JSON results: [{score, uri, lineno, text}].
"""

def embed(q: str):
    r = requests.post(f"{OLLAMA}/api/embeddings", json={"model": MODEL, "prompt": q}, timeout=120)
    r.raise_for_status()
    return r.json()["embedding"]

if __name__=="__main__":
    if len(sys.argv)<2:
        print(HELP, file=sys.stderr); sys.exit(1)
    query = sys.argv[1]
    k = int(sys.argv[2]) if len(sys.argv)>2 else 8
    vec = embed(query)
    with psycopg.connect(DB) as conn, conn.cursor() as cur:
        cur.execute("SELECT id, dim FROM komp.space WHERE name=%s", (SPACE,))
        row = cur.fetchone()
        if not row:
            sys.exit(f"space {SPACE} missing")
        sid, dim = row
        if dim not in (768,1024):
            sys.exit(f"unsupported dim {dim}")
        table = f"komp.embedding_{dim}"
        # cosine distance with vector_cosine_ops
        sql = f"""
            SELECT (e.embedding <=> %(v)s::vector) AS score, s.uri, k.lineno, k.text
            FROM {table} e
            JOIN komp.chunk k ON k.id = e.chunk_id
            JOIN komp.source s ON s.id = k.source_id
            WHERE e.space_id = %(sid)s
            ORDER BY e.embedding <=> %(v)s::vector
            LIMIT %(k)s
        """
        cur.execute(sql, {"v": vec, "sid": sid, "k": k})
        out=[{"score":float(r[0]),"uri":r[1],"lineno":r[2],"text":r[3]} for r in cur.fetchall()]
        print(json.dumps(out, ensure_ascii=False, indent=2))
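Assuming the target space has been populated (for instance by tools/ingest_dir.py as in the e2e script above), a typical invocation, if the script lives next to ingest_dir.py under tools/, would be: DB_URL="dbname=kompanion_test user=kompanion host=/var/run/postgresql" python3 tools/pg_search.py "pgvector hybrid search" 5 — which embeds the query via Ollama and prints the five nearest chunks with their cosine-distance scores, source URIs and line numbers.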