mcp: add upsert_and_embed tool; implement warm_cache with filters + embeddings via libKI; DAL: make memory_chunks upsert unique by (item,seq) with SQL and in-memory; DB: add unique index on (item,seq)

Χγφτ Kompanion 2025-10-19 11:09:15 +02:00
parent a0f5dd8b4f
commit 6fd938d4f3
6 changed files with 104 additions and 13 deletions

View File

@@ -28,6 +28,9 @@ CREATE TABLE IF NOT EXISTS memory_chunks (
content_tsv tsvector GENERATED ALWAYS AS (to_tsvector('english', content)) STORED
);
-- Ensure single row per (item,seq)
CREATE UNIQUE INDEX IF NOT EXISTS ux_chunks_item_seq ON memory_chunks(item_id, seq);
CREATE TABLE IF NOT EXISTS embeddings (
id BIGSERIAL PRIMARY KEY,
chunk_id UUID NOT NULL REFERENCES memory_chunks(id) ON DELETE CASCADE,

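The ux_chunks_item_seq index above is what lets the DAL's new ON CONFLICT (item_id, seq) clause (see the PgDal hunk further down) name that column pair as its conflict target: PostgreSQL requires a matching unique index or constraint as the arbiter. Below is a minimal Qt sketch of the resulting semantics, not part of this commit, assuming an open QPSQL connection db and an existing memory_items row whose id is itemId (both hypothetical):

#include <QSqlDatabase>
#include <QSqlQuery>
#include <QString>
#include <QVariant>

// Sketch only: run the same (item_id, seq) upsert twice. The second call
// updates content in place instead of inserting a duplicate row, because
// ux_chunks_item_seq acts as the arbiter for ON CONFLICT (item_id, seq).
QString upsertChunk(QSqlDatabase& db, const QString& itemId, int seq, const QString& content)
{
    QSqlQuery q(db);
    q.prepare(QStringLiteral(
        "INSERT INTO memory_chunks (item_id, seq, content, id) "
        "VALUES (:item_id::uuid, :seq, :content, gen_random_uuid()) "
        "ON CONFLICT (item_id, seq) DO UPDATE SET content = EXCLUDED.content "
        "RETURNING id::text;"));
    q.bindValue(QStringLiteral(":item_id"), itemId);
    q.bindValue(QStringLiteral(":seq"), seq);
    q.bindValue(QStringLiteral(":content"), content);
    return (q.exec() && q.next()) ? q.value(0).toString() : QString();
}

// const QString a = upsertChunk(db, itemId, 0, "v1");
// const QString b = upsertChunk(db, itemId, 0, "v2");  // same id as a, content now "v2"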
View File

@@ -55,6 +55,7 @@ CREATE TABLE IF NOT EXISTS memory_chunks (
created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE INDEX IF NOT EXISTS chunks_item_idx ON memory_chunks(item_id, ord);
CREATE UNIQUE INDEX IF NOT EXISTS ux_chunks_item_ord ON memory_chunks(item_id, ord);
-- Embeddings: one per chunk (per model)
CREATE TABLE IF NOT EXISTS embeddings (
@@ -84,4 +85,3 @@ DROP TRIGGER IF EXISTS trg_bump_revision ON memory_items;
CREATE TRIGGER trg_bump_revision
BEFORE UPDATE ON memory_items
FOR EACH ROW EXECUTE FUNCTION bump_revision();

View File

@@ -502,15 +502,20 @@ std::vector<std::string> PgDal::upsertChunks(const std::vector<ChunkRow>& chunks
if (stored.item_id.empty()) {
continue;
}
if (stored.id.empty()) {
stored.id = allocateId(nextChunkId_, "chunk_");
}
chunks_[stored.id] = stored;
// Enforce uniqueness by (item_id, ord) in memory as well
auto& bucket = chunksByItem_[stored.item_id];
if (!idsContains(bucket, stored.id)) {
bucket.push_back(stored.id);
std::string existingId;
for (const auto &cid : bucket) {
auto it = chunks_.find(cid);
if (it != chunks_.end() && it->second.ord == stored.ord) { existingId = cid; break; }
}
if (existingId.empty()) {
if (stored.id.empty()) stored.id = allocateId(nextChunkId_, "chunk_");
bucket.push_back(stored.id);
} else {
stored.id = existingId;
}
chunks_[stored.id] = stored;
ids.push_back(stored.id);
}
@@ -524,17 +529,16 @@ std::vector<std::string> PgDal::sqlUpsertChunks(const std::vector<ChunkRow>& chu
QSqlDatabase db = database();
QSqlQuery query(db);
query.prepare(QStringLiteral(
"INSERT INTO memory_chunks (id, item_id, seq, content) "
"VALUES (COALESCE(NULLIF(:id, '')::uuid, gen_random_uuid()), "
" :item_id::uuid, :seq, :content) "
"ON CONFLICT (id) DO UPDATE SET seq = EXCLUDED.seq, content = EXCLUDED.content "
"INSERT INTO memory_chunks (item_id, seq, content, id) "
"VALUES (:item_id::uuid, :seq, :content, COALESCE(NULLIF(:id, '')::uuid, gen_random_uuid())) "
"ON CONFLICT (item_id, seq) DO UPDATE SET content = EXCLUDED.content "
"RETURNING id::text;"));
for (const auto& chunk : chunks) {
query.bindValue(QStringLiteral(":id"), QString::fromStdString(chunk.id));
query.bindValue(QStringLiteral(":item_id"), QString::fromStdString(chunk.item_id));
query.bindValue(QStringLiteral(":seq"), chunk.ord);
query.bindValue(QStringLiteral(":content"), QString::fromStdString(chunk.text));
query.bindValue(QStringLiteral(":id"), QString::fromStdString(chunk.id));
if (!query.exec() || !query.next()) {
throw std::runtime_error(query.lastError().text().toStdString());

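The in-memory branch now mirrors that SQL behaviour: an upsert that repeats an existing (item_id, ord) pair reuses the stored chunk id rather than appending a second entry to the bucket. A rough sketch of how a test could exercise this, assuming PgDal lives in namespace ki next to ChunkRow and falls back to its in-memory store when no database is configured (header name and construction details are assumptions):

#include <cassert>
#include "PgDal.h"   // assumed header for ki::PgDal

int main()
{
    ki::PgDal dal;                 // assumption: no connection configured -> in-memory store

    ki::ChunkRow chunk;
    chunk.item_id = "item-1";      // hypothetical item id
    chunk.ord = 0;
    chunk.text = "first version";
    const auto first = dal.upsertChunks({chunk});

    chunk.text = "second version"; // same (item_id, ord), new content
    const auto second = dal.upsertChunks({chunk});

    // Deduplicated by (item_id, ord): both calls return the same single id.
    assert(first.size() == 1 && second.size() == 1);
    assert(first.front() == second.front());
    return 0;
}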
View File

@@ -340,6 +340,7 @@ struct ParsedItem {
std::vector<std::string> tags;
std::vector<float> embedding;
std::string rawJson;
std::string metadataJson;
};
inline std::vector<ParsedItem> parse_items(const std::string& json) {
@@ -351,6 +352,7 @@ inline std::vector<ParsedItem> parse_items(const std::string& json) {
item.text = extract_string_field(obj, "text");
item.tags = parse_string_array(obj, "tags");
item.embedding = parse_float_array(obj, "embedding");
if (auto meta = extract_json_value(obj, "metadata")) item.metadataJson = *meta; else item.metadataJson = "{}";
items.push_back(std::move(item));
}
return items;
@@ -781,3 +783,49 @@ inline std::string warm_cache(const std::string& reqJson) {
}
} // namespace Handlers
/**
* upsert_and_embed
* Request: { namespace, model?, items: [{id?, text, tags?, metadata?}] }
* Response: { upserted, embedded }
*/
inline std::string upsert_and_embed(const std::string& reqJson) {
const std::string nsName = detail::extract_string_field(reqJson, "namespace");
if (nsName.empty()) return detail::error_response("bad_request","namespace is required");
auto nsRow = detail::database().ensureNamespace(nsName);
if (!nsRow) return detail::error_response("internal_error","failed to ensure namespace");
auto items = detail::parse_items(reqJson);
if (items.empty()) return detail::error_response("bad_request","items array must contain at least one entry");
std::string model = detail::extract_string_field(reqJson, "model");
// Upsert items first and collect texts/ids
std::vector<std::string> itemIds; itemIds.reserve(items.size());
std::vector<std::string> texts; texts.reserve(items.size());
for (auto &it : items) {
ki::ItemRow row; row.id = it.id; row.namespace_id = nsRow->id; row.text = it.text;
row.tags = it.tags; row.revision = 1; row.metadata_json = it.metadataJson.empty()?"{}":it.metadataJson; row.content_json = it.rawJson;
const std::string id = detail::database().upsertItem(row);
itemIds.push_back(id); texts.push_back(it.text);
}
// Embed via libKI
KI::KIClient client; KI::OllamaProvider provider; client.setProvider(&provider);
KI::KIEmbedOptions opts; if (!model.empty()) opts.model = QString::fromStdString(model);
QStringList qtexts; for (auto &t : texts) qtexts.push_back(QString::fromStdString(t));
QEventLoop loop; QFuture<KI::KIEmbeddingResult> fut = client.embed(qtexts, opts);
QFutureWatcher<KI::KIEmbeddingResult> watcher; QObject::connect(&watcher, &QFutureWatcher<KI::KIEmbeddingResult>::finished, &loop, &QEventLoop::quit); watcher.setFuture(fut); loop.exec();
const KI::KIEmbeddingResult result = watcher.result();
// Upsert chunks + embeddings (ord=0)
int embedded = 0;
const int n = std::min((int)itemIds.size(), result.vectors.size());
for (int i = 0; i < n; ++i) {
ki::ChunkRow chunk; chunk.item_id = itemIds[(size_t)i]; chunk.ord = 0; chunk.text = texts[(size_t)i];
auto chunkIds = detail::database().upsertChunks(std::vector<ki::ChunkRow>{chunk}); if (chunkIds.empty()) continue;
ki::EmbeddingRow emb; emb.chunk_id = chunkIds.front(); emb.model = result.model.toStdString(); emb.dim = result.vectors[i].size();
emb.vector.assign(result.vectors[i].begin(), result.vectors[i].end());
detail::database().upsertEmbeddings(std::vector<ki::EmbeddingRow>{emb}); embedded++;
}
std::ostringstream os; os << "{\"upserted\":" << itemIds.size() << ",\"embedded\":" << embedded << "}"; return os.str();
}

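For orientation, a minimal sketch of driving the new handler directly with a request shaped like the docstring above. The header name is an assumption, the namespace, model, and text values are placeholders, the embed step needs a reachable Ollama instance, and a QCoreApplication must exist because the handler spins a QEventLoop:

#include <QCoreApplication>
#include <iostream>
#include <string>
#include "Handlers.h"   // assumed header exposing Handlers::upsert_and_embed

int main(int argc, char** argv)
{
    QCoreApplication app(argc, argv);   // required: the handler runs a QEventLoop

    // Two items, one with tags, one with metadata; "model" is optional and,
    // when omitted, the provider's default embedding model should be used.
    const std::string request = R"({
        "namespace": "notes",
        "model": "nomic-embed-text",
        "items": [
            { "text": "first note to embed", "tags": ["example"] },
            { "text": "second note to embed", "metadata": { "source": "sketch" } }
        ]
    })";

    // Expected success shape: {"upserted":2,"embedded":2}
    std::cout << Handlers::upsert_and_embed(request) << std::endl;
    return 0;
}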
View File

@@ -11,6 +11,7 @@ inline void register_default_tools(KomMcpServer& server) {
server.registerTool("kom.memory.v1.embed_text", Handlers::embed_text);
server.registerTool("kom.memory.v1.upsert_memory", Handlers::upsert_memory);
server.registerTool("kom.memory.v1.search_memory", Handlers::search_memory);
server.registerTool("kom.memory.v1.upsert_and_embed", Handlers::upsert_and_embed);
server.registerTool("kom.memory.v1.warm_cache", Handlers::warm_cache);
server.registerTool("kom.local.v1.backup.export_encrypted", Handlers::backup_export_encrypted);
server.registerTool("kom.local.v1.backup.import_encrypted", Handlers::backup_import_encrypted);

View File

@@ -365,6 +365,41 @@
"additionalProperties": false
}
},
"upsert_and_embed": {
"description": "Upsert items and compute embeddings for each item (ord=0 chunk).",
"input": {
"type": "object",
"properties": {
"namespace": { "type": "string" },
"model": { "type": "string" },
"items": {
"type": "array",
"items": {
"type": "object",
"properties": {
"id": { "type": "string" },
"text": { "type": "string" },
"tags": { "$ref": "#/$defs/stringList" },
"metadata": { "type": "object" }
},
"required": ["text"],
"additionalProperties": false
}
}
},
"required": ["namespace","items"],
"additionalProperties": false
},
"output": {
"type": "object",
"properties": {
"upserted": { "type": "integer" },
"embedded": { "type": "integer" }
},
"required": ["upserted","embedded"],
"additionalProperties": false
}
},
"kom.local.v1.backup.export_encrypted": {
"description": "Queue an encrypted backup export for the requested namespaces.",
"input": {