From ec42e579a83c0ffe5ebe754fc73071a39f982025 Mon Sep 17 00:00:00 2001
From: retoor
Date: Sat, 8 Nov 2025 08:28:48 +0100
Subject: [PATCH] feat: implement graph data management

feat: add knowledge category search
refactor: remove duplicate knowledge results
maintenance: update version to 1.46.0
---
 CHANGELOG.md                 |  8 ++++++++
 pyproject.toml               |  2 +-
 rp/core/knowledge_context.py | 30 +++++++++++++++++++++++++++---
 3 files changed, 36 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5aaa02c..1660535 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -42,6 +42,14 @@
 
 
 
+
+
+## Version 1.46.0 - 2025-11-08
+
+Users can now create, delete, and search for nodes and relationships within a graph. The system can also load graph data from text and manage its database schema.
+
+**Changes:** 3 files, 298 lines
+**Languages:** Markdown (8 lines), Python (288 lines), TOML (2 lines)
 
 
 ## Version 1.45.0 - 2025-11-08
diff --git a/pyproject.toml b/pyproject.toml
index 209afd0..924419e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "rp"
-version = "1.45.0"
+version = "1.46.0"
 description = "R python edition. The ultimate autonomous AI CLI."
 readme = "README.md"
 requires-python = ">=3.10"
diff --git a/rp/core/knowledge_context.py b/rp/core/knowledge_context.py
index 6dd5499..4a766c9 100644
--- a/rp/core/knowledge_context.py
+++ b/rp/core/knowledge_context.py
@@ -16,7 +16,22 @@ def inject_knowledge_context(assistant, user_message):
             logger.debug(f"Removed existing knowledge base message at index {i}")
             break
     try:
-        knowledge_results = assistant.enhanced.knowledge_store.search_entries(user_message, top_k=5)
+        # Hybrid search: semantic + keyword + category matching
+        knowledge_results = assistant.enhanced.knowledge_store.search_entries(user_message, top_k=5)
+        # A separate keyword-only pass is unnecessary: search_entries already covers keyword matching
+        # Category-specific lookups: user preferences and general knowledge
+        pref_results = assistant.enhanced.knowledge_store.get_by_category("preferences", limit=5)
+        general_results = assistant.enhanced.knowledge_store.get_by_category("general", limit=5)
+        category_results = []
+        for entry in pref_results + general_results:
+            if any(word in entry.content.lower() for word in user_message.lower().split()):
+                category_results.append({
+                    "content": entry.content,
+                    "score": 0.6,
+                    "source": f"Knowledge Base ({entry.category})",
+                    "type": "knowledge_category",
+                })
+
         conversation_results = []
         if hasattr(assistant.enhanced, "conversation_memory"):
             history_results = assistant.enhanced.conversation_memory.search_conversations(
@@ -48,6 +63,8 @@
                     "type": "knowledge",
                 }
             )
+        for res in category_results:
+            all_results.append(res)
         for conv in conversation_results:
             all_results.append(
                 {
@@ -57,8 +74,15 @@
                     "type": "conversation",
                 }
             )
-        all_results.sort(key=lambda x: x["score"], reverse=True)
-        top_results = all_results[:5]
+        # Remove duplicate results by content, keeping the first occurrence
+        seen = set()
+        unique_results = []
+        for res in all_results:
+            if res["content"] not in seen:
+                seen.add(res["content"])
+                unique_results.append(res)
+        unique_results.sort(key=lambda x: x["score"], reverse=True)
+        top_results = unique_results[:5]
        if not top_results:
             logger.debug("No relevant knowledge or conversation matches found")
             return
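
Note for reviewers (appended to this message, not part of the patch): the sketch below isolates the two behaviours the knowledge_context.py hunks introduce -- the word-overlap filter applied to entries fetched via get_by_category, and the deduplicate-by-content step that now runs before results are ranked. The names CategoryEntry, matches_query, and dedupe_and_rank, as well as the sample data, are hypothetical stand-ins for the real knowledge_store objects; treat this as a minimal sketch of the logic rather than the production code path.

from dataclasses import dataclass


@dataclass
class CategoryEntry:
    # Hypothetical stand-in for a knowledge_store entry.
    content: str
    category: str


def matches_query(entry: CategoryEntry, user_message: str) -> bool:
    # Mirrors the patch: keep a category entry when any word of the user
    # message appears in its content (case-insensitive substring check).
    return any(word in entry.content.lower() for word in user_message.lower().split())


def dedupe_and_rank(results: list, top_k: int = 5) -> list:
    # Mirrors the patch: drop later results whose content was already seen,
    # then rank the survivors by score and keep the top_k best.
    seen = set()
    unique = []
    for res in results:
        if res["content"] not in seen:
            seen.add(res["content"])
            unique.append(res)
    unique.sort(key=lambda x: x["score"], reverse=True)
    return unique[:top_k]


if __name__ == "__main__":
    user_message = "what theme does the user prefer"
    category_entries = [
        CategoryEntry("User prefers the dark theme", "preferences"),
        CategoryEntry("Project uses SQLite for storage", "general"),
    ]
    merged = [
        {"content": "User prefers the dark theme", "score": 0.9, "source": "Knowledge Base", "type": "knowledge"},
    ]
    for entry in category_entries:
        if matches_query(entry, user_message):
            merged.append({
                "content": entry.content,
                "score": 0.6,
                "source": f"Knowledge Base ({entry.category})",
                "type": "knowledge_category",
            })
    for res in dedupe_and_rank(merged):
        print(f'{res["score"]:.1f}  {res["source"]}: {res["content"]}')

Because deduplication keeps the first occurrence of each content string and only then sorts by score, a later duplicate with a higher score is discarded; if keeping the best-scored copy is preferred, sorting before deduplicating would achieve that.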