feat: implement graph data management

feat: add knowledge category search
refactor: remove duplicate knowledge results
maintenance: update version to 1.47.0
This commit is contained in:
retoor 2025-11-08 08:28:48 +01:00
parent 6a6df697fd
commit ec42e579a8
3 changed files with 36 additions and 4 deletions

View File

@@ -42,6 +42,14 @@
## Version 1.46.0 - 2025-11-08
Users can now create, delete, and search for nodes and relationships within a graph. The system can also load graph data from text and manage its database schema.
**Changes:** 3 files, 298 lines
**Languages:** Markdown (8 lines), Python (288 lines), TOML (2 lines)
## Version 1.45.0 - 2025-11-08

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project] [project]
name = "rp" name = "rp"
version = "1.45.0" version = "1.46.0"
description = "R python edition. The ultimate autonomous AI CLI." description = "R python edition. The ultimate autonomous AI CLI."
readme = "README.md" readme = "README.md"
requires-python = ">=3.10" requires-python = ">=3.10"

View File

@@ -16,7 +16,22 @@ def inject_knowledge_context(assistant, user_message):
logger.debug(f"Removed existing knowledge base message at index {i}") logger.debug(f"Removed existing knowledge base message at index {i}")
break break
try: try:
knowledge_results = assistant.enhanced.knowledge_store.search_entries(user_message, top_k=5) # Run all search methods
knowledge_results = assistant.enhanced.knowledge_store.search_entries(user_message, top_k=5) # Hybrid semantic + keyword + category
# Additional keyword search if needed (but already in hybrid)
# Category-specific: preferences and general
pref_results = assistant.enhanced.knowledge_store.get_by_category("preferences", limit=5)
general_results = assistant.enhanced.knowledge_store.get_by_category("general", limit=5)
category_results = []
for entry in pref_results + general_results:
if any(word in entry.content.lower() for word in user_message.lower().split()):
category_results.append({
"content": entry.content,
"score": 0.6,
"source": f"Knowledge Base ({entry.category})",
"type": "knowledge_category",
})
conversation_results = [] conversation_results = []
if hasattr(assistant.enhanced, "conversation_memory"): if hasattr(assistant.enhanced, "conversation_memory"):
history_results = assistant.enhanced.conversation_memory.search_conversations( history_results = assistant.enhanced.conversation_memory.search_conversations(
@@ -48,6 +63,8 @@ def inject_knowledge_context(assistant, user_message):
"type": "knowledge", "type": "knowledge",
} }
) )
for res in category_results:
all_results.append(res)
for conv in conversation_results: for conv in conversation_results:
all_results.append( all_results.append(
{ {
@@ -57,8 +74,15 @@ def inject_knowledge_context(assistant, user_message):
"type": "conversation", "type": "conversation",
} }
) )
all_results.sort(key=lambda x: x["score"], reverse=True) # Remove duplicates by content
top_results = all_results[:5] seen = set()
unique_results = []
for res in all_results:
if res["content"] not in seen:
seen.add(res["content"])
unique_results.append(res)
unique_results.sort(key=lambda x: x["score"], reverse=True)
top_results = unique_results[:5]
if not top_results: if not top_results:
logger.debug("No relevant knowledge or conversation matches found") logger.debug("No relevant knowledge or conversation matches found")
return return