Compare commits
No commits in common. "164510896e0b823c68777873f1057ce53abc52d3" and "1f0444d8c1150f06aa854cfbc82fe7f5e69e9b0e" have entirely different histories.
164510896e...1f0444d8c1
CHANGELOG.md (16 lines changed)

@@ -23,22 +23,6 @@
 
 
 
 
-
-
-## Version 1.28.0 - 2025-11-08
-
-This release introduces new features like advanced input for the assistant and collaboration agents, along with improved logging and error handling. Several internal components have been updated for better performance, stability, and maintainability.
-
-**Changes:** 54 files, 638 lines
-**Languages:** Other (10 lines), Python (626 lines), TOML (2 lines)
-
-## Version 1.27.0 - 2025-11-08
-
-The project has been renamed to "Reetor's Guide to Modern Python" and now includes a comprehensive tutorial. The README has been significantly updated with installation instructions, a quick start guide, and information on modern Python features and aiohttp.
-
-**Changes:** 3 files, 2728 lines
-**Languages:** Markdown (2726 lines), TOML (2 lines)
-
 ## Version 1.26.0 - 2025-11-08
 
Makefile (10 lines changed)

@@ -27,16 +27,16 @@ test:
 	pytest tests/ -v --tb=long --full-trace -l --maxfail=10
 
 test-cov:
-	pytest --cov=rp --cov-report=html --cov-report=term-missing
+	pytest --cov=pr --cov-report=html --cov-report=term-missing
 	@echo "Coverage report generated in htmlcov/index.html"
 
 lint:
-	flake8 rp tests --max-line-length=100 --ignore=E203,W503
-	mypy rp --ignore-missing-imports
+	flake8 pr tests --max-line-length=100 --ignore=E203,W503
+	mypy pr --ignore-missing-imports
 
 format:
-	black rp tests
-	isort rp tests --profile black
+	black pr tests
+	isort pr tests --profile black
 
 clean:
 	rm -rf build/
@@ -27,7 +27,6 @@ Version Requirements:
 11. [Git Protocol Integration](#git-protocol-integration)
 12. [Repository Manager Implementation](#repository-manager-implementation)
 13. [Best Practices and Patterns](#best-practices-and-patterns)
-14. [Automatic Memory and Context Search](#automatic-memory-and-context-search)
 
 ---
 
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "rp"
-version = "1.28.0"
+version = "1.26.0"
 description = "R python edition. The ultimate autonomous AI CLI."
 readme = "README.md"
 requires-python = ">=3.10"
@@ -6,4 +6,5 @@ gitpython==3.1.43
 websockets==13.0.1
 pytest==8.3.2
 bcrypt==4.1.3
 python-slugify==8.0.4
+requests>=2.31.0
@@ -1,6 +1,5 @@
 import argparse
 import sys
-
 from rp import __version__
 from rp.core import Assistant
 
@@ -2,7 +2,6 @@ import time
 import uuid
 from dataclasses import dataclass, field
 from typing import Any, Callable, Dict, List, Optional
-
 from ..memory.knowledge_store import KnowledgeStore
 from .agent_communication import AgentCommunicationBus, AgentMessage, MessageType
 from .agent_roles import AgentRole, get_agent_role
@@ -1,7 +1,6 @@
 import json
 import logging
 import time
-
 from rp.autonomous.detection import is_task_complete
 from rp.core.api import call_api
 from rp.core.context import truncate_tool_result
@@ -14,7 +13,7 @@ logger = logging.getLogger("rp")
 def run_autonomous_mode(assistant, task):
     assistant.autonomous_mode = True
     assistant.autonomous_iterations = 0
-    logger.debug("=== AUTONOMOUS MODE START ===")
+    logger.debug(f"=== AUTONOMOUS MODE START ===")
     logger.debug(f"Task: {task}")
     from rp.core.knowledge_context import inject_knowledge_context
 
@@ -103,9 +102,7 @@ def process_response_autonomous(assistant, response):
         input_tokens = usage.get("prompt_tokens", 0)
         output_tokens = usage.get("completion_tokens", 0)
         assistant.usage_tracker.track_request(assistant.model, input_tokens, output_tokens)
-        cost = assistant.usage_tracker._calculate_cost(
-            assistant.model, input_tokens, output_tokens
-        )
+        cost = assistant.usage_tracker._calculate_cost(assistant.model, input_tokens, output_tokens)
         total_cost = assistant.usage_tracker.session_usage["estimated_cost"]
         print(f"{Colors.YELLOW}💰 Cost: ${cost:.4f} | Total: ${total_cost:.4f}{Colors.RESET}")
         return process_response_autonomous(assistant, follow_up)
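
The `_calculate_cost` call above turns the `prompt_tokens` and `completion_tokens` counts from the API usage block into a dollar figure. A minimal sketch of that arithmetic, with placeholder prices (the project's real rate table is not shown in this diff):

    # Illustrative only: the per-million-token rates are made up,
    # not values taken from this repository.
    PRICING = {"example-model": {"input": 3.00, "output": 15.00}}  # USD per 1M tokens

    def estimate_cost(model: str, input_tokens: int, output_tokens: int) -> float:
        rates = PRICING.get(model, {"input": 0.0, "output": 0.0})
        return (input_tokens * rates["input"] + output_tokens * rates["output"]) / 1_000_000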
@@ -126,9 +123,6 @@ def execute_single_tool(assistant, func_name, arguments):
         db_get,
         db_query,
         db_set,
-        editor_insert_text,
-        editor_replace_text,
-        editor_search,
         getpwd,
         http_fetch,
         index_source_directory,
@@ -1,13 +1,12 @@
 import json
 import time
-
-from rp.autonomous import run_autonomous_mode
 from rp.commands.multiplexer_commands import MULTIPLEXER_COMMANDS
+from rp.autonomous import run_autonomous_mode
 from rp.core.api import list_models
-from rp.editor import RPEditor
 from rp.tools import read_file
 from rp.tools.base import get_tools_definition
 from rp.ui import Colors
+from rp.editor import RPEditor
 
 
 def handle_command(assistant, command):
@@ -264,7 +263,7 @@ def collaborate_agents_command(assistant, task):
     roles = ["coding", "research", "planning"]
     result = assistant.enhanced.collaborate_agents(task, roles)
     print(f"\n{Colors.GREEN}Collaboration completed{Colors.RESET}")
-    print("\nOrchestrator response:")
+    print(f"\nOrchestrator response:")
     if "orchestrator" in result and "response" in result["orchestrator"]:
         print(result["orchestrator"]["response"])
     if result.get("agents"):
@@ -296,7 +295,6 @@ def store_knowledge(assistant, content):
         return
     import time
     import uuid
-
     from rp.memory import KnowledgeEntry
 
     categories = assistant.enhanced.fact_extractor.categorize_content(content)
@@ -7,9 +7,7 @@ from rp.core.http_client import http_client
 logger = logging.getLogger("rp")
 
 
-def call_api(
-    messages, model, api_url, api_key, use_tools, tools_definition, verbose=False, db_conn=None
-):
+def call_api(messages, model, api_url, api_key, use_tools, tools_definition, verbose=False, db_conn=None):
     try:
         messages = auto_slim_messages(messages, verbose=verbose)
         logger.debug(f"=== API CALL START ===")
@@ -40,14 +38,11 @@ def call_api(
         if db_conn:
-
             from rp.tools.database import log_api_request
 
             log_result = log_api_request(model, api_url, request_json, db_conn)
             if log_result.get("status") != "success":
                 logger.warning(f"Failed to log API request: {log_result.get('error')}")
         logger.debug("Sending HTTP request...")
-        response = http_client.post(
-            api_url, headers=headers, json_data=request_json, db_conn=db_conn
-        )
+        response = http_client.post(api_url, headers=headers, json_data=request_json, db_conn=db_conn)
         if response.get("error"):
             if "status" in response:
                 logger.error(f"API HTTP Error: {response['status']} - {response.get('text', '')}")
@@ -97,8 +92,7 @@ def list_models(model_list_url, api_key):
         response = http_client.get(model_list_url, headers=headers, db_conn=None)
         if response.get("error"):
             return {"error": response.get("text", "HTTP error")}
-        response_data = response["text"]
-        data = json.loads(response_data)
+        data = json.loads(response["text"])
        return data.get("data", [])
     except Exception as e:
         return {"error": str(e)}
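
Both hunks above lean on the dict contract that `http_client` returns: a success carries `status`, `headers`, `text`, and `json`, while a failure sets `error`. A small consumer of that contract, assuming only what the diff shows (the URL is a placeholder):

    import json

    from rp.core.http_client import http_client

    response = http_client.get("https://api.example.com/v1/models", db_conn=None)
    if response.get("error"):
        print("request failed:", response.get("text", "HTTP error"))
    else:
        # Same parsing step list_models performs on the raw body.
        data = json.loads(response["text"])
        print([m.get("id") for m in data.get("data", [])])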
@@ -8,8 +8,8 @@ import sqlite3
 import sys
 import traceback
 from concurrent.futures import ThreadPoolExecutor
-
 from rp.commands import handle_command
+from rp.input_handler import get_advanced_input
 from rp.config import (
     DB_PATH,
     DEFAULT_API_URL,
@@ -23,7 +23,6 @@ from rp.core.autonomous_interactions import start_global_autonomous, stop_global
 from rp.core.background_monitor import get_global_monitor, start_global_monitor, stop_global_monitor
 from rp.core.context import init_system_message, truncate_tool_result
 from rp.core.usage_tracker import UsageTracker
-from rp.input_handler import get_advanced_input
 from rp.tools import get_tools_definition
 from rp.tools.agents import (
     collaborate_agents,
@@ -33,7 +32,7 @@ from rp.tools.agents import (
     remove_agent,
 )
 from rp.tools.command import kill_process, run_command, tail_process
-from rp.tools.database import db_get, db_query, db_set
+from rp.tools.database import db_get, db_query, db_set, log_api_request
 from rp.tools.filesystem import (
     chdir,
     clear_edit_tracker,
@@ -108,9 +107,6 @@ class Assistant:
         self.background_tasks = set()
         self.last_result = None
         self.init_database()
-        from rp.memory import KnowledgeStore, FactExtractor
-        self.knowledge_store = KnowledgeStore(DB_PATH)
-        self.fact_extractor = FactExtractor()
         self.messages.append(init_system_message(args))
         try:
             from rp.core.enhanced_assistant import EnhancedAssistant
@@ -402,7 +398,7 @@ class Assistant:
             except:
                 pass
             prompt += f">{Colors.RESET} "
-            user_input = get_advanced_input(prompt) or ""
+            user_input = get_advanced_input(prompt)
             user_input = user_input.strip()
             if not user_input:
                 continue
@@ -435,7 +431,7 @@ class Assistant:
         process_message(self, message)
-
     def run_autonomous(self):
+
         if self.args.message:
             task = self.args.message
         else:
@@ -445,7 +441,6 @@ class Assistant:
             print("No task provided. Exiting.")
             return
         from rp.autonomous import run_autonomous_mode
-
         run_autonomous_mode(self, task)
 
     def cleanup(self):
@@ -472,7 +467,7 @@ class Assistant:
     def run(self):
         try:
             if self.args.autonomous:
                 self.run_autonomous()
             elif self.args.interactive or (not self.args.message and sys.stdin.isatty()):
                 self.run_repl()
             else:
@@ -485,26 +480,6 @@ def process_message(assistant, message):
     from rp.core.knowledge_context import inject_knowledge_context
 
     inject_knowledge_context(assistant, message)
-    # Save the user message as a fact
-    import time
-    import uuid
-    from rp.memory import KnowledgeEntry
-
-    categories = assistant.fact_extractor.categorize_content(message)
-    entry_id = str(uuid.uuid4())[:16]
-    entry = KnowledgeEntry(
-        entry_id=entry_id,
-        category=categories[0] if categories else "user_message",
-        content=message,
-        metadata={
-            "type": "user_message",
-            "confidence": 1.0,
-            "source": "user_input",
-        },
-        created_at=time.time(),
-        updated_at=time.time(),
-    )
-    assistant.knowledge_store.add_entry(entry)
     assistant.messages.append({"role": "user", "content": message})
     logger.debug(f"Processing user message: {message[:100]}...")
     logger.debug(f"Current message count: {len(assistant.messages)}")
@@ -1,6 +1,5 @@
 import threading
 import time
-
 from rp.tools.interactive_control import (
     get_session_status,
     list_active_sessions,
@@ -1,7 +1,6 @@
 import queue
 import threading
 import time
-
 from rp.multiplexer import get_all_multiplexer_states, get_multiplexer
 
 
@@ -1,7 +1,6 @@
 import configparser
 import os
 from typing import Any, Dict
-
 from rp.core.logging import get_logger
 
 logger = get_logger("config")
@@ -2,7 +2,6 @@ import json
 import logging
 import os
 import pathlib
-
 from rp.config import (
     CHARS_PER_TOKEN,
     CONTENT_TRIM_LENGTH,
@@ -10,10 +9,10 @@ from rp.config import (
     CONTEXT_FILE,
     EMERGENCY_MESSAGES_TO_KEEP,
     GLOBAL_CONTEXT_FILE,
-    KNOWLEDGE_PATH,
     MAX_TOKENS_LIMIT,
     MAX_TOOL_RESULT_LENGTH,
     RECENT_MESSAGES_TO_KEEP,
+    KNOWLEDGE_PATH,
 )
 from rp.ui import Colors
 
@@ -61,7 +60,7 @@ def init_system_message(args):
    for context_file in [CONTEXT_FILE, GLOBAL_CONTEXT_FILE]:
        if os.path.exists(context_file):
            try:
-                with open(context_file, encoding="utf-8", errors="replace") as f:
+                with open(context_file) as f:
                    content = f.read()
                    if len(content) > max_context_size:
                        content = content[:max_context_size] + "\n... [truncated]"
@@ -72,7 +71,7 @@ def init_system_message(args):
    if knowledge_path.exists() and knowledge_path.is_dir():
        for knowledge_file in knowledge_path.iterdir():
            try:
-                with open(knowledge_file, encoding="utf-8", errors="replace") as f:
+                with open(knowledge_file) as f:
                    content = f.read()
                    if len(content) > max_context_size:
                        content = content[:max_context_size] + "\n... [truncated]"
@@ -82,7 +81,7 @@ def init_system_message(args):
    if args.context:
        for ctx_file in args.context:
            try:
-                with open(ctx_file, encoding="utf-8", errors="replace") as f:
+                with open(ctx_file) as f:
                    content = f.read()
                    if len(content) > max_context_size:
                        content = content[:max_context_size] + "\n... [truncated]"
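
All three hunks in `init_system_message` make the same change: the left-hand version opens context and knowledge files with an explicit `encoding="utf-8", errors="replace"`, while the right-hand version falls back to `open()`'s locale-dependent default. A short illustration of what `errors="replace"` buys (the file name is hypothetical):

    # A context file containing a byte that is not valid UTF-8.
    with open("context.txt", "wb") as f:
        f.write(b"ok \xff bytes")

    # errors="replace" maps undecodable bytes to U+FFFD instead of raising,
    # so a stray binary byte cannot crash system-message construction.
    with open("context.txt", encoding="utf-8", errors="replace") as f:
        print(f.read())  # prints: ok <U+FFFD> bytes

    # The bare open() decodes with the platform's preferred encoding and can
    # raise UnicodeDecodeError on the same input.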
@@ -1,9 +1,7 @@
 import json
 import logging
-import time
 import uuid
 from typing import Any, Dict, List, Optional
-
 from rp.agents import AgentManager
 from rp.cache import APICache, ToolCache
 from rp.config import (
@@ -18,7 +16,7 @@ from rp.config import (
 )
 from rp.core.advanced_context import AdvancedContextManager
 from rp.core.api import call_api
-from rp.memory import ConversationMemory, FactExtractor, KnowledgeStore, KnowledgeEntry
+from rp.memory import ConversationMemory, FactExtractor, KnowledgeStore
 from rp.tools.base import get_tools_definition
 from rp.workflows import WorkflowEngine, WorkflowStorage
 
@@ -133,6 +131,9 @@ class EnhancedAssistant:
         facts = self.fact_extractor.extract_facts(user_message)
         for fact in facts[:5]:
             entry_id = str(uuid.uuid4())[:16]
+            import time
+            from rp.memory import KnowledgeEntry
+
             categories = self.fact_extractor.categorize_content(fact["text"])
             entry = KnowledgeEntry(
                 entry_id=entry_id,
@@ -147,23 +148,6 @@ class EnhancedAssistant:
                 updated_at=time.time(),
             )
             self.knowledge_store.add_entry(entry)
-
-        # Save the entire user message as a fact
-        entry_id = str(uuid.uuid4())[:16]
-        categories = self.fact_extractor.categorize_content(user_message)
-        entry = KnowledgeEntry(
-            entry_id=entry_id,
-            category=categories[0] if categories else "user_message",
-            content=user_message,
-            metadata={
-                "type": "user_message",
-                "confidence": 1.0,
-                "source": "user_input",
-            },
-            created_at=time.time(),
-            updated_at=time.time(),
-        )
-        self.knowledge_store.add_entry(entry)
         if self.context_manager and ADVANCED_CONTEXT_ENABLED:
             enhanced_messages, context_info = self.context_manager.create_enhanced_context(
                 self.base.messages, user_message, include_knowledge=True
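
The deleted block repeated, for the whole user message, the same `KnowledgeEntry` construction that the loop above performs per extracted fact. The shared shape could be factored into a helper; a sketch under that assumption (`_save_fact` is hypothetical and exists in neither commit):

    import time
    import uuid

    def _save_fact(store, extractor, text, kind="user_message"):
        # Hypothetical consolidation of the duplicated entry-building code.
        from rp.memory import KnowledgeEntry

        categories = extractor.categorize_content(text)
        entry = KnowledgeEntry(
            entry_id=str(uuid.uuid4())[:16],
            category=categories[0] if categories else kind,
            content=text,
            metadata={"type": kind, "confidence": 1.0, "source": "user_input"},
            created_at=time.time(),
            updated_at=time.time(),
        )
        store.add_entry(entry)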
@@ -1,59 +1,16 @@
 import json
 import logging
-import random
+import time
+import requests
 from typing import Dict, Any, Optional
-
-import requests
-
 logger = logging.getLogger("rp")
 
-# Realistic User-Agents and headers
-USER_AGENTS = [
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
-    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15",
-    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:89.0) Gecko/20100101 Firefox/89.0",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Edge/91.0.864.59",
-    "Mozilla/5.0 (iPhone; CPU iPhone OS 14_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Mobile/15E148 Safari/604.1",
-    "Mozilla/5.0 (iPad; CPU OS 14_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Mobile/15E148 Safari/604.1",
-    "Mozilla/5.0 (Android 11; Mobile; rv:68.0) Gecko/68.0 Firefox/88.0",
-]
-
-
-def get_realistic_headers(additional_headers=None):
-    """Generate realistic HTTP headers with random User-Agent and variations."""
-    accept_languages = [
-        "en-US,en;q=0.5",
-        "en-US,en;q=0.9",
-        "en-GB,en;q=0.5",
-        "en-US,en;q=0.5;fr;q=0.3",
-    ]
-    headers = {
-        "User-Agent": random.choice(USER_AGENTS),
-        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
-        "Accept-Language": random.choice(accept_languages),
-        "Accept-Encoding": "gzip, deflate, br",
-        "DNT": "1",
-        "Connection": "keep-alive",
-        "Upgrade-Insecure-Requests": "1",
-    }
-    # Sometimes add Cache-Control
-    if random.random() < 0.3:
-        headers["Cache-Control"] = "no-cache"
-    # Sometimes add Referer
-    if random.random() < 0.2:
-        headers["Referer"] = "https://www.google.com/"
-    if additional_headers:
-        headers.update(additional_headers)
-    return headers
-
-
 class SyncHTTPClient:
 
     def __init__(self):
-        self.default_headers = {}
+        self.session = requests.Session()
 
     def request(
         self,
@@ -65,87 +22,58 @@ class SyncHTTPClient:
         timeout: float = 30.0,
         db_conn=None,
     ) -> Dict[str, Any]:
-        if headers is None:
-            headers = get_realistic_headers()
-        else:
-            headers = get_realistic_headers(headers)
-
-        request_body_for_log = ""
-        if json_data is not None:
-            request_body_for_log = json.dumps(json_data)
-        elif data is not None:
-            request_body_for_log = data.decode("utf-8") if isinstance(data, bytes) else str(data)
-
-        try:
-            response = requests.request(
-                method,
-                url,
-                headers=headers,
-                data=data,
-                json=json_data,
-                timeout=timeout,
-                allow_redirects=True,
-            )
-            response.raise_for_status()  # Raise HTTPError for bad responses (4xx or 5xx)
-
-            response_data = response.text
-            response_headers = dict(response.headers)
-
-            if db_conn:
-                from rp.tools.database import log_http_request
-
-                log_result = log_http_request(
+        """Make a sync HTTP request using requests with retry logic."""
+        attempt = 0
+        start_time = time.time()
+        while True:
+            attempt += 1
+            try:
+                response = self.session.request(
                     method,
                     url,
-                    request_body_for_log,
-                    response_data,
-                    response.status_code,
-                    db_conn,
+                    headers=headers,
+                    data=data,
+                    json=json_data,
+                    timeout=timeout,
                 )
-                if log_result.get("status") != "success":
-                    logger.warning(f"Failed to log HTTP request: {log_result.get('error')}")
-
-            return {
-                "status": response.status_code,
-                "headers": response_headers,
-                "text": response_data,
-                "json": response.json,
-            }
-        except requests.exceptions.HTTPError as e:
-            response_data = e.response.text if e.response else ""
-            response_headers = dict(e.response.headers) if e.response else {}
-            status_code = e.response.status_code if e.response else 0
-
-            if db_conn:
-                from rp.tools.database import log_http_request
-
-                log_result = log_http_request(
-                    method,
-                    url,
-                    request_body_for_log,
-                    response_data,
-                    status_code,
-                    db_conn,
+                response.raise_for_status()  # Raise an exception for bad status codes
+                # Prepare request body for logging
+                if json_data is not None:
+                    request_body = json.dumps(json_data)
+                elif data is not None:
+                    request_body = data.decode('utf-8') if isinstance(data, bytes) else str(data)
+                else:
+                    request_body = ""
+                # Log the request
+                if db_conn:
+                    from rp.tools.database import log_http_request
+                    log_result = log_http_request(method, url, request_body, response.text, response.status_code, db_conn)
+                    if log_result.get("status") != "success":
+                        logger.warning(f"Failed to log HTTP request: {log_result.get('error')}")
+                return {
+                    "status": response.status_code,
+                    "headers": dict(response.headers),
+                    "text": response.text,
+                    "json": response.json,
+                }
+            except requests.exceptions.Timeout:
+                elapsed = time.time() - start_time
+                elapsed_minutes = int(elapsed // 60)
+                elapsed_seconds = elapsed % 60
+                duration_str = (
+                    f"{elapsed_minutes}m {elapsed_seconds:.1f}s"
+                    if elapsed_minutes > 0
+                    else f"{elapsed_seconds:.1f}s"
                 )
-                if log_result.get("status") != "success":
-                    logger.warning(f"Failed to log HTTP request: {log_result.get('error')}")
-
-            return {
-                "status": status_code,
-                "headers": response_headers,
-                "text": response_data,
-                "json": lambda: e.response.json() if e.response and response_data else None,
-            }
-        except requests.exceptions.RequestException as e:
-            logger.error(f"Request failed: {e}")
-            return {"error": True, "exception": str(e), "status": 0, "text": ""}
+                logger.warning(
+                    f"Request timed out (attempt {attempt}, duration: {duration_str}). Retrying in {attempt} second(s)..."
+                )
+                time.sleep(attempt)
+            except requests.exceptions.RequestException as e:
+                return {"error": True, "exception": str(e)}
 
     def get(
-        self,
-        url: str,
-        headers: Optional[Dict[str, str]] = None,
-        timeout: float = 30.0,
-        db_conn=None,
+        self, url: str, headers: Optional[Dict[str, str]] = None, timeout: float = 30.0, db_conn=None
     ) -> Dict[str, Any]:
         return self.request("GET", url, headers=headers, timeout=timeout, db_conn=db_conn)
 
@@ -159,17 +87,11 @@ class SyncHTTPClient:
         db_conn=None,
     ) -> Dict[str, Any]:
         return self.request(
-            "POST",
-            url,
-            headers=headers,
-            data=data,
-            json_data=json_data,
-            timeout=timeout,
-            db_conn=db_conn,
+            "POST", url, headers=headers, data=data, json_data=json_data, timeout=timeout, db_conn=db_conn
         )
 
     def set_default_headers(self, headers: Dict[str, str]):
-        self.default_headers.update(headers)
+        self.session.headers.update(headers)
 
 
 http_client = SyncHTTPClient()
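
The rewritten `request` swaps the one-shot `requests.request` call with randomized browser-like headers for a shared `requests.Session` plus a retry loop: on `Timeout` it logs, sleeps `attempt` seconds (linear backoff), and tries again with no upper bound, while any other `RequestException` returns an error dict immediately. A standalone sketch of that loop, with a `max_attempts` cap added for illustration (the code in the diff retries forever):

    import time
    import requests

    def request_with_retry(session, method, url, timeout=30.0, max_attempts=5):
        attempt = 0
        while True:
            attempt += 1
            try:
                response = session.request(method, url, timeout=timeout)
                response.raise_for_status()
                return {"status": response.status_code, "text": response.text}
            except requests.exceptions.Timeout:
                if attempt >= max_attempts:  # the cap is an addition, not in the diff
                    return {"error": True, "exception": "timed out"}
                time.sleep(attempt)  # linear backoff, as in the diff
            except requests.exceptions.RequestException as e:
                return {"error": True, "exception": str(e)}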
@@ -1,7 +1,6 @@
 import logging
 import os
 from logging.handlers import RotatingFileHandler
-
 from rp.config import LOG_FILE
 
 
@@ -2,7 +2,6 @@ import json
 import os
 from datetime import datetime
 from typing import Dict, List, Optional
-
 from rp.core.logging import get_logger
 
 logger = get_logger("session")
@@ -39,7 +38,7 @@ class SessionManager:
         if not os.path.exists(session_file):
             logger.warning(f"Session not found: {name}")
             return None
-        with open(session_file, encoding="utf-8") as f:
+        with open(session_file) as f:
             session_data = json.load(f)
         logger.info(f"Session loaded: {name}")
         return session_data
@@ -54,7 +53,7 @@ class SessionManager:
             if filename.endswith(".json"):
                 filepath = os.path.join(SESSIONS_DIR, filename)
                 try:
-                    with open(filepath, encoding="utf-8") as f:
+                    with open(filepath) as f:
                         data = json.load(f)
                     sessions.append(
                         {
@@ -2,7 +2,6 @@ import json
 import os
 from datetime import datetime
 from typing import Dict, Optional
-
 from rp.core.logging import get_logger
 
 logger = get_logger("usage")
@@ -69,7 +68,7 @@ class UsageTracker:
         try:
             history = []
             if os.path.exists(USAGE_DB_FILE):
-                with open(USAGE_DB_FILE, encoding="utf-8") as f:
+                with open(USAGE_DB_FILE) as f:
                     history = json.load(f)
             history.append(
                 {
@@ -114,7 +113,7 @@ class UsageTracker:
         if not os.path.exists(USAGE_DB_FILE):
             return {"total_requests": 0, "total_tokens": 0, "total_cost": 0.0}
         try:
-            with open(USAGE_DB_FILE, encoding="utf-8") as f:
+            with open(USAGE_DB_FILE) as f:
                 history = json.load(f)
             total_tokens = sum((entry["total_tokens"] for entry in history))
             total_cost = sum((entry["cost"] for entry in history))
@@ -1,5 +1,4 @@
 import os
-
 from rp.core.exceptions import ValidationError
 
 
@@ -118,9 +118,6 @@ class RPEditor:
                 self.lines = content.splitlines() if content else [""]
             else:
                 self.lines = [""]
-        except UnicodeDecodeError:
-            # If it's a binary file or truly unreadable as text, treat as empty
-            self.lines = [""]
         except Exception:
             self.lines = [""]
 
@@ -353,9 +350,7 @@ class RPEditor:
                 height, _ = self.stdscr.getmaxyx()
                 page_size = height - 2
                 self.cursor_y = min(len(self.lines) - 1, self.cursor_y + page_size)
-                self.scroll_y = min(
-                    max(0, len(self.lines) - height + 1), self.scroll_y + page_size
-                )
+                self.scroll_y = min(max(0, len(self.lines) - height + 1), self.scroll_y + page_size)
             self.prev_key = key
         except Exception:
             pass
@@ -35,16 +35,10 @@ class RPEditor:
 
     def load_file(self):
         try:
-            if self.filename:
-                with open(self.filename, encoding="utf-8", errors="replace") as f:
-                    self.lines = f.read().splitlines()
-                if not self.lines:
-                    self.lines = [""]
-            else:
-                self.lines = [""]
-        except UnicodeDecodeError:
-            # If it's a binary file or truly unreadable as text, treat as empty
-            self.lines = [""]
+            with open(self.filename) as f:
+                self.lines = f.read().splitlines()
+            if not self.lines:
+                self.lines = [""]
         except:
             self.lines = [""]
 
@@ -6,13 +6,13 @@ It intelligently resolves local imports, hoists external dependencies to the top
 and preserves the core logic, using AST for safe transformations.
 """
 
-import argparse
-import ast
-import logging
 import os
-import py_compile
 import sys
-from typing import Dict, Optional, Set, TextIO
+import ast
+import argparse
+import logging
+import py_compile
+from typing import Set, Dict, Optional, TextIO
 
 logger = logging.getLogger("impLODE")
 
@@ -250,10 +250,10 @@ class Imploder:
         self.processed_files.clear()
         try:
             with open(output_file_path, "w", encoding="utf-8") as f_out:
-                f_out.write("#!/usr/bin/env python3\n")
-                f_out.write("# -*- coding: utf-8 -*-\n")
-                f_out.write("import logging\n")
-                f_out.write("\n# --- IMPLODED FILE: Generated by impLODE --- #\n")
+                f_out.write(f"#!/usr/bin/env python3\n")
+                f_out.write(f"# -*- coding: utf-8 -*-\n")
+                f_out.write(f"import logging\n")
+                f_out.write(f"\n# --- IMPLODED FILE: Generated by impLODE --- #\n")
                 f_out.write(
                     f"# --- Original main file: {os.path.relpath(main_file_abs_path, self.root_dir)} --- #\n"
                 )
@@ -99,22 +99,9 @@ class AdvancedInputHandler:
         try:
             path = Path(filename).expanduser().resolve()
             if path.exists() and path.is_file():
-                mime_type, _ = mimetypes.guess_type(str(path))
-                if mime_type and (
-                    mime_type.startswith("text/")
-                    or mime_type in ["application/json", "application/xml"]
-                ):
-                    with open(path, encoding="utf-8", errors="replace") as f:
-                        content = f.read()
-                    return f"\n--- File: {filename} ---\n{content}\n--- End of {filename} ---\n"
-                elif mime_type and not mime_type.startswith(
-                    "image/"
-                ):  # Handle other binary files
-                    with open(path, "rb") as f:
-                        binary_data = base64.b64encode(f.read()).decode("utf-8")
-                    return f"\n--- Binary File: {filename} ({mime_type}) ---\ndata:{mime_type};base64,{binary_data}\n--- End of {filename} ---\n"
-                else:
-                    return f"[File not included (unsupported type or already handled as image): {filename}]"
+                with open(path, encoding="utf-8", errors="replace") as f:
+                    content = f.read()
+                return f"\n--- File: {filename} ---\n{content}\n--- End of {filename} ---\n"
             else:
                 return f"[File not found: {filename}]"
         except Exception as e:
@@ -4,7 +4,6 @@ import threading
 import time
 from dataclasses import dataclass
 from typing import Any, Dict, List, Optional, Tuple
-
 from .semantic_index import SemanticIndex
 
 
@@ -3,7 +3,6 @@ import subprocess
 import sys
 import threading
 import time
-
 from rp.tools.process_handlers import detect_process_type, get_handler_for_process
 from rp.tools.prompt_detection import get_global_detector
 from rp.ui import Colors
@@ -2,7 +2,6 @@ import importlib.util
 import os
 import sys
 from typing import Callable, Dict, List
-
 from rp.core.logging import get_logger
 
 logger = get_logger("plugins")
rp/rp.py (2 lines changed)

@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 
 # Trigger build
-import os
 import sys
+import os
 
 # Add current directory to path to ensure imports work
 sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
@@ -6,6 +6,7 @@ from rp.tools.agents import (
     remove_agent,
 )
 from rp.tools.base import get_tools_definition
+from rp.tools.vision import post_image
 from rp.tools.command import kill_process, run_command, run_command_interactive, tail_process
 from rp.tools.database import db_get, db_query, db_set
 from rp.tools.editor import (
@@ -16,27 +17,7 @@ from rp.tools.editor import (
     open_editor,
 )
 from rp.tools.filesystem import (
-    chdir,
-    clear_edit_tracker,
-    close_editor,
-    delete_specific_line,
-    display_edit_summary,
-    display_edit_timeline,
-    editor_insert_text,
-    editor_replace_text,
-    get_editor,
-    get_uid,
-    getpwd,
-    index_source_directory,
-    insert_line_at_position,
-    list_directory,
-    mkdir,
-    open_editor,
-    read_file,
-    read_specific_lines,
-    replace_specific_line,
-    search_replace,
-    write_file,
+    get_uid, read_specific_lines, replace_specific_line, insert_line_at_position, delete_specific_line, read_file, write_file, list_directory, mkdir, chdir, getpwd, index_source_directory, search_replace, get_editor, close_editor, open_editor, editor_insert_text, editor_replace_text, display_edit_summary, display_edit_timeline, clear_edit_tracker
 )
 from rp.tools.lsp import get_diagnostics
 from rp.tools.memory import (
@@ -51,8 +32,7 @@ from rp.tools.memory import (
 from rp.tools.patch import apply_patch, create_diff
 from rp.tools.python_exec import python_exec
 from rp.tools.search import glob_files, grep
-from rp.tools.vision import post_image
-from rp.tools.web import download_to_file, http_fetch, web_search, web_search_news
+from rp.tools.web import http_fetch, web_search, web_search_news
 
 # Aliases for user-requested tool names
 view = read_file
@@ -81,7 +61,6 @@ __all__ = [
     "db_set",
     "delete_knowledge_entry",
     "delete_specific_line",
-    "download_to_file",
     "diagnostics",
     "display_edit_summary",
     "display_edit_timeline",
@@ -128,3 +107,4 @@ __all__ = [
     "write",
     "write_file",
 ]
+
@@ -1,9 +1,8 @@
 import os
 from typing import Any, Dict, List
-
 from rp.agents.agent_manager import AgentManager
-from rp.config import DEFAULT_API_URL, DEFAULT_MODEL
 from rp.core.api import call_api
+from rp.config import DEFAULT_MODEL, DEFAULT_API_URL
 from rp.tools.base import get_tools_definition
 
 
@@ -1,7 +1,6 @@
 import inspect
-from typing import get_args, get_origin, get_type_hints
-
 import rp.tools
+from typing import get_type_hints, get_origin, get_args
 
 
 def _type_to_json_schema(py_type):
@@ -76,7 +76,6 @@ def db_query(query, db_conn):
     except Exception as e:
         return {"status": "error", "error": str(e)}
 
-
 def log_api_request(model, api_url, request_payload, db_conn):
     """Log an API request to the database.
 
@@ -102,7 +101,6 @@ def log_api_request(model, api_url, request_payload, db_conn):
     except Exception as e:
         return {"status": "error", "error": str(e)}
 
-
 def log_http_request(method, url, request_body, response_body, status_code, db_conn):
     """Log an HTTP request to the database.
 
|||||||
@ -1,15 +1,15 @@
|
|||||||
import ast
|
|
||||||
import gc
|
|
||||||
import inspect
|
|
||||||
import json
|
|
||||||
import linecache
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
import sys
|
import sys
|
||||||
import threading
|
import os
|
||||||
|
import ast
|
||||||
|
import inspect
|
||||||
import time
|
import time
|
||||||
|
import threading
|
||||||
|
import gc
|
||||||
import weakref
|
import weakref
|
||||||
|
import linecache
|
||||||
|
import re
|
||||||
|
import json
|
||||||
|
import subprocess
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
|
|||||||
@@ -1,8 +1,6 @@
 import os
 import os.path
-
 from rp.editor import RPEditor
-
 from ..tools.patch import display_content_diff
 from ..ui.edit_feedback import track_edit, tracker
 
@@ -1,12 +1,8 @@
-import base64
 import hashlib
-import mimetypes
 import os
 import time
-from typing import Any, Optional
-
+from typing import Optional, Any
 from rp.editor import RPEditor
-
 from ..tools.patch import display_content_diff
 from ..ui.diff_display import get_diff_stats
 from ..ui.edit_feedback import track_edit, tracker
@@ -20,9 +16,7 @@ def get_uid():
     return _id
 
 
-def read_specific_lines(
-    filepath: str, start_line: int, end_line: Optional[int] = None, db_conn: Optional[Any] = None
-) -> dict:
+def read_specific_lines(filepath: str, start_line: int, end_line: Optional[int] = None, db_conn: Optional[Any] = None) -> dict:
     """
     Read specific lines or a range of lines from a file.
 
@@ -46,45 +40,32 @@ def read_specific_lines(
     Examples:
         # Read line 5 only
        result = read_specific_lines("example.txt", 5)
 
        # Read lines 10 to 20
        result = read_specific_lines("example.txt", 10, 20)
    """
    try:
        path = os.path.expanduser(filepath)
-        with open(path, "r") as file:
+        with open(path, 'r') as file:
            lines = file.readlines()
        total_lines = len(lines)
        if start_line < 1 or start_line > total_lines:
-            return {
-                "status": "error",
-                "error": f"Start line {start_line} is out of range. File has {total_lines} lines.",
-            }
+            return {"status": "error", "error": f"Start line {start_line} is out of range. File has {total_lines} lines."}
        if end_line is None:
            end_line = start_line
        if end_line < start_line or end_line > total_lines:
-            return {
-                "status": "error",
-                "error": f"End line {end_line} is out of range. File has {total_lines} lines.",
-            }
-        selected_lines = lines[start_line - 1 : end_line]
-        content = "".join(selected_lines)
+            return {"status": "error", "error": f"End line {end_line} is out of range. File has {total_lines} lines."}
+        selected_lines = lines[start_line - 1:end_line]
+        content = ''.join(selected_lines)
        if db_conn:
            from rp.tools.database import db_set
 
            db_set("read:" + path, "true", db_conn)
        return {"status": "success", "content": content}
    except Exception as e:
        return {"status": "error", "error": str(e)}
 
 
-def replace_specific_line(
-    filepath: str,
-    line_number: int,
-    new_content: str,
-    db_conn: Optional[Any] = None,
-    show_diff: bool = True,
-) -> dict:
+def replace_specific_line(filepath: str, line_number: int, new_content: str, db_conn: Optional[Any] = None, show_diff: bool = True) -> dict:
    """
    Replace the content of a specific line in a file.
 
@@ -116,27 +97,18 @@ def replace_specific_line(
            return {"status": "error", "error": "File does not exist"}
        if db_conn:
            from rp.tools.database import db_get
 
            read_status = db_get("read:" + path, db_conn)
            if read_status.get("status") != "success" or read_status.get("value") != "true":
-                return {
-                    "status": "error",
-                    "error": "File must be read before writing. Please read the file first.",
-                }
-        with open(path, "r") as file:
+                return {"status": "error", "error": "File must be read before writing. Please read the file first."}
+        with open(path, 'r') as file:
            lines = file.readlines()
        total_lines = len(lines)
        if line_number < 1 or line_number > total_lines:
-            return {
-                "status": "error",
-                "error": f"Line number {line_number} is out of range. File has {total_lines} lines.",
-            }
-        old_content = "".join(lines)
-        lines[line_number - 1] = (
-            new_content + "\n" if not new_content.endswith("\n") else new_content
-        )
-        new_full_content = "".join(lines)
-        with open(path, "w") as file:
+            return {"status": "error", "error": f"Line number {line_number} is out of range. File has {total_lines} lines."}
+        old_content = ''.join(lines)
+        lines[line_number - 1] = new_content + '\n' if not new_content.endswith('\n') else new_content
+        new_full_content = ''.join(lines)
+        with open(path, 'w') as file:
            file.writelines(lines)
        if show_diff:
            diff_result = display_content_diff(old_content, new_full_content, filepath)
@@ -147,13 +119,7 @@ def replace_specific_line(
        return {"status": "error", "error": str(e)}
 
 
-def insert_line_at_position(
-    filepath: str,
-    line_number: int,
-    new_content: str,
-    db_conn: Optional[Any] = None,
-    show_diff: bool = True,
-) -> dict:
+def insert_line_at_position(filepath: str, line_number: int, new_content: str, db_conn: Optional[Any] = None, show_diff: bool = True) -> dict:
    """
    Insert a new line at a specific position in a file.
 
@ -182,38 +148,27 @@ def insert_line_at_position(
|
|||||||
return {"status": "error", "error": "File does not exist"}
|
return {"status": "error", "error": "File does not exist"}
|
||||||
if db_conn:
|
if db_conn:
|
||||||
from rp.tools.database import db_get
|
from rp.tools.database import db_get
|
||||||
|
|
||||||
read_status = db_get("read:" + path, db_conn)
|
read_status = db_get("read:" + path, db_conn)
|
||||||
if read_status.get("status") != "success" or read_status.get("value") != "true":
|
if read_status.get("status") != "success" or read_status.get("value") != "true":
|
||||||
return {
|
return {"status": "error", "error": "File must be read before writing. Please read the file first."}
|
||||||
"status": "error",
|
with open(path, 'r') as file:
|
||||||
"error": "File must be read before writing. Please read the file first.",
|
|
||||||
}
|
|
||||||
with open(path, "r") as file:
|
|
||||||
lines = file.readlines()
|
lines = file.readlines()
|
||||||
old_content = "".join(lines)
|
old_content = ''.join(lines)
|
||||||
insert_index = min(line_number - 1, len(lines))
|
insert_index = min(line_number - 1, len(lines))
|
||||||
lines.insert(
|
lines.insert(insert_index, new_content + '\n' if not new_content.endswith('\n') else new_content)
|
||||||
insert_index, new_content + "\n" if not new_content.endswith("\n") else new_content
|
new_full_content = ''.join(lines)
|
||||||
)
|
with open(path, 'w') as file:
|
||||||
new_full_content = "".join(lines)
|
|
||||||
with open(path, "w") as file:
|
|
||||||
file.writelines(lines)
|
file.writelines(lines)
|
||||||
if show_diff:
|
if show_diff:
|
||||||
diff_result = display_content_diff(old_content, new_full_content, filepath)
|
diff_result = display_content_diff(old_content, new_full_content, filepath)
|
||||||
if diff_result["status"] == "success":
|
if diff_result["status"] == "success":
|
||||||
print(diff_result["visual_diff"])
|
print(diff_result["visual_diff"])
|
||||||
return {
|
return {"status": "success", "message": f"Inserted line at position {line_number} in {path}"}
|
||||||
"status": "success",
|
|
||||||
"message": f"Inserted line at position {line_number} in {path}",
|
|
||||||
}
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {"status": "error", "error": str(e)}
|
return {"status": "error", "error": str(e)}
|
||||||
|
|
||||||
|
|
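Note: all of the line-editing tools in this file share the same guard: when a db_conn is passed, the file must first have been opened through read_file, which records a "read:<path>" marker. The sketch below shows the intended call order; it is illustrative only, and the sqlite3 database path is an assumption, not something this diff specifies.

    import sqlite3

    conn = sqlite3.connect("rp.db")  # hypothetical database location
    read_file("notes.txt", db_conn=conn)  # records the "read:notes.txt" marker
    result = replace_specific_line("notes.txt", 3, "new text", db_conn=conn)
    # Without the prior read_file call, the guard returns
    # {"status": "error", "error": "File must be read before writing. ..."}
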
-def delete_specific_line(
-    filepath: str, line_number: int, db_conn: Optional[Any] = None, show_diff: bool = True
-) -> dict:
+def delete_specific_line(filepath: str, line_number: int, db_conn: Optional[Any] = None, show_diff: bool = True) -> dict:
     """
     Delete a specific line from a file.

@@ -240,25 +195,18 @@ def delete_specific_line(
             return {"status": "error", "error": "File does not exist"}
         if db_conn:
             from rp.tools.database import db_get

             read_status = db_get("read:" + path, db_conn)
             if read_status.get("status") != "success" or read_status.get("value") != "true":
-                return {
-                    "status": "error",
-                    "error": "File must be read before writing. Please read the file first.",
-                }
-        with open(path, "r") as file:
+                return {"status": "error", "error": "File must be read before writing. Please read the file first."}
+        with open(path, 'r') as file:
             lines = file.readlines()
         total_lines = len(lines)
         if line_number < 1 or line_number > total_lines:
-            return {
-                "status": "error",
-                "error": f"Line number {line_number} is out of range. File has {total_lines} lines.",
-            }
-        old_content = "".join(lines)
+            return {"status": "error", "error": f"Line number {line_number} is out of range. File has {total_lines} lines."}
+        old_content = ''.join(lines)
         del lines[line_number - 1]
-        new_full_content = "".join(lines)
-        with open(path, "w") as file:
+        new_full_content = ''.join(lines)
+        with open(path, 'w') as file:
             file.writelines(lines)
         if show_diff:
             diff_result = display_content_diff(old_content, new_full_content, filepath)
@@ -282,16 +230,8 @@ def read_file(filepath: str, db_conn: Optional[Any] = None) -> dict:
     """
     try:
         path = os.path.expanduser(filepath)
-        mime_type, _ = mimetypes.guess_type(str(path))
-        if mime_type and (
-            mime_type.startswith("text/") or mime_type in ["application/json", "application/xml"]
-        ):
-            with open(path, encoding="utf-8", errors="replace") as f:
-                content = f.read()
-        else:
-            with open(path, "rb") as f:
-                binary_content = f.read()
-            content = f"data:{mime_type if mime_type else 'application/octet-stream'};base64,{base64.b64encode(binary_content).decode('utf-8')}"
+        with open(path) as f:
+            content = f.read()
         if db_conn:
             from rp.tools.database import db_set

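Note: the removed side of read_file sniffs the MIME type and returns binary files as base64 "data:" URIs instead of raw bytes, so every tool result stays JSON-serializable text. A self-contained sketch of that detection, built only on the standard library (the helper name is illustrative, not part of the module):

    import base64
    import mimetypes

    def read_as_text_or_data_uri(path):  # illustrative helper, not the module's API
        mime, _ = mimetypes.guess_type(path)
        if mime and (mime.startswith("text/") or mime in ["application/json", "application/xml"]):
            with open(path, encoding="utf-8", errors="replace") as f:
                return f.read()
        with open(path, "rb") as f:
            data = f.read()
        return f"data:{mime or 'application/octet-stream'};base64,{base64.b64encode(data).decode('utf-8')}"
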
@@ -330,53 +270,22 @@ def write_file(
                     "status": "error",
                     "error": "File must be read before writing. Please read the file first.",
                 }

-    write_mode = "w"
-    write_encoding = "utf-8"
-    decoded_content = content
-
-    if content.startswith("data:"):
-        parts = content.split(",", 1)
-        if len(parts) == 2:
-            header = parts[0]
-            encoded_data = parts[1]
-            if ";base64" in header:
-                try:
-                    decoded_content = base64.b64decode(encoded_data)
-                    write_mode = "wb"
-                    write_encoding = None
-                except Exception:
-                    pass  # Not a valid base64, treat as plain text
-
     if not is_new_file:
-        if write_mode == "wb":
-            with open(path, "rb") as f:
-                old_content = f.read()
-        else:
-            with open(path, encoding="utf-8", errors="replace") as f:
-                old_content = f.read()
-
+        with open(path) as f:
+            old_content = f.read()
     operation = track_edit("WRITE", filepath, content=content, old_content=old_content)
     tracker.mark_in_progress(operation)
-    if show_diff and (not is_new_file) and write_mode == "w":  # Only show diff for text files
+    if show_diff and (not is_new_file):
         diff_result = display_content_diff(old_content, content, filepath)
         if diff_result["status"] == "success":
             print(diff_result["visual_diff"])
-    if write_mode == "wb":
-        with open(path, write_mode) as f:
-            f.write(decoded_content)
-    else:
-        with open(path, write_mode, encoding=write_encoding) as f:
-            f.write(decoded_content)
-
+    editor = RPEditor(path)
+    editor.set_text(content)
+    editor.save_file()
     if os.path.exists(path) and db_conn:
         try:
             cursor = db_conn.cursor()
-            file_hash = hashlib.md5(
-                old_content.encode() if isinstance(old_content, str) else old_content
-            ).hexdigest()
+            file_hash = hashlib.md5(old_content.encode()).hexdigest()
             cursor.execute(
                 "SELECT MAX(version) FROM file_versions WHERE filepath = ?", (filepath,)
             )
@@ -384,24 +293,14 @@ def write_file(
             version = result[0] + 1 if result[0] else 1
             cursor.execute(
                 "INSERT INTO file_versions (filepath, content, hash, timestamp, version)\n                    VALUES (?, ?, ?, ?, ?)",
-                (
-                    filepath,
-                    (
-                        old_content
-                        if isinstance(old_content, str)
-                        else old_content.decode("utf-8", errors="replace")
-                    ),
-                    file_hash,
-                    time.time(),
-                    version,
-                ),
+                (filepath, old_content, file_hash, time.time(), version),
             )
             db_conn.commit()
         except Exception:
             pass
     tracker.mark_completed(operation)
     message = f"File written to {path}"
-    if show_diff and (not is_new_file) and write_mode == "w":
+    if show_diff and (not is_new_file):
         stats = get_diff_stats(old_content, content)
         message += f" ({stats['insertions']}+ {stats['deletions']}-)"
     return {"status": "success", "message": message}
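Note: write_file archives the previous content into a file_versions table before overwriting. The diff shows only the queries, not the DDL; a plausible minimal schema, inferred from the SELECT MAX(version) and INSERT statements above, would be:

    # Assumed schema, reconstructed from the queries in write_file; the real
    # table definition lives elsewhere in the repository and may differ.
    import sqlite3

    conn = sqlite3.connect("rp.db")  # hypothetical path
    conn.execute(
        "CREATE TABLE IF NOT EXISTS file_versions ("
        "filepath TEXT, content TEXT, hash TEXT, timestamp REAL, version INTEGER)"
    )
    conn.commit()
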
@@ -529,18 +428,6 @@ def search_replace(
         path = os.path.expanduser(filepath)
         if not os.path.exists(path):
             return {"status": "error", "error": "File does not exist"}
-        mime_type, _ = mimetypes.guess_type(str(path))
-        if not (
-            mime_type
-            and (
-                mime_type.startswith("text/")
-                or mime_type in ["application/json", "application/xml"]
-            )
-        ):
-            return {
-                "status": "error",
-                "error": f"Cannot perform search and replace on binary file: {filepath}",
-            }
         if db_conn:
             from rp.tools.database import db_get

@@ -550,7 +437,7 @@ def search_replace(
                     "status": "error",
                     "error": "File must be read before writing. Please read the file first.",
                 }
-        with open(path, encoding="utf-8", errors="replace") as f:
+        with open(path) as f:
             content = f.read()
         content = content.replace(old_string, new_string)
         with open(path, "w") as f:
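Note: search_replace is built on str.replace, which substitutes every occurrence of old_string, not just the first; a caller who wants a single substitution would need the optional count argument. A quick illustration:

    >>> "a b a".replace("a", "x")
    'x b x'
    >>> "a b a".replace("a", "x", 1)
    'x b a'
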
@@ -596,15 +483,6 @@ def editor_insert_text(filepath, text, line=None, col=None, show_diff=True, db_c
     operation = None
     try:
         path = os.path.expanduser(filepath)
-        mime_type, _ = mimetypes.guess_type(str(path))
-        if not (
-            mime_type
-            and (
-                mime_type.startswith("text/")
-                or mime_type in ["application/json", "application/xml"]
-            )
-        ):
-            return {"status": "error", "error": f"Cannot insert text into binary file: {filepath}"}
         if db_conn:
             from rp.tools.database import db_get

@@ -616,7 +494,7 @@ def editor_insert_text(filepath, text, line=None, col=None, show_diff=True, db_c
                 }
         old_content = ""
         if os.path.exists(path):
-            with open(path, encoding="utf-8", errors="replace") as f:
+            with open(path) as f:
                 old_content = f.read()
         position = (line if line is not None else 0) * 1000 + (col if col is not None else 0)
         operation = track_edit("INSERT", filepath, start_pos=position, content=text)
@@ -646,15 +524,6 @@ def editor_replace_text(
     try:
         operation = None
         path = os.path.expanduser(filepath)
-        mime_type, _ = mimetypes.guess_type(str(path))
-        if not (
-            mime_type
-            and (
-                mime_type.startswith("text/")
-                or mime_type in ["application/json", "application/xml"]
-            )
-        ):
-            return {"status": "error", "error": f"Cannot replace text in binary file: {filepath}"}
         if db_conn:
             from rp.tools.database import db_get

@@ -666,7 +535,7 @@ def editor_replace_text(
                 }
         old_content = ""
         if os.path.exists(path):
-            with open(path, encoding="utf-8", errors="replace") as f:
+            with open(path) as f:
                 old_content = f.read()
         start_pos = start_line * 1000 + start_col
         end_pos = end_line * 1000 + end_col
@@ -713,3 +582,4 @@ def clear_edit_tracker():

     clear_tracker()
     return {"status": "success", "message": "Edit tracker cleared"}
+
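Note: both editor tools flatten a (line, col) pair into one integer for the edit tracker as line * 1000 + col. The encoding is compact but only unambiguous while no line exceeds 999 columns. A worked example:

    line, col = 12, 34
    position = line * 1000 + col          # 12034
    assert (position // 1000, position % 1000) == (12, 34)
    # A 1200-column line would collide with (line + 1, col - 1000).
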
@@ -1,6 +1,6 @@
-import importlib
 import subprocess
 import threading
+import importlib


 def _get_multiplexer_functions():
@@ -1,4 +1,4 @@
-from typing import Any, Dict
+from typing import Dict, Any


 def get_diagnostics(filepath: str) -> Dict[str, Any]:
@@ -2,7 +2,6 @@ import os
 import time
 import uuid
 from typing import Any, Dict

 from rp.memory.knowledge_store import KnowledgeEntry, KnowledgeStore
-
@@ -1,9 +1,7 @@
 import difflib
-import mimetypes
 import os
 import subprocess
 import tempfile

 from ..ui.diff_display import display_diff, get_diff_stats
-
@@ -63,27 +61,7 @@ def create_diff(
     try:
         path1 = os.path.expanduser(file1)
         path2 = os.path.expanduser(file2)
-        mime_type1, _ = mimetypes.guess_type(str(path1))
-        mime_type2, _ = mimetypes.guess_type(str(path2))
-        if not (
-            mime_type1
-            and (
-                mime_type1.startswith("text/")
-                or mime_type1 in ["application/json", "application/xml"]
-            )
-        ):
-            return {"status": "error", "error": f"Cannot create diff for binary file: {file1}"}
-        if not (
-            mime_type2
-            and (
-                mime_type2.startswith("text/")
-                or mime_type2 in ["application/json", "application/xml"]
-            )
-        ):
-            return {"status": "error", "error": f"Cannot create diff for binary file: {file2}"}
-        with open(path1, encoding="utf-8", errors="replace") as f1, open(
-            path2, encoding="utf-8", errors="replace"
-        ) as f2:
+        with open(path1) as f1, open(path2) as f2:
             content1 = f1.read()
             content2 = f2.read()
         if visual:
@@ -113,27 +91,9 @@ def display_file_diff(filepath1, filepath2, format_type="unified", context_lines
     try:
         path1 = os.path.expanduser(filepath1)
         path2 = os.path.expanduser(filepath2)
-        mime_type1, _ = mimetypes.guess_type(str(path1))
-        mime_type2, _ = mimetypes.guess_type(str(path2))
-        if not (
-            mime_type1
-            and (
-                mime_type1.startswith("text/")
-                or mime_type1 in ["application/json", "application/xml"]
-            )
-        ):
-            return {"status": "error", "error": f"Cannot display diff for binary file: {filepath1}"}
-        if not (
-            mime_type2
-            and (
-                mime_type2.startswith("text/")
-                or mime_type2 in ["application/json", "application/xml"]
-            )
-        ):
-            return {"status": "error", "error": f"Cannot display diff for binary file: {filepath2}"}
-        with open(path1, encoding="utf-8", errors="replace") as f1:
+        with open(path1) as f1:
             old_content = f1.read()
-        with open(path2, encoding="utf-8", errors="replace") as f2:
+        with open(path2) as f2:
             new_content = f2.read()
         visual_diff = display_diff(old_content, new_content, filepath1, format_type)
         stats = get_diff_stats(old_content, new_content)
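Note: the same "is this a text file?" MIME guard appears verbatim on the removed side of read_file, search_replace, editor_insert_text, editor_replace_text, create_diff, and display_file_diff. If it ever returns, it could live in one shared helper; the sketch below is hypothetical and not a function this repository is known to define:

    import mimetypes

    TEXT_MIMES = ["application/json", "application/xml"]

    def is_text_file(path):  # hypothetical shared helper
        mime, _ = mimetypes.guess_type(str(path))
        return bool(mime and (mime.startswith("text/") or mime in TEXT_MIMES))
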
@@ -1,7 +1,7 @@
 import glob
 import os
-import re
 from typing import List
+import re


 def glob_files(pattern: str, path: str = ".") -> dict:
@@ -1,6 +1,5 @@
-import functools
-
 from rp.vision import post_image as vision_post_image
+import functools


 @functools.lru_cache()
rp/tools/web.py (154 lines changed)

@@ -1,50 +1,15 @@
-import imghdr
-import random
-import requests
-from typing import Optional, Dict, Any
+import json
+import urllib.error
+import urllib.parse
+import urllib.request

-# Realistic User-Agents
-USER_AGENTS = [
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
-    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0",
-    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15",
-    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:89.0) Gecko/20100101 Firefox/89.0",
-    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Edge/91.0.864.59",
-    "Mozilla/5.0 (iPhone; CPU iPhone OS 14_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Mobile/15E148 Safari/604.1",
-    "Mozilla/5.0 (iPad; CPU OS 14_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Mobile/15E148 Safari/604.1",
-    "Mozilla/5.0 (Android 11; Mobile; rv:68.0) Gecko/68.0 Firefox/88.0",
-]
-
-
-def get_default_headers():
-    """Get default realistic headers with variations."""
-    accept_languages = [
-        "en-US,en;q=0.5",
-        "en-US,en;q=0.9",
-        "en-GB,en;q=0.5",
-        "en-US,en;q=0.5;fr;q=0.3",
-    ]
-    headers = {
-        "User-Agent": random.choice(USER_AGENTS),
-        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
-        "Accept-Language": random.choice(accept_languages),
-        "Accept-Encoding": "gzip, deflate, br",
-        "DNT": "1",
-        "Connection": "keep-alive",
-        "Upgrade-Insecure-Requests": "1",
-    }
-    # Sometimes add Cache-Control
-    if random.random() < 0.3:
-        headers["Cache-Control"] = "no-cache"
-    # Sometimes add Referer
-    if random.random() < 0.2:
-        headers["Referer"] = "https://www.google.com/"
-    return headers
+import json
+import urllib.parse
+import urllib.request


-def http_fetch(url: str, headers: Optional[Dict[str, str]] = None) -> Dict[str, Any]:
+def http_fetch(url, headers=None):
     """Fetch content from an HTTP URL.

     Args:
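Note: the removed (requests-based) side disguises automated traffic by sampling a User-Agent and Accept-Language per request and occasionally adding Cache-Control or Referer. A trimmed sketch of the pattern, assuming the requests package that the removed code depends on:

    import random
    import requests

    USER_AGENTS = [  # trimmed; the full rotation list is shown in the hunk above
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0",
    ]

    def fetch(url):  # illustrative, not the module's API
        headers = {"User-Agent": random.choice(USER_AGENTS), "Accept-Language": "en-US,en;q=0.5"}
        response = requests.get(url, headers=headers, timeout=30)
        response.raise_for_status()
        return response.text
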
@@ -55,97 +20,29 @@ def http_fetch(url: str, headers: Optional[Dict[str, str]] = None) -> Dict[str,
         Dict with status and content.
     """
     try:
-        default_headers = get_default_headers()
+        request = urllib.request.Request(url)
         if headers:
-            default_headers.update(headers)
-
-        response = requests.get(url, headers=default_headers, timeout=30)
-        response.raise_for_status()  # Raise HTTPError for bad responses (4xx or 5xx)
-
-        content_type = response.headers.get("Content-Type", "").lower()
-        if "text" in content_type or "json" in content_type or "xml" in content_type:
-            content = response.text
-            return {"status": "success", "content": content[:10000]}
-        else:
-            content = response.content
-            return {"status": "success", "content": content}
-    except requests.exceptions.RequestException as e:
-        return {"status": "error", "error": str(e)}
-
-
-def download_to_file(
-    source_url: str, destination_path: str, headers: Optional[Dict[str, str]] = None
-) -> Dict[str, Any]:
-    """Download content from an HTTP URL to a file.
-
-    Args:
-        source_url: The URL to download from.
-        destination_path: The path to save the downloaded content.
-        headers: Optional HTTP headers.
-
-    Returns:
-        Dict with status, downloaded_from, and downloaded_to on success, or status and error on failure.
-
-    This function can be used for binary files like images as well.
-    """
-    try:
-        default_headers = get_default_headers()
-        if headers:
-            default_headers.update(headers)
-
-        response = requests.get(source_url, headers=default_headers, stream=True, timeout=60)
-        response.raise_for_status()  # Raise HTTPError for bad responses (4xx or 5xx)
-
-        with open(destination_path, "wb") as file:
-            for chunk in response.iter_content(chunk_size=8192):
-                file.write(chunk)
-
-        content_type = response.headers.get("Content-Type", "").lower()
-        if content_type.startswith("image/"):
-            img_type = imghdr.what(destination_path)
-            if img_type is None:
-                return {
-                    "status": "success",
-                    "downloaded_from": source_url,
-                    "downloaded_to": destination_path,
-                    "is_valid_image": False,
-                    "warning": "Downloaded content is not a valid image, consider finding a different source.",
-                }
-            else:
-                return {
-                    "status": "success",
-                    "downloaded_from": source_url,
-                    "downloaded_to": destination_path,
-                    "is_valid_image": True,
-                }
-        else:
-            return {
-                "status": "success",
-                "downloaded_from": source_url,
-                "downloaded_to": destination_path,
-            }
-    except requests.exceptions.RequestException as e:
-        return {"status": "error", "error": str(e)}
-
-
-def _perform_search(
-    base_url: str, query: str, params: Optional[Dict[str, str]] = None
-) -> Dict[str, Any]:
-    try:
-        default_headers = get_default_headers()
-        search_params = {"query": query}
-        if params:
-            search_params.update(params)
-
-        response = requests.get(base_url, headers=default_headers, params=search_params, timeout=30)
-        response.raise_for_status()  # Raise HTTPError for bad responses (4xx or 5xx)
-
-        return {"status": "success", "content": response.json()}
-    except requests.exceptions.RequestException as e:
-        return {"status": "error", "error": str(e)}
-
-
-def web_search(query: str) -> Dict[str, Any]:
+            for header_key, header_value in headers.items():
+                request.add_header(header_key, header_value)
+        with urllib.request.urlopen(request) as response:
+            content = response.read().decode("utf-8")
+        return {"status": "success", "content": content[:10000]}
+    except Exception as exception:
+        return {"status": "error", "error": str(exception)}
+
+
+def _perform_search(base_url, query, params=None):
+    try:
+        encoded_query = urllib.parse.quote(query)
+        full_url = f"{base_url}?query={encoded_query}"
+        with urllib.request.urlopen(full_url) as response:
+            content = response.read().decode("utf-8")
+        return {"status": "success", "content": json.loads(content)}
+    except Exception as exception:
+        return {"status": "error", "error": str(exception)}
+
+
+def web_search(query):
     """Perform a web search.

     Args:
@@ -158,7 +55,7 @@ def web_search(query: str) -> Dict[str, Any]:
     return _perform_search(base_url, query)


-def web_search_news(query: str) -> Dict[str, Any]:
+def web_search_news(query):
     """Perform a web search for news.

     Args:
@@ -169,3 +66,4 @@ def web_search_news(query: str) -> Dict[str, Any]:
     """
     base_url = "https://search.molodetz.nl/search"
     return _perform_search(base_url, query)
+
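Note: the restored implementation relies only on the standard library, and urllib.request.urlopen raises urllib.error.HTTPError on 4xx/5xx responses, so the broad except Exception above also covers HTTP failures. As reconstructed here, it also imports json/urllib.parse/urllib.request a second time mid-file, which is harmless in Python but redundant. A minimal stdlib equivalent of the fetch path:

    import urllib.request

    def fetch(url):  # illustrative stdlib sketch
        with urllib.request.urlopen(url) as response:  # raises on 4xx/5xx
            return response.read().decode("utf-8")
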
@@ -1,5 +1,5 @@
-import threading
 import time
+import threading


 class Colors:

@@ -1,6 +1,5 @@
 import difflib
 from typing import Dict, List, Optional, Tuple
-
 from .colors import Colors

@@ -1,6 +1,5 @@
 from datetime import datetime
 from typing import Dict, List, Optional
-
 from .colors import Colors
 from .progress import ProgressBar

@@ -1,5 +1,4 @@
 import re
-
 from rp.config import LANGUAGE_KEYWORDS
 from rp.ui.colors import Colors
rp/vision.py (29 lines changed)

@@ -1,7 +1,9 @@
+import http.client
 import argparse
 import base64
+import json
+import http.client
 import pathlib
-import requests


 DEFAULT_URL = "https://static.molodetz.nl/rp.vision.cgi"

@@ -10,24 +12,25 @@ def post_image(image_path: str, prompt: str = "", url: str = DEFAULT_URL):
     image_path = str(pathlib.Path(image_path).resolve().absolute())
     if not url:
         url = DEFAULT_URL
+    url_parts = url.split("/")
+    host = url_parts[2]
+    path = "/" + "/".join(url_parts[3:])
     with open(image_path, "rb") as file:
         image_data = file.read()
     base64_data = base64.b64encode(image_data).decode("utf-8")

     payload = {"data": base64_data, "path": image_path, "prompt": prompt}
+    body = json.dumps(payload).encode("utf-8")
     headers = {
         "Content-Type": "application/json",
-        "User-Agent": "Python requests",
+        "Content-Length": str(len(body)),
+        "User-Agent": "Python http.client",
     }
-    try:
-        response = requests.post(url, json=payload, headers=headers, timeout=60)
-        response.raise_for_status()  # Raise HTTPError for bad responses (4xx or 5xx)
-        print("Status:", response.status_code, response.reason)
-        print(response.text)
-    except requests.exceptions.RequestException as e:
-        print(f"Error posting image: {e}")
+    conn = http.client.HTTPSConnection(host)
+    conn.request("POST", path, body, headers)
+    resp = conn.getresponse()
+    data = resp.read()
+    print("Status:", resp.status, resp.reason)
+    print(data.decode())


 if __name__ == "__main__":
@@ -36,4 +39,4 @@ if __name__ == "__main__":
     parser.add_argument("--prompt", default="")
     parser.add_argument("--url", default=DEFAULT_URL)
     args = parser.parse_args()
-    post_image(args.image_path, args.prompt, args.url)
+    post_image(args.url, args.image_path, args.prompt)
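Note: in the __main__ block the restored side calls post_image(args.url, args.image_path, args.prompt) against the signature post_image(image_path, prompt, url), so every positional argument lands in the wrong parameter; the removed side's order is the one that matches the signature. Keyword arguments would make the call order-proof:

    # Hedged suggestion; mirrors the signature shown in the hunk above.
    post_image(image_path=args.image_path, prompt=args.prompt, url=args.url)
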
@@ -2,7 +2,6 @@ import re
 import time
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from typing import Any, Callable, Dict, List, Optional

 from .workflow_definition import ExecutionMode, Workflow, WorkflowStep
-

@@ -2,7 +2,6 @@ import json
 import sqlite3
 import time
 from typing import List, Optional

 from .workflow_definition import Workflow
-
@@ -1,5 +1,5 @@
-import json
 import unittest
+import json
 from unittest.mock import patch

 from rp.core.api import call_api, list_models
@@ -1,29 +1,28 @@
 from unittest.mock import Mock, patch

 from rp.commands.handlers import (
-    clear_caches,
-    collaborate_agents_command,
-    execute_agent_task,
-    execute_workflow_command,
-    handle_background_command,
     handle_command,
-    kill_background_session,
-    list_background_sessions,
-    obfuscate_file,
-    refactor_file,
     review_file,
-    search_knowledge,
-    send_session_input,
-    show_agents,
-    show_background_events,
-    show_cache_stats,
-    show_conversation_history,
-    show_session_output,
-    show_session_status,
-    show_system_stats,
+    refactor_file,
+    obfuscate_file,
     show_workflows,
-    start_background_session,
+    execute_workflow_command,
+    execute_agent_task,
+    show_agents,
+    collaborate_agents_command,
+    search_knowledge,
     store_knowledge,
+    show_conversation_history,
+    show_cache_stats,
+    clear_caches,
+    show_system_stats,
+    handle_background_command,
+    start_background_session,
+    list_background_sessions,
+    show_session_status,
+    show_session_output,
+    send_session_input,
+    kill_background_session,
+    show_background_events,
 )

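Note: this hunk and the remaining test hunks below revert alphabetized import blocks to an earlier hand-written order; no names are added or removed. The removed ordering is what import sorters produce mechanically, e.g. with isort (the black-compatible profile is assumed from isort's documented CLI):

    # isort --profile black tests/
    import isort  # isort also exposes a Python API

    print(isort.code("import b\nimport a\n"))  # -> "import a\nimport b\n"
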
@@ -1,6 +1,6 @@
-import os
 import sqlite3
 import tempfile
+import os
 import time

 from rp.memory.conversation_memory import ConversationMemory
@@ -337,8 +337,8 @@ class TestConversationMemory:

     def test_thread_safety(self):
         """Test that the memory can handle concurrent access."""
-        import queue
         import threading
+        import queue

         results = queue.Queue()
@@ -1,16 +1,15 @@
 import pytest

 from rp.core.exceptions import (
-    APIConnectionError,
-    APIException,
-    APIResponseError,
-    APITimeoutError,
-    ConfigurationError,
-    ContextError,
-    FileSystemError,
     PRException,
-    SessionError,
+    APIException,
+    APIConnectionError,
+    APITimeoutError,
+    APIResponseError,
+    ConfigurationError,
     ToolExecutionError,
+    FileSystemError,
+    SessionError,
+    ContextError,
     ValidationError,
 )
@@ -1,10 +1,10 @@
 from rp.commands.help_docs import (
-    get_agent_help,
-    get_background_help,
-    get_cache_help,
-    get_full_help,
-    get_knowledge_help,
     get_workflow_help,
+    get_agent_help,
+    get_knowledge_help,
+    get_cache_help,
+    get_background_help,
+    get_full_help,
 )

@@ -1,9 +1,9 @@
-import os
 import sqlite3
 import tempfile
+import os
 import time

-from rp.memory.knowledge_store import KnowledgeEntry, KnowledgeStore
+from rp.memory.knowledge_store import KnowledgeStore, KnowledgeEntry


 class TestKnowledgeStore:
@@ -284,8 +284,8 @@ class TestKnowledgeStore:

     def test_thread_safety(self):
         """Test that the store can handle concurrent access."""
-        import queue
         import threading
+        import queue

         results = queue.Queue()
@@ -1,6 +1,5 @@
-from unittest.mock import MagicMock, patch
-
-from rp.core.logging import get_logger, setup_logging
+from unittest.mock import patch, MagicMock
+from rp.core.logging import setup_logging, get_logger


 class TestLogging:
@@ -1,14 +1,13 @@
 from unittest.mock import Mock, patch

 from rp.commands.multiplexer_commands import (
+    show_sessions,
     attach_session,
     detach_session,
     kill_session,
-    list_waiting_sessions,
     send_command,
     show_session_log,
     show_session_status,
-    show_sessions,
+    list_waiting_sessions,
 )

@@ -1,7 +1,5 @@
 import math
-
 import pytest
-
 from rp.memory.semantic_index import SemanticIndex

@@ -1,6 +1,5 @@
 import json
 from unittest.mock import patch
-
 from rp.ui.output import OutputFormatter
