feat: bump version to 1.28.0

feat: update project structure and reorganize module imports
refactor: update test coverage targets to the rp package
refactor: update linting and formatting targets to the rp package
fix: improve autonomous mode logging
fix: handle json parsing errors in http client
fix: improve error handling in http client
feat: add support for advanced input in assistant
feat: add agent collaboration command
feat: add list models api call
feat: improve background monitor functionality
feat: update config loader
feat: update context management
feat: update enhanced assistant functionality
feat: update http client functionality
feat: update logging configuration
feat: update session management
feat: update usage tracking
feat: update validation logic
feat: add implode functionality
feat: update input handler
retoor 2025-11-08 02:11:31 +01:00
parent a289a8e402
commit 9438496b72
53 changed files with 369 additions and 259 deletions

View File

@ -27,16 +27,16 @@ test:
pytest tests/ -v --tb=long --full-trace -l --maxfail=10
test-cov:
pytest --cov=pr --cov-report=html --cov-report=term-missing
pytest --cov=rp --cov-report=html --cov-report=term-missing
@echo "Coverage report generated in htmlcov/index.html"
lint:
flake8 pr tests --max-line-length=100 --ignore=E203,W503
mypy pr --ignore-missing-imports
flake8 rp tests --max-line-length=100 --ignore=E203,W503
mypy rp --ignore-missing-imports
format:
black pr tests
isort pr tests --profile black
black rp tests
isort rp tests --profile black
clean:
rm -rf build/

View File

@ -1,5 +1,6 @@
import argparse
import sys
from rp import __version__
from rp.core import Assistant

View File

@ -2,6 +2,7 @@ import time
import uuid
from dataclasses import dataclass, field
from typing import Any, Callable, Dict, List, Optional
from ..memory.knowledge_store import KnowledgeStore
from .agent_communication import AgentCommunicationBus, AgentMessage, MessageType
from .agent_roles import AgentRole, get_agent_role

View File

@ -1,6 +1,7 @@
import json
import logging
import time
from rp.autonomous.detection import is_task_complete
from rp.core.api import call_api
from rp.core.context import truncate_tool_result
@ -13,7 +14,7 @@ logger = logging.getLogger("rp")
def run_autonomous_mode(assistant, task):
assistant.autonomous_mode = True
assistant.autonomous_iterations = 0
logger.debug(f"=== AUTONOMOUS MODE START ===")
logger.debug("=== AUTONOMOUS MODE START ===")
logger.debug(f"Task: {task}")
from rp.core.knowledge_context import inject_knowledge_context
@ -125,6 +126,9 @@ def execute_single_tool(assistant, func_name, arguments):
db_get,
db_query,
db_set,
editor_insert_text,
editor_replace_text,
editor_search,
getpwd,
http_fetch,
index_source_directory,

View File

@ -1,12 +1,13 @@
import json
import time
from rp.commands.multiplexer_commands import MULTIPLEXER_COMMANDS
from rp.autonomous import run_autonomous_mode
from rp.commands.multiplexer_commands import MULTIPLEXER_COMMANDS
from rp.core.api import list_models
from rp.editor import RPEditor
from rp.tools import read_file
from rp.tools.base import get_tools_definition
from rp.ui import Colors
from rp.editor import RPEditor
def handle_command(assistant, command):
@ -263,7 +264,7 @@ def collaborate_agents_command(assistant, task):
roles = ["coding", "research", "planning"]
result = assistant.enhanced.collaborate_agents(task, roles)
print(f"\n{Colors.GREEN}Collaboration completed{Colors.RESET}")
print(f"\nOrchestrator response:")
print("\nOrchestrator response:")
if "orchestrator" in result and "response" in result["orchestrator"]:
print(result["orchestrator"]["response"])
if result.get("agents"):
@ -295,6 +296,7 @@ def store_knowledge(assistant, content):
return
import time
import uuid
from rp.memory import KnowledgeEntry
categories = assistant.enhanced.fact_extractor.categorize_content(content)

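Read together with the handler above, the collaboration result is a plain dict; the sketch below mirrors only the keys the command actually reads ("orchestrator" with a "response" string, plus an optional "agents" list), and anything beyond that is an assumption.

# Hypothetical shape of the dict returned by assistant.enhanced.collaborate_agents();
# only "orchestrator" and "agents" are confirmed by collaborate_agents_command above.
result = {
    "orchestrator": {"response": "Merged plan assembled from the agent outputs"},
    "agents": [
        {"role": "coding", "response": "..."},    # per-agent entries are assumed
        {"role": "research", "response": "..."},
    ],
}
if "orchestrator" in result and "response" in result["orchestrator"]:
    print(result["orchestrator"]["response"])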
View File

@ -97,7 +97,8 @@ def list_models(model_list_url, api_key):
response = http_client.get(model_list_url, headers=headers, db_conn=None)
if response.get("error"):
return {"error": response.get("text", "HTTP error")}
data = json.loads(response["text"])
response_data = response["text"]
data = json.loads(response_data)
return data.get("data", [])
except Exception as e:
return {"error": str(e)}
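The hunk above only splits the parse across two lines, and the broad except Exception already turns a bad body into an error dict; a more targeted guard that names the JSON failure, in the spirit of the "handle json parsing errors" commit message, would look roughly like this sketch (the surrounding names follow the code above, the helper name and error wording are assumptions).

import json

def parse_models_response(response):
    # Sketch: guard json.loads so a non-JSON body becomes a structured error
    # instead of being folded into the generic exception handler.
    response_data = response["text"]
    try:
        data = json.loads(response_data)
    except json.JSONDecodeError as exc:
        return {"error": f"Invalid JSON in model list response: {exc}"}
    return data.get("data", [])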

View File

@ -8,8 +8,8 @@ import sqlite3
import sys
import traceback
from concurrent.futures import ThreadPoolExecutor
from rp.commands import handle_command
from rp.input_handler import get_advanced_input
from rp.config import (
DB_PATH,
DEFAULT_API_URL,
@ -23,6 +23,7 @@ from rp.core.autonomous_interactions import start_global_autonomous, stop_global
from rp.core.background_monitor import get_global_monitor, start_global_monitor, stop_global_monitor
from rp.core.context import init_system_message, truncate_tool_result
from rp.core.usage_tracker import UsageTracker
from rp.input_handler import get_advanced_input
from rp.tools import get_tools_definition
from rp.tools.agents import (
collaborate_agents,
@ -398,7 +399,7 @@ class Assistant:
except:
pass
prompt += f">{Colors.RESET} "
user_input = get_advanced_input(prompt)
user_input = get_advanced_input(prompt) or ""
user_input = user_input.strip()
if not user_input:
continue

View File

@ -1,5 +1,6 @@
import threading
import time
from rp.tools.interactive_control import (
get_session_status,
list_active_sessions,

View File

@ -1,6 +1,7 @@
import queue
import threading
import time
from rp.multiplexer import get_all_multiplexer_states, get_multiplexer

View File

@ -1,6 +1,7 @@
import configparser
import os
from typing import Any, Dict
from rp.core.logging import get_logger
logger = get_logger("config")

View File

@ -2,6 +2,7 @@ import json
import logging
import os
import pathlib
from rp.config import (
CHARS_PER_TOKEN,
CONTENT_TRIM_LENGTH,
@ -9,10 +10,10 @@ from rp.config import (
CONTEXT_FILE,
EMERGENCY_MESSAGES_TO_KEEP,
GLOBAL_CONTEXT_FILE,
KNOWLEDGE_PATH,
MAX_TOKENS_LIMIT,
MAX_TOOL_RESULT_LENGTH,
RECENT_MESSAGES_TO_KEEP,
KNOWLEDGE_PATH,
)
from rp.ui import Colors

View File

@ -2,6 +2,7 @@ import json
import logging
import uuid
from typing import Any, Dict, List, Optional
from rp.agents import AgentManager
from rp.cache import APICache, ToolCache
from rp.config import (
@ -132,6 +133,7 @@ class EnhancedAssistant:
for fact in facts[:5]:
entry_id = str(uuid.uuid4())[:16]
import time
from rp.memory import KnowledgeEntry
categories = self.fact_extractor.categorize_content(fact["text"])

View File

@ -1,12 +1,10 @@
import json
import logging
import random
import time
import urllib.error
import urllib.parse
import urllib.request
from typing import Dict, Any, Optional
import requests
logger = logging.getLogger("rp")
# Realistic User-Agents and headers
@ -23,6 +21,7 @@ USER_AGENTS = [
"Mozilla/5.0 (Android 11; Mobile; rv:68.0) Gecko/68.0 Firefox/88.0",
]
def get_realistic_headers(additional_headers=None):
"""Generate realistic HTTP headers with random User-Agent and variations."""
accept_languages = [
@ -71,89 +70,75 @@ class SyncHTTPClient:
else:
headers = get_realistic_headers(headers)
# Handle JSON data
request_body_for_log = ""
if json_data is not None:
data = json.dumps(json_data).encode('utf-8')
headers["Content-Type"] = "application/json"
# Prepare request body for logging
if json_data is not None:
request_body = json.dumps(json_data)
request_body_for_log = json.dumps(json_data)
elif data is not None:
request_body = data.decode("utf-8") if isinstance(data, bytes) else str(data)
else:
request_body = ""
request_body_for_log = data.decode("utf-8") if isinstance(data, bytes) else str(data)
"""Make a sync HTTP request using urllib with retry logic."""
attempt = 0
start_time = time.time()
while True:
attempt += 1
try:
req = urllib.request.Request(url, data=data, headers=headers, method=method)
with urllib.request.urlopen(req, timeout=timeout) as response:
response_data = response.read().decode('utf-8')
response = requests.request(
method,
url,
headers=headers,
data=data,
json=json_data,
timeout=timeout,
allow_redirects=True,
)
response.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
response_data = response.text
response_headers = dict(response.headers)
# Create json method
def json_method():
return json.loads(response_data)
# Log the request
if db_conn:
from rp.tools.database import log_http_request
log_result = log_http_request(
method, url, request_body, response_data, response.status, db_conn
method,
url,
request_body_for_log,
response_data,
response.status_code,
db_conn,
)
if log_result.get("status") != "success":
logger.warning(f"Failed to log HTTP request: {log_result.get('error')}")
return {
"status": response.status,
"status": response.status_code,
"headers": response_headers,
"text": response_data,
"json": json_method,
"json": response.json,
}
except urllib.error.HTTPError as e:
# For HTTP errors, still try to read the response
try:
response_data = e.read().decode('utf-8')
except:
response_data = ""
except requests.exceptions.HTTPError as e:
response_data = e.response.text if e.response else ""
response_headers = dict(e.response.headers) if e.response else {}
status_code = e.response.status_code if e.response else 0
# Log the request even on error
if db_conn:
from rp.tools.database import log_http_request
log_result = log_http_request(
method, url, request_body, response_data, e.code, db_conn
method,
url,
request_body_for_log,
response_data,
status_code,
db_conn,
)
if log_result.get("status") != "success":
logger.warning(f"Failed to log HTTP request: {log_result.get('error')}")
return {
"status": e.code,
"headers": dict(e.headers) if e.headers else {},
"status": status_code,
"headers": response_headers,
"text": response_data,
"json": lambda: json.loads(response_data) if response_data else None,
"json": lambda: e.response.json() if e.response and response_data else None,
}
except urllib.error.URLError as e:
if isinstance(e.reason, TimeoutError) or "timeout" in str(e.reason).lower():
elapsed = time.time() - start_time
elapsed_minutes = int(elapsed // 60)
elapsed_seconds = elapsed % 60
duration_str = (
f"{elapsed_minutes}m {elapsed_seconds:.1f}s"
if elapsed_minutes > 0
else f"{elapsed_seconds:.1f}s"
)
logger.warning(
f"Request timed out (attempt {attempt}, duration: {duration_str}). Retrying in {attempt} second(s)..."
)
time.sleep(attempt)
else:
return {"error": True, "exception": str(e)}
except requests.exceptions.RequestException as e:
logger.error(f"Request failed: {e}")
return {"error": True, "exception": str(e), "status": 0, "text": ""}
def get(
self,

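With urllib swapped out for requests, the method still hands callers a plain dict: status/headers/text/json on success (or on an HTTP error status), and an "error" flag with "exception", "status": 0 and empty "text" when the request itself fails. A minimal caller-side sketch, assuming SyncHTTPClient can be constructed without arguments and that get() forwards to this request path (the URL is a placeholder):

client = SyncHTTPClient()  # constructor arguments, if any, are not shown in the diff
response = client.get("https://example.com/v1/models", headers={"Accept": "application/json"})
if response.get("error"):
    print("request failed:", response.get("exception"))
elif response["status"] == 200:
    payload = response["json"]()  # "json" is stored as a callable in the returned dict
    print(payload)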
View File

@ -1,6 +1,7 @@
import logging
import os
from logging.handlers import RotatingFileHandler
from rp.config import LOG_FILE

View File

@ -2,6 +2,7 @@ import json
import os
from datetime import datetime
from typing import Dict, List, Optional
from rp.core.logging import get_logger
logger = get_logger("session")

View File

@ -2,6 +2,7 @@ import json
import os
from datetime import datetime
from typing import Dict, Optional
from rp.core.logging import get_logger
logger = get_logger("usage")

View File

@ -1,4 +1,5 @@
import os
from rp.core.exceptions import ValidationError

View File

@ -6,13 +6,13 @@ It intelligently resolves local imports, hoists external dependencies to the top
and preserves the core logic, using AST for safe transformations.
"""
import os
import sys
import ast
import argparse
import ast
import logging
import os
import py_compile
from typing import Set, Dict, Optional, TextIO
import sys
from typing import Dict, Optional, Set, TextIO
logger = logging.getLogger("impLODE")
@ -250,10 +250,10 @@ class Imploder:
self.processed_files.clear()
try:
with open(output_file_path, "w", encoding="utf-8") as f_out:
f_out.write(f"#!/usr/bin/env python3\n")
f_out.write(f"# -*- coding: utf-8 -*-\n")
f_out.write(f"import logging\n")
f_out.write(f"\n# --- IMPLODED FILE: Generated by impLODE --- #\n")
f_out.write("#!/usr/bin/env python3\n")
f_out.write("# -*- coding: utf-8 -*-\n")
f_out.write("import logging\n")
f_out.write("\n# --- IMPLODED FILE: Generated by impLODE --- #\n")
f_out.write(
f"# --- Original main file: {os.path.relpath(main_file_abs_path, self.root_dir)} --- #\n"
)

View File

@ -100,11 +100,16 @@ class AdvancedInputHandler:
path = Path(filename).expanduser().resolve()
if path.exists() and path.is_file():
mime_type, _ = mimetypes.guess_type(str(path))
if mime_type and (mime_type.startswith("text/") or mime_type in ["application/json", "application/xml"]):
if mime_type and (
mime_type.startswith("text/")
or mime_type in ["application/json", "application/xml"]
):
with open(path, encoding="utf-8", errors="replace") as f:
content = f.read()
return f"\n--- File: {filename} ---\n{content}\n--- End of {filename} ---\n"
elif mime_type and not mime_type.startswith("image/"): # Handle other binary files
elif mime_type and not mime_type.startswith(
"image/"
): # Handle other binary files
with open(path, "rb") as f:
binary_data = base64.b64encode(f.read()).decode("utf-8")
return f"\n--- Binary File: {filename} ({mime_type}) ---\ndata:{mime_type};base64,{binary_data}\n--- End of {filename} ---\n"

View File

@ -4,6 +4,7 @@ import threading
import time
from dataclasses import dataclass
from typing import Any, Dict, List, Optional, Tuple
from .semantic_index import SemanticIndex

View File

@ -3,6 +3,7 @@ import subprocess
import sys
import threading
import time
from rp.tools.process_handlers import detect_process_type, get_handler_for_process
from rp.tools.prompt_detection import get_global_detector
from rp.ui import Colors

View File

@ -2,6 +2,7 @@ import importlib.util
import os
import sys
from typing import Callable, Dict, List
from rp.core.logging import get_logger
logger = get_logger("plugins")

View File

@ -1,8 +1,8 @@
#!/usr/bin/env python3
# Trigger build
import sys
import os
import sys
# Add current directory to path to ensure imports work
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

View File

@ -6,7 +6,6 @@ from rp.tools.agents import (
remove_agent,
)
from rp.tools.base import get_tools_definition
from rp.tools.vision import post_image
from rp.tools.command import kill_process, run_command, run_command_interactive, tail_process
from rp.tools.database import db_get, db_query, db_set
from rp.tools.editor import (
@ -17,27 +16,27 @@ from rp.tools.editor import (
open_editor,
)
from rp.tools.filesystem import (
get_uid,
read_specific_lines,
replace_specific_line,
insert_line_at_position,
delete_specific_line,
read_file,
write_file,
list_directory,
mkdir,
chdir,
getpwd,
index_source_directory,
search_replace,
get_editor,
clear_edit_tracker,
close_editor,
open_editor,
editor_insert_text,
editor_replace_text,
delete_specific_line,
display_edit_summary,
display_edit_timeline,
clear_edit_tracker,
editor_insert_text,
editor_replace_text,
get_editor,
get_uid,
getpwd,
index_source_directory,
insert_line_at_position,
list_directory,
mkdir,
open_editor,
read_file,
read_specific_lines,
replace_specific_line,
search_replace,
write_file,
)
from rp.tools.lsp import get_diagnostics
from rp.tools.memory import (
@ -52,7 +51,8 @@ from rp.tools.memory import (
from rp.tools.patch import apply_patch, create_diff
from rp.tools.python_exec import python_exec
from rp.tools.search import glob_files, grep
from rp.tools.web import http_fetch, web_search, web_search_news, download_to_file
from rp.tools.vision import post_image
from rp.tools.web import download_to_file, http_fetch, web_search, web_search_news
# Aliases for user-requested tool names
view = read_file

View File

@ -1,8 +1,9 @@
import os
from typing import Any, Dict, List
from rp.agents.agent_manager import AgentManager
from rp.config import DEFAULT_API_URL, DEFAULT_MODEL
from rp.core.api import call_api
from rp.config import DEFAULT_MODEL, DEFAULT_API_URL
from rp.tools.base import get_tools_definition

View File

@ -1,6 +1,7 @@
import inspect
from typing import get_args, get_origin, get_type_hints
import rp.tools
from typing import get_type_hints, get_origin, get_args
def _type_to_json_schema(py_type):

View File

@ -1,15 +1,15 @@
import sys
import os
import ast
import inspect
import time
import threading
import gc
import weakref
import linecache
import re
import inspect
import json
import linecache
import os
import re
import subprocess
import sys
import threading
import time
import weakref
from collections import defaultdict
from datetime import datetime

View File

@ -1,6 +1,8 @@
import os
import os.path
from rp.editor import RPEditor
from ..tools.patch import display_content_diff
from ..ui.edit_feedback import track_edit, tracker

View File

@ -3,8 +3,10 @@ import hashlib
import mimetypes
import os
import time
from typing import Optional, Any
from typing import Any, Optional
from rp.editor import RPEditor
from ..tools.patch import display_content_diff
from ..ui.diff_display import get_diff_stats
from ..ui.edit_feedback import track_edit, tracker
@ -281,7 +283,9 @@ def read_file(filepath: str, db_conn: Optional[Any] = None) -> dict:
try:
path = os.path.expanduser(filepath)
mime_type, _ = mimetypes.guess_type(str(path))
if mime_type and (mime_type.startswith("text/") or mime_type in ["application/json", "application/xml"]):
if mime_type and (
mime_type.startswith("text/") or mime_type in ["application/json", "application/xml"]
):
with open(path, encoding="utf-8", errors="replace") as f:
content = f.read()
else:
@ -370,7 +374,9 @@ def write_file(
if os.path.exists(path) and db_conn:
try:
cursor = db_conn.cursor()
file_hash = hashlib.md5(old_content.encode() if isinstance(old_content, str) else old_content).hexdigest()
file_hash = hashlib.md5(
old_content.encode() if isinstance(old_content, str) else old_content
).hexdigest()
cursor.execute(
"SELECT MAX(version) FROM file_versions WHERE filepath = ?", (filepath,)
)
@ -378,7 +384,17 @@ def write_file(
version = result[0] + 1 if result[0] else 1
cursor.execute(
"INSERT INTO file_versions (filepath, content, hash, timestamp, version)\n VALUES (?, ?, ?, ?, ?)",
(filepath, old_content if isinstance(old_content, str) else old_content.decode('utf-8', errors='replace'), file_hash, time.time(), version),
(
filepath,
(
old_content
if isinstance(old_content, str)
else old_content.decode("utf-8", errors="replace")
),
file_hash,
time.time(),
version,
),
)
db_conn.commit()
except Exception:
@ -514,8 +530,17 @@ def search_replace(
if not os.path.exists(path):
return {"status": "error", "error": "File does not exist"}
mime_type, _ = mimetypes.guess_type(str(path))
if not (mime_type and (mime_type.startswith("text/") or mime_type in ["application/json", "application/xml"])):
return {"status": "error", "error": f"Cannot perform search and replace on binary file: {filepath}"}
if not (
mime_type
and (
mime_type.startswith("text/")
or mime_type in ["application/json", "application/xml"]
)
):
return {
"status": "error",
"error": f"Cannot perform search and replace on binary file: {filepath}",
}
if db_conn:
from rp.tools.database import db_get
@ -572,7 +597,13 @@ def editor_insert_text(filepath, text, line=None, col=None, show_diff=True, db_c
try:
path = os.path.expanduser(filepath)
mime_type, _ = mimetypes.guess_type(str(path))
if not (mime_type and (mime_type.startswith("text/") or mime_type in ["application/json", "application/xml"])):
if not (
mime_type
and (
mime_type.startswith("text/")
or mime_type in ["application/json", "application/xml"]
)
):
return {"status": "error", "error": f"Cannot insert text into binary file: {filepath}"}
if db_conn:
from rp.tools.database import db_get
@ -616,7 +647,13 @@ def editor_replace_text(
operation = None
path = os.path.expanduser(filepath)
mime_type, _ = mimetypes.guess_type(str(path))
if not (mime_type and (mime_type.startswith("text/") or mime_type in ["application/json", "application/xml"])):
if not (
mime_type
and (
mime_type.startswith("text/")
or mime_type in ["application/json", "application/xml"]
)
):
return {"status": "error", "error": f"Cannot replace text in binary file: {filepath}"}
if db_conn:
from rp.tools.database import db_get

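The same text-or-JSON/XML MIME guard now appears, black-formatted, in filesystem.py, patch.py and the input handler; if it keeps spreading it could be factored into a small helper along these lines (a sketch outside this commit; the helper name is invented):

import mimetypes

_TEXTUAL_MIME_TYPES = {"application/json", "application/xml"}

def _is_textual_file(path: str) -> bool:
    # Mirrors the repeated guard above: treat text/* plus JSON and XML as
    # editable text, everything else as binary.
    mime_type, _ = mimetypes.guess_type(str(path))
    return bool(mime_type and (mime_type.startswith("text/") or mime_type in _TEXTUAL_MIME_TYPES))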
View File

@ -1,6 +1,6 @@
import importlib
import subprocess
import threading
import importlib
def _get_multiplexer_functions():

View File

@ -1,4 +1,4 @@
from typing import Dict, Any
from typing import Any, Dict
def get_diagnostics(filepath: str) -> Dict[str, Any]:

View File

@ -2,6 +2,7 @@ import os
import time
import uuid
from typing import Any, Dict
from rp.memory.knowledge_store import KnowledgeEntry, KnowledgeStore

View File

@ -3,6 +3,7 @@ import mimetypes
import os
import subprocess
import tempfile
from ..ui.diff_display import display_diff, get_diff_stats
@ -64,9 +65,21 @@ def create_diff(
path2 = os.path.expanduser(file2)
mime_type1, _ = mimetypes.guess_type(str(path1))
mime_type2, _ = mimetypes.guess_type(str(path2))
if not (mime_type1 and (mime_type1.startswith("text/") or mime_type1 in ["application/json", "application/xml"])):
if not (
mime_type1
and (
mime_type1.startswith("text/")
or mime_type1 in ["application/json", "application/xml"]
)
):
return {"status": "error", "error": f"Cannot create diff for binary file: {file1}"}
if not (mime_type2 and (mime_type2.startswith("text/") or mime_type2 in ["application/json", "application/xml"])):
if not (
mime_type2
and (
mime_type2.startswith("text/")
or mime_type2 in ["application/json", "application/xml"]
)
):
return {"status": "error", "error": f"Cannot create diff for binary file: {file2}"}
with open(path1, encoding="utf-8", errors="replace") as f1, open(
path2, encoding="utf-8", errors="replace"
@ -102,9 +115,21 @@ def display_file_diff(filepath1, filepath2, format_type="unified", context_lines
path2 = os.path.expanduser(filepath2)
mime_type1, _ = mimetypes.guess_type(str(path1))
mime_type2, _ = mimetypes.guess_type(str(path2))
if not (mime_type1 and (mime_type1.startswith("text/") or mime_type1 in ["application/json", "application/xml"])):
if not (
mime_type1
and (
mime_type1.startswith("text/")
or mime_type1 in ["application/json", "application/xml"]
)
):
return {"status": "error", "error": f"Cannot display diff for binary file: {filepath1}"}
if not (mime_type2 and (mime_type2.startswith("text/") or mime_type2 in ["application/json", "application/xml"])):
if not (
mime_type2
and (
mime_type2.startswith("text/")
or mime_type2 in ["application/json", "application/xml"]
)
):
return {"status": "error", "error": f"Cannot display diff for binary file: {filepath2}"}
with open(path1, encoding="utf-8", errors="replace") as f1:
old_content = f1.read()

View File

@ -1,7 +1,7 @@
import glob
import os
from typing import List
import re
from typing import List
def glob_files(pattern: str, path: str = ".") -> dict:

View File

@ -1,6 +1,7 @@
from rp.vision import post_image as vision_post_image
import functools
from rp.vision import post_image as vision_post_image
@functools.lru_cache()
def post_image(path: str, prompt: str = None):

View File

@ -1,14 +1,7 @@
import imghdr
import json
import random
import urllib.error
import urllib.parse
import urllib.request
import json
import urllib.parse
import urllib.request
import requests
from typing import Optional, Dict, Any
# Realistic User-Agents
USER_AGENTS = [
@ -24,6 +17,7 @@ USER_AGENTS = [
"Mozilla/5.0 (Android 11; Mobile; rv:68.0) Gecko/68.0 Firefox/88.0",
]
def get_default_headers():
"""Get default realistic headers with variations."""
accept_languages = [
@ -50,7 +44,7 @@ def get_default_headers():
return headers
def http_fetch(url, headers=None):
def http_fetch(url: str, headers: Optional[Dict[str, str]] = None) -> Dict[str, Any]:
"""Fetch content from an HTTP URL.
Args:
@ -61,19 +55,27 @@ def http_fetch(url, headers=None):
Dict with status and content.
"""
try:
request = urllib.request.Request(url)
default_headers = get_default_headers()
if headers:
default_headers.update(headers)
for header_key, header_value in default_headers.items():
request.add_header(header_key, header_value)
with urllib.request.urlopen(request) as response:
content = response.read().decode("utf-8")
return {"status": "success", "content": content[:10000]}
except Exception as exception:
return {"status": "error", "error": str(exception)}
def download_to_file(source_url, destination_path, headers=None):
response = requests.get(url, headers=default_headers, timeout=30)
response.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
content_type = response.headers.get("Content-Type", "").lower()
if "text" in content_type or "json" in content_type or "xml" in content_type:
content = response.text
return {"status": "success", "content": content[:10000]}
else:
content = response.content
return {"status": "success", "content": content}
except requests.exceptions.RequestException as e:
return {"status": "error", "error": str(e)}
def download_to_file(
source_url: str, destination_path: str, headers: Optional[Dict[str, str]] = None
) -> Dict[str, Any]:
"""Download content from an HTTP URL to a file.
Args:
@ -87,45 +89,63 @@ def download_to_file(source_url, destination_path, headers=None):
This function can be used for binary files like images as well.
"""
try:
request = urllib.request.Request(source_url)
default_headers = get_default_headers()
if headers:
default_headers.update(headers)
for header_key, header_value in default_headers.items():
request.add_header(header_key, header_value)
with urllib.request.urlopen(request) as response:
content = response.read()
with open(destination_path, 'wb') as file:
file.write(content)
content_type = response.headers.get('Content-Type', '').lower()
if content_type.startswith('image/'):
response = requests.get(source_url, headers=default_headers, stream=True, timeout=60)
response.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
with open(destination_path, "wb") as file:
for chunk in response.iter_content(chunk_size=8192):
file.write(chunk)
content_type = response.headers.get("Content-Type", "").lower()
if content_type.startswith("image/"):
img_type = imghdr.what(destination_path)
if img_type is None:
return {"status": "success", "downloaded_from": source_url, "downloaded_to": destination_path, "is_valid_image": False, "warning": "Downloaded content is not a valid image, consider finding a different source."}
return {
"status": "success",
"downloaded_from": source_url,
"downloaded_to": destination_path,
"is_valid_image": False,
"warning": "Downloaded content is not a valid image, consider finding a different source.",
}
else:
return {"status": "success", "downloaded_from": source_url, "downloaded_to": destination_path, "is_valid_image": True}
return {
"status": "success",
"downloaded_from": source_url,
"downloaded_to": destination_path,
"is_valid_image": True,
}
else:
return {"status": "success", "downloaded_from": source_url, "downloaded_to": destination_path}
except Exception as exception:
return {"status": "error", "error": str(exception)}
return {
"status": "success",
"downloaded_from": source_url,
"downloaded_to": destination_path,
}
except requests.exceptions.RequestException as e:
return {"status": "error", "error": str(e)}
def _perform_search(base_url, query, params=None):
def _perform_search(
base_url: str, query: str, params: Optional[Dict[str, str]] = None
) -> Dict[str, Any]:
try:
encoded_query = urllib.parse.quote(query)
full_url = f"{base_url}?query={encoded_query}"
request = urllib.request.Request(full_url)
default_headers = get_default_headers()
for header_key, header_value in default_headers.items():
request.add_header(header_key, header_value)
with urllib.request.urlopen(request) as response:
content = response.read().decode("utf-8")
return {"status": "success", "content": json.loads(content)}
except Exception as exception:
return {"status": "error", "error": str(exception)}
search_params = {"query": query}
if params:
search_params.update(params)
response = requests.get(base_url, headers=default_headers, params=search_params, timeout=30)
response.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
return {"status": "success", "content": response.json()}
except requests.exceptions.RequestException as e:
return {"status": "error", "error": str(e)}
def web_search(query):
def web_search(query: str) -> Dict[str, Any]:
"""Perform a web search.
Args:
@ -138,7 +158,7 @@ def web_search(query):
return _perform_search(base_url, query)
def web_search_news(query):
def web_search_news(query: str) -> Dict[str, Any]:
"""Perform a web search for news.
Args:

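From the caller's side the converted helpers keep their dict-based contract; a short sketch, assuming the return shapes shown above (URL and file path are placeholders):

result = http_fetch("https://example.com/api/data.json")
if result["status"] == "success":
    print(result["content"][:200])  # text/JSON/XML bodies are truncated to 10000 characters
else:
    print("fetch failed:", result["error"])

download = download_to_file("https://example.com/logo.png", "/tmp/logo.png")
if download["status"] == "success" and not download.get("is_valid_image", True):
    print(download["warning"])  # image Content-Type whose payload is not a valid image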
View File

@ -1,5 +1,5 @@
import time
import threading
import time
class Colors:

View File

@ -1,5 +1,6 @@
import difflib
from typing import Dict, List, Optional, Tuple
from .colors import Colors

View File

@ -1,5 +1,6 @@
from datetime import datetime
from typing import Dict, List, Optional
from .colors import Colors
from .progress import ProgressBar

View File

@ -1,4 +1,5 @@
import re
from rp.config import LANGUAGE_KEYWORDS
from rp.ui.colors import Colors

View File

@ -1,9 +1,7 @@
import http.client
import argparse
import base64
import json
import http.client
import pathlib
import requests
DEFAULT_URL = "https://static.molodetz.nl/rp.vision.cgi"
@ -12,25 +10,24 @@ def post_image(image_path: str, prompt: str = "", url: str = DEFAULT_URL):
image_path = str(pathlib.Path(image_path).resolve().absolute())
if not url:
url = DEFAULT_URL
url_parts = url.split("/")
host = url_parts[2]
path = "/" + "/".join(url_parts[3:])
with open(image_path, "rb") as file:
image_data = file.read()
base64_data = base64.b64encode(image_data).decode("utf-8")
payload = {"data": base64_data, "path": image_path, "prompt": prompt}
body = json.dumps(payload).encode("utf-8")
headers = {
"Content-Type": "application/json",
"Content-Length": str(len(body)),
"User-Agent": "Python http.client",
"User-Agent": "Python requests",
}
conn = http.client.HTTPSConnection(host)
conn.request("POST", path, body, headers)
resp = conn.getresponse()
data = resp.read()
print("Status:", resp.status, resp.reason)
print(data.decode())
try:
response = requests.post(url, json=payload, headers=headers, timeout=60)
response.raise_for_status() # Raise HTTPError for bad responses (4xx or 5xx)
print("Status:", response.status_code, response.reason)
print(response.text)
except requests.exceptions.RequestException as e:
print(f"Error posting image: {e}")
if __name__ == "__main__":
@ -39,4 +36,4 @@ if __name__ == "__main__":
parser.add_argument("--prompt", default="")
parser.add_argument("--url", default=DEFAULT_URL)
args = parser.parse_args()
post_image(args.url, args.image_path, args.prompt)
post_image(args.image_path, args.prompt, args.url)
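The final hunk also corrects the CLI call site, which previously passed the URL where the image path belongs; with the signature post_image(image_path, prompt="", url=DEFAULT_URL) shown above, direct use reads like this (path and prompt are placeholders):

post_image("screenshots/login.png", prompt="Describe this screenshot")  # url= falls back to DEFAULT_URL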

View File

@ -2,6 +2,7 @@ import re
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Any, Callable, Dict, List, Optional
from .workflow_definition import ExecutionMode, Workflow, WorkflowStep

View File

@ -2,6 +2,7 @@ import json
import sqlite3
import time
from typing import List, Optional
from .workflow_definition import Workflow

View File

@ -1,5 +1,5 @@
import unittest
import json
import unittest
from unittest.mock import patch
from rp.core.api import call_api, list_models

View File

@ -1,28 +1,29 @@
from unittest.mock import Mock, patch
from rp.commands.handlers import (
handle_command,
review_file,
refactor_file,
obfuscate_file,
show_workflows,
execute_workflow_command,
execute_agent_task,
show_agents,
collaborate_agents_command,
search_knowledge,
store_knowledge,
show_conversation_history,
show_cache_stats,
clear_caches,
show_system_stats,
collaborate_agents_command,
execute_agent_task,
execute_workflow_command,
handle_background_command,
start_background_session,
list_background_sessions,
show_session_status,
show_session_output,
send_session_input,
handle_command,
kill_background_session,
list_background_sessions,
obfuscate_file,
refactor_file,
review_file,
search_knowledge,
send_session_input,
show_agents,
show_background_events,
show_cache_stats,
show_conversation_history,
show_session_output,
show_session_status,
show_system_stats,
show_workflows,
start_background_session,
store_knowledge,
)

View File

@ -1,6 +1,6 @@
import os
import sqlite3
import tempfile
import os
import time
from rp.memory.conversation_memory import ConversationMemory
@ -337,8 +337,8 @@ class TestConversationMemory:
def test_thread_safety(self):
"""Test that the memory can handle concurrent access."""
import threading
import queue
import threading
results = queue.Queue()

View File

@ -1,15 +1,16 @@
import pytest
from rp.core.exceptions import (
PRException,
APIException,
APIConnectionError,
APITimeoutError,
APIException,
APIResponseError,
APITimeoutError,
ConfigurationError,
ToolExecutionError,
FileSystemError,
SessionError,
ContextError,
FileSystemError,
PRException,
SessionError,
ToolExecutionError,
ValidationError,
)

View File

@ -1,10 +1,10 @@
from rp.commands.help_docs import (
get_workflow_help,
get_agent_help,
get_knowledge_help,
get_cache_help,
get_background_help,
get_cache_help,
get_full_help,
get_knowledge_help,
get_workflow_help,
)

View File

@ -1,9 +1,9 @@
import os
import sqlite3
import tempfile
import os
import time
from rp.memory.knowledge_store import KnowledgeStore, KnowledgeEntry
from rp.memory.knowledge_store import KnowledgeEntry, KnowledgeStore
class TestKnowledgeStore:
@ -284,8 +284,8 @@ class TestKnowledgeStore:
def test_thread_safety(self):
"""Test that the store can handle concurrent access."""
import threading
import queue
import threading
results = queue.Queue()

View File

@ -1,5 +1,6 @@
from unittest.mock import patch, MagicMock
from rp.core.logging import setup_logging, get_logger
from unittest.mock import MagicMock, patch
from rp.core.logging import get_logger, setup_logging
class TestLogging:

View File

@ -1,13 +1,14 @@
from unittest.mock import Mock, patch
from rp.commands.multiplexer_commands import (
show_sessions,
attach_session,
detach_session,
kill_session,
list_waiting_sessions,
send_command,
show_session_log,
show_session_status,
list_waiting_sessions,
show_sessions,
)

View File

@ -1,5 +1,7 @@
import math
import pytest
from rp.memory.semantic_index import SemanticIndex

View File

@ -1,5 +1,6 @@
import json
from unittest.mock import patch
from rp.ui.output import OutputFormatter