feat: rename project to "reetor's guide to modern python"
feat: add comprehensive tutorial docs: update readme with installation instructions and quick start guide feat: include information on modern python features and aiohttp refactor: move database get/set/query functions refactor: update autonomous mode response processing refactor: update api call function feat: implement realistic http headers maintenance: update dependencies maintenance: clean up imports and files
This commit is contained in:
parent
1f0444d8c1
commit
8ef3742f44
@ -23,6 +23,14 @@
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Version 1.27.0 - 2025-11-08
|
||||||
|
|
||||||
|
The project has been renamed to "Reetor's Guide to Modern Python" and now includes a comprehensive tutorial. The README has been significantly updated with installation instructions, a quick start guide, and information on modern Python features and aiohttp.
|
||||||
|
|
||||||
|
**Changes:** 3 files, 2728 lines
|
||||||
|
**Languages:** Markdown (2726 lines), TOML (2 lines)
|
||||||
|
|
||||||
## Version 1.26.0 - 2025-11-08
|
## Version 1.26.0 - 2025-11-08
|
||||||
|
|
||||||
|
|||||||
@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "rp"
|
name = "rp"
|
||||||
version = "1.26.0"
|
version = "1.27.0"
|
||||||
description = "R python edition. The ultimate autonomous AI CLI."
|
description = "R python edition. The ultimate autonomous AI CLI."
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
requires-python = ">=3.10"
|
requires-python = ">=3.10"
|
||||||
|
|||||||
@ -7,4 +7,3 @@ websockets==13.0.1
|
|||||||
pytest==8.3.2
|
pytest==8.3.2
|
||||||
bcrypt==4.1.3
|
bcrypt==4.1.3
|
||||||
python-slugify==8.0.4
|
python-slugify==8.0.4
|
||||||
requests>=2.31.0
|
|
||||||
@ -102,7 +102,9 @@ def process_response_autonomous(assistant, response):
|
|||||||
input_tokens = usage.get("prompt_tokens", 0)
|
input_tokens = usage.get("prompt_tokens", 0)
|
||||||
output_tokens = usage.get("completion_tokens", 0)
|
output_tokens = usage.get("completion_tokens", 0)
|
||||||
assistant.usage_tracker.track_request(assistant.model, input_tokens, output_tokens)
|
assistant.usage_tracker.track_request(assistant.model, input_tokens, output_tokens)
|
||||||
cost = assistant.usage_tracker._calculate_cost(assistant.model, input_tokens, output_tokens)
|
cost = assistant.usage_tracker._calculate_cost(
|
||||||
|
assistant.model, input_tokens, output_tokens
|
||||||
|
)
|
||||||
total_cost = assistant.usage_tracker.session_usage["estimated_cost"]
|
total_cost = assistant.usage_tracker.session_usage["estimated_cost"]
|
||||||
print(f"{Colors.YELLOW}💰 Cost: ${cost:.4f} | Total: ${total_cost:.4f}{Colors.RESET}")
|
print(f"{Colors.YELLOW}💰 Cost: ${cost:.4f} | Total: ${total_cost:.4f}{Colors.RESET}")
|
||||||
return process_response_autonomous(assistant, follow_up)
|
return process_response_autonomous(assistant, follow_up)
|
||||||
|
|||||||
@ -7,7 +7,9 @@ from rp.core.http_client import http_client
|
|||||||
logger = logging.getLogger("rp")
|
logger = logging.getLogger("rp")
|
||||||
|
|
||||||
|
|
||||||
def call_api(messages, model, api_url, api_key, use_tools, tools_definition, verbose=False, db_conn=None):
|
def call_api(
|
||||||
|
messages, model, api_url, api_key, use_tools, tools_definition, verbose=False, db_conn=None
|
||||||
|
):
|
||||||
try:
|
try:
|
||||||
messages = auto_slim_messages(messages, verbose=verbose)
|
messages = auto_slim_messages(messages, verbose=verbose)
|
||||||
logger.debug(f"=== API CALL START ===")
|
logger.debug(f"=== API CALL START ===")
|
||||||
@ -38,11 +40,14 @@ def call_api(messages, model, api_url, api_key, use_tools, tools_definition, ver
|
|||||||
if db_conn:
|
if db_conn:
|
||||||
|
|
||||||
from rp.tools.database import log_api_request
|
from rp.tools.database import log_api_request
|
||||||
|
|
||||||
log_result = log_api_request(model, api_url, request_json, db_conn)
|
log_result = log_api_request(model, api_url, request_json, db_conn)
|
||||||
if log_result.get("status") != "success":
|
if log_result.get("status") != "success":
|
||||||
logger.warning(f"Failed to log API request: {log_result.get('error')}")
|
logger.warning(f"Failed to log API request: {log_result.get('error')}")
|
||||||
logger.debug("Sending HTTP request...")
|
logger.debug("Sending HTTP request...")
|
||||||
response = http_client.post(api_url, headers=headers, json_data=request_json, db_conn=db_conn)
|
response = http_client.post(
|
||||||
|
api_url, headers=headers, json_data=request_json, db_conn=db_conn
|
||||||
|
)
|
||||||
if response.get("error"):
|
if response.get("error"):
|
||||||
if "status" in response:
|
if "status" in response:
|
||||||
logger.error(f"API HTTP Error: {response['status']} - {response.get('text', '')}")
|
logger.error(f"API HTTP Error: {response['status']} - {response.get('text', '')}")
|
||||||
|
|||||||
@ -32,7 +32,7 @@ from rp.tools.agents import (
|
|||||||
remove_agent,
|
remove_agent,
|
||||||
)
|
)
|
||||||
from rp.tools.command import kill_process, run_command, tail_process
|
from rp.tools.command import kill_process, run_command, tail_process
|
||||||
from rp.tools.database import db_get, db_query, db_set, log_api_request
|
from rp.tools.database import db_get, db_query, db_set
|
||||||
from rp.tools.filesystem import (
|
from rp.tools.filesystem import (
|
||||||
chdir,
|
chdir,
|
||||||
clear_edit_tracker,
|
clear_edit_tracker,
|
||||||
@ -441,6 +441,7 @@ class Assistant:
|
|||||||
print("No task provided. Exiting.")
|
print("No task provided. Exiting.")
|
||||||
return
|
return
|
||||||
from rp.autonomous import run_autonomous_mode
|
from rp.autonomous import run_autonomous_mode
|
||||||
|
|
||||||
run_autonomous_mode(self, task)
|
run_autonomous_mode(self, task)
|
||||||
|
|
||||||
def cleanup(self):
|
def cleanup(self):
|
||||||
|
|||||||
@ -1,16 +1,60 @@
|
|||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
import random
|
||||||
import time
|
import time
|
||||||
import requests
|
import urllib.error
|
||||||
|
import urllib.parse
|
||||||
|
import urllib.request
|
||||||
from typing import Dict, Any, Optional
|
from typing import Dict, Any, Optional
|
||||||
|
|
||||||
logger = logging.getLogger("rp")
|
logger = logging.getLogger("rp")
|
||||||
|
|
||||||
|
# Realistic User-Agents and headers
|
||||||
|
USER_AGENTS = [
|
||||||
|
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
|
||||||
|
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
|
||||||
|
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
|
||||||
|
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0",
|
||||||
|
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15",
|
||||||
|
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:89.0) Gecko/20100101 Firefox/89.0",
|
||||||
|
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Edge/91.0.864.59",
|
||||||
|
"Mozilla/5.0 (iPhone; CPU iPhone OS 14_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Mobile/15E148 Safari/604.1",
|
||||||
|
"Mozilla/5.0 (iPad; CPU OS 14_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Mobile/15E148 Safari/604.1",
|
||||||
|
"Mozilla/5.0 (Android 11; Mobile; rv:68.0) Gecko/68.0 Firefox/88.0",
|
||||||
|
]
|
||||||
|
|
||||||
|
def get_realistic_headers(additional_headers=None):
|
||||||
|
"""Generate realistic HTTP headers with random User-Agent and variations."""
|
||||||
|
accept_languages = [
|
||||||
|
"en-US,en;q=0.5",
|
||||||
|
"en-US,en;q=0.9",
|
||||||
|
"en-GB,en;q=0.5",
|
||||||
|
"en-US,en;q=0.5;fr;q=0.3",
|
||||||
|
]
|
||||||
|
headers = {
|
||||||
|
"User-Agent": random.choice(USER_AGENTS),
|
||||||
|
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
|
||||||
|
"Accept-Language": random.choice(accept_languages),
|
||||||
|
"Accept-Encoding": "gzip, deflate, br",
|
||||||
|
"DNT": "1",
|
||||||
|
"Connection": "keep-alive",
|
||||||
|
"Upgrade-Insecure-Requests": "1",
|
||||||
|
}
|
||||||
|
# Sometimes add Cache-Control
|
||||||
|
if random.random() < 0.3:
|
||||||
|
headers["Cache-Control"] = "no-cache"
|
||||||
|
# Sometimes add Referer
|
||||||
|
if random.random() < 0.2:
|
||||||
|
headers["Referer"] = "https://www.google.com/"
|
||||||
|
if additional_headers:
|
||||||
|
headers.update(additional_headers)
|
||||||
|
return headers
|
||||||
|
|
||||||
|
|
||||||
class SyncHTTPClient:
|
class SyncHTTPClient:
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.session = requests.Session()
|
self.default_headers = {}
|
||||||
|
|
||||||
def request(
|
def request(
|
||||||
self,
|
self,
|
||||||
@ -22,41 +66,80 @@ class SyncHTTPClient:
|
|||||||
timeout: float = 30.0,
|
timeout: float = 30.0,
|
||||||
db_conn=None,
|
db_conn=None,
|
||||||
) -> Dict[str, Any]:
|
) -> Dict[str, Any]:
|
||||||
"""Make a sync HTTP request using requests with retry logic."""
|
if headers is None:
|
||||||
|
headers = get_realistic_headers()
|
||||||
|
else:
|
||||||
|
headers = get_realistic_headers(headers)
|
||||||
|
|
||||||
|
# Handle JSON data
|
||||||
|
if json_data is not None:
|
||||||
|
data = json.dumps(json_data).encode('utf-8')
|
||||||
|
headers["Content-Type"] = "application/json"
|
||||||
|
|
||||||
|
# Prepare request body for logging
|
||||||
|
if json_data is not None:
|
||||||
|
request_body = json.dumps(json_data)
|
||||||
|
elif data is not None:
|
||||||
|
request_body = data.decode("utf-8") if isinstance(data, bytes) else str(data)
|
||||||
|
else:
|
||||||
|
request_body = ""
|
||||||
|
|
||||||
|
"""Make a sync HTTP request using urllib with retry logic."""
|
||||||
attempt = 0
|
attempt = 0
|
||||||
start_time = time.time()
|
start_time = time.time()
|
||||||
while True:
|
while True:
|
||||||
attempt += 1
|
attempt += 1
|
||||||
try:
|
try:
|
||||||
response = self.session.request(
|
req = urllib.request.Request(url, data=data, headers=headers, method=method)
|
||||||
method,
|
with urllib.request.urlopen(req, timeout=timeout) as response:
|
||||||
url,
|
response_data = response.read().decode('utf-8')
|
||||||
headers=headers,
|
response_headers = dict(response.headers)
|
||||||
data=data,
|
|
||||||
json=json_data,
|
# Create json method
|
||||||
timeout=timeout,
|
def json_method():
|
||||||
)
|
return json.loads(response_data)
|
||||||
response.raise_for_status() # Raise an exception for bad status codes
|
|
||||||
# Prepare request body for logging
|
|
||||||
if json_data is not None:
|
|
||||||
request_body = json.dumps(json_data)
|
|
||||||
elif data is not None:
|
|
||||||
request_body = data.decode('utf-8') if isinstance(data, bytes) else str(data)
|
|
||||||
else:
|
|
||||||
request_body = ""
|
|
||||||
# Log the request
|
# Log the request
|
||||||
if db_conn:
|
if db_conn:
|
||||||
from rp.tools.database import log_http_request
|
from rp.tools.database import log_http_request
|
||||||
log_result = log_http_request(method, url, request_body, response.text, response.status_code, db_conn)
|
|
||||||
|
log_result = log_http_request(
|
||||||
|
method, url, request_body, response_data, response.status, db_conn
|
||||||
|
)
|
||||||
if log_result.get("status") != "success":
|
if log_result.get("status") != "success":
|
||||||
logger.warning(f"Failed to log HTTP request: {log_result.get('error')}")
|
logger.warning(f"Failed to log HTTP request: {log_result.get('error')}")
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"status": response.status_code,
|
"status": response.status,
|
||||||
"headers": dict(response.headers),
|
"headers": response_headers,
|
||||||
"text": response.text,
|
"text": response_data,
|
||||||
"json": response.json,
|
"json": json_method,
|
||||||
}
|
}
|
||||||
except requests.exceptions.Timeout:
|
except urllib.error.HTTPError as e:
|
||||||
|
# For HTTP errors, still try to read the response
|
||||||
|
try:
|
||||||
|
response_data = e.read().decode('utf-8')
|
||||||
|
except:
|
||||||
|
response_data = ""
|
||||||
|
|
||||||
|
# Log the request even on error
|
||||||
|
if db_conn:
|
||||||
|
from rp.tools.database import log_http_request
|
||||||
|
|
||||||
|
log_result = log_http_request(
|
||||||
|
method, url, request_body, response_data, e.code, db_conn
|
||||||
|
)
|
||||||
|
if log_result.get("status") != "success":
|
||||||
|
logger.warning(f"Failed to log HTTP request: {log_result.get('error')}")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": e.code,
|
||||||
|
"headers": dict(e.headers) if e.headers else {},
|
||||||
|
"text": response_data,
|
||||||
|
"json": lambda: json.loads(response_data) if response_data else None,
|
||||||
|
}
|
||||||
|
except urllib.error.URLError as e:
|
||||||
|
if isinstance(e.reason, TimeoutError) or "timeout" in str(e.reason).lower():
|
||||||
elapsed = time.time() - start_time
|
elapsed = time.time() - start_time
|
||||||
elapsed_minutes = int(elapsed // 60)
|
elapsed_minutes = int(elapsed // 60)
|
||||||
elapsed_seconds = elapsed % 60
|
elapsed_seconds = elapsed % 60
|
||||||
@ -69,11 +152,15 @@ class SyncHTTPClient:
|
|||||||
f"Request timed out (attempt {attempt}, duration: {duration_str}). Retrying in {attempt} second(s)..."
|
f"Request timed out (attempt {attempt}, duration: {duration_str}). Retrying in {attempt} second(s)..."
|
||||||
)
|
)
|
||||||
time.sleep(attempt)
|
time.sleep(attempt)
|
||||||
except requests.exceptions.RequestException as e:
|
else:
|
||||||
return {"error": True, "exception": str(e)}
|
return {"error": True, "exception": str(e)}
|
||||||
|
|
||||||
def get(
|
def get(
|
||||||
self, url: str, headers: Optional[Dict[str, str]] = None, timeout: float = 30.0, db_conn=None
|
self,
|
||||||
|
url: str,
|
||||||
|
headers: Optional[Dict[str, str]] = None,
|
||||||
|
timeout: float = 30.0,
|
||||||
|
db_conn=None,
|
||||||
) -> Dict[str, Any]:
|
) -> Dict[str, Any]:
|
||||||
return self.request("GET", url, headers=headers, timeout=timeout, db_conn=db_conn)
|
return self.request("GET", url, headers=headers, timeout=timeout, db_conn=db_conn)
|
||||||
|
|
||||||
@ -87,11 +174,17 @@ class SyncHTTPClient:
|
|||||||
db_conn=None,
|
db_conn=None,
|
||||||
) -> Dict[str, Any]:
|
) -> Dict[str, Any]:
|
||||||
return self.request(
|
return self.request(
|
||||||
"POST", url, headers=headers, data=data, json_data=json_data, timeout=timeout, db_conn=db_conn
|
"POST",
|
||||||
|
url,
|
||||||
|
headers=headers,
|
||||||
|
data=data,
|
||||||
|
json_data=json_data,
|
||||||
|
timeout=timeout,
|
||||||
|
db_conn=db_conn,
|
||||||
)
|
)
|
||||||
|
|
||||||
def set_default_headers(self, headers: Dict[str, str]):
|
def set_default_headers(self, headers: Dict[str, str]):
|
||||||
self.session.headers.update(headers)
|
self.default_headers.update(headers)
|
||||||
|
|
||||||
|
|
||||||
http_client = SyncHTTPClient()
|
http_client = SyncHTTPClient()
|
||||||
|
|||||||
@ -350,7 +350,9 @@ class RPEditor:
|
|||||||
height, _ = self.stdscr.getmaxyx()
|
height, _ = self.stdscr.getmaxyx()
|
||||||
page_size = height - 2
|
page_size = height - 2
|
||||||
self.cursor_y = min(len(self.lines) - 1, self.cursor_y + page_size)
|
self.cursor_y = min(len(self.lines) - 1, self.cursor_y + page_size)
|
||||||
self.scroll_y = min(max(0, len(self.lines) - height + 1), self.scroll_y + page_size)
|
self.scroll_y = min(
|
||||||
|
max(0, len(self.lines) - height + 1), self.scroll_y + page_size
|
||||||
|
)
|
||||||
self.prev_key = key
|
self.prev_key = key
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|||||||
@ -17,7 +17,27 @@ from rp.tools.editor import (
|
|||||||
open_editor,
|
open_editor,
|
||||||
)
|
)
|
||||||
from rp.tools.filesystem import (
|
from rp.tools.filesystem import (
|
||||||
get_uid, read_specific_lines, replace_specific_line, insert_line_at_position, delete_specific_line, read_file, write_file, list_directory, mkdir, chdir, getpwd, index_source_directory, search_replace, get_editor, close_editor, open_editor, editor_insert_text, editor_replace_text, display_edit_summary, display_edit_timeline, clear_edit_tracker
|
get_uid,
|
||||||
|
read_specific_lines,
|
||||||
|
replace_specific_line,
|
||||||
|
insert_line_at_position,
|
||||||
|
delete_specific_line,
|
||||||
|
read_file,
|
||||||
|
write_file,
|
||||||
|
list_directory,
|
||||||
|
mkdir,
|
||||||
|
chdir,
|
||||||
|
getpwd,
|
||||||
|
index_source_directory,
|
||||||
|
search_replace,
|
||||||
|
get_editor,
|
||||||
|
close_editor,
|
||||||
|
open_editor,
|
||||||
|
editor_insert_text,
|
||||||
|
editor_replace_text,
|
||||||
|
display_edit_summary,
|
||||||
|
display_edit_timeline,
|
||||||
|
clear_edit_tracker,
|
||||||
)
|
)
|
||||||
from rp.tools.lsp import get_diagnostics
|
from rp.tools.lsp import get_diagnostics
|
||||||
from rp.tools.memory import (
|
from rp.tools.memory import (
|
||||||
@ -32,7 +52,7 @@ from rp.tools.memory import (
|
|||||||
from rp.tools.patch import apply_patch, create_diff
|
from rp.tools.patch import apply_patch, create_diff
|
||||||
from rp.tools.python_exec import python_exec
|
from rp.tools.python_exec import python_exec
|
||||||
from rp.tools.search import glob_files, grep
|
from rp.tools.search import glob_files, grep
|
||||||
from rp.tools.web import http_fetch, web_search, web_search_news
|
from rp.tools.web import http_fetch, web_search, web_search_news, download_to_file
|
||||||
|
|
||||||
# Aliases for user-requested tool names
|
# Aliases for user-requested tool names
|
||||||
view = read_file
|
view = read_file
|
||||||
@ -61,6 +81,7 @@ __all__ = [
|
|||||||
"db_set",
|
"db_set",
|
||||||
"delete_knowledge_entry",
|
"delete_knowledge_entry",
|
||||||
"delete_specific_line",
|
"delete_specific_line",
|
||||||
|
"download_to_file",
|
||||||
"diagnostics",
|
"diagnostics",
|
||||||
"display_edit_summary",
|
"display_edit_summary",
|
||||||
"display_edit_timeline",
|
"display_edit_timeline",
|
||||||
@ -107,4 +128,3 @@ __all__ = [
|
|||||||
"write",
|
"write",
|
||||||
"write_file",
|
"write_file",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|||||||
@ -76,6 +76,7 @@ def db_query(query, db_conn):
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {"status": "error", "error": str(e)}
|
return {"status": "error", "error": str(e)}
|
||||||
|
|
||||||
|
|
||||||
def log_api_request(model, api_url, request_payload, db_conn):
|
def log_api_request(model, api_url, request_payload, db_conn):
|
||||||
"""Log an API request to the database.
|
"""Log an API request to the database.
|
||||||
|
|
||||||
@ -101,6 +102,7 @@ def log_api_request(model, api_url, request_payload, db_conn):
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {"status": "error", "error": str(e)}
|
return {"status": "error", "error": str(e)}
|
||||||
|
|
||||||
|
|
||||||
def log_http_request(method, url, request_body, response_body, status_code, db_conn):
|
def log_http_request(method, url, request_body, response_body, status_code, db_conn):
|
||||||
"""Log an HTTP request to the database.
|
"""Log an HTTP request to the database.
|
||||||
|
|
||||||
|
|||||||
@ -16,7 +16,9 @@ def get_uid():
|
|||||||
return _id
|
return _id
|
||||||
|
|
||||||
|
|
||||||
def read_specific_lines(filepath: str, start_line: int, end_line: Optional[int] = None, db_conn: Optional[Any] = None) -> dict:
|
def read_specific_lines(
|
||||||
|
filepath: str, start_line: int, end_line: Optional[int] = None, db_conn: Optional[Any] = None
|
||||||
|
) -> dict:
|
||||||
"""
|
"""
|
||||||
Read specific lines or a range of lines from a file.
|
Read specific lines or a range of lines from a file.
|
||||||
|
|
||||||
@ -46,26 +48,39 @@ def read_specific_lines(filepath: str, start_line: int, end_line: Optional[int]
|
|||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
path = os.path.expanduser(filepath)
|
path = os.path.expanduser(filepath)
|
||||||
with open(path, 'r') as file:
|
with open(path, "r") as file:
|
||||||
lines = file.readlines()
|
lines = file.readlines()
|
||||||
total_lines = len(lines)
|
total_lines = len(lines)
|
||||||
if start_line < 1 or start_line > total_lines:
|
if start_line < 1 or start_line > total_lines:
|
||||||
return {"status": "error", "error": f"Start line {start_line} is out of range. File has {total_lines} lines."}
|
return {
|
||||||
|
"status": "error",
|
||||||
|
"error": f"Start line {start_line} is out of range. File has {total_lines} lines.",
|
||||||
|
}
|
||||||
if end_line is None:
|
if end_line is None:
|
||||||
end_line = start_line
|
end_line = start_line
|
||||||
if end_line < start_line or end_line > total_lines:
|
if end_line < start_line or end_line > total_lines:
|
||||||
return {"status": "error", "error": f"End line {end_line} is out of range. File has {total_lines} lines."}
|
return {
|
||||||
selected_lines = lines[start_line - 1:end_line]
|
"status": "error",
|
||||||
content = ''.join(selected_lines)
|
"error": f"End line {end_line} is out of range. File has {total_lines} lines.",
|
||||||
|
}
|
||||||
|
selected_lines = lines[start_line - 1 : end_line]
|
||||||
|
content = "".join(selected_lines)
|
||||||
if db_conn:
|
if db_conn:
|
||||||
from rp.tools.database import db_set
|
from rp.tools.database import db_set
|
||||||
|
|
||||||
db_set("read:" + path, "true", db_conn)
|
db_set("read:" + path, "true", db_conn)
|
||||||
return {"status": "success", "content": content}
|
return {"status": "success", "content": content}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {"status": "error", "error": str(e)}
|
return {"status": "error", "error": str(e)}
|
||||||
|
|
||||||
|
|
||||||
def replace_specific_line(filepath: str, line_number: int, new_content: str, db_conn: Optional[Any] = None, show_diff: bool = True) -> dict:
|
def replace_specific_line(
|
||||||
|
filepath: str,
|
||||||
|
line_number: int,
|
||||||
|
new_content: str,
|
||||||
|
db_conn: Optional[Any] = None,
|
||||||
|
show_diff: bool = True,
|
||||||
|
) -> dict:
|
||||||
"""
|
"""
|
||||||
Replace the content of a specific line in a file.
|
Replace the content of a specific line in a file.
|
||||||
|
|
||||||
@ -97,18 +112,27 @@ def replace_specific_line(filepath: str, line_number: int, new_content: str, db_
|
|||||||
return {"status": "error", "error": "File does not exist"}
|
return {"status": "error", "error": "File does not exist"}
|
||||||
if db_conn:
|
if db_conn:
|
||||||
from rp.tools.database import db_get
|
from rp.tools.database import db_get
|
||||||
|
|
||||||
read_status = db_get("read:" + path, db_conn)
|
read_status = db_get("read:" + path, db_conn)
|
||||||
if read_status.get("status") != "success" or read_status.get("value") != "true":
|
if read_status.get("status") != "success" or read_status.get("value") != "true":
|
||||||
return {"status": "error", "error": "File must be read before writing. Please read the file first."}
|
return {
|
||||||
with open(path, 'r') as file:
|
"status": "error",
|
||||||
|
"error": "File must be read before writing. Please read the file first.",
|
||||||
|
}
|
||||||
|
with open(path, "r") as file:
|
||||||
lines = file.readlines()
|
lines = file.readlines()
|
||||||
total_lines = len(lines)
|
total_lines = len(lines)
|
||||||
if line_number < 1 or line_number > total_lines:
|
if line_number < 1 or line_number > total_lines:
|
||||||
return {"status": "error", "error": f"Line number {line_number} is out of range. File has {total_lines} lines."}
|
return {
|
||||||
old_content = ''.join(lines)
|
"status": "error",
|
||||||
lines[line_number - 1] = new_content + '\n' if not new_content.endswith('\n') else new_content
|
"error": f"Line number {line_number} is out of range. File has {total_lines} lines.",
|
||||||
new_full_content = ''.join(lines)
|
}
|
||||||
with open(path, 'w') as file:
|
old_content = "".join(lines)
|
||||||
|
lines[line_number - 1] = (
|
||||||
|
new_content + "\n" if not new_content.endswith("\n") else new_content
|
||||||
|
)
|
||||||
|
new_full_content = "".join(lines)
|
||||||
|
with open(path, "w") as file:
|
||||||
file.writelines(lines)
|
file.writelines(lines)
|
||||||
if show_diff:
|
if show_diff:
|
||||||
diff_result = display_content_diff(old_content, new_full_content, filepath)
|
diff_result = display_content_diff(old_content, new_full_content, filepath)
|
||||||
@ -119,7 +143,13 @@ def replace_specific_line(filepath: str, line_number: int, new_content: str, db_
|
|||||||
return {"status": "error", "error": str(e)}
|
return {"status": "error", "error": str(e)}
|
||||||
|
|
||||||
|
|
||||||
def insert_line_at_position(filepath: str, line_number: int, new_content: str, db_conn: Optional[Any] = None, show_diff: bool = True) -> dict:
|
def insert_line_at_position(
|
||||||
|
filepath: str,
|
||||||
|
line_number: int,
|
||||||
|
new_content: str,
|
||||||
|
db_conn: Optional[Any] = None,
|
||||||
|
show_diff: bool = True,
|
||||||
|
) -> dict:
|
||||||
"""
|
"""
|
||||||
Insert a new line at a specific position in a file.
|
Insert a new line at a specific position in a file.
|
||||||
|
|
||||||
@ -148,27 +178,38 @@ def insert_line_at_position(filepath: str, line_number: int, new_content: str, d
|
|||||||
return {"status": "error", "error": "File does not exist"}
|
return {"status": "error", "error": "File does not exist"}
|
||||||
if db_conn:
|
if db_conn:
|
||||||
from rp.tools.database import db_get
|
from rp.tools.database import db_get
|
||||||
|
|
||||||
read_status = db_get("read:" + path, db_conn)
|
read_status = db_get("read:" + path, db_conn)
|
||||||
if read_status.get("status") != "success" or read_status.get("value") != "true":
|
if read_status.get("status") != "success" or read_status.get("value") != "true":
|
||||||
return {"status": "error", "error": "File must be read before writing. Please read the file first."}
|
return {
|
||||||
with open(path, 'r') as file:
|
"status": "error",
|
||||||
|
"error": "File must be read before writing. Please read the file first.",
|
||||||
|
}
|
||||||
|
with open(path, "r") as file:
|
||||||
lines = file.readlines()
|
lines = file.readlines()
|
||||||
old_content = ''.join(lines)
|
old_content = "".join(lines)
|
||||||
insert_index = min(line_number - 1, len(lines))
|
insert_index = min(line_number - 1, len(lines))
|
||||||
lines.insert(insert_index, new_content + '\n' if not new_content.endswith('\n') else new_content)
|
lines.insert(
|
||||||
new_full_content = ''.join(lines)
|
insert_index, new_content + "\n" if not new_content.endswith("\n") else new_content
|
||||||
with open(path, 'w') as file:
|
)
|
||||||
|
new_full_content = "".join(lines)
|
||||||
|
with open(path, "w") as file:
|
||||||
file.writelines(lines)
|
file.writelines(lines)
|
||||||
if show_diff:
|
if show_diff:
|
||||||
diff_result = display_content_diff(old_content, new_full_content, filepath)
|
diff_result = display_content_diff(old_content, new_full_content, filepath)
|
||||||
if diff_result["status"] == "success":
|
if diff_result["status"] == "success":
|
||||||
print(diff_result["visual_diff"])
|
print(diff_result["visual_diff"])
|
||||||
return {"status": "success", "message": f"Inserted line at position {line_number} in {path}"}
|
return {
|
||||||
|
"status": "success",
|
||||||
|
"message": f"Inserted line at position {line_number} in {path}",
|
||||||
|
}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {"status": "error", "error": str(e)}
|
return {"status": "error", "error": str(e)}
|
||||||
|
|
||||||
|
|
||||||
def delete_specific_line(filepath: str, line_number: int, db_conn: Optional[Any] = None, show_diff: bool = True) -> dict:
|
def delete_specific_line(
|
||||||
|
filepath: str, line_number: int, db_conn: Optional[Any] = None, show_diff: bool = True
|
||||||
|
) -> dict:
|
||||||
"""
|
"""
|
||||||
Delete a specific line from a file.
|
Delete a specific line from a file.
|
||||||
|
|
||||||
@ -195,18 +236,25 @@ def delete_specific_line(filepath: str, line_number: int, db_conn: Optional[Any]
|
|||||||
return {"status": "error", "error": "File does not exist"}
|
return {"status": "error", "error": "File does not exist"}
|
||||||
if db_conn:
|
if db_conn:
|
||||||
from rp.tools.database import db_get
|
from rp.tools.database import db_get
|
||||||
|
|
||||||
read_status = db_get("read:" + path, db_conn)
|
read_status = db_get("read:" + path, db_conn)
|
||||||
if read_status.get("status") != "success" or read_status.get("value") != "true":
|
if read_status.get("status") != "success" or read_status.get("value") != "true":
|
||||||
return {"status": "error", "error": "File must be read before writing. Please read the file first."}
|
return {
|
||||||
with open(path, 'r') as file:
|
"status": "error",
|
||||||
|
"error": "File must be read before writing. Please read the file first.",
|
||||||
|
}
|
||||||
|
with open(path, "r") as file:
|
||||||
lines = file.readlines()
|
lines = file.readlines()
|
||||||
total_lines = len(lines)
|
total_lines = len(lines)
|
||||||
if line_number < 1 or line_number > total_lines:
|
if line_number < 1 or line_number > total_lines:
|
||||||
return {"status": "error", "error": f"Line number {line_number} is out of range. File has {total_lines} lines."}
|
return {
|
||||||
old_content = ''.join(lines)
|
"status": "error",
|
||||||
|
"error": f"Line number {line_number} is out of range. File has {total_lines} lines.",
|
||||||
|
}
|
||||||
|
old_content = "".join(lines)
|
||||||
del lines[line_number - 1]
|
del lines[line_number - 1]
|
||||||
new_full_content = ''.join(lines)
|
new_full_content = "".join(lines)
|
||||||
with open(path, 'w') as file:
|
with open(path, "w") as file:
|
||||||
file.writelines(lines)
|
file.writelines(lines)
|
||||||
if show_diff:
|
if show_diff:
|
||||||
diff_result = display_content_diff(old_content, new_full_content, filepath)
|
diff_result = display_content_diff(old_content, new_full_content, filepath)
|
||||||
@ -582,4 +630,3 @@ def clear_edit_tracker():
|
|||||||
|
|
||||||
clear_tracker()
|
clear_tracker()
|
||||||
return {"status": "success", "message": "Edit tracker cleared"}
|
return {"status": "success", "message": "Edit tracker cleared"}
|
||||||
|
|
||||||
|
|||||||
@ -1,4 +1,6 @@
|
|||||||
|
import imghdr
|
||||||
import json
|
import json
|
||||||
|
import random
|
||||||
import urllib.error
|
import urllib.error
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
import urllib.request
|
import urllib.request
|
||||||
@ -8,6 +10,45 @@ import json
|
|||||||
import urllib.parse
|
import urllib.parse
|
||||||
import urllib.request
|
import urllib.request
|
||||||
|
|
||||||
|
# Realistic User-Agents rotated into outgoing requests so traffic does not
# look like a single scripted client.
USER_AGENTS = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15",
    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:89.0) Gecko/20100101 Firefox/89.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Edge/91.0.864.59",
    "Mozilla/5.0 (iPhone; CPU iPhone OS 14_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Mobile/15E148 Safari/604.1",
    "Mozilla/5.0 (iPad; CPU OS 14_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Mobile/15E148 Safari/604.1",
    "Mozilla/5.0 (Android 11; Mobile; rv:68.0) Gecko/68.0 Firefox/88.0",
]


def get_default_headers():
    """Build a realistic-looking set of default HTTP headers.

    Returns:
        Dict of header name -> value with a randomly selected User-Agent and
        Accept-Language, plus occasional Cache-Control / Referer entries so
        consecutive requests are not byte-identical.
    """
    language_pool = (
        "en-US,en;q=0.5",
        "en-US,en;q=0.9",
        "en-GB,en;q=0.5",
        "en-US,en;q=0.5;fr;q=0.3",
    )
    headers = {
        "User-Agent": random.choice(USER_AGENTS),
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "Accept-Language": random.choice(language_pool),
        "Accept-Encoding": "gzip, deflate, br",
        "DNT": "1",
        "Connection": "keep-alive",
        "Upgrade-Insecure-Requests": "1",
    }
    # Occasionally mix in extra headers so the header set varies per request.
    optional_headers = (
        (0.3, "Cache-Control", "no-cache"),
        (0.2, "Referer", "https://www.google.com/"),
    )
    for probability, name, value in optional_headers:
        if random.random() < probability:
            headers[name] = value
    return headers
|
||||||
|
|
||||||
|
|
||||||
def http_fetch(url, headers=None):
    """Fetch content from an HTTP URL.

    Args:
        url: The URL to fetch.
        headers: Optional dict of extra HTTP headers; entries override the
            generated defaults on key collision.

    Returns:
        Dict with status and content on success, or status and error on failure.
    """
    try:
        request = urllib.request.Request(url)
        default_headers = get_default_headers()
        if headers:
            default_headers.update(headers)
        # Bug fix: urllib does not transparently decompress responses, so
        # advertising "br" (no brotli in the stdlib) and then decoding the raw
        # body as UTF-8 fails on compressed replies.  Only advertise encodings
        # we decompress below, unless the caller explicitly set their own.
        if not headers or "Accept-Encoding" not in headers:
            default_headers["Accept-Encoding"] = "gzip, deflate"
        for header_key, header_value in default_headers.items():
            request.add_header(header_key, header_value)
        with urllib.request.urlopen(request) as response:
            raw = response.read()
            content_encoding = response.headers.get("Content-Encoding", "").lower()
        if content_encoding == "gzip":
            raw = gzip.decompress(raw)
        elif content_encoding == "deflate":
            try:
                raw = zlib.decompress(raw)  # zlib-wrapped deflate
            except zlib.error:
                raw = zlib.decompress(raw, -zlib.MAX_WBITS)  # raw deflate
        content = raw.decode("utf-8")
        return {"status": "success", "content": content}
    except Exception as exception:
        return {"status": "error", "error": str(exception)}
|
||||||
|
|
||||||
|
def _looks_like_image(path):
    """Return True when the file at *path* begins with a known image signature.

    Stdlib replacement for the deprecated ``imghdr`` module (removed in
    Python 3.13); covers the common web formats: PNG, JPEG, GIF, BMP,
    WEBP and TIFF.
    """
    try:
        with open(path, "rb") as file:
            head = file.read(12)
    except OSError:
        return False
    if head.startswith(b"\x89PNG\r\n\x1a\n"):
        return True
    if head.startswith(b"\xff\xd8\xff"):  # JPEG
        return True
    if head.startswith((b"GIF87a", b"GIF89a")):
        return True
    if head.startswith(b"BM"):  # BMP
        return True
    if head.startswith(b"RIFF") and head[8:12] == b"WEBP":
        return True
    if head.startswith((b"II*\x00", b"MM\x00*")):  # TIFF, little/big endian
        return True
    return False


def download_to_file(source_url, destination_path, headers=None):
    """Download content from an HTTP URL to a file.

    Args:
        source_url: The URL to download from.
        destination_path: The path to save the downloaded content.
        headers: Optional HTTP headers; entries override the generated defaults.

    Returns:
        Dict with status, downloaded_from, and downloaded_to on success, or
        status and error on failure.  For ``image/*`` responses an
        ``is_valid_image`` flag (plus a warning when validation fails) is added.

    This function can be used for binary files like images as well.
    """
    try:
        request = urllib.request.Request(source_url)
        default_headers = get_default_headers()
        if headers:
            default_headers.update(headers)
        # Bug fix: urllib never decompresses for us, and the generated defaults
        # advertise gzip/br — writing a compressed body straight to disk would
        # corrupt binary downloads.  Ask for an uncompressed body instead,
        # unless the caller explicitly set their own Accept-Encoding.
        if not headers or "Accept-Encoding" not in headers:
            default_headers["Accept-Encoding"] = "identity"
        for header_key, header_value in default_headers.items():
            request.add_header(header_key, header_value)
        with urllib.request.urlopen(request) as response:
            content = response.read()
            content_encoding = response.headers.get("Content-Encoding", "").lower()
            content_type = response.headers.get("Content-Type", "").lower()
        # Defensive: some servers compress even when asked not to.
        if content_encoding == "gzip":
            content = gzip.decompress(content)
        elif content_encoding == "deflate":
            try:
                content = zlib.decompress(content)
            except zlib.error:
                content = zlib.decompress(content, -zlib.MAX_WBITS)
        with open(destination_path, "wb") as file:
            file.write(content)
        result = {"status": "success", "downloaded_from": source_url, "downloaded_to": destination_path}
        if content_type.startswith("image/"):
            if _looks_like_image(destination_path):
                result["is_valid_image"] = True
            else:
                result["is_valid_image"] = False
                result["warning"] = "Downloaded content is not a valid image, consider finding a different source."
        return result
    except Exception as exception:
        return {"status": "error", "error": str(exception)}
|
||||||
|
|
||||||
|
|
||||||
def _perform_search(base_url, query, params=None):
    """Query a search endpoint and decode its JSON response.

    Args:
        base_url: Endpoint URL without a query string.
        query: Search terms; sent as the ``query`` parameter.
        params: Optional dict of extra query-string parameters.

    Returns:
        Dict with status and parsed JSON content on success, or status and
        error on failure.
    """
    try:
        query_params = {"query": query}
        if params:
            # Bug fix: the params argument was previously accepted but ignored.
            query_params.update(params)
        # quote_via=quote keeps the original %20 escaping for spaces.
        full_url = f"{base_url}?{urllib.parse.urlencode(query_params, quote_via=urllib.parse.quote)}"
        request = urllib.request.Request(full_url)
        default_headers = get_default_headers()
        # Bug fix: only advertise encodings the stdlib can decompress (no
        # brotli), and actually decompress before decoding as UTF-8.
        default_headers["Accept-Encoding"] = "gzip, deflate"
        for header_key, header_value in default_headers.items():
            request.add_header(header_key, header_value)
        with urllib.request.urlopen(request) as response:
            raw = response.read()
            content_encoding = response.headers.get("Content-Encoding", "").lower()
        if content_encoding == "gzip":
            raw = gzip.decompress(raw)
        elif content_encoding == "deflate":
            try:
                raw = zlib.decompress(raw)
            except zlib.error:
                raw = zlib.decompress(raw, -zlib.MAX_WBITS)
        content = raw.decode("utf-8")
        return {"status": "success", "content": json.loads(content)}
    except Exception as exception:
        return {"status": "error", "error": str(exception)}
||||||
@ -66,4 +149,3 @@ def web_search_news(query):
|
|||||||
"""
|
"""
|
||||||
base_url = "https://search.molodetz.nl/search"
|
base_url = "https://search.molodetz.nl/search"
|
||||||
return _perform_search(base_url, query)
|
return _perform_search(base_url, query)
|
||||||
|
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user