import glob as glob_module
import json
import logging
import os
import readline
import signal
import sqlite3
import sys
import traceback
from concurrent.futures import ThreadPoolExecutor

from pr.commands import handle_command
from pr.config import (
    DB_PATH,
    DEFAULT_API_URL,
    DEFAULT_MODEL,
    HISTORY_FILE,
    LOG_FILE,
    MODEL_LIST_URL,
)
from pr.core.api import call_api
from pr.core.autonomous_interactions import (
    get_global_autonomous,
    stop_global_autonomous,
)
from pr.core.background_monitor import (
    get_global_monitor,
    start_global_monitor,
    stop_global_monitor,
)
from pr.core.context import init_system_message, truncate_tool_result
from pr.tools import get_tools_definition
from pr.tools.agents import (
    collaborate_agents,
    create_agent,
    execute_agent_task,
    list_agents,
    remove_agent,
)
from pr.tools.command import kill_process, run_command, tail_process
from pr.tools.database import db_get, db_query, db_set
from pr.tools.filesystem import (
    chdir,
    clear_edit_tracker,
    display_edit_summary,
    display_edit_timeline,
    getpwd,
    index_source_directory,
    list_directory,
    mkdir,
    read_file,
    search_replace,
    write_file,
)
from pr.tools.interactive_control import (
    close_interactive_session,
    list_active_sessions,
    read_session_output,
    send_input_to_session,
    start_interactive_session,
)
from pr.tools.memory import (
    add_knowledge_entry,
    delete_knowledge_entry,
    get_knowledge_by_category,
    get_knowledge_entry,
    get_knowledge_statistics,
    search_knowledge,
    update_knowledge_importance,
)
from pr.tools.patch import apply_patch, create_diff, display_file_diff
from pr.tools.python_exec import python_exec
from pr.tools.web import http_fetch, web_search, web_search_news
from pr.ui import Colors, render_markdown

logger = logging.getLogger("pr")
logger.setLevel(logging.DEBUG)

file_handler = logging.FileHandler(LOG_FILE)
file_handler.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(message)s"))
logger.addHandler(file_handler)


class Assistant:
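    """Interactive AI assistant: manages conversation state, API configuration,
    tool execution, SQLite persistence, and optional background session monitoring."""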

    def __init__(self, args):
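        """Initialize configuration from args and environment, then set up subsystems."""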
        self.args = args
        self.messages = []
        self.verbose = args.verbose
        self.debug = getattr(args, "debug", False)
        self.syntax_highlighting = not args.no_syntax

        if self.debug:
            console_handler = logging.StreamHandler()
            console_handler.setLevel(logging.DEBUG)
            console_handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
            logger.addHandler(console_handler)
            logger.debug("Debug mode enabled")

        self.api_key = os.environ.get("OPENROUTER_API_KEY", "")
        self.model = args.model or os.environ.get("AI_MODEL", DEFAULT_MODEL)
        self.api_url = args.api_url or os.environ.get("API_URL", DEFAULT_API_URL)
        self.model_list_url = args.model_list_url or os.environ.get(
            "MODEL_LIST_URL", MODEL_LIST_URL
        )
        self.use_tools = os.environ.get("USE_TOOLS", "1") == "1"

        self.interrupt_count = 0
        self.python_globals = {}
        self.db_conn = None
        self.autonomous_mode = False
        self.autonomous_iterations = 0
        self.background_monitoring = False
        self.init_database()
        self.messages.append(init_system_message(args))

        try:
            from pr.core.enhanced_assistant import EnhancedAssistant

            self.enhanced = EnhancedAssistant(self)
            if self.debug:
                logger.debug("Enhanced assistant features initialized")
        except Exception as e:
            logger.warning(f"Could not initialize enhanced features: {e}")
            self.enhanced = None

        # Initialize background monitoring components
        try:
            start_global_monitor()
            autonomous = get_global_autonomous()
            autonomous.start(llm_callback=self._handle_background_updates)
            self.background_monitoring = True
            if self.debug:
                logger.debug("Background monitoring initialized")
        except Exception as e:
            logger.warning(f"Could not initialize background monitoring: {e}")
            self.background_monitoring = False

    def init_database(self):
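        """Open the SQLite store at DB_PATH and create the kv_store and file_versions tables if missing."""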
        try:
            logger.debug(f"Initializing database at {DB_PATH}")
            self.db_conn = sqlite3.connect(DB_PATH, check_same_thread=False)
            cursor = self.db_conn.cursor()

            cursor.execute(
                """CREATE TABLE IF NOT EXISTS kv_store
                (key TEXT PRIMARY KEY, value TEXT, timestamp REAL)"""
            )

            cursor.execute(
                """CREATE TABLE IF NOT EXISTS file_versions
                (id INTEGER PRIMARY KEY AUTOINCREMENT,
                filepath TEXT, content TEXT, hash TEXT,
                timestamp REAL, version INTEGER)"""
            )

            self.db_conn.commit()
            logger.debug("Database initialized successfully")
        except Exception as e:
            logger.error(f"Database initialization error: {e}")
            self.db_conn = None

    def _handle_background_updates(self, updates):
        """Handle background session updates by injecting them into the conversation."""
        if not updates or not updates.get("sessions"):
            return

        # Format the update as a system message
        update_message = self._format_background_update_message(updates)

        # Inject into current conversation if we're in an active session
        if self.messages and len(self.messages) > 0:
            self.messages.append(
                {
                    "role": "system",
                    "content": f"Background session updates: {update_message}",
                }
            )

        if self.verbose:
            print(f"{Colors.CYAN}Background update: {update_message}{Colors.RESET}")

    def _format_background_update_message(self, updates):
        """Format background updates for LLM consumption."""
        session_summaries = []

        for session_name, session_info in updates.get("sessions", {}).items():
            summary = session_info.get("summary", f"Session {session_name}")
            session_summaries.append(f"{session_name}: {summary}")

        if session_summaries:
            return "Active background sessions: " + "; ".join(session_summaries)
        else:
            return "No active background sessions requiring attention."

    def _check_background_updates(self):
        """Check for pending background updates and display them."""
        if not self.background_monitoring:
            return

        try:
            monitor = get_global_monitor()
            events = monitor.get_pending_events()

            if events:
                print(f"\n{Colors.CYAN}Background Events:{Colors.RESET}")
                for event in events:
                    event_type = event.get("type", "unknown")
                    session_name = event.get("session_name", "unknown")

                    if event_type == "session_started":
                        print(f" {Colors.GREEN}✓{Colors.RESET} Session '{session_name}' started")
                    elif event_type == "session_ended":
                        print(f" {Colors.YELLOW}✗{Colors.RESET} Session '{session_name}' ended")
                    elif event_type == "output_received":
                        lines = len(event.get("new_output", {}).get("stdout", []))
                        print(
                            f" {Colors.BLUE}📝{Colors.RESET} Session '{session_name}' produced {lines} lines of output"
                        )
                    elif event_type == "possible_input_needed":
                        print(
                            f" {Colors.RED}❓{Colors.RESET} Session '{session_name}' may need input"
                        )
                    elif event_type == "high_output_volume":
                        total = event.get("total_lines", 0)
                        print(
                            f" {Colors.YELLOW}📊{Colors.RESET} Session '{session_name}' has high output volume ({total} lines)"
                        )
                    elif event_type == "inactive_session":
                        inactive_time = event.get("inactive_seconds", 0)
                        print(
                            f" {Colors.GRAY}⏰{Colors.RESET} Session '{session_name}' inactive for {inactive_time:.0f}s"
                        )

                print()  # Add blank line after events

        except Exception as e:
            if self.debug:
                print(f"{Colors.RED}Error checking background updates: {e}{Colors.RESET}")

    def execute_tool_calls(self, tool_calls):
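        """Run the requested tool calls concurrently and return tool-role result messages."""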
        results = []

        logger.debug(f"Executing {len(tool_calls)} tool call(s)")

        with ThreadPoolExecutor(max_workers=5) as executor:
            futures = []

            for tool_call in tool_calls:
                func_name = tool_call["function"]["name"]
                arguments = json.loads(tool_call["function"]["arguments"])
                logger.debug(f"Tool call: {func_name} with arguments: {arguments}")

                func_map = {
                    "http_fetch": lambda **kw: http_fetch(**kw),
                    "run_command": lambda **kw: run_command(**kw),
                    "tail_process": lambda **kw: tail_process(**kw),
                    "kill_process": lambda **kw: kill_process(**kw),
                    "start_interactive_session": lambda **kw: start_interactive_session(**kw),
                    "send_input_to_session": lambda **kw: send_input_to_session(**kw),
                    "read_session_output": lambda **kw: read_session_output(**kw),
                    "list_active_sessions": lambda **kw: list_active_sessions(**kw),
                    "close_interactive_session": lambda **kw: close_interactive_session(**kw),
                    "read_file": lambda **kw: read_file(**kw, db_conn=self.db_conn),
                    "write_file": lambda **kw: write_file(**kw, db_conn=self.db_conn),
                    "list_directory": lambda **kw: list_directory(**kw),
                    "mkdir": lambda **kw: mkdir(**kw),
                    "chdir": lambda **kw: chdir(**kw),
                    "getpwd": lambda **kw: getpwd(**kw),
                    "db_set": lambda **kw: db_set(**kw, db_conn=self.db_conn),
                    "db_get": lambda **kw: db_get(**kw, db_conn=self.db_conn),
                    "db_query": lambda **kw: db_query(**kw, db_conn=self.db_conn),
                    "web_search": lambda **kw: web_search(**kw),
                    "web_search_news": lambda **kw: web_search_news(**kw),
                    "python_exec": lambda **kw: python_exec(
                        **kw, python_globals=self.python_globals
                    ),
                    "index_source_directory": lambda **kw: index_source_directory(**kw),
                    "search_replace": lambda **kw: search_replace(**kw, db_conn=self.db_conn),
                    # "open_editor": lambda **kw: open_editor(**kw),
                    # "editor_insert_text": lambda **kw: editor_insert_text(
                    #     **kw, db_conn=self.db_conn
                    # ),
                    # "editor_replace_text": lambda **kw: editor_replace_text(
                    #     **kw, db_conn=self.db_conn
                    # ),
                    # "editor_search": lambda **kw: editor_search(**kw),
                    # "close_editor": lambda **kw: close_editor(**kw),
                    "create_diff": lambda **kw: create_diff(**kw),
                    "apply_patch": lambda **kw: apply_patch(**kw, db_conn=self.db_conn),
                    "display_file_diff": lambda **kw: display_file_diff(**kw),
                    "display_edit_summary": lambda **kw: display_edit_summary(),
                    "display_edit_timeline": lambda **kw: display_edit_timeline(**kw),
                    "clear_edit_tracker": lambda **kw: clear_edit_tracker(),
                    "create_agent": lambda **kw: create_agent(**kw),
                    "list_agents": lambda **kw: list_agents(**kw),
                    "execute_agent_task": lambda **kw: execute_agent_task(**kw),
                    "remove_agent": lambda **kw: remove_agent(**kw),
                    "collaborate_agents": lambda **kw: collaborate_agents(**kw),
                    "add_knowledge_entry": lambda **kw: add_knowledge_entry(**kw),
                    "get_knowledge_entry": lambda **kw: get_knowledge_entry(**kw),
                    "search_knowledge": lambda **kw: search_knowledge(**kw),
                    "get_knowledge_by_category": lambda **kw: get_knowledge_by_category(**kw),
                    "update_knowledge_importance": lambda **kw: update_knowledge_importance(**kw),
                    "delete_knowledge_entry": lambda **kw: delete_knowledge_entry(**kw),
                    "get_knowledge_statistics": lambda **kw: get_knowledge_statistics(**kw),
                }

                if func_name in func_map:
                    future = executor.submit(func_map[func_name], **arguments)
                    futures.append((tool_call["id"], future))

            for tool_id, future in futures:
                try:
                    result = future.result(timeout=30)
                    result = truncate_tool_result(result)
                    logger.debug(f"Tool result for {tool_id}: {str(result)[:200]}...")
                    results.append(
                        {
                            "tool_call_id": tool_id,
                            "role": "tool",
                            "content": json.dumps(result),
                        }
                    )
                except Exception as e:
                    logger.debug(f"Tool error for {tool_id}: {str(e)}")
                    error_msg = str(e)[:200]
                    results.append(
                        {
                            "tool_call_id": tool_id,
                            "role": "tool",
                            "content": json.dumps({"status": "error", "error": error_msg}),
                        }
                    )

        return results

    def process_response(self, response):
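        """Append the assistant message, resolve any tool calls (recursively), and return the rendered reply."""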
        if "error" in response:
            return f"Error: {response['error']}"

        if "choices" not in response or not response["choices"]:
            return "No response from API"

        message = response["choices"][0]["message"]
        self.messages.append(message)

        if "tool_calls" in message and message["tool_calls"]:
            if self.verbose:
                print(f"{Colors.YELLOW}Executing tool calls...{Colors.RESET}")

            tool_results = self.execute_tool_calls(message["tool_calls"])

            for result in tool_results:
                self.messages.append(result)

            follow_up = call_api(
                self.messages,
                self.model,
                self.api_url,
                self.api_key,
                self.use_tools,
                get_tools_definition(),
                verbose=self.verbose,
            )
            return self.process_response(follow_up)

        content = message.get("content", "")
        return render_markdown(content, self.syntax_highlighting)

    def signal_handler(self, signum, frame):
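        """Handle Ctrl+C: require a second interrupt before exiting or leaving autonomous mode."""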
        if self.autonomous_mode:
            self.interrupt_count += 1
            if self.interrupt_count >= 2:
                print(f"\n{Colors.RED}Force exiting autonomous mode...{Colors.RESET}")
                self.autonomous_mode = False
                sys.exit(0)
            else:
                print(f"\n{Colors.YELLOW}Press Ctrl+C again to force exit{Colors.RESET}")
            return

        self.interrupt_count += 1
        if self.interrupt_count >= 2:
            print(f"\n{Colors.RED}Exiting...{Colors.RESET}")
            self.cleanup()
            sys.exit(0)
        else:
            print(f"\n{Colors.YELLOW}Press Ctrl+C again to exit{Colors.RESET}")

    def setup_readline(self):
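        """Load persistent history and install tab completion for commands and file paths."""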
        try:
            readline.read_history_file(HISTORY_FILE)
        except FileNotFoundError:
            pass

        readline.set_history_length(1000)

        import atexit

        atexit.register(readline.write_history_file, HISTORY_FILE)

        commands = [
            "exit",
            "quit",
            "help",
            "reset",
            "dump",
            "verbose",
            "models",
            "tools",
            "review",
            "refactor",
            "obfuscate",
            "/auto",
            "/edit",
        ]

        def completer(text, state):
            options = [cmd for cmd in commands if cmd.startswith(text)]

            glob_pattern = os.path.expanduser(text) + "*"
            path_options = glob_module.glob(glob_pattern)

            path_options = [p + os.sep if os.path.isdir(p) else p for p in path_options]

            combined_options = sorted(list(set(options + path_options)))
            # combined_options.extend(self.commands)

            if state < len(combined_options):
                return combined_options[state]

            return None

        delims = readline.get_completer_delims()
        readline.set_completer_delims(delims.replace("/", ""))

        readline.set_completer(completer)
        readline.parse_and_bind("tab: complete")

    def run_repl(self):
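        """Run the interactive prompt loop until the user exits or input ends."""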
        self.setup_readline()
        signal.signal(signal.SIGINT, self.signal_handler)

        print(f"{Colors.BOLD}r{Colors.RESET}")
        print("Type 'help' for commands or start chatting")

        while True:
            try:
                # Check for background updates before prompting user
                if self.background_monitoring:
                    self._check_background_updates()

                # Create prompt with background status
                prompt = f"{Colors.BLUE}You"
                if self.background_monitoring:
                    try:
                        from pr.multiplexer import get_all_sessions

                        sessions = get_all_sessions()
                        active_count = sum(
                            1 for s in sessions.values() if s.get("status") == "running"
                        )
                        if active_count > 0:
                            prompt += f"[{active_count}bg]"
                    except Exception:
                        pass
                prompt += f">{Colors.RESET} "

                user_input = input(prompt).strip()

                if not user_input:
                    continue

                cmd_result = handle_command(self, user_input)

                if cmd_result is False:
                    break
                elif cmd_result is True:
                    continue

                process_message(self, user_input)

            except EOFError:
                break
            except KeyboardInterrupt:
                self.signal_handler(None, None)
            except Exception as e:
                print(f"{Colors.RED}Error: {e}{Colors.RESET}")
                logger.error(f"REPL error: {e}\n{traceback.format_exc()}")

    def run_single(self):
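        """Process a single message from args or stdin by running autonomous mode."""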
        if self.args.message:
            message = self.args.message
        else:
            message = sys.stdin.read()

        from pr.autonomous.mode import run_autonomous_mode

        run_autonomous_mode(self, message)

    def cleanup(self):
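        """Shut down enhanced features, background monitoring, multiplexers, and the database."""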
        if hasattr(self, "enhanced") and self.enhanced:
            try:
                self.enhanced.cleanup()
            except Exception as e:
                logger.error(f"Error cleaning up enhanced features: {e}")

        # Stop background monitoring
        if self.background_monitoring:
            try:
                stop_global_autonomous()
                stop_global_monitor()
            except Exception as e:
                logger.error(f"Error stopping background monitoring: {e}")

        try:
            from pr.multiplexer import cleanup_all_multiplexers

            cleanup_all_multiplexers()
        except Exception as e:
            logger.error(f"Error cleaning up multiplexers: {e}")

        if self.db_conn:
            self.db_conn.close()

    def run(self):
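        """Dispatch to the REPL or single-message mode, always cleaning up on exit."""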
        try:
            logger.debug(
                f"interactive={self.args.interactive}, message={self.args.message}, isatty={sys.stdin.isatty()}"
            )
            if self.args.interactive or (not self.args.message and sys.stdin.isatty()):
                logger.debug("Starting interactive REPL")
                self.run_repl()
            else:
                logger.debug("Running in single-message mode")
                self.run_single()
        finally:
            self.cleanup()


def process_message(assistant, message):
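    """Send a user message to the API (with injected knowledge context) and print the response."""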
    from pr.core.knowledge_context import inject_knowledge_context

    inject_knowledge_context(assistant, message)

    assistant.messages.append({"role": "user", "content": message})

    logger.debug(f"Processing user message: {message[:100]}...")
    logger.debug(f"Current message count: {len(assistant.messages)}")

    if assistant.verbose:
        print(f"{Colors.GRAY}Sending request to API...{Colors.RESET}")

    response = call_api(
        assistant.messages,
        assistant.model,
        assistant.api_url,
        assistant.api_key,
        assistant.use_tools,
        get_tools_definition(),
        verbose=assistant.verbose,
    )
    result = assistant.process_response(response)

    print(f"\n{Colors.GREEN}r:{Colors.RESET} {result}\n")