import json
import os

import pytest

from rp.core.usage_tracker import UsageTracker


@pytest.fixture
def temp_usage_file(tmp_path, monkeypatch):
    """Point USAGE_DB_FILE at a temporary file and clean it up afterwards."""
    from rp.core import usage_tracker

    original_file = usage_tracker.USAGE_DB_FILE
    temp_file = str(tmp_path / "usage.json")
    monkeypatch.setattr(usage_tracker, "USAGE_DB_FILE", temp_file)
    yield temp_file
    if os.path.exists(temp_file):
        os.remove(temp_file)
    monkeypatch.setattr(usage_tracker, "USAGE_DB_FILE", original_file)


def test_usage_tracker_init():
    tracker = UsageTracker()
    summary = tracker.get_session_summary()
    assert summary["requests"] == 0
    assert summary["total_tokens"] == 0
    assert summary["estimated_cost"] == 0.0


def test_track_request_known_model():
    tracker = UsageTracker()
    tracker.track_request("gpt-3.5-turbo", 100, 50)

    summary = tracker.get_session_summary()
    assert summary["requests"] == 1
    assert summary["input_tokens"] == 100
    assert summary["output_tokens"] == 50
    assert summary["total_tokens"] == 150
    assert "gpt-3.5-turbo" in summary["models_used"]
    # Cost: (100/1000)*0.0005 + (50/1000)*0.0015 = 0.00005 + 0.000075 = 0.000125
    assert abs(summary["estimated_cost"] - 0.000125) < 1e-6


def test_track_request_unknown_model():
    tracker = UsageTracker()
    tracker.track_request("unknown-model", 100, 50)

    summary = tracker.get_session_summary()
    assert summary["requests"] == 1
    assert summary["estimated_cost"] == 0.0  # Unknown model, cost 0


def test_track_request_multiple():
    tracker = UsageTracker()
    tracker.track_request("gpt-3.5-turbo", 100, 50)
    tracker.track_request("gpt-4", 200, 100)

    summary = tracker.get_session_summary()
    assert summary["requests"] == 2
    assert summary["input_tokens"] == 300
    assert summary["output_tokens"] == 150
    assert summary["total_tokens"] == 450
    assert len(summary["models_used"]) == 2


def test_get_formatted_summary():
    tracker = UsageTracker()
    tracker.track_request("gpt-3.5-turbo", 100, 50)

    formatted = tracker.get_formatted_summary()
    assert "Total Requests: 1" in formatted
    assert "Total Tokens: 150" in formatted
    assert "Estimated Cost: $0.0001" in formatted
    assert "gpt-3.5-turbo" in formatted


def test_get_total_usage_no_file(temp_usage_file):
    total = UsageTracker.get_total_usage()
    assert total["total_requests"] == 0
    assert total["total_tokens"] == 0
    assert total["total_cost"] == 0.0


def test_get_total_usage_with_data(temp_usage_file):
    # Manually create history file
    history = [
        {
            "timestamp": "2023-01-01",
            "model": "gpt-3.5-turbo",
            "input_tokens": 100,
            "output_tokens": 50,
            "total_tokens": 150,
            "cost": 0.000125,
        },
        {
            "timestamp": "2023-01-02",
            "model": "gpt-4",
            "input_tokens": 200,
            "output_tokens": 100,
            "total_tokens": 300,
            "cost": 0.008,
        },
    ]
    with open(temp_usage_file, "w") as f:
        json.dump(history, f)

    total = UsageTracker.get_total_usage()
    assert total["total_requests"] == 2
    assert total["total_tokens"] == 450
    assert abs(total["total_cost"] - 0.008125) < 1e-6