retoor 2025-10-03 04:29:16 +02:00
parent 6da5999860
commit 9457562600
2 changed files with 182 additions and 837 deletions

benchmark.py (440 changes) Executable file → Normal file

@ -24,6 +24,7 @@ class RequestMetrics:
status: int
success: bool
error: Optional[str] = None
filename: Optional[str] = None # To track created/moved resources
@dataclass
@ -36,31 +37,33 @@ class MethodStats:
total_duration: float = 0.0
durations: List[float] = field(default_factory=list)
errors: Dict[str, int] = field(default_factory=lambda: defaultdict(int))
@property
def success_rate(self) -> float:
return (self.successful_requests / self.total_requests * 100) if self.total_requests > 0 else 0
@property
def avg_duration(self) -> float:
return self.total_duration / self.total_requests if self.total_requests > 0 else 0
@property
def requests_per_second(self) -> float:
# A more accurate per-method RPS is the request count over total benchmark time;
# this property is therefore not used in the final report and is kept only for potential future use.
return self.total_requests / self.total_duration if self.total_duration > 0 else 0
@property
def min_duration(self) -> float:
return min(self.durations) if self.durations else 0
@property
def max_duration(self) -> float:
return max(self.durations) if self.durations else 0
@property
def p50_duration(self) -> float:
return statistics.median(self.durations) if self.durations else 0
@property
def p95_duration(self) -> float:
if not self.durations:
@ -68,7 +71,7 @@ class MethodStats:
sorted_durations = sorted(self.durations)
index = int(len(sorted_durations) * 0.95)
return sorted_durations[index] if index < len(sorted_durations) else sorted_durations[-1]
@property
def p99_duration(self) -> float:
if not self.durations:
@ -80,8 +83,8 @@ class MethodStats:
class WebDAVBenchmark:
"""WebDAV server benchmark runner"""
def __init__(self, url: str, username: str, password: str,
concurrency: int = 50, duration: int = 60):
self.url = url.rstrip('/')
self.username = username
@ -89,29 +92,31 @@ class WebDAVBenchmark:
self.concurrency = concurrency
self.duration = duration
self.stats: Dict[str, MethodStats] = defaultdict(lambda: MethodStats(method=""))
self.start_time = 0.0
self.stop_flag = False
self.auth = aiohttp.BasicAuth(username, password)
def random_string(self, length: int = 10) -> str:
"""Generate random string"""
return ''.join(random.choices(string.ascii_letters + string.digits, k=length))
async def record_metric(self, metric: RequestMetrics):
"""Record a request metric"""
stats = self.stats[metric.method]
if not stats.method:
stats.method = metric.method
stats.total_requests += 1
stats.total_duration += metric.duration
stats.durations.append(metric.duration)
if metric.success:
stats.successful_requests += 1
else:
stats.failed_requests += 1
error_key = f"Status {metric.status}" if metric.status != 0 else str(metric.error)
stats.errors[error_key] += 1
async def benchmark_options(self, session: aiohttp.ClientSession) -> RequestMetrics:
"""Benchmark OPTIONS request"""
start = time.time()
@ -119,351 +124,266 @@ class WebDAVBenchmark:
async with session.options(self.url, auth=self.auth) as resp:
duration = time.time() - start
return RequestMetrics(
method='OPTIONS', duration=duration, status=resp.status,
success=resp.status == 200
)
except Exception as e:
return RequestMetrics(
method='OPTIONS', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_propfind(self, session: aiohttp.ClientSession, depth: int = 0) -> RequestMetrics:
"""Benchmark PROPFIND request"""
propfind_body = '''<?xml version="1.0"?>
<D:propfind xmlns:D="DAV:">
<D:allprop/>
</D:propfind>'''
start = time.time()
try:
async with session.request(
'PROPFIND', self.url, auth=self.auth, data=propfind_body,
headers={'Depth': str(depth), 'Content-Type': 'application/xml'}
) as resp:
await resp.read()
duration = time.time() - start
return RequestMetrics(
method='PROPFIND', duration=duration, status=resp.status,
success=resp.status == 207
)
except Exception as e:
return RequestMetrics(
method='PROPFIND', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_put(self, session: aiohttp.ClientSession) -> RequestMetrics:
"""Benchmark PUT request"""
filename = f"bench_{self.random_string()}.txt"
content = self.random_string(1024).encode()
start = time.time()
try:
async with session.put(f"{self.url}/{filename}", auth=self.auth, data=content) as resp:
duration = time.time() - start
is_success = resp.status in [201, 204]
return RequestMetrics(
method='PUT', duration=duration, status=resp.status,
success=is_success,
filename=filename if is_success else None
)
except Exception as e:
return RequestMetrics(
method='PUT', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_get(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
"""Benchmark GET request"""
start = time.time()
try:
print(f"{self.url}/{filename}")
async with session.get(
f"{self.url}/{filename}",
auth=self.auth
) as resp:
await resp.read() # Consume response
async with session.get(f"{self.url}/{filename}", auth=self.auth) as resp:
await resp.read()
duration = time.time() - start
return RequestMetrics(
method='GET', duration=duration, status=resp.status,
success=resp.status == 200
)
except Exception as e:
return RequestMetrics(
method='GET', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_head(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
"""Benchmark HEAD request"""
start = time.time()
try:
async with session.head(f"{self.url}/{filename}", auth=self.auth) as resp:
duration = time.time() - start
return RequestMetrics(
method='HEAD', duration=duration, status=resp.status,
success=resp.status == 200
)
except Exception as e:
return RequestMetrics(
method='HEAD', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_mkcol(self, session: aiohttp.ClientSession) -> RequestMetrics:
"""Benchmark MKCOL request"""
dirname = f"bench_dir_{self.random_string()}"
start = time.time()
try:
async with session.request('MKCOL', f"{self.url}/{dirname}/", auth=self.auth) as resp:
duration = time.time() - start
is_success = resp.status == 201
return RequestMetrics(
method='MKCOL', duration=duration, status=resp.status,
success=is_success,
filename=dirname if is_success else None
)
except Exception as e:
return RequestMetrics(
method='MKCOL', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_proppatch(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
"""Benchmark PROPPATCH request"""
proppatch_body = '''<?xml version="1.0"?>
<D:propertyupdate xmlns:D="DAV:">
<D:set><D:prop><D:displayname>Benchmark Test</D:displayname></D:prop></D:set>
</D:propertyupdate>'''
start = time.time()
try:
async with session.request(
'PROPPATCH', f"{self.url}/{filename}", auth=self.auth, data=proppatch_body,
headers={'Content-Type': 'application/xml'}
) as resp:
await resp.read()
duration = time.time() - start
return RequestMetrics(
method='PROPPATCH', duration=duration, status=resp.status,
success=resp.status == 207
)
except Exception as e:
return RequestMetrics(
method='PROPPATCH', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_copy(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
"""Benchmark COPY request"""
dest_filename = f"copy_{self.random_string()}.txt"
start = time.time()
try:
async with session.request(
'COPY', f"{self.url}/{filename}", auth=self.auth,
headers={'Destination': f"{self.url}/{dest_filename}"}
) as resp:
duration = time.time() - start
is_success = resp.status in [201, 204]
return RequestMetrics(
method='COPY', duration=duration, status=resp.status,
success=is_success,
filename=dest_filename if is_success else None
)
except Exception as e:
return RequestMetrics(
method='COPY', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_move(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
"""Benchmark MOVE request"""
dest_filename = f"moved_{self.random_string()}.txt"
start = time.time()
try:
async with session.request(
'MOVE', f"{self.url}/{filename}", auth=self.auth,
headers={'Destination': f"{self.url}/{dest_filename}"}
) as resp:
duration = time.time() - start
is_success = resp.status in [201, 204]
return RequestMetrics(
method='MOVE', duration=duration, status=resp.status,
success=is_success,
filename=dest_filename if is_success else None
)
except Exception as e:
return RequestMetrics(
method='MOVE', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_lock(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
"""Benchmark LOCK request"""
lock_body = '''<?xml version="1.0"?>
<D:lockinfo xmlns:D="DAV:">
<D:lockscope><D:exclusive/></D:lockscope><D:locktype><D:write/></D:locktype>
<D:owner><D:href>benchmark</D:href></D:owner>
</D:lockinfo>'''
start = time.time()
try:
async with session.request(
'LOCK', f"{self.url}/{filename}", auth=self.auth, data=lock_body,
headers={'Content-Type': 'application/xml', 'Timeout': 'Second-300'}
) as resp:
lock_token = resp.headers.get('Lock-Token', '').strip('<>')
await resp.read()
duration = time.time() - start
is_success = resp.status == 200
if is_success and lock_token:
try:
async with session.request(
'UNLOCK', f"{self.url}/{filename}", auth=self.auth,
headers={'Lock-Token': f'<{lock_token}>'}
):
pass
except Exception:
pass
return RequestMetrics(
method='LOCK', duration=duration, status=resp.status,
success=is_success
)
except Exception as e:
return RequestMetrics(
method='LOCK', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_delete(self, session: aiohttp.ClientSession, resource_name: str) -> RequestMetrics:
"""Benchmark DELETE request for files or directories"""
start = time.time()
try:
# Add trailing slash for directories for some servers
url_path = f"{self.url}/{resource_name}"
if "dir" in resource_name:
url_path += "/"
async with session.delete(url_path, auth=self.auth) as resp:
duration = time.time() - start
return RequestMetrics(
method='DELETE', duration=duration, status=resp.status,
success=resp.status == 204
)
except Exception as e:
return RequestMetrics(
method='DELETE', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def worker(self, worker_id: int, session: aiohttp.ClientSession):
"""Worker coroutine that runs various benchmarks"""
test_files = []
test_dirs = []
# Create an initial test file to ensure other operations can start
metric = await self.benchmark_put(session)
await self.record_metric(metric)
if metric.success and metric.filename:
test_files.append(metric.filename)
while not self.stop_flag:
elapsed = time.time() - self.start_time
if elapsed >= self.duration:
self.stop_flag = True
break
# Weighted random choice
operations = [
'options', 'propfind', 'put', 'get', 'head',
'mkcol', 'proppatch', 'copy', 'move', 'lock', 'delete'
]
# Ensure some operations are more frequent
weights = [5, 5, 15, 15, 10, 5, 5, 5, 5, 5, 20] # More PUT, GET, DELETE
operation = random.choices(operations, weights=weights, k=1)[0]
metric = None
try:
if operation == 'options':
metric = await self.benchmark_options(session)
@ -474,9 +394,8 @@ class WebDAVBenchmark:
elif operation == 'put':
metric = await self.benchmark_put(session)
if metric.success and metric.filename:
test_files.append(metric.filename)
elif operation == 'get' and test_files:
filename = random.choice(test_files)
@ -485,44 +404,49 @@ class WebDAVBenchmark:
elif operation == 'head' and test_files:
filename = random.choice(test_files)
metric = await self.benchmark_head(session, filename)
elif operation == 'mkcol':
metric = await self.benchmark_mkcol(session)
if metric.success and metric.filename:
test_dirs.append(metric.filename)
elif operation == 'proppatch' and test_files:
filename = random.choice(test_files)
metric = await self.benchmark_proppatch(session, filename)
elif operation == 'copy' and test_files:
filename = random.choice(test_files)
metric = await self.benchmark_copy(session, filename)
if metric.success and metric.filename:
test_files.append(metric.filename)
elif operation == 'move' and len(test_files) > 1:
filename_to_move = test_files.pop(random.randrange(len(test_files)))
metric = await self.benchmark_move(session, filename_to_move)
if metric.success and metric.filename:
test_files.append(metric.filename)
elif operation == 'lock' and test_files:
filename = random.choice(test_files)
metric = await self.benchmark_lock(session, filename)
elif operation == 'delete':
# Randomly delete a file or a directory
if test_dirs and random.random() < 0.2: # 20% chance to delete a dir
dir_to_delete = test_dirs.pop(random.randrange(len(test_dirs)))
metric = await self.benchmark_delete(session, dir_to_delete)
elif len(test_files) > 1:
file_to_delete = test_files.pop(random.randrange(len(test_files)))
metric = await self.benchmark_delete(session, file_to_delete)
if metric:
await self.record_metric(metric)
except Exception as e:
print(f"Worker {worker_id} error: {e}")
await asyncio.sleep(0.01) # Small delay to prevent tight loop on empty lists
async def run(self):
"""Run the benchmark"""
print("="*80)
@ -534,31 +458,27 @@ class WebDAVBenchmark:
print(f"User: {self.username}")
print("="*80)
print()
connector = aiohttp.TCPConnector(limit=self.concurrency * 2)
timeout = aiohttp.ClientTimeout(total=30)
async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session:
self.start_time = time.time()
workers = [
asyncio.create_task(self.worker(i, session))
for i in range(self.concurrency)
]
progress_task = asyncio.create_task(self.show_progress())
await asyncio.gather(*workers, return_exceptions=True)
self.stop_flag = True
await progress_task
self.print_results()
async def show_progress(self):
"""Show progress during benchmark"""
while not self.stop_flag:
@ -568,10 +488,9 @@ class WebDAVBenchmark:
total_requests = sum(s.total_requests for s in self.stats.values())
print(f"\rProgress: {elapsed:.1f}s / {self.duration}s | Total Requests: {total_requests}", end='', flush=True)
await asyncio.sleep(0.5)
print()
def print_results(self):
"""Print benchmark results"""
print("\n")
@ -579,22 +498,24 @@ class WebDAVBenchmark:
print("BENCHMARK RESULTS")
print("="*80)
print()
total_duration = time.time() - self.start_time
total_requests = sum(s.total_requests for s in self.stats.values())
total_success = sum(s.successful_requests for s in self.stats.values())
total_failed = total_requests - total_success
success_rate = (total_success / total_requests * 100) if total_requests > 0 else 0
failed_rate = (total_failed / total_requests * 100) if total_requests > 0 else 0
print(f"Total Duration: {total_duration:.2f}s")
print(f"Total Requests: {total_requests:,}")
print(f"Successful: {total_success:,} ({total_success/total_requests*100:.1f}%)")
print(f"Failed: {total_failed:,} ({total_failed/total_requests*100:.1f}%)")
print(f"Successful: {total_success:,} ({success_rate:.1f}%)")
print(f"Failed: {total_failed:,} ({failed_rate:.1f}%)")
print(f"Overall RPS: {total_requests/total_duration:.2f}")
print()
sorted_stats = sorted(self.stats.values(), key=lambda s: s.total_requests, reverse=True)
print("="*80)
print("PER-METHOD STATISTICS")
print("="*80)
@ -604,17 +525,20 @@ class WebDAVBenchmark:
if stats.total_requests == 0:
continue
# Calculate RPS based on total benchmark duration for better comparison
method_rps = stats.total_requests / total_duration
print(f"Method: {stats.method}")
print(f" Requests: {stats.total_requests:>8,}")
print(f" Success Rate: {stats.success_rate:>8.2f}%")
print(f" RPS: {stats.requests_per_second:>8.2f}")
print(f" Requests: {stats.total_requests:>8,}")
print(f" Success Rate: {stats.success_rate:>8.2f}%")
print(f" RPS: {method_rps:>8.2f}")
print(f" Latency (ms):")
print(f" Min: {stats.min_duration*1000:>8.2f}")
print(f" Avg: {stats.avg_duration*1000:>8.2f}")
print(f" P50: {stats.p50_duration*1000:>8.2f}")
print(f" P95: {stats.p95_duration*1000:>8.2f}")
print(f" P99: {stats.p99_duration*1000:>8.2f}")
print(f" Max: {stats.max_duration*1000:>8.2f}")
print(f" Min: {stats.min_duration*1000:>8.2f}")
print(f" Avg: {stats.avg_duration*1000:>8.2f}")
print(f" P50: {stats.p50_duration*1000:>8.2f}")
print(f" P95: {stats.p95_duration*1000:>8.2f}")
print(f" P99: {stats.p99_duration*1000:>8.2f}")
print(f" Max: {stats.max_duration*1000:>8.2f}")
if stats.failed_requests > 0 and stats.errors:
print(f" Errors:")
@ -623,7 +547,7 @@ class WebDAVBenchmark:
print(f" {error_short}: {count}")
print()
print("="*80)
@ -634,9 +558,9 @@ async def main():
parser.add_argument('username', help='Username for authentication')
parser.add_argument('password', help='Password for authentication')
parser.add_argument('-c', '--concurrency', type=int, default=50,
help='Number of concurrent workers (default: 50)')
parser.add_argument('-d', '--duration', type=int, default=60,
help='Benchmark duration in seconds (default: 60)')
args = parser.parse_args()
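
The CLI surface is unchanged by this commit. As a quick reference, a minimal sketch of driving the refactored class programmatically instead of through argparse; the URL, credentials, and the small concurrency/duration values are placeholder assumptions:

import asyncio
from benchmark import WebDAVBenchmark  # assumes benchmark.py is importable from the working directory

async def smoke_run():
    # Deliberately light placeholder settings for a quick smoke test, not a real load profile
    bench = WebDAVBenchmark(
        url="http://localhost:8080/",
        username="user",
        password="pass",
        concurrency=5,
        duration=10,
    )
    await bench.run()

if __name__ == '__main__':
    asyncio.run(smoke_run())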


@ -1,579 +0,0 @@
#!/usr/bin/env python3
"""
WebDAV Server Concurrent Benchmark Tool
Heavy load testing with performance metrics per method
"""
import asyncio
import aiohttp
import time
import argparse
import statistics
from dataclasses import dataclass, field
from typing import List, Dict, Optional
from collections import defaultdict
import random
import string
@dataclass
class RequestMetrics:
"""Metrics for a single request"""
method: str
duration: float
status: int
success: bool
error: Optional[str] = None
filename: Optional[str] = None # To track created/moved resources
@dataclass
class MethodStats:
"""Statistics for a specific HTTP method"""
method: str
total_requests: int = 0
successful_requests: int = 0
failed_requests: int = 0
total_duration: float = 0.0
durations: List[float] = field(default_factory=list)
errors: Dict[str, int] = field(default_factory=lambda: defaultdict(int))
@property
def success_rate(self) -> float:
return (self.successful_requests / self.total_requests * 100) if self.total_requests > 0 else 0
@property
def avg_duration(self) -> float:
return self.total_duration / self.total_requests if self.total_requests > 0 else 0
@property
def requests_per_second(self) -> float:
# A more accurate per-method RPS is the request count over total benchmark time;
# this property is therefore not used in the final report and is kept only for potential future use.
return self.total_requests / self.total_duration if self.total_duration > 0 else 0
@property
def min_duration(self) -> float:
return min(self.durations) if self.durations else 0
@property
def max_duration(self) -> float:
return max(self.durations) if self.durations else 0
@property
def p50_duration(self) -> float:
return statistics.median(self.durations) if self.durations else 0
@property
def p95_duration(self) -> float:
if not self.durations:
return 0
sorted_durations = sorted(self.durations)
index = int(len(sorted_durations) * 0.95)
return sorted_durations[index] if index < len(sorted_durations) else sorted_durations[-1]
@property
def p99_duration(self) -> float:
if not self.durations:
return 0
sorted_durations = sorted(self.durations)
index = int(len(sorted_durations) * 0.99)
return sorted_durations[index] if index < len(sorted_durations) else sorted_durations[-1]
class WebDAVBenchmark:
"""WebDAV server benchmark runner"""
def __init__(self, url: str, username: str, password: str,
concurrency: int = 50, duration: int = 60):
self.url = url.rstrip('/')
self.username = username
self.password = password
self.concurrency = concurrency
self.duration = duration
self.stats: Dict[str, MethodStats] = defaultdict(lambda: MethodStats(method=""))
self.start_time = 0.0
self.stop_flag = False
self.auth = aiohttp.BasicAuth(username, password)
def random_string(self, length: int = 10) -> str:
"""Generate random string"""
return ''.join(random.choices(string.ascii_letters + string.digits, k=length))
async def record_metric(self, metric: RequestMetrics):
"""Record a request metric"""
stats = self.stats[metric.method]
if not stats.method:
stats.method = metric.method
stats.total_requests += 1
stats.total_duration += metric.duration
stats.durations.append(metric.duration)
if metric.success:
stats.successful_requests += 1
else:
stats.failed_requests += 1
error_key = f"Status {metric.status}" if metric.status != 0 else str(metric.error)
stats.errors[error_key] += 1
async def benchmark_options(self, session: aiohttp.ClientSession) -> RequestMetrics:
"""Benchmark OPTIONS request"""
start = time.time()
try:
async with session.options(self.url, auth=self.auth) as resp:
duration = time.time() - start
return RequestMetrics(
method='OPTIONS', duration=duration, status=resp.status,
success=resp.status == 200
)
except Exception as e:
return RequestMetrics(
method='OPTIONS', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_propfind(self, session: aiohttp.ClientSession, depth: int = 0) -> RequestMetrics:
"""Benchmark PROPFIND request"""
propfind_body = '''<?xml version="1.0"?>
<D:propfind xmlns:D="DAV:">
<D:allprop/>
</D:propfind>'''
start = time.time()
try:
async with session.request(
'PROPFIND', self.url, auth=self.auth, data=propfind_body,
headers={'Depth': str(depth), 'Content-Type': 'application/xml'}
) as resp:
await resp.read()
duration = time.time() - start
return RequestMetrics(
method='PROPFIND', duration=duration, status=resp.status,
success=resp.status == 207
)
except Exception as e:
return RequestMetrics(
method='PROPFIND', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_put(self, session: aiohttp.ClientSession) -> RequestMetrics:
"""Benchmark PUT request"""
filename = f"bench_{self.random_string()}.txt"
content = self.random_string(1024).encode()
start = time.time()
try:
async with session.put(f"{self.url}/{filename}", auth=self.auth, data=content) as resp:
duration = time.time() - start
is_success = resp.status in [201, 204]
return RequestMetrics(
method='PUT', duration=duration, status=resp.status,
success=is_success,
filename=filename if is_success else None
)
except Exception as e:
return RequestMetrics(
method='PUT', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_get(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
"""Benchmark GET request"""
start = time.time()
try:
async with session.get(f"{self.url}/{filename}", auth=self.auth) as resp:
await resp.read()
duration = time.time() - start
return RequestMetrics(
method='GET', duration=duration, status=resp.status,
success=resp.status == 200
)
except Exception as e:
return RequestMetrics(
method='GET', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_head(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
"""Benchmark HEAD request"""
start = time.time()
try:
async with session.head(f"{self.url}/{filename}", auth=self.auth) as resp:
duration = time.time() - start
return RequestMetrics(
method='HEAD', duration=duration, status=resp.status,
success=resp.status == 200
)
except Exception as e:
return RequestMetrics(
method='HEAD', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_mkcol(self, session: aiohttp.ClientSession) -> RequestMetrics:
"""Benchmark MKCOL request"""
dirname = f"bench_dir_{self.random_string()}"
start = time.time()
try:
async with session.request('MKCOL', f"{self.url}/{dirname}/", auth=self.auth) as resp:
duration = time.time() - start
is_success = resp.status == 201
return RequestMetrics(
method='MKCOL', duration=duration, status=resp.status,
success=is_success,
filename=dirname if is_success else None
)
except Exception as e:
return RequestMetrics(
method='MKCOL', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_proppatch(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
"""Benchmark PROPPATCH request"""
proppatch_body = '''<?xml version="1.0"?>
<D:propertyupdate xmlns:D="DAV:">
<D:set><D:prop><D:displayname>Benchmark Test</D:displayname></D:prop></D:set>
</D:propertyupdate>'''
start = time.time()
try:
async with session.request(
'PROPPATCH', f"{self.url}/{filename}", auth=self.auth, data=proppatch_body,
headers={'Content-Type': 'application/xml'}
) as resp:
await resp.read()
duration = time.time() - start
return RequestMetrics(
method='PROPPATCH', duration=duration, status=resp.status,
success=resp.status == 207
)
except Exception as e:
return RequestMetrics(
method='PROPPATCH', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_copy(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
"""Benchmark COPY request"""
dest_filename = f"copy_{self.random_string()}.txt"
start = time.time()
try:
async with session.request(
'COPY', f"{self.url}/{filename}", auth=self.auth,
headers={'Destination': f"{self.url}/{dest_filename}"}
) as resp:
duration = time.time() - start
is_success = resp.status in [201, 204]
return RequestMetrics(
method='COPY', duration=duration, status=resp.status,
success=is_success,
filename=dest_filename if is_success else None
)
except Exception as e:
return RequestMetrics(
method='COPY', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_move(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
"""Benchmark MOVE request"""
dest_filename = f"moved_{self.random_string()}.txt"
start = time.time()
try:
async with session.request(
'MOVE', f"{self.url}/{filename}", auth=self.auth,
headers={'Destination': f"{self.url}/{dest_filename}"}
) as resp:
duration = time.time() - start
is_success = resp.status in [201, 204]
return RequestMetrics(
method='MOVE', duration=duration, status=resp.status,
success=is_success,
filename=dest_filename if is_success else None
)
except Exception as e:
return RequestMetrics(
method='MOVE', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_lock(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
"""Benchmark LOCK request"""
lock_body = '''<?xml version="1.0"?>
<D:lockinfo xmlns:D="DAV:">
<D:lockscope><D:exclusive/></D:lockscope><D:locktype><D:write/></D:locktype>
<D:owner><D:href>benchmark</D:href></D:owner>
</D:lockinfo>'''
start = time.time()
try:
async with session.request(
'LOCK', f"{self.url}/{filename}", auth=self.auth, data=lock_body,
headers={'Content-Type': 'application/xml', 'Timeout': 'Second-300'}
) as resp:
lock_token = resp.headers.get('Lock-Token', '').strip('<>')
await resp.read()
duration = time.time() - start
is_success = resp.status == 200
if is_success and lock_token:
try:
async with session.request(
'UNLOCK', f"{self.url}/{filename}", auth=self.auth,
headers={'Lock-Token': f'<{lock_token}>'}
):
pass
except Exception:
pass
return RequestMetrics(
method='LOCK', duration=duration, status=resp.status,
success=is_success
)
except Exception as e:
return RequestMetrics(
method='LOCK', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def benchmark_delete(self, session: aiohttp.ClientSession, resource_name: str) -> RequestMetrics:
"""Benchmark DELETE request for files or directories"""
start = time.time()
try:
# Add trailing slash for directories for some servers
url_path = f"{self.url}/{resource_name}"
if "dir" in resource_name:
url_path += "/"
async with session.delete(url_path, auth=self.auth) as resp:
duration = time.time() - start
return RequestMetrics(
method='DELETE', duration=duration, status=resp.status,
success=resp.status == 204
)
except Exception as e:
return RequestMetrics(
method='DELETE', duration=time.time() - start, status=0,
success=False, error=str(e)
)
async def worker(self, worker_id: int, session: aiohttp.ClientSession):
"""Worker coroutine that runs various benchmarks"""
test_files = []
test_dirs = []
# Create an initial test file to ensure other operations can start
metric = await self.benchmark_put(session)
await self.record_metric(metric)
if metric.success and metric.filename:
test_files.append(metric.filename)
while not self.stop_flag:
elapsed = time.time() - self.start_time
if elapsed >= self.duration:
self.stop_flag = True
break
# Weighted random choice
operations = [
'options', 'propfind', 'put', 'get', 'head',
'mkcol', 'proppatch', 'copy', 'move', 'lock', 'delete'
]
# Ensure some operations are more frequent
weights = [5, 5, 15, 15, 10, 5, 5, 5, 5, 5, 20] # More PUT, GET, DELETE
operation = random.choices(operations, weights=weights, k=1)[0]
metric = None
try:
if operation == 'options':
metric = await self.benchmark_options(session)
elif operation == 'propfind':
depth = random.choice([0, 1])
metric = await self.benchmark_propfind(session, depth)
elif operation == 'put':
metric = await self.benchmark_put(session)
if metric.success and metric.filename:
test_files.append(metric.filename)
elif operation == 'get' and test_files:
filename = random.choice(test_files)
metric = await self.benchmark_get(session, filename)
elif operation == 'head' and test_files:
filename = random.choice(test_files)
metric = await self.benchmark_head(session, filename)
elif operation == 'mkcol':
metric = await self.benchmark_mkcol(session)
if metric.success and metric.filename:
test_dirs.append(metric.filename)
elif operation == 'proppatch' and test_files:
filename = random.choice(test_files)
metric = await self.benchmark_proppatch(session, filename)
elif operation == 'copy' and test_files:
filename = random.choice(test_files)
metric = await self.benchmark_copy(session, filename)
if metric.success and metric.filename:
test_files.append(metric.filename)
elif operation == 'move' and len(test_files) > 1:
filename_to_move = test_files.pop(random.randrange(len(test_files)))
metric = await self.benchmark_move(session, filename_to_move)
if metric.success and metric.filename:
test_files.append(metric.filename)
elif operation == 'lock' and test_files:
filename = random.choice(test_files)
metric = await self.benchmark_lock(session, filename)
elif operation == 'delete':
# Randomly delete a file or a directory
if test_dirs and random.random() < 0.2: # 20% chance to delete a dir
dir_to_delete = test_dirs.pop(random.randrange(len(test_dirs)))
metric = await self.benchmark_delete(session, dir_to_delete)
elif len(test_files) > 1:
file_to_delete = test_files.pop(random.randrange(len(test_files)))
metric = await self.benchmark_delete(session, file_to_delete)
if metric:
await self.record_metric(metric)
except Exception as e:
print(f"Worker {worker_id} error: {e}")
await asyncio.sleep(0.01) # Small delay to prevent tight loop on empty lists
async def run(self):
"""Run the benchmark"""
print("="*80)
print("WebDAV Server Concurrent Benchmark")
print("="*80)
print(f"URL: {self.url}")
print(f"Concurrency: {self.concurrency} workers")
print(f"Duration: {self.duration} seconds")
print(f"User: {self.username}")
print("="*80)
print()
connector = aiohttp.TCPConnector(limit=self.concurrency * 2)
timeout = aiohttp.ClientTimeout(total=30)
async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session:
self.start_time = time.time()
workers = [
asyncio.create_task(self.worker(i, session))
for i in range(self.concurrency)
]
progress_task = asyncio.create_task(self.show_progress())
await asyncio.gather(*workers, return_exceptions=True)
self.stop_flag = True
await progress_task
self.print_results()
async def show_progress(self):
"""Show progress during benchmark"""
while not self.stop_flag:
elapsed = time.time() - self.start_time
if elapsed >= self.duration:
break
total_requests = sum(s.total_requests for s in self.stats.values())
print(f"\rProgress: {elapsed:.1f}s / {self.duration}s | Total Requests: {total_requests}", end='', flush=True)
await asyncio.sleep(0.5)
print()
def print_results(self):
"""Print benchmark results"""
print("\n")
print("="*80)
print("BENCHMARK RESULTS")
print("="*80)
print()
total_duration = time.time() - self.start_time
total_requests = sum(s.total_requests for s in self.stats.values())
total_success = sum(s.successful_requests for s in self.stats.values())
total_failed = total_requests - total_success
success_rate = (total_success / total_requests * 100) if total_requests > 0 else 0
failed_rate = (total_failed / total_requests * 100) if total_requests > 0 else 0
print(f"Total Duration: {total_duration:.2f}s")
print(f"Total Requests: {total_requests:,}")
print(f"Successful: {total_success:,} ({success_rate:.1f}%)")
print(f"Failed: {total_failed:,} ({failed_rate:.1f}%)")
print(f"Overall RPS: {total_requests/total_duration:.2f}")
print()
sorted_stats = sorted(self.stats.values(), key=lambda s: s.total_requests, reverse=True)
print("="*80)
print("PER-METHOD STATISTICS")
print("="*80)
print()
for stats in sorted_stats:
if stats.total_requests == 0:
continue
# Calculate RPS based on total benchmark duration for better comparison
method_rps = stats.total_requests / total_duration
print(f"Method: {stats.method}")
print(f" Requests: {stats.total_requests:>8,}")
print(f" Success Rate: {stats.success_rate:>8.2f}%")
print(f" RPS: {method_rps:>8.2f}")
print(f" Latency (ms):")
print(f" Min: {stats.min_duration*1000:>8.2f}")
print(f" Avg: {stats.avg_duration*1000:>8.2f}")
print(f" P50: {stats.p50_duration*1000:>8.2f}")
print(f" P95: {stats.p95_duration*1000:>8.2f}")
print(f" P99: {stats.p99_duration*1000:>8.2f}")
print(f" Max: {stats.max_duration*1000:>8.2f}")
if stats.failed_requests > 0 and stats.errors:
print(f" Errors:")
for error, count in sorted(stats.errors.items(), key=lambda x: x[1], reverse=True)[:5]:
error_short = error[:60] + '...' if len(error) > 60 else error
print(f" {error_short}: {count}")
print()
print("="*80)
async def main():
"""Main entry point"""
parser = argparse.ArgumentParser(description='WebDAV Server Concurrent Benchmark')
parser.add_argument('url', help='WebDAV server URL (e.g., http://localhost:8080/)')
parser.add_argument('username', help='Username for authentication')
parser.add_argument('password', help='Password for authentication')
parser.add_argument('-c', '--concurrency', type=int, default=50,
help='Number of concurrent workers (default: 50)')
parser.add_argument('-d', '--duration', type=int, default=60,
help='Benchmark duration in seconds (default: 60)')
args = parser.parse_args()
benchmark = WebDAVBenchmark(
url=args.url,
username=args.username,
password=args.password,
concurrency=args.concurrency,
duration=args.duration
)
await benchmark.run()
if __name__ == '__main__':
asyncio.run(main())
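
As a sanity check on the nearest-rank percentile logic in p95_duration/p99_duration above, a small worked example; the duration values are made up for illustration:

# Mirrors the index math used by p95_duration; the latencies (seconds) are invented
durations = [0.010, 0.020, 0.030, 0.040, 0.100]
sorted_durations = sorted(durations)
index = int(len(sorted_durations) * 0.95)  # int(5 * 0.95) = int(4.75) -> 4
p95 = sorted_durations[index] if index < len(sorted_durations) else sorted_durations[-1]
print(p95)  # 0.1 -> with only five samples, P95 lands on the slowest one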