#!/usr/bin/env python3
"""
WebDAV Server Concurrent Benchmark Tool

Heavy load testing with performance metrics per method
"""
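
# Example invocation (hypothetical host, credentials, and script file name):
#
#   python3 webdav_benchmark.py http://localhost:8080/ alice secret -c 100 -d 120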

import argparse
import asyncio
import random
import statistics
import string
import time
from collections import defaultdict
from dataclasses import dataclass, field
from typing import Dict, List, Optional

import aiohttp


@dataclass
class RequestMetrics:
    """Metrics for a single request"""
    method: str
    duration: float
    status: int
    success: bool
    error: Optional[str] = None


@dataclass
class MethodStats:
    """Statistics for a specific HTTP method"""
    method: str
    total_requests: int = 0
    successful_requests: int = 0
    failed_requests: int = 0
    total_duration: float = 0.0
    durations: List[float] = field(default_factory=list)
    errors: Dict[str, int] = field(default_factory=lambda: defaultdict(int))

    @property
    def success_rate(self) -> float:
        return (self.successful_requests / self.total_requests * 100) if self.total_requests > 0 else 0

    @property
    def avg_duration(self) -> float:
        return self.total_duration / self.total_requests if self.total_requests > 0 else 0

    @property
    def requests_per_second(self) -> float:
        # Computed over cumulative request time rather than wall-clock time, so
        # per-method RPS is an approximation under concurrency.
        return self.total_requests / self.total_duration if self.total_duration > 0 else 0

    @property
    def min_duration(self) -> float:
        return min(self.durations) if self.durations else 0

    @property
    def max_duration(self) -> float:
        return max(self.durations) if self.durations else 0

    @property
    def p50_duration(self) -> float:
        return statistics.median(self.durations) if self.durations else 0

    def _percentile(self, q: float) -> float:
        """Empirical percentile of the recorded durations (index int(n * q), clamped)."""
        if not self.durations:
            return 0
        sorted_durations = sorted(self.durations)
        index = int(len(sorted_durations) * q)
        return sorted_durations[min(index, len(sorted_durations) - 1)]

    @property
    def p95_duration(self) -> float:
        return self._percentile(0.95)

    @property
    def p99_duration(self) -> float:
        return self._percentile(0.99)
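
# Percentile example: with 100 recorded durations, _percentile(0.95) reads
# index int(100 * 0.95) == 95, i.e. the 96th-smallest sample; the min() clamp
# keeps the index in range for small sample counts.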


class WebDAVBenchmark:
    """WebDAV server benchmark runner"""

    def __init__(self, url: str, username: str, password: str,
                 concurrency: int = 50, duration: int = 60):
        self.url = url.rstrip('/')
        self.username = username
        self.password = password
        self.concurrency = concurrency
        self.duration = duration
        self.stats: Dict[str, MethodStats] = defaultdict(lambda: MethodStats(method=""))
        self.start_time = 0.0
        self.stop_flag = False
        self.auth = aiohttp.BasicAuth(username, password)

    def random_string(self, length: int = 10) -> str:
        """Generate a random alphanumeric string"""
        return ''.join(random.choices(string.ascii_letters + string.digits, k=length))

    async def record_metric(self, metric: RequestMetrics):
        """Record a request metric"""
        stats = self.stats[metric.method]
        stats.method = metric.method
        stats.total_requests += 1
        stats.total_duration += metric.duration
        stats.durations.append(metric.duration)

        if metric.success:
            stats.successful_requests += 1
        else:
            stats.failed_requests += 1
            if metric.error:
                stats.errors[metric.error] += 1

    async def benchmark_options(self, session: aiohttp.ClientSession) -> RequestMetrics:
        """Benchmark OPTIONS request"""
        start = time.time()
        try:
            async with session.options(self.url, auth=self.auth) as resp:
                duration = time.time() - start
                return RequestMetrics(
                    method='OPTIONS',
                    duration=duration,
                    status=resp.status,
                    success=resp.status == 200
                )
        except Exception as e:
            return RequestMetrics(
                method='OPTIONS',
                duration=time.time() - start,
                status=0,
                success=False,
                error=str(e)
            )

    async def benchmark_propfind(self, session: aiohttp.ClientSession, depth: int = 0) -> RequestMetrics:
        """Benchmark PROPFIND request"""
        propfind_body = '''<?xml version="1.0"?>
<D:propfind xmlns:D="DAV:">
  <D:allprop/>
</D:propfind>'''

        start = time.time()
        try:
            async with session.request(
                'PROPFIND',
                self.url,
                auth=self.auth,
                data=propfind_body,
                headers={'Depth': str(depth), 'Content-Type': 'application/xml'}
            ) as resp:
                await resp.read()  # Consume response body
                duration = time.time() - start
                return RequestMetrics(
                    method='PROPFIND',
                    duration=duration,
                    status=resp.status,
                    success=resp.status == 207
                )
        except Exception as e:
            return RequestMetrics(
                method='PROPFIND',
                duration=time.time() - start,
                status=0,
                success=False,
                error=str(e)
            )

    async def benchmark_put(self, session: aiohttp.ClientSession,
                            filename: Optional[str] = None) -> RequestMetrics:
        """Benchmark PUT request"""
        # Accept an optional filename so callers can track which resource was
        # actually created; generate a random one otherwise.
        if filename is None:
            filename = f"bench_{self.random_string()}.txt"
        content = self.random_string(1024).encode()  # 1KB file

        start = time.time()
        try:
            async with session.put(
                f"{self.url}/{filename}",
                auth=self.auth,
                data=content
            ) as resp:
                duration = time.time() - start
                return RequestMetrics(
                    method='PUT',
                    duration=duration,
                    status=resp.status,
                    success=resp.status in [201, 204]
                )
        except Exception as e:
            return RequestMetrics(
                method='PUT',
                duration=time.time() - start,
                status=0,
                success=False,
                error=str(e)
            )

    async def benchmark_get(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
        """Benchmark GET request"""
        start = time.time()
        try:
            async with session.get(
                f"{self.url}/{filename}",
                auth=self.auth
            ) as resp:
                await resp.read()  # Consume response body
                duration = time.time() - start
                return RequestMetrics(
                    method='GET',
                    duration=duration,
                    status=resp.status,
                    success=resp.status == 200
                )
        except Exception as e:
            return RequestMetrics(
                method='GET',
                duration=time.time() - start,
                status=0,
                success=False,
                error=str(e)
            )

    async def benchmark_head(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
        """Benchmark HEAD request"""
        start = time.time()
        try:
            async with session.head(
                f"{self.url}/{filename}",
                auth=self.auth
            ) as resp:
                duration = time.time() - start
                return RequestMetrics(
                    method='HEAD',
                    duration=duration,
                    status=resp.status,
                    success=resp.status == 200
                )
        except Exception as e:
            return RequestMetrics(
                method='HEAD',
                duration=time.time() - start,
                status=0,
                success=False,
                error=str(e)
            )

    async def benchmark_mkcol(self, session: aiohttp.ClientSession) -> RequestMetrics:
        """Benchmark MKCOL request"""
        dirname = f"bench_dir_{self.random_string()}"

        start = time.time()
        try:
            async with session.request(
                'MKCOL',
                f"{self.url}/{dirname}/",
                auth=self.auth
            ) as resp:
                duration = time.time() - start
                return RequestMetrics(
                    method='MKCOL',
                    duration=duration,
                    status=resp.status,
                    success=resp.status == 201
                )
        except Exception as e:
            return RequestMetrics(
                method='MKCOL',
                duration=time.time() - start,
                status=0,
                success=False,
                error=str(e)
            )

    async def benchmark_proppatch(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
        """Benchmark PROPPATCH request"""
        proppatch_body = '''<?xml version="1.0"?>
<D:propertyupdate xmlns:D="DAV:">
  <D:set>
    <D:prop>
      <D:displayname>Benchmark Test</D:displayname>
    </D:prop>
  </D:set>
</D:propertyupdate>'''

        start = time.time()
        try:
            async with session.request(
                'PROPPATCH',
                f"{self.url}/{filename}",
                auth=self.auth,
                data=proppatch_body,
                headers={'Content-Type': 'application/xml'}
            ) as resp:
                await resp.read()
                duration = time.time() - start
                return RequestMetrics(
                    method='PROPPATCH',
                    duration=duration,
                    status=resp.status,
                    success=resp.status == 207
                )
        except Exception as e:
            return RequestMetrics(
                method='PROPPATCH',
                duration=time.time() - start,
                status=0,
                success=False,
                error=str(e)
            )

    async def benchmark_copy(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
        """Benchmark COPY request"""
        dest_filename = f"copy_{self.random_string()}.txt"

        start = time.time()
        try:
            async with session.request(
                'COPY',
                f"{self.url}/{filename}",
                auth=self.auth,
                headers={'Destination': f"{self.url}/{dest_filename}"}
            ) as resp:
                duration = time.time() - start
                return RequestMetrics(
                    method='COPY',
                    duration=duration,
                    status=resp.status,
                    success=resp.status in [201, 204]
                )
        except Exception as e:
            return RequestMetrics(
                method='COPY',
                duration=time.time() - start,
                status=0,
                success=False,
                error=str(e)
            )

    async def benchmark_move(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
        """Benchmark MOVE request"""
        dest_filename = f"moved_{self.random_string()}.txt"

        start = time.time()
        try:
            async with session.request(
                'MOVE',
                f"{self.url}/{filename}",
                auth=self.auth,
                headers={'Destination': f"{self.url}/{dest_filename}"}
            ) as resp:
                duration = time.time() - start
                return RequestMetrics(
                    method='MOVE',
                    duration=duration,
                    status=resp.status,
                    success=resp.status in [201, 204]
                )
        except Exception as e:
            return RequestMetrics(
                method='MOVE',
                duration=time.time() - start,
                status=0,
                success=False,
                error=str(e)
            )

    async def benchmark_lock(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
        """Benchmark LOCK request"""
        lock_body = '''<?xml version="1.0"?>
<D:lockinfo xmlns:D="DAV:">
  <D:lockscope><D:exclusive/></D:lockscope>
  <D:locktype><D:write/></D:locktype>
  <D:owner>
    <D:href>benchmark</D:href>
  </D:owner>
</D:lockinfo>'''

        start = time.time()
        try:
            async with session.request(
                'LOCK',
                f"{self.url}/{filename}",
                auth=self.auth,
                data=lock_body,
                headers={'Content-Type': 'application/xml', 'Timeout': 'Second-300'}
            ) as resp:
                lock_token = resp.headers.get('Lock-Token', '').strip('<>')
                await resp.read()
                duration = time.time() - start

                # Unlock immediately to clean up
                if lock_token:
                    try:
                        async with session.request(
                            'UNLOCK',
                            f"{self.url}/{filename}",
                            auth=self.auth,
                            headers={'Lock-Token': f'<{lock_token}>'}
                        ):
                            pass
                    except Exception:
                        pass

                return RequestMetrics(
                    method='LOCK',
                    duration=duration,
                    status=resp.status,
                    # 201 is returned when LOCK creates an unmapped resource (RFC 4918)
                    success=resp.status in (200, 201)
                )
        except Exception as e:
            return RequestMetrics(
                method='LOCK',
                duration=time.time() - start,
                status=0,
                success=False,
                error=str(e)
            )

    async def benchmark_delete(self, session: aiohttp.ClientSession, filename: str) -> RequestMetrics:
        """Benchmark DELETE request"""
        start = time.time()
        try:
            async with session.delete(
                f"{self.url}/{filename}",
                auth=self.auth
            ) as resp:
                duration = time.time() - start
                return RequestMetrics(
                    method='DELETE',
                    duration=duration,
                    status=resp.status,
                    success=resp.status == 204
                )
        except Exception as e:
            return RequestMetrics(
                method='DELETE',
                duration=time.time() - start,
                status=0,
                success=False,
                error=str(e)
            )

    async def worker(self, worker_id: int, session: aiohttp.ClientSession):
        """Worker coroutine that runs various benchmarks"""
        test_files = []

        # Create an initial test file, passing the filename through so the
        # tracked name matches the resource that was actually uploaded
        filename = f"bench_worker_{worker_id}_{self.random_string()}.txt"
        metric = await self.benchmark_put(session, filename)
        await self.record_metric(metric)
        if metric.success:
            test_files.append(filename)

        while not self.stop_flag:
            elapsed = time.time() - self.start_time
            if elapsed >= self.duration:
                self.stop_flag = True
                break

            # Randomly choose an operation
            operation = random.choice([
                'options', 'propfind', 'put', 'get', 'head',
                'mkcol', 'proppatch', 'copy', 'move', 'lock', 'delete'
            ])

            try:
                if operation == 'options':
                    metric = await self.benchmark_options(session)

                elif operation == 'propfind':
                    depth = random.choice([0, 1])
                    metric = await self.benchmark_propfind(session, depth)

                elif operation == 'put':
                    filename = f"bench_worker_{worker_id}_{self.random_string()}.txt"
                    metric = await self.benchmark_put(session, filename)
                    if metric.success:
                        test_files.append(filename)

                elif operation == 'get' and test_files:
                    filename = random.choice(test_files)
                    metric = await self.benchmark_get(session, filename)

                elif operation == 'head' and test_files:
                    filename = random.choice(test_files)
                    metric = await self.benchmark_head(session, filename)

                elif operation == 'mkcol':
                    metric = await self.benchmark_mkcol(session)

                elif operation == 'proppatch' and test_files:
                    filename = random.choice(test_files)
                    metric = await self.benchmark_proppatch(session, filename)

                elif operation == 'copy' and test_files:
                    filename = random.choice(test_files)
                    metric = await self.benchmark_copy(session, filename)

                elif operation == 'move' and len(test_files) > 1:
                    filename = test_files.pop(random.randrange(len(test_files)))
                    metric = await self.benchmark_move(session, filename)

                elif operation == 'lock' and test_files:
                    filename = random.choice(test_files)
                    metric = await self.benchmark_lock(session, filename)

                elif operation == 'delete' and len(test_files) > 1:
                    filename = test_files.pop(random.randrange(len(test_files)))
                    metric = await self.benchmark_delete(session, filename)

                else:
                    continue

                await self.record_metric(metric)

            except Exception as e:
                print(f"Worker {worker_id} error: {e}")

            # Small delay to avoid overwhelming the event loop
            await asyncio.sleep(0.001)

    async def run(self):
        """Run the benchmark"""
        print("="*80)
        print("WebDAV Server Concurrent Benchmark")
        print("="*80)
        print(f"URL: {self.url}")
        print(f"Concurrency: {self.concurrency} workers")
        print(f"Duration: {self.duration} seconds")
        print(f"User: {self.username}")
        print("="*80)
        print()

        connector = aiohttp.TCPConnector(limit=self.concurrency * 2)
        timeout = aiohttp.ClientTimeout(total=30)

        async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session:
            self.start_time = time.time()

            # Create worker tasks
            workers = [
                asyncio.create_task(self.worker(i, session))
                for i in range(self.concurrency)
            ]

            # Progress indicator
            progress_task = asyncio.create_task(self.show_progress())

            # Wait for all workers
            await asyncio.gather(*workers, return_exceptions=True)

            # Stop progress
            await progress_task

        # Print results
        self.print_results()

    async def show_progress(self):
        """Show progress during benchmark"""
        while not self.stop_flag:
            elapsed = time.time() - self.start_time
            if elapsed >= self.duration:
                break

            total_requests = sum(s.total_requests for s in self.stats.values())
            print(f"\rProgress: {elapsed:.1f}s / {self.duration}s | Total Requests: {total_requests}", end='', flush=True)
            await asyncio.sleep(1)

        print()

    def print_results(self):
        """Print benchmark results"""
        print("\n")
        print("="*80)
        print("BENCHMARK RESULTS")
        print("="*80)
        print()

        total_duration = time.time() - self.start_time
        total_requests = sum(s.total_requests for s in self.stats.values())
        total_success = sum(s.successful_requests for s in self.stats.values())
        total_failed = sum(s.failed_requests for s in self.stats.values())

        # Guard against division by zero when no requests completed
        if total_requests == 0:
            print("No requests were recorded.")
            return

        print(f"Total Duration: {total_duration:.2f}s")
        print(f"Total Requests: {total_requests:,}")
        print(f"Successful: {total_success:,} ({total_success/total_requests*100:.1f}%)")
        print(f"Failed: {total_failed:,} ({total_failed/total_requests*100:.1f}%)")
        print(f"Overall RPS: {total_requests/total_duration:.2f}")
        print()

        # Sort methods by request count
        sorted_stats = sorted(self.stats.values(), key=lambda s: s.total_requests, reverse=True)

        print("="*80)
        print("PER-METHOD STATISTICS")
        print("="*80)
        print()

        for stats in sorted_stats:
            if stats.total_requests == 0:
                continue

            print(f"Method: {stats.method}")
            print(f"  Requests:     {stats.total_requests:>8,}")
            print(f"  Success Rate: {stats.success_rate:>8.2f}%")
            print(f"  RPS:          {stats.requests_per_second:>8.2f}")
            print("  Latency (ms):")
            print(f"    Min: {stats.min_duration*1000:>8.2f}")
            print(f"    Avg: {stats.avg_duration*1000:>8.2f}")
            print(f"    P50: {stats.p50_duration*1000:>8.2f}")
            print(f"    P95: {stats.p95_duration*1000:>8.2f}")
            print(f"    P99: {stats.p99_duration*1000:>8.2f}")
            print(f"    Max: {stats.max_duration*1000:>8.2f}")

            if stats.failed_requests > 0 and stats.errors:
                print("  Errors:")
                for error, count in sorted(stats.errors.items(), key=lambda x: x[1], reverse=True)[:5]:
                    error_short = error[:60] + '...' if len(error) > 60 else error
                    print(f"    {error_short}: {count}")

            print()

        print("="*80)
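
# Programmatic use (hypothetical values; equivalent to the CLI entry point below):
#
#   bench = WebDAVBenchmark('http://localhost:8080/', 'alice', 'secret',
#                           concurrency=10, duration=30)
#   asyncio.run(bench.run())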


async def main():
    """Main entry point"""
    parser = argparse.ArgumentParser(description='WebDAV Server Concurrent Benchmark')
    parser.add_argument('url', help='WebDAV server URL (e.g., http://localhost:8080/)')
    parser.add_argument('username', help='Username for authentication')
    parser.add_argument('password', help='Password for authentication')
    parser.add_argument('-c', '--concurrency', type=int, default=50,
                        help='Number of concurrent workers (default: 50)')
    parser.add_argument('-d', '--duration', type=int, default=60,
                        help='Benchmark duration in seconds (default: 60)')

    args = parser.parse_args()

    benchmark = WebDAVBenchmark(
        url=args.url,
        username=args.username,
        password=args.password,
        concurrency=args.concurrency,
        duration=args.duration
    )

    await benchmark.run()


if __name__ == '__main__':
    asyncio.run(main())