Initial commit.
All checks were successful
Build and Test / build-c (push) Successful in 36s
Build and Test / build-python (push) Successful in 25s
Build and Test / valgrind (push) Successful in 46s

This commit is contained in:
retoor 2025-12-13 00:11:12 +01:00
commit 5482f8b5c9
7 changed files with 1614 additions and 0 deletions

View File

@ -0,0 +1,63 @@
# retoor <retoor@molodetz.nl>
# CI workflow: builds the C binary (release + debug), runs valgrind memory
# tests on it, and exercises the Python implementation — on pushes and PRs
# targeting main/master.
name: Build and Test
on:
  push:
    branches:
      - main
      - master
  pull_request:
    branches:
      - main
      - master
jobs:
  build-c:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y build-essential libssl-dev valgrind
      - name: Build
        run: make
      - name: Build debug
        run: make debug
  valgrind:
    runs-on: ubuntu-latest
    # Only run the (slow) memory tests once the plain build is known-good.
    needs: build-c
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y build-essential libssl-dev valgrind
      - name: Run valgrind memory tests
        run: make valgrind
  build-python:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'
      - name: Install dependencies
        run: make py-install
      - name: Test Python version
        run: make py-test

38
Makefile Normal file
View File

@ -0,0 +1,38 @@
# retoor <retoor@molodetz.nl>
# Build configuration for abr, an HTTP benchmark tool (C + Python versions).
# User-tunable knobs; override from the command line, e.g. `make CC=clang`.
CC = gcc
CFLAGS = -Wall -Wextra -O2
CFLAGS_DEBUG = -Wall -Wextra -g -O0
LDFLAGS = -lssl -lcrypto -lm
TARGET = abr
TEST_URL = https://example.com/
PYTHON = python3

# Remove a half-written target when its recipe fails, so a broken build
# never looks "up to date".
.DELETE_ON_ERROR:

all: $(TARGET)

$(TARGET): main.o
	$(CC) $(CFLAGS) -o $@ $^ $(LDFLAGS)

main.o: main.c
	$(CC) $(CFLAGS) -c $<

# Debug build compiles main.c directly (no .o) with symbols and no
# optimization; depends on clean so stale release objects never leak in.
debug: clean
	$(CC) $(CFLAGS_DEBUG) -o $(TARGET) main.c $(LDFLAGS)

# Memory-leak check against the debug build; -i skips cert verification
# so the test URL works without a trust store. Non-zero exit on any leak.
valgrind: debug
	valgrind --leak-check=full --show-leak-kinds=definite,indirect,possible --errors-for-leak-kinds=definite,indirect,possible --error-exitcode=1 ./$(TARGET) -n 5 -c 2 -i $(TEST_URL)

clean:
	$(RM) $(TARGET) main.o

py-install:
	$(PYTHON) -m pip install -r requirements.txt

py-run:
	$(PYTHON) abr.py -n 5 -c 2 -i $(TEST_URL)

py-test:
	$(PYTHON) abr.py -n 10 -c 5 -i $(TEST_URL)

.PHONY: all clean debug valgrind py-install py-run py-test

96
README.md Normal file
View File

@ -0,0 +1,96 @@
<!-- retoor <retoor@molodetz.nl> -->
# abr
HTTP benchmark tool inspired by ApacheBench. Available in C and Python implementations.
## C Version
Uses non-blocking sockets with poll() multiplexing and OpenSSL for TLS.
### Requirements
- GCC
- OpenSSL development libraries (libssl-dev)
- POSIX-compliant system (Linux, BSD, macOS)
### Build
```sh
make # build optimized binary
make debug # build with debug symbols
make valgrind # run memory leak tests
make clean # remove build artifacts
```
### Usage
```sh
./abr -n <requests> -c <concurrency> [-k] [-i] <url>
```
## Python Version
Uses asyncio with aiohttp for concurrent HTTP requests.
### Requirements
- Python 3.7+
- aiohttp
### Install
```sh
make py-install
```
### Usage
```sh
python3 abr.py -n <requests> -c <concurrency> [-k] [-i] <url>
make py-run # quick test run
make py-test # test with more requests
```
## Options
| Option | Description |
|--------|-------------|
| `-n` | Total number of requests |
| `-c` | Concurrent connections (max 10000) |
| `-k` | Enable HTTP Keep-Alive |
| `-i` | Skip SSL certificate verification |
## Example
```sh
./abr -n 1000 -c 50 -k https://example.com/
python3 abr.py -n 1000 -c 50 -k https://example.com/
```
## Output
- Requests per second
- Transfer rate (KB/s)
- Response time percentiles (50th, 66th, 75th, 80th, 90th, 95th, 98th, 99th)
- Connection time statistics (min, mean, median, max, standard deviation)
## Technical Details
### C Version
- Event-driven architecture using poll()
- Connection pooling with keep-alive support
- Chunked transfer-encoding support
- IPv4 and IPv6 via getaddrinfo()
- 30-second per-request timeout
- Graceful shutdown on SIGINT/SIGTERM
- OpenSSL 1.0.x and 1.1+ compatibility
- Memory leak free (verified with valgrind)
### Python Version
- Async I/O with asyncio and aiohttp
- Connection pooling with keep-alive support
- 30-second per-request timeout
- Graceful shutdown on SIGINT/SIGTERM

BIN
abr Executable file

Binary file not shown.

294
abr.py Executable file
View File

@ -0,0 +1,294 @@
#!/usr/bin/env python3
# retoor <retoor@molodetz.nl>
import asyncio
import aiohttp
import ssl
import time
import statistics
import argparse
import signal
import sys
from urllib.parse import urlparse
from typing import List, Dict, Any, Optional
# Per-request timeout in seconds, applied to every HTTP request.
REQUEST_TIMEOUT_S = 30
# Hard upper bound on concurrent connections accepted via -c.
MAX_CONNECTIONS = 10000
class Style:
    """ANSI escape sequences used to color terminal output."""

    RESET = '\033[0m'    # restore default attributes
    BOLD = '\033[1m'
    RED = '\033[31m'
    GREEN = '\033[32m'
    YELLOW = '\033[33m'
    CYAN = '\033[36m'
# Set by the signal handler; the benchmark loop polls this flag to stop early.
shutdown_requested = False


def signal_handler(sig, frame):
    """Request a graceful shutdown; installed for SIGINT and SIGTERM."""
    global shutdown_requested
    shutdown_requested = True


signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
async def fetch(session: aiohttp.ClientSession, semaphore: asyncio.Semaphore, url: str, timeout: aiohttp.ClientTimeout) -> Dict[str, Any]:
    """Perform one GET request, bounded by the concurrency semaphore.

    Returns a result dict (status, timing, sizes, error info); never raises —
    every failure mode is converted into a result with ``failed=True``.
    """
    async with semaphore:
        start_time = time.monotonic()
        try:
            async with session.get(url, timeout=timeout) as response:
                body_bytes = await response.read()
                end_time = time.monotonic()
                # Approximate raw header size: "key: value\r\n" per header
                # (+4 covers ": " and "\r\n") plus the terminating blank line.
                header_size = sum(len(k) + len(v) + 4 for k, v in response.raw_headers) + 2
                return {
                    "status": response.status,
                    "duration_ms": (end_time - start_time) * 1000,
                    "body_size_bytes": len(body_bytes),
                    "header_size_bytes": header_size,
                    "server_software": response.headers.get("Server"),
                    # HTTP 4xx/5xx responses count as failed requests.
                    "failed": response.status >= 400,
                    "error": None
                }
        except asyncio.TimeoutError:
            return _failure_result(start_time, f"Request timeout ({REQUEST_TIMEOUT_S}s)")
        except Exception as e:
            # aiohttp.ClientError and any unexpected exception previously
            # produced identical result dicts; merged into one handler.
            return _failure_result(start_time, str(e))


def _failure_result(start_time: float, message: str) -> Dict[str, Any]:
    """Build the failure dict shared by all fetch error paths."""
    return {
        "status": None,
        "duration_ms": (time.monotonic() - start_time) * 1000,
        "body_size_bytes": 0,
        "header_size_bytes": 0,
        "server_software": None,
        "failed": True,
        "error": message
    }
def format_bytes(bytes_val: int) -> str:
    """Format a byte count as a human-readable string, capped at TB.

    Values below 1024 are reported in plain bytes; larger values use binary
    (1024-based) units with two decimal places. Petabyte-scale inputs are
    reported as >= 1024.00 TB (the previous fallback divided one step too
    far and mislabelled a PB-scale value as a small TB figure).
    """
    if bytes_val < 1024:
        return f"{bytes_val} bytes"
    value = bytes_val / 1024
    for unit in ("KB", "MB", "GB"):
        if value < 1024:
            return f"{value:.2f} {unit}"
        value /= 1024
    # Largest supported unit: report everything remaining in TB.
    return f"{value:.2f} TB"
def print_summary(results: List[Dict[str, Any]], total_duration_s: float, url: str, total_requests: int, concurrency: int, total_connections: int):
    """Print an ApacheBench-style report for a completed benchmark run."""
    ok = [r for r in results if not r["failed"]]
    failed_count = len(results) - len(ok)

    # Without at least one success there is nothing to aggregate.
    if not ok:
        print(f"{Style.RED}All requests failed. Cannot generate a detailed summary.{Style.RESET}")
        print(f"Total time: {total_duration_s:.3f} seconds")
        print(f"Failed requests: {failed_count}")
        if results and results[0]['error']:
            print(f"Sample error: {results[0]['error']}")
        return

    parsed = urlparse(url)
    hostname = parsed.hostname or "unknown"
    port = parsed.port or (443 if parsed.scheme == "https" else 80)
    path = parsed.path or "/"
    if parsed.query:
        path = path + "?" + parsed.query

    first = ok[0]
    doc_length = first["body_size_bytes"]
    durations = [r["duration_ms"] for r in ok]
    total_html_transferred = sum(r["body_size_bytes"] for r in ok)
    total_transferred = sum(r["body_size_bytes"] + r["header_size_bytes"] for r in ok)

    # Throughput figures are relative to the whole run, not per request.
    req_per_second = total_requests / total_duration_s
    time_per_req_concurrent = (total_duration_s * 1000) / total_requests
    time_per_req_mean = (total_duration_s * 1000 * concurrency) / total_requests
    transfer_rate_kbytes_s = (total_transferred / 1024) / total_duration_s

    min_time = min(durations)
    mean_time = statistics.mean(durations)
    stdev_time = statistics.stdev(durations) if len(durations) > 1 else 0
    median_time = statistics.median(durations)
    max_time = max(durations)

    ordered = sorted(durations)
    n = len(ordered)
    # Nearest-rank percentiles; index clamped at zero for tiny samples.
    percentiles = {p: ordered[max(0, int(n * p / 100) - 1)]
                   for p in (50, 66, 75, 80, 90, 95, 98, 99)}
    percentiles[100] = max_time

    y, g, r, c, b, rs = Style.YELLOW, Style.GREEN, Style.RED, Style.CYAN, Style.BOLD, Style.RESET
    fail_color = g if failed_count == 0 else r

    print(f"{y}Server Software:{rs} {first['server_software'] or 'N/A'}")
    print(f"{y}Server Hostname:{rs} {hostname}")
    print(f"{y}Server Port:{rs} {port}\n")
    print(f"{y}Document Path:{rs} {path}")
    print(f"{y}Document Length:{rs} {format_bytes(doc_length)}\n")
    print(f"{y}Concurrency Level:{rs} {concurrency}")
    print(f"{y}Time taken for tests:{rs} {total_duration_s:.3f} seconds")
    print(f"{y}Complete requests:{rs} {total_requests}")
    print(f"{y}Failed requests:{rs} {fail_color}{failed_count}{rs}")
    print(f"{y}Total connections made:{rs} {total_connections}")
    print(f"{y}Total transferred:{rs} {format_bytes(total_transferred)}")
    print(f"{y}HTML transferred:{rs} {format_bytes(total_html_transferred)}")
    print(f"{y}Requests per second:{rs} {g}{req_per_second:.2f}{rs} [#/sec] (mean)")
    print(f"{y}Time per request:{rs} {time_per_req_mean:.3f} [ms] (mean)")
    print(f"{y}Time per request:{rs} {time_per_req_concurrent:.3f} [ms] (mean, across all concurrent requests)")
    print(f"{y}Transfer rate:{rs} {g}{transfer_rate_kbytes_s:.2f}{rs} [Kbytes/sec] received\n")
    print(f"{c}{b}Connection Times (ms){rs}")
    print(f"{c}---------------------{rs}")
    print(f"{'min:':<10}{min_time:>8.0f}")
    print(f"{'mean:':<10}{mean_time:>8.0f}")
    print(f"{'sd:':<10}{stdev_time:>8.1f}")
    print(f"{'median:':<10}{median_time:>8.0f}")
    print(f"{'max:':<10}{max_time:>8.0f}\n")
    print(f"{c}{b}Percentage of the requests served within a certain time (ms){rs}")
    for p, t in percentiles.items():
        print(f" {g}{p:>3}%{rs} {t:.0f}")
async def main(url: str, total_requests: int, concurrency: int, keep_alive: bool, insecure: bool):
    """Run the benchmark: fire all requests, stream live progress, print a summary.

    Honors the module-level shutdown flag (SIGINT/SIGTERM) by cancelling
    outstanding tasks and exiting with status 130.
    """
    global shutdown_requested
    parsed = urlparse(url)
    hostname = parsed.hostname or "unknown"
    print("abr, a Python-based HTTP benchmark inspired by ApacheBench.")
    print(f"Benchmarking {hostname} (be patient)...")
    if insecure and parsed.scheme == "https":
        print(f"{Style.YELLOW}Warning: SSL certificate verification disabled{Style.RESET}")

    semaphore = asyncio.Semaphore(concurrency)
    timeout = aiohttp.ClientTimeout(total=REQUEST_TIMEOUT_S)

    # With -i, build a context that accepts any certificate.
    ssl_context: Optional[ssl.SSLContext] = None
    if insecure:
        ssl_context = ssl.create_default_context()
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE

    connector = aiohttp.TCPConnector(
        limit=min(concurrency, MAX_CONNECTIONS),
        force_close=not keep_alive,
        ssl=ssl_context if insecure else None
    )
    total_connections = 0

    async with aiohttp.ClientSession(connector=connector) as session:
        tasks = [asyncio.create_task(fetch(session, semaphore, url, timeout)) for _ in range(total_requests)]
        results = []
        completed_count = 0
        failed_count = 0
        success_count = 0
        total_duration_ms = 0
        total_bytes_transferred = 0
        benchmark_start_time = time.monotonic()

        # Consume results as they finish so the progress line updates live.
        for future in asyncio.as_completed(tasks):
            if shutdown_requested:
                # Graceful shutdown: cancel whatever has not completed yet.
                for task in tasks:
                    if not task.done():
                        task.cancel()
                break
            try:
                result = await future
            except asyncio.CancelledError:
                break
            results.append(result)
            completed_count += 1
            total_connections += 1
            total_bytes_transferred += result["body_size_bytes"] + result["header_size_bytes"]
            if result["failed"]:
                failed_count += 1
            else:
                success_count += 1
                total_duration_ms += result["duration_ms"]

            # Rolling statistics for the single-line progress display.
            elapsed_time = time.monotonic() - benchmark_start_time
            req_per_sec = completed_count / elapsed_time if elapsed_time > 0 else 0
            avg_latency_ms = total_duration_ms / success_count if success_count > 0 else 0
            transfer_rate_kbs = (total_bytes_transferred / 1024) / elapsed_time if elapsed_time > 0 else 0
            fail_color = Style.GREEN if failed_count == 0 else Style.RED
            status_line = (
                f"\r{Style.BOLD}Completed: {completed_count}/{total_requests} | "
                f"Failed: {fail_color}{failed_count}{Style.RESET}{Style.BOLD} | "
                f"RPS: {Style.GREEN}{req_per_sec:.1f}{Style.RESET}{Style.BOLD} | "
                f"Avg Latency: {avg_latency_ms:.0f}ms | "
                f"Rate: {transfer_rate_kbs:.1f} KB/s{Style.RESET}"
            )
            sys.stdout.write(status_line)
            sys.stdout.flush()

        benchmark_end_time = time.monotonic()

    if shutdown_requested:
        print(f"\n{Style.YELLOW}Shutdown requested, cleaning up...{Style.RESET}")
    sys.stdout.write("\n\n")
    print(f"{Style.GREEN}{Style.BOLD}Finished {len(results)} requests{Style.RESET}\n")
    total_duration = benchmark_end_time - benchmark_start_time
    print_summary(results, total_duration, url, len(results), concurrency, total_connections)
    if shutdown_requested:
        # Conventional exit code for "terminated by SIGINT".
        sys.exit(130)
if __name__ == "__main__":
    # Command-line entry point: validate arguments, then run the benchmark.
    parser = argparse.ArgumentParser(
        description="A Python-based HTTP benchmark tool inspired by ApacheBench.",
        formatter_class=argparse.RawTextHelpFormatter
    )
    parser.add_argument('-n', type=int, required=True, help='Total number of requests to perform')
    parser.add_argument('-c', type=int, required=True, help='Number of concurrent connections')
    parser.add_argument('-k', action='store_true', help='Enable HTTP Keep-Alive')
    parser.add_argument('-i', action='store_true', help='Insecure mode (skip SSL certificate verification)')
    parser.add_argument('url', type=str, help='URL to benchmark')
    args = parser.parse_args()

    # Sanity checks mirror the C implementation's limits.
    if args.n <= 0:
        parser.error("Number of requests (-n) must be positive")
    if args.c <= 0 or args.c > MAX_CONNECTIONS:
        parser.error(f"Concurrency (-c) must be between 1 and {MAX_CONNECTIONS}")
    if args.n < args.c:
        parser.error("Number of requests (-n) cannot be less than the concurrency level (-c)")

    asyncio.run(main(url=args.url, total_requests=args.n, concurrency=args.c, keep_alive=args.k, insecure=args.i))

1122
main.c Normal file

File diff suppressed because it is too large Load Diff

1
requirements.txt Normal file
View File

@ -0,0 +1 @@
aiohttp>=3.8.0