···
+
mutable latencies : (float * float) list; (* (timestamp, latency) pairs *)
let create_latency_stats () = {
···
+
(** [update_latency stats latency timestamp] records one completed request in
    the mutable [stats] record: increments the sample count, accumulates the
    running total, tracks the min/max latency seen so far, and prepends the
    [(timestamp, latency)] pair to the sample list.  Samples are kept
    newest-first; consumers reverse the list for chronological order. *)
let update_latency stats latency timestamp =
  stats.count <- stats.count + 1;
  stats.total <- stats.total +. latency;
  stats.min <- min stats.min latency;
  stats.max <- max stats.max latency;
  (* Keep every raw sample so histograms/timelines can be built later. *)
  stats.latencies <- (timestamp, latency) :: stats.latencies
(** Generate a random message of given size *)
let generate_message size =
···
(** Client test: connect via pool, send message, verify echo *)
+
(* Sends [message] followed by a newline over a pooled connection to
   [endpoint], reads one echoed line back, and — when the echo matches —
   records the round-trip latency (ms) tagged with the time since test start.
   NOTE(review): the mismatch/error branch and the close of the
   [with_connection] callback are not visible in this chunk; presumably the
   [errors] counter is bumped there — confirm against the full file. *)
let run_client_test ~clock ~test_start_time pool endpoint message latency_stats errors =
let msg_len = String.length message in
(* Timestamp taken before borrowing a connection, so latency includes pool
   acquisition time. *)
let start_time = Eio.Time.now clock in
Conpool.with_connection pool endpoint (fun flow ->
Eio.Flow.copy_string message flow;
Eio.Flow.copy_string "\n" flow;
(* assumes max_size = msg_len + 1 is enough for the echoed line plus its
   newline — TODO confirm the server echoes exactly one line. *)
let response = Eio.Buf_read.of_flow flow ~max_size:(msg_len + 1) in
let echoed = Eio.Buf_read.line response in
let end_time = Eio.Time.now clock in
let latency = (end_time -. start_time) *. 1000.0 in (* Convert to ms *)
+
let relative_time = (end_time -. test_start_time) *. 1000.0 in (* ms since test start *)
if String.equal echoed message then begin
+
update_latency latency_stats latency relative_time
(** [run_client ~clock ~test_start_time pool endpoints cfg latency_stats errors
    client_id] runs one simulated client: it sends
    [cfg.messages_per_client] messages, each to a randomly chosen endpoint,
    recording latency/error outcomes via [run_client_test]. *)
let run_client ~clock ~test_start_time pool endpoints (cfg : config) latency_stats errors client_id =
  for _ = 1 to cfg.messages_per_client do
    (* Pick a random server endpoint so load spreads across all servers. *)
    let endpoint_idx = Random.int (Array.length endpoints) in
    let endpoint = endpoints.(endpoint_idx) in
    (* Prefix the payload with the client id so each echo is unique to its
       sender and mismatched echoes are detectable. *)
    let message = Printf.sprintf "c%d-%s" client_id (generate_message cfg.message_size) in
    run_client_test ~clock ~test_start_time pool endpoint message latency_stats errors
  done
(** Pool statistics aggregated from all endpoints *)
+
(** Test result type *)
+
messages_per_client : int;
+
latency_data : (float * float) list; (* (timestamp, latency) pairs for visualization *)
+
pool_stats : pool_stats;
+
(** Main stress test runner - returns a test result *)
+
let run_stress_test ~env (cfg : config) : test_result =
let net = Eio.Stdenv.net env in
let clock = Eio.Stdenv.clock env in
let latency_stats = create_latency_stats () in
+
let result : test_result option ref = ref None in
Eio.Switch.run @@ fun sw ->
+
ports := Array.init cfg.num_servers (fun _ ->
start_echo_server ~sw net
···
Conpool.Endpoint.make ~host:"127.0.0.1" ~port
+
(* Create connection pool *)
let pool_config = Conpool.Config.make
+
~max_connections_per_endpoint:cfg.pool_size
~max_connection_lifetime:120.0
···
let start_time = Eio.Time.now clock in
(* Run clients in parallel *)
+
let total_clients = cfg.num_servers * cfg.num_clients in
let client_ids = List.init total_clients (fun i -> i) in
+
Eio.Fiber.List.iter ~max_fibers:cfg.max_parallel_clients
+
run_client ~clock ~test_start_time:start_time pool endpoints cfg latency_stats errors client_id)
let end_time = Eio.Time.now clock in
let duration = end_time -. start_time in
+
(* Collect pool statistics from all endpoints *)
+
let all_stats = Conpool.all_stats pool in
+
let pool_stats = List.fold_left (fun acc (_, stats) ->
+
total_created = acc.total_created + Conpool.Stats.total_created stats;
+
total_reused = acc.total_reused + Conpool.Stats.total_reused stats;
+
total_closed = acc.total_closed + Conpool.Stats.total_closed stats;
+
active = acc.active + Conpool.Stats.active stats;
+
idle = acc.idle + Conpool.Stats.idle stats;
+
pool_errors = acc.pool_errors + Conpool.Stats.errors stats;
+
) { total_created = 0; total_reused = 0; total_closed = 0; active = 0; idle = 0; pool_errors = 0 } all_stats in
+
let r : test_result = {
+
num_servers = cfg.num_servers;
+
num_clients = cfg.num_clients;
+
messages_per_client = cfg.messages_per_client;
+
pool_size = cfg.pool_size;
total_messages = latency_stats.count;
throughput = float_of_int latency_stats.count /. duration;
···
min_latency = if latency_stats.count > 0 then latency_stats.min else 0.0;
max_latency = latency_stats.max;
+
latency_data = List.rev latency_stats.latencies;
···
| None -> failwith "Test failed to produce result"
+
(** Convert result to JSON string *)
+
let result_to_json result =
+
"messages_per_client": %d,
+
result.messages_per_client
+
(** Escape strings for JavaScript.
    [js_escape s] escapes backslash, double quote, newline, carriage return
    and tab so [s] can be embedded inside a double-quoted JavaScript string
    literal.  All other characters pass through unchanged. *)
let js_escape s =
  (* Build the result incrementally; never use [^] in a loop. *)
  let buf = Buffer.create (String.length s) in
  String.iter
    (fun c ->
       match c with
       | '\\' -> Buffer.add_string buf "\\\\"
       | '"' -> Buffer.add_string buf "\\\""
       | '\n' -> Buffer.add_string buf "\\n"
       | '\r' -> Buffer.add_string buf "\\r"
       | '\t' -> Buffer.add_string buf "\\t"
       | _ -> Buffer.add_char buf c)
    s;
  Buffer.contents buf
+
(** Calculate histogram buckets for latency data.
    [calculate_histogram latencies num_buckets] bins the latency component of
    [(timestamp, latency)] pairs into [num_buckets] equal-width buckets over
    [min .. max] of the observed latencies.  Returns [(labels, counts)] where
    each label is the bucket's lower bound formatted to two decimals.
    Returns [([], [])] for empty input.  When every latency is identical the
    bucket width is zero; all samples go into bucket 0 rather than relying on
    the undefined result of [int_of_float nan]. *)
let calculate_histogram latencies num_buckets =
  match latencies with
  | [] -> ([], [])
  | _ ->
    let latency_values = List.map snd latencies in
    let min_lat = List.fold_left min Float.infinity latency_values in
    let max_lat = List.fold_left max 0.0 latency_values in
    let bucket_width = (max_lat -. min_lat) /. float_of_int num_buckets in
    let buckets = Array.make num_buckets 0 in
    List.iter
      (fun lat ->
         (* Clamp to the last bucket so [lat = max_lat] does not overflow;
            a degenerate zero-width histogram collapses into bucket 0. *)
         let bucket_idx =
           if bucket_width <= 0.0 then 0
           else min (num_buckets - 1) (int_of_float ((lat -. min_lat) /. bucket_width))
         in
         buckets.(bucket_idx) <- buckets.(bucket_idx) + 1)
      latency_values;
    let bucket_labels =
      List.init num_buckets (fun i ->
        let start = min_lat +. (float_of_int i *. bucket_width) in
        Printf.sprintf "%.2f" start)
    in
    (bucket_labels, Array.to_list buckets)
+
(** Generate HTML report from test results *)
+
let generate_html_report results =
+
let timestamp = Unix.time () |> Unix.gmtime in
+
let date_str = Printf.sprintf "%04d-%02d-%02d %02d:%02d:%02d UTC"
+
(timestamp.Unix.tm_year + 1900)
+
(timestamp.Unix.tm_mon + 1)
+
(* Calculate summary statistics *)
+
let total_messages = List.fold_left (fun acc r -> acc + r.total_messages) 0 results in
+
let total_errors = List.fold_left (fun acc r -> acc + r.total_errors) 0 results in
+
let total_duration = List.fold_left (fun acc r -> acc +. r.duration) 0.0 results in
+
(* Generate JavaScript arrays for comparison charts *)
+
let test_names = String.concat ", " (List.map (fun r -> Printf.sprintf "\"%s\"" (js_escape r.test_name)) results) in
+
let throughputs = String.concat ", " (List.map (fun r -> Printf.sprintf "%.2f" r.throughput) results) in
+
let avg_latencies = String.concat ", " (List.map (fun r -> Printf.sprintf "%.2f" r.avg_latency) results) in
+
let error_rates = String.concat ", " (List.map (fun r ->
+
if r.total_messages > 0 then
+
Printf.sprintf "%.2f" (float_of_int r.total_errors /. float_of_int r.total_messages *. 100.0)
+
(* Generate per-test detailed sections with histograms and timelines *)
+
let test_details = String.concat "\n" (List.mapi (fun idx r ->
+
let (hist_labels, hist_counts) = calculate_histogram r.latency_data 20 in
+
let hist_labels_str = String.concat ", " (List.map (fun s -> Printf.sprintf "\"%s\"" s) hist_labels) in
+
let hist_counts_str = String.concat ", " (List.map string_of_int hist_counts) in
+
(* Sample data points for timeline (take every Nth point if too many) *)
+
let max_points = 500 in
+
let sample_rate = max 1 ((List.length r.latency_data) / max_points) in
+
let sampled_data = List.filteri (fun i _ -> i mod sample_rate = 0) r.latency_data in
+
let timeline_data = String.concat ", " (List.map (fun (t, l) ->
+
Printf.sprintf "{x: %.2f, y: %.3f}" t l
+
<div class="test-detail">
+
<div class="compact-grid">
+
<div class="compact-metric"><span class="label">Servers:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Clients:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Msgs/Client:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Pool Size:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Total Msgs:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Duration:</span> <span class="value">%.2fs</span></div>
+
<div class="compact-metric highlight"><span class="label">Throughput:</span> <span class="value">%.0f/s</span></div>
+
<div class="compact-metric highlight"><span class="label">Avg Lat:</span> <span class="value">%.2fms</span></div>
+
<div class="compact-metric"><span class="label">Min Lat:</span> <span class="value">%.2fms</span></div>
+
<div class="compact-metric"><span class="label">Max Lat:</span> <span class="value">%.2fms</span></div>
+
<div class="compact-metric %s"><span class="label">Errors:</span> <span class="value">%d</span></div>
+
<div class="compact-grid" style="margin-top: 0.5rem;">
+
<div class="compact-metric"><span class="label">Conns Created:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Conns Reused:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Conns Closed:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Active:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Idle:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Reuse Rate:</span> <span class="value">%.1f%%%%</span></div>
+
<div class="chart-row">
+
<div class="chart-half">
+
<h4>Latency Distribution</h4>
+
<canvas id="hist_%d"></canvas>
+
<div class="chart-half">
+
<h4>Latency Timeline</h4>
+
<canvas id="timeline_%d"></canvas>
+
new Chart(document.getElementById('hist_%d'), {
+
backgroundColor: 'rgba(102, 126, 234, 0.6)',
+
borderColor: 'rgba(102, 126, 234, 1)',
+
maintainAspectRatio: false,
+
plugins: { legend: { display: false } },
+
x: { title: { display: true, text: 'Latency (ms)' } },
+
y: { beginAtZero: true, title: { display: true, text: 'Count' } }
+
new Chart(document.getElementById('timeline_%d'), {
+
backgroundColor: 'rgba(118, 75, 162, 0.5)',
+
borderColor: 'rgba(118, 75, 162, 0.8)',
+
maintainAspectRatio: false,
+
plugins: { legend: { display: false } },
+
x: { title: { display: true, text: 'Time (ms)' } },
+
y: { beginAtZero: true, title: { display: true, text: 'Latency (ms)' } }
+
(js_escape r.test_name)
+
(if r.total_errors > 0 then "error" else "")
+
r.pool_stats.total_created
+
r.pool_stats.total_reused
+
r.pool_stats.total_closed
+
(if r.pool_stats.total_created > 0 then
+
(float_of_int r.pool_stats.total_reused /. float_of_int r.pool_stats.total_created *. 100.0)
+
Printf.sprintf {|<!DOCTYPE html>
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
+
<title>Connection Pool Stress Test Results</title>
+
<script src="https://cdn.jsdelivr.net/npm/chart.js@4.4.0/dist/chart.umd.min.js"></script>
+
* { margin: 0; padding: 0; box-sizing: border-box; }
+
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
+
.container { max-width: 1600px; margin: 0 auto; }
+
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
+
grid-template-columns: repeat(auto-fit, minmax(120px, 1fr));
+
background: linear-gradient(135deg, #667eea 0%%, #764ba2 100%%);
+
.summary-metric-label {
+
.summary-metric-value {
+
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
+
grid-template-columns: repeat(3, 1fr);
+
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
+
border-left: 3px solid #667eea;
+
grid-template-columns: repeat(auto-fit, minmax(100px, 1fr));
+
padding: 0.4rem 0.6rem;
+
justify-content: space-between;
+
.compact-metric .label {
+
.compact-metric .value {
+
.compact-metric.highlight {
+
background: linear-gradient(135deg, #667eea 0%%, #764ba2 100%%);
+
.compact-metric.highlight .label,
+
.compact-metric.highlight .value {
+
.compact-metric.error {
+
border: 1px solid #fcc;
+
grid-template-columns: 1fr 1fr;
+
@media (max-width: 1200px) {
+
.comparison-charts { grid-template-columns: 1fr; }
+
.chart-row { grid-template-columns: 1fr; }
+
@media (max-width: 768px) {
+
.compact-grid { grid-template-columns: repeat(2, 1fr); }
+
<div class="container">
+
<h1>Connection Pool Stress Test Results</h1>
+
<div class="subtitle">%s</div>
+
<div class="summary-grid">
+
<div class="summary-metric">
+
<div class="summary-metric-label">Tests</div>
+
<div class="summary-metric-value">%d</div>
+
<div class="summary-metric">
+
<div class="summary-metric-label">Messages</div>
+
<div class="summary-metric-value">%s</div>
+
<div class="summary-metric">
+
<div class="summary-metric-label">Errors</div>
+
<div class="summary-metric-value">%d</div>
+
<div class="summary-metric">
+
<div class="summary-metric-label">Duration</div>
+
<div class="summary-metric-value">%.1fs</div>
+
<div class="comparison">
+
<div class="comparison-charts">
+
<div class="comparison-chart"><canvas id="cmpThroughput"></canvas></div>
+
<div class="comparison-chart"><canvas id="cmpLatency"></canvas></div>
+
<div class="comparison-chart"><canvas id="cmpErrors"></canvas></div>
+
const testNames = [%s];
+
const throughputs = [%s];
+
const avgLatencies = [%s];
+
const errorRates = [%s];
+
primary: 'rgba(102, 126, 234, 0.8)',
+
secondary: 'rgba(118, 75, 162, 0.8)',
+
danger: 'rgba(220, 53, 69, 0.8)',
+
new Chart(document.getElementById('cmpThroughput'), {
+
backgroundColor: cc.primary,
+
borderColor: cc.primary,
+
maintainAspectRatio: false,
+
legend: { display: false },
+
title: { display: true, text: 'Throughput (msg/s)' }
+
scales: { y: { beginAtZero: true } }
+
new Chart(document.getElementById('cmpLatency'), {
+
backgroundColor: cc.secondary,
+
borderColor: cc.secondary,
+
maintainAspectRatio: false,
+
legend: { display: false },
+
title: { display: true, text: 'Avg Latency (ms)' }
+
scales: { y: { beginAtZero: true } }
+
new Chart(document.getElementById('cmpErrors'), {
+
backgroundColor: cc.danger,
+
borderColor: cc.danger,
+
maintainAspectRatio: false,
+
legend: { display: false },
+
title: { display: true, text: 'Error Rate (%%)' }
+
scales: { y: { beginAtZero: true } }
+
(if total_messages >= 1000 then
+
Printf.sprintf "%d,%03d" (total_messages / 1000) (total_messages mod 1000)
+
string_of_int total_messages)
+
(** Run all preset tests and return results *)
let run_all_presets ~env =
Printf.eprintf "Running test: %s\n%!" config.name;
···
let config = if config.name = "default" then custom_config else config in
Eio_main.run @@ fun env ->
+
let result = run_stress_test ~env config in
+
let results = [result] in
+
let json = Printf.sprintf "[%s]" (result_to_json result) in
let oc = open_out output_file in
Printf.printf "Results written to %s\n" output_file;
+
if Filename.check_suffix output_file ".json" then
+
Filename.chop_suffix output_file ".json" ^ ".html"
+
let html = generate_html_report results in
+
let oc_html = open_out html_file in
+
output_string oc_html html;
+
Printf.printf "HTML report written to %s\n" html_file;
Printf.printf "Test: %s - %d messages, %.2f msg/s, %.2fms avg latency, %d errors\n"
+
result.test_name result.total_messages result.throughput result.avg_latency result.total_errors
Eio_main.run @@ fun env ->
+
let results = run_all_presets ~env in
+
let json = "[" ^ String.concat ",\n" (List.map result_to_json results) ^ "]" in
let oc = open_out output_file in
Printf.printf "Results written to %s\n" output_file;
+
if Filename.check_suffix output_file ".json" then
+
Filename.chop_suffix output_file ".json" ^ ".html"
+
let html = generate_html_report results in
+
let oc_html = open_out html_file in
+
output_string oc_html html;
+
Printf.printf "HTML report written to %s\n" html_file;
Printf.printf " %s: %d messages, %.2f msg/s, %.2fms avg latency, %d errors\n"
+
r.test_name r.total_messages r.throughput r.avg_latency r.total_errors
Printf.printf "Running extended stress test: %d servers, %d clients/server, %d msgs/client\n"
···
Printf.printf "Total messages: %d\n%!"
(extended_preset.num_servers * extended_preset.num_clients * extended_preset.messages_per_client);
Eio_main.run @@ fun env ->
+
let result = run_stress_test ~env extended_preset in
+
let results = [result] in
+
let json = Printf.sprintf "[%s]" (result_to_json result) in
let oc = open_out output_file in
Printf.printf "Results written to %s\n" output_file;
+
if Filename.check_suffix output_file ".json" then
+
Filename.chop_suffix output_file ".json" ^ ".html"
+
let html = generate_html_report results in
+
let oc_html = open_out html_file in
+
output_string oc_html html;
+
Printf.printf "HTML report written to %s\n" html_file;
Printf.printf "Test: %s - %d messages, %.2f msg/s, %.2fms avg latency, %d errors\n"
+
result.test_name result.total_messages result.throughput result.avg_latency result.total_errors