TCP/TLS connection pooling for Eio

HTML output: generate the stress-test report directly from stress_test.ml, replacing the separate trace/visualize step.
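For orientation, a minimal sketch of the pool API as this test exercises it. Endpoint.make, Config.make and with_connection all appear in the diff below; the Conpool.create constructor and its labels are an assumption, since pool creation itself is not shown in this diff.

  (* Sketch only: Conpool.create is an assumed constructor name; Endpoint.make,
     Config.make and with_connection appear in the diff below. *)
  let demo ~sw ~net =
    let endpoint = Conpool.Endpoint.make ~host:"127.0.0.1" ~port:8080 in
    let config =
      Conpool.Config.make
        ~max_connections_per_endpoint:8
        ~max_idle_time:30.0
        ~max_connection_lifetime:120.0
        ~connect_timeout:5.0
        ~connect_retry_count:3
        ()
    in
    let pool = Conpool.create ~sw ~net config in   (* assumed constructor *)
    Conpool.with_connection pool endpoint (fun flow ->
      Eio.Flow.copy_string "ping\n" flow;
      Eio.Buf_read.(line (of_flow flow ~max_size:1024)))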

Changed files: +642 -118

test/dune: +1 -11
···
(executable
(name stress_test)
- (modules stress_test trace)
+ (modules stress_test)
(libraries conpool eio eio_main unix))
- (executable
- (name visualize)
- (modules visualize)
- (libraries str))
-
(rule
(alias runtest)
(deps stress_test.exe)
(action (run ./stress_test.exe --all -o stress_test_results.json)))
-
- (rule
- (alias runtest)
- (deps visualize.exe stress_test_results.json)
- (action (run ./visualize.exe -i stress_test_results.json -o stress_test_results.html)))

test/stress_test.ml: +641 -107
···
mutable total : float;
mutable min : float;
mutable max : float;
+ mutable latencies : (float * float) list; (* (timestamp, latency) pairs *)
}
let create_latency_stats () = {
···
total = 0.0;
min = Float.infinity;
max = 0.0;
+ latencies = [];
}
- let update_latency stats latency =
+ let update_latency stats latency timestamp =
stats.count <- stats.count + 1;
stats.total <- stats.total +. latency;
stats.min <- min stats.min latency;
- stats.max <- max stats.max latency
+ stats.max <- max stats.max latency;
+ stats.latencies <- (timestamp, latency) :: stats.latencies
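The accumulated (timestamp, latency) pairs also make post-run statistics easy to derive. An illustrative sketch, not part of the patch, for an average and a percentile:

  (* Illustrative only: derive an average and a percentile from the recorded
     (timestamp, latency) pairs. *)
  let average pairs =
    match pairs with
    | [] -> 0.0
    | _ ->
      List.fold_left (fun acc (_, l) -> acc +. l) 0.0 pairs
      /. float_of_int (List.length pairs)

  let percentile p pairs =
    match List.sort compare (List.map snd pairs) with
    | [] -> 0.0
    | sorted ->
      let n = List.length sorted in
      List.nth sorted (min (n - 1) (int_of_float (p *. float_of_int n)))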
(** Generate a random message of given size *)
let generate_message size =
···
port
(** Client test: connect via pool, send message, verify echo *)
- let run_client_test ~clock ~collector pool endpoint endpoint_id message client_id latency_stats errors =
+ let run_client_test ~clock ~test_start_time pool endpoint message latency_stats errors =
let msg_len = String.length message in
let start_time = Eio.Time.now clock in
-
- (* Get or create connection ID for tracking *)
- let conn_id = Trace.next_connection_id collector in
try
Conpool.with_connection pool endpoint (fun flow ->
- (* Record acquire event *)
- Trace.record collector ~clock ~event_type:Trace.Connection_acquired
- ~endpoint_id ~connection_id:conn_id ~client_id ();
-
(* Send message *)
Eio.Flow.copy_string message flow;
Eio.Flow.copy_string "\n" flow;
- Trace.record collector ~clock ~event_type:Trace.Message_sent
- ~endpoint_id ~connection_id:conn_id ~client_id ();
(* Read echo response *)
let response = Eio.Buf_read.of_flow flow ~max_size:(msg_len + 1) in
let echoed = Eio.Buf_read.line response in
- Trace.record collector ~clock ~event_type:Trace.Message_received
- ~endpoint_id ~connection_id:conn_id ~client_id ();
let end_time = Eio.Time.now clock in
let latency = (end_time -. start_time) *. 1000.0 in (* Convert to ms *)
+ let relative_time = (end_time -. test_start_time) *. 1000.0 in (* ms since test start *)
if String.equal echoed message then begin
- update_latency latency_stats latency;
- Trace.record collector ~clock ~event_type:Trace.Message_verified
- ~endpoint_id ~connection_id:conn_id ~client_id ()
+ update_latency latency_stats latency relative_time
end else begin
- incr errors;
- Trace.record collector ~clock ~event_type:(Trace.Connection_error "echo_mismatch")
- ~endpoint_id ~connection_id:conn_id ~client_id ()
- end;
-
- (* Record release event *)
- Trace.record collector ~clock ~event_type:Trace.Connection_released
- ~endpoint_id ~connection_id:conn_id ~client_id ()
+ incr errors
+ end
)
- with ex ->
- incr errors;
- Trace.record collector ~clock ~event_type:(Trace.Connection_error (Printexc.to_string ex))
- ~endpoint_id ~connection_id:conn_id ~client_id ()
+ with _ex ->
+ incr errors
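The simplified error path above assumes with_connection returns the connection to the pool even when the callback raises. That behaviour is not visible in this diff; a plausible shape, as a sketch only, would be:

  (* Sketch of the assumed behaviour, not Conpool's actual code: the connection
     goes back to the pool whether the callback returns or raises. *)
  let with_connection_sketch ~acquire ~release pool endpoint fn =
    let conn = acquire pool endpoint in
    Fun.protect ~finally:(fun () -> release pool conn) (fun () -> fn conn)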
(** Run a single client that sends multiple messages *)
- let run_client ~clock ~collector pool endpoints config latency_stats errors client_id =
- for _ = 1 to config.messages_per_client do
+ let run_client ~clock ~test_start_time pool endpoints (cfg : config) latency_stats errors client_id =
+ for _ = 1 to cfg.messages_per_client do
let endpoint_idx = Random.int (Array.length endpoints) in
let endpoint = endpoints.(endpoint_idx) in
- let message = Printf.sprintf "c%d-%s" client_id (generate_message config.message_size) in
- run_client_test ~clock ~collector pool endpoint endpoint_idx message client_id latency_stats errors
+ let message = Printf.sprintf "c%d-%s" client_id (generate_message cfg.message_size) in
+ run_client_test ~clock ~test_start_time pool endpoint message latency_stats errors
done
- (** Main stress test runner - returns a test trace *)
- let run_stress_test ~env config : Trace.test_trace =
+ (** Pool statistics aggregated from all endpoints *)
+ type pool_stats = {
+ total_created : int;
+ total_reused : int;
+ total_closed : int;
+ active : int;
+ idle : int;
+ pool_errors : int;
+ }
+
+ (** Test result type *)
+ type test_result = {
+ test_name : string;
+ num_servers : int;
+ num_clients : int;
+ messages_per_client : int;
+ pool_size : int;
+ duration : float;
+ total_messages : int;
+ total_errors : int;
+ throughput : float;
+ avg_latency : float;
+ min_latency : float;
+ max_latency : float;
+ latency_data : (float * float) list; (* (timestamp, latency) pairs for visualization *)
+ pool_stats : pool_stats;
+ }
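Two figures the HTML report derives later can be expressed directly over these records. Illustrative helpers, not in the patch:

  (* Illustrative helpers over the records above; they mirror the error-rate and
     reuse-rate figures printed in the HTML report further down. *)
  let error_rate (r : test_result) =
    if r.total_messages = 0 then 0.0
    else float_of_int r.total_errors /. float_of_int r.total_messages *. 100.0

  let reuse_rate (ps : pool_stats) =
    if ps.total_created = 0 then 0.0
    else float_of_int ps.total_reused /. float_of_int ps.total_created *. 100.0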
+
+ (** Main stress test runner - returns a test result *)
+ let run_stress_test ~env (cfg : config) : test_result =
let net = Eio.Stdenv.net env in
let clock = Eio.Stdenv.clock env in
- let collector = Trace.create_collector () in
let latency_stats = create_latency_stats () in
let errors = ref 0 in
let ports = ref [||] in
- let trace_config : Trace.test_config = {
- num_servers = config.num_servers;
- num_clients = config.num_clients;
- messages_per_client = config.messages_per_client;
- max_parallel_clients = config.max_parallel_clients;
- message_size = config.message_size;
- pool_size = config.pool_size;
- } in
-
- let start_unix_time = Unix.gettimeofday () in
-
- let result = ref None in
+ let result : test_result option ref = ref None in
begin
try
Eio.Switch.run @@ fun sw ->
(* Start echo servers *)
- ports := Array.init config.num_servers (fun _ ->
+ ports := Array.init cfg.num_servers (fun _ ->
start_echo_server ~sw net
);
···
Conpool.Endpoint.make ~host:"127.0.0.1" ~port
) !ports in
- (* Create connection pool with hooks to track events *)
+ (* Create connection pool *)
let pool_config = Conpool.Config.make
- ~max_connections_per_endpoint:config.pool_size
+ ~max_connections_per_endpoint:cfg.pool_size
~max_idle_time:30.0
~max_connection_lifetime:120.0
~connect_timeout:5.0
~connect_retry_count:3
- ~on_connection_created:(fun ep ->
- let port = Conpool.Endpoint.port ep in
- let endpoint_id = Array.to_list !ports
- |> List.mapi (fun i p -> (i, p))
- |> List.find (fun (_, p) -> p = port)
- |> fst in
- let conn_id = Trace.next_connection_id collector in
- Trace.record collector ~clock ~event_type:Trace.Connection_created
- ~endpoint_id ~connection_id:conn_id ()
- )
- ~on_connection_reused:(fun ep ->
- let port = Conpool.Endpoint.port ep in
- let endpoint_id = Array.to_list !ports
- |> List.mapi (fun i p -> (i, p))
- |> List.find (fun (_, p) -> p = port)
- |> fst in
- let conn_id = Trace.next_connection_id collector in
- Trace.record collector ~clock ~event_type:Trace.Connection_reused
- ~endpoint_id ~connection_id:conn_id ()
- )
- ~on_connection_closed:(fun ep ->
- let port = Conpool.Endpoint.port ep in
- let endpoint_id = Array.to_list !ports
- |> List.mapi (fun i p -> (i, p))
- |> List.find (fun (_, p) -> p = port)
- |> fst in
- let conn_id = Trace.next_connection_id collector in
- Trace.record collector ~clock ~event_type:Trace.Connection_closed
- ~endpoint_id ~connection_id:conn_id ()
- )
()
in
···
(* Record start time *)
let start_time = Eio.Time.now clock in
- Trace.set_start_time collector start_time;
(* Run clients in parallel *)
- let total_clients = config.num_servers * config.num_clients in
+ let total_clients = cfg.num_servers * cfg.num_clients in
let client_ids = List.init total_clients (fun i -> i) in
- Eio.Fiber.List.iter ~max_fibers:config.max_parallel_clients
+ Eio.Fiber.List.iter ~max_fibers:cfg.max_parallel_clients
(fun client_id ->
- run_client ~clock ~collector pool endpoints config latency_stats errors client_id)
+ run_client ~clock ~test_start_time:start_time pool endpoints cfg latency_stats errors client_id)
client_ids;
let end_time = Eio.Time.now clock in
let duration = end_time -. start_time in
+ (* Collect pool statistics from all endpoints *)
+ let all_stats = Conpool.all_stats pool in
+ let pool_stats = List.fold_left (fun acc (_, stats) ->
+ {
+ total_created = acc.total_created + Conpool.Stats.total_created stats;
+ total_reused = acc.total_reused + Conpool.Stats.total_reused stats;
+ total_closed = acc.total_closed + Conpool.Stats.total_closed stats;
+ active = acc.active + Conpool.Stats.active stats;
+ idle = acc.idle + Conpool.Stats.idle stats;
+ pool_errors = acc.pool_errors + Conpool.Stats.errors stats;
+ }
+ ) { total_created = 0; total_reused = 0; total_closed = 0; active = 0; idle = 0; pool_errors = 0 } all_stats in
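Conpool.all_stats is folded into a single aggregate here. For debugging it can help to dump the per-endpoint numbers first; a sketch, assuming the first element of each pair returned by all_stats is the endpoint (matching the (_, stats) pattern above):

  (* Sketch: print per-endpoint pool statistics before they are aggregated. *)
  let print_endpoint_stats pool =
    List.iter (fun (ep, stats) ->
        Printf.printf "port %d: created=%d reused=%d closed=%d active=%d idle=%d errors=%d\n"
          (Conpool.Endpoint.port ep)
          (Conpool.Stats.total_created stats)
          (Conpool.Stats.total_reused stats)
          (Conpool.Stats.total_closed stats)
          (Conpool.Stats.active stats)
          (Conpool.Stats.idle stats)
          (Conpool.Stats.errors stats))
      (Conpool.all_stats pool)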
+
(* Build result *)
- let events = Trace.get_events collector in
- let endpoint_summaries = Trace.compute_endpoint_summaries events config.num_servers !ports in
-
- result := Some {
- Trace.test_name = config.name;
- config = trace_config;
- start_time = start_unix_time;
+ let r : test_result = {
+ test_name = cfg.name;
+ num_servers = cfg.num_servers;
+ num_clients = cfg.num_clients;
+ messages_per_client = cfg.messages_per_client;
+ pool_size = cfg.pool_size;
duration;
- events;
- endpoint_summaries;
total_messages = latency_stats.count;
total_errors = !errors;
throughput = float_of_int latency_stats.count /. duration;
···
else 0.0;
min_latency = if latency_stats.count > 0 then latency_stats.min else 0.0;
max_latency = latency_stats.max;
- };
+ latency_data = List.rev latency_stats.latencies;
+ pool_stats;
+ } in
+ result := Some r;
Eio.Switch.fail sw Exit
with Exit -> ()
···
| Some r -> r
| None -> failwith "Test failed to produce result"
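The surrounding structure deliberately fails the switch with Exit to tear down the echo servers once the result is captured. A self-contained sketch of that pattern (the forked fiber stands in for an echo server):

  (* Sketch of the teardown pattern: background fibers run under a switch;
     failing the switch with Exit cancels them once the result is saved. *)
  let run_with_servers ~clock fn =
    let result = ref None in
    begin
      try
        Eio.Switch.run @@ fun sw ->
        Eio.Fiber.fork ~sw (fun () ->
          (* stands in for an echo server's accept loop *)
          while true do Eio.Time.sleep clock 0.1 done);
        result := Some (fn ());
        Eio.Switch.fail sw Exit
      with Exit -> ()
    end;
    match !result with
    | Some r -> r
    | None -> failwith "no result produced"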
- (** Run all preset tests and return traces *)
+ (** Convert result to JSON string *)
+ let result_to_json result =
+ Printf.sprintf {|{
+ "test_name": "%s",
+ "num_servers": %d,
+ "num_clients": %d,
+ "messages_per_client": %d,
+ "duration": %.3f,
+ "total_messages": %d,
+ "total_errors": %d,
+ "throughput": %.2f,
+ "avg_latency": %.2f,
+ "min_latency": %.2f,
+ "max_latency": %.2f
+ }|}
+ result.test_name
+ result.num_servers
+ result.num_clients
+ result.messages_per_client
+ result.duration
+ result.total_messages
+ result.total_errors
+ result.throughput
+ result.avg_latency
+ result.min_latency
+ result.max_latency
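result_to_json interpolates test_name without escaping; the preset names are plain ASCII, so this is only a robustness note. A minimal sketch that mirrors the js_escape helper defined just below:

  (* Sketch: escape the name before interpolating it into the JSON string.
     json_escape is illustrative and mirrors the js_escape helper below. *)
  let json_escape s =
    let buf = Buffer.create (String.length s) in
    String.iter (fun c ->
        match c with
        | '\\' -> Buffer.add_string buf "\\\\"
        | '"' -> Buffer.add_string buf "\\\""
        | '\n' -> Buffer.add_string buf "\\n"
        | _ -> Buffer.add_char buf c)
      s;
    Buffer.contents buf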
+
+
(** Escape strings for JavaScript *)
+
let js_escape s =
+
let buf = Buffer.create (String.length s) in
+
String.iter (fun c ->
+
match c with
+
| '\\' -> Buffer.add_string buf "\\\\"
+
| '"' -> Buffer.add_string buf "\\\""
+
| '\n' -> Buffer.add_string buf "\\n"
+
| '\r' -> Buffer.add_string buf "\\r"
+
| '\t' -> Buffer.add_string buf "\\t"
+
| _ -> Buffer.add_char buf c
+
) s;
+
Buffer.contents buf
+
+
(** Calculate histogram buckets for latency data *)
+
let calculate_histogram latencies num_buckets =
+
if List.length latencies = 0 then ([], []) else
+
let latency_values = List.map snd latencies in
+
let min_lat = List.fold_left min Float.infinity latency_values in
+
let max_lat = List.fold_left max 0.0 latency_values in
+
let bucket_width = (max_lat -. min_lat) /. float_of_int num_buckets in
+
+
let buckets = Array.make num_buckets 0 in
+
List.iter (fun lat ->
+
let bucket_idx = min (num_buckets - 1) (int_of_float ((lat -. min_lat) /. bucket_width)) in
+
buckets.(bucket_idx) <- buckets.(bucket_idx) + 1
+
) latency_values;
+
+
let bucket_labels = List.init num_buckets (fun i ->
+
let start = min_lat +. (float_of_int i *. bucket_width) in
+
Printf.sprintf "%.2f" start
+
) in
+
let bucket_counts = Array.to_list buckets in
+
(bucket_labels, bucket_counts)
+
+
(** Generate HTML report from test results *)
+
let generate_html_report results =
+
let timestamp = Unix.time () |> Unix.gmtime in
+
let date_str = Printf.sprintf "%04d-%02d-%02d %02d:%02d:%02d UTC"
+
(timestamp.Unix.tm_year + 1900)
+
(timestamp.Unix.tm_mon + 1)
+
timestamp.Unix.tm_mday
+
timestamp.Unix.tm_hour
+
timestamp.Unix.tm_min
+
timestamp.Unix.tm_sec
+
in
+
+
(* Calculate summary statistics *)
+
let total_messages = List.fold_left (fun acc r -> acc + r.total_messages) 0 results in
+
let total_errors = List.fold_left (fun acc r -> acc + r.total_errors) 0 results in
+
let total_duration = List.fold_left (fun acc r -> acc +. r.duration) 0.0 results in
+
+
(* Generate JavaScript arrays for comparison charts *)
+
let test_names = String.concat ", " (List.map (fun r -> Printf.sprintf "\"%s\"" (js_escape r.test_name)) results) in
+
let throughputs = String.concat ", " (List.map (fun r -> Printf.sprintf "%.2f" r.throughput) results) in
+
let avg_latencies = String.concat ", " (List.map (fun r -> Printf.sprintf "%.2f" r.avg_latency) results) in
+
let error_rates = String.concat ", " (List.map (fun r ->
+
if r.total_messages > 0 then
+
Printf.sprintf "%.2f" (float_of_int r.total_errors /. float_of_int r.total_messages *. 100.0)
+
else "0.0"
+
) results) in
+
+
(* Generate per-test detailed sections with histograms and timelines *)
+
let test_details = String.concat "\n" (List.mapi (fun idx r ->
+
let (hist_labels, hist_counts) = calculate_histogram r.latency_data 20 in
+
let hist_labels_str = String.concat ", " (List.map (fun s -> Printf.sprintf "\"%s\"" s) hist_labels) in
+
let hist_counts_str = String.concat ", " (List.map string_of_int hist_counts) in
+
+
(* Sample data points for timeline (take every Nth point if too many) *)
+
let max_points = 500 in
+
let sample_rate = max 1 ((List.length r.latency_data) / max_points) in
+
let sampled_data = List.filteri (fun i _ -> i mod sample_rate = 0) r.latency_data in
+
let timeline_data = String.concat ", " (List.map (fun (t, l) ->
+
Printf.sprintf "{x: %.2f, y: %.3f}" t l
+
) sampled_data) in
+
+
Printf.sprintf {|
+
<div class="test-detail">
+
<h3>%s</h3>
+
<div class="compact-grid">
+
<div class="compact-metric"><span class="label">Servers:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Clients:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Msgs/Client:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Pool Size:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Total Msgs:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Duration:</span> <span class="value">%.2fs</span></div>
+
<div class="compact-metric highlight"><span class="label">Throughput:</span> <span class="value">%.0f/s</span></div>
+
<div class="compact-metric highlight"><span class="label">Avg Lat:</span> <span class="value">%.2fms</span></div>
+
<div class="compact-metric"><span class="label">Min Lat:</span> <span class="value">%.2fms</span></div>
+
<div class="compact-metric"><span class="label">Max Lat:</span> <span class="value">%.2fms</span></div>
+
<div class="compact-metric %s"><span class="label">Errors:</span> <span class="value">%d</span></div>
+
</div>
+
<div class="compact-grid" style="margin-top: 0.5rem;">
+
<div class="compact-metric"><span class="label">Conns Created:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Conns Reused:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Conns Closed:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Active:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Idle:</span> <span class="value">%d</span></div>
+
<div class="compact-metric"><span class="label">Reuse Rate:</span> <span class="value">%.1f%%%%</span></div>
+
</div>
+
<div class="chart-row">
+
<div class="chart-half">
+
<h4>Latency Distribution</h4>
+
<canvas id="hist_%d"></canvas>
+
</div>
+
<div class="chart-half">
+
<h4>Latency Timeline</h4>
+
<canvas id="timeline_%d"></canvas>
+
</div>
+
</div>
+
</div>
+
<script>
+
new Chart(document.getElementById('hist_%d'), {
+
type: 'bar',
+
data: {
+
labels: [%s],
+
datasets: [{
+
label: 'Count',
+
data: [%s],
+
backgroundColor: 'rgba(102, 126, 234, 0.6)',
+
borderColor: 'rgba(102, 126, 234, 1)',
+
borderWidth: 1
+
}]
+
},
+
options: {
+
responsive: true,
+
maintainAspectRatio: false,
+
plugins: { legend: { display: false } },
+
scales: {
+
x: { title: { display: true, text: 'Latency (ms)' } },
+
y: { beginAtZero: true, title: { display: true, text: 'Count' } }
+
}
+
}
+
});
+
+
new Chart(document.getElementById('timeline_%d'), {
+
type: 'scatter',
+
data: {
+
datasets: [{
+
label: 'Latency',
+
data: [%s],
+
backgroundColor: 'rgba(118, 75, 162, 0.5)',
+
borderColor: 'rgba(118, 75, 162, 0.8)',
+
pointRadius: 2
+
}]
+
},
+
options: {
+
responsive: true,
+
maintainAspectRatio: false,
+
plugins: { legend: { display: false } },
+
scales: {
+
x: { title: { display: true, text: 'Time (ms)' } },
+
y: { beginAtZero: true, title: { display: true, text: 'Latency (ms)' } }
+
}
+
}
+
});
+
</script>|}
+
(js_escape r.test_name)
+
r.num_servers
+
r.num_clients
+
r.messages_per_client
+
r.pool_size
+
r.total_messages
+
r.duration
+
r.throughput
+
r.avg_latency
+
r.min_latency
+
r.max_latency
+
(if r.total_errors > 0 then "error" else "")
+
r.total_errors
+
r.pool_stats.total_created
+
r.pool_stats.total_reused
+
r.pool_stats.total_closed
+
r.pool_stats.active
+
r.pool_stats.idle
+
(if r.pool_stats.total_created > 0 then
+
(float_of_int r.pool_stats.total_reused /. float_of_int r.pool_stats.total_created *. 100.0)
+
else 0.0)
+
idx idx idx
+
hist_labels_str
+
hist_counts_str
+
idx
+
timeline_data
+
) results) in
+
+
Printf.sprintf {|<!DOCTYPE html>
+
<html lang="en">
+
<head>
+
<meta charset="UTF-8">
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
+
<title>Connection Pool Stress Test Results</title>
+
<script src="https://cdn.jsdelivr.net/npm/chart.js@4.4.0/dist/chart.umd.min.js"></script>
+
<style>
+
* { margin: 0; padding: 0; box-sizing: border-box; }
+
body {
+
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
+
background: #f5f5f5;
+
padding: 1rem;
+
color: #333;
+
font-size: 14px;
+
}
+
.container { max-width: 1600px; margin: 0 auto; }
+
h1 {
+
color: #667eea;
+
text-align: center;
+
margin-bottom: 0.3rem;
+
font-size: 1.8rem;
+
}
+
.subtitle {
+
text-align: center;
+
margin-bottom: 1rem;
+
font-size: 0.9rem;
+
color: #666;
+
}
+
.summary {
+
background: white;
+
border-radius: 6px;
+
padding: 1rem;
+
margin-bottom: 1rem;
+
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
+
}
+
.summary h2 {
+
color: #667eea;
+
margin-bottom: 0.8rem;
+
font-size: 1.2rem;
+
}
+
.summary-grid {
+
display: grid;
+
grid-template-columns: repeat(auto-fit, minmax(120px, 1fr));
+
gap: 0.8rem;
+
}
+
.summary-metric {
+
text-align: center;
+
padding: 0.8rem;
+
background: linear-gradient(135deg, #667eea 0%%, #764ba2 100%%);
+
border-radius: 4px;
+
color: white;
+
}
+
.summary-metric-label {
+
font-size: 0.75rem;
+
opacity: 0.9;
+
margin-bottom: 0.3rem;
+
}
+
.summary-metric-value {
+
font-size: 1.4rem;
+
font-weight: bold;
+
}
+
.comparison {
+
background: white;
+
border-radius: 6px;
+
padding: 1rem;
+
margin-bottom: 1rem;
+
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
+
}
+
.comparison h2 {
+
color: #667eea;
+
margin-bottom: 0.8rem;
+
font-size: 1.2rem;
+
}
+
.comparison-charts {
+
display: grid;
+
grid-template-columns: repeat(3, 1fr);
+
gap: 1rem;
+
}
+
.comparison-chart {
+
height: 200px;
+
position: relative;
+
}
+
.test-detail {
+
background: white;
+
border-radius: 6px;
+
padding: 1rem;
+
margin-bottom: 1rem;
+
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
+
border-left: 3px solid #667eea;
+
}
+
.test-detail h3 {
+
color: #764ba2;
+
margin-bottom: 0.6rem;
+
font-size: 1.1rem;
+
}
+
.test-detail h4 {
+
color: #666;
+
margin-bottom: 0.4rem;
+
font-size: 0.9rem;
+
font-weight: 500;
+
}
+
.compact-grid {
+
display: grid;
+
grid-template-columns: repeat(auto-fit, minmax(100px, 1fr));
+
gap: 0.4rem;
+
margin-bottom: 0.8rem;
+
font-size: 0.85rem;
+
}
+
.compact-metric {
+
background: #f8f9fa;
+
padding: 0.4rem 0.6rem;
+
border-radius: 3px;
+
display: flex;
+
justify-content: space-between;
+
align-items: center;
+
}
+
.compact-metric .label {
+
color: #666;
+
font-weight: 500;
+
}
+
.compact-metric .value {
+
color: #333;
+
font-weight: 600;
+
}
+
.compact-metric.highlight {
+
background: linear-gradient(135deg, #667eea 0%%, #764ba2 100%%);
+
color: white;
+
}
+
.compact-metric.highlight .label,
+
.compact-metric.highlight .value {
+
color: white;
+
}
+
.compact-metric.error {
+
background: #fee;
+
border: 1px solid #fcc;
+
}
+
.chart-row {
+
display: grid;
+
grid-template-columns: 1fr 1fr;
+
gap: 1rem;
+
}
+
.chart-half {
+
position: relative;
+
height: 220px;
+
}
+
@media (max-width: 1200px) {
+
.comparison-charts { grid-template-columns: 1fr; }
+
.chart-row { grid-template-columns: 1fr; }
+
}
+
@media (max-width: 768px) {
+
.compact-grid { grid-template-columns: repeat(2, 1fr); }
+
}
+
</style>
+
</head>
+
<body>
+
<div class="container">
+
<h1>Connection Pool Stress Test Results</h1>
+
<div class="subtitle">%s</div>
+
+
<div class="summary">
+
<h2>Summary</h2>
+
<div class="summary-grid">
+
<div class="summary-metric">
+
<div class="summary-metric-label">Tests</div>
+
<div class="summary-metric-value">%d</div>
+
</div>
+
<div class="summary-metric">
+
<div class="summary-metric-label">Messages</div>
+
<div class="summary-metric-value">%s</div>
+
</div>
+
<div class="summary-metric">
+
<div class="summary-metric-label">Errors</div>
+
<div class="summary-metric-value">%d</div>
+
</div>
+
<div class="summary-metric">
+
<div class="summary-metric-label">Duration</div>
+
<div class="summary-metric-value">%.1fs</div>
+
</div>
+
</div>
+
</div>
+
+
<div class="comparison">
+
<h2>Comparison</h2>
+
<div class="comparison-charts">
+
<div class="comparison-chart"><canvas id="cmpThroughput"></canvas></div>
+
<div class="comparison-chart"><canvas id="cmpLatency"></canvas></div>
+
<div class="comparison-chart"><canvas id="cmpErrors"></canvas></div>
+
</div>
+
</div>
+
+
%s
+
</div>
+
+
<script>
+
const testNames = [%s];
+
const throughputs = [%s];
+
const avgLatencies = [%s];
+
const errorRates = [%s];
+
+
const cc = {
+
primary: 'rgba(102, 126, 234, 0.8)',
+
secondary: 'rgba(118, 75, 162, 0.8)',
+
danger: 'rgba(220, 53, 69, 0.8)',
+
};
+
+
new Chart(document.getElementById('cmpThroughput'), {
+
type: 'bar',
+
data: {
+
labels: testNames,
+
datasets: [{
+
label: 'msg/s',
+
data: throughputs,
+
backgroundColor: cc.primary,
+
borderColor: cc.primary,
+
borderWidth: 1
+
}]
+
},
+
options: {
+
responsive: true,
+
maintainAspectRatio: false,
+
plugins: {
+
legend: { display: false },
+
title: { display: true, text: 'Throughput (msg/s)' }
+
},
+
scales: { y: { beginAtZero: true } }
+
}
+
});
+
+
new Chart(document.getElementById('cmpLatency'), {
+
type: 'bar',
+
data: {
+
labels: testNames,
+
datasets: [{
+
label: 'ms',
+
data: avgLatencies,
+
backgroundColor: cc.secondary,
+
borderColor: cc.secondary,
+
borderWidth: 1
+
}]
+
},
+
options: {
+
responsive: true,
+
maintainAspectRatio: false,
+
plugins: {
+
legend: { display: false },
+
title: { display: true, text: 'Avg Latency (ms)' }
+
},
+
scales: { y: { beginAtZero: true } }
+
}
+
});
+
+
new Chart(document.getElementById('cmpErrors'), {
+
type: 'bar',
+
data: {
+
labels: testNames,
+
datasets: [{
+
label: '%%',
+
data: errorRates,
+
backgroundColor: cc.danger,
+
borderColor: cc.danger,
+
borderWidth: 1
+
}]
+
},
+
options: {
+
responsive: true,
+
maintainAspectRatio: false,
+
plugins: {
+
legend: { display: false },
+
title: { display: true, text: 'Error Rate (%%)' }
+
},
+
scales: { y: { beginAtZero: true } }
+
}
+
});
+
</script>
+
</body>
+
</html>|}
+
date_str
+
(List.length results)
+
(if total_messages >= 1000 then
+
Printf.sprintf "%d,%03d" (total_messages / 1000) (total_messages mod 1000)
+
else
+
string_of_int total_messages)
+
total_errors
+
total_duration
+
test_details
+
test_names
+
throughputs
+
avg_latencies
+
error_rates
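The inline thousands formatting near the end of this function ("%d,%03d") only inserts a single separator, so it covers counts below one million. A general sketch for non-negative counts:

  (* Sketch: general thousands separator for non-negative counts. *)
  let with_commas n =
    let s = string_of_int n in
    let len = String.length s in
    let buf = Buffer.create (len + len / 3) in
    String.iteri (fun i c ->
        if i > 0 && (len - i) mod 3 = 0 then Buffer.add_char buf ',';
        Buffer.add_char buf c)
      s;
    Buffer.contents buf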
+
+
(** Run all preset tests and return results *)
let run_all_presets ~env =
List.map (fun config ->
Printf.eprintf "Running test: %s\n%!" config.name;
···
| Single config ->
let config = if config.name = "default" then custom_config else config in
Eio_main.run @@ fun env ->
- let trace = run_stress_test ~env config in
- let json = Printf.sprintf "[%s]" (Trace.trace_to_json trace) in
+ let result = run_stress_test ~env config in
+ let results = [result] in
+
+ (* Write JSON *)
+ let json = Printf.sprintf "[%s]" (result_to_json result) in
let oc = open_out output_file in
output_string oc json;
close_out oc;
Printf.printf "Results written to %s\n" output_file;
+
+ (* Write HTML *)
+ let html_file =
+ if Filename.check_suffix output_file ".json" then
+ Filename.chop_suffix output_file ".json" ^ ".html"
+ else
+ output_file ^ ".html"
+ in
+ let html = generate_html_report results in
+ let oc_html = open_out html_file in
+ output_string oc_html html;
+ close_out oc_html;
+ Printf.printf "HTML report written to %s\n" html_file;
+
Printf.printf "Test: %s - %d messages, %.2f msg/s, %.2fms avg latency, %d errors\n"
- trace.test_name trace.total_messages trace.throughput trace.avg_latency trace.total_errors
+ result.test_name result.total_messages result.throughput result.avg_latency result.total_errors
| AllPresets ->
Eio_main.run @@ fun env ->
- let traces = run_all_presets ~env in
- let json = "[" ^ String.concat ",\n" (List.map Trace.trace_to_json traces) ^ "]" in
+ let results = run_all_presets ~env in
+
+ (* Write JSON *)
+ let json = "[" ^ String.concat ",\n" (List.map result_to_json results) ^ "]" in
let oc = open_out output_file in
output_string oc json;
close_out oc;
Printf.printf "Results written to %s\n" output_file;
- List.iter (fun t ->
+
+ (* Write HTML *)
+ let html_file =
+ if Filename.check_suffix output_file ".json" then
+ Filename.chop_suffix output_file ".json" ^ ".html"
+ else
+ output_file ^ ".html"
+ in
+ let html = generate_html_report results in
+ let oc_html = open_out html_file in
+ output_string oc_html html;
+ close_out oc_html;
+ Printf.printf "HTML report written to %s\n" html_file;
+
+ List.iter (fun r ->
Printf.printf " %s: %d messages, %.2f msg/s, %.2fms avg latency, %d errors\n"
- t.Trace.test_name t.total_messages t.throughput t.avg_latency t.total_errors
- ) traces
+ r.test_name r.total_messages r.throughput r.avg_latency r.total_errors
+ ) results
| Extended ->
Printf.printf "Running extended stress test: %d servers, %d clients/server, %d msgs/client\n"
···
Printf.printf "Total messages: %d\n%!"
(extended_preset.num_servers * extended_preset.num_clients * extended_preset.messages_per_client);
Eio_main.run @@ fun env ->
- let trace = run_stress_test ~env extended_preset in
- let json = Printf.sprintf "[%s]" (Trace.trace_to_json trace) in
+ let result = run_stress_test ~env extended_preset in
+ let results = [result] in
+
+ (* Write JSON *)
+ let json = Printf.sprintf "[%s]" (result_to_json result) in
let oc = open_out output_file in
output_string oc json;
close_out oc;
Printf.printf "Results written to %s\n" output_file;
+
+ (* Write HTML *)
+ let html_file =
+ if Filename.check_suffix output_file ".json" then
+ Filename.chop_suffix output_file ".json" ^ ".html"
+ else
+ output_file ^ ".html"
+ in
+ let html = generate_html_report results in
+ let oc_html = open_out html_file in
+ output_string oc_html html;
+ close_out oc_html;
+ Printf.printf "HTML report written to %s\n" html_file;
+
Printf.printf "Test: %s - %d messages, %.2f msg/s, %.2fms avg latency, %d errors\n"
- trace.test_name trace.total_messages trace.throughput trace.avg_latency trace.total_errors
+ result.test_name result.total_messages result.throughput result.avg_latency result.total_errors
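The JSON-plus-HTML writing block is repeated verbatim in the Single, AllPresets and Extended branches. A sketch of a shared helper (write_reports is an illustrative name, not part of the patch):

  (* Sketch: factor the repeated report writing out of the three branches. *)
  let write_reports ~output_file results =
    let json = "[" ^ String.concat ",\n" (List.map result_to_json results) ^ "]" in
    let oc = open_out output_file in
    output_string oc json;
    close_out oc;
    Printf.printf "Results written to %s\n" output_file;
    let html_file =
      if Filename.check_suffix output_file ".json" then
        Filename.chop_suffix output_file ".json" ^ ".html"
      else
        output_file ^ ".html"
    in
    let oc_html = open_out html_file in
    output_string oc_html (generate_html_report results);
    close_out oc_html;
    Printf.printf "HTML report written to %s\n" html_file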