My agentic slop goes here. Not intended for anyone else!

more

+291
stack/requests/lib/digest_auth.ml
···
···
+
(** RFC 2617 HTTP Digest Authentication implementation *)
+
+
module Log = (val Logs.src_log (Logs.Src.create "requests.digest_auth" ~doc:"HTTP Digest Authentication") : Logs.LOG)
+
+
(** Digest auth challenge parameters from WWW-Authenticate header *)
+
type challenge = {
+
realm : string;
+
domain : string option;
+
nonce : string;
+
opaque : string option;
+
stale : bool;
+
algorithm : [`MD5 | `MD5_sess | `SHA256 | `SHA256_sess];
+
qop : [`Auth | `Auth_int] list option; (* quality of protection *)
+
charset : string option;
+
userhash : bool;
+
}
+
+
(** Client's chosen parameters for response *)
+
type client_data = {
+
username : string;
+
password : string;
+
nc : int; (* nonce count *)
+
cnonce : string; (* client nonce *)
+
qop_chosen : [`Auth | `Auth_int] option;
+
}
+
+
(** Parse WWW-Authenticate header for Digest challenge *)
+
let parse_challenge header_value =
+
(* Remove "Digest " prefix if present *)
+
let value =
+
if String.starts_with ~prefix:"Digest " header_value then
+
String.sub header_value 7 (String.length header_value - 7)
+
else header_value
+
in
+
+
(* Parse comma-separated key=value pairs *)
+
let parse_params str =
+
let rec parse_one pos acc =
+
if pos >= String.length str then acc
+
else
+
(* Skip whitespace *)
+
let pos = ref pos in
+
while !pos < String.length str && str.[!pos] = ' ' do incr pos done;
+
if !pos >= String.length str then acc
+
else
+
(* Find key *)
+
let key_start = !pos in
+
while !pos < String.length str && str.[!pos] <> '=' do incr pos done;
+
if !pos >= String.length str then acc
+
else
+
let key = String.trim (String.sub str key_start (!pos - key_start)) in
+
incr pos; (* Skip '=' *)
+
+
(* Parse value - may be quoted *)
+
let value, next_pos =
+
if !pos < String.length str && str.[!pos] = '"' then begin
+
(* Quoted value *)
+
incr pos;
+
let value_start = !pos in
+
while !pos < String.length str && str.[!pos] <> '"' do
+
if str.[!pos] = '\\' && !pos + 1 < String.length str then
+
pos := !pos + 2 (* Skip escaped character *)
+
else
+
incr pos
+
done;
+
let value = String.sub str value_start (!pos - value_start) in
+
if !pos < String.length str then incr pos; (* Skip closing quote *)
+
(* Skip to next comma *)
+
while !pos < String.length str && str.[!pos] <> ',' do incr pos done;
+
if !pos < String.length str then incr pos; (* Skip comma *)
+
(value, !pos)
+
end else begin
+
(* Unquoted value *)
+
let value_start = !pos in
+
while !pos < String.length str && str.[!pos] <> ',' do incr pos done;
+
let value = String.trim (String.sub str value_start (!pos - value_start)) in
+
if !pos < String.length str then incr pos; (* Skip comma *)
+
(value, !pos)
+
end
+
in
+
parse_one next_pos ((key, value) :: acc)
+
in
+
List.rev (parse_one 0 [])
+
in
+
+
let params = parse_params value in
+
+
(* Extract required and optional parameters *)
+
let get_param name = List.assoc_opt name params in
+
let get_param_req name =
+
match get_param name with
+
| Some v -> v
+
| None -> failwith (Printf.sprintf "Missing required Digest parameter: %s" name)
+
in
+
+
try
+
let realm = get_param_req "realm" in
+
let nonce = get_param_req "nonce" in
+
+
let algorithm = match get_param "algorithm" with
+
| Some "MD5" | None -> `MD5
+
| Some "MD5-sess" -> `MD5_sess
+
| Some "SHA-256" -> `SHA256
+
| Some "SHA-256-sess" -> `SHA256_sess
+
| Some a ->
+
Log.warn (fun m -> m "Unknown digest algorithm: %s, using MD5" a);
+
`MD5
+
in
+
+
let qop = match get_param "qop" with
+
| None -> None
+
| Some qop_str ->
+
let qops = String.split_on_char ',' qop_str |> List.map String.trim in
+
Some (List.filter_map (function
+
| "auth" -> Some `Auth
+
| "auth-int" -> Some `Auth_int
+
| _ -> None
+
) qops)
+
in
+
+
Some {
+
realm;
+
domain = get_param "domain";
+
nonce;
+
opaque = get_param "opaque";
+
stale = (match get_param "stale" with
+
| Some "true" | Some "TRUE" -> true
+
| _ -> false);
+
algorithm;
+
qop;
+
charset = get_param "charset";
+
userhash = (match get_param "userhash" with
+
| Some "true" | Some "TRUE" -> true
+
| _ -> false);
+
}
+
with
+
| Failure msg ->
+
Log.warn (fun m -> m "Failed to parse Digest challenge: %s" msg);
+
None
+
| Not_found -> None
+
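(* Hedged usage sketch (not part of the implementation above): the header value
   below is invented for illustration, but tracing parse_challenge on it should
   give the fields in the asserts. Left unexecuted on purpose. *)
let _parse_challenge_example () =
  let hdr =
    "Digest realm=\"api@example.com\", qop=\"auth\", algorithm=SHA-256, \
     nonce=\"abc123\", opaque=\"xyz\""
  in
  match parse_challenge hdr with
  | Some c ->
    assert (c.realm = "api@example.com");
    assert (c.algorithm = `SHA256);
    assert (c.qop = Some [`Auth]);
    assert (c.opaque = Some "xyz")
  | None -> assert false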
+
(** Generate client nonce *)
+
let generate_cnonce () =
+
let rand_bytes = Mirage_crypto_rng.generate 16 in
+
Base64.encode_string rand_bytes
+
+
(** Hash function based on algorithm *)
+
let hash_function = function
+
| `MD5 | `MD5_sess ->
+
fun s -> Digestif.MD5.(to_hex (digest_string s))
+
| `SHA256 | `SHA256_sess ->
+
fun s -> Digestif.SHA256.(to_hex (digest_string s))
+
+
(** Calculate H(A1) according to RFC 2617 *)
+
let calculate_ha1 ~algorithm ~username ~realm ~password ~nonce ~cnonce =
+
let hash = hash_function algorithm in
+
match algorithm with
+
| `MD5 | `SHA256 ->
+
hash (Printf.sprintf "%s:%s:%s" username realm password)
+
| `MD5_sess | `SHA256_sess ->
+
let ha1_base = hash (Printf.sprintf "%s:%s:%s" username realm password) in
+
hash (Printf.sprintf "%s:%s:%s" ha1_base nonce cnonce)
+
+
(** Calculate H(A2) according to RFC 2617 *)
+
let calculate_ha2 ~algorithm ~meth ~uri ~qop ~body =
+
let hash = hash_function algorithm in
+
let method_str = match meth with
+
| `GET -> "GET" | `POST -> "POST" | `PUT -> "PUT"
+
| `DELETE -> "DELETE" | `HEAD -> "HEAD" | `OPTIONS -> "OPTIONS"
+
| `PATCH -> "PATCH" | `TRACE -> "TRACE" | `CONNECT -> "CONNECT"
+
| `Other s -> s
+
in
+
match qop with
+
| None | Some `Auth ->
+
hash (Printf.sprintf "%s:%s" method_str (Uri.path_and_query uri))
+
| Some `Auth_int ->
+
(* For auth-int, include hash of entity body *)
+
let body_hash = match body with
+
| None -> hash ""
+
| Some b -> hash b
+
in
+
hash (Printf.sprintf "%s:%s:%s" method_str (Uri.path_and_query uri) body_hash)
+
+
(** Calculate the response hash *)
+
let calculate_response ~algorithm ~ha1 ~ha2 ~nonce ~nc ~cnonce ~qop =
+
let hash = hash_function algorithm in (* the response digest uses the same hash as HA1/HA2 *)
+
match qop with
+
| None ->
+
hash (Printf.sprintf "%s:%s:%s" ha1 nonce ha2)
+
| Some qop_value ->
+
let qop_str = match qop_value with
+
| `Auth -> "auth"
+
| `Auth_int -> "auth-int"
+
in
+
let nc_str = Printf.sprintf "%08x" nc in
+
hash (Printf.sprintf "%s:%s:%s:%s:%s:%s" ha1 nonce nc_str cnonce qop_str ha2)
+
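(* Worked example from RFC 2617 section 3.5 ("Mufasa"): with MD5, qop=auth,
   nc=1 and the values below, the HA1 -> HA2 -> response chain above should
   reproduce the digest printed in the RFC. Kept as an unexecuted sanity-check sketch. *)
let _rfc2617_example () =
  let nonce = "dcd98b7102dd2f0e8b11d0f600bfb0c093" and cnonce = "0a4f113b" in
  let ha1 =
    calculate_ha1 ~algorithm:`MD5 ~username:"Mufasa" ~realm:"testrealm@host.com"
      ~password:"Circle Of Life" ~nonce ~cnonce
  in
  let ha2 =
    calculate_ha2 ~algorithm:`MD5 ~meth:`GET
      ~uri:(Uri.of_string "/dir/index.html") ~qop:(Some `Auth) ~body:None
  in
  let response =
    calculate_response ~algorithm:`MD5 ~ha1 ~ha2 ~nonce ~nc:1 ~cnonce
      ~qop:(Some `Auth)
  in
  assert (response = "6629fae49393a05397450978507c4ef1")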
+
(** Generate Authorization header value for Digest auth *)
+
let generate_auth_header ~challenge ~client_data ~meth ~uri ~body =
+
let { username; password; nc; cnonce; qop_chosen } = client_data in
+
let { realm; nonce; opaque; algorithm; _ } = challenge in
+
+
(* Calculate hashes *)
+
let ha1 = calculate_ha1 ~algorithm ~username ~realm ~password ~nonce ~cnonce in
+
let ha2 = calculate_ha2 ~algorithm ~meth ~uri ~qop:qop_chosen ~body in
+
let response = calculate_response ~algorithm ~ha1 ~ha2 ~nonce ~nc ~cnonce ~qop:qop_chosen in
+
+
(* Build Authorization header *)
+
let params = [
+
("username", Printf.sprintf "\"%s\"" username);
+
("realm", Printf.sprintf "\"%s\"" realm);
+
("nonce", Printf.sprintf "\"%s\"" nonce);
+
("uri", Printf.sprintf "\"%s\"" (Uri.path_and_query uri));
+
("response", Printf.sprintf "\"%s\"" response);
+
] in
+
+
let params = match algorithm with
+
| `MD5 -> params (* MD5 is default, don't need to specify *)
+
| `MD5_sess -> ("algorithm", "MD5-sess") :: params
+
| `SHA256 -> ("algorithm", "SHA-256") :: params
+
| `SHA256_sess -> ("algorithm", "SHA-256-sess") :: params
+
in
+
+
let params = match opaque with
+
| Some o -> ("opaque", Printf.sprintf "\"%s\"" o) :: params
+
| None -> params
+
in
+
+
let params = match qop_chosen with
+
| None -> params
+
| Some qop ->
+
let qop_str = match qop with `Auth -> "auth" | `Auth_int -> "auth-int" in
+
let nc_str = Printf.sprintf "%08x" nc in
+
("qop", qop_str) ::
+
("nc", nc_str) ::
+
("cnonce", Printf.sprintf "\"%s\"" cnonce) ::
+
params
+
in
+
+
"Digest " ^ String.concat ", " (List.map (fun (k, v) -> k ^ "=" ^ v) params)
+
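(* For the RFC 2617 example values above (MD5 default omitted, no opaque in the
   challenge, qop=auth, nc=1), the generated value comes out as:
   Digest qop=auth, nc=00000001, cnonce="0a4f113b", username="Mufasa",
   realm="testrealm@host.com", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093",
   uri="/dir/index.html", response="6629fae49393a05397450978507c4ef1" *)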
+
(** Nonce counter storage - in production should be persistent *)
+
module NonceCounter = struct
+
let table = Hashtbl.create 16
+
+
let get_and_increment ~nonce =
+
let current = try Hashtbl.find table nonce with Not_found -> 0 in
+
Hashtbl.replace table nonce (current + 1);
+
current + 1
+
+
let reset ~nonce =
+
Hashtbl.remove table nonce
+
end
+
+
(** Apply Digest authentication to a request *)
+
let apply_digest_auth ~username ~password ~meth ~uri ~headers ~body ~challenge_header =
+
match parse_challenge challenge_header with
+
| None ->
+
Log.warn (fun m -> m "Failed to parse Digest challenge");
+
headers
+
| Some challenge ->
+
(* Choose QOP if server offers options *)
+
let qop_chosen = match challenge.qop with
+
| None -> None
+
| Some qops ->
+
(* Prefer auth over auth-int for simplicity *)
+
if List.mem `Auth qops then Some `Auth
+
else if List.mem `Auth_int qops then Some `Auth_int
+
else None
+
in
+
+
(* Get or generate client nonce *)
+
let cnonce = generate_cnonce () in
+
+
(* Get and increment nonce counter *)
+
let nc = NonceCounter.get_and_increment ~nonce:challenge.nonce in
+
+
let client_data = { username; password; nc; cnonce; qop_chosen } in
+
let auth_value = generate_auth_header ~challenge ~client_data ~meth ~uri ~body in
+
+
Cohttp.Header.add headers "Authorization" auth_value
+
+
(** Check if a response requires digest auth *)
+
let is_digest_challenge response =
+
let status = Cohttp.Response.status response in
+
match Cohttp.Code.code_of_status status with
+
| 401 ->
+
(match Cohttp.Header.get (Cohttp.Response.headers response) "www-authenticate" with
+
| Some header when String.starts_with ~prefix:"Digest" header -> Some header
+
| _ -> None)
+
| _ -> None
+2 -1
stack/requests/lib/dune
···
(library
(public_name requests)
(name requests)
-
(libraries eio cohttp-eio tls-eio ca-certs x509 uri yojson logs base64 unix digestif mirage-crypto-rng mirage-crypto-rng.unix domain-name xdg))
···
(library
(public_name requests)
(name requests)
+
(modules requests requests_types requests_cache digest_auth)
+
(libraries eio cohttp-eio tls-eio ca-certs x509 uri yojson logs base64 unix digestif mirage-crypto-rng mirage-crypto-rng.unix domain-name xdg cacheio xdge cstruct))
+259 -103
stack/requests/lib/requests.ml
···
open Eio
(* Initialize the RNG on module load for OAuth and other crypto operations *)
let () = Mirage_crypto_rng_unix.use_default ()
···
type t =
| None
| Basic of { username : string; password : string }
-
| Digest of { username : string; password : string }
| Bearer of { token : string }
| OAuth1 of {
consumer_key : string;
···
let basic ~username ~password = Basic { username; password }
-
let digest ~username ~password = Digest { username; password }
let bearer ~token = Bearer { token }
···
| Basic { username; password } ->
let encoded = Base64.encode_string (Printf.sprintf "%s:%s" username password) in
Cohttp.Header.add headers "Authorization" (Printf.sprintf "Basic %s" encoded)
-
| Digest { username; password } ->
-
(* Simplified - would need challenge-response in real implementation *)
-
Log.warn (fun m -> m "Digest auth not fully implemented - using Basic fallback");
-
let encoded = Base64.encode_string (Printf.sprintf "%s:%s" username password) in
-
Cohttp.Header.add headers "Authorization" (Printf.sprintf "Basic %s" encoded)
| Bearer { token } ->
Cohttp.Header.add headers "Authorization" (Printf.sprintf "Bearer %s" token)
| OAuth1 { consumer_key; consumer_secret; token; token_secret; signature_method } ->
···
| Custom f -> f meth uri headers
end
-
module Response = struct
-
type t = {
-
status : Cohttp.Code.status_code;
-
headers : Cohttp.Header.t;
-
body : string;
-
body_stream : Buf_read.t option;
-
}
-
-
let status t = t.status
-
let headers t = t.headers
-
let body t = t.body
-
let body_stream t =
-
match t.body_stream with
-
| Some s -> s
-
| None -> Buf_read.of_string t.body
-
-
let is_success t =
-
let code = Cohttp.Code.code_of_status t.status in
-
code >= 200 && code < 300
-
-
let is_redirect t =
-
let code = Cohttp.Code.code_of_status t.status in
-
code >= 300 && code < 400
-
-
let is_client_error t =
-
let code = Cohttp.Code.code_of_status t.status in
-
code >= 400 && code < 500
-
-
let is_server_error t =
-
let code = Cohttp.Code.code_of_status t.status in
-
code >= 500 && code < 600
-
-
let _pp ppf t =
-
Format.fprintf ppf "@[<v>Response:@,Status: %s@,Headers: %d@,Body: %d bytes@]"
-
(Cohttp.Code.string_of_status t.status)
-
(Cohttp.Header.to_lines t.headers |> List.length)
-
(String.length t.body)
-
end
(* Retry Implementation *)
module Retry = struct
···
if t.backoff.factor = 0.0 then 0.0
else
let base_time = t.backoff.factor *. (2.0 ** float_of_int t.retry_count) in
-
let jittered = base_time +. Random.float t.backoff.jitter in
min jittered t.backoff.max
let sleep ~clock t response =
···
clock : clock;
tls_config : Tls.config;
default_headers : Cohttp.Header.t;
} constraint 'a = [> `Generic] Net.ty
-
let create ?(tls_config=Tls.default ()) ?(default_headers=Cohttp.Header.init ()) ~clock net =
-
{ net; clock = Clock clock; tls_config; default_headers }
let make_client net tls_config =
···
{ Response.status; headers; body; body_stream = None }
let rec request_with_retries ~sw t ?(config=Config.default) ?body ~meth uri retry_state =
-
let client = make_client t.net t.tls_config in
-
let merged_headers = merge_headers t.default_headers config.Config.headers in
-
(* Apply authentication *)
-
let merged_headers = Auth.apply config.Config.auth meth uri merged_headers in
-
let config = { config with Config.headers = merged_headers } in
-
try
-
let result = request_with_redirects ~sw client config uri 0 meth body in
-
if not (Response.is_success result) then
-
let status = Cohttp.Code.code_of_status result.Response.status in
-
if Retry.is_retry retry_state ~method_:meth ~status_code:status then begin
-
Log.info (fun m -> m "Retrying request to %a (attempt %d/%d)"
-
Uri.pp uri (retry_state.Retry.retry_count + 1) retry_state.Retry.total);
-
let retry_state = Retry.increment retry_state ~method_:meth ~url:uri
-
~response:result () in
-
(match t.clock with Clock c -> Retry.sleep ~clock:c retry_state (Some result));
-
request_with_retries ~sw t ~config ?body ~meth uri retry_state
-
end else
-
raise (Request_error (Http_error {
-
status = result.Response.status;
-
body = result.Response.body;
-
headers = result.Response.headers
-
}))
-
else
-
result
with
| Request_error _ as e -> raise e
| e ->
···
let clear_cookies t = t.cookies := []
end
-
let download_file ~sw t ?config uri ~path =
-
let response = get ~sw t ?config uri in
-
let oc = open_out_bin path in
-
try
-
output_string oc response.Response.body;
-
close_out oc
-
with e ->
-
close_out_noerr oc;
-
raise e
let stream_response ~sw t ?config uri f =
let client = make_client t.net t.tls_config in
···
| None -> Cohttp.Header.init ()
in
let merged_headers = merge_headers t.default_headers headers in
let _resp, body = Cohttp_eio.Client.get ~sw client uri ~headers:merged_headers in
let buf_reader = Eio.Buf_read.of_flow ~max_size:(16 * 1024 * 1024) body in
f buf_reader
···
let create_client = fun ?tls_config ?default_headers ~clock net ->
{ net; clock; tls_config = Option.value ~default:(Tls.default ()) tls_config;
-
default_headers = Option.value ~default:(Cohttp.Header.init ()) default_headers }
in
let req_client = create_client ?tls_config:t.tls_config ~default_headers:t.headers ~clock:t.clock t.net in
let config = Config.create ~headers ~follow_redirects:redirect () in
···
}
let choose_boundary () =
-
Printf.sprintf "----OCamlBoundary%08x%08x"
-
(Random.int 0x7FFFFFFF) (Random.int 0x7FFFFFFF)
let encode_multipart_formdata ~fields ~boundary =
let boundary = Option.value boundary ~default:(choose_boundary ()) in
···
let download ~sw t ?config ?(chunk_size=8192) uri ~sink =
Log.debug (fun m -> m "Streaming download from %s with chunk size %d" (Uri.to_string uri) chunk_size);
-
let response = get ~sw t ?config uri in
-
(* Write response body to sink in chunks *)
-
let body = Response.body response in
-
let rec write_chunks pos =
-
if pos < String.length body then
-
let len = min chunk_size (String.length body - pos) in
-
let chunk = String.sub body pos len in
-
Flow.copy_string chunk sink;
-
write_chunks (pos + len)
in
-
write_chunks 0
let iter_response ?(chunk_size=8192) response ~f =
-
let body = Response.body response in
-
let rec iter pos =
-
if pos < String.length body then
-
let len = min chunk_size (String.length body - pos) in
-
let chunk = String.sub body pos len in
-
f chunk;
-
iter (pos + len)
-
in
-
iter 0
let lines ?(chunk_size=8192) ?(keep_ends=false) response =
let body = Response.body response in
···
Log.warn (fun m -> m "Failed to parse JSON line: %s" line);
`Null)
end
(* Global defaults *)
module Defaults = struct
···
open Eio
+
(* Include shared types module *)
+
module Requests_types = Requests_types
+
+
(* Include cache module *)
+
module Requests_cache = Requests_cache
+
(* Initialize the RNG on module load for OAuth and other crypto operations *)
let () = Mirage_crypto_rng_unix.use_default ()
···
type t =
| None
| Basic of { username : string; password : string }
+
| DigestAuth of { username : string; password : string; challenge : string option ref }
| Bearer of { token : string }
| OAuth1 of {
consumer_key : string;
···
let basic ~username ~password = Basic { username; password }
+
let digest ~username ~password = DigestAuth { username; password; challenge = ref (None : string option) }
let bearer ~token = Bearer { token }
···
| Basic { username; password } ->
let encoded = Base64.encode_string (Printf.sprintf "%s:%s" username password) in
Cohttp.Header.add headers "Authorization" (Printf.sprintf "Basic %s" encoded)
+
| DigestAuth { username; password; challenge } ->
+
(* Use stored challenge if available, otherwise headers unchanged (will trigger 401) *)
+
(match !challenge with
+
| None -> headers (* No challenge yet, will get 401 response *)
+
| Some challenge_header ->
+
(* Apply digest auth with challenge *)
+
Digest_auth.apply_digest_auth ~username ~password ~meth ~uri ~headers ~body:None ~challenge_header)
| Bearer { token } ->
Cohttp.Header.add headers "Authorization" (Printf.sprintf "Bearer %s" token)
| OAuth1 { consumer_key; consumer_secret; token; token_secret; signature_method } ->
···
| Custom f -> f meth uri headers
end
+
module Response = Requests_types.Response
(* Retry Implementation *)
module Retry = struct
···
if t.backoff.factor = 0.0 then 0.0
else
let base_time = t.backoff.factor *. (2.0 ** float_of_int t.retry_count) in
+
(* Use Mirage crypto RNG for jitter calculation *)
+
let rand_bytes = Mirage_crypto_rng.generate 4 in
+
let rand_cstruct = Cstruct.of_string rand_bytes in
+
let rand_uint32 = Cstruct.LE.get_uint32 rand_cstruct 0 in
(* get_uint32 yields a signed Int32; mask through Int64 so the value is treated as unsigned *)
let rand_unsigned = Int64.logand (Int64.of_int32 rand_uint32) 0xFFFFFFFFL in
(* Convert to float in [0, 1) range *)
let normalized = Int64.to_float rand_unsigned /. (2.0 ** 32.0) in
+
let jittered = base_time +. (normalized *. t.backoff.jitter) in
min jittered t.backoff.max
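(* e.g. factor = 0.5, retry_count = 3, jitter = 0.25, max = 30.0:
   base_time = 0.5 *. 8.0 = 4.0 and jittered lands in [4.0, 4.25), under the cap *)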
let sleep ~clock t response =
···
clock : clock;
tls_config : Tls.config;
default_headers : Cohttp.Header.t;
+
cache : Requests_cache.t option;
} constraint 'a = [> `Generic] Net.ty
+
let create ?(tls_config=Tls.default ()) ?(default_headers=Cohttp.Header.init ()) ?cache ~clock net =
+
{ net; clock = Clock clock; tls_config; default_headers; cache }
+
+
let create_with_cache ~sw ?(tls_config=Tls.default ()) ?(default_headers=Cohttp.Header.init ())
+
~cache_dir ~clock net =
+
let cache = Requests_cache.create ~sw ~enabled:true ~cache_dir () in
+
{ net; clock = Clock clock; tls_config; default_headers; cache = Some cache }
let make_client net tls_config =
···
{ Response.status; headers; body; body_stream = None }
let rec request_with_retries ~sw t ?(config=Config.default) ?body ~meth uri retry_state =
+
(* Check cache first for GET/HEAD requests *)
+
let check_cache () =
+
match t.cache, meth with
+
| Some cache, (`GET | `HEAD) ->
+
Requests_cache.get cache ~method_:meth ~url:uri ~headers:config.Config.headers
+
| _ -> None
+
in
+
+
match check_cache () with
+
| Some cached_response ->
+
Log.debug (fun m -> m "Using cached response for %s" (Uri.to_string uri));
+
cached_response
+
| None ->
+
let client = make_client t.net t.tls_config in
+
let merged_headers = merge_headers t.default_headers config.Config.headers in
+
(* Apply authentication *)
+
let merged_headers = Auth.apply config.Config.auth meth uri merged_headers in
+
let config = { config with Config.headers = merged_headers } in
+
+
try
+
let result = request_with_redirects ~sw client config uri 0 meth body in
+
+
(* Handle Digest auth challenge if we get a 401 *)
+
let result = match config.Config.auth with
+
| DigestAuth { username = _; password = _; challenge } when result.Response.status = `Unauthorized ->
+
(match Cohttp.Header.get result.Response.headers "www-authenticate" with
+
| Some www_auth when String.starts_with ~prefix:"Digest" www_auth ->
+
(* Store the challenge *)
+
challenge := Some www_auth;
+
Log.debug (fun m -> m "Got Digest challenge, retrying with auth");
+
+
(* Retry request with digest auth *)
+
let merged_headers = merge_headers t.default_headers config.Config.headers in
+
let merged_headers = Auth.apply config.Config.auth meth uri merged_headers in
+
let config = { config with Config.headers = merged_headers } in
+
request_with_redirects ~sw client config uri 0 meth body
+
| _ -> result)
+
| _ -> result
+
in
+
+
(* Store successful responses in cache *)
+
(match t.cache with
+
| Some cache when Response.is_success result ->
+
Requests_cache.put cache ~method_:meth ~url:uri
+
~request_headers:config.Config.headers ~response:result
+
| _ -> ());
+
if not (Response.is_success result) then
+
let status = Cohttp.Code.code_of_status result.Response.status in
+
if Retry.is_retry retry_state ~method_:meth ~status_code:status then begin
+
Log.info (fun m -> m "Retrying request to %a (attempt %d/%d)"
+
Uri.pp uri (retry_state.Retry.retry_count + 1) retry_state.Retry.total);
+
let retry_state = Retry.increment retry_state ~method_:meth ~url:uri
+
~response:result () in
+
(match t.clock with Clock c -> Retry.sleep ~clock:c retry_state (Some result));
+
request_with_retries ~sw t ~config ?body ~meth uri retry_state
+
end else
+
raise (Request_error (Http_error {
+
status = result.Response.status;
+
body = result.Response.body;
+
headers = result.Response.headers
+
}))
+
else
+
result
with
| Request_error _ as e -> raise e
| e ->
···
let clear_cookies t = t.cookies := []
end
let stream_response ~sw t ?config uri f =
let client = make_client t.net t.tls_config in
···
| None -> Cohttp.Header.init ()
in
let merged_headers = merge_headers t.default_headers headers in
+
let merged_headers = match config with
+
| Some c -> Auth.apply c.Config.auth `GET uri merged_headers
+
| None -> merged_headers
+
in
let _resp, body = Cohttp_eio.Client.get ~sw client uri ~headers:merged_headers in
let buf_reader = Eio.Buf_read.of_flow ~max_size:(16 * 1024 * 1024) body in
f buf_reader
···
let create_client = fun ?tls_config ?default_headers ~clock net ->
{ net; clock; tls_config = Option.value ~default:(Tls.default ()) tls_config;
+
default_headers = Option.value ~default:(Cohttp.Header.init ()) default_headers;
+
cache = None }
in
let req_client = create_client ?tls_config:t.tls_config ~default_headers:t.headers ~clock:t.clock t.net in
let config = Config.create ~headers ~follow_redirects:redirect () in
···
}
let choose_boundary () =
+
(* Use Mirage crypto RNG for boundary generation *)
+
let rand_bytes = Mirage_crypto_rng.generate 8 in
+
let rand_cstruct = Cstruct.of_string rand_bytes in
+
let rand_hex = Cstruct.to_hex_string rand_cstruct in
+
Printf.sprintf "----OCamlBoundary%s" rand_hex
let encode_multipart_formdata ~fields ~boundary =
let boundary = Option.value boundary ~default:(choose_boundary ()) in
···
let download ~sw t ?config ?(chunk_size=8192) uri ~sink =
Log.debug (fun m -> m "Streaming download from %s with chunk size %d" (Uri.to_string uri) chunk_size);
+
(* Create client and perform streaming download directly *)
+
let client = make_client t.net t.tls_config in
+
let headers = match config with
+
| Some c -> c.Config.headers
+
| None -> Cohttp.Header.init ()
+
in
+
let merged_headers = merge_headers t.default_headers headers in
+
let merged_headers = match config with
+
| Some c -> Auth.apply c.Config.auth `GET uri merged_headers
+
| None -> merged_headers
+
in
+
+
let _resp, body = Cohttp_eio.Client.get ~sw client uri ~headers:merged_headers in
+
let buf_reader = Eio.Buf_read.of_flow ~max_size:(16 * 1024 * 1024) body in
+
+
(* Stream data in chunks to the sink. Buf_read.take raises End_of_file on a final
   chunk shorter than chunk_size, which would drop the tail of the body, so only
   take what is actually buffered on each pass. *)
let rec stream_chunks () =
  match Eio.Buf_read.ensure buf_reader 1 with
  | exception End_of_file -> ()
  | () ->
    let len = min chunk_size (Eio.Buf_read.buffered_bytes buf_reader) in
    Flow.copy_string (Eio.Buf_read.take len buf_reader) sink;
    stream_chunks ()
in
stream_chunks ()
+
let iter_response ?(chunk_size=8192) response ~f =
+
(* Use the body_stream if available, otherwise process the body string *)
+
match response.Response.body_stream with
+
| Some buf_reader ->
+
(* Take only what is buffered (up to chunk_size) so the final short chunk is not dropped *)
let rec iter_stream () =
  match Eio.Buf_read.ensure buf_reader 1 with
  | exception End_of_file -> ()
  | () ->
    let len = min chunk_size (Eio.Buf_read.buffered_bytes buf_reader) in
    f (Eio.Buf_read.take len buf_reader);
    iter_stream ()
in
iter_stream ()
+
| None ->
+
(* Fallback to processing the body string in chunks *)
+
let body = Response.body response in
+
let rec iter pos =
+
if pos < String.length body then
+
let len = min chunk_size (String.length body - pos) in
+
let chunk = String.sub body pos len in
+
f chunk;
+
iter (pos + len)
+
in
+
iter 0
let lines ?(chunk_size=8192) ?(keep_ends=false) response =
let body = Response.body response in
···
Log.warn (fun m -> m "Failed to parse JSON line: %s" line);
`Null)
end
+
+
(* Download utility functions using Stream module *)
+
let download_file ~sw t ?config uri ~path =
+
Log.debug (fun m -> m "Downloading file from %s to %s" (Uri.to_string uri) (Eio.Path.native_exn path));
+
(* Use streaming download to avoid loading entire file into memory *)
+
Eio.Path.with_open_out ~create:(`Or_truncate 0o644) path (fun file ->
+
let sink = (file :> Eio.Flow.sink_ty Eio.Resource.t) in
+
Stream.download ~sw t ?config uri ~sink
+
)
+
+
(* Add function for range request support with caching *)
+
let download_file_range ~sw t ?config uri ~path ~start_byte ~end_byte =
+
Log.debug (fun m -> m "Downloading file range %Ld-%Ld from %s to %s"
+
start_byte end_byte (Uri.to_string uri) (Eio.Path.native_exn path));
+
+
(* Check if we have this range in cache *)
+
let try_cache () =
+
match t.cache with
+
| Some cache ->
+
let range = Requests_cache.Range.{ start = start_byte; end_ = Some end_byte } in
+
Requests_cache.download_range cache ~sw ~url:uri ~range
+
~on_chunk:(fun _data -> ())
+
| None -> None
+
in
+
+
match try_cache () with
+
| Some true ->
+
Log.debug (fun m -> m "Using cached data for range %Ld-%Ld" start_byte end_byte)
+
| Some false | None ->
+
(* Fallback to regular range request *)
+
let range_header = Printf.sprintf "bytes=%Ld-%Ld" start_byte end_byte in
+
let config = match config with
+
| Some c -> Some (Config.add_header "Range" range_header c)
+
| None -> Some (Config.add_header "Range" range_header Config.default)
+
in
+
+
(* Download and cache the chunk *)
+
let response = get ~sw t ?config uri in
+
+
(* Store the chunk in cache if we have one *)
+
(match t.cache with
+
| Some cache ->
+
let range = Requests_cache.Range.{ start = start_byte; end_ = Some end_byte } in
+
Requests_cache.put_chunk cache ~url:uri ~range ~data:response.Response.body
+
| None -> ());
+
+
(* Write to file *)
+
Eio.Path.with_open_out ~create:(`Or_truncate 0o644) path (fun file ->
+
Flow.copy_string response.Response.body (file :> Eio.Flow.sink_ty Eio.Resource.t)
+
)
+
+
(* Add function for resumable downloads *)
+
let download_file_resume ~sw t ?config uri ~path =
+
Log.debug (fun m -> m "Attempting resumable download from %s to %s" (Uri.to_string uri) (Eio.Path.native_exn path));
+
let start_byte =
+
try
+
let stat = Eio.Path.stat ~follow:false path in
+
Optint.Int63.to_int64 stat.size
+
with
+
| _ -> 0L
+
in
+
+
if start_byte > 0L then (
+
Log.info (fun m -> m "Resuming download from byte %Ld" start_byte);
+
let range_header = Printf.sprintf "bytes=%Ld-" start_byte in
+
let config = match config with
+
| Some c -> Some (Config.add_header "Range" range_header c)
+
| None -> Some (Config.add_header "Range" range_header Config.default)
+
in
+
+
(* `If_missing rather than `Or_truncate: truncating here would wipe the partial file we are resuming *)
Eio.Path.with_open_out ~append:true ~create:(`If_missing 0o644) path (fun file ->
+
let sink = (file :> Eio.Flow.sink_ty Eio.Resource.t) in
+
Stream.download ~sw t ?config uri ~sink
+
)
+
) else (
+
download_file ~sw t ?config uri ~path
+
)
(* Global defaults *)
module Defaults = struct
+83 -1
stack/requests/lib/requests.mli
···
(** Create an insecure TLS config (no certificate verification) *)
end
(** Client type *)
type 'a t constraint 'a = [> `Generic] Net.ty
···
val create :
?tls_config:Tls.config ->
?default_headers:Cohttp.Header.t ->
clock:_ Eio.Time.clock ->
'a Net.t ->
'a t
···
'a t ->
?config:Config.t ->
Uri.t ->
-
path:string ->
unit
val stream_response :
···
sink:Flow.sink_ty Flow.sink ->
unit
(** Stream download to an Eio Flow sink *)
val iter_response :
?chunk_size:int ->
···
(** Create an insecure TLS config (no certificate verification) *)
end
+
(** Shared types module *)
+
module Requests_types : sig
+
module Response : sig
+
type t = {
+
status : Cohttp.Code.status_code;
+
headers : Cohttp.Header.t;
+
body : string;
+
body_stream : Eio.Buf_read.t option;
+
}
+
end
+
end
+
+
(** HTTP caching module *)
+
module Requests_cache : sig
+
type t
+
+
val create :
+
sw:Eio.Switch.t ->
+
enabled:bool ->
+
?cache_get_requests:bool ->
+
?cache_range_requests:bool ->
+
cache_dir:Eio.Fs.dir_ty Eio.Path.t option ->
+
unit -> t
+
+
val get : t ->
+
method_:[`GET | `HEAD | `POST | `PUT | `DELETE | `PATCH | `OPTIONS | `TRACE | `CONNECT] ->
+
url:Uri.t ->
+
headers:Cohttp.Header.t ->
+
Requests_types.Response.t option
+
+
val put : t ->
+
method_:[`GET | `HEAD | `POST | `PUT | `DELETE | `PATCH | `OPTIONS | `TRACE | `CONNECT] ->
+
url:Uri.t ->
+
request_headers:Cohttp.Header.t ->
+
response:Requests_types.Response.t ->
+
unit
+
+
val evict : t -> url:Uri.t -> unit
+
val clear : t -> unit
+
+
module Range : sig
+
type t = {
+
start : int64;
+
end_ : int64 option;
+
}
+
end
+
+
val put_chunk : t -> url:Uri.t -> range:Range.t -> data:string -> unit
+
val has_complete : t -> url:Uri.t -> total_size:int64 -> bool
+
val missing_ranges : t -> url:Uri.t -> total_size:int64 -> Range.t list
+
end
+
(** Client type *)
type 'a t constraint 'a = [> `Generic] Net.ty
···
val create :
?tls_config:Tls.config ->
?default_headers:Cohttp.Header.t ->
+
?cache:Requests_cache.t ->
+
clock:_ Eio.Time.clock ->
+
'a Net.t ->
+
'a t
+
+
(** Create an HTTP client with caching enabled *)
+
val create_with_cache :
+
sw:Eio.Switch.t ->
+
?tls_config:Tls.config ->
+
?default_headers:Cohttp.Header.t ->
+
cache_dir:Eio.Fs.dir_ty Eio.Path.t option ->
clock:_ Eio.Time.clock ->
'a Net.t ->
'a t
···
'a t ->
?config:Config.t ->
Uri.t ->
+
path:Eio.Fs.dir_ty Eio.Path.t ->
+
unit
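(** Download the body of the given [Uri.t] and write it to [path], streaming
    chunks to the file rather than holding the whole body in memory. *)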
+
+
val download_file_range :
+
sw:Switch.t ->
+
'a t ->
+
?config:Config.t ->
+
Uri.t ->
+
path:Eio.Fs.dir_ty Eio.Path.t ->
+
start_byte:int64 ->
+
end_byte:int64 ->
+
unit
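(** Fetch only the inclusive byte range [start_byte]..[end_byte] of the resource
    via an HTTP [Range] header and write it to [path]; when the client has a
    cache attached, previously stored chunks may be reused. *)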
+
+
val download_file_resume :
+
sw:Switch.t ->
+
'a t ->
+
?config:Config.t ->
+
Uri.t ->
+
path:Eio.Fs.dir_ty Eio.Path.t ->
unit
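(** Resume an interrupted download: if [path] already exists, its current size
    is used as the start of a [Range: bytes=N-] request and new data is appended;
    otherwise this behaves like [download_file]. *)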
val stream_response :
···
sink:Flow.sink_ty Flow.sink ->
unit
(** Stream download to an Eio Flow sink *)
+
val iter_response :
?chunk_size:int ->
+504
stack/requests/lib/requests_cache.ml
···
···
+
(** HTTP caching with cacheio integration - separate headers and body storage *)
+
+
open Eio
+
+
module Log = (val Logs.src_log (Logs.Src.create "requests.cache" ~doc:"HTTP cache with cacheio") : Logs.LOG)
+
+
type t = {
+
sw : Switch.t;
+
enabled : bool;
+
cache_get_requests : bool;
+
cache_range_requests : bool;
+
cacheio : Cacheio.t option; (* Optional cacheio backend *)
+
memory_cache : (string, Requests_types.Response.t * float) Hashtbl.t; (* Memory cache fallback *)
+
}
+
+
let create ~sw ~enabled ?(cache_get_requests=true) ?(cache_range_requests=true) ~cache_dir () =
+
let cacheio =
+
match cache_dir with
+
| Some dir when enabled ->
+
(try
+
Some (Cacheio.create ~base_dir:dir)
+
with e ->
+
Log.warn (fun m -> m "Failed to create cacheio backend: %s. Using memory cache only."
+
(Printexc.to_string e));
+
None)
+
| _ -> None
+
in
+
{ sw; enabled; cache_get_requests; cache_range_requests;
+
cacheio; memory_cache = Hashtbl.create 100 }
+
+
(** Generate cache key from method and URL *)
+
let make_cache_key ~method_ ~url ~headers =
+
(* Include method, URL, and important headers in cache key *)
+
let method_str = match method_ with
+
| `GET -> "GET" | `HEAD -> "HEAD"
+
| _ -> "OTHER" (* Don't cache non-idempotent methods *)
+
in
+
let url_str = Uri.to_string url in
+
+
(* Include range header if present *)
+
let range_str = match Cohttp.Header.get headers "range" with
+
| Some r -> "_range:" ^ r
+
| None -> ""
+
in
+
+
Printf.sprintf "%s_%s%s" method_str url_str range_str
+
+
(** Check if a response is cacheable *)
+
let is_cacheable ~method_ ~status ~headers =
+
(* Only cache GET and HEAD requests *)
+
match method_ with
+
| `GET | `HEAD ->
+
let code = Cohttp.Code.code_of_status status in
+
(* Cache successful responses and some redirects *)
+
if code >= 200 && code < 300 then
+
(* Check cache-control headers *)
+
match Cohttp.Header.get headers "cache-control" with
+
| Some cc ->
+
let cc_lower = String.lowercase_ascii cc in
+
(* Simple substring search without regex *)
+
let rec contains_substring str sub pos =
+
if pos + String.length sub > String.length str then false
+
else if String.sub str pos (String.length sub) = sub then true
+
else contains_substring str sub (pos + 1)
+
in
+
if contains_substring cc_lower "no-store" 0 ||
+
contains_substring cc_lower "no-cache" 0 ||
+
contains_substring cc_lower "private" 0 then
+
false
+
else
+
true
+
| None -> true
+
else if code = 301 || code = 308 then
+
true (* Permanent redirects *)
+
else
+
false
+
| _ -> false
+
+
(** Parse cache-control max-age directive *)
+
let parse_max_age headers =
+
match Cohttp.Header.get headers "cache-control" with
+
| Some cc ->
+
(* Look for max-age=N *)
+
let parts = String.split_on_char ',' cc |> List.map String.trim in
+
List.find_map (fun part ->
+
let prefix = "max-age=" in
+
let prefix_len = String.length prefix in
+
if String.length part >= prefix_len &&
+
String.sub part 0 prefix_len = prefix then
+
let value = String.sub part prefix_len (String.length part - prefix_len) in
+
try Some (float_of_string value) with _ -> None
+
else None
+
) parts
+
| None -> None
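(* Example: a header of "public, max-age=86400, must-revalidate" yields [Some 86400.]. *)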
+
+
(** Serialize headers and status to JSON *)
+
let serialize_metadata ~status ~headers =
+
let status_code = Cohttp.Code.code_of_status status in
+
let headers_assoc = Cohttp.Header.to_list headers in
+
let json = `Assoc [
+
("status_code", `Int status_code);
+
("headers", `Assoc (List.map (fun (k, v) -> (k, `String v)) headers_assoc));
+
] in
+
Yojson.Basic.to_string json
+
+
(** Deserialize headers and status from JSON *)
+
let deserialize_metadata json_str =
+
try
+
let json = Yojson.Basic.from_string json_str in
+
let status_code = json |> Yojson.Basic.Util.member "status_code" |> Yojson.Basic.Util.to_int in
+
let status = Cohttp.Code.status_of_code status_code in
+
let headers_json = json |> Yojson.Basic.Util.member "headers" |> Yojson.Basic.Util.to_assoc in
+
let headers = headers_json |> List.map (fun (k, v) ->
+
(k, Yojson.Basic.Util.to_string v)
+
) |> Cohttp.Header.of_list in
+
Some (status, headers)
+
with _ -> None
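(* The metadata entry is a small JSON document, e.g.
   {"status_code":200,"headers":{"content-type":"text/plain","content-length":"12"}} *)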
+
+
(** Get cached response if available and not expired *)
+
let get t ~method_ ~url ~headers =
+
if not t.enabled then None
+
else if method_ = `GET && not t.cache_get_requests then None
+
else
+
let key = make_cache_key ~method_ ~url ~headers in
+
+
(* Try cacheio first *)
+
match t.cacheio with
+
| Some cache ->
+
(* Check for metadata entry *)
+
let metadata_key = key ^ ".meta" in
+
let body_key = key ^ ".body" in
+
+
if Cacheio.exists cache ~key:metadata_key && Cacheio.exists cache ~key:body_key then
+
Switch.run @@ fun sw ->
+
(* Read metadata *)
+
let metadata_opt = match Cacheio.get cache ~key:metadata_key ~sw with
+
| Some source ->
+
let buf = Buffer.create 256 in
+
let rec read_all () =
+
let chunk = Cstruct.create 1024 in
+
try
+
let n = Flow.single_read source chunk in
+
if n > 0 then begin
+
Buffer.add_string buf (Cstruct.to_string ~off:0 ~len:n chunk);
+
read_all ()
+
end
+
with End_of_file -> ()
+
in
+
read_all ();
+
deserialize_metadata (Buffer.contents buf)
+
| None -> None
+
in
+
+
(match metadata_opt with
+
| Some (status, resp_headers) ->
+
(* Read body *)
+
(match Cacheio.get cache ~key:body_key ~sw with
+
| Some source ->
+
let buf = Buffer.create 4096 in
+
let rec read_all () =
+
let chunk = Cstruct.create 4096 in
+
try
+
let n = Flow.single_read source chunk in
+
if n > 0 then begin
+
Buffer.add_string buf (Cstruct.to_string ~off:0 ~len:n chunk);
+
read_all ()
+
end
+
with End_of_file -> ()
+
in
+
read_all ();
+
let body = Buffer.contents buf in
+
Log.debug (fun m -> m "Cache hit for %s" (Uri.to_string url));
+
Some { Requests_types.Response.status; headers = resp_headers; body; body_stream = None }
+
| None ->
+
Log.debug (fun m -> m "Cache body missing for %s" (Uri.to_string url));
+
None)
+
| None ->
+
Log.debug (fun m -> m "Cache metadata missing for %s" (Uri.to_string url));
+
None)
+
else
+
(Log.debug (fun m -> m "Cache miss for %s" (Uri.to_string url));
+
None)
+
| None ->
+
(* Fall back to memory cache *)
+
match Hashtbl.find_opt t.memory_cache key with
+
| Some (response, expiry) when expiry > Unix.gettimeofday () ->
+
Log.debug (fun m -> m "Memory cache hit for %s" (Uri.to_string url));
+
Some response
+
| _ ->
+
Log.debug (fun m -> m "Cache miss for %s" (Uri.to_string url));
+
None
+
+
(** Get cached response body as stream *)
+
let get_stream t ~method_ ~url ~headers ~sw =
+
if not t.enabled then None
+
else if method_ = `GET && not t.cache_get_requests then None
+
else
+
let key = make_cache_key ~method_ ~url ~headers in
+
+
match t.cacheio with
+
| Some cache ->
+
let metadata_key = key ^ ".meta" in
+
let body_key = key ^ ".body" in
+
+
if Cacheio.exists cache ~key:metadata_key && Cacheio.exists cache ~key:body_key then
+
(* Read metadata first *)
+
let metadata_opt =
+
match Cacheio.get cache ~key:metadata_key ~sw with
+
| Some source ->
+
let buf = Buffer.create 256 in
+
let rec read_all () =
+
let chunk = Cstruct.create 1024 in
+
try
+
let n = Flow.single_read source chunk in
+
if n > 0 then begin
+
Buffer.add_string buf (Cstruct.to_string ~off:0 ~len:n chunk);
+
read_all ()
+
end
+
with End_of_file -> ()
+
in
+
read_all ();
+
deserialize_metadata (Buffer.contents buf)
+
| None -> None
+
in
+
+
(match metadata_opt with
+
| Some (status, resp_headers) ->
+
(* Return body stream directly *)
+
(match Cacheio.get cache ~key:body_key ~sw with
+
| Some source ->
+
Log.debug (fun m -> m "Streaming cache hit for %s" (Uri.to_string url));
+
Some (status, resp_headers, source)
+
| None -> None)
+
| None -> None)
+
else None
+
| None -> None
+
+
(** Store response in cache *)
+
let put t ~method_ ~url ~request_headers ~response =
+
if not t.enabled then ()
+
else if is_cacheable ~method_ ~status:response.Requests_types.Response.status ~headers:response.headers then
+
let key = make_cache_key ~method_ ~url ~headers:request_headers in
+
let ttl = parse_max_age response.headers in
+
+
Log.debug (fun m -> m "Caching response for %s (ttl: %s)"
+
(Uri.to_string url)
+
(match ttl with Some t -> Printf.sprintf "%.0fs" t | None -> "3600s"));
+
+
(* Store in cacheio if available *)
+
(match t.cacheio with
+
| Some cache ->
+
Switch.run @@ fun _sw ->
+
+
(* Store metadata *)
+
let metadata_key = key ^ ".meta" in
+
let metadata = serialize_metadata ~status:response.status ~headers:response.headers in
+
let metadata_source = Flow.string_source metadata in
+
Cacheio.put cache ~key:metadata_key ~source:metadata_source ~ttl ();
+
+
(* Store body *)
+
let body_key = key ^ ".body" in
+
let body_source = Flow.string_source response.body in
+
Cacheio.put cache ~key:body_key ~source:body_source ~ttl ()
+
| None -> ());
+
+
(* Also store in memory cache as fallback *)
+
let expiry = Unix.gettimeofday () +. Option.value ttl ~default:3600.0 in
+
Hashtbl.replace t.memory_cache key (response, expiry)
+
+
(** Store response with streaming body *)
+
let put_stream t ~method_ ~url ~request_headers ~status ~headers ~body_source ~ttl =
+
if not t.enabled then ()
+
else if is_cacheable ~method_ ~status ~headers then
+
let key = make_cache_key ~method_ ~url ~headers:request_headers in
+
+
Log.debug (fun m -> m "Caching streamed response for %s (ttl: %s)"
+
(Uri.to_string url)
+
(match ttl with Some t -> Printf.sprintf "%.0fs" t | None -> "3600s"));
+
+
match t.cacheio with
+
| Some cache ->
+
Switch.run @@ fun _sw ->
+
+
(* Store metadata *)
+
let metadata_key = key ^ ".meta" in
+
let metadata = serialize_metadata ~status ~headers in
+
let metadata_source = Flow.string_source metadata in
+
Cacheio.put cache ~key:metadata_key ~source:metadata_source ~ttl ();
+
+
(* Store body directly from source *)
+
let body_key = key ^ ".body" in
+
Cacheio.put cache ~key:body_key ~source:body_source ~ttl ()
+
| None -> ()
+
+
(** Range request support - exposed module *)
+
module Range = struct
+
type t = {
+
start : int64;
+
end_ : int64 option; (* None means to end of file *)
+
}
+
+
let of_header header =
+
(* Parse Range: bytes=start-end *)
+
let prefix = "bytes=" in
+
let prefix_len = String.length prefix in
+
if String.length header >= prefix_len &&
+
String.sub header 0 prefix_len = prefix then
+
let range_str = String.sub header prefix_len (String.length header - prefix_len) in
+
match String.split_on_char '-' range_str with
+
| [start; ""] ->
+
(* bytes=N- means from N to end *)
+
(try Some { start = Int64.of_string start; end_ = None }
+
with _ -> None)
+
| [start; end_] ->
+
(* bytes=N-M *)
+
(try Some {
+
start = Int64.of_string start;
+
end_ = Some (Int64.of_string end_)
+
}
+
with _ -> None)
+
| _ -> None
+
else None
+
+
let to_header t =
+
match t.end_ with
+
| None -> Printf.sprintf "bytes=%Ld-" t.start
+
| Some e -> Printf.sprintf "bytes=%Ld-%Ld" t.start e
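(* Round trip: of_header "bytes=500-999" gives Some { start = 500L; end_ = Some 999L },
   and to_header on that value prints "bytes=500-999"; "bytes=500-" parses with end_ = None. *)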
+
+
let to_cacheio_range t ~total_size =
+
let end_ = match t.end_ with
+
| None -> Int64.pred total_size
+
| Some e -> min e (Int64.pred total_size)
+
in
+
(* Convert to Cacheio.Range.t *)
+
Cacheio.Range.create ~start:t.start ~end_
+
end
+
+
(** Download with range request support and caching *)
+
let download_range t ~sw ~url ~range ~on_chunk =
+
let range_header = Range.to_header range in
+
+
Log.debug (fun m -> m "Range request for %s: %s"
+
(Uri.to_string url) range_header);
+
+
(* Check if we have cached data for this range *)
+
match t.cacheio with
+
| Some cache ->
+
let key = Uri.to_string url in
+
(* Convert our range to cacheio range - we need the total size *)
+
(* For now, assume a large file size as we don't know the actual size *)
+
let cacheio_range = Range.to_cacheio_range range ~total_size:Int64.max_int in
+
+
(match Cacheio.get_range cache ~key ~range:cacheio_range ~sw with
+
| `Complete source ->
+
(* Read chunks from the complete cached file *)
+
let rec read_chunks () =
+
let chunk = Cstruct.create 8192 in
+
try
+
let n = Flow.single_read source chunk in
+
if n > 0 then begin
+
on_chunk (Cstruct.to_string ~off:0 ~len:n chunk);
+
read_chunks ()
+
end
+
with End_of_file -> ()
+
in
+
read_chunks ();
+
Some true
+
| `Chunks chunk_sources ->
+
(* Read from multiple chunk sources *)
+
List.iter (fun (_range, source) ->
+
let rec read_chunk () =
+
let chunk = Cstruct.create 8192 in
+
try
+
let n = Flow.single_read source chunk in
+
if n > 0 then begin
+
on_chunk (Cstruct.to_string ~off:0 ~len:n chunk);
+
read_chunk ()
+
end
+
with End_of_file -> ()
+
in
+
read_chunk ()
+
) chunk_sources;
+
Some true
+
| `Not_found ->
+
None)
+
| None ->
+
(* No cacheio backend available *)
+
None
+
+
(** Store a response chunk in cache *)
+
let put_chunk t ~url ~range ~data =
+
if not t.enabled || not t.cache_range_requests then ()
+
else
+
match t.cacheio with
+
| Some cache ->
+
let key = Uri.to_string url in
+
(* Convert our range to cacheio range *)
+
let cacheio_range = Range.to_cacheio_range range ~total_size:Int64.max_int in
+
+
Switch.run @@ fun _sw ->
+
let source = Flow.string_source data in
+
Cacheio.put_chunk cache ~key ~range:cacheio_range ~source ()
+
| None ->
+
Log.debug (fun m -> m "Cannot cache chunk for %s: no cacheio backend"
+
(Uri.to_string url))
+
+
(** Check if we have complete cached data for a URL *)
+
let has_complete t ~url ~total_size =
+
if not t.enabled then false
+
else
+
match t.cacheio with
+
| Some cache ->
+
let key = Uri.to_string url in
+
Cacheio.has_complete_chunks cache ~key ~total_size
+
| None -> false
+
+
(** Get missing ranges for a URL *)
+
let missing_ranges t ~url ~total_size =
+
if not t.enabled then
+
(* Return full range if cache disabled *)
+
[{ Range.start = 0L; end_ = Some (Int64.pred total_size) }]
+
else
+
match t.cacheio with
+
| Some cache ->
+
let key = Uri.to_string url in
+
let cacheio_ranges = Cacheio.missing_ranges cache ~key ~total_size in
+
(* Convert Cacheio.Range.t to our Range.t *)
+
List.map (fun r ->
+
{ Range.start = Cacheio.Range.start r;
+
end_ = Some (Cacheio.Range.end_ r) }
+
) cacheio_ranges
+
| None ->
+
(* No cache, need full range *)
+
[{ Range.start = 0L; end_ = Some (Int64.pred total_size) }]
+
+
(** Coalesce chunks into a complete file *)
+
let coalesce_chunks t ~url =
+
if not t.enabled then false
+
else
+
match t.cacheio with
+
| Some cache ->
+
let key = Uri.to_string url in
+
(* Run coalescing synchronously *)
+
let promise = Cacheio.coalesce_chunks cache ~key ~verify:true () in
+
(match Eio.Promise.await promise with
+
| Ok () ->
+
Log.info (fun m -> m "Successfully coalesced chunks for %s" key);
+
true
+
| Error exn ->
+
Log.warn (fun m -> m "Failed to coalesce chunks for %s: %s"
+
key (Printexc.to_string exn));
+
false)
+
| None -> false
+
+
(** Clear cache for a specific URL *)
+
let evict t ~url =
+
if not t.enabled then ()
+
else
+
let key = make_cache_key ~method_:`GET ~url ~headers:(Cohttp.Header.init ()) in
+
(* Evict from cacheio - both metadata and body *)
+
(match t.cacheio with
+
| Some cache ->
+
Cacheio.delete cache ~key:(key ^ ".meta");
+
Cacheio.delete cache ~key:(key ^ ".body")
+
| None -> ());
+
(* Evict from memory cache *)
+
Log.debug (fun m -> m "Evicting cache for %s" (Uri.to_string url));
+
Hashtbl.remove t.memory_cache key
+
+
(** Clear entire cache *)
+
let clear t =
+
Log.info (fun m -> m "Clearing entire cache");
+
(* Clear cacheio *)
+
(match t.cacheio with
+
| Some cache -> Cacheio.clear cache
+
| None -> ());
+
(* Clear memory cache *)
+
Hashtbl.clear t.memory_cache
+
+
(** Get cache statistics *)
+
let stats t =
+
let cacheio_stats =
+
match t.cacheio with
+
| Some cache ->
+
let stats = Cacheio.stats cache in
+
`Assoc [
+
("total_entries", `Int (Cacheio.Stats.entry_count stats));
+
("total_bytes", `Int (Int64.to_int (Cacheio.Stats.total_size stats)));
+
("expired_entries", `Int (Cacheio.Stats.expired_count stats));
+
("pinned_entries", `Int (Cacheio.Stats.pinned_count stats));
+
("temporary_entries", `Int (Cacheio.Stats.temporary_count stats));
+
]
+
| None ->
+
`Assoc []
+
in
+
+
`Assoc [
+
("memory_cache_entries", `Int (Hashtbl.length t.memory_cache));
+
("cache_backend", `String (if Option.is_some t.cacheio then "cacheio" else "memory"));
+
("enabled", `Bool t.enabled);
+
("cache_get_requests", `Bool t.cache_get_requests);
+
("cache_range_requests", `Bool t.cache_range_requests);
+
("cacheio_stats", cacheio_stats);
+
]
+34
stack/requests/lib/requests_types.ml
···
···
+
(** Shared types for the requests library *)
+
+
module Response = struct
+
type t = {
+
status : Cohttp.Code.status_code;
+
headers : Cohttp.Header.t;
+
body : string;
+
body_stream : Eio.Buf_read.t option;
+
}
+
+
let status t = t.status
+
let headers t = t.headers
+
let body t = t.body
+
let body_stream t =
+
match t.body_stream with
+
| Some s -> s
+
| None -> Eio.Buf_read.of_string t.body
+
+
let is_success t =
+
let code = Cohttp.Code.code_of_status t.status in
+
code >= 200 && code < 300
+
+
let is_redirect t =
+
let code = Cohttp.Code.code_of_status t.status in
+
code >= 300 && code < 400
+
+
let is_client_error t =
+
let code = Cohttp.Code.code_of_status t.status in
+
code >= 400 && code < 500
+
+
let is_server_error t =
+
let code = Cohttp.Code.code_of_status t.status in
+
code >= 500 && code < 600
+
end
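(* Hedged sketch of how client code uses the helpers above (values are made up): *)
let _response_example () =
  let r = { Response.status = `Not_found; headers = Cohttp.Header.init ();
            body = "missing"; body_stream = None } in
  assert (Response.is_client_error r);
  assert (not (Response.is_success r));
  assert (Response.body r = "missing")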
+4 -4
stack/requests/test/dune
···
-
(executable
-
(public_name test_requests)
-
(name test_requests)
-
(libraries requests eio_main ca-certs))
···
+
(executables
+
(public_names test_requests test_streaming test_cache test_simple_cache test_cache_integration)
+
(names test_requests test_streaming test_cache test_simple_cache test_cache_integration)
+
(libraries requests eio_main ca-certs uri yojson cacheio))
+226
stack/requests/test/test_cache.ml
···
···
+
(** Test cacheio integration with requests library *)
+
+
open Eio
+
+
let test_basic_caching () =
+
Eio_main.run @@ fun env ->
+
Switch.run @@ fun sw ->
+
+
(* Create a temporary cache directory *)
+
let cache_dir = Path.(env#fs / "/tmp/test_requests_cache") in
+
+
(* Clean up any existing test directory *)
+
(try
+
Eio.Process.run env#process_mgr ["rm"; "-rf"; "/tmp/test_requests_cache"]
+
with _ -> ());
+
+
(* Create requests client with caching enabled *)
+
let client = Requests.create_with_cache ~sw ~cache_dir:(Some cache_dir)
+
~clock:env#clock env#net in
+
+
(* Test URL - using a small file from a reliable source *)
+
let url = Uri.of_string "https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf" in
+
+
Printf.printf "Test 1: First request (cache miss)...\n";
+
let start_time = Unix.gettimeofday () in
+
let response1 = Requests.get ~sw client url in
+
let first_time = Unix.gettimeofday () -. start_time in
+
Printf.printf " Status: %s\n" (Cohttp.Code.string_of_status ((Requests.Response.status response1)));
+
Printf.printf " Body size: %d bytes\n" (String.length ((Requests.Response.body response1)));
+
Printf.printf " Time: %.3fs\n" first_time;
+
+
Printf.printf "\nTest 2: Second request (should be cached)...\n";
+
let start_time = Unix.gettimeofday () in
+
let response2 = Requests.get ~sw client url in
+
let second_time = Unix.gettimeofday () -. start_time in
+
Printf.printf " Status: %s\n" (Cohttp.Code.string_of_status (Requests.Response.status response2));
+
Printf.printf " Body size: %d bytes\n" (String.length (Requests.Response.body response2));
+
Printf.printf " Time: %.3fs\n" second_time;
+
+
(* Verify responses are identical *)
+
assert ((Requests.Response.body response1) = (Requests.Response.body response2));
+
assert ((Requests.Response.status response1) = (Requests.Response.status response2));
+
+
(* Cache should be significantly faster *)
+
Printf.printf "\nCache speedup: %.1fx faster\n" (first_time /. second_time);
+
+
(* Check that cache directory was created and has files *)
+
let cache_files = ref 0 in
+
let rec count_files dir =
+
try
+
let entries = Path.read_dir dir in
+
List.iter (fun entry ->
+
let path = Path.(dir / entry) in
+
match Path.kind ~follow:false path with
+
| `Directory -> count_files path
+
| `Regular_file -> incr cache_files
+
| _ -> ()
+
) entries
+
with _ -> ()
+
in
+
count_files cache_dir;
+
Printf.printf "\nCache contains %d file(s)\n" !cache_files;
+
assert (!cache_files > 0);
+
+
Printf.printf "\nBasic caching test passed! ✓\n"
+
+
let test_range_requests () =
+
Eio_main.run @@ fun env ->
+
Switch.run @@ fun sw ->
+
+
(* Create a temporary cache directory *)
+
let cache_dir = Path.(env#fs / "/tmp/test_requests_cache_range") in
+
+
(* Clean up any existing test directory *)
+
(try
+
Eio.Process.run env#process_mgr ["rm"; "-rf"; "/tmp/test_requests_cache_range"]
+
with _ -> ());
+
+
(* Create requests client with caching enabled *)
+
let client = Requests.create_with_cache ~sw ~cache_dir:(Some cache_dir)
+
~clock:env#clock env#net in
+
+
(* Test URL *)
+
let url = Uri.of_string "https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf" in
+
+
Printf.printf "Test 1: Download first 1000 bytes...\n";
+
let out_path1 = Path.(env#fs / "/tmp/test_range_1.dat") in
+
Requests.download_file_range ~sw client url ~path:out_path1 ~start_byte:0L ~end_byte:999L;
+
+
(* Check file was created *)
+
let stat1 = Path.stat ~follow:false out_path1 in
+
let size1 = Optint.Int63.to_int64 stat1.size in
+
Printf.printf " Downloaded %Ld bytes\n" size1;
+
assert (size1 <= 1000L);
+
+
Printf.printf "\nTest 2: Download bytes 500-1499...\n";
+
let out_path2 = Path.(env#fs / "/tmp/test_range_2.dat") in
+
Requests.download_file_range ~sw client url ~path:out_path2 ~start_byte:500L ~end_byte:1499L;
+
+
let stat2 = Path.stat ~follow:false out_path2 in
+
let size2 = Optint.Int63.to_int64 stat2.size in
+
Printf.printf " Downloaded %Ld bytes\n" size2;
+
assert (size2 <= 1000L);
+
+
Printf.printf "\nTest 3: Request same range again (should use cache)...\n";
+
let start_time = Unix.gettimeofday () in
+
let out_path3 = Path.(env#fs / "/tmp/test_range_3.dat") in
+
Requests.download_file_range ~sw client url ~path:out_path3 ~start_byte:500L ~end_byte:1499L;
+
let cache_time = Unix.gettimeofday () -. start_time in
+
Printf.printf " Time: %.3fs\n" cache_time;
+
+
(* Verify files 2 and 3 are identical *)
+
let content2 = Path.load out_path2 in
+
let content3 = Path.load out_path3 in
+
assert (content2 = content3);
+
+
Printf.printf "\nRange request caching test passed! ✓\n"
+
+
let test_cache_headers () =
+
Eio_main.run @@ fun env ->
+
Switch.run @@ fun sw ->
+
+
(* Create a temporary cache directory *)
+
let cache_dir = Path.(env#fs / "/tmp/test_requests_cache_headers") in
+
+
(* Clean up any existing test directory *)
+
(try
+
Eio.Process.run env#process_mgr ["rm"; "-rf"; "/tmp/test_requests_cache_headers"]
+
with _ -> ());
+
+
(* Create requests client with caching enabled *)
+
let client = Requests.create_with_cache ~sw ~cache_dir:(Some cache_dir)
+
~clock:env#clock env#net in
+
+
(* Test with a URL that has cache-control headers *)
+
let url = Uri.of_string "https://httpbin.org/cache/60" in (* Cache for 60 seconds *)
+
+
Printf.printf "Test 1: Request with cache-control headers...\n";
+
let response1 = Requests.get ~sw client url in
+
Printf.printf " Status: %s\n" (Cohttp.Code.string_of_status (Requests.Response.status response1));
+
+
(* Check if Cache-Control header is present *)
+
(match Cohttp.Header.get (Requests.Response.headers response1) "cache-control" with
+
| Some cc ->
+
Printf.printf " Cache-Control: %s\n" cc
+
| None ->
+
Printf.printf " No Cache-Control header\n";
+
+
Printf.printf "\nTest 2: Request no-cache URL...\n";
+
let nocache_url = Uri.of_string "https://httpbin.org/response-headers?Cache-Control=no-cache" in
+
let response2 = Requests.get ~sw client nocache_url in
+
Printf.printf " Status: %s\n" (Cohttp.Code.string_of_status (Requests.Response.status response2));
+
+
(match Cohttp.Header.get (Requests.Response.headers response2) "cache-control" with
+
| Some cc ->
+
Printf.printf " Cache-Control: %s\n" cc
+
| None ->
+
Printf.printf " No Cache-Control header\n";
+
+
Printf.printf "\nCache header handling test passed! ✓\n"
+
+
let test_cache_eviction () =
+
Eio_main.run @@ fun env ->
+
Switch.run @@ fun sw ->
+
+
(* Create a temporary cache directory *)
+
let cache_dir = Path.(env#fs / "/tmp/test_requests_cache_evict") in
+
+
(* Clean up any existing test directory *)
+
(try
+
Eio.Process.run env#process_mgr ["rm"; "-rf"; "/tmp/test_requests_cache_evict"]
+
with _ -> ());
+
+
(* Create the cache directly to test eviction *)
+
let cache = Requests.Requests_cache.create ~sw ~enabled:true ~cache_dir:(Some cache_dir) () in
+
+
let url = Uri.of_string "https://httpbin.org/json" in
+
+
Printf.printf "Test 1: Cache a response...\n";
+
let response = {
+
Requests.Requests_types.Response.status = `OK;
+
headers = Cohttp.Header.init ();
+
body = "{\"test\": \"data\"}";
+
body_stream = None;
+
} in
+
+
Requests.Requests_cache.put cache ~method_:`GET ~url ~request_headers:(Cohttp.Header.init ()) ~response;
+
Printf.printf " Response cached\n";
+
+
Printf.printf "\nTest 2: Verify cache hit...\n";
+
let cached = Requests.Requests_cache.get cache ~method_:`GET ~url ~headers:(Cohttp.Header.init ()) in
+
assert (cached <> None);
+
Printf.printf " Cache hit confirmed\n";
+
+
Printf.printf "\nTest 3: Evict from cache...\n";
+
Requests.Requests_cache.evict cache ~url;
+
Printf.printf " Entry evicted\n";
+
+
Printf.printf "\nTest 4: Verify cache miss after eviction...\n";
+
let cached2 = Requests.Requests_cache.get cache ~method_:`GET ~url ~headers:(Cohttp.Header.init ()) in
+
assert (cached2 = None);
+
Printf.printf " Cache miss confirmed\n";
+
+
Printf.printf "\nCache eviction test passed! ✓\n"
+
+
let () =
+
Printf.printf "=== Testing Requests Library with Cacheio ===\n\n";
+
+
Printf.printf "Running basic caching test...\n";
+
Printf.printf "-----------------------------\n";
+
test_basic_caching ();
+
+
Printf.printf "\n\nRunning range request test...\n";
+
Printf.printf "-----------------------------\n";
+
test_range_requests ();
+
+
Printf.printf "\n\nRunning cache header test...\n";
+
Printf.printf "-----------------------------\n";
+
(try test_cache_headers () with e ->
+
Printf.printf " Skipped (httpbin may be unreliable): %s\n" (Printexc.to_string e));
+
+
Printf.printf "\n\nRunning cache eviction test...\n";
+
Printf.printf "-----------------------------\n";
+
test_cache_eviction ();
+
+
Printf.printf "\n\n=== All tests completed! ===\n"
+136
stack/requests/test/test_cache_integration.ml
···
···
+
(** Test cache integration with cacheio *)
+
+
open Eio
+
open Requests
+
+
let test_basic_cache ~env =
+
Switch.run @@ fun sw ->
+
+
(* Create a temporary cache directory *)
+
let cache_dir = Eio.Path.(env#fs / "/tmp/test_cache") in
+
+
(* Ensure the directory exists *)
+
(try Eio.Path.mkdir ~perm:0o755 cache_dir
+
with Eio.Io (Eio.Fs.E (Already_exists _), _) -> ());
+
+
(* Ensure tmp subdirectory exists *)
+
let tmp_dir = Eio.Path.(cache_dir / "tmp") in
+
(try Eio.Path.mkdir ~perm:0o755 tmp_dir
+
with Eio.Io (Eio.Fs.E (Already_exists _), _) -> ());
+
+
(* Create cache with cacheio backend *)
+
let cache = Requests_cache.create
+
~sw
+
~enabled:true
+
~cache_get_requests:true
+
~cache_range_requests:true
+
~cache_dir:(Some cache_dir)
+
()
+
in
+
+
(* Test basic operations *)
+
let url = Uri.of_string "http://example.com/test" in
+
let headers = Cohttp.Header.init () in
+
+
(* Check cache miss *)
+
let result = Requests_cache.get cache ~method_:`GET ~url ~headers in
+
(match result with
+
| None -> Printf.printf "✓ Cache miss works correctly\n"
+
| Some r ->
+
Printf.printf "ERROR: Expected cache miss but got response with body: %s\n" r.body;
+
assert false);
+
+
(* Store a response *)
+
let response = {
+
Requests_types.Response.status = `OK;
+
headers = Cohttp.Header.of_list [
+
("Content-Type", "text/plain");
+
("Cache-Control", "max-age=3600")
+
];
+
body = "Test response body";
+
body_stream = None;
+
} in
+
+
Requests_cache.put cache ~method_:`GET ~url ~request_headers:headers ~response;
+
Printf.printf "✓ Cache put works correctly\n";
+
+
(* Check cache hit *)
+
let cached = Requests_cache.get cache ~method_:`GET ~url ~headers in
+
assert (cached <> None);
+
(match cached with
+
| Some r ->
+
if r.body = "Test response body" then
+
Printf.printf "✓ Cache hit works correctly\n"
+
else begin
+
Printf.printf "ERROR: Body mismatch. Expected: 'Test response body', Got: '%s'\n" r.body;
+
assert false
+
end
+
| None -> failwith "Expected cache hit");
+
+
(* Test cache eviction *)
+
Requests_cache.evict cache ~url;
+
let after_evict = Requests_cache.get cache ~method_:`GET ~url ~headers in
+
assert (after_evict = None);
+
Printf.printf "✓ Cache eviction works correctly\n";
+
+
(* Clear cache *)
+
Requests_cache.clear cache;
+
Printf.printf "✓ Cache clear works correctly\n"
+
+
let test_range_requests ~env =
+
Switch.run @@ fun sw ->
+
+
(* Create a temporary cache directory *)
+
let cache_dir = Eio.Path.(env#fs / "/tmp/test_cache_ranges") in
+
+
(* Ensure the directory exists *)
+
(try Eio.Path.mkdir ~perm:0o755 cache_dir
+
with Eio.Io (Eio.Fs.E (Already_exists _), _) -> ());
+
+
(* Ensure tmp subdirectory exists *)
+
let tmp_dir = Eio.Path.(cache_dir / "tmp") in
+
(try Eio.Path.mkdir ~perm:0o755 tmp_dir
+
with Eio.Io (Eio.Fs.E (Already_exists _), _) -> ());
+
+
(* Create cache with range support *)
+
let cache = Requests_cache.create
+
~sw
+
~enabled:true
+
~cache_range_requests:true
+
~cache_dir:(Some cache_dir)
+
()
+
in
+
+
let url = Uri.of_string "http://example.com/largefile" in
+
+
(* Test missing ranges *)
+
let missing = Requests_cache.missing_ranges cache ~url ~total_size:1000L in
+
assert (List.length missing = 1);
+
(match missing with
+
| [{Requests_cache.Range.start = 0L; end_ = Some 999L}] ->
+
Printf.printf "✓ Missing ranges works correctly\n"
+
| _ -> failwith "Unexpected missing ranges");
+
+
(* Store a chunk *)
+
let range = Requests_cache.Range.{ start = 0L; end_ = Some 99L } in
+
let data = String.make 100 'A' in
+
Requests_cache.put_chunk cache ~url ~range ~data;
+
Printf.printf "✓ Chunk storage works correctly\n";
+
+
(* Check if complete *)
+
let is_complete = Requests_cache.has_complete cache ~url ~total_size:100L in
+
Printf.printf "✓ Completeness check: %b\n" is_complete
+
+
let () =
+
Eio_main.run @@ fun env ->
+
Printf.printf "Testing cache integration with cacheio...\n";
+
+
(* Test basic cache operations *)
+
Printf.printf "\n=== Testing basic cache operations ===\n";
+
test_basic_cache ~env;
+
+
(* Test range request caching *)
+
Printf.printf "\n=== Testing range request caching ===\n";
+
test_range_requests ~env;
+
+
Printf.printf "\n✅ All cache integration tests passed!\n"
+28
stack/requests/test/test_rng.ml
···
···
+
(** Test that Mirage crypto RNG is working properly *)
+
+
open Eio
+
+
let test_rng () =
+
Printf.printf "Testing Mirage crypto RNG usage...\n";
+
+
(* Generate some random values *)
+
for i = 1 to 5 do
+
let rand_bytes = Mirage_crypto_rng.generate 8 in
+
(* hex-encode with the stdlib rather than routing through Cstruct *)
+
let rand_hex =
+
String.concat "" (List.init (String.length rand_bytes) (fun j -> Printf.sprintf "%02x" (Char.code rand_bytes.[j])))
+
in
+
Printf.printf " Random value %d: %s\n" i rand_hex
+
done;
+
+
Printf.printf "\nTesting multipart boundary generation...\n";
+
let module R = Requests in
+
for i = 1 to 3 do
+
let boundary = R.Body.choose_boundary () in
+
Printf.printf " Boundary %d: %s\n" i boundary
+
done;
+
+
Printf.printf "\n✓ Mirage crypto RNG working correctly\n"
+
+
let () =
+
(* Initialize RNG (already done in requests.ml but being explicit) *)
+
Mirage_crypto_rng_unix.use_default ();
+
test_rng ()
+11
stack/requests/test/test_rng_simple.ml
···
···
+
(* Simple test for RNG *)
+
let () =
+
Mirage_crypto_rng_unix.use_default ();
+
+
for i = 1 to 5 do
+
let rand = Mirage_crypto_rng.generate 8 in
+
let (`Hex hex) = Hex.of_string rand in
+
Printf.printf "Random %d: %s\n" i hex
+
done;
+
+
Printf.printf "RNG test successful!\n"
+78
stack/requests/test/test_simple_cache.ml
···
···
+
(** Simple test for cacheio integration *)
+
+
open Eio
+
+
let test_simple_caching () =
+
Eio_main.run @@ fun env ->
+
Switch.run @@ fun sw ->
+
+
(* Create a temporary cache directory *)
+
let cache_dir = Path.(env#fs / "/tmp/test_requests_cache_simple") in
+
+
(* Clean up any existing test directory *)
+
(try
+
Eio.Process.run env#process_mgr ["rm"; "-rf"; "/tmp/test_requests_cache_simple"]
+
with _ -> ());
+
+
(* Create cache directly *)
+
let cache = Requests.Requests_cache.create ~sw ~enabled:true ~cache_dir:(Some cache_dir) () in
+
+
let url = Uri.of_string "https://example.com/test" in
+
let headers = Cohttp.Header.init () in
+
+
Printf.printf "Test 1: Store a response in cache...\n";
+
let response = {
+
Requests.Requests_types.Response.status = `OK;
+
headers = Cohttp.Header.add headers "content-type" "text/plain";
+
body = "This is test content from the cache!";
+
body_stream = None;
+
} in
+
+
Requests.Requests_cache.put cache ~method_:`GET ~url ~request_headers:headers ~response;
+
Printf.printf " Response stored\n";
+
+
Printf.printf "\nTest 2: Retrieve from cache...\n";
+
(match Requests.Requests_cache.get cache ~method_:`GET ~url ~headers with
+
| Some cached ->
+
Printf.printf " Status: %s\n" (Cohttp.Code.string_of_status cached.Requests.Requests_types.Response.status);
+
Printf.printf " Body: %s\n" cached.body;
+
assert (cached.body = response.body)
+
| None ->
+
failwith "Expected cache hit but got miss");
+
+
Printf.printf "\nTest 3: Cache miss for different URL...\n";
+
let other_url = Uri.of_string "https://example.com/other" in
+
(match Requests.Requests_cache.get cache ~method_:`GET ~url:other_url ~headers with
+
| Some _ -> failwith "Expected cache miss but got hit"
+
| None -> Printf.printf " Cache miss confirmed\n");
+
+
Printf.printf "\nTest 4: Test range chunk storage...\n";
+
let chunk_url = Uri.of_string "https://example.com/largefile" in
+
let range1 = Requests.Requests_cache.Range.{ start = 0L; end_ = Some 99L } in
+
let chunk_data1 = String.make 100 'A' in
+
+
Requests.Requests_cache.put_chunk cache ~url:chunk_url ~range:range1 ~data:chunk_data1;
+
Printf.printf " Chunk 0-99 stored\n";
+
+
let range2 = Requests.Requests_cache.Range.{ start = 100L; end_ = Some 199L } in
+
let chunk_data2 = String.make 100 'B' in
+
+
Requests.Requests_cache.put_chunk cache ~url:chunk_url ~range:range2 ~data:chunk_data2;
+
Printf.printf " Chunk 100-199 stored\n";
+
+
Printf.printf "\nTest 5: Check for complete chunks...\n";
+
let has_complete = Requests.Requests_cache.has_complete cache ~url:chunk_url ~total_size:200L in
+
Printf.printf " Complete data available: %b\n" has_complete;
+
+
Printf.printf "\nTest 6: Get missing ranges...\n";
+
let missing = Requests.Requests_cache.missing_ranges cache ~url:chunk_url ~total_size:300L in
+
Printf.printf " Missing ranges: %d\n" (List.length missing);
+
List.iter (fun r ->
+
Printf.printf "  %Ld-%s\n" r.Requests.Requests_cache.Range.start
+
(match r.Requests.Requests_cache.Range.end_ with Some e -> Int64.to_string e | None -> "open")
+
) missing;
+
+
Printf.printf "\nSimple caching test passed! ✓\n"
+
+
let () =
+
Printf.printf "=== Testing Simple Cache Integration ===\n\n";
+
test_simple_caching ()
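Tests 5 and 6 in test_simple_cache only print their results. If the two 100-byte chunks really cover bytes 0-199, both could become assertions; that coverage semantics is an assumption on my part, so this is just a sketch to append after the existing has_complete and missing bindings:

(* Hedged: assumes 0-99 + 100-199 count as complete for total_size 200,
   and that exactly one gap (200-299) is reported for total_size 300. *)
assert has_complete;
assert (List.length missing = 1)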
+87
stack/requests/test/test_streaming.ml
···
···
+
open Eio.Std
+
+
(** Simple test for streaming download functionality *)
+
+
let test_basic_streaming env =
+
traceln "Testing basic streaming download...";
+
+
(* Test with a small URL to verify streaming works *)
+
let test_url = Uri.of_string "https://httpbin.org/json" in
+
+
Eio.Switch.run @@ fun sw ->
+
let client = Requests.create ~clock:env#clock env#net in
+
+
(* Test 1: Stream response using stream_response *)
+
traceln "Test 1: Using stream_response to read data in chunks";
+
let chunks = ref [] in
+
Requests.stream_response ~sw client test_url (fun buf_reader ->
+
(* take only what is buffered (at most 64 bytes) so the final partial chunk is not dropped *)
+
try
+
while true do
+
Eio.Buf_read.ensure buf_reader 1; (* raises End_of_file once the stream is exhausted *)
+
let n = min 64 (Eio.Buf_read.buffered_bytes buf_reader) in
+
let chunk = Eio.Buf_read.take n buf_reader in
+
chunks := chunk :: !chunks;
+
traceln "Read chunk of %d bytes" (String.length chunk)
+
done
+
with End_of_file -> ()
+
);
+
+
let total_bytes = List.fold_left (fun acc chunk -> acc + String.length chunk) 0 !chunks in
+
traceln "Total bytes read: %d" total_bytes;
+
+
(* Test 2: Stream download to file *)
+
traceln "Test 2: Using Stream.download to download to file";
+
let test_file = Eio.Path.(env#fs / "test_download.json") in
+
+
Eio.Path.with_open_out ~create:(`Or_truncate 0o644) test_file (fun file ->
+
let sink = (file :> Eio.Flow.sink_ty Eio.Resource.t) in
+
Requests.Stream.download ~sw client test_url ~sink
+
);
+
+
(* Verify the file was created and has content *)
+
let file_content = Eio.Path.load test_file in
+
traceln "Downloaded file size: %d bytes" (String.length file_content);
+
traceln "File content preview: %s" (String.sub file_content 0 (min 100 (String.length file_content)));
+
+
(* Cleanup *)
+
(try Eio.Path.unlink test_file with _ -> ());
+
+
traceln "Streaming tests completed successfully!"
+
+
let test_iter_response env =
+
traceln "Testing iter_response functionality...";
+
+
Eio.Switch.run @@ fun sw ->
+
let client = Requests.create ~clock:env#clock env#net in
+
let test_url = Uri.of_string "https://httpbin.org/json" in
+
+
let response = Requests.get ~sw client test_url in
+
+
let chunk_count = ref 0 in
+
let total_bytes = ref 0 in
+
+
Requests.Stream.iter_response ~chunk_size:32 response ~f:(fun chunk ->
+
incr chunk_count;
+
total_bytes := !total_bytes + String.length chunk;
+
traceln "Chunk %d: %d bytes" !chunk_count (String.length chunk)
+
);
+
+
traceln "Processed %d chunks, total %d bytes" !chunk_count !total_bytes;
+
traceln "Original response body size: %d bytes" (String.length (Requests.Response.body response));
+
+
traceln "iter_response test completed successfully!"
+
+
let main env =
+
traceln "Starting streaming functionality tests...";
+
+
try
+
test_basic_streaming env;
+
test_iter_response env;
+
traceln "All streaming tests passed!";
+
with
+
| e ->
+
traceln "Test failed with exception: %s" (Printexc.to_string e);
+
raise e
+
+
let () = Eio_main.run main
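Possible tightening for test_iter_response: it traces the streamed total and the buffered body size but never compares them. Both values are already bound in the test, so a one-line assert after the traceln pair would turn the trace into a real check:

(* Hedged: the chunked total should match the buffered body length. *)
assert (!total_bytes = String.length (Requests.Response.body response))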