
Merge pull request #22897 from UnixJunkie/parany_1311

new file: packages/parany/parany.13.1.1/opam


Changed files

+39  packages/parany/parany.13.1.1/opam
+opam-version: "2.0"
+maintainer: "unixjunkie@sdf.org"
+authors: "Francois Berenger"
+license: "LGPL-2.0-or-later"
+homepage: "https://github.com/UnixJunkie/parany"
+bug-reports: "https://github.com/UnixJunkie/parany/issues"
+dev-repo: "git+https://github.com/UnixJunkie/parany.git"
+depends: [
+  "domainslib" {>= "0.5.0"}
+  "dune" {>= "1.6.0"}
+  "ocaml" {>= "5.0.0"}
+]
+build: [
+  ["dune" "build" "-p" name "-j" jobs]
+  ["dune" "build" "-p" name "-j" jobs "src/test.exe"] {with-test & os-distribution != "alpine"}
+  ["./test.sh"] {with-test & os-distribution != "alpine"}
+]
+synopsis: "Parallelize any computation"
+description: """
+Generalized map reduce for parallel computers (not distributed computing).
+Can process in parallel an infinite stream of elements.
+
+Can process a very large file in parallel on a multicore computer;
+provided there is a way to cut your file into independent blocks
+(the 'demux' function).
+The processing function is called 'work'.
+The function gathering the results is called 'mux'.
+The chunk size (number of items) processed by one call to the 'work' function
+is called 'csize'.
+
+There is a minimalist Parmap module, if you want to switch
+to/from Parmap easily.
+
+Read the corresponding ocamldoc before using.
+"""
+url {
+  src: "https://github.com/UnixJunkie/parany/archive/v13.1.1.tar.gz"
+  checksum: "md5=ad54c73b57630a851d6c20f1c4ab906c"
+}
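
For readers unfamiliar with parany, the package description above names the three user-supplied functions (demux, work, mux) and the chunk size csize. The following is a minimal sketch of how that model is typically used; the function name Parany.run, the End_of_input exception, and the exact argument labels are assumptions made for illustration, not taken from this diff, so consult the package's ocamldoc for the real signatures.

(* Hypothetical usage sketch of parany's demux/work/mux model. *)
let () =
  let ic = open_in Sys.argv.(1) in
  (* demux: produce the next independent item, here one line of the file;
     signal the end of the stream (assumed: a Parany.End_of_input exception) *)
  let demux () =
    try input_line ic with End_of_file -> raise Parany.End_of_input in
  (* work: process one item; this is the part that runs on several cores *)
  let work line = String.uppercase_ascii line in
  (* mux: gather results; called sequentially in the main process *)
  let mux line = print_endline line in
  (* 4 workers; csize = 100 items per chunk handed to a worker *)
  Parany.run ~csize:100 4 ~demux ~work ~mux;
  close_in ic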
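The description also mentions a minimalist Parmap module for easy switching to/from Parmap. A sketch of what a call could look like is below; the module path Parany.Parmap and the argument order (number of cores, function, list) are assumptions, not confirmed by this diff.

(* Hypothetical Parmap-style call: square a list on 4 cores. *)
let squares = Parany.Parmap.parmap 4 (fun x -> x * x) [1; 2; 3; 4; 5]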