Organisations / ocaml-multicore / picos / 02903c / freebsd-5.2_opam-2.2

freebsd-5.2_opam-2.2

Logs

Show full logs
2024-10-09 17:51.27: New job: test ocaml-multicore/picos https://github.com/ocaml-multicore/picos.git#refs/heads/add-queue (02903c28b98e5627ec18b290f4d3cd444da73bd2) (freebsd-x86_64:freebsd-5.2_opam-2.2)
Base: freebsd-ocaml-5.2
Opam project build

To reproduce locally:

git clone --recursive "https://github.com/ocaml-multicore/picos.git" -b "add-queue" && cd "picos" && git reset --hard 02903c28
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM freebsd-ocaml-5.2
# freebsd-5.2_opam-2.2
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/local/bin/opam-2.2 /usr/local/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e f7c5ed58d4e80e295c4022514da894342e69bfa0 || git fetch origin master) && git reset -q --hard f7c5ed58d4e80e295c4022514da894342e69bfa0 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 picos_std.opam picos_mux.opam picos_meta.opam picos_lwt.opam picos_io_cohttp.opam picos_io.opam picos_aux.opam picos.opam ./
RUN opam pin add -yn picos_std.dev './' && \
    opam pin add -yn picos_mux.dev './' && \
    opam pin add -yn picos_meta.dev './' && \
    opam pin add -yn picos_lwt.dev './' && \
    opam pin add -yn picos_io_cohttp.dev './' && \
    opam pin add -yn picos_io.dev './' && \
    opam pin add -yn picos_aux.dev './' && \
    opam pin add -yn picos.dev './'
ENV DEPS="alcotest.1.8.0 angstrom.0.16.1 asn1-combinators.0.3.2 astring.0.8.5 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-bytes.base base-domains.base base-nnp.base base-threads.base base-unix.base base64.3.5.1 bigstringaf.0.10.0 bos.0.2.1 ca-certs.1.0.0 camlp-streams.5.0.1 cmdliner.1.3.0 cohttp.6.0.0~beta2 cohttp-lwt.6.0.0~beta2 cohttp-lwt-unix.6.0.0~beta2 conduit.6.2.3 conduit-lwt.6.2.3 conduit-lwt-unix.6.2.3 conf-gmp.4 conf-gmp-powm-sec.3 conf-npm.1 conf-pkg-config.3 containers.3.14 cppo.1.7.0 csexp.1.5.2 digestif.1.2.0 domain-local-await.1.0.1 domain-name.0.4.0 domain_shims.0.1.0 dscheck.0.5.0 dune.3.16.0 dune-configurator.3.16.0 duration.0.2.1 either.1.0.0 eqaf.0.10 fmt.0.9.0 fpath.0.7.3 gen.1.1 gmap.0.3.0 host-arch-x86_64.1 host-system-other.1 http.6.0.0~beta2 ipaddr.5.6.0 ipaddr-sexp.5.6.0 js_of_ocaml.5.8.2 js_of_ocaml-compiler.5.8.2 kdf.1.0.0 logs.0.7.0 lwt.5.7.0 macaddr.5.6.0 magic-mime.1.3.1 mdx.2.4.1 menhir.20240715 menhirCST.20240715 menhirLib.20240715 menhirSdk.20240715 mirage-crypto.1.1.0 mirage-crypto-ec.1.1.0 mirage-crypto-pk.1.1.0 mirage-crypto-rng.1.1.0 mtime.2.1.0 multicore-bench.0.1.5 multicore-magic.2.3.0 multicore-magic-dscheck.2.3.0 num.1.5-1 ocaml.5.2.0 ocaml-base-compiler.5.2.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.3.6.9 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.15.0 ocamlfind.1.9.6 ocplib-endian.1.2 ohex.0.2.0 oseq.0.5.1 parsexp.v0.17.0 ppx_derivers.1.2.1 ppx_sexp_conv.v0.17.0 ppxlib.0.33.0 ppxlib_jane.v0.17.0 psq.0.2.1 ptime.1.2.0 qcheck-core.0.22 qcheck-multicoretests-util.0.4 qcheck-stm.0.4 re.1.12.0 result.1.5 rresult.0.7.0 sedlex.3.2 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdlib-shims.0.3.0 stringext.1.6.0 thread-local-storage.0.2 thread-table.1.0.0 topkg.1.0.7 tsort.2.1.0 uri.4.4.0 uri-sexp.4.4.0 uutf.1.0.3 x509.1.0.4 yojson.2.2.2 zarith.1.14"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.2 --depext-only -y picos_std.dev picos_mux.dev picos_meta.dev picos_lwt.dev picos_io_cohttp.dev picos_io.dev picos_aux.dev picos.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build

END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK

2024-10-09 17:51.27: Using cache hint "ocaml-multicore/picos-freebsd-ocaml-5.2-freebsd-5.2_opam-2.2-764e145505dba3b8e45a3c2357e3b070"
2024-10-09 17:51.27: Using OBuilder spec:
((from freebsd-ocaml-5.2)
 (comment freebsd-5.2_opam-2.2)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (shell "sudo ln -f /usr/local/bin/opam-2.2 /usr/local/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "cd ~/opam-repository && (git cat-file -e f7c5ed58d4e80e295c4022514da894342e69bfa0 || git fetch origin master) && git reset -q --hard f7c5ed58d4e80e295c4022514da894342e69bfa0 && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src picos_std.opam picos_mux.opam picos_meta.opam picos_lwt.opam picos_io_cohttp.opam picos_io.opam picos_aux.opam picos.opam)
       (dst ./))
 (run (network host)
      (shell  "opam pin add -yn picos_std.dev './' && \
             \nopam pin add -yn picos_mux.dev './' && \
             \nopam pin add -yn picos_meta.dev './' && \
             \nopam pin add -yn picos_lwt.dev './' && \
             \nopam pin add -yn picos_io_cohttp.dev './' && \
             \nopam pin add -yn picos_io.dev './' && \
             \nopam pin add -yn picos_aux.dev './' && \
             \nopam pin add -yn picos.dev './'"))
 (env DEPS "alcotest.1.8.0 angstrom.0.16.1 asn1-combinators.0.3.2 astring.0.8.5 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-bytes.base base-domains.base base-nnp.base base-threads.base base-unix.base base64.3.5.1 bigstringaf.0.10.0 bos.0.2.1 ca-certs.1.0.0 camlp-streams.5.0.1 cmdliner.1.3.0 cohttp.6.0.0~beta2 cohttp-lwt.6.0.0~beta2 cohttp-lwt-unix.6.0.0~beta2 conduit.6.2.3 conduit-lwt.6.2.3 conduit-lwt-unix.6.2.3 conf-gmp.4 conf-gmp-powm-sec.3 conf-npm.1 conf-pkg-config.3 containers.3.14 cppo.1.7.0 csexp.1.5.2 digestif.1.2.0 domain-local-await.1.0.1 domain-name.0.4.0 domain_shims.0.1.0 dscheck.0.5.0 dune.3.16.0 dune-configurator.3.16.0 duration.0.2.1 either.1.0.0 eqaf.0.10 fmt.0.9.0 fpath.0.7.3 gen.1.1 gmap.0.3.0 host-arch-x86_64.1 host-system-other.1 http.6.0.0~beta2 ipaddr.5.6.0 ipaddr-sexp.5.6.0 js_of_ocaml.5.8.2 js_of_ocaml-compiler.5.8.2 kdf.1.0.0 logs.0.7.0 lwt.5.7.0 macaddr.5.6.0 magic-mime.1.3.1 mdx.2.4.1 menhir.20240715 menhirCST.20240715 menhirLib.20240715 menhirSdk.20240715 mirage-crypto.1.1.0 mirage-crypto-ec.1.1.0 mirage-crypto-pk.1.1.0 mirage-crypto-rng.1.1.0 mtime.2.1.0 multicore-bench.0.1.5 multicore-magic.2.3.0 multicore-magic-dscheck.2.3.0 num.1.5-1 ocaml.5.2.0 ocaml-base-compiler.5.2.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.3.6.9 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.15.0 ocamlfind.1.9.6 ocplib-endian.1.2 ohex.0.2.0 oseq.0.5.1 parsexp.v0.17.0 ppx_derivers.1.2.1 ppx_sexp_conv.v0.17.0 ppxlib.0.33.0 ppxlib_jane.v0.17.0 psq.0.2.1 ptime.1.2.0 qcheck-core.0.22 qcheck-multicoretests-util.0.4 qcheck-stm.0.4 re.1.12.0 result.1.5 rresult.0.7.0 sedlex.3.2 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdlib-shims.0.3.0 stringext.1.6.0 thread-local-storage.0.2 thread-table.1.0.0 topkg.1.0.7 tsort.2.1.0 uri.4.4.0 uri-sexp.4.4.0 uutf.1.0.3 x509.1.0.4 yojson.2.2.2 zarith.1.14")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam update --depexts && opam install --cli=2.2 --depext-only -y picos_std.dev picos_mux.dev picos_meta.dev picos_lwt.dev picos_io_cohttp.dev picos_io.dev picos_aux.dev picos.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam install $DEPS"))
 (copy (src .) (dst /src))
 (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)

2024-10-09 17:51.27: Waiting for resource in pool OCluster
2024-10-09 17:51.27: Waiting for worker…
2024-10-09 17:51.27: Got resource from pool OCluster
Building on summer
All commits already cached
HEAD is now at 02903c2 Add `Picos_std_sync.Queue`

(from freebsd-ocaml-5.2)
2024-10-09 17:41.38 ---> using "3515bf58445883216685df527c06fc8b9b7799700694ffa335d1401da4433261" from cache

/: (comment freebsd-5.2_opam-2.2)

/: (user (uid 1000) (gid 1000))

/: (env CLICOLOR_FORCE 1)

/: (env OPAMCOLOR always)

/: (workdir /src)

/src: (run (shell "sudo ln -f /usr/local/bin/opam-2.2 /usr/local/bin/opam"))
2024-10-09 17:41.38 ---> using "ce5d446dcad4679490c4842006d3847ab79c5b03f27c64107dd9b4b693f9227b" from cache

/src: (run (shell "opam init --reinit -ni"))
No configuration file found, using built-in defaults.
Checking for available remotes: rsync and local, git.
  - you won't be able to use mercurial repositories unless you install the hg command on your system.
  - you won't be able to use darcs repositories unless you install the darcs command on your system.

This version of opam requires an update to the layout of /home/opam/.opam from version 2.1 to version 2.2, which can't be reverted.
You may want to back it up before going further.

Continue? [y/n] y
[NOTE] The 'jobs' option was reset, its value was 1 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using:
           opam option jobs=1 --global
Format upgrade done.

<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[default] no changes from git+file:///home/opam/opam-repository
2024-10-09 17:41.38 ---> using "315f104d2431f3f8513b249e297f5cd9ab00e87e07651779a2c033f75790f186" from cache

/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
FreeBSD 14.0-RELEASE-p11
The OCaml toplevel, version 5.2.0
2.2.1
2024-10-09 17:41.38 ---> using "122bbc3ca2d5d079e67c62df055dfe2029bb1467ac0b82961d95a877a82d7ad7" from cache

/src: (workdir /src)

/src: (run (shell "sudo chown opam /src"))
2024-10-09 17:41.39 ---> using "ea3d067661d07cb941931c0bd3d63b7212a0090aedfda056edf38cb8ce4f84dc" from cache

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
           (network host)
           (shell "cd ~/opam-repository && (git cat-file -e f7c5ed58d4e80e295c4022514da894342e69bfa0 || git fetch origin master) && git reset -q --hard f7c5ed58d4e80e295c4022514da894342e69bfa0 && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
 * branch                  master     -> FETCH_HEAD
   5e7a27da27..34250d4317  master     -> origin/master
f7c5ed58d4 Merge pull request #26635 from hannesm/release-x509-v1.0.4

<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] synchronised from git+file:///home/opam/opam-repository

Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).

The following packages are not being upgraded because the new versions conflict with other installed packages:
  - ocaml.5.4.0
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
2024-10-09 17:41.39 ---> using "0070f919689cd29aa3f04b35054edc8498824a458cdd001adf0f05da98d0891a" from cache

/src: (copy (src picos_std.opam picos_mux.opam picos_meta.opam picos_lwt.opam picos_io_cohttp.opam picos_io.opam picos_aux.opam picos.opam)
            (dst ./))
2024-10-09 17:41.39 ---> using "7bcbf8213d0c7c295102ea50ad865e456889e9058e860ec32e00391d69bdb467" from cache

/src: (run (network host)
           (shell  "opam pin add -yn picos_std.dev './' && \
                  \nopam pin add -yn picos_mux.dev './' && \
                  \nopam pin add -yn picos_meta.dev './' && \
                  \nopam pin add -yn picos_lwt.dev './' && \
                  \nopam pin add -yn picos_io_cohttp.dev './' && \
                  \nopam pin add -yn picos_io.dev './' && \
                  \nopam pin add -yn picos_aux.dev './' && \
                  \nopam pin add -yn picos.dev './'"))
[picos_std.dev] synchronised (file:///src)
picos_std is now pinned to file:///src (version dev)
[picos_mux.dev] synchronised (file:///src)
picos_mux is now pinned to file:///src (version dev)
[picos_meta.dev] synchronised (file:///src)
picos_meta is now pinned to file:///src (version dev)
[picos_lwt.dev] synchronised (file:///src)
picos_lwt is now pinned to file:///src (version dev)
[picos_io_cohttp.dev] synchronised (file:///src)
picos_io_cohttp is now pinned to file:///src (version dev)
[picos_io.dev] synchronised (file:///src)
picos_io is now pinned to file:///src (version dev)
[picos_aux.dev] synchronised (file:///src)
picos_aux is now pinned to file:///src (version dev)
[picos.dev] synchronised (file:///src)
picos is now pinned to file:///src (version dev)
2024-10-09 17:41.39 ---> using "710fa2cc01ba2a5a467eacef9690cebdce7eb4efd0732a66878f8f29e267e6bd" from cache

/src: (env DEPS "alcotest.1.8.0 angstrom.0.16.1 asn1-combinators.0.3.2 astring.0.8.5 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-bytes.base base-domains.base base-nnp.base base-threads.base base-unix.base base64.3.5.1 bigstringaf.0.10.0 bos.0.2.1 ca-certs.1.0.0 camlp-streams.5.0.1 cmdliner.1.3.0 cohttp.6.0.0~beta2 cohttp-lwt.6.0.0~beta2 cohttp-lwt-unix.6.0.0~beta2 conduit.6.2.3 conduit-lwt.6.2.3 conduit-lwt-unix.6.2.3 conf-gmp.4 conf-gmp-powm-sec.3 conf-npm.1 conf-pkg-config.3 containers.3.14 cppo.1.7.0 csexp.1.5.2 digestif.1.2.0 domain-local-await.1.0.1 domain-name.0.4.0 domain_shims.0.1.0 dscheck.0.5.0 dune.3.16.0 dune-configurator.3.16.0 duration.0.2.1 either.1.0.0 eqaf.0.10 fmt.0.9.0 fpath.0.7.3 gen.1.1 gmap.0.3.0 host-arch-x86_64.1 host-system-other.1 http.6.0.0~beta2 ipaddr.5.6.0 ipaddr-sexp.5.6.0 js_of_ocaml.5.8.2 js_of_ocaml-compiler.5.8.2 kdf.1.0.0 logs.0.7.0 lwt.5.7.0 macaddr.5.6.0 magic-mime.1.3.1 mdx.2.4.1 menhir.20240715 menhirCST.20240715 menhirLib.20240715 menhirSdk.20240715 mirage-crypto.1.1.0 mirage-crypto-ec.1.1.0 mirage-crypto-pk.1.1.0 mirage-crypto-rng.1.1.0 mtime.2.1.0 multicore-bench.0.1.5 multicore-magic.2.3.0 multicore-magic-dscheck.2.3.0 num.1.5-1 ocaml.5.2.0 ocaml-base-compiler.5.2.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.3.6.9 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.15.0 ocamlfind.1.9.6 ocplib-endian.1.2 ohex.0.2.0 oseq.0.5.1 parsexp.v0.17.0 ppx_derivers.1.2.1 ppx_sexp_conv.v0.17.0 ppxlib.0.33.0 ppxlib_jane.v0.17.0 psq.0.2.1 ptime.1.2.0 qcheck-core.0.22 qcheck-multicoretests-util.0.4 qcheck-stm.0.4 re.1.12.0 result.1.5 rresult.0.7.0 sedlex.3.2 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdlib-shims.0.3.0 stringext.1.6.0 thread-local-storage.0.2 thread-table.1.0.0 topkg.1.0.7 tsort.2.1.0 uri.4.4.0 uri-sexp.4.4.0 uutf.1.0.3 x509.1.0.4 yojson.2.2.2 zarith.1.14")

/src: (env CI true)

/src: (env OCAMLCI true)

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
           (network host)
           (shell "opam update --depexts && opam install --cli=2.2 --depext-only -y picos_std.dev picos_mux.dev picos_meta.dev picos_lwt.dev picos_io_cohttp.dev picos_io.dev picos_aux.dev picos.dev $DEPS"))
[WARNING] Unknown update command for bsd, skipping system update

<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[picos.dev] synchronised (no changes)
[picos_aux.dev] synchronised (no changes)
[picos_io.dev] synchronised (no changes)
[picos_io_cohttp.dev] synchronised (no changes)
[picos_lwt.dev] synchronised (no changes)
[picos_meta.dev] synchronised (no changes)
[picos_mux.dev] synchronised (no changes)
[picos_std.dev] synchronised (no changes)

[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.2.0).
[NOTE] Package ocaml is already installed (current version is 5.2.0).
[NOTE] Package host-system-other is already installed (current version is 1).
[NOTE] Package host-arch-x86_64 is already installed (current version is 1).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).

The following system packages will first need to be installed:
    ca_root_nss gmp npm

<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>

+ /usr/local/bin/sudo "pkg" "install" "-y" "ca_root_nss" "gmp" "npm"
- Updating FreeBSD repository catalogue...
- [summer] Fetching data.pkg: .......... done
- Processing entries: 
- Processing entries....
- ..
- ..
- ..
- ..
- . done
- FreeBSD repository update completed. 34404 packages processed.
- All repositories are up to date.
- The following 9 package(s) will be affected (of 0 checked):
- 
- New packages to be INSTALLED:
- 	brotli: 1.1.0,1
- 	c-ares: 1.30.0
- 	ca_root_nss: 3.93_2
- 	gmp: 6.3.0
- 	icu: 74.2_1,1
- 	libuv: 1.48.0
- 	node20: 20.15.1
- 	npm: 10.8.1
- 	npm-node20: 10.8.1
- 
- Number of packages to be installed: 9
- 
- The process will require 111 MiB more space.
- 26 MiB to be downloaded.
- [summer] [1/9] Fetching c-ares-1.30.0.pkg: .......... done
- [summer] [2/9] Fetching npm-node20-10.8.1.pkg: .......... done
- [summer] [3/9] Fetching icu-74.2_1,1.pkg: ...
- ....... done
- [summer] [4/9] Fetching libuv-1.48.0.pkg: .......... done
- [summer] [5/9] Fetching gmp-6.3.0.pkg: .......... done
- [summer] [6/9] Fetching npm-10.8.1.pkg: . done
- [summer] [7/9] Fetching brotli-1.1.0,1.pkg: .......... done
- [summer] [8/9] Fetching ca_root_nss-3.93_2.pkg: ...... done
- [summer] [9/9] Fetching node20-20.15.1.pkg: 
- ..
- ...
- ..... done
- Checking integrity... done (0 conflicting)
- [summer] [1/9] Installing c-ares-1.30.0...
- [summer] [1/9] Extracting c-ares-1.30.0: .......... done
- [summer] [2/9] Installing icu-74.2_1,1...
- [summer] [2/9] Extracting icu-74.2_1,1: .......... done
- [summer] [3/9] Installing libuv-1.48.0...
- [summer] [3/9] Extracting libuv-1.48.0: .......... done
- [summer] [4/9] Installing brotli-1.1.0,1...
- [summer] [4/9] Extracting brotli-1.1.0,1: .......... done
- [summer] [5/9] Installing ca_root_nss-3.93_2...
- [summer] [5/9] Extracting ca_root_nss-3.93_2: ....... done
- Scanning /usr/share/certs/untrusted for certificates...
- Scanning /usr/share/certs/trusted for certificates...
- Scanning /usr/local/share/certs for certificates...
- [summer] [6/9] Installing node20-20.15.1...
- [summer] [6/9] Extracting node20-20.15.1: .......... done
- [summer] [7/9] Installing npm-node20-10.8.1...
- [summer] [7/9] Extracting npm-node20-10.8.1: .......... done
- [summer] [8/9] Installing gmp-6.3.0...
- [summer] [8/9] Extracting gmp-6.3.0: .......... done
- [summer] [9/9] Installing npm-10.8.1...
- [summer] [9/9] Extracting npm-10.8.1: ... done
- =====
- Message from ca_root_nss-3.93_2:
- 
- --
- FreeBSD does not, and can not warrant that the certification authorities
- whose certificates are included in this package have in any way been
- audited for trustworthiness or RFC 3647 compliance.
- 
- Assessment and verification of trust is the complete responsibility of
- the system administrator.
- 
- This package installs symlinks to support root certificate discovery
- for software that either uses other cryptographic libraries than
- OpenSSL, or use OpenSSL but do not follow recommended practice.
- 
- If you prefer to do this manually, replace the following symlinks with
- either an empty file or your site-local certificate bundle.
- 
-   * /etc/ssl/cert.pem
-   * /usr/local/etc/ssl/cert.pem
-   * /usr/local/openssl/cert.pem
- =====
- Message from node20-20.15.1:
- 
- --
- Note: If you need npm (Node Package Manager), please install www/npm.
2024-10-09 17:41.39 ---> using "d986035d50c8dc280e6bb27e7da1a7af36491572af128919af43cafdb7e24030" from cache

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
           (network host)
           (shell "opam install $DEPS"))
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.2.0).
[NOTE] Package ocaml is already installed (current version is 5.2.0).
[NOTE] Package host-system-other is already installed (current version is 1).
[NOTE] Package host-arch-x86_64 is already installed (current version is 1).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following actions will be performed:
=== install 102 packages
   alcotest                   1.8.0
   angstrom                   0.16.1
   asn1-combinators           0.3.2
   astring                    0.8.5
   backoff                    0.1.1
   base                       v0.17.1
   base-bytes                 base
   base64                     3.5.1
   bigstringaf                0.10.0
   bos                        0.2.1
   ca-certs                   1.0.0
   camlp-streams              5.0.1
   cmdliner                   1.3.0
   cohttp                     6.0.0~beta2
   cohttp-lwt                 6.0.0~beta2
   cohttp-lwt-unix            6.0.0~beta2
   conduit                    6.2.3
   conduit-lwt                6.2.3
   conduit-lwt-unix           6.2.3
   conf-gmp                   4
   conf-gmp-powm-sec          3
   conf-npm                   1
   conf-pkg-config            3
   containers                 3.14
   cppo                       1.7.0
   csexp                      1.5.2
   digestif                   1.2.0
   domain-local-await         1.0.1
   domain-name                0.4.0
   domain_shims               0.1.0
   dscheck                    0.5.0
   dune                       3.16.0
   dune-configurator          3.16.0
   duration                   0.2.1
   either                     1.0.0
   eqaf                       0.10
   fmt                        0.9.0
   fpath                      0.7.3
   gen                        1.1
   gmap                       0.3.0
   http                       6.0.0~beta2
   ipaddr                     5.6.0
   ipaddr-sexp                5.6.0
   js_of_ocaml                5.8.2
   js_of_ocaml-compiler       5.8.2
   kdf                        1.0.0
   logs                       0.7.0
   lwt                        5.7.0
   macaddr                    5.6.0
   magic-mime                 1.3.1
   mdx                        2.4.1
   menhir                     20240715
   menhirCST                  20240715
   menhirLib                  20240715
   menhirSdk                  20240715
   mirage-crypto              1.1.0
   mirage-crypto-ec           1.1.0
   mirage-crypto-pk           1.1.0
   mirage-crypto-rng          1.1.0
   mtime                      2.1.0
   multicore-bench            0.1.5
   multicore-magic            2.3.0
   multicore-magic-dscheck    2.3.0
   num                        1.5-1
   ocaml-compiler-libs        v0.17.0
   ocaml-syntax-shims         1.0.0
   ocaml-version              3.6.9
   ocaml_intrinsics_kernel    v0.17.1
   ocamlbuild                 0.15.0
   ocamlfind                  1.9.6
   ocplib-endian              1.2
   ohex                       0.2.0
   oseq                       0.5.1
   parsexp                    v0.17.0
   ppx_derivers               1.2.1
   ppx_sexp_conv              v0.17.0
   ppxlib                     0.33.0
   ppxlib_jane                v0.17.0
   psq                        0.2.1
   ptime                      1.2.0
   qcheck-core                0.22
   qcheck-multicoretests-util 0.4
   qcheck-stm                 0.4
   re                         1.12.0
   result                     1.5
   rresult                    0.7.0
   sedlex                     3.2
   seq                        base
   sexplib                    v0.17.0
   sexplib0                   v0.17.0
   stdlib-shims               0.3.0
   stringext                  1.6.0
   thread-local-storage       0.2
   thread-table               1.0.0
   topkg                      1.0.7
   tsort                      2.1.0
   uri                        4.4.0
   uri-sexp                   4.4.0
   uutf                       1.0.3
   x509                       1.0.4
   yojson                     2.2.2
   zarith                     1.14

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
 retrieved angstrom.0.16.1  (cached)
 retrieved asn1-combinators.0.3.2  (cached)
 retrieved astring.0.8.5  (cached)
 retrieved backoff.0.1.1  (cached)
 retrieved alcotest.1.8.0  (cached)
 retrieved base64.3.5.1  (cached)
 retrieved bigstringaf.0.10.0  (cached)
 retrieved base.v0.17.1  (cached)
 retrieved ca-certs.1.0.0  (cached)
 retrieved bos.0.2.1  (cached)
 retrieved camlp-streams.5.0.1  (cached)
 retrieved cmdliner.1.3.0  (cached)
 retrieved conf-gmp.4  (cached)
 retrieved conf-gmp-powm-sec.3  (cached)
 retrieved conduit.6.2.3, conduit-lwt.6.2.3, conduit-lwt-unix.6.2.3  (cached)
 installed conf-pkg-config.3
 retrieved cppo.1.7.0  (cached)
 installed conf-gmp.4
 retrieved csexp.1.5.2  (cached)
 installed conf-gmp-powm-sec.3
 retrieved containers.3.14  (cached)
 retrieved cohttp.6.0.0~beta2, cohttp-lwt.6.0.0~beta2, cohttp-lwt-unix.6.0.0~beta2, http.6.0.0~beta2  (cached)
 retrieved domain-local-await.1.0.1  (cached)
 installed conf-npm.1
 retrieved domain-name.0.4.0  (cached)
 retrieved domain_shims.0.1.0  (cached)
 retrieved dscheck.0.5.0  (cached)
 retrieved duration.0.2.1  (cached)
 retrieved either.1.0.0  (cached)
 retrieved eqaf.0.10  (cached)
 retrieved fmt.0.9.0  (cached)
 retrieved fpath.0.7.3  (cached)
 retrieved gen.1.1  (cached)
 retrieved digestif.1.2.0  (cached)
 retrieved gmap.0.3.0  (cached)
 retrieved ipaddr.5.6.0, ipaddr-sexp.5.6.0, macaddr.5.6.0  (cached)
 retrieved kdf.1.0.0  (cached)
 retrieved logs.0.7.0  (cached)
 retrieved lwt.5.7.0  (cached)
 retrieved magic-mime.1.3.1  (cached)
 retrieved mdx.2.4.1  (cached)
 retrieved menhir.20240715, menhirCST.20240715, menhirLib.20240715, menhirSdk.20240715  (cached)
 retrieved dune.3.16.0, dune-configurator.3.16.0  (cached)
 retrieved mtime.2.1.0  (cached)
 retrieved mirage-crypto.1.1.0, mirage-crypto-ec.1.1.0, mirage-crypto-pk.1.1.0, mirage-crypto-rng.1.1.0  (cached)
 retrieved multicore-bench.0.1.5  (cached)
 retrieved multicore-magic.2.3.0, multicore-magic-dscheck.2.3.0  (cached)
 retrieved num.1.5-1  (cached)
 retrieved ocaml-syntax-shims.1.0.0  (cached)
 retrieved ocaml-compiler-libs.v0.17.0  (cached)
 retrieved js_of_ocaml.5.8.2, js_of_ocaml-compiler.5.8.2  (cached)
 retrieved ocaml_intrinsics_kernel.v0.17.1  (cached)
 retrieved ocaml-version.3.6.9  (cached)
 retrieved ocplib-endian.1.2  (cached)
 retrieved ocamlfind.1.9.6  (cached)
 retrieved oseq.0.5.1  (cached)
 retrieved ohex.0.2.0  (cached)
 retrieved ocamlbuild.0.15.0  (cached)
 retrieved ppx_derivers.1.2.1  (cached)
 retrieved ppx_sexp_conv.v0.17.0  (cached)
 retrieved parsexp.v0.17.0  (cached)
 retrieved ppxlib_jane.v0.17.0  (cached)
 retrieved psq.0.2.1  (cached)
 retrieved ptime.1.2.0  (cached)
 retrieved qcheck-core.0.22  (cached)
 retrieved qcheck-multicoretests-util.0.4, qcheck-stm.0.4  (cached)
 retrieved re.1.12.0  (cached)
 retrieved result.1.5  (cached)
 installed cmdliner.1.3.0
 retrieved sedlex.3.2  (cached)
 retrieved seq.base  (cached)
 installed seq.base
 retrieved rresult.0.7.0  (cached)
 retrieved ppxlib.0.33.0  (cached)
 retrieved sexplib0.v0.17.0  (cached)
 retrieved sexplib.v0.17.0  (cached)
 retrieved stringext.1.6.0  (cached)
 retrieved thread-local-storage.0.2  (cached)
 retrieved thread-table.1.0.0  (cached)
 retrieved stdlib-shims.0.3.0  (cached)
 retrieved tsort.2.1.0  (cached)
 retrieved topkg.1.0.7  (cached)
 retrieved uutf.1.0.3  (cached)
 retrieved uri.4.4.0, uri-sexp.4.4.0  (cached)
 retrieved yojson.2.2.2  (cached)
 retrieved zarith.1.14  (cached)
 retrieved x509.1.0.4  (cached)
 installed num.1.5-1
 installed ocamlfind.1.9.6
 installed base-bytes.base
 installed ocamlbuild.0.15.0
 installed zarith.1.14
 installed topkg.1.0.7
 installed rresult.0.7.0
 installed mtime.2.1.0
 installed uutf.1.0.3
 installed ptime.1.2.0
 installed fmt.0.9.0
 installed astring.0.8.5
 installed fpath.0.7.3
 installed dune.3.16.0
 installed gmap.0.3.0
 installed either.1.0.0
 installed macaddr.5.6.0
 installed http.6.0.0~beta2
 installed eqaf.0.10
 installed gen.1.1
 installed menhirCST.20240715
 installed duration.0.2.1
 installed domain_shims.0.1.0
 installed domain-name.0.4.0
 installed csexp.1.5.2
 installed cppo.1.7.0
 installed ipaddr.5.6.0
 installed ocaml_intrinsics_kernel.v0.17.1
 installed ocaml-version.3.6.9
 installed ocaml-syntax-shims.1.0.0
 installed ocaml-compiler-libs.v0.17.0
 installed digestif.1.2.0
 installed multicore-magic.2.3.0
 installed menhirSdk.20240715
 installed ppx_derivers.1.2.1
 installed menhirLib.20240715
 installed ohex.0.2.0
 installed magic-mime.1.3.1
 installed camlp-streams.5.0.1
 installed base64.3.5.1
 installed ocplib-endian.1.2
 installed stdlib-shims.0.3.0
 installed tsort.2.1.0
 installed thread-table.1.0.0
 installed thread-local-storage.0.2
 installed stringext.1.6.0
 installed sexplib0.v0.17.0
 installed result.1.5
 installed re.1.12.0
 installed domain-local-await.1.0.1
 installed qcheck-core.0.22
 installed psq.0.2.1
 installed oseq.0.5.1
 installed dune-configurator.3.16.0
 installed bigstringaf.0.10.0
 installed qcheck-multicoretests-util.0.4
 installed alcotest.1.8.0
 installed parsexp.v0.17.0
 installed yojson.2.2.2
 installed backoff.0.1.1
 installed asn1-combinators.0.3.2
 installed qcheck-stm.0.4
 installed angstrom.0.16.1
 installed multicore-bench.0.1.5
 installed sexplib.v0.17.0
 installed mirage-crypto.1.1.0
 installed containers.3.14
 installed kdf.1.0.0
 installed uri.4.4.0
 installed dscheck.0.5.0
 installed multicore-magic-dscheck.2.3.0
 installed lwt.5.7.0
 installed base.v0.17.1
 installed menhir.20240715
 installed ppxlib.0.33.0
 installed ppxlib_jane.v0.17.0
 installed sedlex.3.2
 installed ppx_sexp_conv.v0.17.0
 installed ipaddr-sexp.5.6.0
 installed uri-sexp.4.4.0
 installed js_of_ocaml-compiler.5.8.2
 installed js_of_ocaml.5.8.2
 installed logs.0.7.0
 installed mirage-crypto-rng.1.1.0
 installed conduit.6.2.3
 installed mirage-crypto-pk.1.1.0
 installed cohttp.6.0.0~beta2
 installed mdx.2.4.1
 installed conduit-lwt.6.2.3
 installed bos.0.2.1
 installed cohttp-lwt.6.0.0~beta2
 installed mirage-crypto-ec.1.1.0
 installed x509.1.0.4
 installed ca-certs.1.0.0
 installed conduit-lwt-unix.6.2.3
 installed cohttp-lwt-unix.6.0.0~beta2
Done.
2024-10-09 17:41.39 ---> using "cd44e18231806bda0bb59f745361f34f3ab736fe1de1e279d87c6ab3d52174c5" from cache

/src: (copy (src .) (dst /src))
2024-10-09 17:41.40 ---> saved as "24c8bf625f231ac14de5766369d8ebca2aeb1ede3a8f9178adcc77b413a87b07"

/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
(cd _build/default/bench && ./main.exe -brief 'Picos Computation')
Picos Computation:
  time per attach detach pair/trivial:
    100.28 ns
  attach detach pairs over time/trivial:
    9.97 M/s
  time per attach detach pair/1 worker:
    268.92 ns
  attach detach pairs over time/1 worker:
    3.72 M/s
  time per attach detach pair/2 workers:
    582.83 ns
  attach detach pairs over time/2 workers:
    3.43 M/s
  time per attach detach pair/4 workers:
    1065.59 ns
  attach detach pairs over time/4 workers:
    3.75 M/s
(cd _build/default/bench && ./main.exe -brief 'Picos Current')
Picos Current:
  time per op/1 worker:
    46.85 ns
  ops over time/1 worker:
    21.35 M/s
  time per op/2 workers:
    53.41 ns
  ops over time/2 workers:
    37.44 M/s
  time per op/4 workers:
    985.30 ns
  ops over time/4 workers:
    4.06 M/s
(cd _build/default/test && ./test_lwt_unix.exe)
Testing `Picos_lwt'.
This run has ID `BXAPGCSL'.

  [OK]          Basics          0   

Full test results in `/src/_build/default/test/_build/_tests/Picos_lwt'.
Test Successful in 0.088s. 1 test run.
(cd _build/default/test && ./test_picos_lwt_unix_with_cohttp.exe)
Uri: //127.0.0.1:8000/hello-lwt
Method: GET

host: 127.0.0.1:8000
user-agent: ocaml-cohttp/v6.0.0_beta2

Body: 
(cd _build/default/test && ./test_server_and_client.exe)
Using blocking sockets and fibers on OCaml 5:
  Recursive server running
  Server listening
  Client B running
  Server accepting
  Client A running
  Client B connected
  Client B wrote 100
  Server accepted client
  Server accepting
  Server read 100
  Client A connected
  Server accepted client
  Server wrote 50
  Client B read 50
  Client A wrote 100
  Server accepting
  Server read 100
  Server wrote 50
  Client A read 50
Server and Client test: OK
(cd _build/default/test && /usr/local/bin/node test_js_of_ocaml.bc.js)
Hello, from js_of_ocaml with Picos!
(cd _build/default/test && ./test_finally.exe)
Testing `Picos_finally'.
This run has ID `JHUGR1CV'.

  [OK]          move            0   is lazy.
  [OK]          borrow          0   returns resource.

Full test results in `/src/_build/default/test/_build/_tests/Picos_finally'.
Test Successful in 0.042s. 2 tests run.
(cd _build/default/test && ./test_sync.exe -- Event 0)
Testing `Picos_sync'.
This run has ID `6PB0QWGN'.

  [SKIP]        Mutex and Condition          0   basics.
  [SKIP]        Mutex and Condition          1   errors.
  [SKIP]        Mutex and Condition          2   cancelation.
  [SKIP]        Semaphore                    0   basics.
  [SKIP]        Semaphore                    1   stress.
  [SKIP]        Lazy                         0   basics.
  [SKIP]        Lazy                         1   cancelation.
  [OK]          Event                        0   basics.
  [SKIP]        Non-cancelable ops           0   are not canceled.

Full test results in `/src/_build/default/test/_build/_tests/Picos_sync'.
Test Successful in 0.133s. 1 test run.
(cd _build/default/lib/picos_std.structured && ./mdx_gen.bc.exe picos_std_structured.mli) > _build/default/lib/picos_std.structured/.mdx/picos_std_structured.mli.corrected
Leaked file descriptor (7).
(cd _build/default/test && ./test_sync.exe -- Lazy 0)
Testing `Picos_sync'.
This run has ID `7ZST5YW0'.

  [SKIP]        Mutex and Condition          0   basics.
  [SKIP]        Mutex and Condition          1   errors.
  [SKIP]        Mutex and Condition          2   cancelation.
  [SKIP]        Semaphore                    0   basics.
  [SKIP]        Semaphore                    1   stress.
  [OK]          Lazy                         0   basics.
  [SKIP]        Lazy                         1   cancelation.
  [SKIP]        Event                        0   basics.
  [SKIP]        Non-cancelable ops           0   are not canceled.

Full test results in `/src/_build/default/test/_build/_tests/Picos_sync'.
Test Successful in 0.004s. 1 test run.
(cd _build/default/test && ./test_io.exe)
Testing `Picos_io'.
This run has ID `IBGPMHVK'.

  [OK]          Unix          0   openfile and read.
  [OK]          Unix          1   sleepf.
  [OK]          Unix          2   select empty timeout.
  [OK]          Unix          3   select empty ∞.
  [OK]          Unix          4   select.
  [OK]          Unix          5   system.

Full test results in `/src/_build/default/test/_build/_tests/Picos_io'.
Test Successful in 0.800s. 6 tests run.
(cd _build/default/test && ./test_sync.exe -- Lazy 1)
Testing `Picos_sync'.
This run has ID `GY4YVKK1'.

  [SKIP]        Mutex and Condition          0   basics.
  [SKIP]        Mutex and Condition          1   errors.
  [SKIP]        Mutex and Condition          2   cancelation.
  [SKIP]        Semaphore                    0   basics.
  [SKIP]        Semaphore                    1   stress.
  [SKIP]        Lazy                         0   basics.
  [OK]          Lazy                         1   cancelation.
  [SKIP]        Event                        0   basics.
  [SKIP]        Non-cancelable ops           0   are not canceled.

Full test results in `/src/_build/default/test/_build/_tests/Picos_sync'.
Test Successful in 0.010s. 1 test run.
(cd _build/default/test && ./test_sync.exe -- Semaphore 0)
Testing `Picos_sync'.
This run has ID `4MK9UEXJ'.

  [SKIP]        Mutex and Condition          0   basics.
  [SKIP]        Mutex and Condition          1   errors.
  [SKIP]        Mutex and Condition          2   cancelation.
  [OK]          Semaphore                    0   basics.
  [SKIP]        Semaphore                    1   stress.
  [SKIP]        Lazy                         0   basics.
  [SKIP]        Lazy                         1   cancelation.
  [SKIP]        Event                        0   basics.
  [SKIP]        Non-cancelable ops           0   are not canceled.

Full test results in `/src/_build/default/test/_build/_tests/Picos_sync'.
Test Successful in 0.004s. 1 test run.
(cd _build/default/test && ./test_picos_dscheck.exe)
Testing `Picos DSCheck'.
This run has ID `BU6MB8UN'.

  [OK]          Trigger              0   basic contract.
  [OK]          Computation          0   basic contract.
  [OK]          Computation          1   removes triggers.

Full test results in `/src/_build/default/test/_build/_tests/Picos DSCheck'.
Test Successful in 0.961s. 3 tests run.
(cd _build/default/bench && ./main.exe -brief 'Picos FLS (excluding Current)')
Picos FLS (excluding Current):
  time per get/1 worker:
    11.12 ns
  gets over time/1 worker:
    89.95 M/s
  time per set/1 worker:
    14.36 ns
  sets over time/1 worker:
    69.66 M/s
  time per get/2 workers:
    8.87 ns
  gets over time/2 workers:
    225.54 M/s
  time per set/2 workers:
    19.64 ns
  sets over time/2 workers:
    101.86 M/s
  time per get/4 workers:
    11.16 ns
  gets over time/4 workers:
    358.43 M/s
  time per set/4 workers:
    20.36 ns
  sets over time/4 workers:
    196.45 M/s
(cd _build/default/test && ./test_structured.exe)
Testing `Picos_structured'.
This run has ID `409GM912'.

  [OK]          Bundle          0   fork after terminate.
  [OK]          Bundle          1   fork after escape.
  [OK]          Bundle          2   exception in child terminates.
  [OK]          Bundle          3   cancelation awaits children.
  [OK]          Bundle          4   block raises when forbidden.
  [OK]          Bundle          5   block raises Sys_error when fiber finishes.
  [OK]          Bundle          6   termination nests.
  [OK]          Bundle          7   promise cancelation does not terminate.
  [OK]          Bundle          8   error in promise terminates.
  [OK]          Bundle          9   can wait promises.
  [OK]          Bundle         10   can select promises.
  [OK]          Bundle         11   any and all errors.
  [OK]          Bundle         12   any and all returns.
  [OK]          Bundle         13   race any.

Full test results in `/src/_build/default/test/_build/_tests/Picos_structured'.
Test Successful in 1.618s. 14 tests run.
(cd _build/default/test && ./test_io_with_lwt.exe)
Testing `Picos_io_with_lwt'.
This run has ID `X3ZW0I9H'.

  [OK]          Unix          0   system.

Full test results in `/src/_build/default/test/_build/_tests/Picos_io_with_lwt'.
Test Successful in 2.166s. 1 test run.
(cd _build/default/test && ./test_sync.exe -- Semaphore 1)
Testing `Picos_sync'.
This run has ID `6Z8UIA6U'.

  [SKIP]        Mutex and Condition          0   basics.
  [SKIP]        Mutex and Condition          1   errors.
  [SKIP]        Mutex and Condition          2   cancelation.
  [SKIP]        Semaphore                    0   basics.
  [OK]          Semaphore                    1   stress.
  [SKIP]        Lazy                         0   basics.
  [SKIP]        Lazy                         1   cancelation.
  [SKIP]        Event                        0   basics.
  [SKIP]        Non-cancelable ops           0   are not canceled.

Full test results in `/src/_build/default/test/_build/_tests/Picos_sync'.
Test Successful in 1.669s. 1 test run.
(cd _build/default/test && ./test_sync.exe -- 'Non-cancelable ops' 0)
Testing `Picos_sync'.
This run has ID `NSUXEKKC'.

  [SKIP]        Mutex and Condition          0   basics.
  [SKIP]        Mutex and Condition          1   errors.
  [SKIP]        Mutex and Condition          2   cancelation.
  [SKIP]        Semaphore                    0   basics.
  [SKIP]        Semaphore                    1   stress.
  [SKIP]        Lazy                         0   basics.
  [SKIP]        Lazy                         1   cancelation.
  [SKIP]        Event                        0   basics.
  [OK]          Non-cancelable ops           0   are not canceled.

Full test results in `/src/_build/default/test/_build/_tests/Picos_sync'.
Test Successful in 0.045s. 1 test run.
(cd _build/default/test && ./test_io_cohttp.exe)
Uri: //127.0.0.1:58064/hello-io-cohttp
Method: POST

host: 127.0.0.1:58064
user-agent: ocaml-cohttp/v6.0.0_beta2
content-length: 17

Body: It's-a-Me, Picos!
(cd _build/default/test && ./test_sync.exe -- 'Mutex and Condition' 0)
Testing `Picos_sync'.
This run has ID `6TZS0MM4'.

  [OK]          Mutex and Condition          0   basics.
  [SKIP]        Mutex and Condition          1   errors.
  [SKIP]        Mutex and Condition          2   cancelation.
  [SKIP]        Semaphore                    0   basics.
  [SKIP]        Semaphore                    1   stress.
  [SKIP]        Lazy                         0   basics.
  [SKIP]        Lazy                         1   cancelation.
  [SKIP]        Event                        0   basics.
  [SKIP]        Non-cancelable ops           0   are not canceled.

Full test results in `/src/_build/default/test/_build/_tests/Picos_sync'.
Test Successful in 0.003s. 1 test run.
(cd _build/default/test && ./test_sync.exe -- 'Mutex and Condition' 1)
Testing `Picos_sync'.
This run has ID `86OR58HV'.

  [SKIP]        Mutex and Condition          0   basics.
  [OK]          Mutex and Condition          1   errors.
  [SKIP]        Mutex and Condition          2   cancelation.
  [SKIP]        Semaphore                    0   basics.
  [SKIP]        Semaphore                    1   stress.
  [SKIP]        Lazy                         0   basics.
  [SKIP]        Lazy                         1   cancelation.
  [SKIP]        Event                        0   basics.
  [SKIP]        Non-cancelable ops           0   are not canceled.

Full test results in `/src/_build/default/test/_build/_tests/Picos_sync'.
Test Successful in 0.022s. 1 test run.
(cd _build/default/bench && ./main.exe -brief 'Picos TLS')
Picos TLS:
  time per get/1 worker:
    14.06 ns
  gets over time/1 worker:
    71.14 M/s
  time per set/1 worker:
    30.11 ns
  sets over time/1 worker:
    33.22 M/s
  time per get/2 workers:
    15.67 ns
  gets over time/2 workers:
    127.65 M/s
  time per set/2 workers:
    30.93 ns
  sets over time/2 workers:
    64.66 M/s
  time per get/4 workers:
    15.99 ns
  gets over time/4 workers:
    250.09 M/s
  time per set/4 workers:
    30.19 ns
  sets over time/4 workers:
    132.48 M/s
(cd _build/default/test && ./test_select.exe)
Testing `Picos_select'.
This run has ID `00VGOWSK'.

  [OK]          Intr          0   

Full test results in `/src/_build/default/test/_build/_tests/Picos_select'.
Test Successful in 4.683s. 1 test run.
(cd _build/default/test && ./test_mpmcq_dscheck.exe)
Testing `Picos_mpmcq DSCheck'.
This run has ID `0CMWX2EH'.

  [OK]          Multiple pushes and pops          0   

Full test results in `/src/_build/default/test/_build/_tests/Picos_mpmcq DSCheck'.
Test Successful in 2.770s. 1 test run.
(cd _build/default/bench && ./main.exe -brief 'Picos DLS')
Picos DLS:
  time per get/1 worker:
    13.25 ns
  gets over time/1 worker:
    75.46 M/s
  time per set/1 worker:
    19.46 ns
  sets over time/1 worker:
    51.40 M/s
  time per get/2 workers:
    13.21 ns
  gets over time/2 workers:
    151.42 M/s
  time per set/2 workers:
    22.35 ns
  sets over time/2 workers:
    89.48 M/s
  time per get/4 workers:
    14.59 ns
  gets over time/4 workers:
    274.07 M/s
  time per set/4 workers:
    50.89 ns
  sets over time/4 workers:
    78.60 M/s
(cd _build/default/test && ./test_sync.exe -- 'Mutex and Condition' 2)
Testing `Picos_sync'.
This run has ID `8Z5VE4KR'.

  [SKIP]        Mutex and Condition          0   basics.
  [SKIP]        Mutex and Condition          1   errors.
  [OK]          Mutex and Condition          2   cancelation.
  [SKIP]        Semaphore                    0   basics.
  [SKIP]        Semaphore                    1   stress.
  [SKIP]        Lazy                         0   basics.
  [SKIP]        Lazy                         1   cancelation.
  [SKIP]        Event                        0   basics.
  [SKIP]        Non-cancelable ops           0   are not canceled.

Full test results in `/src/_build/default/test/_build/_tests/Picos_sync'.
Test Successful in 3.581s. 1 test run.
(cd _build/default/bench && ./main.exe -brief 'Picos Mutex')
Picos Mutex:
  time per locked yield/1 fiber:
    351.11 ns
  locked yields over time/1 fiber:
    2.85 M/s
  time per locked yield/2 fibers:
    746.69 ns
  locked yields over time/2 fibers:
    1.34 M/s
  time per locked yield/4 fibers:
    1528.49 ns
  locked yields over time/4 fibers:
    0.65 M/s
  time per locked yield/8 fibers:
    1732.14 ns
  locked yields over time/8 fibers:
    0.58 M/s
  time per locked yield/256 fibers:
    1446.10 ns
  locked yields over time/256 fibers:
    0.69 M/s
  time per locked yield/512 fibers:
    1688.93 ns
  locked yields over time/512 fibers:
    0.59 M/s
  time per locked yield/1024 fibers:
    2359.26 ns
  locked yields over time/1024 fibers:
    0.42 M/s
  time per locked yield/2 domains:
    6482.78 ns
  locked yields over time/2 domains:
    0.31 M/s
  time per locked yield/4 domains:
    13576.78 ns
  locked yields over time/4 domains:
    0.29 M/s
  time per locked yield/8 domains:
    188545.25 ns
  locked yields over time/8 domains:
    0.04 M/s
(cd _build/default/test && ./test_picos.exe)
Testing `Picos'.
This run has ID `F0XD3HXZ'.

  [OK]          Trigger               0   basics.
  [OK]          Computation           0   basics.
  [OK]          Computation           1   tx.
  [OK]          Computation           2   signals in order.
  [OK]          Fiber.FLS             0   basics.
  [OK]          Cancel                0   
  [OK]          Cancel after          0   

Full test results in `/src/_build/default/test/_build/_tests/Picos'.
Test Successful in 8.622s. 7 tests run.
(cd _build/default/bench && ./main.exe -brief 'Picos Semaphore')
Picos Semaphore:
  time per acquired yield/4 fibers, 1 resource:
    2539.00 ns
  acquired yields over time/4 fibers, 1 resource:
    0.39 M/s
  time per acquired yield/4 fibers, 2 resources:
    2295.96 ns
  acquired yields over time/4 fibers, 2 resources:
    0.44 M/s
  time per acquired yield/4 fibers, 3 resources:
    1314.30 ns
  acquired yields over time/4 fibers, 3 resources:
    0.76 M/s
  time per acquired yield/4 fibers, 4 resources:
    387.91 ns
  acquired yields over time/4 fibers, 4 resources:
    2.58 M/s
  time per acquired yield/4 domains, 1 resource:
    1325674.11 ns
  acquired yields over time/4 domains, 1 resource:
    0.00 M/s
  time per acquired yield/4 domains, 2 resources:
    2050.84 ns
  acquired yields over time/4 domains, 2 resources:
    1.95 M/s
  time per acquired yield/4 domains, 3 resources:
    1695.07 ns
  acquired yields over time/4 domains, 3 resources:
    2.36 M/s
  time per acquired yield/4 domains, 4 resources:
    4091.68 ns
  acquired yields over time/4 domains, 4 resources:
    0.98 M/s
(cd _build/default/bench && ./main.exe -brief 'Picos Spawn')
Picos Spawn:
  time per spawn/with packed computation:
    230.68 ns
  spawns over time/with packed computation:
    4.33 M/s
(cd _build/default/test && ./test_schedulers.exe)
Testing `Picos schedulers'.
This run has ID `4773PZEW'.

  [OK]          Trivial main returns                          0   
  [OK]          Scheduler completes main computation          0   
  [OK]          Current                                       0   
  [OK]          Cancel_after                                  0   basic.
  [OK]          Cancel_after                                  1   long timeout.
  [OK]          Operation on canceled fiber raises            0   
  [OK]          Cross scheduler wakeup                        0   
  [OK]          Fatal exception terminates scheduler          0   

Full test results in `/src/_build/default/test/_build/_tests/Picos schedulers'.
Test Successful in 69.521s. 8 tests run.
(cd _build/default/bench && ./main.exe -brief 'Picos Queue')
Picos Queue:
  time per message/one domain:
    99.21 ns
  messages over time/one domain:
    10.08 M/s
  time per message/1 nb adder, 1 nb taker:
    278.27 ns
  messages over time/1 nb adder, 1 nb taker:
    7.19 M/s
  time per message/1 nb adder, 2 nb takers:
    225.61 ns
  messages over time/1 nb adder, 2 nb takers:
    13.30 M/s
  time per message/1 nb adder, 4 nb takers:
    2022.47 ns
  messages over time/1 nb adder, 4 nb takers:
    2.47 M/s
  time per message/2 nb adders, 1 nb taker:
    386.89 ns
  messages over time/2 nb adders, 1 nb taker:
    7.75 M/s
  time per message/2 nb adders, 2 nb takers:
    1669.94 ns
  messages over time/2 nb adders, 2 nb takers:
    2.40 M/s
  time per message/2 nb adders, 4 nb takers:
    3442.38 ns
  messages over time/2 nb adders, 4 nb takers:
    1.74 M/s
  time per message/4 nb adders, 1 nb taker:
    2010.79 ns
  messages over time/4 nb adders, 1 nb taker:
    2.49 M/s
  time per message/4 nb adders, 2 nb takers:
    3082.94 ns
  messages over time/4 nb adders, 2 nb takers:
    1.95 M/s
  time per message/4 nb adders, 4 nb takers:
    6686.02 ns
  messages over time/4 nb adders, 4 nb takers:
    1.20 M/s
(cd _build/default/bench && ./main.exe -brief 'Picos Yield')
Picos Yield:
  time per yield/1 fiber:
    315.11 ns
  yields over time/1 fiber:
    3.17 M/s
  time per yield/10 fibers:
    571.91 ns
  yields over time/10 fibers:
    1.75 M/s
  time per yield/100 fibers:
    761.24 ns
  yields over time/100 fibers:
    1.31 M/s
  time per yield/1000 fibers:
    1348.29 ns
  yields over time/1000 fibers:
    0.74 M/s
  time per yield/10000 fibers:
    2678.52 ns
  yields over time/10000 fibers:
    0.37 M/s
(cd _build/default/test && ./test_mpscq.exe)
random seed: 3105812739948301126
generated error fail pass / total     time test name

[ ]    0    0    0    0 /   32     0.0s Mpscq sequential
[ ]    0    0    0    0 /   32     0.0s Mpscq sequential (generating)
[]   32    0    0   32 /   32     0.0s Mpscq sequential

[ ]    0    0    0    0 /   32     0.0s Mpscq parallel
[ ]    1    0    0    1 /   32     1.6s Mpscq parallel
[ ]    2    0    0    2 /   32     3.0s Mpscq parallel
[ ]    3    0    0    3 /   32     3.7s Mpscq parallel
[ ]    4    0    0    4 /   32     3.8s Mpscq parallel
[ ]    5    0    0    5 /   32     4.0s Mpscq parallel
[ ]    6    0    0    6 /   32     4.2s Mpscq parallel
[ ]    7    0    0    7 /   32     4.5s Mpscq parallel
[ ]    8    0    0    8 /   32     4.6s Mpscq parallel
[ ]    9    0    0    9 /   32     4.7s Mpscq parallel
[ ]   11    0    0   11 /   32     4.9s Mpscq parallel
[ ]   12    0    0   12 /   32     5.1s Mpscq parallel
[ ]   13    0    0   13 /   32     5.5s Mpscq parallel
[ ]   15    0    0   15 /   32     5.7s Mpscq parallel
[ ]   16    0    0   16 /   32     5.8s Mpscq parallel
[ ]   17    0    0   17 /   32     6.0s Mpscq parallel
[ ]   18    0    0   18 /   32     6.1s Mpscq parallel
[ ]   19    0    0   19 /   32     6.2s Mpscq parallel
[ ]   20    0    0   20 /   32     6.6s Mpscq parallel
[ ]   21    0    0   21 /   32     6.9s Mpscq parallel
[ ]   22    0    0   22 /   32     7.8s Mpscq parallel
[ ]   23    0    0   23 /   32     8.0s Mpscq parallel
[ ]   25    0    0   25 /   32     8.2s Mpscq parallel
[ ]   30    0    0   30 /   32     8.4s Mpscq parallel
[]   32    0    0   32 /   32     8.5s Mpscq parallel
================================================================================
success (ran 2 tests)
random seed: 1284304396299000159
generated error fail pass / total     time test name

[ ]    0    0    0    0 /   64     0.0s Mpscq sequential
[]   64    0    0   64 /   64     0.0s Mpscq sequential

[ ]    0    0    0    0 /   64     0.0s Mpscq parallel
[ ]    2    0    0    2 /   64     0.4s Mpscq parallel
[ ]    6    0    0    6 /   64     0.5s Mpscq parallel
[ ]   10    0    0   10 /   64     0.6s Mpscq parallel
[ ]   12    0    0   12 /   64     0.8s Mpscq parallel
[ ]   15    0    0   15 /   64     0.9s Mpscq parallel
[ ]   16    0    0   16 /   64     1.0s Mpscq parallel
[ ]   20    0    0   20 /   64     1.2s Mpscq parallel
[ ]   23    0    0   23 /   64     1.3s Mpscq parallel
[ ]   26    0    0   26 /   64     1.4s Mpscq parallel
[ ]   28    0    0   28 /   64     1.5s Mpscq parallel
[ ]   29    0    0   29 /   64     1.7s Mpscq parallel
[ ]   32    0    0   32 /   64     2.0s Mpscq parallel
[ ]   33    0    0   33 /   64     2.2s Mpscq parallel
[ ]   34    0    0   34 /   64     2.4s Mpscq parallel
[ ]   35    0    0   35 /   64     2.6s Mpscq parallel
[ ]   37    0    0   37 /   64     2.7s Mpscq parallel
[ ]   38    0    0   38 /   64     2.8s Mpscq parallel
[ ]   39    0    0   39 /   64     3.3s Mpscq parallel
[ ]   40    0    0   40 /   64     4.0s Mpscq parallel
[ ]   41    0    0   41 /   64     4.6s Mpscq parallel
[ ]   42    0    0   42 /   64     9.8s Mpscq parallel
[ ]   43    0    0   43 /   64    12.9s Mpscq parallel
[ ]   44    0    0   44 /   64    19.3s Mpscq parallel
[ ]   45    0    0   45 /   64    24.4s Mpscq parallel
[ ]   46    0    0   46 /   64    28.8s Mpscq parallel
[ ]   47    0    0   47 /   64    32.8s Mpscq parallel
[ ]   48    0    0   48 /   64    36.0s Mpscq parallel
[ ]   49    0    0   49 /   64    40.5s Mpscq parallel
[ ]   50    0    0   50 /   64    47.2s Mpscq parallel
[ ]   51    0    0   51 /   64    53.0s Mpscq parallel
[ ]   52    0    0   52 /   64    61.5s Mpscq parallel
[ ]   53    0    0   53 /   64    68.5s Mpscq parallel
[ ]   54    0    0   54 /   64    74.6s Mpscq parallel
[ ]   55    0    0   55 /   64    80.1s Mpscq parallel
[ ]   56    0    0   56 /   64    85.8s Mpscq parallel
[ ]   57    0    0   57 /   64    90.3s Mpscq parallel
[ ]   58    0    0   58 /   64    95.0s Mpscq parallel
[ ]   59    0    0   59 /   64    98.5s Mpscq parallel
[ ]   60    0    0   60 /   64   102.5s Mpscq parallel
[ ]   61    0    0   61 /   64   105.8s Mpscq parallel
[ ]   62    0    0   62 /   64   110.1s Mpscq parallel
[ ]   63    0    0   63 /   64   112.8s Mpscq parallel
[ ]   64    0    0   64 /   64   116.2s Mpscq parallel
[]   64    0    0   64 /   64   116.2s Mpscq parallel
================================================================================
success (ran 2 tests)
(cd _build/default/bench && ./main.exe -brief 'Picos Cancel_after with Picos_select')
Picos Cancel_after with Picos_select:
  time per round-trip/1 worker:
    13706.71 ns
  round-trips over time/1 worker:
    0.07 M/s
  time per round-trip/2 workers:
    24040.36 ns
  round-trips over time/2 workers:
    0.08 M/s
  time per round-trip/4 workers:
    179430.49 ns
  round-trips over time/4 workers:
    0.02 M/s
  time per async round-trip/1 worker:
    72398.16 ns
  async round-trips over time/1 worker:
    0.01 M/s
  time per async round-trip/2 workers:
    29857.91 ns
  async round-trips over time/2 workers:
    0.07 M/s
  time per async round-trip/4 workers:
    203087.20 ns
  async round-trips over time/4 workers:
    0.02 M/s
(cd _build/default/test && ./test_htbl.exe)
random seed: 2319445045687912631
generated error fail pass / total     time test name

[ ]    0    0    0    0 /   32     0.0s Htbl sequential
[ ]    0    0    0    0 /   32     0.0s Htbl sequential (generating)
[]   32    0    0   32 /   32     0.0s Htbl sequential

[ ]    0    0    0    0 /   32     0.0s Htbl parallel
[ ]    1    0    0    1 /   32     1.5s Htbl parallel
[ ]    2    0    0    2 /   32     2.7s Htbl parallel
[ ]    3    0    0    3 /   32     3.4s Htbl parallel
[ ]    4    0    0    4 /   32     3.6s Htbl parallel
[ ]    5    0    0    5 /   32     3.9s Htbl parallel
[ ]    6    0    0    6 /   32     4.2s Htbl parallel
[ ]    7    0    0    7 /   32     4.3s Htbl parallel
[ ]    8    0    0    8 /   32     4.5s Htbl parallel
[ ]   10    0    0   10 /   32     4.6s Htbl parallel
[ ]   11    0    0   11 /   32     4.8s Htbl parallel
[ ]   13    0    0   13 /   32     4.9s Htbl parallel
[ ]   14    0    0   14 /   32     5.1s Htbl parallel
[ ]   15    0    0   15 /   32     5.5s Htbl parallel
[ ]   16    0    0   16 /   32     5.7s Htbl parallel
[ ]   17    0    0   17 /   32     5.8s Htbl parallel
[ ]   18    0    0   18 /   32     5.9s Htbl parallel
[ ]   19    0    0   19 /   32     6.3s Htbl parallel
[ ]   20    0    0   20 /   32     6.4s Htbl parallel
[ ]   21    0    0   21 /   32     6.9s Htbl parallel
[ ]   22    0    0   22 /   32     7.6s Htbl parallel
[ ]   23    0    0   23 /   32     7.8s Htbl parallel
[ ]   26    0    0   26 /   32     7.9s Htbl parallel
[ ]   31    0    0   31 /   32     8.0s Htbl parallel
[]   32    0    0   32 /   32     8.1s Htbl parallel
================================================================================
success (ran 2 tests)
random seed: 2459300142279455638
generated error fail pass / total     time test name

[ ]    0    0    0    0 /   64     0.0s Htbl sequential
[]   64    0    0   64 /   64     0.0s Htbl sequential

[ ]    0    0    0    0 /   64     0.0s Htbl parallel
[ ]    2    0    0    2 /   64     0.2s Htbl parallel
[ ]    5    0    0    5 /   64     0.4s Htbl parallel
[ ]    8    0    0    8 /   64     0.5s Htbl parallel
[ ]   12    0    0   12 /   64     0.6s Htbl parallel
[ ]   13    0    0   13 /   64     0.7s Htbl parallel
[ ]   17    0    0   17 /   64     0.8s Htbl parallel
[ ]   21    0    0   21 /   64     1.1s Htbl parallel
[ ]   22    0    0   22 /   64     1.3s Htbl parallel
[ ]   23    0    0   23 /   64     1.5s Htbl parallel
[ ]   27    0    0   27 /   64     1.7s Htbl parallel
[ ]   29    0    0   29 /   64     1.8s Htbl parallel
[ ]   31    0    0   31 /   64     1.9s Htbl parallel
[ ]   32    0    0   32 /   64     2.0s Htbl parallel
[ ]   38    0    0   38 /   64     2.1s Htbl parallel
[ ]   42    0    0   42 /   64     2.4s Htbl parallel
[ ]   43    0    0   43 /   64     2.7s Htbl parallel
[ ]   45    0    0   45 /   64     2.8s Htbl parallel
[ ]   46    0    0   46 /   64     3.2s Htbl parallel
[ ]   47    0    0   47 /   64     3.7s Htbl parallel
[ ]   48    0    0   48 /   64     4.4s Htbl parallel
[ ]   49    0    0   49 /   64     4.6s Htbl parallel
[ ]   50    0    0   50 /   64     6.0s Htbl parallel
[ ]   51    0    0   51 /   64     7.8s Htbl parallel
[ ]   52    0    0   52 /   64     9.3s Htbl parallel
[ ]   53    0    0   53 /   64    10.8s Htbl parallel
[ ]   54    0    0   54 /   64    12.2s Htbl parallel
[ ]   55    0    0   55 /   64    13.3s Htbl parallel
[ ]   56    0    0   56 /   64    14.7s Htbl parallel
[ ]   57    0    0   57 /   64    15.8s Htbl parallel
[ ]   58    0    0   58 /   64    17.3s Htbl parallel
[ ]   59    0    0   59 /   64    18.7s Htbl parallel
[ ]   60    0    0   60 /   64    19.9s Htbl parallel
[ ]   61    0    0   61 /   64    21.1s Htbl parallel
[ ]   62    0    0   62 /   64    22.0s Htbl parallel
[ ]   63    0    0   63 /   64    22.9s Htbl parallel
[ ]   64    0    0   64 /   64    24.1s Htbl parallel
[]   64    0    0   64 /   64    24.1s Htbl parallel
================================================================================
success (ran 2 tests)
random seed: 2386122755199146064
generated error fail pass / total     time test name

[ ]    0    0    0    0 /   83     0.0s Htbl sequential
[]   83    0    0   83 /   83     0.0s Htbl sequential

[ ]    0    0    0    0 /   83     0.0s Htbl parallel
[ ]    1    0    0    1 /   83     1.3s Htbl parallel
[ ]    2    0    0    2 /   83     2.4s Htbl parallel
[ ]    3    0    0    3 /   83     3.8s Htbl parallel
[ ]    4    0    0    4 /   83     5.0s Htbl parallel
[ ]    5    0    0    5 /   83     6.1s Htbl parallel
[ ]    6    0    0    6 /   83     7.0s Htbl parallel
[ ]    7    0    0    7 /   83     8.4s Htbl parallel
[ ]    8    0    0    8 /   83     9.8s Htbl parallel
[ ]    9    0    0    9 /   83    10.9s Htbl parallel
[ ]   10    0    0   10 /   83    12.1s Htbl parallel
[ ]   11    0    0   11 /   83    13.1s Htbl parallel
[ ]   12    0    0   12 /   83    14.4s Htbl parallel
[ ]   13    0    0   13 /   83    15.9s Htbl parallel
[ ]   14    0    0   14 /   83    17.1s Htbl parallel
[ ]   15    0    0   15 /   83    18.4s Htbl parallel
[ ]   16    0    0   16 /   83    19.6s Htbl parallel
[ ]   17    0    0   17 /   83    20.5s Htbl parallel
[ ]   18    0    0   18 /   83    21.6s Htbl parallel
[ ]   19    0    0   19 /   83    23.0s Htbl parallel
[ ]   20    0    0   20 /   83    24.5s Htbl parallel
[ ]   21    0    0   21 /   83    25.8s Htbl parallel
[ ]   22    0    0   22 /   83    26.6s Htbl parallel
[ ]   23    0    0   23 /   83    27.9s Htbl parallel
[ ]   24    0    0   24 /   83    29.4s Htbl parallel
[ ]   25    0    0   25 /   83    30.7s Htbl parallel
[ ]   26    0    0   26 /   83    31.8s Htbl parallel
[ ]   27    0    0   27 /   83    32.7s Htbl parallel
[ ]   28    0    0   28 /   83    34.4s Htbl parallel
[ ]   29    0    0   29 /   83    36.2s Htbl parallel
[ ]   30    0    0   30 /   83    38.4s Htbl parallel
[ ]   31    0    0   31 /   83    40.5s Htbl parallel
[ ]   32    0    0   32 /   83    42.1s Htbl parallel
[ ]   33    0    0   33 /   83    45.0s Htbl parallel
[ ]   34    0    0   34 /   83    46.4s Htbl parallel
[ ]   35    0    0   35 /   83    47.5s Htbl parallel
[ ]   36    0    0   36 /   83    49.3s Htbl parallel
[ ]   37    0    0   37 /   83    53.4s Htbl parallel
[ ]   38    0    0   38 /   83    55.0s Htbl parallel
[ ]   39    0    0   39 /   83    56.6s Htbl parallel
[ ]   40    0    0   40 /   83    58.0s Htbl parallel
[ ]   41    0    0   41 /   83    59.4s Htbl parallel
[ ]   42    0    0   42 /   83    60.4s Htbl parallel
[ ]   43    0    0   43 /   83    61.7s Htbl parallel
[ ]   44    0    0   44 /   83    62.2s Htbl parallel
[ ]   45    0    0   45 /   83    63.0s Htbl parallel
[ ]   46    0    0   46 /   83    66.3s Htbl parallel
[ ]   47    0    0   47 /   83    67.1s Htbl parallel
[ ]   48    0    0   48 /   83    69.3s Htbl parallel
[ ]   49    0    0   49 /   83    70.3s Htbl parallel
[ ]   50    0    0   50 /   83    71.3s Htbl parallel
[ ]   51    0    0   51 /   83    72.6s Htbl parallel
[ ]   52    0    0   52 /   83    73.9s Htbl parallel
[ ]   53    0    0   53 /   83    75.4s Htbl parallel
[ ]   54    0    0   54 /   83    76.4s Htbl parallel
[ ]   55    0    0   55 /   83    77.3s Htbl parallel
[ ]   56    0    0   56 /   83    78.8s Htbl parallel
[ ]   57    0    0   57 /   83    80.4s Htbl parallel
[ ]   58    0    0   58 /   83    81.9s Htbl parallel
[ ]   59    0    0   59 /   83    83.5s Htbl parallel
[ ]   60    0    0   60 /   83    84.8s Htbl parallel
[ ]   61    0    0   61 /   83    86.0s Htbl parallel
[ ]   62    0    0   62 /   83    87.0s Htbl parallel
[ ]   63    0    0   63 /   83    88.4s Htbl parallel
[ ]   64    0    0   64 /   83    89.7s Htbl parallel
[ ]   65    0    0   65 /   83    90.8s Htbl parallel
[ ]   66    0    0   66 /   83    92.2s Htbl parallel
[ ]   67    0    0   67 /   83    93.6s Htbl parallel
[ ]   68    0    0   68 /   83    95.1s Htbl parallel
[ ]   69    0    0   69 /   83    96.2s Htbl parallel
[ ]   70    0    0   70 /   83    97.2s Htbl parallel
[ ]   71    0    0   71 /   83    98.0s Htbl parallel
[ ]   72    0    0   72 /   83    99.0s Htbl parallel
[ ]   73    0    0   73 /   83   100.0s Htbl parallel
[ ]   74    0    0   74 /   83   100.7s Htbl parallel
[ ]   75    0    0   75 /   83   101.6s Htbl parallel
[ ]   76    0    0   76 /   83   102.5s Htbl parallel
[ ]   77    0    0   77 /   83   103.5s Htbl parallel
[ ]   78    0    0   78 /   83   104.2s Htbl parallel
[ ]   79    0    0   79 /   83   105.2s Htbl parallel
[ ]   80    0    0   80 /   83   106.0s Htbl parallel
[ ]   81    0    0   81 /   83   107.0s Htbl parallel
[ ]   82    0    0   82 /   83   108.2s Htbl parallel
[ ]   83    0    0   83 /   83   109.2s Htbl parallel
[]   83    0    0   83 /   83   109.2s Htbl parallel
================================================================================
success (ran 2 tests)
(cd _build/default/bench && ./main.exe -brief 'Ref with Picos_sync.Mutex')
Ref with Picos_sync.Mutex:
  time per op/get (checked):
    305.17 ns
  ops over time/get (checked):
    3.28 M/s
  time per op/incr (checked):
    423.06 ns
  ops over time/incr (checked):
    2.36 M/s
  time per op/push & pop (checked):
    675.62 ns
  ops over time/push & pop (checked):
    1.48 M/s
  time per op/cas int (checked):
    977.70 ns
  ops over time/cas int (checked):
    1.02 M/s
  time per op/xchg int (checked):
    1019.61 ns
  ops over time/xchg int (checked):
    0.98 M/s
  time per op/swap (checked):
    1145.13 ns
  ops over time/swap (checked):
    0.87 M/s
  time per op/get (unchecked):
    54.02 ns
  ops over time/get (unchecked):
    18.51 M/s
  time per op/incr (unchecked):
    309.34 ns
  ops over time/incr (unchecked):
    3.23 M/s
  time per op/push & pop (unchecked):
    562.74 ns
  ops over time/push & pop (unchecked):
    1.78 M/s
  time per op/cas int (unchecked):
    245.05 ns
  ops over time/cas int (unchecked):
    4.08 M/s
  time per op/xchg int (unchecked):
    130.35 ns
  ops over time/xchg int (unchecked):
    7.67 M/s
  time per op/swap (unchecked):
    833.94 ns
  ops over time/swap (unchecked):
    1.20 M/s
(cd _build/default/test && ./test_sync_queue.exe)
random seed: 1240012149023024254
generated error fail pass / total     time test name

[ ]    0    0    0    0 /   32     0.0s Picos_std_sync.Queue sequential
[ ]    0    0    0    0 /   32     0.0s Picos_std_sync.Queue sequential (generating)
[]   32    0    0   32 /   32     0.0s Picos_std_sync.Queue sequential

[ ]    0    0    0    0 /   32     0.0s Picos_std_sync.Queue parallel
[ ]    1    0    0    1 /   32     1.5s Picos_std_sync.Queue parallel
[ ]    2    0    0    2 /   32     2.7s Picos_std_sync.Queue parallel
[ ]    3    0    0    3 /   32     3.4s Picos_std_sync.Queue parallel
[ ]    4    0    0    4 /   32     3.8s Picos_std_sync.Queue parallel
[ ]    5    0    0    5 /   32     3.9s Picos_std_sync.Queue parallel
[ ]    6    0    0    6 /   32     4.0s Picos_std_sync.Queue parallel
[ ]    7    0    0    7 /   32     4.2s Picos_std_sync.Queue parallel
[ ]    8    0    0    8 /   32     4.5s Picos_std_sync.Queue parallel
[ ]    9    0    0    9 /   32     4.6s Picos_std_sync.Queue parallel
[ ]   11    0    0   11 /   32     4.8s Picos_std_sync.Queue parallel
[ ]   12    0    0   12 /   32     4.9s Picos_std_sync.Queue parallel
[ ]   14    0    0   14 /   32     5.0s Picos_std_sync.Queue parallel
[ ]   16    0    0   16 /   32     5.2s Picos_std_sync.Queue parallel
[ ]   18    0    0   18 /   32     5.4s Picos_std_sync.Queue parallel
[ ]   20    0    0   20 /   32     5.7s Picos_std_sync.Queue parallel
[ ]   21    0    0   21 /   32     5.8s Picos_std_sync.Queue parallel
[ ]   23    0    0   23 /   32     6.0s Picos_std_sync.Queue parallel
[ ]   24    0    0   24 /   32     6.2s Picos_std_sync.Queue parallel
[ ]   25    0    0   25 /   32     6.4s Picos_std_sync.Queue parallel
[ ]   26    0    0   26 /   32     6.6s Picos_std_sync.Queue parallel
[ ]   27    0    0   27 /   32     7.1s Picos_std_sync.Queue parallel
[ ]   28    0    0   28 /   32     7.5s Picos_std_sync.Queue parallel
[ ]   29    0    0   29 /   32     7.9s Picos_std_sync.Queue parallel
[]   32    0    0   32 /   32     8.0s Picos_std_sync.Queue parallel
================================================================================
success (ran 2 tests)
random seed: 2696608881705123401
generated error fail pass / total     time test name

[ ]    0    0    0    0 /   64     0.0s Picos_std_sync.Queue sequential
[]   64    0    0   64 /   64     0.0s Picos_std_sync.Queue sequential

[ ]    0    0    0    0 /   64     0.0s Picos_std_sync.Queue parallel
[ ]    1    0    0    1 /   64     0.1s Picos_std_sync.Queue parallel
[ ]    4    0    0    4 /   64     0.2s Picos_std_sync.Queue parallel
[ ]    5    0    0    5 /   64     0.3s Picos_std_sync.Queue parallel
[ ]    6    0    0    6 /   64     0.5s Picos_std_sync.Queue parallel
[ ]   10    0    0   10 /   64     0.6s Picos_std_sync.Queue parallel
[ ]   13    0    0   13 /   64     0.7s Picos_std_sync.Queue parallel
[ ]   17    0    0   17 /   64     0.8s Picos_std_sync.Queue parallel
[ ]   20    0    0   20 /   64     0.9s Picos_std_sync.Queue parallel
[ ]   22    0    0   22 /   64     1.1s Picos_std_sync.Queue parallel
[ ]   26    0    0   26 /   64     1.4s Picos_std_sync.Queue parallel
[ ]   32    0    0   32 /   64     1.6s Picos_std_sync.Queue parallel
[ ]   37    0    0   37 /   64     1.8s Picos_std_sync.Queue parallel
[ ]   41    0    0   41 /   64     1.9s Picos_std_sync.Queue parallel
[ ]   43    0    0   43 /   64     2.0s Picos_std_sync.Queue parallel
[ ]   49    0    0   49 /   64     2.1s Picos_std_sync.Queue parallel
[ ]   55    0    0   55 /   64     2.3s Picos_std_sync.Queue parallel
[ ]   56    0    0   56 /   64     2.7s Picos_std_sync.Queue parallel
[ ]   57    0    0   57 /   64     2.9s Picos_std_sync.Queue parallel
[ ]   59    0    0   59 /   64     3.0s Picos_std_sync.Queue parallel
[ ]   61    0    0   61 /   64     3.2s Picos_std_sync.Queue parallel
[ ]   62    0    0   62 /   64     3.6s Picos_std_sync.Queue parallel
[ ]   63    0    0   63 /   64     4.2s Picos_std_sync.Queue parallel
[ ]   64    0    0   64 /   64     4.6s Picos_std_sync.Queue parallel
[]   64    0    0   64 /   64     4.6s Picos_std_sync.Queue parallel
================================================================================
success (ran 2 tests)
random seed: 4354606073302329861
generated error fail pass / total     time test name

[ ]    0    0    0    0 /  128     0.0s Picos_std_sync.Queue sequential
[]  128    0    0  128 /  128     0.0s Picos_std_sync.Queue sequential

[ ]    0    0    0    0 /  128     0.0s Picos_std_sync.Queue parallel
[ ]    1    0    0    1 /  128     0.4s Picos_std_sync.Queue parallel
[ ]    2    0    0    2 /  128     2.1s Picos_std_sync.Queue parallel
[ ]    3    0    0    3 /  128     3.3s Picos_std_sync.Queue parallel
[ ]    4    0    0    4 /  128     4.8s Picos_std_sync.Queue parallel
[ ]    5    0    0    5 /  128     6.0s Picos_std_sync.Queue parallel
[ ]    6    0    0    6 /  128     7.3s Picos_std_sync.Queue parallel
[ ]    7    0    0    7 /  128     8.6s Picos_std_sync.Queue parallel
[ ]    8    0    0    8 /  128     9.6s Picos_std_sync.Queue parallel
[ ]    9    0    0    9 /  128    10.8s Picos_std_sync.Queue parallel
[ ]   10    0    0   10 /  128    12.0s Picos_std_sync.Queue parallel
[ ]   11    0    0   11 /  128    13.1s Picos_std_sync.Queue parallel
[ ]   12    0    0   12 /  128    14.2s Picos_std_sync.Queue parallel
[ ]   13    0    0   13 /  128    15.3s Picos_std_sync.Queue parallel
[ ]   14    0    0   14 /  128    16.3s Picos_std_sync.Queue parallel
[ ]   15    0    0   15 /  128    16.9s Picos_std_sync.Queue parallel
[ ]   16    0    0   16 /  128    17.8s Picos_std_sync.Queue parallel
[ ]   17    0    0   17 /  128    19.2s Picos_std_sync.Queue parallel
[ ]   18    0    0   18 /  128    20.4s Picos_std_sync.Queue parallel
[ ]   19    0    0   19 /  128    21.5s Picos_std_sync.Queue parallel
[ ]   20    0    0   20 /  128    22.9s Picos_std_sync.Queue parallel
[ ]   21    0    0   21 /  128    24.4s Picos_std_sync.Queue parallel
[ ]   22    0    0   22 /  128    25.5s Picos_std_sync.Queue parallel
[ ]   23    0    0   23 /  128    26.6s Picos_std_sync.Queue parallel
[ ]   24    0    0   24 /  128    27.9s Picos_std_sync.Queue parallel
[ ]   25    0    0   25 /  128    28.8s Picos_std_sync.Queue parallel
[ ]   26    0    0   26 /  128    29.7s Picos_std_sync.Queue parallel
[ ]   27    0    0   27 /  128    30.9s Picos_std_sync.Queue parallel
[ ]   28    0    0   28 /  128    31.9s Picos_std_sync.Queue parallel
[ ]   29    0    0   29 /  128    32.9s Picos_std_sync.Queue parallel
[ ]   30    0    0   30 /  128    34.0s Picos_std_sync.Queue parallel
[ ]   31    0    0   31 /  128    35.2s Picos_std_sync.Queue parallel
[ ]   32    0    0   32 /  128    37.0s Picos_std_sync.Queue parallel
[ ]   33    0    0   33 /  128    38.4s Picos_std_sync.Queue parallel
[ ]   34    0    0   34 /  128    39.5s Picos_std_sync.Queue parallel
[ ]   35    0    0   35 /  128    40.6s Picos_std_sync.Queue parallel
[ ]   36    0    0   36 /  128    41.6s Picos_std_sync.Queue parallel
[ ]   37    0    0   37 /  128    43.0s Picos_std_sync.Queue parallel
[ ]   38    0    0   38 /  128    44.1s Picos_std_sync.Queue parallel
[ ]   39    0    0   39 /  128    45.2s Picos_std_sync.Queue parallel
[ ]   40    0    0   40 /  128    46.3s Picos_std_sync.Queue parallel
[ ]   41    0    0   41 /  128    48.0s Picos_std_sync.Queue parallel
[ ]   42    0    0   42 /  128    49.1s Picos_std_sync.Queue parallel
[ ]   43    0    0   43 /  128    50.2s Picos_std_sync.Queue parallel
[ ]   44    0    0   44 /  128    51.7s Picos_std_sync.Queue parallel
[ ]   45    0    0   45 /  128    52.9s Picos_std_sync.Queue parallel
[ ]   46    0    0   46 /  128    54.3s Picos_std_sync.Queue parallel
[ ]   47    0    0   47 /  128    56.1s Picos_std_sync.Queue parallel
[ ]   48    0    0   48 /  128    58.2s Picos_std_sync.Queue parallel
[ ]   49    0    0   49 /  128    60.0s Picos_std_sync.Queue parallel
[ ]   50    0    0   50 /  128    61.7s Picos_std_sync.Queue parallel
[ ]   51    0    0   51 /  128    63.8s Picos_std_sync.Queue parallel
[ ]   52    0    0   52 /  128    65.0s Picos_std_sync.Queue parallel
[ ]   53    0    0   53 /  128    66.5s Picos_std_sync.Queue parallel
[ ]   54    0    0   54 /  128    67.7s Picos_std_sync.Queue parallel
[ ]   55    0    0   55 /  128    72.5s Picos_std_sync.Queue parallel
[ ]   56    0    0   56 /  128    74.6s Picos_std_sync.Queue parallel
[ ]   57    0    0   57 /  128    76.0s Picos_std_sync.Queue parallel
[ ]   58    0    0   58 /  128    77.7s Picos_std_sync.Queue parallel
[ ]   59    0    0   59 /  128    78.8s Picos_std_sync.Queue parallel
[ ]   60    0    0   60 /  128    80.2s Picos_std_sync.Queue parallel
[ ]   61    0    0   61 /  128    81.3s Picos_std_sync.Queue parallel
[ ]   62    0    0   62 /  128    82.5s Picos_std_sync.Queue parallel
[ ]   63    0    0   63 /  128    84.8s Picos_std_sync.Queue parallel
[ ]   64    0    0   64 /  128    85.9s Picos_std_sync.Queue parallel
[ ]   65    0    0   65 /  128    87.3s Picos_std_sync.Queue parallel
[ ]   66    0    0   66 /  128    89.9s Picos_std_sync.Queue parallel
[ ]   67    0    0   67 /  128    91.0s Picos_std_sync.Queue parallel
[ ]   68    0    0   68 /  128    92.0s Picos_std_sync.Queue parallel
[ ]   69    0    0   69 /  128    92.8s Picos_std_sync.Queue parallel
[ ]   70    0    0   70 /  128    93.4s Picos_std_sync.Queue parallel
[ ]   71    0    0   71 /  128    94.3s Picos_std_sync.Queue parallel
[ ]   72    0    0   72 /  128    95.1s Picos_std_sync.Queue parallel
[ ]   73    0    0   73 /  128    96.0s Picos_std_sync.Queue parallel
[ ]   74    0    0   74 /  128    96.8s Picos_std_sync.Queue parallel
[ ]   75    0    0   75 /  128    97.6s Picos_std_sync.Queue parallel
[ ]   76    0    0   76 /  128    98.9s Picos_std_sync.Queue parallel
[ ]   77    0    0   77 /  128   101.2s Picos_std_sync.Queue parallel
[ ]   78    0    0   78 /  128   102.4s Picos_std_sync.Queue parallel
[ ]   79    0    0   79 /  128   103.3s Picos_std_sync.Queue parallel
[ ]   80    0    0   80 /  128   104.2s Picos_std_sync.Queue parallel
[ ]   81    0    0   81 /  128   104.9s Picos_std_sync.Queue parallel
[ ]   82    0    0   82 /  128   107.0s Picos_std_sync.Queue parallel
[ ]   83    0    0   83 /  128   108.0s Picos_std_sync.Queue parallel
[ ]   84    0    0   84 /  128   108.7s Picos_std_sync.Queue parallel
[ ]   85    0    0   85 /  128   109.4s Picos_std_sync.Queue parallel
[ ]   86    0    0   86 /  128   110.0s Picos_std_sync.Queue parallel
[ ]   87    0    0   87 /  128   110.8s Picos_std_sync.Queue parallel
[ ]   88    0    0   88 /  128   111.8s Picos_std_sync.Queue parallel
[ ]   89    0    0   89 /  128   112.6s Picos_std_sync.Queue parallel
[ ]   90    0    0   90 /  128   113.5s Picos_std_sync.Queue parallel
[ ]   91    0    0   91 /  128   114.5s Picos_std_sync.Queue parallel
[ ]   92    0    0   92 /  128   115.0s Picos_std_sync.Queue parallel
[ ]   93    0    0   93 /  128   115.8s Picos_std_sync.Queue parallel
[ ]   94    0    0   94 /  128   116.3s Picos_std_sync.Queue parallel
[ ]   95    0    0   95 /  128   117.1s Picos_std_sync.Queue parallel
[ ]   96    0    0   96 /  128   118.0s Picos_std_sync.Queue parallel
[ ]   97    0    0   97 /  128   119.0s Picos_std_sync.Queue parallel
[ ]   98    0    0   98 /  128   119.7s Picos_std_sync.Queue parallel
[ ]   99    0    0   99 /  128   120.3s Picos_std_sync.Queue parallel
[ ]  100    0    0  100 /  128   120.9s Picos_std_sync.Queue parallel
[ ]  101    0    0  101 /  128   121.6s Picos_std_sync.Queue parallel
[ ]  102    0    0  102 /  128   122.6s Picos_std_sync.Queue parallel
[ ]  103    0    0  103 /  128   123.3s Picos_std_sync.Queue parallel
[ ]  104    0    0  104 /  128   124.4s Picos_std_sync.Queue parallel
[ ]  105    0    0  105 /  128   125.1s Picos_std_sync.Queue parallel
[ ]  106    0    0  106 /  128   126.1s Picos_std_sync.Queue parallel
[ ]  107    0    0  107 /  128   127.0s Picos_std_sync.Queue parallel
[ ]  108    0    0  108 /  128   127.8s Picos_std_sync.Queue parallel
[ ]  109    0    0  109 /  128   128.6s Picos_std_sync.Queue parallel
[ ]  110    0    0  110 /  128   129.4s Picos_std_sync.Queue parallel
[ ]  111    0    0  111 /  128   130.3s Picos_std_sync.Queue parallel
[ ]  112    0    0  112 /  128   131.7s Picos_std_sync.Queue parallel
[ ]  113    0    0  113 /  128   132.4s Picos_std_sync.Queue parallel
[ ]  114    0    0  114 /  128   133.3s Picos_std_sync.Queue parallel
[ ]  115    0    0  115 /  128   134.1s Picos_std_sync.Queue parallel
[ ]  116    0    0  116 /  128   134.9s Picos_std_sync.Queue parallel
[ ]  117    0    0  117 /  128   135.6s Picos_std_sync.Queue parallel
[ ]  118    0    0  118 /  128   136.3s Picos_std_sync.Queue parallel
[ ]  119    0    0  119 /  128   137.2s Picos_std_sync.Queue parallel
[ ]  120    0    0  120 /  128   138.3s Picos_std_sync.Queue parallel
[ ]  121    0    0  121 /  128   138.9s Picos_std_sync.Queue parallel
[ ]  122    0    0  122 /  128   139.5s Picos_std_sync.Queue parallel
[ ]  123    0    0  123 /  128   140.3s Picos_std_sync.Queue parallel
[ ]  124    0    0  124 /  128   140.9s Picos_std_sync.Queue parallel
[ ]  125    0    0  125 /  128   141.6s Picos_std_sync.Queue parallel
[ ]  126    0    0  126 /  128   142.6s Picos_std_sync.Queue parallel
[ ]  127    0    0  127 /  128   143.2s Picos_std_sync.Queue parallel
[ ]  128    0    0  128 /  128   144.4s Picos_std_sync.Queue parallel
[]  128    0    0  128 /  128   144.4s Picos_std_sync.Queue parallel
================================================================================
success (ran 2 tests)
(cd _build/default/test && ./test_mpmcq.exe)
random seed: 4508317530146749691
generated error fail pass / total     time test name

[ ]    0    0    0    0 /   32     0.0s Mpmcq sequential
[ ]    0    0    0    0 /   32     0.0s Mpmcq sequential (generating)
[]   32    0    0   32 /   32     0.0s Mpmcq sequential

[ ]    0    0    0    0 /   32     0.0s Mpmcq parallel
[ ]    1    0    0    1 /   32     1.6s Mpmcq parallel
[ ]    2    0    0    2 /   32     2.7s Mpmcq parallel
[ ]    3    0    0    3 /   32     3.2s Mpmcq parallel
[ ]    4    0    0    4 /   32     3.7s Mpmcq parallel
[ ]    5    0    0    5 /   32     3.8s Mpmcq parallel
[ ]    6    0    0    6 /   32     4.0s Mpmcq parallel
[ ]    7    0    0    7 /   32     4.3s Mpmcq parallel
[ ]    8    0    0    8 /   32     4.5s Mpmcq parallel
[ ]    9    0    0    9 /   32     4.6s Mpmcq parallel
[ ]   11    0    0   11 /   32     4.8s Mpmcq parallel
[ ]   12    0    0   12 /   32     4.9s Mpmcq parallel
[ ]   14    0    0   14 /   32     5.1s Mpmcq parallel
[ ]   16    0    0   16 /   32     5.4s Mpmcq parallel
[ ]   18    0    0   18 /   32     5.6s Mpmcq parallel
[ ]   19    0    0   19 /   32     5.7s Mpmcq parallel
[ ]   20    0    0   20 /   32     5.9s Mpmcq parallel
[ ]   22    0    0   22 /   32     6.2s Mpmcq parallel
[ ]   23    0    0   23 /   32     6.6s Mpmcq parallel
[ ]   24    0    0   24 /   32     7.4s Mpmcq parallel
[ ]   25    0    0   25 /   32     7.9s Mpmcq parallel
[ ]   28    0    0   28 /   32     8.0s Mpmcq parallel
[ ]   32    0    0   32 /   32     8.3s Mpmcq parallel
[]   32    0    0   32 /   32     8.3s Mpmcq parallel
================================================================================
success (ran 2 tests)
random seed: 882727670021807317
generated error fail pass / total     time test name

[ ]    0    0    0    0 /   64     0.0s Mpmcq sequential
[]   64    0    0   64 /   64     0.0s Mpmcq sequential

[ ]    0    0    0    0 /   64     0.0s Mpmcq parallel
[ ]    4    0    0    4 /   64     0.1s Mpmcq parallel
[ ]    5    0    0    5 /   64     0.3s Mpmcq parallel
[ ]    8    0    0    8 /   64     0.4s Mpmcq parallel
[ ]   11    0    0   11 /   64     0.5s Mpmcq parallel
[ ]   14    0    0   14 /   64     0.6s Mpmcq parallel
[ ]   20    0    0   20 /   64     0.7s Mpmcq parallel
[ ]   23    0    0   23 /   64     1.0s Mpmcq parallel
[ ]   26    0    0   26 /   64     1.3s Mpmcq parallel
[ ]   30    0    0   30 /   64     1.4s Mpmcq parallel
[ ]   34    0    0   34 /   64     1.7s Mpmcq parallel
[ ]   38    0    0   38 /   64     1.8s Mpmcq parallel
[ ]   41    0    0   41 /   64     1.9s Mpmcq parallel
[ ]   44    0    0   44 /   64     2.0s Mpmcq parallel
[ ]   48    0    0   48 /   64     2.5s Mpmcq parallel
[ ]   50    0    0   50 /   64     2.7s Mpmcq parallel
[ ]   52    0    0   52 /   64     3.1s Mpmcq parallel
[ ]   53    0    0   53 /   64     3.7s Mpmcq parallel
[ ]   54    0    0   54 /   64     4.2s Mpmcq parallel
[ ]   55    0    0   55 /   64     4.5s Mpmcq parallel
[ ]   56    0    0   56 /   64     6.4s Mpmcq parallel
[ ]   57    0    0   57 /   64     8.4s Mpmcq parallel
[ ]   58    0    0   58 /   64     9.7s Mpmcq parallel
[ ]   59    0    0   59 /   64    11.3s Mpmcq parallel
[ ]   60    0    0   60 /   64    12.4s Mpmcq parallel
[ ]   61    0    0   61 /   64    13.4s Mpmcq parallel
[ ]   62    0    0   62 /   64    14.3s Mpmcq parallel
[ ]   63    0    0   63 /   64    15.8s Mpmcq parallel
[ ]   64    0    0   64 /   64    17.1s Mpmcq parallel
[]   64    0    0   64 /   64    17.1s Mpmcq parallel
================================================================================
success (ran 2 tests)
random seed: 1903028710763313675
generated error fail pass / total     time test name

[ ]    0    0    0    0 /  128     0.0s Mpmcq sequential
[]  128    0    0  128 /  128     0.0s Mpmcq sequential

[ ]    0    0    0    0 /  128     0.0s Mpmcq parallel
[ ]    1    0    0    1 /  128     1.0s Mpmcq parallel
[ ]    2    0    0    2 /  128     1.8s Mpmcq parallel
[ ]    3    0    0    3 /  128     2.9s Mpmcq parallel
[ ]    4    0    0    4 /  128     3.8s Mpmcq parallel
[ ]    5    0    0    5 /  128     4.4s Mpmcq parallel
[ ]    6    0    0    6 /  128     5.3s Mpmcq parallel
[ ]    7    0    0    7 /  128     6.4s Mpmcq parallel
[ ]    8    0    0    8 /  128     7.6s Mpmcq parallel
[ ]    9    0    0    9 /  128     8.6s Mpmcq parallel
[ ]   10    0    0   10 /  128     9.5s Mpmcq parallel
[ ]   11    0    0   11 /  128    10.5s Mpmcq parallel
[ ]   12    0    0   12 /  128    11.8s Mpmcq parallel
[ ]   13    0    0   13 /  128    13.1s Mpmcq parallel
[ ]   14    0    0   14 /  128    14.0s Mpmcq parallel
[ ]   15    0    0   15 /  128    15.3s Mpmcq parallel
[ ]   16    0    0   16 /  128    16.3s Mpmcq parallel
[ ]   17    0    0   17 /  128    17.3s Mpmcq parallel
[ ]   18    0    0   18 /  128    18.3s Mpmcq parallel
[ ]   19    0    0   19 /  128    19.2s Mpmcq parallel
[ ]   20    0    0   20 /  128    21.5s Mpmcq parallel
[ ]   21    0    0   21 /  128    23.2s Mpmcq parallel
[ ]   22    0    0   22 /  128    24.4s Mpmcq parallel
[ ]   23    0    0   23 /  128    25.8s Mpmcq parallel
[ ]   24    0    0   24 /  128    27.3s Mpmcq parallel
[ ]   25    0    0   25 /  128    28.7s Mpmcq parallel
[ ]   26    0    0   26 /  128    30.8s Mpmcq parallel
[ ]   27    0    0   27 /  128    32.0s Mpmcq parallel
[ ]   28    0    0   28 /  128    34.0s Mpmcq parallel
[ ]   29    0    0   29 /  128    35.8s Mpmcq parallel
[ ]   30    0    0   30 /  128    37.2s Mpmcq parallel
[ ]   31    0    0   31 /  128    38.4s Mpmcq parallel
[ ]   32    0    0   32 /  128    39.1s Mpmcq parallel
[ ]   33    0    0   33 /  128    40.8s Mpmcq parallel
[ ]   34    0    0   34 /  128    42.1s Mpmcq parallel
[ ]   35    0    0   35 /  128    45.5s Mpmcq parallel
[ ]   36    0    0   36 /  128    47.9s Mpmcq parallel
[ ]   37    0    0   37 /  128    50.0s Mpmcq parallel
[ ]   38    0    0   38 /  128    51.8s Mpmcq parallel
[ ]   39    0    0   39 /  128    53.4s Mpmcq parallel
[ ]   40    0    0   40 /  128    54.7s Mpmcq parallel
[ ]   41    0    0   41 /  128    56.0s Mpmcq parallel
[ ]   42    0    0   42 /  128    57.5s Mpmcq parallel
[ ]   43    0    0   43 /  128    58.7s Mpmcq parallel
[ ]   44    0    0   44 /  128    61.0s Mpmcq parallel
[ ]   45    0    0   45 /  128    62.5s Mpmcq parallel
[ ]   46    0    0   46 /  128    63.8s Mpmcq parallel
[ ]   47    0    0   47 /  128    65.2s Mpmcq parallel
[ ]   48    0    0   48 /  128    66.5s Mpmcq parallel
[ ]   49    0    0   49 /  128    67.7s Mpmcq parallel
[ ]   50    0    0   50 /  128    68.9s Mpmcq parallel
[ ]   51    0    0   51 /  128    69.7s Mpmcq parallel
[ ]   52    0    0   52 /  128    70.6s Mpmcq parallel
[ ]   53    0    0   53 /  128    71.7s Mpmcq parallel
[ ]   54    0    0   54 /  128    72.7s Mpmcq parallel
[ ]   55    0    0   55 /  128    73.7s Mpmcq parallel
[ ]   56    0    0   56 /  128    75.1s Mpmcq parallel
[ ]   57    0    0   57 /  128    76.0s Mpmcq parallel
[ ]   58    0    0   58 /  128    77.3s Mpmcq parallel
[ ]   59    0    0   59 /  128    78.2s Mpmcq parallel
[ ]   60    0    0   60 /  128    79.0s Mpmcq parallel
[ ]   61    0    0   61 /  128    79.9s Mpmcq parallel
[ ]   62    0    0   62 /  128    80.6s Mpmcq parallel
[ ]   63    0    0   63 /  128    81.2s Mpmcq parallel
[ ]   64    0    0   64 /  128    82.5s Mpmcq parallel
[ ]   65    0    0   65 /  128    83.7s Mpmcq parallel
[ ]   66    0    0   66 /  128    85.0s Mpmcq parallel
[ ]   67    0    0   67 /  128    86.3s Mpmcq parallel
[ ]   68    0    0   68 /  128    87.6s Mpmcq parallel
[ ]   69    0    0   69 /  128    89.2s Mpmcq parallel
[ ]   70    0    0   70 /  128    91.0s Mpmcq parallel
[ ]   71    0    0   71 /  128    91.8s Mpmcq parallel
[ ]   72    0    0   72 /  128    92.8s Mpmcq parallel
[ ]   73    0    0   73 /  128    93.8s Mpmcq parallel
[ ]   74    0    0   74 /  128    94.7s Mpmcq parallel
[ ]   75    0    0   75 /  128    95.8s Mpmcq parallel
[ ]   76    0    0   76 /  128    96.9s Mpmcq parallel
[ ]   77    0    0   77 /  128    97.8s Mpmcq parallel
[ ]   78    0    0   78 /  128    98.9s Mpmcq parallel
[ ]   79    0    0   79 /  128   100.2s Mpmcq parallel
[ ]   80    0    0   80 /  128   101.6s Mpmcq parallel
[ ]   81    0    0   81 /  128   102.2s Mpmcq parallel
[ ]   82    0    0   82 /  128   103.0s Mpmcq parallel
[ ]   83    0    0   83 /  128   103.7s Mpmcq parallel
[ ]   84    0    0   84 /  128   104.9s Mpmcq parallel
[ ]   85    0    0   85 /  128   105.8s Mpmcq parallel
[ ]   86    0    0   86 /  128   107.0s Mpmcq parallel
[ ]   87    0    0   87 /  128   107.8s Mpmcq parallel
[ ]   88    0    0   88 /  128   108.6s Mpmcq parallel
[ ]   89    0    0   89 /  128   109.3s Mpmcq parallel
[ ]   90    0    0   90 /  128   110.4s Mpmcq parallel
[ ]   91    0    0   91 /  128   111.6s Mpmcq parallel
[ ]   92    0    0   92 /  128   112.2s Mpmcq parallel
[ ]   93    0    0   93 /  128   113.2s Mpmcq parallel
[ ]   94    0    0   94 /  128   114.0s Mpmcq parallel
[ ]   95    0    0   95 /  128   114.9s Mpmcq parallel
[ ]   96    0    0   96 /  128   115.7s Mpmcq parallel
[ ]   97    0    0   97 /  128   116.4s Mpmcq parallel
[ ]   98    0    0   98 /  128   117.1s Mpmcq parallel
[ ]   99    0    0   99 /  128   118.1s Mpmcq parallel
[ ]  100    0    0  100 /  128   119.1s Mpmcq parallel
[ ]  101    0    0  101 /  128   120.3s Mpmcq parallel
[ ]  102    0    0  102 /  128   121.1s Mpmcq parallel
[ ]  103    0    0  103 /  128   122.4s Mpmcq parallel
[ ]  104    0    0  104 /  128   123.7s Mpmcq parallel
[ ]  105    0    0  105 /  128   124.9s Mpmcq parallel
[ ]  106    0    0  106 /  128   125.8s Mpmcq parallel
[ ]  107    0    0  107 /  128   126.6s Mpmcq parallel
[ ]  108    0    0  108 /  128   127.4s Mpmcq parallel
[ ]  109    0    0  109 /  128   128.3s Mpmcq parallel
[ ]  110    0    0  110 /  128   129.2s Mpmcq parallel
[ ]  111    0    0  111 /  128   130.6s Mpmcq parallel
[ ]  112    0    0  112 /  128   131.8s Mpmcq parallel
[ ]  113    0    0  113 /  128   132.8s Mpmcq parallel
[ ]  114    0    0  114 /  128   134.3s Mpmcq parallel
[ ]  115    0    0  115 /  128   135.3s Mpmcq parallel
[ ]  116    0    0  116 /  128   136.4s Mpmcq parallel
[ ]  117    0    0  117 /  128   137.6s Mpmcq parallel
[ ]  118    0    0  118 /  128   140.1s Mpmcq parallel
[ ]  119    0    0  119 /  128   141.8s Mpmcq parallel
[ ]  120    0    0  120 /  128   142.9s Mpmcq parallel
[ ]  121    0    0  121 /  128   144.3s Mpmcq parallel
[ ]  122    0    0  122 /  128   145.4s Mpmcq parallel
[ ]  123    0    0  123 /  128   147.3s Mpmcq parallel
[ ]  124    0    0  124 /  128   150.0s Mpmcq parallel
[ ]  125    0    0  125 /  128   150.9s Mpmcq parallel
[ ]  126    0    0  126 /  128   152.9s Mpmcq parallel
[ ]  127    0    0  127 /  128   154.1s Mpmcq parallel
[ ]  128    0    0  128 /  128   155.5s Mpmcq parallel
[]  128    0    0  128 /  128   155.5s Mpmcq parallel
================================================================================
success (ran 2 tests)
(cd _build/default/bench && ./main.exe -brief Picos_mpmcq)
Picos_mpmcq:
  time per message/one domain:
    2433.70 ns
  messages over time/one domain:
    0.41 M/s
  time per message/1 nb adder, 1 nb taker:
    278.78 ns
  messages over time/1 nb adder, 1 nb taker:
    7.17 M/s
  time per message/1 nb adder, 2 nb takers:
    451.79 ns
  messages over time/1 nb adder, 2 nb takers:
    6.64 M/s
  time per message/1 nb adder, 4 nb takers:
    3706.38 ns
  messages over time/1 nb adder, 4 nb takers:
    1.35 M/s
  time per message/2 nb adders, 1 nb taker:
    440.10 ns
  messages over time/2 nb adders, 1 nb taker:
    6.82 M/s
  time per message/2 nb adders, 2 nb takers:
    2909.11 ns
  messages over time/2 nb adders, 2 nb takers:
    1.37 M/s
  time per message/2 nb adders, 4 nb takers:
    8559.03 ns
  messages over time/2 nb adders, 4 nb takers:
    0.70 M/s
  time per message/4 nb adders, 1 nb taker:
    2807.03 ns
  messages over time/4 nb adders, 1 nb taker:
    1.78 M/s
  time per message/4 nb adders, 2 nb takers:
    2974.33 ns
  messages over time/4 nb adders, 2 nb takers:
    2.02 M/s
  time per message/4 nb adders, 4 nb takers:
    29182.94 ns
  messages over time/4 nb adders, 4 nb takers:
    0.27 M/s
(cd _build/default/bench && ./main.exe -brief Picos_mpscq)
Picos_mpscq:
  time per message/one domain:
    83.20 ns
  messages over time/one domain:
    12.02 M/s
  time per message/1 nb adder, 1 nb taker:
    106.70 ns
  messages over time/1 nb adder, 1 nb taker:
    18.74 M/s
  time per message/2 nb adders, 1 nb taker:
    300.42 ns
  messages over time/2 nb adders, 1 nb taker:
    9.99 M/s
  time per message/4 nb adders, 1 nb taker:
    4037.89 ns
  messages over time/4 nb adders, 1 nb taker:
    1.24 M/s
(cd _build/default/bench && ./main.exe -brief Picos_htbl)
Picos_htbl:
  time per operation/1 worker, 10% reads:
    171.17 ns
  operations over time/1 worker, 10% reads:
    5.84 M/s
  time per operation/1 worker, 50% reads:
    200.43 ns
  operations over time/1 worker, 50% reads:
    4.99 M/s
  time per operation/1 worker, 90% reads:
    253.25 ns
  operations over time/1 worker, 90% reads:
    3.95 M/s
  time per operation/2 workers, 10% reads:
    490.26 ns
  operations over time/2 workers, 10% reads:
    4.08 M/s
  time per operation/2 workers, 50% reads:
    355.22 ns
  operations over time/2 workers, 50% reads:
    5.63 M/s
  time per operation/2 workers, 90% reads:
    318.72 ns
  operations over time/2 workers, 90% reads:
    6.28 M/s
  time per operation/4 workers, 10% reads:
    1307.37 ns
  operations over time/4 workers, 10% reads:
    3.06 M/s
  time per operation/4 workers, 50% reads:
    779.24 ns
  operations over time/4 workers, 50% reads:
    5.13 M/s
  time per operation/4 workers, 90% reads:
    340.45 ns
  operations over time/4 workers, 90% reads:
    11.75 M/s
  time per operation/8 workers, 10% reads:
    1423.86 ns
  operations over time/8 workers, 10% reads:
    5.62 M/s
  time per operation/8 workers, 50% reads:
    651.74 ns
  operations over time/8 workers, 50% reads:
    12.27 M/s
  time per operation/8 workers, 90% reads:
    267.73 ns
  operations over time/8 workers, 90% reads:
    29.88 M/s
(cd _build/default/bench && ./main.exe -brief Picos_stdio)
Picos_stdio:
  time per non-blocking read/1 worker:
    2289.39 ns
  non-blocking reads over time/1 worker:
    0.44 M/s
  time per non-blocking read/2 workers:
    10439.83 ns
  non-blocking reads over time/2 workers:
    0.19 M/s
  time per non-blocking read/4 workers:
    4891.22 ns
  non-blocking reads over time/4 workers:
    0.82 M/s
  time per blocking read/1 worker:
    4978.58 ns
  blocking reads over time/1 worker:
    0.20 M/s
  time per blocking read/2 workers:
    11468.04 ns
  blocking reads over time/2 workers:
    0.17 M/s
  time per blocking read/4 workers:
    20411.67 ns
  blocking reads over time/4 workers:
    0.20 M/s
(cd _build/default/bench && ./main.exe -brief 'Picos_sync Stream')
Picos_sync Stream:
  time per message/one domain:
    307.45 ns
  messages over time/one domain:
    3.25 M/s
  time per message/1 nb pusher, 1 nb reader:
    629.87 ns
  messages over time/1 nb pusher, 1 nb reader:
    3.18 M/s
  time per message/2 nb pushers, 1 nb reader:
    1028.88 ns
  messages over time/2 nb pushers, 1 nb reader:
    2.92 M/s
  time per message/4 nb pushers, 1 nb reader:
    2113.60 ns
  messages over time/4 nb pushers, 1 nb reader:
    2.37 M/s
(cd _build/default/bench && ./main.exe -brief Fib)
Fib:
  time per spawn/1 rando, fib 20:
    3455.02 ns
  spawns over time/1 rando, fib 20:
    0.29 M/s
  time per spawn/2 randos, fib 20:
    5220.80 ns
  spawns over time/2 randos, fib 20:
    0.38 M/s
  time per spawn/4 randos, fib 20:
    24440.06 ns
  spawns over time/4 randos, fib 20:
    0.16 M/s
  time per spawn/8 randos, fib 20:
    188258.47 ns
  spawns over time/8 randos, fib 20:
    0.04 M/s
  time per spawn/1 mfifo, fib 20:
    4539.52 ns
  spawns over time/1 mfifo, fib 20:
    0.22 M/s
  time per spawn/2 mfifos, fib 20:
    2671.97 ns
  spawns over time/2 mfifos, fib 20:
    0.75 M/s
  time per spawn/4 mfifos, fib 20:
    2167.39 ns
  spawns over time/4 mfifos, fib 20:
    1.85 M/s
  time per spawn/8 mfifos, fib 20:
    9163.08 ns
  spawns over time/8 mfifos, fib 20:
    0.87 M/s
(cd _build/default/bench && ./main.exe -brief 'Picos binaries')
Picos binaries:
  binary size/picos:
    85.07 kB
  binary size/picos.domain:
    3.63 kB
  binary size/picos.thread:
    3.07 kB
  binary size/picos_aux.htbl:
    42.82 kB
  binary size/picos_aux.mpmcq:
    15.25 kB
  binary size/picos_aux.mpscq:
    17.74 kB
  binary size/picos_aux.rc:
    16.09 kB
  binary size/picos_lwt:
    25.00 kB
  binary size/picos_lwt.unix:
    13.77 kB
  binary size/picos_mux.fifo:
    26.70 kB
  binary size/picos_mux.multififo:
    57.04 kB
  binary size/picos_mux.random:
    48.06 kB
  binary size/picos_mux.thread:
    21.17 kB
  binary size/picos_std.event:
    21.37 kB
  binary size/picos_std.finally:
    18.09 kB
  binary size/picos_std.structured:
    76.75 kB
  binary size/picos_std.sync:
    124.75 kB
  binary size/picos_io:
    107.52 kB
  binary size/picos_io.fd:
    9.00 kB
  binary size/picos_io.select:
    60.33 kB
  binary size/picos_io_cohttp:
    44.10 kB
(cd _build/default/bench && ./main.exe -brief 'Bounded_q with Picos_sync')
Bounded_q with Picos_sync:
  time per message/one domain:
    148.65 ns
  messages over time/one domain:
    6.73 M/s
  time per message/1 adder, 1 taker:
    12650.80 ns
  messages over time/1 adder, 1 taker:
    0.16 M/s
  time per message/1 adder, 2 takers:
    10523.68 ns
  messages over time/1 adder, 2 takers:
    0.29 M/s
  time per message/1 adder, 4 takers:
    129026.36 ns
  messages over time/1 adder, 4 takers:
    0.04 M/s
  time per message/2 adders, 1 taker:
    17232.34 ns
  messages over time/2 adders, 1 taker:
    0.17 M/s
  time per message/2 adders, 2 takers:
    77919.46 ns
  messages over time/2 adders, 2 takers:
    0.05 M/s
  time per message/2 adders, 4 takers:
    59403.23 ns
  messages over time/2 adders, 4 takers:
    0.10 M/s
  time per message/4 adders, 1 taker:
    215475.95 ns
  messages over time/4 adders, 1 taker:
    0.02 M/s
  time per message/4 adders, 2 takers:
    2684272.72 ns
  messages over time/4 adders, 2 takers:
    0.00 M/s
  time per message/4 adders, 4 takers:
    19082706.59 ns
  messages over time/4 adders, 4 takers:
    0.00 M/s
(cd _build/default/bench && ./main.exe -brief 'Memory usage')
Memory usage:
  stack and heap used/fiber in a bundle:
    232.00 B
  stack and heap used/promise in a bundle:
    352.00 B
  stack and heap used/fiber in a flock:
    248.00 B
  stack and heap used/promise in a flock:
    368.00 B
  stack and heap used/fiber with shared computation & latch:
    232.00 B
  stack and heap used/Fun.protect:
    80.00 B
  stack and heap used/lastly:
    32.00 B
  stack and heap used/finally:
    40.00 B
  stack and heap used/instantiate:
    96.00 B
  stack and heap used/join_after bundle:
    280.00 B
  stack and heap used/join_after flock:
    280.00 B
2024-10-09 17:48.08 ---> saved as "5c69c5483842c8e3fbad2baa4932ecc770e6e0d4732cb6e817e35185af9ac5e1"
Job succeeded
2024-10-09 17:57.59: Job succeeded