2025-05-22 12:20.03: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (9afb61d245b2724d2132450805c8b080ac7e0c9a) (linux-x86_64:fedora-42-5.3_opam-2.3)
Base: ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373
Opam project build
To reproduce locally:
git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 9afb61d2
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373
# fedora-42-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo dnf install -y findutils
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 || git fetch origin master) && git reset -q --hard c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
2025-05-22 12:20.03: Using cache hint "ahrefs/ocannl-ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373-fedora-42-5.3_opam-2.3-cdc9572ad54e4d4bf194acfcdfaa690c"
2025-05-22 12:20.03: Using OBuilder spec:
((from ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373)
 (comment fedora-42-5.3_opam-2.3)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (network host) (shell "sudo dnf install -y findutils"))
 (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 || git fetch origin master) && git reset -q --hard c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
 (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \\nopam pin add -yn arrayjit.dev './'"))
 (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'"))
 (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS"))
 (copy (src .) (dst /src))
 (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")))
2025-05-22 12:20.03: Waiting for resource in pool OCluster
2025-05-22 12:20.03: Waiting for worker…
2025-05-22 12:20.03: Got resource from pool OCluster
Building on laodoke.caelum.ci.dev
HEAD is now at 657f596d Migrate `Staged_compilation` to `PPrint.document`
HEAD is now at 9afb61d2 In progress / broken: Format -> PPrint migration first pass by Claude
(from ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373)
2025-05-22 12:20.04 ---> using "58e80f2943667cc892930b8f00145b341640b9631e46b0e990690977929d47d4" from cache
/: (comment fedora-42-5.3_opam-2.3)
/: (user (uid 1000) (gid 1000))
/: (env CLICOLOR_FORCE 1)
/: (env OPAMCOLOR always)
/: (workdir /src)
/src: (run (network host) (shell "sudo dnf install -y findutils"))
Updating and loading repositories:
Fedora 42 - x86_64 - Updates 100% | 106.1 KiB/s | 12.8 KiB | 00m00s
Fedora 42 - x86_64 100% | 187.8 KiB/s | 24.4 KiB | 00m00s
Fedora 42 - x86_64 - Updates 100% | 4.6 MiB/s | 5.2 MiB | 00m01s
Fedora 42 - x86_64 100% | 468.3 KiB/s | 310.0 KiB | 00m01s
Repositories loaded.
Package "findutils-1:4.10.0-5.fc42.x86_64" is already installed.
Nothing to do.
2025-05-22 12:20.04 ---> using "5e75f4f90bede632f1905a7c2c6dd18b38094c39f21cf56cfd80e66f2a5ea1fb" from cache
/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-05-22 12:20.04 ---> using "99c48dd0c90841dd0557a3605ead22147b57247e14e3720a707113a3fd5cb79d" from cache
/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.
Continue?
[y/n] yThis development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.You may want to back it up before going further.Format upgrade done.<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>[default] Initialised2025-05-22 12:20.04 ---> using "f466099dbad0be430d5d88b3056e1b5546981294ac309650866ef1379cd6d212" from cache/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))Linux 5.15.0-139-genericThe OCaml toplevel, version 5.3.02.3.02025-05-22 12:20.04 ---> using "9495bb6675cefd0fde9c3d3172875d2f69d43b4b48aa2c1fc3cbbdc7685c23a4" from cache/src: (workdir /src)/src: (run (shell "sudo chown opam /src"))2025-05-22 12:20.04 ---> using "32bd88dfd4a72ae99f6d3d856e0c727304b5b119984d04b7c88a6f9d56b49fb7" from cache/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "cd ~/opam-repository && (git cat-file -e c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 || git fetch origin master) && git reset -q --hard c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 && git log --no-decorate -n1 --oneline && opam update -u"))From https://github.com/ocaml/opam-repository* branch master -> FETCH_HEAD0d013e603b..27f5ac67c2 master -> origin/masterc7d6d1d2aa Merge pull request #27880 from MisterDA/os-family-fedora<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>[default] synchronised from git+file:///home/opam/opam-repositoryEverything as up-to-date as possible (run with --verbose to show unavailable upgrades).However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.Nothing to do.# To update the current shell environment, run: eval $(opam env)2025-05-22 12:20.04 ---> using "6d19d3996dbde981768da10e312cc45f7f699c4b087bc629901db816fcce6f43" from cache/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))2025-05-22 12:20.04 ---> using "4b1f34a01828d241bb43df747a0805ca6edefb5d3b3690b7a6649fa3e0b35812" from cache/src: (run (network host)(shell "opam pin add -yn neural_nets_lib.dev './' && \\nopam pin add -yn arrayjit.dev './'"))[neural_nets_lib.dev] synchronised (file:///src)neural_nets_lib is now pinned to file:///src (version dev)[arrayjit.dev] synchronised (file:///src)arrayjit is now pinned to file:///src (version dev)2025-05-22 12:20.04 ---> using "b281501d70eec1b964fe055165109ef9bdfa962b0733065c5a726c9ebfcb95e7" from cache/src: (run (network host)(shell "echo '(lang dune 3.0)' > './dune-project'"))2025-05-22 12:20.04 ---> using "1093f336765b0555b6295790215bc929b41cf35d3f482963b71c849ed281d560" from cache/src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 
ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")/src: (env CI true)/src: (env OCAMLCI true)/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))+ /usr/sbin/sudo "yum" "makecache"- Updating and loading repositories:- Fedora 42 - x86_64 - Updates 100% | 40.7 KiB/s | 12.8 KiB | 00m00s- Repositories loaded.- Metadata cache created.<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>[arrayjit.dev] synchronised (file:///src)[neural_nets_lib.dev] synchronised (file:///src)[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).[NOTE] Package ocaml-config is already installed (current version is 3).[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml is already installed (current version is 5.3.0).[NOTE] Package base-unix is already installed (current version is base).[NOTE] Package base-threads is already installed (current version is base).[NOTE] Package base-nnp is already installed (current version is base).[NOTE] Package base-effects is already installed (current version is base).[NOTE] Package base-domains is already installed (current version is base).[NOTE] Package base-bigarray is already installed (current version is base).The following system packages will first need to be installed:libffi-devel<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>+ /usr/sbin/sudo "yum" "install" "-y" "libffi-devel"- Updating and loading repositories:- Repositories loaded.- Package Arch Version Repository Size- Installing:- libffi-devel x86_64 3.4.6-5.fc42 fedora 33.1 KiB-- Transaction Summary:- Installing: 1 package-- Total size of inbound packages is 29 KiB. 
Need to download 29 KiB.- After this operation, 33 KiB extra will be used (install 33 KiB, remove 0 B).- [1/1] libffi-devel-0:3.4.6-5.fc42.x86_6 100% | 237.9 KiB/s | 28.8 KiB | 00m00s- --------------------------------------------------------------------------------- [1/1] Total 100% | 110.3 KiB/s | 28.8 KiB | 00m00s- Running transaction- [1/3] Verify package files 100% | 0.0 B/s | 1.0 B | 00m00s- [2/3] Prepare transaction 100% | 29.0 B/s | 1.0 B | 00m00s- [3/3] Installing libffi-devel-0:3.4.6-5 100% | 483.2 KiB/s | 34.8 KiB | 00m00s- Complete!+ /usr/sbin/rpm "-q" "--whatprovides" "libffi-devel"- libffi-devel-3.4.6-5.fc42.x86_642025-05-22 12:20.04 ---> using "4c2ab24e256e5649c481cb2902a6427a29d240da5ecf3085a651e84fa7f04bb6" from cache/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "opam install $DEPS"))[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).[NOTE] Package ocaml-config is already installed (current version is 3).[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml is already installed (current version is 5.3.0).[NOTE] Package base-unix is already installed (current version is base).[NOTE] Package base-threads is already installed (current version is base).[NOTE] Package base-nnp is already installed (current version is base).[NOTE] Package base-effects is already installed (current version is base).[NOTE] Package base-domains is already installed (current version is base).[NOTE] Package base-bigarray is already installed (current version is base).The following actions will be performed:=== install 75 packages- install angstrom 0.16.1- install astring 0.8.5- install backoff 0.1.1- install base v0.17.2- install bigarray-compat 1.1.0- install bigstringaf 0.10.0- install camlp-streams 5.0.1- install cmdliner 1.3.0- install conf-libffi 2.0.0- install conf-pkg-config 4- install cppo 1.8.0- install csexp 1.5.2- install ctypes 0.23.0- install ctypes-foreign 0.23.0- install dune 3.18.2- install dune-configurator 3.18.2- install fieldslib v0.17.0- install fmt 0.10.0- install integers 0.7.0- install jane-street-headers v0.17.0- install jst-config v0.17.0- install logs 0.8.0- install mdx 2.5.0- install mtime 2.1.0- install multicore-magic 2.3.1- install num 1.5-1- install ocaml-compiler-libs v0.17.0- install ocaml-syntax-shims 1.0.0- install ocaml-version 4.0.0- install ocaml_intrinsics_kernel v0.17.1- install ocamlbuild 0.16.1- install ocamlfind 1.9.8- install parsexp v0.17.0- install pprint 20230830- install ppx_assert v0.17.0- install ppx_base v0.17.0- install ppx_cold v0.17.0- install ppx_compare v0.17.0- install ppx_derivers 1.2.1- install ppx_deriving 6.0.3- install ppx_enumerate v0.17.0- install ppx_expect v0.17.2- install ppx_fields_conv v0.17.0- install ppx_globalize v0.17.0- install ppx_hash v0.17.0- install ppx_here v0.17.0- install ppx_inline_test v0.17.0- install ppx_minidebug 2.2.0- install ppx_optcomp v0.17.0- install ppx_sexp_conv v0.17.0- install ppx_string v0.17.0- install ppx_variants_conv v0.17.0- install ppxlib 0.35.0- install ppxlib_jane v0.17.2- install printbox 0.12- install printbox-ext-plot 0.12- install printbox-html 0.12- install printbox-md 0.12- install printbox-text 0.12- install ptime 1.2.0- install re 1.12.0- install result 1.5- install saturn_lockfree 0.5.0- install seq base- install sexplib v0.17.0- install sexplib0 v0.17.0- install stdio v0.17.0- 
install stdlib-shims 0.3.0- install thread-local-storage 0.2- install time_now v0.17.0- install topkg 1.0.8- install tyxml 4.6.0- install uucp 16.0.0- install uutf 1.0.4- install variantslib v0.17.0<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>-> retrieved backoff.0.1.1 (cached)-> retrieved astring.0.8.5 (cached)-> retrieved angstrom.0.16.1 (cached)-> retrieved base.v0.17.2 (cached)-> retrieved bigarray-compat.1.1.0 (cached)-> retrieved bigstringaf.0.10.0 (cached)-> retrieved camlp-streams.5.0.1 (cached)-> retrieved cppo.1.8.0 (cached)-> retrieved cmdliner.1.3.0 (cached)-> installed conf-pkg-config.4-> retrieved csexp.1.5.2 (cached)-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached)-> installed conf-libffi.2.0.0-> retrieved fieldslib.v0.17.0 (cached)-> retrieved fmt.0.10.0 (cached)-> retrieved integers.0.7.0 (cached)-> retrieved jane-street-headers.v0.17.0 (cached)-> retrieved jst-config.v0.17.0 (cached)-> retrieved logs.0.8.0 (cached)-> retrieved mtime.2.1.0 (cached)-> retrieved mdx.2.5.0 (cached)-> retrieved multicore-magic.2.3.1 (cached)-> retrieved num.1.5-1 (cached)-> retrieved ocaml-compiler-libs.v0.17.0 (cached)-> retrieved ocaml-syntax-shims.1.0.0 (cached)-> retrieved ocaml-version.4.0.0 (cached)-> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached)-> retrieved ocamlbuild.0.16.1 (cached)-> retrieved ocamlfind.1.9.8 (cached)-> retrieved dune.3.18.2, dune-configurator.3.18.2 (cached)-> retrieved parsexp.v0.17.0 (cached)-> retrieved pprint.20230830 (cached)-> retrieved ppx_assert.v0.17.0 (cached)-> retrieved ppx_base.v0.17.0 (cached)-> retrieved ppx_cold.v0.17.0 (cached)-> retrieved ppx_compare.v0.17.0 (cached)-> retrieved ppx_derivers.1.2.1 (cached)-> retrieved ppx_enumerate.v0.17.0 (cached)-> retrieved ppx_deriving.6.0.3 (cached)-> retrieved ppx_expect.v0.17.2 (cached)-> retrieved ppx_fields_conv.v0.17.0 (cached)-> retrieved ppx_globalize.v0.17.0 (cached)-> installed cmdliner.1.3.0-> installed num.1.5-1-> retrieved ppx_hash.v0.17.0 (cached)-> retrieved ppx_here.v0.17.0 (cached)-> retrieved ppx_inline_test.v0.17.0 (cached)-> retrieved ppx_optcomp.v0.17.0 (cached)-> retrieved ppx_sexp_conv.v0.17.0 (cached)-> retrieved ppx_minidebug.2.2.0 (cached)-> retrieved ppx_string.v0.17.0 (cached)-> retrieved ppx_variants_conv.v0.17.0 (cached)-> retrieved ppxlib_jane.v0.17.2 (cached)-> retrieved ptime.1.2.0 (cached)-> retrieved re.1.12.0 (cached)-> retrieved result.1.5 (cached)-> retrieved saturn_lockfree.0.5.0 (cached)-> retrieved seq.base (cached)-> installed seq.base-> retrieved sexplib.v0.17.0 (cached)-> retrieved sexplib0.v0.17.0 (cached)-> retrieved stdio.v0.17.0 (cached)-> retrieved stdlib-shims.0.3.0 (cached)-> retrieved thread-local-storage.0.2 (cached)-> retrieved ppxlib.0.35.0 (cached)-> retrieved time_now.v0.17.0 (cached)-> retrieved topkg.1.0.8 (cached)-> retrieved tyxml.4.6.0 (cached)-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached)-> retrieved uutf.1.0.4 (cached)-> retrieved variantslib.v0.17.0 (cached)-> retrieved uucp.16.0.0 (cached)-> installed ocamlfind.1.9.8-> installed ocamlbuild.0.16.1-> installed topkg.1.0.8-> installed uutf.1.0.4-> installed mtime.2.1.0-> installed fmt.0.10.0-> installed ptime.1.2.0-> installed astring.0.8.5-> installed logs.0.8.0-> installed dune.3.18.2-> installed jane-street-headers.v0.17.0-> installed ppx_derivers.1.2.1-> installed csexp.1.5.2-> installed backoff.0.1.1-> installed bigarray-compat.1.1.0-> installed camlp-streams.5.0.1-> installed 
multicore-magic.2.3.1-> installed ocaml-syntax-shims.1.0.0-> installed ocaml-version.4.0.0-> installed ocaml_intrinsics_kernel.v0.17.1-> installed pprint.20230830-> installed printbox.0.12-> installed re.1.12.0-> installed cppo.1.8.0-> installed ocaml-compiler-libs.v0.17.0-> installed result.1.5-> installed sexplib0.v0.17.0-> installed stdlib-shims.0.3.0-> installed thread-local-storage.0.2-> installed saturn_lockfree.0.5.0-> installed integers.0.7.0-> installed parsexp.v0.17.0-> installed dune-configurator.3.18.2-> installed bigstringaf.0.10.0-> installed mdx.2.5.0-> installed sexplib.v0.17.0-> installed angstrom.0.16.1-> installed tyxml.4.6.0-> installed printbox-html.0.12-> installed ctypes.0.23.0-> installed base.v0.17.2-> installed fieldslib.v0.17.0-> installed variantslib.v0.17.0-> installed stdio.v0.17.0-> installed ctypes-foreign.0.23.0-> installed uucp.16.0.0-> installed ppxlib.0.35.0-> installed printbox-text.0.12-> installed printbox-md.0.12-> installed printbox-ext-plot.0.12-> installed ppxlib_jane.v0.17.2-> installed ppx_optcomp.v0.17.0-> installed ppx_cold.v0.17.0-> installed ppx_here.v0.17.0-> installed ppx_variants_conv.v0.17.0-> installed ppx_fields_conv.v0.17.0-> installed ppx_globalize.v0.17.0-> installed ppx_enumerate.v0.17.0-> installed ppx_deriving.6.0.3-> installed ppx_compare.v0.17.0-> installed ppx_sexp_conv.v0.17.0-> installed ppx_hash.v0.17.0-> installed ppx_assert.v0.17.0-> installed ppx_minidebug.2.2.0-> installed ppx_base.v0.17.0-> installed jst-config.v0.17.0-> installed ppx_string.v0.17.0-> installed time_now.v0.17.0-> installed ppx_inline_test.v0.17.0-> installed ppx_expect.v0.17.2Done.# To update the current shell environment, run: eval $(opam env)2025-05-22 12:20.04 ---> using "988ec11528caf4023f4cf8802ee33a6877da77cf607131ad018b314d3f377508" from cache/src: (copy (src .) (dst /src))2025-05-22 12:20.04 ---> saved as "9c1745bb67f2bd27a48bc265f3db8fe8379df4208233db1d6a4f7291f2b0b3e3"/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))(cd _build/default/test/config && ../../arrayjit/bin/read_config.exe --read=backend)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/config/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config fileWrote value of 'backend' to ocannl_backend.txt(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/default/test_ppx && ./test_ppx_op.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! 
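Note: the stage that ultimately fails is the final "opam exec -- dune build @install @check @runtest" run shown above. A rough way to reproduce just that stage outside Docker is sketched below; it assumes an existing local OCaml 5.3 opam switch and lets opam resolve dependency versions, whereas the CI job installs the exact versions pinned in the DEPS list above.
# Sketch only: approximate the CI's build-and-test stage in a local opam switch.
git clone --recursive https://github.com/ahrefs/ocannl.git && cd ocannl
git reset --hard 9afb61d2
opam pin add -yn neural_nets_lib.dev ./ && opam pin add -yn arrayjit.dev ./
opam install -y --deps-only --with-test ./arrayjit.opam ./neural_nets_lib.opam
opam exec -- dune build @install @check @runtest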
Reading configuration defaults from /src/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/6ede0cf66242a160a326ecc4480b0cb5/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config fileFile "test/dune", lines 30-40, characters 0-281:30 | (rule31 | (alias runtest)32 | (target33 | (dir log_files))34 | (action35 | (run36 | %{dep:micrograd_demo_logging.exe}37 | "--ocannl_debug_backend=text"38 | "--ocannl_log_file_stem=micrograd_demo_logging"39 | "--ocannl_log_main_domain_to_stdout=false"40 | "--ocannl_debug_log_to_stream_files=true")))(cd _build/default/test && ./micrograd_demo_logging.exe --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_stream_files=true)Welcome to OCANNL! 
Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_cd_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Found true, commandline --ocannl_debug_log_to_stream_files=true
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Fatal error: exception File "src/printbox-text/PrintBox_text.ml", line 212, characters 6-12: Assertion failed
Raised at PrintBox_text.Output.Make_out.to_buf_aux_ in file "src/printbox-text/PrintBox_text.ml", line 212, characters 6-50
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 19-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from PrintBox_text.Output.Make_out.render in file "src/printbox-text/PrintBox_text.ml", line 242, characters 14-64
Called from PrintBox_text.output in file "src/printbox-text/PrintBox_text.ml", line 851, characters 2-31
Called from Minidebug_runtime.PrintBox.output_box in file "minidebug_runtime.ml", line 1527, characters 19-59
Called from Minidebug_runtime.PrintBox.close_log_impl.close_tree in file "minidebug_runtime.ml", line 1572, characters 6-38
Called from Backends.Add_buffer_retrieval_and_syncing.sync_routine in file "arrayjit/lib/backends.ml", lines 144-172, characters 31-82
Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 454-455, characters 4-92
Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 441-455, characters 23-92
Called from Dune__exe__Micrograd_demo_logging in file "test/micrograd_demo_logging.ml", line 34, characters 13-77
(cd _build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
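Note: the fatal error above is an assertion failure inside printbox-text, reached through ppx_minidebug's Minidebug_runtime while micrograd_demo_logging.exe closes a debug log during routine syncing. To iterate on just this crash, a sketch of rerunning only that executable with the flags the CI used (assuming the pins and dependencies from the Dockerfile above are already installed in the current switch):
# Sketch: rebuild and rerun only the crashing logging test, flags copied from the log above.
opam exec -- dune build test/micrograd_demo_logging.exe
cd _build/default/test && ./micrograd_demo_logging.exe \
  --ocannl_debug_backend=text \
  --ocannl_log_file_stem=micrograd_demo_logging \
  --ocannl_log_main_domain_to_stdout=false \
  --ocannl_debug_log_to_stream_files=true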
Reading configuration defaults from /src/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! 
Reading configuration defaults from /src/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config fileFile "test/micrograd_demo.ml", line 1, characters 0-0:/usr/sbin/git --no-pager diff --no-index --color=always -u _build/default/test/micrograd_demo.ml _build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/micrograd_demo.ml.correcteddiff --git a/_build/default/test/micrograd_demo.ml b/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/micrograd_demo.ml.correctedindex 77e46c6..3cb470c 100644--- a/_build/default/test/micrograd_demo.ml+++ b/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/micrograd_demo.ml.corrected@@ -52,15 +52,14 @@ let%expect_test "Micrograd README basic example" =│├┼───────┤ ││││ -4.00 │ ││└┴───────┘ │- └─────────────────┘- ┌────────────────────────┐- │[0]: a shape 0:1 grad_a│- │┌┬─────────┐ │- │││axis 0 │ │- │├┼─────────┤ │- │││ 1.38e+2 │ │- │└┴─────────┘ │- └────────────────────────┘+ └─────────────────┘┌────────────────────────┐+ │[0]: a shape 0:1 grad_a│+ │┌┬─────────┐ │+ │││axis 0 │ │+ │├┼─────────┤ │+ │││ 1.38e+2 │ │+ │└┴─────────┘ │+ └────────────────────────┘|}];Tensor.print ~with_code:false ~with_grad:true `Default b;[%expect@@ -72,15 +71,14 @@ let%expect_test "Micrograd README basic example" =│├┼──────┤ ││││ 2.00 │ ││└┴──────┘ │- └─────────────────┘- ┌────────────────────────┐- │[2]: b shape 0:1 grad_b│- │┌┬─────────┐ │- │││axis 0 │ │- │├┼─────────┤ │- │││ 6.45e+2 │ │- │└┴─────────┘ │- └────────────────────────┘+ └─────────────────┘┌────────────────────────┐+ │[2]: b shape 0:1 grad_b│+ │┌┬─────────┐ │+ │││axis 0 │ │+ │├┼─────────┤ │+ │││ 6.45e+2 │ │+ │└┴─────────┘ │+ └────────────────────────┘|}]let%expect_test "Micrograd half-moons example" =File "test/hello_world_op.ml", line 1, characters 0-0:/usr/sbin/git --no-pager diff --no-index --color=always -u _build/default/test/hello_world_op.ml _build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/hello_world_op.ml.correcteddiff --git a/_build/default/test/hello_world_op.ml b/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/hello_world_op.ml.correctedindex ba9d7ef..6b90c44 100644--- a/_build/default/test/hello_world_op.ml+++ b/_build/.sandbox/2b14121714334e9d4ee1472a87371925/default/test/hello_world_op.ml.corrected@@ -102,36 +102,39 @@ let%expect_test "Print constant tensor" =let%op hey = [ (1, 2, 3); (4, 5, 6) ] inTrain.forward_and_forget backend ctx hey;Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey;- [%expect {| [1.00, 2.00, 3.00; 4.00, 5.00, 6.00] |}];+ [%expect {| [ 1.00 , 2.00 , 3.00 ; 4.00 , 5.00 , 6.00 ][0]: c2x3_hey shape 1:3->0:2 |}];Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;[%expect{|- ┌─────────────────────────────────────────────────────────────┐- │[0]: [1.00, 2.00, 3.00; 4.00, 5.00, 6.00]_hey shape 1:3->0:2 │- │┌──────┬──────────────────┐ │- ││ │axis 1 │ │- │├──────┼──────────────────┤ │- ││axis 0│ 1.00 2.00 3.00 │ │- ││ │ 4.00 5.00 6.00 │ │- │└──────┴──────────────────┘ │- └─────────────────────────────────────────────────────────────┘+ ┌─────────────────────────────┐+ │[0]: c2x3_hey shape 1:3->0:2 │+ │┌──────┬──────────────────┐ │+ ││ │axis 1 │ │+ │├──────┼──────────────────┤ │+ ││axis 0│ 1.00 2.00 3.00 │ │+ ││ │ 4.00 5.00 6.00 │ │+ │└──────┴──────────────────┘ │+ └─────────────────────────────┘|}];let%op hoo = [| [ 1; 2; 3 ]; [ 4; 5; 6 ] |] inTrain.forward_and_forget backend ctx hoo;Tensor.print 
~with_code:false ~with_grad:false `Inline @@ hoo;- [%expect {| [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|] |}];+ [%expect {|+ [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |][1]: c2x3_hoo shape+ 0:2|1:3+ |}];Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo;[%expect{|- ┌──────────────────────────────────────────────────────────────────┐- │[1]: [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|]_hoo shape 0:2|1:3 │- │┌──────┬──────────────────┐ │- ││ │axis 1 │ │- │├──────┼──────────────────┤ │- ││axis 0│ 1.00 2.00 3.00 │ │- ││ │ 4.00 5.00 6.00 │ │- │└──────┴──────────────────┘ │- └──────────────────────────────────────────────────────────────────┘+ ┌────────────────────────────┐+ │[1]: c2x3_hoo shape 0:2|1:3 │+ │┌──────┬──────────────────┐ │+ ││ │axis 1 │ │+ │├──────┼──────────────────┤ │+ ││axis 0│ 1.00 2.00 3.00 │ │+ ││ │ 4.00 5.00 6.00 │ │+ │└──────┴──────────────────┘ │+ └────────────────────────────┘|}];let%op hey2 =[@@ -145,10 +148,12 @@ let%expect_test "Print constant tensor" =Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey2;[%expect{|- [(1.00, 2.00, 3.00), (4.00, 5.00, 6.00);- (7.00, 8.00, 9.00), (10.00, 11.00, 12.00);- (13.00, 14.00, 15.00), (16.00, 17.00, 18.00);- (19.00, 20.00, 21.00), (22.00, 23.00, 24.00)]+ [+ ( 1.00 , 2.00 , 3.00 ) , ( 4.00 , 5.00 , 6.00 )+ ; ( 7.00 , 8.00 , 9.00 ) , ( 10.00 , 11.00 , 12.00 )+ ; ( 13.00 , 14.00 , 15.00 ) , ( 16.00 , 17.00 , 18.00 )+ ; ( 19.00 , 20.00 , 21.00 ) , ( 22.00 , 23.00 , 24.00 )+ ][2]: c4x2x3_hey2 shape 1:2,2:3->0:4|}];Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2;[%expect@@ -178,10 +183,12 @@ let%expect_test "Print constant tensor" =Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo2;[%expect{|- [|[[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]];- [[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]];- [[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]];- [[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]]|]+ [|+ [ [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] ]+ ; [ [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] ]+ ; [ [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] ]+ ; [ [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] ]+ |][3]: c4x2x3_hoo2 shape 0:4|1:2,2:3|}];Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo2;[%expect@@ -209,10 +216,12 @@ let%expect_test "Print constant tensor" =Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo;[%expect{|- [|[|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|];- [|[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]|];- [|[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]|];- [|[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]|]|]+ [|+ [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]+ ; [| [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] |]+ ; [| [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] |]+ ; [| [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] |]+ |][4]: c4x2x3_heyhoo shape 0:4,1:2|2:3|}];Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo;[%expect@@ -241,14 +250,23 @@ let%expect_test "Print constant tensor" =[%expect{|[|- [|[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];- [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]|];- [|[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];- [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]|];- [|[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];- [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]|];- [|[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];- [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]|]|]+ [|+ [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]+ ; [ [ 4.00 ; 34.00 ] ; [ 
5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]+ |]+ ; [|+ [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]+ ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]+ |]+ ; [|+ [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]+ ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]+ |]+ ; [|+ [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]+ ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]+ |]+ |][5]: c4x2x3x2_heyhoo2 shape 0:4,1:2|2:3,3:2|}];Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo2;[%expect@@ -295,15 +313,26 @@ let%expect_test "Print constant tensor" ={|[|[|- [[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];- [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]];- [[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];- [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]]|];- [|- [[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];- [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]];- [[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];- [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]]|]|]+ [+ [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]+ ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]+ ]+ ; [+ [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]+ ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]+ ]+ |]+ ; [|+ [+ [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]+ ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]+ ]+ ; [+ [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]+ ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]+ ]+ |]+ |][6]: c2x2x2x3x2_heyhoo3 shape 0:2,1:2|2:2,3:3,4:2|}];Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo3;[%expect@@ -355,15 +384,26 @@ let%expect_test "Print constant tensor" ={|[|[- [[1.00, 31.00; 2.00, 32.00; 3.00, 33.00];- [4.00, 34.00; 5.00, 35.00; 6.00, 36.00]];- [[7.00, 37.00; 8.00, 38.00; 9.00, 39.00];- [10.00, 40.00; 11.00, 41.00; 12.00, 42.00]]];- [- [[13.00, 43.00; 14.00, 44.00; 15.00, 45.00];- [16.00, 46.00; 17.00, 47.00; 18.00, 48.00]];- [[19.00, 49.00; 20.00, 50.00; 21.00, 51.00];- [22.00, 52.00; 23.00, 53.00; 24.00, 54.00]]]|]+ [+ [ 1.00 , 31.00 ; 2.00 , 32.00 ; 3.00 , 33.00 ]+ ; [ 4.00 , 34.00 ; 5.00 , 35.00 ; 6.00 , 36.00 ]+ ]+ ; [+ [ 7.00 , 37.00 ; 8.00 , 38.00 ; 9.00 , 39.00 ]+ ; [ 10.00 , 40.00 ; 11.00 , 41.00 ; 12.00 , 42.00 ]+ ]+ ]+ ; [+ [+ [ 13.00 , 43.00 ; 14.00 , 44.00 ; 15.00 , 45.00 ]+ ; [ 16.00 , 46.00 ; 17.00 , 47.00 ; 18.00 , 48.00 ]+ ]+ ; [+ [ 19.00 , 49.00 ; 20.00 , 50.00 ; 21.00 , 51.00 ]+ ; [ 22.00 , 52.00 ; 23.00 , 53.00 ; 24.00 , 54.00 ]+ ]+ ]+ |][7]: c2x2x2x3x2_heyhoo4 shape 0:2|4:2->1:2,2:2,3:3|}];Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo4;[%expect@@ -462,8 +502,29 @@ let%expect_test "Big matrix" =Tensor.print ~with_code:false ~with_grad:false `Inline zero_to_twenty;[%expect{|- [0.00; 1.00; 2.00; 3.00; 4.00; 5.00; 6.00; 7.00; 8.00; 9.00; 10.00; 11.00;- 12.00; 13.00; 14.00; 15.00; 16.00; 17.00; 18.00; 19.00; 20.00]+ [+ 0.00+ ; 1.00+ ; 2.00+ ; 3.00+ ; 4.00+ ; 5.00+ ; 6.00+ ; 7.00+ ; 8.00+ ; 9.00+ ; 10.00+ ; 11.00+ ; 12.00+ ; 13.00+ ; 14.00+ ; 15.00+ ; 16.00+ ; 17.00+ ; 18.00+ ; 19.00+ ; 20.00+ ][2]: 0...20 shape 0:21|}];Tensor.print ~with_code:false ~with_grad:false `Default zero_to_twenty;[%expect(cd _build/default/test && ./moons_demo_parallel_run.exe)Welcome to OCANNL! 
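Note: the diffs above are expect-test mismatches in test/micrograd_demo.ml and test/hello_world_op.ml; the inline tensor printing changed layout with the Format -> PPrint migration named in the HEAD commit. If the new layout is the intended one, dune's promotion workflow accepts the generated .corrected files; whether to promote or to fix the printer is a judgment call here, since the commit message labels the migration "In progress / broken". A sketch of the promotion workflow:
# Sketch: re-run the expect tests, then accept the .corrected output if it is intended.
opam exec -- dune runtest
opam exec -- dune promote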
Reading configuration defaults from /src/_build/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file("Set log_level to" 1)└─{orphaned from #2}Retrieving commandline, environment, or config file variable ocannl_backendFound multicore_cc, in the config fileProperties of devices:(multicore_devices(device ((device_name CPU) (device_ordinal 0) (num_domains 72))))@!Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformityFound true, in the config fileRetrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_filesNot found, using default falseRetrieving commandline, environment, or config file variable ocannl_ll_ident_styleNot found, using default heuristicRetrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_levelNot found, using default 3Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_commandNot found, using default gccRetrieving commandline, environment, or config file variable ocannl_never_capture_stdoutNot found, using default falseBatch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407Batch=959, step=960, lr=0.196250, batch loss=0.245136, epoch loss=44.035543Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025Batch=239, step=1440, lr=0.194250, batch loss=0.347897, epoch loss=1.058921Batch=299, step=1500, lr=0.194000, batch loss=0.247348, epoch loss=1.306269Batch=359, step=1560, lr=0.193750, batch loss=0.316559, epoch loss=1.622828Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.935563Batch=479, step=1680, lr=0.193250, batch loss=0.276268, epoch loss=2.211831Batch=539, step=1740, lr=0.193000, batch loss=0.209826, epoch loss=2.421657Batch=599, step=1800, lr=0.192750, batch loss=0.250384, epoch loss=2.672042Batch=659, step=1860, lr=0.192500, batch loss=0.367201, epoch 
loss=3.039243Batch=719, step=1920, lr=0.192250, batch loss=0.354917, epoch loss=3.394160Batch=779, step=1980, lr=0.192000, batch loss=0.381382, epoch loss=3.775542Batch=839, step=2040, lr=0.191750, batch loss=0.339637, epoch loss=4.115179Batch=899, step=2100, lr=0.191500, batch loss=0.295234, epoch loss=4.410413Batch=959, step=2160, lr=0.191250, batch loss=0.214033, epoch loss=4.624446Batch=1019, step=2220, lr=0.191000, batch loss=0.330972, epoch loss=4.955419Batch=1079, step=2280, lr=0.190750, batch loss=0.208236, epoch loss=5.163654Batch=1139, step=2340, lr=0.190500, batch loss=0.278374, epoch loss=5.442028Batch=1199, step=2400, lr=0.190250, batch loss=0.220793, epoch loss=5.662821Epoch=1, step=2400, lr=0.190250, epoch loss=5.662821Batch=59, step=2460, lr=0.190000, batch loss=0.230363, epoch loss=0.230363Batch=119, step=2520, lr=0.189750, batch loss=0.195962, epoch loss=0.426325Batch=179, step=2580, lr=0.189500, batch loss=0.221156, epoch loss=0.647481Batch=239, step=2640, lr=0.189250, batch loss=0.328098, epoch loss=0.975578Batch=299, step=2700, lr=0.189000, batch loss=0.202947, epoch loss=1.178525Batch=359, step=2760, lr=0.188750, batch loss=0.289890, epoch loss=1.468415Batch=419, step=2820, lr=0.188500, batch loss=0.281744, epoch loss=1.750160Batch=479, step=2880, lr=0.188250, batch loss=0.264844, epoch loss=2.015004Batch=539, step=2940, lr=0.188000, batch loss=0.203798, epoch loss=2.218802Batch=599, step=3000, lr=0.187750, batch loss=0.248079, epoch loss=2.466881Batch=659, step=3060, lr=0.187500, batch loss=0.345056, epoch loss=2.811937Batch=719, step=3120, lr=0.187250, batch loss=0.343542, epoch loss=3.155479Batch=779, step=3180, lr=0.187000, batch loss=0.366989, epoch loss=3.522468Batch=839, step=3240, lr=0.186750, batch loss=0.321779, epoch loss=3.844248Batch=899, step=3300, lr=0.186500, batch loss=0.283865, epoch loss=4.128112Batch=959, step=3360, lr=0.186250, batch loss=0.214411, epoch loss=4.342523Batch=1019, step=3420, lr=0.186000, batch loss=0.306400, epoch loss=4.648923Batch=1079, step=3480, lr=0.185750, batch loss=0.177313, epoch loss=4.826236Batch=1139, step=3540, lr=0.185500, batch loss=0.235576, epoch loss=5.061812Batch=1199, step=3600, lr=0.185250, batch loss=0.197911, epoch loss=5.259723Epoch=2, step=3600, lr=0.185250, epoch loss=5.259723Batch=59, step=3660, lr=0.185000, batch loss=0.226539, epoch loss=0.226539Batch=119, step=3720, lr=0.184750, batch loss=0.191802, epoch loss=0.418341Batch=179, step=3780, lr=0.184500, batch loss=0.210712, epoch loss=0.629053Batch=239, step=3840, lr=0.184250, batch loss=0.314104, epoch loss=0.943157Batch=299, step=3900, lr=0.184000, batch loss=0.206011, epoch loss=1.149168Batch=359, step=3960, lr=0.183750, batch loss=0.291253, epoch loss=1.440420Batch=419, step=4020, lr=0.183500, batch loss=0.297434, epoch loss=1.737854Batch=479, step=4080, lr=0.183250, batch loss=0.260511, epoch loss=1.998365Batch=539, step=4140, lr=0.183000, batch loss=0.195229, epoch loss=2.193593Batch=599, step=4200, lr=0.182750, batch loss=0.230395, epoch loss=2.423989Batch=659, step=4260, lr=0.182500, batch loss=0.337132, epoch loss=2.761121Batch=719, step=4320, lr=0.182250, batch loss=0.347122, epoch loss=3.108243Batch=779, step=4380, lr=0.182000, batch loss=0.346320, epoch loss=3.454563Batch=839, step=4440, lr=0.181750, batch loss=0.317770, epoch loss=3.772333Batch=899, step=4500, lr=0.181500, batch loss=0.283906, epoch loss=4.056239Batch=959, step=4560, lr=0.181250, batch loss=0.238371, epoch loss=4.294610Batch=1019, step=4620, lr=0.181000, batch loss=0.336891, 
epoch loss=4.631501Batch=1079, step=4680, lr=0.180750, batch loss=0.208592, epoch loss=4.840094Batch=1139, step=4740, lr=0.180500, batch loss=0.249212, epoch loss=5.089306Batch=1199, step=4800, lr=0.180250, batch loss=0.191768, epoch loss=5.281074Epoch=3, step=4800, lr=0.180250, epoch loss=5.281074Batch=59, step=4860, lr=0.180000, batch loss=0.228079, epoch loss=0.228079Batch=119, step=4920, lr=0.179750, batch loss=0.190219, epoch loss=0.418298Batch=179, step=4980, lr=0.179500, batch loss=0.205905, epoch loss=0.624203Batch=239, step=5040, lr=0.179250, batch loss=0.309067, epoch loss=0.933269Batch=299, step=5100, lr=0.179000, batch loss=0.204593, epoch loss=1.137862Batch=359, step=5160, lr=0.178750, batch loss=0.271139, epoch loss=1.409001Batch=419, step=5220, lr=0.178500, batch loss=0.264055, epoch loss=1.673056Batch=479, step=5280, lr=0.178250, batch loss=0.239692, epoch loss=1.912748Batch=539, step=5340, lr=0.178000, batch loss=0.189445, epoch loss=2.102194Batch=599, step=5400, lr=0.177750, batch loss=0.231019, epoch loss=2.333213Batch=659, step=5460, lr=0.177500, batch loss=0.323592, epoch loss=2.656805Batch=719, step=5520, lr=0.177250, batch loss=0.325867, epoch loss=2.982672Batch=779, step=5580, lr=0.177000, batch loss=0.343179, epoch loss=3.325851Batch=839, step=5640, lr=0.176750, batch loss=0.309345, epoch loss=3.635195Batch=899, step=5700, lr=0.176500, batch loss=0.273171, epoch loss=3.908366Batch=959, step=5760, lr=0.176250, batch loss=0.214921, epoch loss=4.123287Batch=1019, step=5820, lr=0.176000, batch loss=0.339150, epoch loss=4.462436Batch=1079, step=5880, lr=0.175750, batch loss=0.207828, epoch loss=4.670264Batch=1139, step=5940, lr=0.175500, batch loss=0.240055, epoch loss=4.910319Batch=1199, step=6000, lr=0.175250, batch loss=0.186862, epoch loss=5.097180Epoch=4, step=6000, lr=0.175250, epoch loss=5.097180Batch=59, step=6060, lr=0.175000, batch loss=0.230529, epoch loss=0.230529Batch=119, step=6120, lr=0.174750, batch loss=0.194291, epoch loss=0.424820Batch=179, step=6180, lr=0.174500, batch loss=0.201541, epoch loss=0.626361Batch=239, step=6240, lr=0.174250, batch loss=0.302380, epoch loss=0.928741Batch=299, step=6300, lr=0.174000, batch loss=0.203930, epoch loss=1.132671Batch=359, step=6360, lr=0.173750, batch loss=0.266115, epoch loss=1.398786Batch=419, step=6420, lr=0.173500, batch loss=0.265282, epoch loss=1.664067Batch=479, step=6480, lr=0.173250, batch loss=0.243319, epoch loss=1.907387Batch=539, step=6540, lr=0.173000, batch loss=0.192969, epoch loss=2.100355Batch=599, step=6600, lr=0.172750, batch loss=0.234500, epoch loss=2.334855Batch=659, step=6660, lr=0.172500, batch loss=0.312107, epoch loss=2.646963Batch=719, step=6720, lr=0.172250, batch loss=0.314221, epoch loss=2.961184Batch=779, step=6780, lr=0.172000, batch loss=0.333223, epoch loss=3.294407Batch=839, step=6840, lr=0.171750, batch loss=0.303757, epoch loss=3.598164Batch=899, step=6900, lr=0.171500, batch loss=0.268468, epoch loss=3.866633Batch=959, step=6960, lr=0.171250, batch loss=0.211039, epoch loss=4.077671Batch=1019, step=7020, lr=0.171000, batch loss=0.330462, epoch loss=4.408133Batch=1079, step=7080, lr=0.170750, batch loss=0.180866, epoch loss=4.588999Batch=1139, step=7140, lr=0.170500, batch loss=0.216313, epoch loss=4.805312Batch=1199, step=7200, lr=0.170250, batch loss=0.181911, epoch loss=4.987223Epoch=5, step=7200, lr=0.170250, epoch loss=4.987223Batch=59, step=7260, lr=0.170000, batch loss=0.232877, epoch loss=0.232877Batch=119, step=7320, lr=0.169750, batch loss=0.184424, epoch 
loss=0.417301
[per-batch progress lines elided; format: "Batch=<batch>, step=<step>, lr=<lr>, batch loss=<batch loss>, epoch loss=<running epoch loss>", logged every 60 steps]
Epoch=6, step=8400, lr=0.165250, epoch loss=4.835103
Epoch=7, step=9600, lr=0.160250, epoch loss=4.702635
Epoch=8, step=10800, lr=0.155250, epoch loss=4.458715
Epoch=9, step=12000, lr=0.150250, epoch loss=4.074077
Epoch=10, step=13200, lr=0.145250, epoch loss=3.652022
Epoch=11, step=14400, lr=0.140250, epoch loss=3.040022
Epoch=12, step=15600, lr=0.135250, epoch loss=2.359326
Epoch=13, step=16800, lr=0.130250, epoch loss=1.879624
Epoch=14, step=18000, lr=0.125250, epoch loss=0.995917
Epoch=15, step=19200, lr=0.120250, epoch loss=0.660330
Epoch=16, step=20400, lr=0.115250, epoch loss=0.472818
Epoch=17, step=21600, lr=0.110250, epoch loss=0.363781
Epoch=18, step=22800, lr=0.105250, epoch loss=0.262524
Epoch=19, step=24000, lr=0.100250, epoch loss=0.222156
Half-moons scatterplot and decision boundary:
[40-row ASCII plot elided: data points of the two half-moon classes ('#' and '%') drawn over the learned decision regions ('*' and '.')]
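As context for the plot above: the test appears to train a small classifier on the classic two-class "half-moons" dataset, where '#' and '%' appear to mark the two classes and '*' / '.' the predicted regions on either side of the decision boundary. The OCaml sketch below shows one common way such data can be sampled; the function name and parameters are illustrative assumptions, not code from the ocannl test suite.

  (* Hypothetical sketch, not the project's dataset code: sample n points per
     class of the two interleaved half-moons, with uniform jitter of width
     [noise] added to each coordinate. *)
  let half_moons ~n ~noise =
    let sample cls =
      let t = Float.pi *. Random.float 1.0 in
      let jitter () = Random.float noise -. (noise /. 2.) in
      if cls = 0 then (Float.cos t +. jitter (), Float.sin t +. jitter (), 0)
      else (1.0 -. Float.cos t +. jitter (), 0.5 -. Float.sin t +. jitter (), 1)
    in
    List.init (2 * n) (fun i -> sample (i mod 2))

  let () =
    Random.self_init ();
    List.iter
      (fun (x, y, c) -> Printf.printf "%f %f %d\n" x y c)
      (half_moons ~n:5 ~noise:0.1)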
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @install @check @runtest && rm -rf _build" failed with exit status 1
2025-05-22 12:20.31: Job failed: Failed: Build failed
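A closing note on the training log itself: the lr column decreases by about 0.00025 every 60 steps (from roughly 0.17 at step 7380 down to 0.100250 at step 24000), consistent with a linear decay schedule, and the epoch loss column appears to be a running sum of the logged batch losses that resets at each epoch boundary. The sketch below reconstructs such a linear schedule; it is an assumption for illustration, not necessarily the scheduler the test actually uses.

  (* Hypothetical reconstruction of a linear learning-rate decay that matches
     the lr values in the log above; not taken from the project under test. *)
  let linear_lr ~init_lr ~final_lr ~total_steps step =
    let frac = float_of_int step /. float_of_int total_steps in
    init_lr +. ((final_lr -. init_lr) *. frac)

  let () =
    List.iter
      (fun step ->
        Printf.printf "step=%d, lr=%.6f\n" step
          (linear_lr ~init_lr:0.20025 ~final_lr:0.10025 ~total_steps:24000 step))
      [ 7380; 8400; 24000 ]
    (* prints lr=0.169500, lr=0.165250 and lr=0.100250, matching the log above *)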