2025-05-22 20:00.59: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (39741884b740497ac10065d5e464e6c70f9151f4) (linux-x86_64:fedora-42-5.3_opam-2.3)
Base: ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373
Opam project build
To reproduce locally:
git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 39741884
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373
# fedora-42-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo dnf install -y findutils
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
2025-05-22 20:00.59: Using cache hint "ahrefs/ocannl-ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373-fedora-42-5.3_opam-2.3-63d0fa7caba437c680f3f62d33f451da"
2025-05-22 20:00.59: Using OBuilder spec:
((from ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373)
 (comment fedora-42-5.3_opam-2.3)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (network host)
      (shell "sudo dnf install -y findutils"))
 (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
 (run (network host)
      (shell "opam pin add -yn neural_nets_lib.dev './' && \\nopam pin add -yn arrayjit.dev './'"))
 (run (network host)
      (shell "echo '(lang dune 3.0)' > './dune-project'"))
 (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam install $DEPS"))
 (copy (src .) (dst /src))
 (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")))
2025-05-22 20:00.59: Waiting for resource in pool OCluster
2025-05-22 20:00.59: Waiting for worker…
2025-05-22 20:01.08: Got resource from pool OCluster
Building on doris.caelum.ci.dev
All commits already cached
HEAD is now at 39741884 Untested: convert remaining uses of Format except where printing Sexp values
(from ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373)
2025-05-22 20:01.08 ---> using "58e80f2943667cc892930b8f00145b341640b9631e46b0e990690977929d47d4" from cache
/: (comment fedora-42-5.3_opam-2.3)
/: (user (uid 1000) (gid 1000))
/: (env CLICOLOR_FORCE 1)
/: (env OPAMCOLOR always)
/: (workdir /src)
/src: (run (network host)
           (shell "sudo dnf install -y findutils"))
Updating and loading repositories:
Fedora 42 - x86_64 - Updates 100% | 108.8 KiB/s | 12.8 KiB | 00m00s
Fedora 42 - x86_64 100% | 173.1 KiB/s | 24.4 KiB | 00m00s
Fedora 42 - x86_64 - Updates 100% | 6.9 MiB/s | 5.1 MiB | 00m01s
Fedora 42 - x86_64 100% | 574.1 KiB/s | 310.0 KiB | 00m01s
Repositories loaded.
Package "findutils-1:4.10.0-5.fc42.x86_64" is already installed.
Nothing to do.
2025-05-22 20:01.08 ---> using "5e75f4f90bede632f1905a7c2c6dd18b38094c39f21cf56cfd80e66f2a5ea1fb" from cache
/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-05-22 20:01.08 ---> using "99c48dd0c90841dd0557a3605ead22147b57247e14e3720a707113a3fd5cb79d" from cache
/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.
This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.
Continue? [y/n] y
[NOTE] The 'jobs' option was reset, its value was 71 and its new value will vary according to the current number of cores on your machine.
You can restore the fixed value using:
opam option jobs=71 --global
Format upgrade done.
<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
2025-05-22 20:01.08 ---> using "f466099dbad0be430d5d88b3056e1b5546981294ac309650866ef1379cd6d212" from cache
/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-05-22 20:01.08 ---> using "9495bb6675cefd0fde9c3d3172875d2f69d43b4b48aa2c1fc3cbbdc7685c23a4" from cache
/src: (workdir /src)
/src: (run (shell "sudo chown opam /src"))
2025-05-22 20:01.08 ---> using "32bd88dfd4a72ae99f6d3d856e0c727304b5b119984d04b7c88a6f9d56b49fb7" from cache
/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
           (network host)
           (shell "cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
 * branch master -> FETCH_HEAD
   0d013e603b..2df846cb67 master -> origin/master
2df846cb67 Merge pull request #27910 from maiste/release-dune-3.19.0
<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] synchronised from git+file:///home/opam/opam-repository
Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 20:01.08 ---> using "dcf64f3fd1f81049139dd4551f5e14f99abc2cb86c17720efd2add0463507556" from cache
/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
2025-05-22 20:01.09 ---> saved as "0c0311c464b2eb1d9c9ccc52ec83d074ff8fb8d0074de6709806acbd7626486f"
/src: (run (network host)
           (shell "opam pin add -yn neural_nets_lib.dev './' && \\nopam pin add -yn arrayjit.dev './'"))
[neural_nets_lib.dev] synchronised (file:///src)
neural_nets_lib is now pinned to file:///src (version dev)
[arrayjit.dev] synchronised (file:///src)
arrayjit is now pinned to file:///src (version dev)
2025-05-22 20:01.10 ---> saved as "717688da322b480918f10f1d6428473ebbec8e4aa51aabf3f267070231402e78"
/src: (run (network host)
           (shell "echo '(lang dune 3.0)' > './dune-project'"))
2025-05-22 20:01.11 ---> saved as "eda520a0eec05a2ce25ba24354a062c1d6f55655e6b32209eba02d2fde29a346"
/src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
/src: (env CI true)
/src: (env OCAMLCI true)
/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
           (network host)
           (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
+ /usr/sbin/sudo "yum" "makecache"
- Updating and loading repositories:
- Fedora 42 - x86_64 - Updates 100% | 145.5 KiB/s | 24.3 KiB | 00m00s
- Fedora 42 - x86_64 - Updates 100% | 4.0 MiB/s | 1.7 MiB | 00m00s
- Repositories loaded.
- Metadata cache created.
<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[arrayjit.dev] synchronised (file:///src)
[neural_nets_lib.dev] synchronised (file:///src)
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following system packages will first need to be installed:
libffi-devel
<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>
+ /usr/sbin/sudo "yum" "install" "-y" "libffi-devel"
- Updating and loading repositories:
- Repositories loaded.
- Package Arch Version Repository Size
- Installing:
- libffi-devel x86_64 3.4.6-5.fc42 fedora 33.1 KiB
-
- Transaction Summary:
- Installing: 1 package
-
- Total size of inbound packages is 29 KiB.
Need to download 29 KiB.- After this operation, 33 KiB extra will be used (install 33 KiB, remove 0 B).- [1/1] libffi-devel-0:3.4.6-5.fc42.x86_6 100% | 364.3 KiB/s | 28.8 KiB | 00m00s- --------------------------------------------------------------------------------- [1/1] Total 100% | 137.7 KiB/s | 28.8 KiB | 00m00s- Running transaction- [1/3] Verify package files 100% | 0.0 B/s | 1.0 B | 00m00s- [2/3] Prepare transaction 100% | 71.0 B/s | 1.0 B | 00m00s- [3/3] Installing libffi-devel-0:3.4.6-5 100% | 809.1 KiB/s | 34.8 KiB | 00m00s- Complete!+ /usr/sbin/rpm "-q" "--whatprovides" "libffi-devel"- libffi-devel-3.4.6-5.fc42.x86_642025-05-22 20:01.24 ---> saved as "4ba5063b886b60ef1f947efb4fe8c076d022817ed9aa43a34ab17354d432bb4c"/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "opam install $DEPS"))[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).[NOTE] Package ocaml-config is already installed (current version is 3).[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml is already installed (current version is 5.3.0).[NOTE] Package base-unix is already installed (current version is base).[NOTE] Package base-threads is already installed (current version is base).[NOTE] Package base-nnp is already installed (current version is base).[NOTE] Package base-effects is already installed (current version is base).[NOTE] Package base-domains is already installed (current version is base).[NOTE] Package base-bigarray is already installed (current version is base).The following actions will be performed:=== install 75 packages- install angstrom 0.16.1- install astring 0.8.5- install backoff 0.1.1- install base v0.17.2- install bigarray-compat 1.1.0- install bigstringaf 0.10.0- install camlp-streams 5.0.1- install cmdliner 1.3.0- install conf-libffi 2.0.0- install conf-pkg-config 4- install cppo 1.8.0- install csexp 1.5.2- install ctypes 0.23.0- install ctypes-foreign 0.23.0- install dune 3.19.0- install dune-configurator 3.19.0- install fieldslib v0.17.0- install fmt 0.10.0- install integers 0.7.0- install jane-street-headers v0.17.0- install jst-config v0.17.0- install logs 0.8.0- install mdx 2.5.0- install mtime 2.1.0- install multicore-magic 2.3.1- install num 1.5-1- install ocaml-compiler-libs v0.17.0- install ocaml-syntax-shims 1.0.0- install ocaml-version 4.0.0- install ocaml_intrinsics_kernel v0.17.1- install ocamlbuild 0.16.1- install ocamlfind 1.9.8- install parsexp v0.17.0- install pprint 20230830- install ppx_assert v0.17.0- install ppx_base v0.17.0- install ppx_cold v0.17.0- install ppx_compare v0.17.0- install ppx_derivers 1.2.1- install ppx_deriving 6.0.3- install ppx_enumerate v0.17.0- install ppx_expect v0.17.2- install ppx_fields_conv v0.17.0- install ppx_globalize v0.17.0- install ppx_hash v0.17.0- install ppx_here v0.17.0- install ppx_inline_test v0.17.0- install ppx_minidebug 2.2.0- install ppx_optcomp v0.17.0- install ppx_sexp_conv v0.17.0- install ppx_string v0.17.0- install ppx_variants_conv v0.17.0- install ppxlib 0.35.0- install ppxlib_jane v0.17.2- install printbox 0.12- install printbox-ext-plot 0.12- install printbox-html 0.12- install printbox-md 0.12- install printbox-text 0.12- install ptime 1.2.0- install re 1.12.0- install result 1.5- install saturn_lockfree 0.5.0- install seq base- install sexplib v0.17.0- install sexplib0 v0.17.0- install stdio v0.17.0- install 
stdlib-shims 0.3.0- install thread-local-storage 0.2- install time_now v0.17.0- install topkg 1.0.8- install tyxml 4.6.0- install uucp 16.0.0- install uutf 1.0.4- install variantslib v0.17.0<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>-> retrieved backoff.0.1.1 (cached)-> retrieved astring.0.8.5 (cached)-> retrieved angstrom.0.16.1 (cached)-> retrieved base.v0.17.2 (cached)-> retrieved bigarray-compat.1.1.0 (cached)-> retrieved bigstringaf.0.10.0 (cached)-> retrieved camlp-streams.5.0.1 (cached)-> retrieved cppo.1.8.0 (cached)-> retrieved cmdliner.1.3.0 (cached)-> installed conf-pkg-config.4-> retrieved csexp.1.5.2 (cached)-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached)-> installed conf-libffi.2.0.0-> retrieved fieldslib.v0.17.0 (cached)-> retrieved fmt.0.10.0 (cached)-> retrieved integers.0.7.0 (cached)-> retrieved jane-street-headers.v0.17.0 (cached)-> retrieved jst-config.v0.17.0 (cached)-> retrieved logs.0.8.0 (cached)-> retrieved mtime.2.1.0 (cached)-> retrieved mdx.2.5.0 (cached)-> retrieved multicore-magic.2.3.1 (cached)-> retrieved num.1.5-1 (cached)-> retrieved ocaml-compiler-libs.v0.17.0 (cached)-> retrieved ocaml-syntax-shims.1.0.0 (cached)-> retrieved ocaml-version.4.0.0 (cached)-> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached)-> retrieved ocamlbuild.0.16.1 (cached)-> retrieved ocamlfind.1.9.8 (cached)-> retrieved dune.3.19.0, dune-configurator.3.19.0 (cached)-> retrieved parsexp.v0.17.0 (cached)-> retrieved pprint.20230830 (cached)-> retrieved ppx_assert.v0.17.0 (cached)-> retrieved ppx_base.v0.17.0 (cached)-> retrieved ppx_cold.v0.17.0 (cached)-> retrieved ppx_derivers.1.2.1 (cached)-> retrieved ppx_compare.v0.17.0 (cached)-> retrieved ppx_enumerate.v0.17.0 (cached)-> retrieved ppx_deriving.6.0.3 (cached)-> retrieved ppx_expect.v0.17.2 (cached)-> installed cmdliner.1.3.0-> installed num.1.5-1-> retrieved ppx_fields_conv.v0.17.0 (cached)-> retrieved ppx_globalize.v0.17.0 (cached)-> retrieved ppx_hash.v0.17.0 (cached)-> retrieved ppx_here.v0.17.0 (cached)-> retrieved ppx_inline_test.v0.17.0 (cached)-> retrieved ppx_optcomp.v0.17.0 (cached)-> retrieved ppx_sexp_conv.v0.17.0 (cached)-> retrieved ppx_string.v0.17.0 (cached)-> retrieved ppx_variants_conv.v0.17.0 (cached)-> retrieved ppxlib_jane.v0.17.2 (cached)-> retrieved ppx_minidebug.2.2.0 (cached)-> retrieved ptime.1.2.0 (cached)-> retrieved re.1.12.0 (cached)-> retrieved ppxlib.0.35.0 (cached)-> retrieved result.1.5 (cached)-> retrieved seq.base (cached)-> installed seq.base-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached)-> retrieved saturn_lockfree.0.5.0 (cached)-> retrieved sexplib.v0.17.0 (cached)-> retrieved sexplib0.v0.17.0 (cached)-> retrieved stdio.v0.17.0 (cached)-> retrieved stdlib-shims.0.3.0 (cached)-> retrieved thread-local-storage.0.2 (cached)-> retrieved time_now.v0.17.0 (cached)-> retrieved topkg.1.0.8 (cached)-> retrieved tyxml.4.6.0 (cached)-> retrieved uutf.1.0.4 (cached)-> retrieved variantslib.v0.17.0 (cached)-> retrieved uucp.16.0.0 (cached)-> installed ocamlfind.1.9.8-> installed ocamlbuild.0.16.1-> installed topkg.1.0.8-> installed uutf.1.0.4-> installed mtime.2.1.0-> installed fmt.0.10.0-> installed ptime.1.2.0-> installed astring.0.8.5-> installed logs.0.8.0-> installed dune.3.19.0-> installed jane-street-headers.v0.17.0-> installed ppx_derivers.1.2.1-> installed csexp.1.5.2-> installed backoff.0.1.1-> installed bigarray-compat.1.1.0-> installed camlp-streams.5.0.1-> installed 
cppo.1.8.0-> installed multicore-magic.2.3.1-> installed ocaml-compiler-libs.v0.17.0-> installed ocaml-syntax-shims.1.0.0-> installed ocaml-version.4.0.0-> installed ocaml_intrinsics_kernel.v0.17.1-> installed pprint.20230830-> installed printbox.0.12-> installed re.1.12.0-> installed result.1.5-> installed sexplib0.v0.17.0-> installed stdlib-shims.0.3.0-> installed thread-local-storage.0.2-> installed saturn_lockfree.0.5.0-> installed integers.0.7.0-> installed parsexp.v0.17.0-> installed dune-configurator.3.19.0-> installed bigstringaf.0.10.0-> installed mdx.2.5.0-> installed sexplib.v0.17.0-> installed angstrom.0.16.1-> installed tyxml.4.6.0-> installed printbox-html.0.12-> installed ctypes.0.23.0-> installed base.v0.17.2-> installed variantslib.v0.17.0-> installed fieldslib.v0.17.0-> installed stdio.v0.17.0-> installed ctypes-foreign.0.23.0-> installed uucp.16.0.0-> installed printbox-text.0.12-> installed printbox-md.0.12-> installed printbox-ext-plot.0.12-> installed ppxlib.0.35.0-> installed ppxlib_jane.v0.17.2-> installed ppx_optcomp.v0.17.0-> installed ppx_cold.v0.17.0-> installed ppx_here.v0.17.0-> installed ppx_variants_conv.v0.17.0-> installed ppx_fields_conv.v0.17.0-> installed ppx_enumerate.v0.17.0-> installed ppx_globalize.v0.17.0-> installed ppx_deriving.6.0.3-> installed ppx_compare.v0.17.0-> installed ppx_sexp_conv.v0.17.0-> installed ppx_hash.v0.17.0-> installed ppx_assert.v0.17.0-> installed ppx_minidebug.2.2.0-> installed ppx_base.v0.17.0-> installed jst-config.v0.17.0-> installed ppx_string.v0.17.0-> installed time_now.v0.17.0-> installed ppx_inline_test.v0.17.0-> installed ppx_expect.v0.17.2Done.# To update the current shell environment, run: eval $(opam env)2025-05-22 20:02.29 ---> saved as "760ccb2110108059af889004304c5723c04573f9ba33eb15f0891912c41163cc"/src: (copy (src .) (dst /src))2025-05-22 20:02.29 ---> saved as "3a93342eae6add98b548cfeca0a3ddcf0aeac3051b392255ac4c1fdd0fa4cd57"/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))(cd _build/default/test/config && ../../arrayjit/bin/read_config.exe --read=backend)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/config/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config fileWrote value of 'backend' to ocannl_backend.txt(cd _build/default/test_ppx && ./test_ppx_op.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! 
Reading configuration defaults from /src/_build/.sandbox/afefe02d281d2e1142725f3f51d6dd05/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/dune", lines 30-40, characters 0-281:
30 | (rule
31 | (alias runtest)
32 | (target
33 | (dir log_files))
34 | (action
35 | (run
36 | %{dep:micrograd_demo_logging.exe}
37 | "--ocannl_debug_backend=text"
38 | "--ocannl_log_file_stem=micrograd_demo_logging"
39 | "--ocannl_log_main_domain_to_stdout=false"
40 | "--ocannl_debug_log_to_stream_files=true")))
(cd _build/default/test && ./micrograd_demo_logging.exe --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_stream_files=true)
Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_cd_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Found true, commandline --ocannl_debug_log_to_stream_files=true
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Fatal error: exception File "src/printbox-text/PrintBox_text.ml", line 212, characters 6-12: Assertion failed
Raised at PrintBox_text.Output.Make_out.to_buf_aux_ in file "src/printbox-text/PrintBox_text.ml", line 212, characters 6-50
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 19-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from PrintBox_text.Output.Make_out.render in file "src/printbox-text/PrintBox_text.ml", line 242, characters 14-64
Called from PrintBox_text.output in file "src/printbox-text/PrintBox_text.ml", line 851, characters 2-31
Called from Minidebug_runtime.PrintBox.output_box in file "minidebug_runtime.ml", line 1527, characters 19-59
Called from Minidebug_runtime.PrintBox.close_log_impl.close_tree in file "minidebug_runtime.ml", line 1572, characters 6-38
Called from Backends.Add_buffer_retrieval_and_syncing.sync_routine in file "arrayjit/lib/backends.ml", lines 144-172, characters 31-82
Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 454-455, characters 4-92
Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 441-455, characters 23-92
Called from Dune__exe__Micrograd_demo_logging in file "test/micrograd_demo_logging.ml", line 34, characters 13-77
(cd _build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! 
Reading configuration defaults from /src/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config fileFile "test/micrograd_demo.ml", line 1, characters 0-0:/usr/sbin/git --no-pager diff --no-index --color=always -u _build/default/test/micrograd_demo.ml _build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/micrograd_demo.ml.correcteddiff --git a/_build/default/test/micrograd_demo.ml b/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/micrograd_demo.ml.correctedindex 77e46c6..ab81526 100644--- a/_build/default/test/micrograd_demo.ml+++ b/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/micrograd_demo.ml.corrected@@ -52,15 +52,14 @@ let%expect_test "Micrograd README basic example" =│├┼───────┤ ││││ -4.00 │ ││└┴───────┘ │- └─────────────────┘- ┌────────────────────────┐- │[0]: a shape 0:1 grad_a│- │┌┬─────────┐ │- │││axis 0 │ │- │├┼─────────┤ │- │││ 1.38e+2 │ │- │└┴─────────┘ │- └────────────────────────┘+ └─────────────────┘┌────────────────────────┐+ │[0]: a shape 0:1 grad_a│+ │┌┬─────────┐ │+ │││axis 0 │ │+ │├┼─────────┤ │+ │││ 1.38e+2 │ │+ │└┴─────────┘ │+ └────────────────────────┘|}];Tensor.print ~with_code:false ~with_grad:true `Default b;[%expect@@ -72,15 +71,14 @@ let%expect_test "Micrograd README basic example" =│├┼──────┤ ││││ 2.00 │ ││└┴──────┘ │- └─────────────────┘- ┌────────────────────────┐- │[2]: b shape 0:1 grad_b│- │┌┬─────────┐ │- │││axis 0 │ │- │├┼─────────┤ │- │││ 6.45e+2 │ │- │└┴─────────┘ │- └────────────────────────┘+ └─────────────────┘┌────────────────────────┐+ │[2]: b shape 0:1 grad_b│+ │┌┬─────────┐ │+ │││axis 0 │ │+ │├┼─────────┤ │+ │││ 6.45e+2 │ │+ │└┴─────────┘ │+ └────────────────────────┘|}]let%expect_test "Micrograd half-moons example" =File "test/hello_world_op.ml", line 1, characters 0-0:/usr/sbin/git --no-pager diff --no-index --color=always -u _build/default/test/hello_world_op.ml _build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/hello_world_op.ml.correcteddiff --git a/_build/default/test/hello_world_op.ml b/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/hello_world_op.ml.correctedindex ba9d7ef..6bfa309 100644--- a/_build/default/test/hello_world_op.ml+++ b/_build/.sandbox/89014c81ba3ae9bf3da0088099454939/default/test/hello_world_op.ml.corrected@@ -102,36 +102,46 @@ let%expect_test "Print constant tensor" =let%op hey = [ (1, 2, 3); (4, 5, 6) ] inTrain.forward_and_forget backend ctx hey;Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey;- [%expect {| [1.00, 2.00, 3.00; 4.00, 5.00, 6.00] |}];+ [%expect {|+ [0]: [ 1.00 , 2.00 , 3.00 ; 4.00 , 5.00 , 6.00 ]_hey shape 1:3->0:2 [+ 1.00 , 2.00 , 3.00+ ; 4.00 , 5.00 , 6.00+ ]+ |}];Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;[%expect{|- ┌─────────────────────────────────────────────────────────────┐- │[0]: [1.00, 2.00, 3.00; 4.00, 5.00, 6.00]_hey shape 1:3->0:2 │- │┌──────┬──────────────────┐ │- ││ │axis 1 │ │- │├──────┼──────────────────┤ │- ││axis 0│ 1.00 2.00 3.00 │ │- ││ │ 4.00 5.00 6.00 │ │- │└──────┴──────────────────┘ │- └─────────────────────────────────────────────────────────────┘+ ┌────────────────────────────────────────────────────────────────────────┐+ │[0]: [ 1.00 , 2.00 , 3.00 ; 4.00 , 5.00 , 6.00 ]_hey shape 1:3->0:2 │+ │┌──────┬──────────────────┐ │+ ││ │axis 1 │ │+ │├──────┼──────────────────┤ │+ ││axis 0│ 1.00 2.00 3.00 │ │+ ││ │ 4.00 5.00 6.00 │ │+ │└──────┴──────────────────┘ │+ 
└────────────────────────────────────────────────────────────────────────┘|}];let%op hoo = [| [ 1; 2; 3 ]; [ 4; 5; 6 ] |] inTrain.forward_and_forget backend ctx hoo;Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo;- [%expect {| [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|] |}];+ [%expect {|+ [1]: [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]_hoo shape 0:2|1:3 [|+ [ 1.00 ; 2.00 ; 3.00 ]+ ; [ 4.00 ; 5.00 ; 6.00 ]+ |]+ |}];Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo;[%expect{|- ┌──────────────────────────────────────────────────────────────────┐- │[1]: [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|]_hoo shape 0:2|1:3 │- │┌──────┬──────────────────┐ │- ││ │axis 1 │ │- │├──────┼──────────────────┤ │- ││axis 0│ 1.00 2.00 3.00 │ │- ││ │ 4.00 5.00 6.00 │ │- │└──────┴──────────────────┘ │- └──────────────────────────────────────────────────────────────────┘+ ┌─────────────────────────────────────────────────────────────────────────────┐+ │[1]: [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]_hoo shape 0:2|1:3 │+ │┌──────┬──────────────────┐ │+ ││ │axis 1 │ │+ │├──────┼──────────────────┤ │+ ││axis 0│ 1.00 2.00 3.00 │ │+ ││ │ 4.00 5.00 6.00 │ │+ │└──────┴──────────────────┘ │+ └─────────────────────────────────────────────────────────────────────────────┘|}];let%op hey2 =[@@ -145,10 +155,12 @@ let%expect_test "Print constant tensor" =Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey2;[%expect{|- [(1.00, 2.00, 3.00), (4.00, 5.00, 6.00);- (7.00, 8.00, 9.00), (10.00, 11.00, 12.00);- (13.00, 14.00, 15.00), (16.00, 17.00, 18.00);- (19.00, 20.00, 21.00), (22.00, 23.00, 24.00)]+ [2]: c4x2x3_hey2 shape 1:2,2:3->0:4 [+ ( 1.00 , 2.00 , 3.00 ) , ( 4.00 , 5.00 , 6.00 )+ ; ( 7.00 , 8.00 , 9.00 ) , ( 10.00 , 11.00 , 12.00 )+ ; ( 13.00 , 14.00 , 15.00 ) , ( 16.00 , 17.00 , 18.00 )+ ; ( 19.00 , 20.00 , 21.00 ) , ( 22.00 , 23.00 , 24.00 )+ ]|}];Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2;[%expect@@ -178,10 +190,12 @@ let%expect_test "Print constant tensor" =Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo2;[%expect{|- [|[[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]];- [[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]];- [[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]];- [[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]]|]+ [3]: c4x2x3_hoo2 shape 0:4|1:2,2:3 [|+ [ [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] ]+ ; [ [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] ]+ ; [ [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] ]+ ; [ [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] ]+ |]|}];Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo2;[%expect@@ -209,10 +223,12 @@ let%expect_test "Print constant tensor" =Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo;[%expect{|- [|[|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|];- [|[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]|];- [|[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]|];- [|[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]|]|]+ [4]: c4x2x3_heyhoo shape 0:4,1:2|2:3 [|+ [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]+ ; [| [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] |]+ ; [| [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] |]+ ; [| [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] |]+ |]|}];Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo;[%expect@@ -240,15 +256,24 @@ let%expect_test "Print constant tensor" =Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo2;[%expect{|- [|- [|[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];- [[4.00; 
34.00]; [5.00; 35.00]; [6.00; 36.00]]|];- [|[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];- [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]|];- [|[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];- [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]|];- [|[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];- [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]|]|]+ [5]: c4x2x3x2_heyhoo2 shape 0:4,1:2|2:3,3:2 [|+ [|+ [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]+ ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]+ |]+ ; [|+ [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]+ ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]+ |]+ ; [|+ [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]+ ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]+ |]+ ; [|+ [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]+ ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]+ |]+ |]|}];Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo2;[%expect@@ -293,17 +318,28 @@ let%expect_test "Print constant tensor" =Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo3;[%expect{|- [|+ [6]: c2x2x2x3x2_heyhoo3 shape 0:2,1:2|2:2,3:3,4:2 [|[|- [[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];- [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]];- [[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];- [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]]|];- [|- [[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];- [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]];- [[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];- [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]]|]|]+ [+ [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]+ ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]+ ]+ ; [+ [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]+ ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]+ ]+ |]+ ; [|+ [+ [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]+ ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]+ ]+ ; [+ [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]+ ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]+ ]+ |]+ |]|}];Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo3;[%expect@@ -353,17 +389,28 @@ let%expect_test "Print constant tensor" =Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo4;[%expect{|- [|- [- [[1.00, 31.00; 2.00, 32.00; 3.00, 33.00];- [4.00, 34.00; 5.00, 35.00; 6.00, 36.00]];- [[7.00, 37.00; 8.00, 38.00; 9.00, 39.00];- [10.00, 40.00; 11.00, 41.00; 12.00, 42.00]]];+ [7]: c2x2x2x3x2_heyhoo4 shape 0:2|4:2->1:2,2:2,3:3 [|[- [[13.00, 43.00; 14.00, 44.00; 15.00, 45.00];- [16.00, 46.00; 17.00, 47.00; 18.00, 48.00]];- [[19.00, 49.00; 20.00, 50.00; 21.00, 51.00];- [22.00, 52.00; 23.00, 53.00; 24.00, 54.00]]]|]+ [+ [ 1.00 , 31.00 ; 2.00 , 32.00 ; 3.00 , 33.00 ]+ ; [ 4.00 , 34.00 ; 5.00 , 35.00 ; 6.00 , 36.00 ]+ ]+ ; [+ [ 7.00 , 37.00 ; 8.00 , 38.00 ; 9.00 , 39.00 ]+ ; [ 10.00 , 40.00 ; 11.00 , 41.00 ; 12.00 , 42.00 ]+ ]+ ]+ ; [+ [+ [ 13.00 , 43.00 ; 14.00 , 44.00 ; 15.00 , 45.00 ]+ ; [ 16.00 , 46.00 ; 17.00 , 47.00 ; 18.00 , 48.00 ]+ ]+ ; [+ [ 19.00 , 49.00 ; 20.00 , 50.00 ; 21.00 , 51.00 ]+ ; [ 22.00 , 52.00 ; 23.00 , 53.00 ; 24.00 , 54.00 ]+ ]+ ]+ |]|}];Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo4;[%expect@@ -462,8 +509,29 @@ let%expect_test "Big matrix" =Tensor.print ~with_code:false ~with_grad:false `Inline zero_to_twenty;[%expect{|- [0.00; 1.00; 2.00; 3.00; 4.00; 5.00; 6.00; 7.00; 8.00; 
9.00; 10.00; 11.00;- 12.00; 13.00; 14.00; 15.00; 16.00; 17.00; 18.00; 19.00; 20.00]+ [2]: 0...20 shape 0:21 [+ 0.00+ ; 1.00+ ; 2.00+ ; 3.00+ ; 4.00+ ; 5.00+ ; 6.00+ ; 7.00+ ; 8.00+ ; 9.00+ ; 10.00+ ; 11.00+ ; 12.00+ ; 13.00+ ; 14.00+ ; 15.00+ ; 16.00+ ; 17.00+ ; 18.00+ ; 19.00+ ; 20.00+ ]|}];Tensor.print ~with_code:false ~with_grad:false `Default zero_to_twenty;[%expect(cd _build/default/test && ./moons_demo_parallel_run.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file("Set log_level to" 1)└─{orphaned from #2}Retrieving commandline, environment, or config file variable ocannl_backendFound multicore_cc, in the config fileProperties of devices:(multicore_devices(device ((device_name CPU) (device_ordinal 0) (num_domains 128))))@!Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformityFound true, in the config fileRetrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_filesNot found, using default falseRetrieving commandline, environment, or config file variable ocannl_ll_ident_styleNot found, using default heuristicRetrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_levelNot found, using default 3Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_commandNot found, using default gccRetrieving commandline, environment, or config file variable ocannl_never_capture_stdoutNot found, using default falseBatch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407Batch=959, step=960, lr=0.196250, batch loss=0.245136, epoch loss=44.035543Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025Batch=239, step=1440, lr=0.194250, batch loss=0.347897, epoch loss=1.058921Batch=299, step=1500, lr=0.194000, batch loss=0.247348, epoch 
loss=1.306269Batch=359, step=1560, lr=0.193750, batch loss=0.316559, epoch loss=1.622828Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.935563Batch=479, step=1680, lr=0.193250, batch loss=0.276268, epoch loss=2.211831Batch=539, step=1740, lr=0.193000, batch loss=0.209826, epoch loss=2.421657Batch=599, step=1800, lr=0.192750, batch loss=0.250384, epoch loss=2.672042Batch=659, step=1860, lr=0.192500, batch loss=0.367201, epoch loss=3.039243Batch=719, step=1920, lr=0.192250, batch loss=0.354917, epoch loss=3.394160Batch=779, step=1980, lr=0.192000, batch loss=0.381382, epoch loss=3.775542Batch=839, step=2040, lr=0.191750, batch loss=0.339637, epoch loss=4.115179Batch=899, step=2100, lr=0.191500, batch loss=0.295234, epoch loss=4.410413Batch=959, step=2160, lr=0.191250, batch loss=0.214033, epoch loss=4.624446Batch=1019, step=2220, lr=0.191000, batch loss=0.330972, epoch loss=4.955419Batch=1079, step=2280, lr=0.190750, batch loss=0.208236, epoch loss=5.163654Batch=1139, step=2340, lr=0.190500, batch loss=0.278374, epoch loss=5.442028Batch=1199, step=2400, lr=0.190250, batch loss=0.220793, epoch loss=5.662821Epoch=1, step=2400, lr=0.190250, epoch loss=5.662821Batch=59, step=2460, lr=0.190000, batch loss=0.230363, epoch loss=0.230363Batch=119, step=2520, lr=0.189750, batch loss=0.195962, epoch loss=0.426325Batch=179, step=2580, lr=0.189500, batch loss=0.221156, epoch loss=0.647481Batch=239, step=2640, lr=0.189250, batch loss=0.328098, epoch loss=0.975578Batch=299, step=2700, lr=0.189000, batch loss=0.202947, epoch loss=1.178525Batch=359, step=2760, lr=0.188750, batch loss=0.289890, epoch loss=1.468415Batch=419, step=2820, lr=0.188500, batch loss=0.281744, epoch loss=1.750160Batch=479, step=2880, lr=0.188250, batch loss=0.264844, epoch loss=2.015004Batch=539, step=2940, lr=0.188000, batch loss=0.203798, epoch loss=2.218802Batch=599, step=3000, lr=0.187750, batch loss=0.248079, epoch loss=2.466881Batch=659, step=3060, lr=0.187500, batch loss=0.345056, epoch loss=2.811937Batch=719, step=3120, lr=0.187250, batch loss=0.343542, epoch loss=3.155479Batch=779, step=3180, lr=0.187000, batch loss=0.366989, epoch loss=3.522468Batch=839, step=3240, lr=0.186750, batch loss=0.321779, epoch loss=3.844248Batch=899, step=3300, lr=0.186500, batch loss=0.283865, epoch loss=4.128112Batch=959, step=3360, lr=0.186250, batch loss=0.214411, epoch loss=4.342523Batch=1019, step=3420, lr=0.186000, batch loss=0.306400, epoch loss=4.648923Batch=1079, step=3480, lr=0.185750, batch loss=0.177313, epoch loss=4.826236Batch=1139, step=3540, lr=0.185500, batch loss=0.235576, epoch loss=5.061812Batch=1199, step=3600, lr=0.185250, batch loss=0.197911, epoch loss=5.259723Epoch=2, step=3600, lr=0.185250, epoch loss=5.259723Batch=59, step=3660, lr=0.185000, batch loss=0.226539, epoch loss=0.226539Batch=119, step=3720, lr=0.184750, batch loss=0.191802, epoch loss=0.418341Batch=179, step=3780, lr=0.184500, batch loss=0.210712, epoch loss=0.629053Batch=239, step=3840, lr=0.184250, batch loss=0.314104, epoch loss=0.943157Batch=299, step=3900, lr=0.184000, batch loss=0.206011, epoch loss=1.149168Batch=359, step=3960, lr=0.183750, batch loss=0.291253, epoch loss=1.440420Batch=419, step=4020, lr=0.183500, batch loss=0.297434, epoch loss=1.737854Batch=479, step=4080, lr=0.183250, batch loss=0.260511, epoch loss=1.998365Batch=539, step=4140, lr=0.183000, batch loss=0.195229, epoch loss=2.193593Batch=599, step=4200, lr=0.182750, batch loss=0.230395, epoch loss=2.423989Batch=659, step=4260, lr=0.182500, batch loss=0.337132, 
epoch loss=2.761121Batch=719, step=4320, lr=0.182250, batch loss=0.347122, epoch loss=3.108243Batch=779, step=4380, lr=0.182000, batch loss=0.346320, epoch loss=3.454563Batch=839, step=4440, lr=0.181750, batch loss=0.317770, epoch loss=3.772333Batch=899, step=4500, lr=0.181500, batch loss=0.283906, epoch loss=4.056239Batch=959, step=4560, lr=0.181250, batch loss=0.238371, epoch loss=4.294610Batch=1019, step=4620, lr=0.181000, batch loss=0.336891, epoch loss=4.631501Batch=1079, step=4680, lr=0.180750, batch loss=0.208592, epoch loss=4.840094Batch=1139, step=4740, lr=0.180500, batch loss=0.249212, epoch loss=5.089306Batch=1199, step=4800, lr=0.180250, batch loss=0.191768, epoch loss=5.281074Epoch=3, step=4800, lr=0.180250, epoch loss=5.281074Batch=59, step=4860, lr=0.180000, batch loss=0.228079, epoch loss=0.228079Batch=119, step=4920, lr=0.179750, batch loss=0.190219, epoch loss=0.418298Batch=179, step=4980, lr=0.179500, batch loss=0.205905, epoch loss=0.624203Batch=239, step=5040, lr=0.179250, batch loss=0.309067, epoch loss=0.933269Batch=299, step=5100, lr=0.179000, batch loss=0.204593, epoch loss=1.137862Batch=359, step=5160, lr=0.178750, batch loss=0.271139, epoch loss=1.409001Batch=419, step=5220, lr=0.178500, batch loss=0.264055, epoch loss=1.673056Batch=479, step=5280, lr=0.178250, batch loss=0.239692, epoch loss=1.912748Batch=539, step=5340, lr=0.178000, batch loss=0.189445, epoch loss=2.102194Batch=599, step=5400, lr=0.177750, batch loss=0.231019, epoch loss=2.333213Batch=659, step=5460, lr=0.177500, batch loss=0.323592, epoch loss=2.656805Batch=719, step=5520, lr=0.177250, batch loss=0.325867, epoch loss=2.982672Batch=779, step=5580, lr=0.177000, batch loss=0.343179, epoch loss=3.325851Batch=839, step=5640, lr=0.176750, batch loss=0.309345, epoch loss=3.635195Batch=899, step=5700, lr=0.176500, batch loss=0.273171, epoch loss=3.908366Batch=959, step=5760, lr=0.176250, batch loss=0.214921, epoch loss=4.123287Batch=1019, step=5820, lr=0.176000, batch loss=0.339150, epoch loss=4.462436Batch=1079, step=5880, lr=0.175750, batch loss=0.207828, epoch loss=4.670264Batch=1139, step=5940, lr=0.175500, batch loss=0.240055, epoch loss=4.910319Batch=1199, step=6000, lr=0.175250, batch loss=0.186862, epoch loss=5.097180Epoch=4, step=6000, lr=0.175250, epoch loss=5.097180Batch=59, step=6060, lr=0.175000, batch loss=0.230529, epoch loss=0.230529Batch=119, step=6120, lr=0.174750, batch loss=0.194291, epoch loss=0.424820Batch=179, step=6180, lr=0.174500, batch loss=0.201541, epoch loss=0.626361Batch=239, step=6240, lr=0.174250, batch loss=0.302380, epoch loss=0.928741Batch=299, step=6300, lr=0.174000, batch loss=0.203930, epoch loss=1.132671Batch=359, step=6360, lr=0.173750, batch loss=0.266115, epoch loss=1.398786Batch=419, step=6420, lr=0.173500, batch loss=0.265282, epoch loss=1.664067Batch=479, step=6480, lr=0.173250, batch loss=0.243319, epoch loss=1.907387Batch=539, step=6540, lr=0.173000, batch loss=0.192969, epoch loss=2.100355Batch=599, step=6600, lr=0.172750, batch loss=0.234500, epoch loss=2.334855Batch=659, step=6660, lr=0.172500, batch loss=0.312107, epoch loss=2.646963Batch=719, step=6720, lr=0.172250, batch loss=0.314221, epoch loss=2.961184Batch=779, step=6780, lr=0.172000, batch loss=0.333223, epoch loss=3.294407Batch=839, step=6840, lr=0.171750, batch loss=0.303757, epoch loss=3.598164Batch=899, step=6900, lr=0.171500, batch loss=0.268468, epoch loss=3.866633Batch=959, step=6960, lr=0.171250, batch loss=0.211039, epoch loss=4.077671Batch=1019, step=7020, lr=0.171000, batch 
loss=0.330462, epoch loss=4.408133
Batch=1079, step=7080, lr=0.170750, batch loss=0.180866, epoch loss=4.588999
Batch=1139, step=7140, lr=0.170500, batch loss=0.216313, epoch loss=4.805312
Batch=1199, step=7200, lr=0.170250, batch loss=0.181911, epoch loss=4.987223
Epoch=5, step=7200, lr=0.170250, epoch loss=4.987223
Batch=59, step=7260, lr=0.170000, batch loss=0.232877, epoch loss=0.232877
Batch=119, step=7320, lr=0.169750, batch loss=0.184424, epoch loss=0.417301
Batch=179, step=7380, lr=0.169500, batch loss=0.196292, epoch loss=0.613593
Batch=239, step=7440, lr=0.169250, batch loss=0.290823, epoch loss=0.904416
Batch=299, step=7500, lr=0.169000, batch loss=0.200837, epoch loss=1.105253
Batch=359, step=7560, lr=0.168750, batch loss=0.258435, epoch loss=1.363689
Batch=419, step=7620, lr=0.168500, batch loss=0.256808, epoch loss=1.620497
Batch=479, step=7680, lr=0.168250, batch loss=0.235998, epoch loss=1.856495
Batch=539, step=7740, lr=0.168000, batch loss=0.187895, epoch loss=2.044390
Batch=599, step=7800, lr=0.167750, batch loss=0.223924, epoch loss=2.268314
Batch=659, step=7860, lr=0.167500, batch loss=0.305915, epoch loss=2.574229
Batch=719, step=7920, lr=0.167250, batch loss=0.309289, epoch loss=2.883518
Batch=779, step=7980, lr=0.167000, batch loss=0.329942, epoch loss=3.213460
Batch=839, step=8040, lr=0.166750, batch loss=0.292425, epoch loss=3.505885
Batch=899, step=8100, lr=0.166500, batch loss=0.261775, epoch loss=3.767660
Batch=959, step=8160, lr=0.166250, batch loss=0.193295, epoch loss=3.960955
Batch=1019, step=8220, lr=0.166000, batch loss=0.314033, epoch loss=4.274988
Batch=1079, step=8280, lr=0.165750, batch loss=0.172099, epoch loss=4.447087
Batch=1139, step=8340, lr=0.165500, batch loss=0.209742, epoch loss=4.656829
Batch=1199, step=8400, lr=0.165250, batch loss=0.178275, epoch loss=4.835103
Epoch=6, step=8400, lr=0.165250, epoch loss=4.835103
Batch=59, step=8460, lr=0.165000, batch loss=0.229725, epoch loss=0.229725
Batch=119, step=8520, lr=0.164750, batch loss=0.175017, epoch loss=0.404742
Batch=179, step=8580, lr=0.164500, batch loss=0.187817, epoch loss=0.592559
Batch=239, step=8640, lr=0.164250, batch loss=0.278203, epoch loss=0.870762
Batch=299, step=8700, lr=0.164000, batch loss=0.191994, epoch loss=1.062755
Batch=359, step=8760, lr=0.163750, batch loss=0.248632, epoch loss=1.311388
Batch=419, step=8820, lr=0.163500, batch loss=0.245601, epoch loss=1.556988
Batch=479, step=8880, lr=0.163250, batch loss=0.228591, epoch loss=1.785580
Batch=539, step=8940, lr=0.163000, batch loss=0.178132, epoch loss=1.963712
Batch=599, step=9000, lr=0.162750, batch loss=0.217388, epoch loss=2.181101
Batch=659, step=9060, lr=0.162500, batch loss=0.294814, epoch loss=2.475915
Batch=719, step=9120, lr=0.162250, batch loss=0.296433, epoch loss=2.772348
Batch=779, step=9180, lr=0.162000, batch loss=0.316728, epoch loss=3.089075
Batch=839, step=9240, lr=0.161750, batch loss=0.287243, epoch loss=3.376318
Batch=899, step=9300, lr=0.161500, batch loss=0.251060, epoch loss=3.627378
Batch=959, step=9360, lr=0.161250, batch loss=0.190532, epoch loss=3.817911
Batch=1019, step=9420, lr=0.161000, batch loss=0.311728, epoch loss=4.129639
Batch=1079, step=9480, lr=0.160750, batch loss=0.191595, epoch loss=4.321234
Batch=1139, step=9540, lr=0.160500, batch loss=0.215772, epoch loss=4.537006
Batch=1199, step=9600, lr=0.160250, batch loss=0.165620, epoch loss=4.702626
Epoch=7, step=9600, lr=0.160250, epoch loss=4.702626
Batch=59, step=9660, lr=0.160000, batch loss=0.197217, epoch loss=0.197217
Batch=119, step=9720, lr=0.159750, batch loss=0.165467, epoch loss=0.362684
Batch=179, step=9780, lr=0.159500, batch loss=0.179286, epoch loss=0.541970
Batch=239, step=9840, lr=0.159250, batch loss=0.263837, epoch loss=0.805807
Batch=299, step=9900, lr=0.159000, batch loss=0.182187, epoch loss=0.987994
Batch=359, step=9960, lr=0.158750, batch loss=0.240842, epoch loss=1.228836
Batch=419, step=10020, lr=0.158500, batch loss=0.232979, epoch loss=1.461815
Batch=479, step=10080, lr=0.158250, batch loss=0.213194, epoch loss=1.675009
Batch=539, step=10140, lr=0.158000, batch loss=0.170694, epoch loss=1.845703
Batch=599, step=10200, lr=0.157750, batch loss=0.200247, epoch loss=2.045950
Batch=659, step=10260, lr=0.157500, batch loss=0.283032, epoch loss=2.328982
Batch=719, step=10320, lr=0.157250, batch loss=0.288754, epoch loss=2.617735
Batch=779, step=10380, lr=0.157000, batch loss=0.296880, epoch loss=2.914615
Batch=839, step=10440, lr=0.156750, batch loss=0.267657, epoch loss=3.182272
Batch=899, step=10500, lr=0.156500, batch loss=0.242699, epoch loss=3.424972
Batch=959, step=10560, lr=0.156250, batch loss=0.198668, epoch loss=3.623639
Batch=1019, step=10620, lr=0.156000, batch loss=0.295119, epoch loss=3.918758
Batch=1079, step=10680, lr=0.155750, batch loss=0.178662, epoch loss=4.097421
Batch=1139, step=10740, lr=0.155500, batch loss=0.205425, epoch loss=4.302846
Batch=1199, step=10800, lr=0.155250, batch loss=0.156138, epoch loss=4.458984
Epoch=8, step=10800, lr=0.155250, epoch loss=4.458984
Batch=59, step=10860, lr=0.155000, batch loss=0.177430, epoch loss=0.177430
Batch=119, step=10920, lr=0.154750, batch loss=0.152366, epoch loss=0.329795
Batch=179, step=10980, lr=0.154500, batch loss=0.167114, epoch loss=0.496909
Batch=239, step=11040, lr=0.154250, batch loss=0.242622, epoch loss=0.739531
Batch=299, step=11100, lr=0.154000, batch loss=0.169984, epoch loss=0.909515
Batch=359, step=11160, lr=0.153750, batch loss=0.222140, epoch loss=1.131654
Batch=419, step=11220, lr=0.153500, batch loss=0.229250, epoch loss=1.360905
Batch=479, step=11280, lr=0.153250, batch loss=0.202871, epoch loss=1.563775
Batch=539, step=11340, lr=0.153000, batch loss=0.159118, epoch loss=1.722894
Batch=599, step=11400, lr=0.152750, batch loss=0.178498, epoch loss=1.901392
Batch=659, step=11460, lr=0.152500, batch loss=0.264724, epoch loss=2.166116
Batch=719, step=11520, lr=0.152250, batch loss=0.256959, epoch loss=2.423075
Batch=779, step=11580, lr=0.152000, batch loss=0.273281, epoch loss=2.696355
Batch=839, step=11640, lr=0.151750, batch loss=0.255783, epoch loss=2.952138
Batch=899, step=11700, lr=0.151500, batch loss=0.212942, epoch loss=3.165080
Batch=959, step=11760, lr=0.151250, batch loss=0.168195, epoch loss=3.333275
Batch=1019, step=11820, lr=0.151000, batch loss=0.266032, epoch loss=3.599307
Batch=1079, step=11880, lr=0.150750, batch loss=0.149776, epoch loss=3.749083
Batch=1139, step=11940, lr=0.150500, batch loss=0.185522, epoch loss=3.934605
Batch=1199, step=12000, lr=0.150250, batch loss=0.139483, epoch loss=4.074088
Epoch=9, step=12000, lr=0.150250, epoch loss=4.074088
Batch=59, step=12060, lr=0.150000, batch loss=0.158027, epoch loss=0.158027
Batch=119, step=12120, lr=0.149750, batch loss=0.128834, epoch loss=0.286861
Batch=179, step=12180, lr=0.149500, batch loss=0.150389, epoch loss=0.437250
Batch=239, step=12240, lr=0.149250, batch loss=0.223284, epoch loss=0.660534
Batch=299, step=12300, lr=0.149000, batch loss=0.142595, epoch loss=0.803130
Batch=359, step=12360, lr=0.148750, batch loss=0.195474, epoch loss=0.998603
Batch=419, step=12420, lr=0.148500, batch loss=0.206189, epoch loss=1.204793
Batch=479, step=12480, lr=0.148250, batch loss=0.178781, epoch loss=1.383574
Batch=539, step=12540, lr=0.148000, batch loss=0.142576, epoch loss=1.526150
Batch=599, step=12600, lr=0.147750, batch loss=0.150450, epoch loss=1.676600
Batch=659, step=12660, lr=0.147500, batch loss=0.224849, epoch loss=1.901449
Batch=719, step=12720, lr=0.147250, batch loss=0.235381, epoch loss=2.136830
Batch=779, step=12780, lr=0.147000, batch loss=0.252610, epoch loss=2.389441
Batch=839, step=12840, lr=0.146750, batch loss=0.225004, epoch loss=2.614445
Batch=899, step=12900, lr=0.146500, batch loss=0.185276, epoch loss=2.799721
Batch=959, step=12960, lr=0.146250, batch loss=0.149057, epoch loss=2.948777
Batch=1019, step=13020, lr=0.146000, batch loss=0.267918, epoch loss=3.216695
Batch=1079, step=13080, lr=0.145750, batch loss=0.115425, epoch loss=3.332120
Batch=1139, step=13140, lr=0.145500, batch loss=0.155344, epoch loss=3.487464
Batch=1199, step=13200, lr=0.145250, batch loss=0.118495, epoch loss=3.605959
Epoch=10, step=13200, lr=0.145250, epoch loss=3.605959
Batch=59, step=13260, lr=0.145000, batch loss=0.143436, epoch loss=0.143436
Batch=119, step=13320, lr=0.144750, batch loss=0.119649, epoch loss=0.263085
Batch=179, step=13380, lr=0.144500, batch loss=0.127185, epoch loss=0.390270
Batch=239, step=13440, lr=0.144250, batch loss=0.186827, epoch loss=0.577097
Batch=299, step=13500, lr=0.144000, batch loss=0.112687, epoch loss=0.689784
Batch=359, step=13560, lr=0.143750, batch loss=0.161652, epoch loss=0.851436
Batch=419, step=13620, lr=0.143500, batch loss=0.160664, epoch loss=1.012101
Batch=479, step=13680, lr=0.143250, batch loss=0.147468, epoch loss=1.159569
Batch=539, step=13740, lr=0.143000, batch loss=0.118071, epoch loss=1.277640
Batch=599, step=13800, lr=0.142750, batch loss=0.120059, epoch loss=1.397699
Batch=659, step=13860, lr=0.142500, batch loss=0.175818, epoch loss=1.573518
Batch=719, step=13920, lr=0.142250, batch loss=0.172357, epoch loss=1.745874
Batch=779, step=13980, lr=0.142000, batch loss=0.178508, epoch loss=1.924382
Batch=839, step=14040, lr=0.141750, batch loss=0.185974, epoch loss=2.110357
Batch=899, step=14100, lr=0.141500, batch loss=0.178106, epoch loss=2.288463
Batch=959, step=14160, lr=0.141250, batch loss=0.146433, epoch loss=2.434896
Batch=1019, step=14220, lr=0.141000, batch loss=0.329970, epoch loss=2.764867
Batch=1079, step=14280, lr=0.140750, batch loss=0.082738, epoch loss=2.847604
Batch=1139, step=14340, lr=0.140500, batch loss=0.122232, epoch loss=2.969836
Batch=1199, step=14400, lr=0.140250, batch loss=0.091701, epoch loss=3.061537
Epoch=11, step=14400, lr=0.140250, epoch loss=3.061537
Batch=59, step=14460, lr=0.140000, batch loss=0.112924, epoch loss=0.112924
Batch=119, step=14520, lr=0.139750, batch loss=0.102689, epoch loss=0.215612
Batch=179, step=14580, lr=0.139500, batch loss=0.106388, epoch loss=0.322000
Batch=239, step=14640, lr=0.139250, batch loss=0.140570, epoch loss=0.462571
Batch=299, step=14700, lr=0.139000, batch loss=0.080930, epoch loss=0.543500
Batch=359, step=14760, lr=0.138750, batch loss=0.125428, epoch loss=0.668928
Batch=419, step=14820, lr=0.138500, batch loss=0.129207, epoch loss=0.798135
Batch=479, step=14880, lr=0.138250, batch loss=0.101530, epoch loss=0.899665
Batch=539, step=14940, lr=0.138000, batch loss=0.090679, epoch loss=0.990344
Batch=599, step=15000, lr=0.137750, batch loss=0.084902, epoch loss=1.075246
Batch=659, step=15060, lr=0.137500, batch loss=0.129077, epoch loss=1.204323
Batch=719, step=15120, lr=0.137250, batch loss=0.121366, epoch loss=1.325689
Batch=779, step=15180, lr=0.137000, batch loss=0.134335, epoch loss=1.460024
Batch=839, step=15240, lr=0.136750, batch loss=0.174882, epoch loss=1.634906
Batch=899, step=15300, lr=0.136500, batch loss=0.294199, epoch loss=1.929105
Batch=959, step=15360, lr=0.136250, batch loss=0.059187, epoch loss=1.988293
Batch=1019, step=15420, lr=0.136000, batch loss=0.136122, epoch loss=2.124414
Batch=1079, step=15480, lr=0.135750, batch loss=0.049794, epoch loss=2.174209
Batch=1139, step=15540, lr=0.135500, batch loss=0.106083, epoch loss=2.280292
Batch=1199, step=15600, lr=0.135250, batch loss=0.062137, epoch loss=2.342429
Epoch=12, step=15600, lr=0.135250, epoch loss=2.342429
Batch=59, step=15660, lr=0.135000, batch loss=0.077452, epoch loss=0.077452
Batch=119, step=15720, lr=0.134750, batch loss=0.100599, epoch loss=0.178050
Batch=179, step=15780, lr=0.134500, batch loss=0.088174, epoch loss=0.266224
Batch=239, step=15840, lr=0.134250, batch loss=0.090842, epoch loss=0.357067
Batch=299, step=15900, lr=0.134000, batch loss=0.041447, epoch loss=0.398514
Batch=359, step=15960, lr=0.133750, batch loss=0.077481, epoch loss=0.475994
Batch=419, step=16020, lr=0.133500, batch loss=0.083963, epoch loss=0.559957
Batch=479, step=16080, lr=0.133250, batch loss=0.076759, epoch loss=0.636716
Batch=539, step=16140, lr=0.133000, batch loss=0.052804, epoch loss=0.689520
Batch=599, step=16200, lr=0.132750, batch loss=0.097482, epoch loss=0.787003
Batch=659, step=16260, lr=0.132500, batch loss=0.075918, epoch loss=0.862921
Batch=719, step=16320, lr=0.132250, batch loss=0.080522, epoch loss=0.943443
Batch=779, step=16380, lr=0.132000, batch loss=0.103355, epoch loss=1.046798
Batch=839, step=16440, lr=0.131750, batch loss=0.125720, epoch loss=1.172517
Batch=899, step=16500, lr=0.131500, batch loss=0.154837, epoch loss=1.327355
Batch=959, step=16560, lr=0.131250, batch loss=0.044182, epoch loss=1.371536
Batch=1019, step=16620, lr=0.131000, batch loss=0.101953, epoch loss=1.473489
Batch=1079, step=16680, lr=0.130750, batch loss=0.027883, epoch loss=1.501372
Batch=1139, step=16740, lr=0.130500, batch loss=0.057510, epoch loss=1.558883
Batch=1199, step=16800, lr=0.130250, batch loss=0.030008, epoch loss=1.588891
Epoch=13, step=16800, lr=0.130250, epoch loss=1.588891
Batch=59, step=16860, lr=0.130000, batch loss=0.039211, epoch loss=0.039211
Batch=119, step=16920, lr=0.129750, batch loss=0.053098, epoch loss=0.092309
Batch=179, step=16980, lr=0.129500, batch loss=0.051443, epoch loss=0.143753
Batch=239, step=17040, lr=0.129250, batch loss=0.065421, epoch loss=0.209173
Batch=299, step=17100, lr=0.129000, batch loss=0.033454, epoch loss=0.242627
Batch=359, step=17160, lr=0.128750, batch loss=0.050520, epoch loss=0.293148
Batch=419, step=17220, lr=0.128500, batch loss=0.089225, epoch loss=0.382373
Batch=479, step=17280, lr=0.128250, batch loss=0.022977, epoch loss=0.405350
Batch=539, step=17340, lr=0.128000, batch loss=0.026243, epoch loss=0.431593
Batch=599, step=17400, lr=0.127750, batch loss=0.038758, epoch loss=0.470351
Batch=659, step=17460, lr=0.127500, batch loss=0.050227, epoch loss=0.520578
Batch=719, step=17520, lr=0.127250, batch loss=0.060510, epoch loss=0.581088
Batch=779, step=17580, lr=0.127000, batch loss=0.061207, epoch loss=0.642295
Batch=839, step=17640, lr=0.126750, batch loss=0.085610, epoch loss=0.727905
Batch=899, step=17700, lr=0.126500, batch loss=0.052295, epoch loss=0.780200
Batch=959, step=17760, lr=0.126250, batch loss=0.018304, epoch loss=0.798504
Batch=1019, step=17820, lr=0.126000, batch loss=0.026868, epoch loss=0.825372
Batch=1079, step=17880, lr=0.125750, batch loss=0.019160, epoch loss=0.844532
Batch=1139, step=17940, lr=0.125500, batch loss=0.044057, epoch loss=0.888588
Batch=1199, step=18000, lr=0.125250, batch loss=0.016336, epoch loss=0.904924
Epoch=14, step=18000, lr=0.125250, epoch loss=0.904924
Batch=59, step=18060, lr=0.125000, batch loss=0.012244, epoch loss=0.012244
Batch=119, step=18120, lr=0.124750, batch loss=0.023180, epoch loss=0.035425
Batch=179, step=18180, lr=0.124500, batch loss=0.028546, epoch loss=0.063971
Batch=239, step=18240, lr=0.124250, batch loss=0.032382, epoch loss=0.096353
Batch=299, step=18300, lr=0.124000, batch loss=0.008612, epoch loss=0.104965
Batch=359, step=18360, lr=0.123750, batch loss=0.022888, epoch loss=0.127853
Batch=419, step=18420, lr=0.123500, batch loss=0.031617, epoch loss=0.159470
Batch=479, step=18480, lr=0.123250, batch loss=0.027580, epoch loss=0.187050
Batch=539, step=18540, lr=0.123000, batch loss=0.052951, epoch loss=0.240001
Batch=599, step=18600, lr=0.122750, batch loss=0.026499, epoch loss=0.266500
Batch=659, step=18660, lr=0.122500, batch loss=0.033918, epoch loss=0.300417
Batch=719, step=18720, lr=0.122250, batch loss=0.026719, epoch loss=0.327136
Batch=779, step=18780, lr=0.122000, batch loss=0.071955, epoch loss=0.399091
Batch=839, step=18840, lr=0.121750, batch loss=0.048072, epoch loss=0.447163
Batch=899, step=18900, lr=0.121500, batch loss=0.049104, epoch loss=0.496267
Batch=959, step=18960, lr=0.121250, batch loss=0.015382, epoch loss=0.511649
Batch=1019, step=19020, lr=0.121000, batch loss=0.024065, epoch loss=0.535715
Batch=1079, step=19080, lr=0.120750, batch loss=0.010568, epoch loss=0.546283
Batch=1139, step=19140, lr=0.120500, batch loss=0.023562, epoch loss=0.569845
Batch=1199, step=19200, lr=0.120250, batch loss=0.009264, epoch loss=0.579108
Epoch=15, step=19200, lr=0.120250, epoch loss=0.579108
Batch=59, step=19260, lr=0.120000, batch loss=0.005289, epoch loss=0.005289
Batch=119, step=19320, lr=0.119750, batch loss=0.018908, epoch loss=0.024196
Batch=179, step=19380, lr=0.119500, batch loss=0.042881, epoch loss=0.067077
Batch=239, step=19440, lr=0.119250, batch loss=0.019535, epoch loss=0.086612
Batch=299, step=19500, lr=0.119000, batch loss=0.005033, epoch loss=0.091645
Batch=359, step=19560, lr=0.118750, batch loss=0.016692, epoch loss=0.108337
Batch=419, step=19620, lr=0.118500, batch loss=0.019855, epoch loss=0.128192
Batch=479, step=19680, lr=0.118250, batch loss=0.007134, epoch loss=0.135326
Batch=539, step=19740, lr=0.118000, batch loss=0.017033, epoch loss=0.152359
Batch=599, step=19800, lr=0.117750, batch loss=0.023213, epoch loss=0.175573
Batch=659, step=19860, lr=0.117500, batch loss=0.019564, epoch loss=0.195137
Batch=719, step=19920, lr=0.117250, batch loss=0.045886, epoch loss=0.241023
Batch=779, step=19980, lr=0.117000, batch loss=0.084725, epoch loss=0.325749
Batch=839, step=20040, lr=0.116750, batch loss=0.031667, epoch loss=0.357415
Batch=899, step=20100, lr=0.116500, batch loss=0.027402, epoch loss=0.384817
Batch=959, step=20160, lr=0.116250, batch loss=0.016943, epoch loss=0.401760
Batch=1019, step=20220, lr=0.116000, batch loss=0.018501, epoch loss=0.420261
Batch=1079, step=20280, lr=0.115750, batch loss=0.004974, epoch loss=0.425234
Batch=1139, step=20340, lr=0.115500, batch loss=0.018826, epoch loss=0.444061
Batch=1199, step=20400, lr=0.115250, batch loss=0.007289, epoch loss=0.451349
Epoch=16, step=20400, lr=0.115250, epoch loss=0.451349
Batch=59, step=20460, lr=0.115000, batch loss=0.003260, epoch loss=0.003260
Batch=119, step=20520, lr=0.114750, batch loss=0.008471, epoch loss=0.011731
Batch=179, step=20580, lr=0.114500, batch loss=0.014938, epoch loss=0.026669
Batch=239, step=20640, lr=0.114250, batch loss=0.013102, epoch loss=0.039771
Batch=299, step=20700, lr=0.114000, batch loss=0.004991, epoch loss=0.044762
Batch=359, step=20760, lr=0.113750, batch loss=0.014150, epoch loss=0.058912
Batch=419, step=20820, lr=0.113500, batch loss=0.014592, epoch loss=0.073504
Batch=479, step=20880, lr=0.113250, batch loss=0.004838, epoch loss=0.078343
Batch=539, step=20940, lr=0.113000, batch loss=0.015743, epoch loss=0.094086
Batch=599, step=21000, lr=0.112750, batch loss=0.018412, epoch loss=0.112498
Batch=659, step=21060, lr=0.112500, batch loss=0.015600, epoch loss=0.128098
Batch=719, step=21120, lr=0.112250, batch loss=0.038294, epoch loss=0.166392
Batch=779, step=21180, lr=0.112000, batch loss=0.075333, epoch loss=0.241725
Batch=839, step=21240, lr=0.111750, batch loss=0.026051, epoch loss=0.267776
Batch=899, step=21300, lr=0.111500, batch loss=0.034870, epoch loss=0.302645
Batch=959, step=21360, lr=0.111250, batch loss=0.009735, epoch loss=0.312380
Batch=1019, step=21420, lr=0.111000, batch loss=0.011526, epoch loss=0.323907
Batch=1079, step=21480, lr=0.110750, batch loss=0.000682, epoch loss=0.324588
Batch=1139, step=21540, lr=0.110500, batch loss=0.012946, epoch loss=0.337534
Batch=1199, step=21600, lr=0.110250, batch loss=0.005304, epoch loss=0.342838
Epoch=17, step=21600, lr=0.110250, epoch loss=0.342838
Batch=59, step=21660, lr=0.110000, batch loss=0.002380, epoch loss=0.002380
Batch=119, step=21720, lr=0.109750, batch loss=0.006285, epoch loss=0.008665
Batch=179, step=21780, lr=0.109500, batch loss=0.012678, epoch loss=0.021344
Batch=239, step=21840, lr=0.109250, batch loss=0.009193, epoch loss=0.030537
Batch=299, step=21900, lr=0.109000, batch loss=0.003565, epoch loss=0.034102
Batch=359, step=21960, lr=0.108750, batch loss=0.014979, epoch loss=0.049081
Batch=419, step=22020, lr=0.108500, batch loss=0.011656, epoch loss=0.060738
Batch=479, step=22080, lr=0.108250, batch loss=0.002806, epoch loss=0.063544
Batch=539, step=22140, lr=0.108000, batch loss=0.018235, epoch loss=0.081778
Batch=599, step=22200, lr=0.107750, batch loss=0.016543, epoch loss=0.098321
Batch=659, step=22260, lr=0.107500, batch loss=0.013764, epoch loss=0.112085
Batch=719, step=22320, lr=0.107250, batch loss=0.027807, epoch loss=0.139893
Batch=779, step=22380, lr=0.107000, batch loss=0.042274, epoch loss=0.182167
Batch=839, step=22440, lr=0.106750, batch loss=0.021447, epoch loss=0.203613
Batch=899, step=22500, lr=0.106500, batch loss=0.021478, epoch loss=0.225092
Batch=959, step=22560, lr=0.106250, batch loss=0.010742, epoch loss=0.235834
Batch=1019, step=22620, lr=0.106000, batch loss=0.009538, epoch loss=0.245372
Batch=1079, step=22680, lr=0.105750, batch loss=0.000000, epoch loss=0.245372
Batch=1139, step=22740, lr=0.105500, batch loss=0.009666, epoch loss=0.255038
Batch=1199, step=22800, lr=0.105250, batch loss=0.004483, epoch loss=0.259521
Epoch=18, step=22800, lr=0.105250, epoch loss=0.259521
Batch=59, step=22860, lr=0.105000, batch loss=0.002161, epoch loss=0.002161
Batch=119, step=22920, lr=0.104750, batch loss=0.005374, epoch loss=0.007535
Batch=179, step=22980, lr=0.104500, batch loss=0.010075, epoch loss=0.017610
Batch=239, step=23040, lr=0.104250, batch loss=0.008698, epoch loss=0.026308
Batch=299, step=23100, lr=0.104000, batch loss=0.006376, epoch loss=0.032684
Batch=359, step=23160, lr=0.103750, batch loss=0.009561, epoch loss=0.042245
Batch=419, step=23220, lr=0.103500, batch loss=0.009872, epoch loss=0.052117
Batch=479, step=23280, lr=0.103250, batch loss=0.002715, epoch loss=0.054832
Batch=539, step=23340, lr=0.103000, batch loss=0.016952, epoch loss=0.071784
Batch=599, step=23400, lr=0.102750, batch loss=0.013478, epoch loss=0.085263
Batch=659, step=23460, lr=0.102500, batch loss=0.011241, epoch loss=0.096504
Batch=719, step=23520, lr=0.102250, batch loss=0.012392, epoch loss=0.108895
Batch=779, step=23580, lr=0.102000, batch loss=0.022989, epoch loss=0.131884
Batch=839, step=23640, lr=0.101750, batch loss=0.026973, epoch loss=0.158857
Batch=899, step=23700, lr=0.101500, batch loss=0.020928, epoch loss=0.179786
Batch=959, step=23760, lr=0.101250, batch loss=0.009811, epoch loss=0.189597
Batch=1019, step=23820, lr=0.101000, batch loss=0.008466, epoch loss=0.198063
Batch=1079, step=23880, lr=0.100750, batch loss=0.001744, epoch loss=0.199806
Batch=1139, step=23940, lr=0.100500, batch loss=0.008125, epoch loss=0.207932
Batch=1199, step=24000, lr=0.100250, batch loss=0.004777, epoch loss=0.212709
Epoch=19, step=24000, lr=0.100250, epoch loss=0.212709
Half-moons scatterplot and decision boundary:
[ASCII plot: '#' and '%' mark the data points of the two half-moon classes; '*' and '.' shade the two predicted decision regions]
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @install @check @runtest && rm -rf _build" failed with exit status 1
2025-05-22 20:02.47: Job failed: Failed: Build failed
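Note on reading the training log above: the lr column decreases linearly, dropping by 0.00025 at every logged interval of 60 steps and reaching 0.100250 at step 24000, while the epoch loss column is a running sum of the batch loss values that resets at each Epoch= summary line. The following is a minimal OCaml sketch of that bookkeeping only, under those assumptions read off the log; it is not OCANNL's actual training loop, and every name in it (lr_at, run_epoch, log_interval, ...) is hypothetical.

(* Illustration only: reproduces the bookkeeping visible in the log above.
   The cadence and schedule are read off the log; the names below are
   hypothetical and unrelated to OCANNL's API. *)

let log_interval = 60        (* a "Batch=..." line is printed every 60 steps *)
let steps_per_epoch = 1200   (* "Epoch=..." lines land on multiples of 1200 *)
let lr_drop = 0.00025        (* lr shrinks by this much per logged interval *)
let final_lr = 0.100250      (* the value logged at step 24000 *)

(* Learning rate consistent with the logged values:
   lr(step) = final_lr + lr_drop * (24000 - step) / log_interval. *)
let lr_at step =
  final_lr +. (lr_drop *. float_of_int (24000 - step) /. float_of_int log_interval)

(* "epoch loss" is a running sum of "batch loss" that resets every epoch. *)
let run_epoch ~epoch ~first_step ~batch_losses =
  let epoch_loss = ref 0.0 in
  List.iteri
    (fun i batch_loss ->
      let step = first_step + ((i + 1) * log_interval) in
      epoch_loss := !epoch_loss +. batch_loss;
      Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
        (((i + 1) * log_interval) - 1) step (lr_at step) batch_loss !epoch_loss)
    batch_losses;
  let last = first_step + steps_per_epoch in
  Printf.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n"
    epoch last (lr_at last) !epoch_loss

let () =
  (* Feeding in the first two batch losses of epoch 19 from the log reproduces
     the first two "Batch=..." lines of that epoch (epoch losses 0.002161 and
     0.007535); the closing "Epoch=19" line would only match the logged value
     0.212709 if all 20 batch losses of the epoch were supplied. *)
  run_epoch ~epoch:19 ~first_step:22800 ~batch_losses:[ 0.002161; 0.005374 ]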