
fedora-41-5.3_opam-2.3


Logs

2025-05-22 12:20.03: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (9afb61d245b2724d2132450805c8b080ac7e0c9a) (linux-x86_64:fedora-41-5.3_opam-2.3)
Base: ocaml/opam:fedora-41-ocaml-5.3@sha256:abc80c782e7acbd09ffd49defcb056c2fc402593e57bcb537add09330e2c3992
Opam project build


To reproduce locally:


git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 9afb61d2
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:fedora-41-ocaml-5.3@sha256:abc80c782e7acbd09ffd49defcb056c2fc402593e57bcb537add09330e2c3992
# fedora-41-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo dnf install -y findutils
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 || git fetch origin master) && git reset -q --hard c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build


END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
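

A minimal sketch of driving the reproduction block above, assuming a local Docker daemon (the image tag ocannl-ci-repro is an arbitrary name, not part of the CI output):

# build the image produced by the repro block, tagging it for reuse
docker build -t ocannl-ci-repro .
# if the build reaches the end, open a shell in it to inspect or re-run tests
docker run --rm -it ocannl-ci-repro bash

Because the final RUN step of the generated Dockerfile runs the test suite, the image build stops at that step while the tests fail; removing that line and running dune interactively inside the container is one way to iterate on the failure locally.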


2025-05-22 12:20.03: Using cache hint "ahrefs/ocannl-ocaml/opam:fedora-41-ocaml-5.3@sha256:abc80c782e7acbd09ffd49defcb056c2fc402593e57bcb537add09330e2c3992-fedora-41-5.3_opam-2.3-cdc9572ad54e4d4bf194acfcdfaa690c"
2025-05-22 12:20.03: Using OBuilder spec:
((from ocaml/opam:fedora-41-ocaml-5.3@sha256:abc80c782e7acbd09ffd49defcb056c2fc402593e57bcb537add09330e2c3992)
(comment fedora-41-5.3_opam-2.3)
(user (uid 1000) (gid 1000))
(env CLICOLOR_FORCE 1)
(env OPAMCOLOR always)
(workdir /src)
(run (network host)
(shell "sudo dnf install -y findutils"))
(run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
(run (shell "opam init --reinit -ni"))
(run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
(workdir /src)
(run (shell "sudo chown opam /src"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 || git fetch origin master) && git reset -q --hard c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 && git log --no-decorate -n1 --oneline && opam update -u"))
(copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
(run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
(run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
(env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
(env CI true)
(env OCAMLCI true)
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
(copy (src .) (dst /src))
(run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)
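

For reference, the opam and dune steps in the spec above can also be replayed in an existing local switch (OCaml 5.3.0, opam 2.3) without OBuilder or Docker; this is an approximation of the spec, not part of the CI output, and it lets opam resolve dependency versions rather than pinning the exact DEPS list:

cd ocannl
opam pin add -yn neural_nets_lib.dev ./
opam pin add -yn arrayjit.dev ./
opam install --depext-only -y neural_nets_lib arrayjit   # system packages such as libffi-devel
opam install --deps-only -y neural_nets_lib arrayjit     # OCaml dependencies
opam exec -- dune build @install @check @runtest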


2025-05-22 12:20.03: Waiting for resource in pool OCluster
2025-05-22 12:20.03: Waiting for worker…
2025-05-22 12:20.03: Got resource from pool OCluster
Building on laodoke.caelum.ci.dev
All commits already cached
HEAD is now at 9afb61d2 In progress / broken: Format -> PPrint migration first pass by Claude


(from ocaml/opam:fedora-41-ocaml-5.3@sha256:abc80c782e7acbd09ffd49defcb056c2fc402593e57bcb537add09330e2c3992)
2025-05-22 12:20.04 ---> using "da0437edefc3074e740b2380711421a58ad6af000252c27345a9c063faf0cc10" from cache


/: (comment fedora-41-5.3_opam-2.3)


/: (user (uid 1000) (gid 1000))


/: (env CLICOLOR_FORCE 1)


/: (env OPAMCOLOR always)


/: (workdir /src)


/src: (run (network host)
(shell "sudo dnf install -y findutils"))
Updating and loading repositories:
Fedora 41 openh264 (From Cisco) - x86_ 100% |   9.7 KiB/s | 989.0   B |  00m00s
Fedora 41 - x86_64                     100% | 196.2 KiB/s |  25.7 KiB |  00m00s
Fedora 41 - x86_64 - Updates           100% | 183.7 KiB/s |  23.5 KiB |  00m00s
Fedora 41 - x86_64 - Updates           100% |   8.3 MiB/s |   9.6 MiB |  00m01s
Repositories loaded.
Package "findutils-1:4.10.0-4.fc41.x86_64" is already installed.


Nothing to do.
2025-05-22 12:20.04 ---> using "cda621b93ea4e5bc7b2f3a44f5472568fa9069c819fc4f9ee4dba7021db6599e" from cache


/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-05-22 12:20.04 ---> using "00fdc0066ab5f68afc204c28620b3ce62c27dda0ac3863d3d421d30b1e5a7121" from cache


/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.


This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.


Continue? [y/n] y
[NOTE] The 'jobs' option was reset, its value was 255 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using:
opam option jobs=255 --global
Format upgrade done.


<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
2025-05-22 12:20.04 ---> using "e4c6ec45de15317b19a5e44e28bcb7589e585c58ff662e7c78b23377170afeeb" from cache


/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-139-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-05-22 12:20.04 ---> using "21404f6f0f15670f1371a8655b67d83eca7d3eb1d65bed293fe562e27fca1e04" from cache


/src: (workdir /src)


/src: (run (shell "sudo chown opam /src"))
2025-05-22 12:20.04 ---> using "e6db08cef467f22bc6d82a42d4d4c7d0ba62091bad93cb74af70b44a2974850b" from cache


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 || git fetch origin master) && git reset -q --hard c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
* branch                  master     -> FETCH_HEAD
35eb2f107a..27f5ac67c2  master     -> origin/master
c7d6d1d2aa Merge pull request #27880 from MisterDA/os-family-fedora


<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] synchronised from git+file:///home/opam/opam-repository


Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 12:20.04 ---> using "47e222691df9f48e96b82354930e643d109158150dc7c77fac9953e2867a0de2" from cache


/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
2025-05-22 12:20.04 ---> using "e7730057529b4754024c66acc0d1018233e8d2f2daf11327df013acb81dd02d8" from cache


/src: (run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
[neural_nets_lib.dev] synchronised (file:///src)
neural_nets_lib is now pinned to file:///src (version dev)
[arrayjit.dev] synchronised (file:///src)
arrayjit is now pinned to file:///src (version dev)
2025-05-22 12:20.04 ---> using "063b3fe81118ec0277acdaec22f1eecc2dd6a81fe61f9d9f1889e8d0cbd0aebf" from cache


/src: (run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
2025-05-22 12:20.04 ---> using "33e409d72e19a2f51ac75adada9ccd6f1a25cd1f594ac1fdee973b79ca82e452" from cache


/src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")


/src: (env CI true)


/src: (env OCAMLCI true)


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
+ /usr/bin/sudo "yum" "makecache"
- Updating and loading repositories:
- Repositories loaded.
- Metadata cache created.


<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[arrayjit.dev] synchronised (file:///src)
[neural_nets_lib.dev] synchronised (file:///src)


[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).


The following system packages will first need to be installed:
libffi-devel


<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>


+ /usr/bin/sudo "yum" "install" "-y" "libffi-devel"
- Updating and loading repositories:
- Repositories loaded.
- Package       Arch   Version      Repository      Size
- Installing:
-  libffi-devel x86_64 3.4.6-3.fc41 fedora      33.1 KiB
- 
- Transaction Summary:
-  Installing:         1 package
- 
- Total size of inbound packages is 29 KiB. Need to download 29 KiB.
- After this operation, 33 KiB extra will be used (install 33 KiB, remove 0 B).
- [1/1] libffi-devel-0:3.4.6-3.fc41.x86_6 100% | 368.8 KiB/s |  28.8 KiB |  00m00s
- --------------------------------------------------------------------------------
- [1/1] Total                             100% | 109.4 KiB/s |  28.8 KiB |  00m00s
- Running transaction
- [1/3] Verify package files              100% |   0.0   B/s |   1.0   B |  00m00s
- [2/3] Prepare transaction               100% |  38.0   B/s |   1.0   B |  00m00s
- [3/3] Installing libffi-devel-0:3.4.6-3 100% | 669.1 KiB/s |  34.8 KiB |  00m00s
- Complete!
+ /usr/bin/rpm "-q" "--whatprovides" "libffi-devel"
- libffi-devel-3.4.6-3.fc41.x86_64
2025-05-22 12:20.04 ---> using "56f1fb25cdf08ccfa843e8cf68715585265b4862dc78f3ea7237d6f2aabdd617" from cache


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following actions will be performed:
=== install 75 packages
- install angstrom                0.16.1
- install astring                 0.8.5
- install backoff                 0.1.1
- install base                    v0.17.2
- install bigarray-compat         1.1.0
- install bigstringaf             0.10.0
- install camlp-streams           5.0.1
- install cmdliner                1.3.0
- install conf-libffi             2.0.0
- install conf-pkg-config         4
- install cppo                    1.8.0
- install csexp                   1.5.2
- install ctypes                  0.23.0
- install ctypes-foreign          0.23.0
- install dune                    3.18.2
- install dune-configurator       3.18.2
- install fieldslib               v0.17.0
- install fmt                     0.10.0
- install integers                0.7.0
- install jane-street-headers     v0.17.0
- install jst-config              v0.17.0
- install logs                    0.8.0
- install mdx                     2.5.0
- install mtime                   2.1.0
- install multicore-magic         2.3.1
- install num                     1.5-1
- install ocaml-compiler-libs     v0.17.0
- install ocaml-syntax-shims      1.0.0
- install ocaml-version           4.0.0
- install ocaml_intrinsics_kernel v0.17.1
- install ocamlbuild              0.16.1
- install ocamlfind               1.9.8
- install parsexp                 v0.17.0
- install pprint                  20230830
- install ppx_assert              v0.17.0
- install ppx_base                v0.17.0
- install ppx_cold                v0.17.0
- install ppx_compare             v0.17.0
- install ppx_derivers            1.2.1
- install ppx_deriving            6.0.3
- install ppx_enumerate           v0.17.0
- install ppx_expect              v0.17.2
- install ppx_fields_conv         v0.17.0
- install ppx_globalize           v0.17.0
- install ppx_hash                v0.17.0
- install ppx_here                v0.17.0
- install ppx_inline_test         v0.17.0
- install ppx_minidebug           2.2.0
- install ppx_optcomp             v0.17.0
- install ppx_sexp_conv           v0.17.0
- install ppx_string              v0.17.0
- install ppx_variants_conv       v0.17.0
- install ppxlib                  0.35.0
- install ppxlib_jane             v0.17.2
- install printbox                0.12
- install printbox-ext-plot       0.12
- install printbox-html           0.12
- install printbox-md             0.12
- install printbox-text           0.12
- install ptime                   1.2.0
- install re                      1.12.0
- install result                  1.5
- install saturn_lockfree         0.5.0
- install seq                     base
- install sexplib                 v0.17.0
- install sexplib0                v0.17.0
- install stdio                   v0.17.0
- install stdlib-shims            0.3.0
- install thread-local-storage    0.2
- install time_now                v0.17.0
- install topkg                   1.0.8
- install tyxml                   4.6.0
- install uucp                    16.0.0
- install uutf                    1.0.4
- install variantslib             v0.17.0


<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved backoff.0.1.1  (cached)
-> retrieved astring.0.8.5  (cached)
-> retrieved angstrom.0.16.1  (cached)
-> retrieved base.v0.17.2  (cached)
-> retrieved bigarray-compat.1.1.0  (cached)
-> retrieved bigstringaf.0.10.0  (cached)
-> retrieved camlp-streams.5.0.1  (cached)
-> retrieved cppo.1.8.0  (cached)
-> retrieved cmdliner.1.3.0  (cached)
-> installed conf-pkg-config.4
-> retrieved csexp.1.5.2  (cached)
-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0  (cached)
-> installed conf-libffi.2.0.0
-> retrieved fieldslib.v0.17.0  (cached)
-> retrieved integers.0.7.0  (cached)
-> retrieved fmt.0.10.0  (cached)
-> retrieved jane-street-headers.v0.17.0  (cached)
-> retrieved jst-config.v0.17.0  (cached)
-> retrieved logs.0.8.0  (cached)
-> retrieved mtime.2.1.0  (cached)
-> retrieved mdx.2.5.0  (cached)
-> retrieved multicore-magic.2.3.1  (cached)
-> retrieved num.1.5-1  (cached)
-> retrieved ocaml-compiler-libs.v0.17.0  (cached)
-> retrieved ocaml-syntax-shims.1.0.0  (cached)
-> retrieved ocaml-version.4.0.0  (cached)
-> retrieved ocaml_intrinsics_kernel.v0.17.1  (cached)
-> retrieved ocamlbuild.0.16.1  (cached)
-> retrieved ocamlfind.1.9.8  (cached)
-> retrieved dune.3.18.2, dune-configurator.3.18.2  (cached)
-> retrieved parsexp.v0.17.0  (cached)
-> retrieved pprint.20230830  (cached)
-> retrieved ppx_assert.v0.17.0  (cached)
-> retrieved ppx_base.v0.17.0  (cached)
-> retrieved ppx_cold.v0.17.0  (cached)
-> retrieved ppx_compare.v0.17.0  (cached)
-> retrieved ppx_derivers.1.2.1  (cached)
-> retrieved ppx_deriving.6.0.3  (cached)
-> retrieved ppx_enumerate.v0.17.0  (cached)
-> retrieved ppx_expect.v0.17.2  (cached)
-> installed cmdliner.1.3.0
-> installed num.1.5-1
-> retrieved ppx_fields_conv.v0.17.0  (cached)
-> retrieved ppx_globalize.v0.17.0  (cached)
-> retrieved ppx_hash.v0.17.0  (cached)
-> retrieved ppx_here.v0.17.0  (cached)
-> retrieved ppx_inline_test.v0.17.0  (cached)
-> retrieved ppx_optcomp.v0.17.0  (cached)
-> retrieved ppx_sexp_conv.v0.17.0  (cached)
-> retrieved ppx_string.v0.17.0  (cached)
-> retrieved ppx_variants_conv.v0.17.0  (cached)
-> retrieved ppx_minidebug.2.2.0  (cached)
-> retrieved ppxlib_jane.v0.17.2  (cached)
-> retrieved ptime.1.2.0  (cached)
-> retrieved re.1.12.0  (cached)
-> retrieved result.1.5  (cached)
-> retrieved saturn_lockfree.0.5.0  (cached)
-> retrieved seq.base  (cached)
-> installed seq.base
-> retrieved sexplib.v0.17.0  (cached)
-> retrieved ppxlib.0.35.0  (cached)
-> retrieved sexplib0.v0.17.0  (cached)
-> retrieved stdio.v0.17.0  (cached)
-> retrieved stdlib-shims.0.3.0  (cached)
-> retrieved thread-local-storage.0.2  (cached)
-> retrieved time_now.v0.17.0  (cached)
-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12  (cached)
-> retrieved topkg.1.0.8  (cached)
-> retrieved tyxml.4.6.0  (cached)
-> retrieved uutf.1.0.4  (cached)
-> retrieved variantslib.v0.17.0  (cached)
-> retrieved uucp.16.0.0  (cached)
-> installed ocamlfind.1.9.8
-> installed ocamlbuild.0.16.1
-> installed topkg.1.0.8
-> installed uutf.1.0.4
-> installed mtime.2.1.0
-> installed fmt.0.10.0
-> installed ptime.1.2.0
-> installed astring.0.8.5
-> installed logs.0.8.0
-> installed dune.3.18.2
-> installed jane-street-headers.v0.17.0
-> installed ppx_derivers.1.2.1
-> installed csexp.1.5.2
-> installed backoff.0.1.1
-> installed bigarray-compat.1.1.0
-> installed camlp-streams.5.0.1
-> installed multicore-magic.2.3.1
-> installed ocaml-version.4.0.0
-> installed ocaml_intrinsics_kernel.v0.17.1
-> installed ocaml-syntax-shims.1.0.0
-> installed ocaml-compiler-libs.v0.17.0
-> installed pprint.20230830
-> installed printbox.0.12
-> installed cppo.1.8.0
-> installed re.1.12.0
-> installed result.1.5
-> installed sexplib0.v0.17.0
-> installed stdlib-shims.0.3.0
-> installed thread-local-storage.0.2
-> installed saturn_lockfree.0.5.0
-> installed integers.0.7.0
-> installed parsexp.v0.17.0
-> installed dune-configurator.3.18.2
-> installed bigstringaf.0.10.0
-> installed mdx.2.5.0
-> installed sexplib.v0.17.0
-> installed angstrom.0.16.1
-> installed tyxml.4.6.0
-> installed printbox-html.0.12
-> installed ctypes.0.23.0
-> installed base.v0.17.2
-> installed variantslib.v0.17.0
-> installed fieldslib.v0.17.0
-> installed stdio.v0.17.0
-> installed ctypes-foreign.0.23.0
-> installed uucp.16.0.0
-> installed printbox-text.0.12
-> installed printbox-md.0.12
-> installed printbox-ext-plot.0.12
-> installed ppxlib.0.35.0
-> installed ppxlib_jane.v0.17.2
-> installed ppx_optcomp.v0.17.0
-> installed ppx_cold.v0.17.0
-> installed ppx_here.v0.17.0
-> installed ppx_variants_conv.v0.17.0
-> installed ppx_fields_conv.v0.17.0
-> installed ppx_enumerate.v0.17.0
-> installed ppx_globalize.v0.17.0
-> installed ppx_deriving.6.0.3
-> installed ppx_compare.v0.17.0
-> installed ppx_sexp_conv.v0.17.0
-> installed ppx_hash.v0.17.0
-> installed ppx_assert.v0.17.0
-> installed ppx_base.v0.17.0
-> installed ppx_minidebug.2.2.0
-> installed jst-config.v0.17.0
-> installed ppx_string.v0.17.0
-> installed time_now.v0.17.0
-> installed ppx_inline_test.v0.17.0
-> installed ppx_expect.v0.17.2
Done.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 12:20.04 ---> using "1de4cf5d4fa2045ccadabf4d63be5b5e21a9617c39609bd14197216061357a4d" from cache


/src: (copy (src .) (dst /src))
2025-05-22 12:20.05 ---> saved as "f86f420bad9bc9c324a54f6156859de659d804cdb879d96055dff0b9612c63b2"


/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
(cd _build/default/test/config && ../../arrayjit/bin/read_config.exe --read=backend)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/config/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Wrote value of 'backend' to ocannl_backend.txt
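

As the read_config run above shows, OCANNL resolves each ocannl_* setting by checking the command line, then the environment, then an ocannl_config file (here, the one in the test's build directory), and otherwise falls back to a built-in default. The command-line form is --<setting>=<value>, as seen verbatim further down for --ocannl_debug_log_to_stream_files=true; as an illustration only (the program name is a placeholder and this particular flag is assumed to follow the same pattern, it is not shown for this setting in the log):

./micrograd_demo_logging.exe --ocannl_log_level=1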
(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test_ppx && ./test_ppx_op.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/77c478fbfbf5b5f79f37815041cf490a/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/dune", lines 30-40, characters 0-281:
30 | (rule
31 |  (alias runtest)
32 |  (target
33 |   (dir log_files))
34 |  (action
35 |   (run
36 |    %{dep:micrograd_demo_logging.exe}
37 |    "--ocannl_debug_backend=text"
38 |    "--ocannl_log_file_stem=micrograd_demo_logging"
39 |    "--ocannl_log_main_domain_to_stdout=false"
40 |    "--ocannl_debug_log_to_stream_files=true")))
(cd _build/default/test && ./micrograd_demo_logging.exe --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_stream_files=true)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_cd_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Found true, commandline --ocannl_debug_log_to_stream_files=true
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Fatal error: exception File "src/printbox-text/PrintBox_text.ml", line 212, characters 6-12: Assertion failed
Raised at PrintBox_text.Output.Make_out.to_buf_aux_ in file "src/printbox-text/PrintBox_text.ml", line 212, characters 6-50
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 19-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from PrintBox_text.Output.Make_out.render in file "src/printbox-text/PrintBox_text.ml", line 242, characters 14-64
Called from PrintBox_text.output in file "src/printbox-text/PrintBox_text.ml", line 851, characters 2-31
Called from Minidebug_runtime.PrintBox.output_box in file "minidebug_runtime.ml", line 1527, characters 19-59
Called from Minidebug_runtime.PrintBox.close_log_impl.close_tree in file "minidebug_runtime.ml", line 1572, characters 6-38
Called from Backends.Add_buffer_retrieval_and_syncing.sync_routine in file "arrayjit/lib/backends.ml", lines 144-172, characters 31-82
Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 454-455, characters 4-92
Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 441-455, characters 23-92
Called from Dune__exe__Micrograd_demo_logging in file "test/micrograd_demo_logging.ml", line 34, characters 13-77
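

The failure above is an assertion in printbox-text's renderer, reached through ppx_minidebug's log-tree output while backends.ml links the routine, and it is consistent with the commit under test ("In progress / broken: Format -> PPrint migration first pass"). A sketch for iterating on just this step locally, reusing the exact flags dune passed to the executable above (assuming the checkout from the repro block with its dependencies installed):

opam exec -- dune exec ./test/micrograd_demo_logging.exe -- \
  --ocannl_debug_backend=text \
  --ocannl_log_file_stem=micrograd_demo_logging \
  --ocannl_log_main_domain_to_stdout=false \
  --ocannl_debug_log_to_stream_files=true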
(cd _build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/micrograd_demo.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/micrograd_demo.ml _build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/micrograd_demo.ml.corrected
diff --git a/_build/default/test/micrograd_demo.ml b/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/micrograd_demo.ml.corrected
index 77e46c6..3cb470c 100644
--- a/_build/default/test/micrograd_demo.ml
+++ b/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/micrograd_demo.ml.corrected
@@ -52,15 +52,14 @@ let%expect_test "Micrograd README basic example" =
│├┼───────┤       │
│││ -4.00 │       │
│└┴───────┘       │
-    └─────────────────┘
-    ┌────────────────────────┐
-                                                              │[0]: a shape 0:1  grad_a│
-                                                              │┌┬─────────┐            │
-                                                              │││axis 0   │            │
-                                                              │├┼─────────┤            │
-                                                              │││ 1.38e+2 │            │
-                                                              │└┴─────────┘            │
-                                                              └────────────────────────┘
+    └─────────────────┘┌────────────────────────┐
+                                                             │[0]: a shape 0:1  grad_a│
+                                                             │┌┬─────────┐            │
+                                                             │││axis 0   │            │
+                                                             │├┼─────────┤            │
+                                                             │││ 1.38e+2 │            │
+                                                             │└┴─────────┘            │
+                                                             └────────────────────────┘
|}];
Tensor.print ~with_code:false ~with_grad:true `Default b;
[%expect
@@ -72,15 +71,14 @@ let%expect_test "Micrograd README basic example" =
│├┼──────┤        │
│││ 2.00 │        │
│└┴──────┘        │
-    └─────────────────┘
-    ┌────────────────────────┐
-                                                              │[2]: b shape 0:1  grad_b│
-                                                              │┌┬─────────┐            │
-                                                              │││axis 0   │            │
-                                                              │├┼─────────┤            │
-                                                              │││ 6.45e+2 │            │
-                                                              │└┴─────────┘            │
-                                                              └────────────────────────┘
+    └─────────────────┘┌────────────────────────┐
+                                                             │[2]: b shape 0:1  grad_b│
+                                                             │┌┬─────────┐            │
+                                                             │││axis 0   │            │
+                                                             │├┼─────────┤            │
+                                                             │││ 6.45e+2 │            │
+                                                             │└┴─────────┘            │
+                                                             └────────────────────────┘
|}]


let%expect_test "Micrograd half-moons example" =
File "test/hello_world_op.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/hello_world_op.ml _build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/hello_world_op.ml.corrected
diff --git a/_build/default/test/hello_world_op.ml b/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/hello_world_op.ml.corrected
index ba9d7ef..6b90c44 100644
--- a/_build/default/test/hello_world_op.ml
+++ b/_build/.sandbox/55a5b0641fd7c10700572f19d8669cf5/default/test/hello_world_op.ml.corrected
@@ -102,36 +102,39 @@ let%expect_test "Print constant tensor" =
let%op hey = [ (1, 2, 3); (4, 5, 6) ] in
Train.forward_and_forget backend ctx hey;
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey;
-  [%expect {| [1.00, 2.00, 3.00; 4.00, 5.00, 6.00] |}];
+  [%expect {| [  1.00 , 2.00 , 3.00  ;  4.00 , 5.00 , 6.00  ][0]: c2x3_hey shape 1:3->0:2 |}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;
[%expect
{|
-    ┌─────────────────────────────────────────────────────────────┐
-    │[0]: [1.00, 2.00, 3.00; 4.00, 5.00, 6.00]_hey shape 1:3->0:2 │
-    │┌──────┬──────────────────┐                                  │
-    ││      │axis 1            │                                  │
-    │├──────┼──────────────────┤                                  │
-    ││axis 0│ 1.00  2.00  3.00 │                                  │
-    ││      │ 4.00  5.00  6.00 │                                  │
-    │└──────┴──────────────────┘                                  │
-    └─────────────────────────────────────────────────────────────┘
+    ┌─────────────────────────────┐
+    │[0]: c2x3_hey shape 1:3->0:2 │
+    │┌──────┬──────────────────┐  │
+    ││      │axis 1            │  │
+    │├──────┼──────────────────┤  │
+    ││axis 0│ 1.00  2.00  3.00 │  │
+    ││      │ 4.00  5.00  6.00 │  │
+    │└──────┴──────────────────┘  │
+    └─────────────────────────────┘
|}];
let%op hoo = [| [ 1; 2; 3 ]; [ 4; 5; 6 ] |] in
Train.forward_and_forget backend ctx hoo;
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo;
-  [%expect {| [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|] |}];
+  [%expect {|
+    [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |][1]: c2x3_hoo shape
+    0:2|1:3
+    |}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo;
[%expect
{|
-    ┌──────────────────────────────────────────────────────────────────┐
-    │[1]: [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|]_hoo shape 0:2|1:3 │
-    │┌──────┬──────────────────┐                                       │
-    ││      │axis 1            │                                       │
-    │├──────┼──────────────────┤                                       │
-    ││axis 0│ 1.00  2.00  3.00 │                                       │
-    ││      │ 4.00  5.00  6.00 │                                       │
-    │└──────┴──────────────────┘                                       │
-    └──────────────────────────────────────────────────────────────────┘
+    ┌────────────────────────────┐
+    │[1]: c2x3_hoo shape 0:2|1:3 │
+    │┌──────┬──────────────────┐ │
+    ││      │axis 1            │ │
+    │├──────┼──────────────────┤ │
+    ││axis 0│ 1.00  2.00  3.00 │ │
+    ││      │ 4.00  5.00  6.00 │ │
+    │└──────┴──────────────────┘ │
+    └────────────────────────────┘
|}];
let%op hey2 =
[
@@ -145,10 +148,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey2;
[%expect
{|
-    [(1.00, 2.00, 3.00), (4.00, 5.00, 6.00);
-      (7.00, 8.00, 9.00), (10.00, 11.00, 12.00);
-      (13.00, 14.00, 15.00), (16.00, 17.00, 18.00);
-      (19.00, 20.00, 21.00), (22.00, 23.00, 24.00)]
+    [
+       ( 1.00 , 2.00 , 3.00 ) , ( 4.00 , 5.00 , 6.00 )
+      ;  ( 7.00 , 8.00 , 9.00 ) , ( 10.00 , 11.00 , 12.00 )
+      ;  ( 13.00 , 14.00 , 15.00 ) , ( 16.00 , 17.00 , 18.00 )
+      ;  ( 19.00 , 20.00 , 21.00 ) , ( 22.00 , 23.00 , 24.00 )
+    ][2]: c4x2x3_hey2 shape 1:2,2:3->0:4
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2;
[%expect
@@ -178,10 +183,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo2;
[%expect
{|
-    [|[[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]];
-      [[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]];
-      [[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]];
-      [[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]]|]
+    [|
+      [ [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] ]
+      ; [ [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] ]
+      ; [ [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] ]
+      ; [ [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] ]
+    |][3]: c4x2x3_hoo2 shape 0:4|1:2,2:3
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo2;
[%expect
@@ -209,10 +216,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo;
[%expect
{|
-    [|[|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|];
-      [|[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]|];
-      [|[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]|];
-      [|[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]|]|]
+    [|
+      [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]
+      ; [| [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] |]
+      ; [| [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] |]
+      ; [| [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] |]
+    |][4]: c4x2x3_heyhoo shape 0:4,1:2|2:3
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo;
[%expect
@@ -241,14 +250,23 @@ let%expect_test "Print constant tensor" =
[%expect
{|
[|
-      [|[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];
-        [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]|];
-      [|[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];
-        [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]|];
-      [|[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];
-        [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]|];
-      [|[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];
-        [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]|]|]
+      [|
+        [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]
+        ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]
+      |]
+      ; [|
+        [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]
+        ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]
+      |]
+      ; [|
+        [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]
+        ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]
+      |]
+      ; [|
+        [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]
+        ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]
+      |]
+    |][5]: c4x2x3x2_heyhoo2 shape 0:4,1:2|2:3,3:2
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo2;
[%expect
@@ -295,15 +313,26 @@ let%expect_test "Print constant tensor" =
{|
[|
[|
-        [[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];
-          [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]];
-        [[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];
-          [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]]|];
-      [|
-        [[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];
-          [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]];
-        [[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];
-          [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]]|]|]
+        [
+          [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]
+          ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]
+        ]
+        ; [
+          [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]
+          ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]
+        ]
+      |]
+      ; [|
+        [
+          [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]
+          ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]
+        ]
+        ; [
+          [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]
+          ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]
+        ]
+      |]
+    |][6]: c2x2x2x3x2_heyhoo3 shape 0:2,1:2|2:2,3:3,4:2
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo3;
[%expect
@@ -355,15 +384,26 @@ let%expect_test "Print constant tensor" =
{|
[|
[
-        [[1.00, 31.00; 2.00, 32.00; 3.00, 33.00];
-          [4.00, 34.00; 5.00, 35.00; 6.00, 36.00]];
-        [[7.00, 37.00; 8.00, 38.00; 9.00, 39.00];
-          [10.00, 40.00; 11.00, 41.00; 12.00, 42.00]]];
-      [
-        [[13.00, 43.00; 14.00, 44.00; 15.00, 45.00];
-          [16.00, 46.00; 17.00, 47.00; 18.00, 48.00]];
-        [[19.00, 49.00; 20.00, 50.00; 21.00, 51.00];
-          [22.00, 52.00; 23.00, 53.00; 24.00, 54.00]]]|]
+        [
+          [  1.00 , 31.00  ;  2.00 , 32.00  ;  3.00 , 33.00  ]
+          ; [  4.00 , 34.00  ;  5.00 , 35.00  ;  6.00 , 36.00  ]
+        ]
+        ; [
+          [  7.00 , 37.00  ;  8.00 , 38.00  ;  9.00 , 39.00  ]
+          ; [  10.00 , 40.00  ;  11.00 , 41.00  ;  12.00 , 42.00  ]
+        ]
+      ]
+      ; [
+        [
+          [  13.00 , 43.00  ;  14.00 , 44.00  ;  15.00 , 45.00  ]
+          ; [  16.00 , 46.00  ;  17.00 , 47.00  ;  18.00 , 48.00  ]
+        ]
+        ; [
+          [  19.00 , 49.00  ;  20.00 , 50.00  ;  21.00 , 51.00  ]
+          ; [  22.00 , 52.00  ;  23.00 , 53.00  ;  24.00 , 54.00  ]
+        ]
+      ]
+    |][7]: c2x2x2x3x2_heyhoo4 shape 0:2|4:2->1:2,2:2,3:3
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo4;
[%expect
@@ -462,8 +502,29 @@ let%expect_test "Big matrix" =
Tensor.print ~with_code:false ~with_grad:false `Inline zero_to_twenty;
[%expect
{|
-    [0.00; 1.00; 2.00; 3.00; 4.00; 5.00; 6.00; 7.00; 8.00; 9.00; 10.00; 11.00;
-      12.00; 13.00; 14.00; 15.00; 16.00; 17.00; 18.00; 19.00; 20.00]
+    [
+      0.00
+      ; 1.00
+      ; 2.00
+      ; 3.00
+      ; 4.00
+      ; 5.00
+      ; 6.00
+      ; 7.00
+      ; 8.00
+      ; 9.00
+      ; 10.00
+      ; 11.00
+      ; 12.00
+      ; 13.00
+      ; 14.00
+      ; 15.00
+      ; 16.00
+      ; 17.00
+      ; 18.00
+      ; 19.00
+      ; 20.00
+    ][2]: 0...20 shape 0:21
|}];
Tensor.print ~with_code:false ~with_grad:false `Default zero_to_twenty;
[%expect
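

The diffs above come from dune's expect-test workflow: each failing [%expect] block produces a .corrected file, and the git diff shown compares the committed expectation with the output produced after the Format -> PPrint change. If the new rendering is the intended one, the expectations can be accepted with dune's promotion mechanism (not run in this CI job), for example:

opam exec -- dune runtest --auto-promote
# or, after reviewing the diffs:
opam exec -- dune promote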
(cd _build/default/test && ./moons_demo_parallel_run.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
("Set log_level to" 1)
└─{orphaned from #2}
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Properties of devices:
(multicore_devices
(device ((device_name CPU) (device_ordinal 0) (num_domains 72))))
@!Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Not found, using default false
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout
Not found, using default false
Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453
Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087
Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382
Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039
Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216
Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512
Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081
Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141
Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385
Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263
Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603
Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902
Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024
Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685
Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407
Batch=959, step=960, lr=0.196250, batch loss=0.245136, epoch loss=44.035543
Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049
Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829
Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269
Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952
Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952
Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138
Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381
Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025
Batch=239, step=1440, lr=0.194250, batch loss=0.347897, epoch loss=1.058921
Batch=299, step=1500, lr=0.194000, batch loss=0.247348, epoch loss=1.306269
Batch=359, step=1560, lr=0.193750, batch loss=0.316559, epoch loss=1.622828
Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.935563
Batch=479, step=1680, lr=0.193250, batch loss=0.276268, epoch loss=2.211831
Batch=539, step=1740, lr=0.193000, batch loss=0.209826, epoch loss=2.421657
Batch=599, step=1800, lr=0.192750, batch loss=0.250384, epoch loss=2.672042
Batch=659, step=1860, lr=0.192500, batch loss=0.367201, epoch loss=3.039243
Batch=719, step=1920, lr=0.192250, batch loss=0.354917, epoch loss=3.394160
Batch=779, step=1980, lr=0.192000, batch loss=0.381382, epoch loss=3.775542
Batch=839, step=2040, lr=0.191750, batch loss=0.339637, epoch loss=4.115179
Batch=899, step=2100, lr=0.191500, batch loss=0.295234, epoch loss=4.410413
Batch=959, step=2160, lr=0.191250, batch loss=0.214033, epoch loss=4.624446
Batch=1019, step=2220, lr=0.191000, batch loss=0.330972, epoch loss=4.955419
Batch=1079, step=2280, lr=0.190750, batch loss=0.208236, epoch loss=5.163654
Batch=1139, step=2340, lr=0.190500, batch loss=0.278374, epoch loss=5.442028
Batch=1199, step=2400, lr=0.190250, batch loss=0.220793, epoch loss=5.662821
Epoch=1, step=2400, lr=0.190250, epoch loss=5.662821
Batch=59, step=2460, lr=0.190000, batch loss=0.230363, epoch loss=0.230363
Batch=119, step=2520, lr=0.189750, batch loss=0.195962, epoch loss=0.426325
Batch=179, step=2580, lr=0.189500, batch loss=0.221156, epoch loss=0.647481
Batch=239, step=2640, lr=0.189250, batch loss=0.328098, epoch loss=0.975578
Batch=299, step=2700, lr=0.189000, batch loss=0.202947, epoch loss=1.178525
Batch=359, step=2760, lr=0.188750, batch loss=0.289890, epoch loss=1.468415
Batch=419, step=2820, lr=0.188500, batch loss=0.281744, epoch loss=1.750160
Batch=479, step=2880, lr=0.188250, batch loss=0.264844, epoch loss=2.015004
Batch=539, step=2940, lr=0.188000, batch loss=0.203798, epoch loss=2.218802
Batch=599, step=3000, lr=0.187750, batch loss=0.248079, epoch loss=2.466881
Batch=659, step=3060, lr=0.187500, batch loss=0.345056, epoch loss=2.811937
Batch=719, step=3120, lr=0.187250, batch loss=0.343542, epoch loss=3.155479
Batch=779, step=3180, lr=0.187000, batch loss=0.366989, epoch loss=3.522468
Batch=839, step=3240, lr=0.186750, batch loss=0.321779, epoch loss=3.844248
Batch=899, step=3300, lr=0.186500, batch loss=0.283865, epoch loss=4.128112
Batch=959, step=3360, lr=0.186250, batch loss=0.214411, epoch loss=4.342523
Batch=1019, step=3420, lr=0.186000, batch loss=0.306400, epoch loss=4.648923
Batch=1079, step=3480, lr=0.185750, batch loss=0.177313, epoch loss=4.826236
Batch=1139, step=3540, lr=0.185500, batch loss=0.235576, epoch loss=5.061812
Batch=1199, step=3600, lr=0.185250, batch loss=0.197911, epoch loss=5.259723
Epoch=2, step=3600, lr=0.185250, epoch loss=5.259723
Batch=59, step=3660, lr=0.185000, batch loss=0.226539, epoch loss=0.226539
Batch=119, step=3720, lr=0.184750, batch loss=0.191802, epoch loss=0.418341
Batch=179, step=3780, lr=0.184500, batch loss=0.210712, epoch loss=0.629053
Batch=239, step=3840, lr=0.184250, batch loss=0.314104, epoch loss=0.943157
Batch=299, step=3900, lr=0.184000, batch loss=0.206011, epoch loss=1.149168
Batch=359, step=3960, lr=0.183750, batch loss=0.291253, epoch loss=1.440420
Batch=419, step=4020, lr=0.183500, batch loss=0.297434, epoch loss=1.737854
Batch=479, step=4080, lr=0.183250, batch loss=0.260511, epoch loss=1.998365
Batch=539, step=4140, lr=0.183000, batch loss=0.195229, epoch loss=2.193593
Batch=599, step=4200, lr=0.182750, batch loss=0.230395, epoch loss=2.423989
Batch=659, step=4260, lr=0.182500, batch loss=0.337132, epoch loss=2.761121
Batch=719, step=4320, lr=0.182250, batch loss=0.347122, epoch loss=3.108243
Batch=779, step=4380, lr=0.181750, batch loss=0.346320, epoch loss=3.454563
Batch=839, step=4440, lr=0.181750, batch loss=0.317728, epoch loss=3.772291
Batch=899, step=4500, lr=0.181500, batch loss=0.283974, epoch loss=4.056265
Batch=959, step=4560, lr=0.181250, batch loss=0.238280, epoch loss=4.294546
Batch=1019, step=4620, lr=0.181000, batch loss=0.337006, epoch loss=4.631552
Batch=1079, step=4680, lr=0.180750, batch loss=0.208471, epoch loss=4.840023
Batch=1139, step=4740, lr=0.180500, batch loss=0.249282, epoch loss=5.089305
Batch=1199, step=4800, lr=0.180250, batch loss=0.191768, epoch loss=5.281073
Epoch=3, step=4800, lr=0.180250, epoch loss=5.281073
Batch=59, step=4860, lr=0.180000, batch loss=0.228017, epoch loss=0.228017
Batch=119, step=4920, lr=0.179750, batch loss=0.190270, epoch loss=0.418287
Batch=179, step=4980, lr=0.179500, batch loss=0.205910, epoch loss=0.624197
Batch=239, step=5040, lr=0.179250, batch loss=0.309041, epoch loss=0.933238
Batch=299, step=5100, lr=0.179000, batch loss=0.204646, epoch loss=1.137884
Batch=359, step=5160, lr=0.178750, batch loss=0.271107, epoch loss=1.408991
Batch=419, step=5220, lr=0.178500, batch loss=0.264065, epoch loss=1.673057
Batch=479, step=5280, lr=0.178250, batch loss=0.239668, epoch loss=1.912725
Batch=539, step=5340, lr=0.178000, batch loss=0.189489, epoch loss=2.102215
Batch=599, step=5400, lr=0.177750, batch loss=0.230954, epoch loss=2.333169
Batch=659, step=5460, lr=0.177500, batch loss=0.323638, epoch loss=2.656807
Batch=719, step=5520, lr=0.177250, batch loss=0.325823, epoch loss=2.982630
Batch=779, step=5580, lr=0.177000, batch loss=0.343232, epoch loss=3.325862
Batch=839, step=5640, lr=0.176750, batch loss=0.309310, epoch loss=3.635172
Batch=899, step=5700, lr=0.176500, batch loss=0.273221, epoch loss=3.908392
Batch=959, step=5760, lr=0.176250, batch loss=0.214833, epoch loss=4.123225
Batch=1019, step=5820, lr=0.176000, batch loss=0.339281, epoch loss=4.462506
Batch=1079, step=5880, lr=0.175750, batch loss=0.207743, epoch loss=4.670249
Batch=1139, step=5940, lr=0.175500, batch loss=0.240048, epoch loss=4.910297
Batch=1199, step=6000, lr=0.175250, batch loss=0.186869, epoch loss=5.097166
Epoch=4, step=6000, lr=0.175250, epoch loss=5.097166
Batch=59, step=6060, lr=0.175000, batch loss=0.230518, epoch loss=0.230518
Batch=119, step=6120, lr=0.174750, batch loss=0.194310, epoch loss=0.424828
Batch=179, step=6180, lr=0.174500, batch loss=0.201550, epoch loss=0.626378
Batch=239, step=6240, lr=0.174250, batch loss=0.302377, epoch loss=0.928754
Batch=299, step=6300, lr=0.174000, batch loss=0.203945, epoch loss=1.132699
Batch=359, step=6360, lr=0.173750, batch loss=0.266100, epoch loss=1.398799
Batch=419, step=6420, lr=0.173500, batch loss=0.265299, epoch loss=1.664098
Batch=479, step=6480, lr=0.173250, batch loss=0.243310, epoch loss=1.907409
Batch=539, step=6540, lr=0.173000, batch loss=0.192980, epoch loss=2.100389
Batch=599, step=6600, lr=0.172750, batch loss=0.234488, epoch loss=2.334876
Batch=659, step=6660, lr=0.172500, batch loss=0.312120, epoch loss=2.646996
Batch=719, step=6720, lr=0.172250, batch loss=0.314230, epoch loss=2.961227
Batch=779, step=6780, lr=0.172000, batch loss=0.333233, epoch loss=3.294459
Batch=839, step=6840, lr=0.171750, batch loss=0.303759, epoch loss=3.598218
Batch=899, step=6900, lr=0.171500, batch loss=0.268478, epoch loss=3.866696
Batch=959, step=6960, lr=0.171250, batch loss=0.211032, epoch loss=4.077728
Batch=1019, step=7020, lr=0.171000, batch loss=0.330458, epoch loss=4.408187
Batch=1079, step=7080, lr=0.170750, batch loss=0.180851, epoch loss=4.589037
Batch=1139, step=7140, lr=0.170500, batch loss=0.216318, epoch loss=4.805355
Batch=1199, step=7200, lr=0.170250, batch loss=0.181918, epoch loss=4.987273
Epoch=5, step=7200, lr=0.170250, epoch loss=4.987273
Batch=59, step=7260, lr=0.170000, batch loss=0.232875, epoch loss=0.232875
Batch=119, step=7320, lr=0.169750, batch loss=0.184461, epoch loss=0.417336
Batch=179, step=7380, lr=0.169500, batch loss=0.196302, epoch loss=0.613638
Batch=239, step=7440, lr=0.169250, batch loss=0.290845, epoch loss=0.904483
Batch=299, step=7500, lr=0.169000, batch loss=0.200805, epoch loss=1.105288
Batch=359, step=7560, lr=0.168750, batch loss=0.258458, epoch loss=1.363747
Batch=419, step=7620, lr=0.168500, batch loss=0.256789, epoch loss=1.620536
Batch=479, step=7680, lr=0.168250, batch loss=0.236038, epoch loss=1.856573
Batch=539, step=7740, lr=0.168000, batch loss=0.187862, epoch loss=2.044435
Batch=599, step=7800, lr=0.167750, batch loss=0.223971, epoch loss=2.268406
Batch=659, step=7860, lr=0.167500, batch loss=0.305889, epoch loss=2.574295
Batch=719, step=7920, lr=0.167250, batch loss=0.309334, epoch loss=2.883629
Batch=779, step=7980, lr=0.166750, batch loss=0.329980, epoch loss=3.213609
Batch=839, step=8040, lr=0.166750, batch loss=0.292374, epoch loss=3.505983
Batch=899, step=8100, lr=0.166500, batch loss=0.261850, epoch loss=3.767833
Batch=959, step=8160, lr=0.166250, batch loss=0.193183, epoch loss=3.961016
Batch=1019, step=8220, lr=0.166000, batch loss=0.301075, epoch loss=4.262091
Batch=1079, step=8280, lr=0.165750, batch loss=0.183495, epoch loss=4.445586
Batch=1139, step=8340, lr=0.165250, batch loss=0.215283, epoch loss=4.660869
Batch=1199, step=8400, lr=0.165250, batch loss=0.172443, epoch loss=4.833312
Epoch=6, step=8400, lr=0.165250, epoch loss=4.833312
Batch=59, step=8460, lr=0.165000, batch loss=0.213083, epoch loss=0.213083
Batch=119, step=8520, lr=0.164750, batch loss=0.177617, epoch loss=0.390701
Batch=179, step=8580, lr=0.164500, batch loss=0.188843, epoch loss=0.579543
Batch=239, step=8640, lr=0.164250, batch loss=0.279680, epoch loss=0.859224
Batch=299, step=8700, lr=0.164000, batch loss=0.191912, epoch loss=1.051136
Batch=359, step=8760, lr=0.163750, batch loss=0.248286, epoch loss=1.299422
Batch=419, step=8820, lr=0.163500, batch loss=0.244511, epoch loss=1.543933
Batch=479, step=8880, lr=0.163250, batch loss=0.229612, epoch loss=1.773545
Batch=539, step=8940, lr=0.163000, batch loss=0.176868, epoch loss=1.950414
Batch=599, step=9000, lr=0.162750, batch loss=0.218401, epoch loss=2.168814
Batch=659, step=9060, lr=0.162500, batch loss=0.294772, epoch loss=2.463587
Batch=719, step=9120, lr=0.162250, batch loss=0.299183, epoch loss=2.762770
Batch=779, step=9180, lr=0.162000, batch loss=0.315969, epoch loss=3.078739
Batch=839, step=9240, lr=0.161750, batch loss=0.281910, epoch loss=3.360649
Batch=899, step=9300, lr=0.161500, batch loss=0.252928, epoch loss=3.613577
Batch=959, step=9360, lr=0.161250, batch loss=0.210650, epoch loss=3.824227
Batch=1019, step=9420, lr=0.161000, batch loss=0.298332, epoch loss=4.122559
Batch=1079, step=9480, lr=0.160750, batch loss=0.177888, epoch loss=4.300447
Batch=1139, step=9540, lr=0.160500, batch loss=0.206021, epoch loss=4.506468
Batch=1199, step=9600, lr=0.160250, batch loss=0.164503, epoch loss=4.670970
Epoch=7, step=9600, lr=0.160250, epoch loss=4.670970
Batch=59, step=9660, lr=0.160000, batch loss=0.191391, epoch loss=0.191391
Batch=119, step=9720, lr=0.159750, batch loss=0.162918, epoch loss=0.354309
Batch=179, step=9780, lr=0.159500, batch loss=0.178846, epoch loss=0.533155
Batch=239, step=9840, lr=0.159250, batch loss=0.261184, epoch loss=0.794339
Batch=299, step=9900, lr=0.159000, batch loss=0.183823, epoch loss=0.978162
Batch=359, step=9960, lr=0.158750, batch loss=0.245289, epoch loss=1.223452
Batch=419, step=10020, lr=0.158500, batch loss=0.243661, epoch loss=1.467112
Batch=479, step=10080, lr=0.158250, batch loss=0.222567, epoch loss=1.689679
Batch=539, step=10140, lr=0.158000, batch loss=0.164829, epoch loss=1.854508
Batch=599, step=10200, lr=0.157750, batch loss=0.197303, epoch loss=2.051811
Batch=659, step=10260, lr=0.157500, batch loss=0.282838, epoch loss=2.334648
Batch=719, step=10320, lr=0.157250, batch loss=0.278044, epoch loss=2.612693
Batch=779, step=10380, lr=0.157000, batch loss=0.295767, epoch loss=2.908460
Batch=839, step=10440, lr=0.156750, batch loss=0.269780, epoch loss=3.178240
Batch=899, step=10500, lr=0.156500, batch loss=0.237353, epoch loss=3.415593
Batch=959, step=10560, lr=0.156250, batch loss=0.176188, epoch loss=3.591781
Batch=1019, step=10620, lr=0.156000, batch loss=0.298612, epoch loss=3.890393
Batch=1079, step=10680, lr=0.155750, batch loss=0.179117, epoch loss=4.069510
Batch=1139, step=10740, lr=0.155500, batch loss=0.200034, epoch loss=4.269544
Batch=1199, step=10800, lr=0.155250, batch loss=0.155235, epoch loss=4.424779
Epoch=8, step=10800, lr=0.155250, epoch loss=4.424779
Batch=59, step=10860, lr=0.155000, batch loss=0.177595, epoch loss=0.177595
Batch=119, step=10920, lr=0.154750, batch loss=0.152798, epoch loss=0.330394
Batch=179, step=10980, lr=0.154500, batch loss=0.166502, epoch loss=0.496896
Batch=239, step=11040, lr=0.154250, batch loss=0.245783, epoch loss=0.742678
Batch=299, step=11100, lr=0.154000, batch loss=0.161161, epoch loss=0.903839
Batch=359, step=11160, lr=0.153750, batch loss=0.224733, epoch loss=1.128573
Batch=419, step=11220, lr=0.153500, batch loss=0.234405, epoch loss=1.362978
Batch=479, step=11280, lr=0.153250, batch loss=0.197961, epoch loss=1.560939
Batch=539, step=11340, lr=0.153000, batch loss=0.157921, epoch loss=1.718860
Batch=599, step=11400, lr=0.152750, batch loss=0.179983, epoch loss=1.898842
Batch=659, step=11460, lr=0.152500, batch loss=0.263029, epoch loss=2.161872
Batch=719, step=11520, lr=0.152250, batch loss=0.257562, epoch loss=2.419434
Batch=779, step=11580, lr=0.152000, batch loss=0.269217, epoch loss=2.688651
Batch=839, step=11640, lr=0.151750, batch loss=0.250690, epoch loss=2.939341
Batch=899, step=11700, lr=0.151500, batch loss=0.216362, epoch loss=3.155703
Batch=959, step=11760, lr=0.151250, batch loss=0.173815, epoch loss=3.329518
Batch=1019, step=11820, lr=0.151000, batch loss=0.260681, epoch loss=3.590199
Batch=1079, step=11880, lr=0.150750, batch loss=0.139080, epoch loss=3.729279
Batch=1139, step=11940, lr=0.150250, batch loss=0.173830, epoch loss=3.903109
Batch=1199, step=12000, lr=0.150250, batch loss=0.138370, epoch loss=4.041479
Epoch=9, step=12000, lr=0.150250, epoch loss=4.041479
Batch=59, step=12060, lr=0.150000, batch loss=0.162565, epoch loss=0.162565
Batch=119, step=12120, lr=0.149750, batch loss=0.135469, epoch loss=0.298035
Batch=179, step=12180, lr=0.149500, batch loss=0.149796, epoch loss=0.447830
Batch=239, step=12240, lr=0.149250, batch loss=0.218263, epoch loss=0.666094
Batch=299, step=12300, lr=0.149000, batch loss=0.141623, epoch loss=0.807717
Batch=359, step=12360, lr=0.148750, batch loss=0.197263, epoch loss=1.004979
Batch=419, step=12420, lr=0.148500, batch loss=0.204632, epoch loss=1.209611
Batch=479, step=12480, lr=0.148250, batch loss=0.179521, epoch loss=1.389133
Batch=539, step=12540, lr=0.148000, batch loss=0.141305, epoch loss=1.530437
Batch=599, step=12600, lr=0.147750, batch loss=0.149812, epoch loss=1.680249
Batch=659, step=12660, lr=0.147500, batch loss=0.224474, epoch loss=1.904723
Batch=719, step=12720, lr=0.147250, batch loss=0.235616, epoch loss=2.140339
Batch=779, step=12780, lr=0.147000, batch loss=0.263020, epoch loss=2.403359
Batch=839, step=12840, lr=0.146750, batch loss=0.235134, epoch loss=2.638492
Batch=899, step=12900, lr=0.146500, batch loss=0.229787, epoch loss=2.868280
Batch=959, step=12960, lr=0.146250, batch loss=0.139509, epoch loss=3.007789
Batch=1019, step=13020, lr=0.146000, batch loss=0.210324, epoch loss=3.218114
Batch=1079, step=13080, lr=0.145750, batch loss=0.118595, epoch loss=3.336708
Batch=1139, step=13140, lr=0.145500, batch loss=0.158064, epoch loss=3.494772
Batch=1199, step=13200, lr=0.145250, batch loss=0.116643, epoch loss=3.611416
Epoch=10, step=13200, lr=0.145250, epoch loss=3.611416
Batch=59, step=13260, lr=0.145000, batch loss=0.135540, epoch loss=0.135540
Batch=119, step=13320, lr=0.144750, batch loss=0.118161, epoch loss=0.253701
Batch=179, step=13380, lr=0.144500, batch loss=0.126243, epoch loss=0.379944
Batch=239, step=13440, lr=0.144250, batch loss=0.181355, epoch loss=0.561299
Batch=299, step=13500, lr=0.144000, batch loss=0.115306, epoch loss=0.676605
Batch=359, step=13560, lr=0.143750, batch loss=0.159266, epoch loss=0.835871
Batch=419, step=13620, lr=0.143500, batch loss=0.160712, epoch loss=0.996583
Batch=479, step=13680, lr=0.143250, batch loss=0.145111, epoch loss=1.141694
Batch=539, step=13740, lr=0.143000, batch loss=0.118429, epoch loss=1.260123
Batch=599, step=13800, lr=0.142750, batch loss=0.119740, epoch loss=1.379863
Batch=659, step=13860, lr=0.142500, batch loss=0.173772, epoch loss=1.553635
Batch=719, step=13920, lr=0.142000, batch loss=0.175736, epoch loss=1.729372
Batch=779, step=13980, lr=0.142000, batch loss=0.195238, epoch loss=1.924610
Batch=839, step=14040, lr=0.141750, batch loss=0.189123, epoch loss=2.113733
Batch=899, step=14100, lr=0.141500, batch loss=0.188336, epoch loss=2.302070
Batch=959, step=14160, lr=0.141250, batch loss=0.124872, epoch loss=2.426942
Batch=1019, step=14220, lr=0.141000, batch loss=0.238971, epoch loss=2.665912
Batch=1079, step=14280, lr=0.140750, batch loss=0.078813, epoch loss=2.744725
Batch=1139, step=14340, lr=0.140500, batch loss=0.126640, epoch loss=2.871365
Batch=1199, step=14400, lr=0.140250, batch loss=0.086870, epoch loss=2.958235
Epoch=11, step=14400, lr=0.140250, epoch loss=2.958235
Batch=59, step=14460, lr=0.140000, batch loss=0.101548, epoch loss=0.101548
Batch=119, step=14520, lr=0.139750, batch loss=0.099423, epoch loss=0.200971
Batch=179, step=14580, lr=0.139500, batch loss=0.100477, epoch loss=0.301448
Batch=239, step=14640, lr=0.139250, batch loss=0.139681, epoch loss=0.441129
Batch=299, step=14700, lr=0.139000, batch loss=0.076984, epoch loss=0.518112
Batch=359, step=14760, lr=0.138750, batch loss=0.118619, epoch loss=0.636732
Batch=419, step=14820, lr=0.138500, batch loss=0.130945, epoch loss=0.767676
Batch=479, step=14880, lr=0.138250, batch loss=0.098944, epoch loss=0.866621
Batch=539, step=14940, lr=0.138000, batch loss=0.110388, epoch loss=0.977009
Batch=599, step=15000, lr=0.137750, batch loss=0.083173, epoch loss=1.060182
Batch=659, step=15060, lr=0.137500, batch loss=0.126660, epoch loss=1.186842
Batch=719, step=15120, lr=0.137250, batch loss=0.127669, epoch loss=1.314511
Batch=779, step=15180, lr=0.137000, batch loss=0.169174, epoch loss=1.483686
Batch=839, step=15240, lr=0.136750, batch loss=0.169623, epoch loss=1.653309
Batch=899, step=15300, lr=0.136500, batch loss=0.301952, epoch loss=1.955261
Batch=959, step=15360, lr=0.136250, batch loss=0.061375, epoch loss=2.016636
Batch=1019, step=15420, lr=0.136000, batch loss=0.134478, epoch loss=2.151114
Batch=1079, step=15480, lr=0.135500, batch loss=0.042379, epoch loss=2.193493
Batch=1139, step=15540, lr=0.135500, batch loss=0.096354, epoch loss=2.289847
Batch=1199, step=15600, lr=0.135250, batch loss=0.060039, epoch loss=2.349886
Epoch=12, step=15600, lr=0.135250, epoch loss=2.349886
Batch=59, step=15660, lr=0.135000, batch loss=0.069804, epoch loss=0.069804
Batch=119, step=15720, lr=0.134750, batch loss=0.087158, epoch loss=0.156962
Batch=179, step=15780, lr=0.134500, batch loss=0.079254, epoch loss=0.236216
Batch=239, step=15840, lr=0.134250, batch loss=0.090016, epoch loss=0.326232
Batch=299, step=15900, lr=0.134000, batch loss=0.042144, epoch loss=0.368376
Batch=359, step=15960, lr=0.133750, batch loss=0.080553, epoch loss=0.448930
Batch=419, step=16020, lr=0.133500, batch loss=0.112973, epoch loss=0.561903
Batch=479, step=16080, lr=0.133250, batch loss=0.047282, epoch loss=0.609185
Batch=539, step=16140, lr=0.133000, batch loss=0.044959, epoch loss=0.654145
Batch=599, step=16200, lr=0.132750, batch loss=0.052082, epoch loss=0.706226
Batch=659, step=16260, lr=0.132500, batch loss=0.074907, epoch loss=0.781133
Batch=719, step=16320, lr=0.132250, batch loss=0.074010, epoch loss=0.855143
Batch=779, step=16380, lr=0.132000, batch loss=0.087998, epoch loss=0.943142
Batch=839, step=16440, lr=0.131750, batch loss=0.149718, epoch loss=1.092860
Batch=899, step=16500, lr=0.131500, batch loss=0.085499, epoch loss=1.178359
Batch=959, step=16560, lr=0.131250, batch loss=0.062488, epoch loss=1.240847
Batch=1019, step=16620, lr=0.131000, batch loss=0.068323, epoch loss=1.309170
Batch=1079, step=16680, lr=0.130750, batch loss=0.050406, epoch loss=1.359575
Batch=1139, step=16740, lr=0.130500, batch loss=0.089603, epoch loss=1.449178
Batch=1199, step=16800, lr=0.130250, batch loss=0.039345, epoch loss=1.488523
Epoch=13, step=16800, lr=0.130250, epoch loss=1.488523
Batch=59, step=16860, lr=0.130000, batch loss=0.031523, epoch loss=0.031523
Batch=119, step=16920, lr=0.129750, batch loss=0.034344, epoch loss=0.065867
Batch=179, step=16980, lr=0.129500, batch loss=0.040621, epoch loss=0.106487
Batch=239, step=17040, lr=0.129250, batch loss=0.057066, epoch loss=0.163553
Batch=299, step=17100, lr=0.129000, batch loss=0.029574, epoch loss=0.193127
Batch=359, step=17160, lr=0.128750, batch loss=0.046326, epoch loss=0.239453
Batch=419, step=17220, lr=0.128500, batch loss=0.078789, epoch loss=0.318242
Batch=479, step=17280, lr=0.128250, batch loss=0.021215, epoch loss=0.339457
Batch=539, step=17340, lr=0.128000, batch loss=0.026120, epoch loss=0.365577
Batch=599, step=17400, lr=0.127750, batch loss=0.034526, epoch loss=0.400103
Batch=659, step=17460, lr=0.127500, batch loss=0.045417, epoch loss=0.445520
Batch=719, step=17520, lr=0.127250, batch loss=0.043286, epoch loss=0.488805
Batch=779, step=17580, lr=0.127000, batch loss=0.078919, epoch loss=0.567724
Batch=839, step=17640, lr=0.126750, batch loss=0.175140, epoch loss=0.742864
Batch=899, step=17700, lr=0.126500, batch loss=0.059166, epoch loss=0.802030
Batch=959, step=17760, lr=0.126250, batch loss=0.020466, epoch loss=0.822496
Batch=1019, step=17820, lr=0.126000, batch loss=0.030844, epoch loss=0.853340
Batch=1079, step=17880, lr=0.125750, batch loss=0.012263, epoch loss=0.865603
Batch=1139, step=17940, lr=0.125500, batch loss=0.035815, epoch loss=0.901419
Batch=1199, step=18000, lr=0.125250, batch loss=0.015428, epoch loss=0.916847
Epoch=14, step=18000, lr=0.125250, epoch loss=0.916847
Batch=59, step=18060, lr=0.125000, batch loss=0.013196, epoch loss=0.013196
Batch=119, step=18120, lr=0.124750, batch loss=0.034364, epoch loss=0.047561
Batch=179, step=18180, lr=0.124500, batch loss=0.090773, epoch loss=0.138333
Batch=239, step=18240, lr=0.124250, batch loss=0.043862, epoch loss=0.182196
Batch=299, step=18300, lr=0.124000, batch loss=0.011338, epoch loss=0.193534
Batch=359, step=18360, lr=0.123750, batch loss=0.024800, epoch loss=0.218335
Batch=419, step=18420, lr=0.123500, batch loss=0.026162, epoch loss=0.244496
Batch=479, step=18480, lr=0.123250, batch loss=0.010511, epoch loss=0.255007
Batch=539, step=18540, lr=0.123000, batch loss=0.029193, epoch loss=0.284200
Batch=599, step=18600, lr=0.122750, batch loss=0.040581, epoch loss=0.324781
Batch=659, step=18660, lr=0.122500, batch loss=0.025921, epoch loss=0.350702
Batch=719, step=18720, lr=0.122250, batch loss=0.066913, epoch loss=0.417615
Batch=779, step=18780, lr=0.122000, batch loss=0.093350, epoch loss=0.510965
Batch=839, step=18840, lr=0.121750, batch loss=0.053714, epoch loss=0.564679
Batch=899, step=18900, lr=0.121500, batch loss=0.053090, epoch loss=0.617769
Batch=959, step=18960, lr=0.121250, batch loss=0.013491, epoch loss=0.631260
Batch=1019, step=19020, lr=0.121000, batch loss=0.025802, epoch loss=0.657062
Batch=1079, step=19080, lr=0.120750, batch loss=0.010615, epoch loss=0.667677
Batch=1139, step=19140, lr=0.120500, batch loss=0.022964, epoch loss=0.690641
Batch=1199, step=19200, lr=0.120250, batch loss=0.009276, epoch loss=0.699917
Epoch=15, step=19200, lr=0.120250, epoch loss=0.699917
Batch=59, step=19260, lr=0.120000, batch loss=0.004701, epoch loss=0.004701
Batch=119, step=19320, lr=0.119750, batch loss=0.011317, epoch loss=0.016019
Batch=179, step=19380, lr=0.119500, batch loss=0.020342, epoch loss=0.036360
Batch=239, step=19440, lr=0.119250, batch loss=0.022663, epoch loss=0.059023
Batch=299, step=19500, lr=0.119000, batch loss=0.018292, epoch loss=0.077315
Batch=359, step=19560, lr=0.118750, batch loss=0.032406, epoch loss=0.109721
Batch=419, step=19620, lr=0.118500, batch loss=0.019803, epoch loss=0.129525
Batch=479, step=19680, lr=0.118250, batch loss=0.008109, epoch loss=0.137633
Batch=539, step=19740, lr=0.118000, batch loss=0.017338, epoch loss=0.154972
Batch=599, step=19800, lr=0.117750, batch loss=0.024076, epoch loss=0.179048
Batch=659, step=19860, lr=0.117500, batch loss=0.019396, epoch loss=0.198443
Batch=719, step=19920, lr=0.117250, batch loss=0.043825, epoch loss=0.242268
Batch=779, step=19980, lr=0.117000, batch loss=0.081109, epoch loss=0.323378
Batch=839, step=20040, lr=0.116750, batch loss=0.030978, epoch loss=0.354356
Batch=899, step=20100, lr=0.116500, batch loss=0.032240, epoch loss=0.386596
Batch=959, step=20160, lr=0.116250, batch loss=0.011562, epoch loss=0.398158
Batch=1019, step=20220, lr=0.116000, batch loss=0.014790, epoch loss=0.412948
Batch=1079, step=20280, lr=0.115750, batch loss=0.002063, epoch loss=0.415011
Batch=1139, step=20340, lr=0.115500, batch loss=0.014919, epoch loss=0.429930
Batch=1199, step=20400, lr=0.115250, batch loss=0.006977, epoch loss=0.436908
Epoch=16, step=20400, lr=0.115250, epoch loss=0.436908
Batch=59, step=20460, lr=0.115000, batch loss=0.003335, epoch loss=0.003335
Batch=119, step=20520, lr=0.114750, batch loss=0.008879, epoch loss=0.012214
Batch=179, step=20580, lr=0.114500, batch loss=0.017292, epoch loss=0.029507
Batch=239, step=20640, lr=0.114250, batch loss=0.022579, epoch loss=0.052086
Batch=299, step=20700, lr=0.114000, batch loss=0.010154, epoch loss=0.062240
Batch=359, step=20760, lr=0.113750, batch loss=0.013797, epoch loss=0.076037
Batch=419, step=20820, lr=0.113500, batch loss=0.014207, epoch loss=0.090243
Batch=479, step=20880, lr=0.113250, batch loss=0.005118, epoch loss=0.095361
Batch=539, step=20940, lr=0.113000, batch loss=0.015733, epoch loss=0.111094
Batch=599, step=21000, lr=0.112750, batch loss=0.017772, epoch loss=0.128866
Batch=659, step=21060, lr=0.112500, batch loss=0.014466, epoch loss=0.143332
Batch=719, step=21120, lr=0.112250, batch loss=0.044693, epoch loss=0.188025
Batch=779, step=21180, lr=0.112000, batch loss=0.070993, epoch loss=0.259018
Batch=839, step=21240, lr=0.111750, batch loss=0.025914, epoch loss=0.284932
Batch=899, step=21300, lr=0.111500, batch loss=0.028553, epoch loss=0.313485
Batch=959, step=21360, lr=0.111250, batch loss=0.009872, epoch loss=0.323358
Batch=1019, step=21420, lr=0.111000, batch loss=0.011036, epoch loss=0.334393
Batch=1079, step=21480, lr=0.110750, batch loss=0.000972, epoch loss=0.335365
Batch=1139, step=21540, lr=0.110500, batch loss=0.013037, epoch loss=0.348402
Batch=1199, step=21600, lr=0.110250, batch loss=0.005142, epoch loss=0.353544
Epoch=17, step=21600, lr=0.110250, epoch loss=0.353544
Batch=59, step=21660, lr=0.110000, batch loss=0.002373, epoch loss=0.002373
Batch=119, step=21720, lr=0.109750, batch loss=0.006531, epoch loss=0.008905
Batch=179, step=21780, lr=0.109500, batch loss=0.012882, epoch loss=0.021786
Batch=239, step=21840, lr=0.109250, batch loss=0.009840, epoch loss=0.031626
Batch=299, step=21900, lr=0.109000, batch loss=0.013856, epoch loss=0.045482
Batch=359, step=21960, lr=0.108750, batch loss=0.012619, epoch loss=0.058101
Batch=419, step=22020, lr=0.108500, batch loss=0.012429, epoch loss=0.070530
Batch=479, step=22080, lr=0.108250, batch loss=0.003757, epoch loss=0.074287
Batch=539, step=22140, lr=0.108000, batch loss=0.014968, epoch loss=0.089255
Batch=599, step=22200, lr=0.107750, batch loss=0.015674, epoch loss=0.104928
Batch=659, step=22260, lr=0.107500, batch loss=0.015118, epoch loss=0.120046
Batch=719, step=22320, lr=0.107250, batch loss=0.028317, epoch loss=0.148363
Batch=779, step=22380, lr=0.107000, batch loss=0.039181, epoch loss=0.187544
Batch=839, step=22440, lr=0.106750, batch loss=0.021568, epoch loss=0.209113
Batch=899, step=22500, lr=0.106500, batch loss=0.024459, epoch loss=0.233572
Batch=959, step=22560, lr=0.106250, batch loss=0.009959, epoch loss=0.243531
Batch=1019, step=22620, lr=0.106000, batch loss=0.009816, epoch loss=0.253348
Batch=1079, step=22680, lr=0.105750, batch loss=0.000858, epoch loss=0.254206
Batch=1139, step=22740, lr=0.105500, batch loss=0.010060, epoch loss=0.264266
Batch=1199, step=22800, lr=0.105250, batch loss=0.004706, epoch loss=0.268972
Epoch=18, step=22800, lr=0.105250, epoch loss=0.268972
Batch=59, step=22860, lr=0.105000, batch loss=0.001895, epoch loss=0.001895
Batch=119, step=22920, lr=0.104750, batch loss=0.005382, epoch loss=0.007277
Batch=179, step=22980, lr=0.104500, batch loss=0.011039, epoch loss=0.018316
Batch=239, step=23040, lr=0.104250, batch loss=0.008472, epoch loss=0.026788
Batch=299, step=23100, lr=0.104000, batch loss=0.006777, epoch loss=0.033565
Batch=359, step=23160, lr=0.103750, batch loss=0.012795, epoch loss=0.046360
Batch=419, step=23220, lr=0.103500, batch loss=0.011221, epoch loss=0.057580
Batch=479, step=23280, lr=0.103250, batch loss=0.003192, epoch loss=0.060773
Batch=539, step=23340, lr=0.103000, batch loss=0.016520, epoch loss=0.077292
Batch=599, step=23400, lr=0.102750, batch loss=0.013484, epoch loss=0.090776
Batch=659, step=23460, lr=0.102500, batch loss=0.011814, epoch loss=0.102590
Batch=719, step=23520, lr=0.102250, batch loss=0.010517, epoch loss=0.113107
Batch=779, step=23580, lr=0.102000, batch loss=0.018017, epoch loss=0.131124
Batch=839, step=23640, lr=0.101750, batch loss=0.029114, epoch loss=0.160238
Batch=899, step=23700, lr=0.101500, batch loss=0.019253, epoch loss=0.179491
Batch=959, step=23760, lr=0.101250, batch loss=0.009268, epoch loss=0.188759
Batch=1019, step=23820, lr=0.101000, batch loss=0.007564, epoch loss=0.196323
Batch=1079, step=23880, lr=0.100750, batch loss=0.001364, epoch loss=0.197686
Batch=1139, step=23940, lr=0.100500, batch loss=0.008324, epoch loss=0.206010
Batch=1199, step=24000, lr=0.100250, batch loss=0.004503, epoch loss=0.210513
Epoch=19, step=24000, lr=0.100250, epoch loss=0.210513
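
Reader's note (not part of the CI output): the learning-rate column above decreases by 0.00025 every 60 logged steps, i.e. a simple linear decay from roughly 0.2 down to 0.1 over the 20 epochs, and the "epoch loss" is a running sum of batch losses that resets at each epoch boundary. The following is a minimal plain-OCaml sketch of that schedule and logging pattern only; it is not OCANNL code, train_batch is a hypothetical stand-in for one optimizer update, and the starting value and exact offset differ slightly from the run above.

(* Sketch of the schedule/logging pattern visible in the log above.
   Not the OCANNL API; constants are read off the log. *)
let train_batch ~lr:_ _batch = 0.0  (* placeholder: returns the batch loss *)

let () =
  let batches_per_epoch = 1200 and log_every = 60 in
  let lr = ref 0.2 and step = ref 0 in
  for epoch = 0 to 19 do
    let epoch_loss = ref 0.0 in
    for batch = 0 to batches_per_epoch - 1 do
      incr step;
      (* linear decay: drop the learning rate by 0.00025 every 60 steps *)
      if !step mod log_every = 0 then lr := !lr -. 0.00025;
      let batch_loss = train_batch ~lr:!lr batch in
      epoch_loss := !epoch_loss +. batch_loss;
      if (batch + 1) mod log_every = 0 then
        Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
          batch !step !lr batch_loss !epoch_loss
    done;
    Printf.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n"
      epoch !step !lr !epoch_loss
  done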


Half-moons scatterplot and decision boundary:
┌────────────────────────────────────────────────────────────────────────────────────────────────────┐
│********************************#*******************************************************************│
│**********************#*#*#######*###*#####*********************************************************│
│**********************#########################*****************************************************│
│*****************#**########*######*###########*###*************************************************│
│***************#################*###################************************************************│
│************######*#################*#################**********************************************│
│**********#*#####*########*#**************##*#########*#********************************************│
│***********########*##*#******************#*****##########******************************************│
│***********###########*************************############***************************************..│
│********######*####*********************************###*###*#***********************************....│
│*******######**##*************....*****************#*######*#********************************.......│
│*******##*##**##**********...........***************########*##***************************..........│
│*****#######***********........%...%%...***************#########*************************.........%.│
│******######**********..........%.........**************##*#####************************......%.%.%.│
│***#########**********.........%%%.%%......*************#*#######*********************.......%.%%%%.│
│****#######**********..........%%%%.........************#########********************........%%.%%.%│
│**#######************..........%%%%%%%........*************###*###******************.........%%%%%%.│
│*##*####************...........%%%%%%%.........***********########*****************..........%%%%%%.│
│*#######***********............%%%%%%%..........************#######**************............%%%%%%.│
│*##*####***********............%%.%%%%%...........***********####***************............%%%%%%%.│
│*#####*#**********..............%%%%%%%............**********##*###************..............%%%%%..│
│#######***********.............%.%%%%%%.............*********#######*********..............%%%%.%%..│
│#####*#**********...............%%%%%%%...............*******#######********...............%%%%%%%%.│
│###*#*#**********...............%%%%%%%%%..............*******######*******................%%%%%%...│
│#######*********.................%%%%%%%%................****###*###******................%%%%%%....│
│######**********.................%%%%%%%%%................***#*###******................%%%%%%%%%...│
│*#*##*#********...................%%%%%%%%%%...............***######***..................%%%%%%.....│
│#****##********....................%%%%%%%%%.................**###*#**................%.%%%%%%%.....│
│**************.....................%.%%%%%%...................*******..................%.%%.%%......│
│**************.......................%..%%%%%%%................****...............%.%%%%%%%%%.......│
│*************.........................%.%%%.%%%%.................*................%%%%%%%.%.%.......│
│************............................%..%%%%..%................................%%%%%%%%..........│
│************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........│
│***********..............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........│
│***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............│
│**********...................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............│
│**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................│
│*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................│
│*********............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................│
│********................................................%...%%%%.%%.%%%%..%.........................│
└────────────────────────────────────────────────────────────────────────────────────────────────────┘
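
Reader's note (not part of the CI output): the plot above is the test's ASCII rendering of the learned decision boundary, in which '#' and '%' appear to mark the two half-moon classes of training points and '*' and '.' the regions the trained network assigns to each class. For reference only, a self-contained plain-OCaml sketch of generating a two-moons dataset of this shape; it is not OCANNL code, and the noise level and point count are illustrative.

(* Illustrative two-moons generator; not taken from the ocannl test suite. *)
let pi = 4.0 *. atan 1.0

(* Returns [n] points per class as (x, y, label). *)
let half_moons ?(noise = 0.1) n =
  let point label =
    let t = Random.float pi in
    let jitter () = (Random.float 2.0 -. 1.0) *. noise in
    if label = 0 then (cos t +. jitter (), sin t +. jitter (), 0)
    else (1.0 -. cos t +. jitter (), 0.5 -. sin t +. jitter (), 1)
  in
  List.init n (fun _ -> point 0) @ List.init n (fun _ -> point 1)

let () =
  Random.self_init ();
  half_moons ~noise:0.1 5
  |> List.iter (fun (x, y, l) -> Printf.printf "%.3f %.3f %d\n" x y l)
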
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @install @check @runtest && rm -rf _build" failed with exit status 1
2025-05-22 12:20.32: Job failed: Failed: Build failed