ahrefs/ocannl (9afb61) on debian-12-5.3+flambda_opam-2.3

Logs
2025-05-22 12:20.03: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (9afb61d245b2724d2132450805c8b080ac7e0c9a) (linux-x86_64:debian-12-5.3+flambda_opam-2.3)
Base: ocaml/opam:debian-12-ocaml-5.3-flambda@sha256:102e61eadd02c8453b955d72f3a495806f17f5de549d3643cc924c320fabd8f4
Opam project build

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 9afb61d2
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-5.3-flambda@sha256:102e61eadd02c8453b955d72f3a495806f17f5de549d3643cc924c320fabd8f4
# debian-12-5.3+flambda_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 || git fetch origin master) && git reset -q --hard c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
    opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-syntax-shims.1.0.0 ocaml-variants.5.3.0+options ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build

END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK

2025-05-22 12:20.03: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-5.3-flambda@sha256:102e61eadd02c8453b955d72f3a495806f17f5de549d3643cc924c320fabd8f4-debian-12-5.3+flambda_opam-2.3-3f0f1993e262afb948f4ec8f371e6e58"
2025-05-22 12:20.03: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-5.3-flambda@sha256:102e61eadd02c8453b955d72f3a495806f17f5de549d3643cc924c320fabd8f4)
 (comment debian-12-5.3+flambda_opam-2.3)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "cd ~/opam-repository && (git cat-file -e c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 || git fetch origin master) && git reset -q --hard c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
 (run (network host)
      (shell  "opam pin add -yn neural_nets_lib.dev './' && \
             \nopam pin add -yn arrayjit.dev './'"))
 (run (network host)
      (shell "echo '(lang dune 3.0)' > './dune-project'"))
 (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-syntax-shims.1.0.0 ocaml-variants.5.3.0+options ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam install $DEPS"))
 (copy (src .) (dst /src))
 (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)

2025-05-22 12:20.03: Waiting for resource in pool OCluster
2025-05-22 12:20.03: Waiting for worker…
2025-05-22 12:20.03: Got resource from pool OCluster
Building on toxis.caelum.ci.dev
All commits already cached
HEAD is now at 9afb61d2 In progress / broken: Format -> PPrint migration first pass by Claude

(from ocaml/opam:debian-12-ocaml-5.3-flambda@sha256:102e61eadd02c8453b955d72f3a495806f17f5de549d3643cc924c320fabd8f4)
2025-05-22 12:20.04 ---> using "21ca48779d7c8771accdf60eb888012ef48685ba9d539046f9f2e6ce2db9408d" from cache

/: (comment debian-12-5.3+flambda_opam-2.3)

/: (user (uid 1000) (gid 1000))

/: (env CLICOLOR_FORCE 1)

/: (env OPAMCOLOR always)

/: (workdir /src)

/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-05-22 12:20.04 ---> using "c3f3aa93c3355a64826565104635ed2c75e43663faf3bb1239283e2d0babab60" from cache

/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
  - you won't be able to use mercurial repositories unless you install the hg command on your system.
  - you won't be able to use darcs repositories unless you install the darcs command on your system.

This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.

Continue? [y/n] y
[NOTE] The 'jobs' option was reset, its value was 39 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using:
           opam option jobs=39 --global
Format upgrade done.

<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
2025-05-22 12:20.04 ---> using "292270fbfb1491d8dafae52ea17d8b381a33866b2212919c484f3fdce286b697" from cache

/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-05-22 12:20.04 ---> using "3a794d0297d6d3662e91621faf2070f604c02f28ce3386d50957daf0a3b38339" from cache

/src: (workdir /src)

/src: (run (shell "sudo chown opam /src"))
2025-05-22 12:20.04 ---> using "ec8f95289b8846ee13cf13cb3aea5f3aff51a6aaee7942d4311969425d84ec7c" from cache

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
           (network host)
           (shell "cd ~/opam-repository && (git cat-file -e c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 || git fetch origin master) && git reset -q --hard c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
 * branch                  master     -> FETCH_HEAD
   35eb2f107a..27f5ac67c2  master     -> origin/master
c7d6d1d2aa Merge pull request #27880 from MisterDA/os-family-fedora

<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] synchronised from git+file:///home/opam/opam-repository

Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 12:20.04 ---> using "f6d37af8382af8452b0afd00b405c94f4fd850985794ed76fd36d55411640fd5" from cache

/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
2025-05-22 12:20.04 ---> using "28f0e2ce2bd84e5fdf4e9b751e90aeb7839b8951a7f3ff049d8c4ee2d0ebe35d" from cache

/src: (run (network host)
           (shell  "opam pin add -yn neural_nets_lib.dev './' && \
                  \nopam pin add -yn arrayjit.dev './'"))
[neural_nets_lib.dev] synchronised (file:///src)
neural_nets_lib is now pinned to file:///src (version dev)
[arrayjit.dev] synchronised (file:///src)
arrayjit is now pinned to file:///src (version dev)
2025-05-22 12:20.04 ---> using "96bc84b811273e1b692a4a468915ea678a0f9edb0ed69fc9933102ae180f461e" from cache

/src: (run (network host)
           (shell "echo '(lang dune 3.0)' > './dune-project'"))
2025-05-22 12:20.04 ---> using "6aa67413b9e26b265ea29d1480ad9b5ff5d2b3fe007d440fbc7cd1631ef5eb13" from cache

/src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-syntax-shims.1.0.0 ocaml-variants.5.3.0+options ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")

/src: (env CI true)

/src: (env OCAMLCI true)

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
           (network host)
           (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
+ /usr/bin/sudo "apt-get" "update"
- Get:1 http://deb.debian.org/debian bookworm InRelease [151 kB]
- Get:2 http://deb.debian.org/debian bookworm-updates InRelease [55.4 kB]
- Get:3 http://deb.debian.org/debian-security bookworm-security InRelease [48.0 kB]
- Get:4 http://deb.debian.org/debian bookworm/main amd64 Packages [8793 kB]
- Get:5 http://deb.debian.org/debian-security bookworm-security/main amd64 Packages [259 kB]
- Fetched 9306 kB in 2s (5066 kB/s)
- Reading package lists...
- 

<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[arrayjit.dev] synchronised (file:///src)
[neural_nets_lib.dev] synchronised (file:///src)

[NOTE] Package ocaml-variants is already installed (current version is 5.3.0+options).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).

The following system packages will first need to be installed:
    libffi-dev pkg-config

<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>

+ /usr/bin/sudo "apt-get" "install" "-qq" "-yy" "libffi-dev" "pkg-config"
- debconf: delaying package configuration, since apt-utils is not installed
- Selecting previously unselected package libffi-dev:amd64.
- (Reading database ... 18778 files and directories currently installed.)
- Preparing to unpack .../libffi-dev_3.4.4-1_amd64.deb ...
- Unpacking libffi-dev:amd64 (3.4.4-1) ...
- Selecting previously unselected package libpkgconf3:amd64.
- Preparing to unpack .../libpkgconf3_1.8.1-1_amd64.deb ...
- Unpacking libpkgconf3:amd64 (1.8.1-1) ...
- Selecting previously unselected package pkgconf-bin.
- Preparing to unpack .../pkgconf-bin_1.8.1-1_amd64.deb ...
- Unpacking pkgconf-bin (1.8.1-1) ...
- Selecting previously unselected package pkgconf:amd64.
- Preparing to unpack .../pkgconf_1.8.1-1_amd64.deb ...
- Unpacking pkgconf:amd64 (1.8.1-1) ...
- Selecting previously unselected package pkg-config:amd64.
- Preparing to unpack .../pkg-config_1.8.1-1_amd64.deb ...
- Unpacking pkg-config:amd64 (1.8.1-1) ...
- Setting up libffi-dev:amd64 (3.4.4-1) ...
- Setting up libpkgconf3:amd64 (1.8.1-1) ...
- Setting up pkgconf-bin (1.8.1-1) ...
- Setting up pkgconf:amd64 (1.8.1-1) ...
- Setting up pkg-config:amd64 (1.8.1-1) ...
- Processing triggers for libc-bin (2.36-9+deb12u10) ...
2025-05-22 12:20.04 ---> using "54ac0e1ab51681b2d6eb838d0ccbc0990b31104365e523fc053b4c6a0b7b3960" from cache

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
           (network host)
           (shell "opam install $DEPS"))
[NOTE] Package ocaml-variants is already installed (current version is 5.3.0+options).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following actions will be performed:
=== install 75 packages
  - install angstrom                0.16.1
  - install astring                 0.8.5
  - install backoff                 0.1.1
  - install base                    v0.17.2
  - install bigarray-compat         1.1.0
  - install bigstringaf             0.10.0
  - install camlp-streams           5.0.1
  - install cmdliner                1.3.0
  - install conf-libffi             2.0.0
  - install conf-pkg-config         4
  - install cppo                    1.8.0
  - install csexp                   1.5.2
  - install ctypes                  0.23.0
  - install ctypes-foreign          0.23.0
  - install dune                    3.18.2
  - install dune-configurator       3.18.2
  - install fieldslib               v0.17.0
  - install fmt                     0.10.0
  - install integers                0.7.0
  - install jane-street-headers     v0.17.0
  - install jst-config              v0.17.0
  - install logs                    0.8.0
  - install mdx                     2.5.0
  - install mtime                   2.1.0
  - install multicore-magic         2.3.1
  - install num                     1.5-1
  - install ocaml-compiler-libs     v0.17.0
  - install ocaml-syntax-shims      1.0.0
  - install ocaml-version           4.0.0
  - install ocaml_intrinsics_kernel v0.17.1
  - install ocamlbuild              0.16.1
  - install ocamlfind               1.9.8
  - install parsexp                 v0.17.0
  - install pprint                  20230830
  - install ppx_assert              v0.17.0
  - install ppx_base                v0.17.0
  - install ppx_cold                v0.17.0
  - install ppx_compare             v0.17.0
  - install ppx_derivers            1.2.1
  - install ppx_deriving            6.0.3
  - install ppx_enumerate           v0.17.0
  - install ppx_expect              v0.17.2
  - install ppx_fields_conv         v0.17.0
  - install ppx_globalize           v0.17.0
  - install ppx_hash                v0.17.0
  - install ppx_here                v0.17.0
  - install ppx_inline_test         v0.17.0
  - install ppx_minidebug           2.2.0
  - install ppx_optcomp             v0.17.0
  - install ppx_sexp_conv           v0.17.0
  - install ppx_string              v0.17.0
  - install ppx_variants_conv       v0.17.0
  - install ppxlib                  0.35.0
  - install ppxlib_jane             v0.17.2
  - install printbox                0.12
  - install printbox-ext-plot       0.12
  - install printbox-html           0.12
  - install printbox-md             0.12
  - install printbox-text           0.12
  - install ptime                   1.2.0
  - install re                      1.12.0
  - install result                  1.5
  - install saturn_lockfree         0.5.0
  - install seq                     base
  - install sexplib                 v0.17.0
  - install sexplib0                v0.17.0
  - install stdio                   v0.17.0
  - install stdlib-shims            0.3.0
  - install thread-local-storage    0.2
  - install time_now                v0.17.0
  - install topkg                   1.0.8
  - install tyxml                   4.6.0
  - install uucp                    16.0.0
  - install uutf                    1.0.4
  - install variantslib             v0.17.0

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved backoff.0.1.1  (cached)
-> retrieved astring.0.8.5  (cached)
-> retrieved angstrom.0.16.1  (cached)
-> retrieved bigarray-compat.1.1.0  (cached)
-> retrieved base.v0.17.2  (cached)
-> retrieved bigstringaf.0.10.0  (cached)
-> retrieved camlp-streams.5.0.1  (cached)
-> installed conf-pkg-config.4
-> retrieved cppo.1.8.0  (cached)
-> retrieved cmdliner.1.3.0  (cached)
-> retrieved csexp.1.5.2  (cached)
-> installed conf-libffi.2.0.0
-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0  (cached)
-> retrieved fieldslib.v0.17.0  (cached)
-> retrieved fmt.0.10.0  (cached)
-> retrieved integers.0.7.0  (cached)
-> retrieved jane-street-headers.v0.17.0  (cached)
-> retrieved jst-config.v0.17.0  (cached)
-> retrieved logs.0.8.0  (cached)
-> retrieved mtime.2.1.0  (cached)
-> retrieved mdx.2.5.0  (cached)
-> retrieved multicore-magic.2.3.1  (cached)
-> retrieved num.1.5-1  (cached)
-> retrieved ocaml-compiler-libs.v0.17.0  (cached)
-> retrieved ocaml-syntax-shims.1.0.0  (cached)
-> retrieved ocaml-version.4.0.0  (cached)
-> retrieved ocaml_intrinsics_kernel.v0.17.1  (cached)
-> retrieved ocamlbuild.0.16.1  (cached)
-> retrieved ocamlfind.1.9.8  (cached)
-> retrieved parsexp.v0.17.0  (cached)
-> retrieved pprint.20230830  (cached)
-> retrieved ppx_assert.v0.17.0  (cached)
-> retrieved ppx_base.v0.17.0  (cached)
-> retrieved ppx_cold.v0.17.0  (cached)
-> retrieved ppx_compare.v0.17.0  (cached)
-> retrieved ppx_derivers.1.2.1  (cached)
-> retrieved ppx_enumerate.v0.17.0  (cached)
-> retrieved ppx_deriving.6.0.3  (cached)
-> retrieved ppx_expect.v0.17.2  (cached)
-> retrieved ppx_fields_conv.v0.17.0  (cached)
-> retrieved ppx_globalize.v0.17.0  (cached)
-> retrieved ppx_hash.v0.17.0  (cached)
-> retrieved ppx_here.v0.17.0  (cached)
-> retrieved ppx_inline_test.v0.17.0  (cached)
-> retrieved dune.3.18.2, dune-configurator.3.18.2  (cached)
-> retrieved ppx_minidebug.2.2.0  (cached)
-> retrieved ppx_optcomp.v0.17.0  (cached)
-> retrieved ppx_sexp_conv.v0.17.0  (cached)
-> retrieved ppx_string.v0.17.0  (cached)
-> retrieved ppx_variants_conv.v0.17.0  (cached)
-> retrieved ppxlib_jane.v0.17.2  (cached)
-> retrieved ptime.1.2.0  (cached)
-> installed cmdliner.1.3.0
-> installed num.1.5-1
-> retrieved re.1.12.0  (cached)
-> retrieved result.1.5  (cached)
-> retrieved ppxlib.0.35.0  (cached)
-> retrieved seq.base  (cached)
-> installed seq.base
-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12  (cached)
-> retrieved saturn_lockfree.0.5.0  (cached)
-> retrieved sexplib.v0.17.0  (cached)
-> retrieved sexplib0.v0.17.0  (cached)
-> retrieved stdio.v0.17.0  (cached)
-> retrieved stdlib-shims.0.3.0  (cached)
-> retrieved thread-local-storage.0.2  (cached)
-> retrieved time_now.v0.17.0  (cached)
-> retrieved topkg.1.0.8  (cached)
-> retrieved tyxml.4.6.0  (cached)
-> retrieved uutf.1.0.4  (cached)
-> retrieved variantslib.v0.17.0  (cached)
-> retrieved uucp.16.0.0  (cached)
-> installed ocamlfind.1.9.8
-> installed ocamlbuild.0.16.1
-> installed topkg.1.0.8
-> installed mtime.2.1.0
-> installed uutf.1.0.4
-> installed fmt.0.10.0
-> installed ptime.1.2.0
-> installed astring.0.8.5
-> installed logs.0.8.0
-> installed dune.3.18.2
-> installed jane-street-headers.v0.17.0
-> installed ppx_derivers.1.2.1
-> installed printbox.0.12
-> installed csexp.1.5.2
-> installed backoff.0.1.1
-> installed bigarray-compat.1.1.0
-> installed camlp-streams.5.0.1
-> installed multicore-magic.2.3.1
-> installed ocaml-version.4.0.0
-> installed ocaml_intrinsics_kernel.v0.17.1
-> installed pprint.20230830
-> installed result.1.5
-> installed sexplib0.v0.17.0
-> installed stdlib-shims.0.3.0
-> installed thread-local-storage.0.2
-> installed re.1.12.0
-> installed ocaml-syntax-shims.1.0.0
-> installed ocaml-compiler-libs.v0.17.0
-> installed cppo.1.8.0
-> installed integers.0.7.0
-> installed saturn_lockfree.0.5.0
-> installed parsexp.v0.17.0
-> installed dune-configurator.3.18.2
-> installed bigstringaf.0.10.0
-> installed sexplib.v0.17.0
-> installed angstrom.0.16.1
-> installed mdx.2.5.0
-> installed tyxml.4.6.0
-> installed printbox-html.0.12
-> installed ctypes.0.23.0
-> installed uucp.16.0.0
-> installed printbox-text.0.12
-> installed ctypes-foreign.0.23.0
-> installed base.v0.17.2
-> installed printbox-md.0.12
-> installed variantslib.v0.17.0
-> installed fieldslib.v0.17.0
-> installed stdio.v0.17.0
-> installed printbox-ext-plot.0.12
-> installed ppxlib.0.35.0
-> installed ppx_optcomp.v0.17.0
-> installed ppxlib_jane.v0.17.2
-> installed ppx_cold.v0.17.0
-> installed ppx_here.v0.17.0
-> installed ppx_variants_conv.v0.17.0
-> installed ppx_fields_conv.v0.17.0
-> installed ppx_enumerate.v0.17.0
-> installed ppx_globalize.v0.17.0
-> installed ppx_deriving.6.0.3
-> installed ppx_compare.v0.17.0
-> installed ppx_sexp_conv.v0.17.0
-> installed ppx_hash.v0.17.0
-> installed ppx_assert.v0.17.0
-> installed ppx_minidebug.2.2.0
-> installed ppx_base.v0.17.0
-> installed jst-config.v0.17.0
-> installed ppx_string.v0.17.0
-> installed time_now.v0.17.0
-> installed ppx_inline_test.v0.17.0
-> installed ppx_expect.v0.17.2
Done.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 12:20.04 ---> using "ee0d62991985a0c96562cd79fbbad9f4ef129ea6b986ee48a35d1944601b8bd6" from cache

/src: (copy (src .) (dst /src))
2025-05-22 12:20.04 ---> saved as "2fca0e2947aa08b442ffb88e484d4f2a755090c7cdd5b50cb26dee5004d7b344"

/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
(cd _build/default/test/config && ../../arrayjit/bin/read_config.exe --read=backend)

Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/config/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Wrote value of 'backend' to ocannl_backend.txt
(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)

Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test_ppx && ./test_ppx_op.exe)

Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)

Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)

Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/89bb3fcc0655bd8df341430360f2ada7/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)

Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)

Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/dune", lines 30-40, characters 0-281:
30 | (rule
31 |  (alias runtest)
32 |  (target
33 |   (dir log_files))
34 |  (action
35 |   (run
36 |    %{dep:micrograd_demo_logging.exe}
37 |    "--ocannl_debug_backend=text"
38 |    "--ocannl_log_file_stem=micrograd_demo_logging"
39 |    "--ocannl_log_main_domain_to_stdout=false"
40 |    "--ocannl_debug_log_to_stream_files=true")))
(cd _build/default/test && ./micrograd_demo_logging.exe --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_stream_files=true)

Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_cd_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Found true, commandline --ocannl_debug_log_to_stream_files=true
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Fatal error: exception File "src/printbox-text/PrintBox_text.ml", line 212, characters 6-12: Assertion failed
Raised at PrintBox_text.Output.Make_out.to_buf_aux_ in file "src/printbox-text/PrintBox_text.ml", line 212, characters 6-50
Called from PrintBox_text.Output.Make_out.render in file "src/printbox-text/PrintBox_text.ml" (inlined), line 242, characters 21-46
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 19-42
Called from Stdlib__Map.Make.fold in file "map.ml" (inlined), lines 325-329, characters 17-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml" (inlined), lines 325-329, characters 17-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml" (inlined), lines 325-329, characters 17-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml" (inlined), lines 325-329, characters 17-42
Called from PrintBox_text.Output.Make_out.render in file "src/printbox-text/PrintBox_text.ml", line 242, characters 14-64
Called from PrintBox_text.Output.to_chan in file "src/printbox-text/PrintBox_text.ml" (inlined), line 269, characters 39-70
Called from PrintBox_text.output in file "src/printbox-text/PrintBox_text.ml", line 851, characters 2-31
Called from Minidebug_runtime.PrintBox.output_box in file "minidebug_runtime.ml", line 1527, characters 19-59
Called from Minidebug_runtime.PrintBox.close_log_impl.close_tree in file "minidebug_runtime.ml", line 1572, characters 6-38
Called from Backends.Add_buffer_retrieval_and_syncing.sync_routine in file "arrayjit/lib/backends.ml", lines 144-172, characters 31-82
Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 454-455, characters 4-92
Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 441-455, characters 23-92
Called from Dune__exe__Micrograd_demo_logging in file "test/micrograd_demo_logging.ml", line 34, characters 13-77
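
The assertion above is raised inside printbox-text's renderer while ppx_minidebug flushes a pending log tree during the backend's sync_routine; per the trace, the box being rendered comes from the debug log, not from Tensor.print. Below is a minimal sketch of the rendering entry point named at the bottom of the trace, with a placeholder box; it does not reproduce the failure, and the box contents are assumptions:

  (* Sketch only: PrintBox_text.output is the call shown in the trace above;
     Minidebug_runtime drives it on a box assembled from the debug log.
     The box below is a stand-in, not the failing one. *)
  let () =
    let box = PrintBox.(frame (vlist [ text "axis 0"; text "-4.00" ])) in
    PrintBox_text.output stdout box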
(cd _build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)

Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)

Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)

Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)

Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)

Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)

Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/micrograd_demo.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/micrograd_demo.ml _build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/micrograd_demo.ml.corrected
diff --git a/_build/default/test/micrograd_demo.ml b/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/micrograd_demo.ml.corrected
index 77e46c6..3cb470c 100644
--- a/_build/default/test/micrograd_demo.ml
+++ b/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/micrograd_demo.ml.corrected
@@ -52,15 +52,14 @@ let%expect_test "Micrograd README basic example" =
     │├┼───────┤       │
     │││ -4.00 │       │
     │└┴───────┘       │
-    └─────────────────┘
-    ┌────────────────────────┐
-                                                              │[0]: a shape 0:1  grad_a│
-                                                              │┌┬─────────┐            │
-                                                              │││axis 0   │            │
-                                                              │├┼─────────┤            │
-                                                              │││ 1.38e+2 │            │
-                                                              │└┴─────────┘            │
-                                                              └────────────────────────┘
+    └─────────────────┘┌────────────────────────┐
+                                                             │[0]: a shape 0:1  grad_a│
+                                                             │┌┬─────────┐            │
+                                                             │││axis 0   │            │
+                                                             │├┼─────────┤            │
+                                                             │││ 1.38e+2 │            │
+                                                             │└┴─────────┘            │
+                                                             └────────────────────────┘
     |}];
   Tensor.print ~with_code:false ~with_grad:true `Default b;
   [%expect
@@ -72,15 +71,14 @@ let%expect_test "Micrograd README basic example" =
     │├┼──────┤        │
     │││ 2.00 │        │
     │└┴──────┘        │
-    └─────────────────┘
-    ┌────────────────────────┐
-                                                              │[2]: b shape 0:1  grad_b│
-                                                              │┌┬─────────┐            │
-                                                              │││axis 0   │            │
-                                                              │├┼─────────┤            │
-                                                              │││ 6.45e+2 │            │
-                                                              │└┴─────────┘            │
-                                                              └────────────────────────┘
+    └─────────────────┘┌────────────────────────┐
+                                                             │[2]: b shape 0:1  grad_b│
+                                                             │┌┬─────────┐            │
+                                                             │││axis 0   │            │
+                                                             │├┼─────────┤            │
+                                                             │││ 6.45e+2 │            │
+                                                             │└┴─────────┘            │
+                                                             └────────────────────────┘
     |}]
 
 let%expect_test "Micrograd half-moons example" =
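
The diffs in this run are ppx_expect corrections: each test prints through Tensor.print, dune compares the captured output against the [%expect] block, and on a mismatch it fails @runtest, writes a .corrected file, and shows a diff like the one above (and the one for hello_world_op.ml below). A minimal, self-contained sketch of that mechanism, unrelated to OCANNL's own tests:

  (* Requires (inline_tests) and (preprocess (pps ppx_expect)) in the dune
     stanza. If the printed output stops matching the [%expect] payload,
     dune runtest fails and emits a .corrected file plus a diff. *)
  let%expect_test "layout stays stable" =
    print_string "hello";
    [%expect {| hello |}]

Accepting new output would mean promoting the .corrected files (dune promote); whether these layout changes should be accepted is exactly what the commit message above flags as "In progress / broken".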
File "test/hello_world_op.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/hello_world_op.ml _build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/hello_world_op.ml.corrected
diff --git a/_build/default/test/hello_world_op.ml b/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/hello_world_op.ml.corrected
index ba9d7ef..6b90c44 100644
--- a/_build/default/test/hello_world_op.ml
+++ b/_build/.sandbox/691975734bc573740bb06ef58e1e4132/default/test/hello_world_op.ml.corrected
@@ -102,36 +102,39 @@ let%expect_test "Print constant tensor" =
   let%op hey = [ (1, 2, 3); (4, 5, 6) ] in
   Train.forward_and_forget backend ctx hey;
   Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey;
-  [%expect {| [1.00, 2.00, 3.00; 4.00, 5.00, 6.00] |}];
+  [%expect {| [  1.00 , 2.00 , 3.00  ;  4.00 , 5.00 , 6.00  ][0]: c2x3_hey shape 1:3->0:2 |}];
   Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;
   [%expect
     {|
-    ┌─────────────────────────────────────────────────────────────┐
-    │[0]: [1.00, 2.00, 3.00; 4.00, 5.00, 6.00]_hey shape 1:3->0:2 │
-    │┌──────┬──────────────────┐                                  │
-    ││      │axis 1            │                                  │
-    │├──────┼──────────────────┤                                  │
-    ││axis 0│ 1.00  2.00  3.00 │                                  │
-    ││      │ 4.00  5.00  6.00 │                                  │
-    │└──────┴──────────────────┘                                  │
-    └─────────────────────────────────────────────────────────────┘
+    ┌─────────────────────────────┐
+    │[0]: c2x3_hey shape 1:3->0:2 │
+    │┌──────┬──────────────────┐  │
+    ││      │axis 1            │  │
+    │├──────┼──────────────────┤  │
+    ││axis 0│ 1.00  2.00  3.00 │  │
+    ││      │ 4.00  5.00  6.00 │  │
+    │└──────┴──────────────────┘  │
+    └─────────────────────────────┘
     |}];
   let%op hoo = [| [ 1; 2; 3 ]; [ 4; 5; 6 ] |] in
   Train.forward_and_forget backend ctx hoo;
   Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo;
-  [%expect {| [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|] |}];
+  [%expect {|
+    [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |][1]: c2x3_hoo shape
+    0:2|1:3
+    |}];
   Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo;
   [%expect
     {|
-    ┌──────────────────────────────────────────────────────────────────┐
-    │[1]: [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|]_hoo shape 0:2|1:3 │
-    │┌──────┬──────────────────┐                                       │
-    ││      │axis 1            │                                       │
-    │├──────┼──────────────────┤                                       │
-    ││axis 0│ 1.00  2.00  3.00 │                                       │
-    ││      │ 4.00  5.00  6.00 │                                       │
-    │└──────┴──────────────────┘                                       │
-    └──────────────────────────────────────────────────────────────────┘
+    ┌────────────────────────────┐
+    │[1]: c2x3_hoo shape 0:2|1:3 │
+    │┌──────┬──────────────────┐ │
+    ││      │axis 1            │ │
+    │├──────┼──────────────────┤ │
+    ││axis 0│ 1.00  2.00  3.00 │ │
+    ││      │ 4.00  5.00  6.00 │ │
+    │└──────┴──────────────────┘ │
+    └────────────────────────────┘
     |}];
   let%op hey2 =
     [
@@ -145,10 +148,12 @@ let%expect_test "Print constant tensor" =
   Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey2;
   [%expect
     {|
-    [(1.00, 2.00, 3.00), (4.00, 5.00, 6.00);
-      (7.00, 8.00, 9.00), (10.00, 11.00, 12.00);
-      (13.00, 14.00, 15.00), (16.00, 17.00, 18.00);
-      (19.00, 20.00, 21.00), (22.00, 23.00, 24.00)]
+    [
+       ( 1.00 , 2.00 , 3.00 ) , ( 4.00 , 5.00 , 6.00 )
+      ;  ( 7.00 , 8.00 , 9.00 ) , ( 10.00 , 11.00 , 12.00 )
+      ;  ( 13.00 , 14.00 , 15.00 ) , ( 16.00 , 17.00 , 18.00 )
+      ;  ( 19.00 , 20.00 , 21.00 ) , ( 22.00 , 23.00 , 24.00 )
+    ][2]: c4x2x3_hey2 shape 1:2,2:3->0:4
     |}];
   Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2;
   [%expect
@@ -178,10 +183,12 @@ let%expect_test "Print constant tensor" =
   Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo2;
   [%expect
     {|
-    [|[[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]];
-      [[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]];
-      [[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]];
-      [[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]]|]
+    [|
+      [ [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] ]
+      ; [ [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] ]
+      ; [ [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] ]
+      ; [ [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] ]
+    |][3]: c4x2x3_hoo2 shape 0:4|1:2,2:3
     |}];
   Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo2;
   [%expect
@@ -209,10 +216,12 @@ let%expect_test "Print constant tensor" =
   Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo;
   [%expect
     {|
-    [|[|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|];
-      [|[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]|];
-      [|[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]|];
-      [|[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]|]|]
+    [|
+      [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]
+      ; [| [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] |]
+      ; [| [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] |]
+      ; [| [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] |]
+    |][4]: c4x2x3_heyhoo shape 0:4,1:2|2:3
     |}];
   Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo;
   [%expect
@@ -241,14 +250,23 @@ let%expect_test "Print constant tensor" =
   [%expect
     {|
     [|
-      [|[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];
-        [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]|];
-      [|[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];
-        [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]|];
-      [|[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];
-        [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]|];
-      [|[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];
-        [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]|]|]
+      [|
+        [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]
+        ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]
+      |]
+      ; [|
+        [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]
+        ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]
+      |]
+      ; [|
+        [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]
+        ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]
+      |]
+      ; [|
+        [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]
+        ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]
+      |]
+    |][5]: c4x2x3x2_heyhoo2 shape 0:4,1:2|2:3,3:2
     |}];
   Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo2;
   [%expect
@@ -295,15 +313,26 @@ let%expect_test "Print constant tensor" =
     {|
     [|
       [|
-        [[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];
-          [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]];
-        [[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];
-          [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]]|];
-      [|
-        [[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];
-          [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]];
-        [[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];
-          [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]]|]|]
+        [
+          [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]
+          ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]
+        ]
+        ; [
+          [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]
+          ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]
+        ]
+      |]
+      ; [|
+        [
+          [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]
+          ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]
+        ]
+        ; [
+          [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]
+          ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]
+        ]
+      |]
+    |][6]: c2x2x2x3x2_heyhoo3 shape 0:2,1:2|2:2,3:3,4:2
     |}];
   Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo3;
   [%expect
@@ -355,15 +384,26 @@ let%expect_test "Print constant tensor" =
     {|
     [|
       [
-        [[1.00, 31.00; 2.00, 32.00; 3.00, 33.00];
-          [4.00, 34.00; 5.00, 35.00; 6.00, 36.00]];
-        [[7.00, 37.00; 8.00, 38.00; 9.00, 39.00];
-          [10.00, 40.00; 11.00, 41.00; 12.00, 42.00]]];
-      [
-        [[13.00, 43.00; 14.00, 44.00; 15.00, 45.00];
-          [16.00, 46.00; 17.00, 47.00; 18.00, 48.00]];
-        [[19.00, 49.00; 20.00, 50.00; 21.00, 51.00];
-          [22.00, 52.00; 23.00, 53.00; 24.00, 54.00]]]|]
+        [
+          [  1.00 , 31.00  ;  2.00 , 32.00  ;  3.00 , 33.00  ]
+          ; [  4.00 , 34.00  ;  5.00 , 35.00  ;  6.00 , 36.00  ]
+        ]
+        ; [
+          [  7.00 , 37.00  ;  8.00 , 38.00  ;  9.00 , 39.00  ]
+          ; [  10.00 , 40.00  ;  11.00 , 41.00  ;  12.00 , 42.00  ]
+        ]
+      ]
+      ; [
+        [
+          [  13.00 , 43.00  ;  14.00 , 44.00  ;  15.00 , 45.00  ]
+          ; [  16.00 , 46.00  ;  17.00 , 47.00  ;  18.00 , 48.00  ]
+        ]
+        ; [
+          [  19.00 , 49.00  ;  20.00 , 50.00  ;  21.00 , 51.00  ]
+          ; [  22.00 , 52.00  ;  23.00 , 53.00  ;  24.00 , 54.00  ]
+        ]
+      ]
+    |][7]: c2x2x2x3x2_heyhoo4 shape 0:2|4:2->1:2,2:2,3:3
     |}];
   Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo4;
   [%expect
@@ -462,8 +502,29 @@ let%expect_test "Big matrix" =
   Tensor.print ~with_code:false ~with_grad:false `Inline zero_to_twenty;
   [%expect
     {|
-    [0.00; 1.00; 2.00; 3.00; 4.00; 5.00; 6.00; 7.00; 8.00; 9.00; 10.00; 11.00;
-      12.00; 13.00; 14.00; 15.00; 16.00; 17.00; 18.00; 19.00; 20.00]
+    [
+      0.00
+      ; 1.00
+      ; 2.00
+      ; 3.00
+      ; 4.00
+      ; 5.00
+      ; 6.00
+      ; 7.00
+      ; 8.00
+      ; 9.00
+      ; 10.00
+      ; 11.00
+      ; 12.00
+      ; 13.00
+      ; 14.00
+      ; 15.00
+      ; 16.00
+      ; 17.00
+      ; 18.00
+      ; 19.00
+      ; 20.00
+    ][2]: 0...20 shape 0:21
     |}];
   Tensor.print ~with_code:false ~with_grad:false `Default zero_to_twenty;
   [%expect
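
The corrected expect output above is consistent with PPrint's group/flatten behaviour after the Format -> PPrint migration named in the commit message: a group that fits the line width is flattened onto one line, otherwise it breaks at each break point with the surrounding nesting, which is why the inline tensor forms now wrap element by element. A minimal PPrint sketch of that behaviour (generic PPrint usage, not OCANNL code):

  (* With width 80 the group fits and prints as "[ 1.00; 2.00 ]"; with
     width 10 the same document breaks at each `break 1`, mirroring the
     wrapped layouts in the corrected expect blocks above. *)
  let doc =
    PPrint.(
      group (string "["
             ^^ nest 2 (break 1 ^^ string "1.00" ^^ string ";" ^^ break 1 ^^ string "2.00")
             ^^ break 1 ^^ string "]"))

  let () =
    PPrint.ToChannel.pretty 1.0 80 stdout doc;
    print_newline ();
    PPrint.ToChannel.pretty 1.0 10 stdout doc;
    print_newline ()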
(cd _build/default/test && ./moons_demo_parallel_run.exe)

Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
("Set log_level to" 1)
└─{orphaned from #2}
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Properties of devices:
(multicore_devices
 (device ((device_name CPU) (device_ordinal 0) (num_domains 72))))
@!Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Not found, using default false
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout
Not found, using default false
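
The multicore_cc backend above reports num_domains 72 for the CPU device, matching the worker's core count. How OCANNL derives that number is not shown in this log; OCaml 5's standard library exposes the host parallelism it would typically be based on (a sketch only, the exact call used by OCANNL is an assumption):

  (* OCaml 5 stdlib: an upper bound on the number of domains worth running
     on this machine. *)
  let () =
    Printf.printf "recommended domains: %d\n" (Domain.recommended_domain_count ())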
Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453
Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087
Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382
Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039
Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216
Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512
Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081
Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141
Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385
Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263
Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603
Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902
Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024
Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685
Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407
Batch=959, step=960, lr=0.196250, batch loss=0.245136, epoch loss=44.035543
Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049
Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829
Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269
Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952
Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952
Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138
Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381
Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025
Batch=239, step=1440, lr=0.194250, batch loss=0.347897, epoch loss=1.058921
Batch=299, step=1500, lr=0.194000, batch loss=0.247348, epoch loss=1.306269
Batch=359, step=1560, lr=0.193750, batch loss=0.316559, epoch loss=1.622828
Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.935563
Batch=479, step=1680, lr=0.193250, batch loss=0.276268, epoch loss=2.211831
Batch=539, step=1740, lr=0.193000, batch loss=0.209826, epoch loss=2.421657
Batch=599, step=1800, lr=0.192750, batch loss=0.250384, epoch loss=2.672042
Batch=659, step=1860, lr=0.192500, batch loss=0.367201, epoch loss=3.039243
Batch=719, step=1920, lr=0.192250, batch loss=0.354917, epoch loss=3.394160
Batch=779, step=1980, lr=0.192000, batch loss=0.381382, epoch loss=3.775542
Batch=839, step=2040, lr=0.191750, batch loss=0.339637, epoch loss=4.115179
Batch=899, step=2100, lr=0.191500, batch loss=0.295234, epoch loss=4.410413
Batch=959, step=2160, lr=0.191250, batch loss=0.214033, epoch loss=4.624446
Batch=1019, step=2220, lr=0.191000, batch loss=0.330972, epoch loss=4.955419
Batch=1079, step=2280, lr=0.190750, batch loss=0.208236, epoch loss=5.163654
Batch=1139, step=2340, lr=0.190500, batch loss=0.278374, epoch loss=5.442028
Batch=1199, step=2400, lr=0.190250, batch loss=0.220793, epoch loss=5.662821
Epoch=1, step=2400, lr=0.190250, epoch loss=5.662821
Batch=59, step=2460, lr=0.190000, batch loss=0.230363, epoch loss=0.230363
Batch=119, step=2520, lr=0.189750, batch loss=0.195962, epoch loss=0.426325
Batch=179, step=2580, lr=0.189500, batch loss=0.221156, epoch loss=0.647481
Batch=239, step=2640, lr=0.189250, batch loss=0.328098, epoch loss=0.975578
Batch=299, step=2700, lr=0.189000, batch loss=0.202947, epoch loss=1.178525
Batch=359, step=2760, lr=0.188750, batch loss=0.289890, epoch loss=1.468415
Batch=419, step=2820, lr=0.188500, batch loss=0.281744, epoch loss=1.750160
Batch=479, step=2880, lr=0.188250, batch loss=0.264844, epoch loss=2.015004
Batch=539, step=2940, lr=0.188000, batch loss=0.203798, epoch loss=2.218802
Batch=599, step=3000, lr=0.187750, batch loss=0.248079, epoch loss=2.466881
Batch=659, step=3060, lr=0.187500, batch loss=0.345056, epoch loss=2.811937
Batch=719, step=3120, lr=0.187250, batch loss=0.343542, epoch loss=3.155479
Batch=779, step=3180, lr=0.187000, batch loss=0.366989, epoch loss=3.522468
Batch=839, step=3240, lr=0.186750, batch loss=0.321779, epoch loss=3.844248
Batch=899, step=3300, lr=0.186500, batch loss=0.283865, epoch loss=4.128112
Batch=959, step=3360, lr=0.186250, batch loss=0.214411, epoch loss=4.342523
Batch=1019, step=3420, lr=0.186000, batch loss=0.306400, epoch loss=4.648923
Batch=1079, step=3480, lr=0.185750, batch loss=0.177313, epoch loss=4.826236
Batch=1139, step=3540, lr=0.185500, batch loss=0.235576, epoch loss=5.061812
Batch=1199, step=3600, lr=0.185250, batch loss=0.197911, epoch loss=5.259723
Epoch=2, step=3600, lr=0.185250, epoch loss=5.259723
Batch=59, step=3660, lr=0.185000, batch loss=0.226539, epoch loss=0.226539
Batch=119, step=3720, lr=0.184750, batch loss=0.191802, epoch loss=0.418341
Batch=179, step=3780, lr=0.184500, batch loss=0.210712, epoch loss=0.629053
Batch=239, step=3840, lr=0.184250, batch loss=0.314104, epoch loss=0.943157
Batch=299, step=3900, lr=0.184000, batch loss=0.206011, epoch loss=1.149168
Batch=359, step=3960, lr=0.183750, batch loss=0.291253, epoch loss=1.440420
Batch=419, step=4020, lr=0.183500, batch loss=0.297434, epoch loss=1.737854
Batch=479, step=4080, lr=0.183250, batch loss=0.260511, epoch loss=1.998365
Batch=539, step=4140, lr=0.183000, batch loss=0.195229, epoch loss=2.193593
Batch=599, step=4200, lr=0.182750, batch loss=0.230395, epoch loss=2.423989
Batch=659, step=4260, lr=0.182500, batch loss=0.337132, epoch loss=2.761121
Batch=719, step=4320, lr=0.182250, batch loss=0.347122, epoch loss=3.108243
Batch=779, step=4380, lr=0.182000, batch loss=0.346320, epoch loss=3.454563
Batch=839, step=4440, lr=0.181750, batch loss=0.317770, epoch loss=3.772333
Batch=899, step=4500, lr=0.181500, batch loss=0.283906, epoch loss=4.056239
Batch=959, step=4560, lr=0.181250, batch loss=0.238371, epoch loss=4.294610
Batch=1019, step=4620, lr=0.181000, batch loss=0.336891, epoch loss=4.631501
Batch=1079, step=4680, lr=0.180750, batch loss=0.208592, epoch loss=4.840094
Batch=1139, step=4740, lr=0.180500, batch loss=0.249212, epoch loss=5.089306
Batch=1199, step=4800, lr=0.180250, batch loss=0.191768, epoch loss=5.281074
Epoch=3, step=4800, lr=0.180250, epoch loss=5.281074
Batch=59, step=4860, lr=0.180000, batch loss=0.228079, epoch loss=0.228079
Batch=119, step=4920, lr=0.179750, batch loss=0.190219, epoch loss=0.418298
Batch=179, step=4980, lr=0.179500, batch loss=0.205905, epoch loss=0.624203
Batch=239, step=5040, lr=0.179250, batch loss=0.309067, epoch loss=0.933269
Batch=299, step=5100, lr=0.179000, batch loss=0.204593, epoch loss=1.137862
Batch=359, step=5160, lr=0.178750, batch loss=0.271139, epoch loss=1.409001
Batch=419, step=5220, lr=0.178500, batch loss=0.264055, epoch loss=1.673056
Batch=479, step=5280, lr=0.178250, batch loss=0.239692, epoch loss=1.912748
Batch=539, step=5340, lr=0.178000, batch loss=0.189445, epoch loss=2.102194
Batch=599, step=5400, lr=0.177750, batch loss=0.231019, epoch loss=2.333213
Batch=659, step=5460, lr=0.177500, batch loss=0.323592, epoch loss=2.656805
Batch=719, step=5520, lr=0.177250, batch loss=0.325867, epoch loss=2.982672
Batch=779, step=5580, lr=0.177000, batch loss=0.343179, epoch loss=3.325851
Batch=839, step=5640, lr=0.176750, batch loss=0.309345, epoch loss=3.635195
Batch=899, step=5700, lr=0.176500, batch loss=0.273171, epoch loss=3.908366
Batch=959, step=5760, lr=0.176250, batch loss=0.214921, epoch loss=4.123287
Batch=1019, step=5820, lr=0.176000, batch loss=0.339150, epoch loss=4.462436
Batch=1079, step=5880, lr=0.175750, batch loss=0.207828, epoch loss=4.670264
Batch=1139, step=5940, lr=0.175500, batch loss=0.240055, epoch loss=4.910319
Batch=1199, step=6000, lr=0.175250, batch loss=0.186862, epoch loss=5.097180
Epoch=4, step=6000, lr=0.175250, epoch loss=5.097180
Batch=59, step=6060, lr=0.175000, batch loss=0.230529, epoch loss=0.230529
Batch=119, step=6120, lr=0.174750, batch loss=0.194291, epoch loss=0.424820
Batch=179, step=6180, lr=0.174500, batch loss=0.201541, epoch loss=0.626361
Batch=239, step=6240, lr=0.174250, batch loss=0.302380, epoch loss=0.928741
Batch=299, step=6300, lr=0.174000, batch loss=0.203930, epoch loss=1.132671
Batch=359, step=6360, lr=0.173750, batch loss=0.266115, epoch loss=1.398786
Batch=419, step=6420, lr=0.173500, batch loss=0.265282, epoch loss=1.664067
Batch=479, step=6480, lr=0.173250, batch loss=0.243319, epoch loss=1.907387
Batch=539, step=6540, lr=0.173000, batch loss=0.192969, epoch loss=2.100355
Batch=599, step=6600, lr=0.172750, batch loss=0.234500, epoch loss=2.334855
Batch=659, step=6660, lr=0.172500, batch loss=0.312107, epoch loss=2.646963
Batch=719, step=6720, lr=0.172250, batch loss=0.314221, epoch loss=2.961184
Batch=779, step=6780, lr=0.172000, batch loss=0.333223, epoch loss=3.294407
Batch=839, step=6840, lr=0.171750, batch loss=0.303757, epoch loss=3.598164
Batch=899, step=6900, lr=0.171500, batch loss=0.268468, epoch loss=3.866633
Batch=959, step=6960, lr=0.171250, batch loss=0.211039, epoch loss=4.077671
Batch=1019, step=7020, lr=0.171000, batch loss=0.330462, epoch loss=4.408133
Batch=1079, step=7080, lr=0.170750, batch loss=0.180866, epoch loss=4.588999
Batch=1139, step=7140, lr=0.170500, batch loss=0.216313, epoch loss=4.805312
Batch=1199, step=7200, lr=0.170250, batch loss=0.181911, epoch loss=4.987223
Epoch=5, step=7200, lr=0.170250, epoch loss=4.987223
Batch=59, step=7260, lr=0.170000, batch loss=0.232877, epoch loss=0.232877
Batch=119, step=7320, lr=0.169750, batch loss=0.184424, epoch loss=0.417301
Batch=179, step=7380, lr=0.169500, batch loss=0.196292, epoch loss=0.613593
Batch=239, step=7440, lr=0.169250, batch loss=0.290823, epoch loss=0.904416
Batch=299, step=7500, lr=0.169000, batch loss=0.200837, epoch loss=1.105253
Batch=359, step=7560, lr=0.168750, batch loss=0.258435, epoch loss=1.363689
Batch=419, step=7620, lr=0.168500, batch loss=0.256808, epoch loss=1.620497
Batch=479, step=7680, lr=0.168250, batch loss=0.235998, epoch loss=1.856495
Batch=539, step=7740, lr=0.168000, batch loss=0.187895, epoch loss=2.044390
Batch=599, step=7800, lr=0.167750, batch loss=0.223924, epoch loss=2.268314
Batch=659, step=7860, lr=0.167500, batch loss=0.305915, epoch loss=2.574229
Batch=719, step=7920, lr=0.167250, batch loss=0.309289, epoch loss=2.883518
Batch=779, step=7980, lr=0.167000, batch loss=0.329942, epoch loss=3.213460
Batch=839, step=8040, lr=0.166750, batch loss=0.292425, epoch loss=3.505885
Batch=899, step=8100, lr=0.166500, batch loss=0.261775, epoch loss=3.767660
Batch=959, step=8160, lr=0.166250, batch loss=0.193295, epoch loss=3.960955
Batch=1019, step=8220, lr=0.166000, batch loss=0.314033, epoch loss=4.274988
Batch=1079, step=8280, lr=0.165750, batch loss=0.172099, epoch loss=4.447087
Batch=1139, step=8340, lr=0.165500, batch loss=0.209686, epoch loss=4.656773
Batch=1199, step=8400, lr=0.165250, batch loss=0.178207, epoch loss=4.834981
Epoch=6, step=8400, lr=0.165250, epoch loss=4.834981
Batch=59, step=8460, lr=0.165000, batch loss=0.229878, epoch loss=0.229878
Batch=119, step=8520, lr=0.164750, batch loss=0.174934, epoch loss=0.404811
Batch=179, step=8580, lr=0.164500, batch loss=0.187797, epoch loss=0.592608
Batch=239, step=8640, lr=0.164250, batch loss=0.278256, epoch loss=0.870864
Batch=299, step=8700, lr=0.164000, batch loss=0.191876, epoch loss=1.062740
Batch=359, step=8760, lr=0.163750, batch loss=0.248738, epoch loss=1.311478
Batch=419, step=8820, lr=0.163500, batch loss=0.245511, epoch loss=1.556989
Batch=479, step=8880, lr=0.163250, batch loss=0.228692, epoch loss=1.785681
Batch=539, step=8940, lr=0.163000, batch loss=0.178022, epoch loss=1.963703
Batch=599, step=9000, lr=0.162750, batch loss=0.217515, epoch loss=2.181218
Batch=659, step=9060, lr=0.162500, batch loss=0.294741, epoch loss=2.475958
Batch=719, step=9120, lr=0.162250, batch loss=0.296529, epoch loss=2.772488
Batch=779, step=9180, lr=0.162000, batch loss=0.316606, epoch loss=3.089094
Batch=839, step=9240, lr=0.161750, batch loss=0.287322, epoch loss=3.376416
Batch=899, step=9300, lr=0.161500, batch loss=0.250979, epoch loss=3.627395
Batch=959, step=9360, lr=0.161250, batch loss=0.190698, epoch loss=3.818093
Batch=1019, step=9420, lr=0.161000, batch loss=0.311482, epoch loss=4.129575
Batch=1079, step=9480, lr=0.160750, batch loss=0.191842, epoch loss=4.321417
Batch=1139, step=9540, lr=0.160500, batch loss=0.215646, epoch loss=4.537064
Batch=1199, step=9600, lr=0.160250, batch loss=0.165615, epoch loss=4.702678
Epoch=7, step=9600, lr=0.160250, epoch loss=4.702678
Batch=59, step=9660, lr=0.160000, batch loss=0.197342, epoch loss=0.197342
Batch=119, step=9720, lr=0.159750, batch loss=0.165381, epoch loss=0.362722
Batch=179, step=9780, lr=0.159500, batch loss=0.179272, epoch loss=0.541994
Batch=239, step=9840, lr=0.159250, batch loss=0.263895, epoch loss=0.805889
Batch=299, step=9900, lr=0.159000, batch loss=0.182048, epoch loss=0.987937
Batch=359, step=9960, lr=0.158750, batch loss=0.240968, epoch loss=1.228905
Batch=419, step=10020, lr=0.158500, batch loss=0.232887, epoch loss=1.461792
Batch=479, step=10080, lr=0.158250, batch loss=0.213304, epoch loss=1.675096
Batch=539, step=10140, lr=0.158000, batch loss=0.170580, epoch loss=1.845675
Batch=599, step=10200, lr=0.157750, batch loss=0.200381, epoch loss=2.046057
Batch=659, step=10260, lr=0.157500, batch loss=0.282939, epoch loss=2.328996
Batch=719, step=10320, lr=0.157250, batch loss=0.279264, epoch loss=2.608260
Batch=779, step=10380, lr=0.157000, batch loss=0.300545, epoch loss=2.908805
Batch=839, step=10440, lr=0.156750, batch loss=0.270843, epoch loss=3.179648
Batch=899, step=10500, lr=0.156500, batch loss=0.239858, epoch loss=3.419506
Batch=959, step=10560, lr=0.156250, batch loss=0.197370, epoch loss=3.616876
Batch=1019, step=10620, lr=0.156000, batch loss=0.282265, epoch loss=3.899141
Batch=1079, step=10680, lr=0.155750, batch loss=0.167874, epoch loss=4.067015
Batch=1139, step=10740, lr=0.155500, batch loss=0.197740, epoch loss=4.264755
Batch=1199, step=10800, lr=0.155250, batch loss=0.155116, epoch loss=4.419871
Epoch=8, step=10800, lr=0.155250, epoch loss=4.419871
Batch=59, step=10860, lr=0.155000, batch loss=0.194964, epoch loss=0.194964
Batch=119, step=10920, lr=0.154750, batch loss=0.160445, epoch loss=0.355408
Batch=179, step=10980, lr=0.154500, batch loss=0.167942, epoch loss=0.523351
Batch=239, step=11040, lr=0.154250, batch loss=0.244617, epoch loss=0.767967
Batch=299, step=11100, lr=0.154000, batch loss=0.166118, epoch loss=0.934085
Batch=359, step=11160, lr=0.153750, batch loss=0.222548, epoch loss=1.156633
Batch=419, step=11220, lr=0.153500, batch loss=0.227455, epoch loss=1.384088
Batch=479, step=11280, lr=0.153250, batch loss=0.204072, epoch loss=1.588159
Batch=539, step=11340, lr=0.153000, batch loss=0.157477, epoch loss=1.745636
Batch=599, step=11400, lr=0.152750, batch loss=0.179074, epoch loss=1.924710
Batch=659, step=11460, lr=0.152500, batch loss=0.265782, epoch loss=2.190492
Batch=719, step=11520, lr=0.152250, batch loss=0.258643, epoch loss=2.449135
Batch=779, step=11580, lr=0.152000, batch loss=0.274270, epoch loss=2.723405
Batch=839, step=11640, lr=0.151750, batch loss=0.254919, epoch loss=2.978324
Batch=899, step=11700, lr=0.151500, batch loss=0.213221, epoch loss=3.191545
Batch=959, step=11760, lr=0.151250, batch loss=0.166597, epoch loss=3.358142
Batch=1019, step=11820, lr=0.151000, batch loss=0.267670, epoch loss=3.625812
Batch=1079, step=11880, lr=0.150750, batch loss=0.147572, epoch loss=3.773384
Batch=1139, step=11940, lr=0.150500, batch loss=0.186985, epoch loss=3.960370
Batch=1199, step=12000, lr=0.150250, batch loss=0.139069, epoch loss=4.099439
Epoch=9, step=12000, lr=0.150250, epoch loss=4.099439
Batch=59, step=12060, lr=0.150000, batch loss=0.162367, epoch loss=0.162367
Batch=119, step=12120, lr=0.149750, batch loss=0.135129, epoch loss=0.297496
Batch=179, step=12180, lr=0.149500, batch loss=0.150624, epoch loss=0.448120
Batch=239, step=12240, lr=0.149250, batch loss=0.218431, epoch loss=0.666551
Batch=299, step=12300, lr=0.149000, batch loss=0.141827, epoch loss=0.808377
Batch=359, step=12360, lr=0.148750, batch loss=0.197393, epoch loss=1.005771
Batch=419, step=12420, lr=0.148500, batch loss=0.206608, epoch loss=1.212378
Batch=479, step=12480, lr=0.148250, batch loss=0.177442, epoch loss=1.389820
Batch=539, step=12540, lr=0.148000, batch loss=0.142525, epoch loss=1.532346
Batch=599, step=12600, lr=0.147750, batch loss=0.148226, epoch loss=1.680571
Batch=659, step=12660, lr=0.147500, batch loss=0.227339, epoch loss=1.907910
Batch=719, step=12720, lr=0.147250, batch loss=0.233010, epoch loss=2.140920
Batch=779, step=12780, lr=0.147000, batch loss=0.254135, epoch loss=2.395055
Batch=839, step=12840, lr=0.146750, batch loss=0.226181, epoch loss=2.621237
Batch=899, step=12900, lr=0.146500, batch loss=0.193382, epoch loss=2.814618
Batch=959, step=12960, lr=0.146250, batch loss=0.161132, epoch loss=2.975750
Batch=1019, step=13020, lr=0.146000, batch loss=0.260923, epoch loss=3.236673
Batch=1079, step=13080, lr=0.145750, batch loss=0.115469, epoch loss=3.352142
Batch=1139, step=13140, lr=0.145500, batch loss=0.153933, epoch loss=3.506075
Batch=1199, step=13200, lr=0.145250, batch loss=0.117363, epoch loss=3.623438
Epoch=10, step=13200, lr=0.145250, epoch loss=3.623438
Batch=59, step=13260, lr=0.145000, batch loss=0.143936, epoch loss=0.143936
Batch=119, step=13320, lr=0.144750, batch loss=0.124509, epoch loss=0.268444
Batch=179, step=13380, lr=0.144500, batch loss=0.130308, epoch loss=0.398752
Batch=239, step=13440, lr=0.144250, batch loss=0.189873, epoch loss=0.588625
Batch=299, step=13500, lr=0.144000, batch loss=0.111333, epoch loss=0.699958
Batch=359, step=13560, lr=0.143750, batch loss=0.162030, epoch loss=0.861989
Batch=419, step=13620, lr=0.143500, batch loss=0.159688, epoch loss=1.021677
Batch=479, step=13680, lr=0.143250, batch loss=0.147308, epoch loss=1.168985
Batch=539, step=13740, lr=0.143000, batch loss=0.115886, epoch loss=1.284870
Batch=599, step=13800, lr=0.142750, batch loss=0.120115, epoch loss=1.404985
Batch=659, step=13860, lr=0.142500, batch loss=0.179954, epoch loss=1.584939
Batch=719, step=13920, lr=0.142250, batch loss=0.196672, epoch loss=1.781611
Batch=779, step=13980, lr=0.142000, batch loss=0.231402, epoch loss=2.013014
Batch=839, step=14040, lr=0.141750, batch loss=0.198500, epoch loss=2.211514
Batch=899, step=14100, lr=0.141500, batch loss=0.206894, epoch loss=2.418408
Batch=959, step=14160, lr=0.141250, batch loss=0.106951, epoch loss=2.525359
Batch=1019, step=14220, lr=0.141000, batch loss=0.190634, epoch loss=2.715993
Batch=1079, step=14280, lr=0.140750, batch loss=0.077410, epoch loss=2.793402
Batch=1139, step=14340, lr=0.140500, batch loss=0.120731, epoch loss=2.914134
Batch=1199, step=14400, lr=0.140250, batch loss=0.086428, epoch loss=3.000562
Epoch=11, step=14400, lr=0.140250, epoch loss=3.000562
Batch=59, step=14460, lr=0.140000, batch loss=0.103804, epoch loss=0.103804
Batch=119, step=14520, lr=0.139750, batch loss=0.099337, epoch loss=0.203141
Batch=179, step=14580, lr=0.139500, batch loss=0.099579, epoch loss=0.302721
Batch=239, step=14640, lr=0.139250, batch loss=0.140178, epoch loss=0.442899
Batch=299, step=14700, lr=0.139000, batch loss=0.077403, epoch loss=0.520301
Batch=359, step=14760, lr=0.138750, batch loss=0.119622, epoch loss=0.639924
Batch=419, step=14820, lr=0.138500, batch loss=0.130157, epoch loss=0.770081
Batch=479, step=14880, lr=0.138250, batch loss=0.101563, epoch loss=0.871644
Batch=539, step=14940, lr=0.138000, batch loss=0.110514, epoch loss=0.982158
Batch=599, step=15000, lr=0.137750, batch loss=0.084744, epoch loss=1.066902
Batch=659, step=15060, lr=0.137500, batch loss=0.139189, epoch loss=1.206090
Batch=719, step=15120, lr=0.137250, batch loss=0.160796, epoch loss=1.366887
Batch=779, step=15180, lr=0.137000, batch loss=0.254734, epoch loss=1.621621
Batch=839, step=15240, lr=0.136750, batch loss=0.135939, epoch loss=1.757559
Batch=899, step=15300, lr=0.136500, batch loss=0.146432, epoch loss=1.903991
Batch=959, step=15360, lr=0.136250, batch loss=0.076220, epoch loss=1.980212
Batch=1019, step=15420, lr=0.136000, batch loss=0.164320, epoch loss=2.144532
Batch=1079, step=15480, lr=0.135750, batch loss=0.040574, epoch loss=2.185106
Batch=1139, step=15540, lr=0.135500, batch loss=0.094555, epoch loss=2.279661
Batch=1199, step=15600, lr=0.135250, batch loss=0.060108, epoch loss=2.339769
Epoch=12, step=15600, lr=0.135250, epoch loss=2.339769
Batch=59, step=15660, lr=0.135000, batch loss=0.084183, epoch loss=0.084183
Batch=119, step=15720, lr=0.134750, batch loss=0.147987, epoch loss=0.232169
Batch=179, step=15780, lr=0.134500, batch loss=0.102927, epoch loss=0.335096
Batch=239, step=15840, lr=0.134250, batch loss=0.098909, epoch loss=0.434005
Batch=299, step=15900, lr=0.134000, batch loss=0.043856, epoch loss=0.477861
Batch=359, step=15960, lr=0.133750, batch loss=0.082291, epoch loss=0.560152
Batch=419, step=16020, lr=0.133500, batch loss=0.079843, epoch loss=0.639996
Batch=479, step=16080, lr=0.133250, batch loss=0.060119, epoch loss=0.700114
Batch=539, step=16140, lr=0.133000, batch loss=0.061897, epoch loss=0.762011
Batch=599, step=16200, lr=0.132750, batch loss=0.134167, epoch loss=0.896179
Batch=659, step=16260, lr=0.132500, batch loss=0.084858, epoch loss=0.981037
Batch=719, step=16320, lr=0.132250, batch loss=0.120304, epoch loss=1.101341
Batch=779, step=16380, lr=0.132000, batch loss=0.292037, epoch loss=1.393378
Batch=839, step=16440, lr=0.131750, batch loss=0.091850, epoch loss=1.485228
Batch=899, step=16500, lr=0.131500, batch loss=0.078039, epoch loss=1.563267
Batch=959, step=16560, lr=0.131250, batch loss=0.032100, epoch loss=1.595367
Batch=1019, step=16620, lr=0.131000, batch loss=0.070238, epoch loss=1.665605
Batch=1079, step=16680, lr=0.130750, batch loss=0.043108, epoch loss=1.708713
Batch=1139, step=16740, lr=0.130500, batch loss=0.077692, epoch loss=1.786405
Batch=1199, step=16800, lr=0.130250, batch loss=0.037087, epoch loss=1.823492
Epoch=13, step=16800, lr=0.130250, epoch loss=1.823492
Batch=59, step=16860, lr=0.130000, batch loss=0.034076, epoch loss=0.034076
Batch=119, step=16920, lr=0.129750, batch loss=0.037744, epoch loss=0.071820
Batch=179, step=16980, lr=0.129500, batch loss=0.043298, epoch loss=0.115118
Batch=239, step=17040, lr=0.129250, batch loss=0.057628, epoch loss=0.172746
Batch=299, step=17100, lr=0.129000, batch loss=0.018925, epoch loss=0.191672
Batch=359, step=17160, lr=0.128750, batch loss=0.042383, epoch loss=0.234054
Batch=419, step=17220, lr=0.128500, batch loss=0.045780, epoch loss=0.279835
Batch=479, step=17280, lr=0.128250, batch loss=0.024364, epoch loss=0.304198
Batch=539, step=17340, lr=0.128000, batch loss=0.030391, epoch loss=0.334589
Batch=599, step=17400, lr=0.127750, batch loss=0.035068, epoch loss=0.369657
Batch=659, step=17460, lr=0.127500, batch loss=0.048140, epoch loss=0.417797
Batch=719, step=17520, lr=0.127250, batch loss=0.057325, epoch loss=0.475123
Batch=779, step=17580, lr=0.127000, batch loss=0.077219, epoch loss=0.552342
Batch=839, step=17640, lr=0.126750, batch loss=0.182024, epoch loss=0.734366
Batch=899, step=17700, lr=0.126500, batch loss=0.061792, epoch loss=0.796158
Batch=959, step=17760, lr=0.126250, batch loss=0.021472, epoch loss=0.817630
Batch=1019, step=17820, lr=0.126000, batch loss=0.032998, epoch loss=0.850628
Batch=1079, step=17880, lr=0.125750, batch loss=0.012688, epoch loss=0.863316
Batch=1139, step=17940, lr=0.125500, batch loss=0.034047, epoch loss=0.897363
Batch=1199, step=18000, lr=0.125250, batch loss=0.014590, epoch loss=0.911954
Epoch=14, step=18000, lr=0.125250, epoch loss=0.911954
Batch=59, step=18060, lr=0.125000, batch loss=0.012686, epoch loss=0.012686
Batch=119, step=18120, lr=0.124750, batch loss=0.022939, epoch loss=0.035624
Batch=179, step=18180, lr=0.124500, batch loss=0.046522, epoch loss=0.082147
Batch=239, step=18240, lr=0.124250, batch loss=0.038252, epoch loss=0.120399
Batch=299, step=18300, lr=0.124000, batch loss=0.016519, epoch loss=0.136917
Batch=359, step=18360, lr=0.123750, batch loss=0.024589, epoch loss=0.161506
Batch=419, step=18420, lr=0.123500, batch loss=0.031735, epoch loss=0.193241
Batch=479, step=18480, lr=0.123250, batch loss=0.025120, epoch loss=0.218361
Batch=539, step=18540, lr=0.123000, batch loss=0.055951, epoch loss=0.274313
Batch=599, step=18600, lr=0.122750, batch loss=0.028455, epoch loss=0.302768
Batch=659, step=18660, lr=0.122500, batch loss=0.034128, epoch loss=0.336895
Batch=719, step=18720, lr=0.122250, batch loss=0.048738, epoch loss=0.385633
Batch=779, step=18780, lr=0.122000, batch loss=0.117375, epoch loss=0.503008
Batch=839, step=18840, lr=0.121750, batch loss=0.056132, epoch loss=0.559139
Batch=899, step=18900, lr=0.121500, batch loss=0.051139, epoch loss=0.610278
Batch=959, step=18960, lr=0.121250, batch loss=0.014297, epoch loss=0.624575
Batch=1019, step=19020, lr=0.121000, batch loss=0.026785, epoch loss=0.651360
Batch=1079, step=19080, lr=0.120750, batch loss=0.011511, epoch loss=0.662871
Batch=1139, step=19140, lr=0.120500, batch loss=0.023034, epoch loss=0.685904
Batch=1199, step=19200, lr=0.120250, batch loss=0.009928, epoch loss=0.695832
Epoch=15, step=19200, lr=0.120250, epoch loss=0.695832
Batch=59, step=19260, lr=0.120000, batch loss=0.004749, epoch loss=0.004749
Batch=119, step=19320, lr=0.119750, batch loss=0.011759, epoch loss=0.016508
Batch=179, step=19380, lr=0.119500, batch loss=0.021693, epoch loss=0.038201
Batch=239, step=19440, lr=0.119250, batch loss=0.024080, epoch loss=0.062281
Batch=299, step=19500, lr=0.119000, batch loss=0.017968, epoch loss=0.080249
Batch=359, step=19560, lr=0.118750, batch loss=0.034796, epoch loss=0.115045
Batch=419, step=19620, lr=0.118500, batch loss=0.021239, epoch loss=0.136283
Batch=479, step=19680, lr=0.118250, batch loss=0.009529, epoch loss=0.145812
Batch=539, step=19740, lr=0.118000, batch loss=0.018323, epoch loss=0.164135
Batch=599, step=19800, lr=0.117750, batch loss=0.024489, epoch loss=0.188624
Batch=659, step=19860, lr=0.117500, batch loss=0.021755, epoch loss=0.210380
Batch=719, step=19920, lr=0.117250, batch loss=0.051379, epoch loss=0.261758
Batch=779, step=19980, lr=0.117000, batch loss=0.078267, epoch loss=0.340025
Batch=839, step=20040, lr=0.116750, batch loss=0.030117, epoch loss=0.370142
Batch=899, step=20100, lr=0.116500, batch loss=0.031078, epoch loss=0.401220
Batch=959, step=20160, lr=0.116250, batch loss=0.012080, epoch loss=0.413301
Batch=1019, step=20220, lr=0.116000, batch loss=0.015467, epoch loss=0.428768
Batch=1079, step=20280, lr=0.115750, batch loss=0.002211, epoch loss=0.430978
Batch=1139, step=20340, lr=0.115500, batch loss=0.015382, epoch loss=0.446361
Batch=1199, step=20400, lr=0.115250, batch loss=0.006396, epoch loss=0.452757
Epoch=16, step=20400, lr=0.115250, epoch loss=0.452757
Batch=59, step=20460, lr=0.115000, batch loss=0.003119, epoch loss=0.003119
Batch=119, step=20520, lr=0.114750, batch loss=0.008535, epoch loss=0.011654
Batch=179, step=20580, lr=0.114500, batch loss=0.016372, epoch loss=0.028026
Batch=239, step=20640, lr=0.114250, batch loss=0.014539, epoch loss=0.042565
Batch=299, step=20700, lr=0.114000, batch loss=0.004102, epoch loss=0.046667
Batch=359, step=20760, lr=0.113750, batch loss=0.013648, epoch loss=0.060315
Batch=419, step=20820, lr=0.113500, batch loss=0.017672, epoch loss=0.077988
Batch=479, step=20880, lr=0.113250, batch loss=0.005727, epoch loss=0.083714
Batch=539, step=20940, lr=0.113000, batch loss=0.016267, epoch loss=0.099981
Batch=599, step=21000, lr=0.112750, batch loss=0.018888, epoch loss=0.118869
Batch=659, step=21060, lr=0.112500, batch loss=0.013763, epoch loss=0.132632
Batch=719, step=21120, lr=0.112250, batch loss=0.032082, epoch loss=0.164714
Batch=779, step=21180, lr=0.112000, batch loss=0.052801, epoch loss=0.217515
Batch=839, step=21240, lr=0.111750, batch loss=0.026940, epoch loss=0.244455
Batch=899, step=21300, lr=0.111500, batch loss=0.027840, epoch loss=0.272295
Batch=959, step=21360, lr=0.111250, batch loss=0.015662, epoch loss=0.287957
Batch=1019, step=21420, lr=0.111000, batch loss=0.014797, epoch loss=0.302754
Batch=1079, step=21480, lr=0.110750, batch loss=0.001958, epoch loss=0.304712
Batch=1139, step=21540, lr=0.110500, batch loss=0.013223, epoch loss=0.317936
Batch=1199, step=21600, lr=0.110250, batch loss=0.005442, epoch loss=0.323377
Epoch=17, step=21600, lr=0.110250, epoch loss=0.323377
Batch=59, step=21660, lr=0.110000, batch loss=0.003186, epoch loss=0.003186
Batch=119, step=21720, lr=0.109750, batch loss=0.007126, epoch loss=0.010311
Batch=179, step=21780, lr=0.109500, batch loss=0.012702, epoch loss=0.023014
Batch=239, step=21840, lr=0.109250, batch loss=0.009536, epoch loss=0.032550
Batch=299, step=21900, lr=0.109000, batch loss=0.008626, epoch loss=0.041176
Batch=359, step=21960, lr=0.108750, batch loss=0.015923, epoch loss=0.057099
Batch=419, step=22020, lr=0.108500, batch loss=0.012861, epoch loss=0.069960
Batch=479, step=22080, lr=0.108250, batch loss=0.003310, epoch loss=0.073269
Batch=539, step=22140, lr=0.108000, batch loss=0.017218, epoch loss=0.090487
Batch=599, step=22200, lr=0.107750, batch loss=0.016489, epoch loss=0.106977
Batch=659, step=22260, lr=0.107500, batch loss=0.013295, epoch loss=0.120272
Batch=719, step=22320, lr=0.107250, batch loss=0.021448, epoch loss=0.141720
Batch=779, step=22380, lr=0.107000, batch loss=0.034640, epoch loss=0.176360
Batch=839, step=22440, lr=0.106750, batch loss=0.025937, epoch loss=0.202297
Batch=899, step=22500, lr=0.106500, batch loss=0.029369, epoch loss=0.231666
Batch=959, step=22560, lr=0.106250, batch loss=0.009186, epoch loss=0.240852
Batch=1019, step=22620, lr=0.106000, batch loss=0.010307, epoch loss=0.251159
Batch=1079, step=22680, lr=0.105750, batch loss=0.001138, epoch loss=0.252296
Batch=1139, step=22740, lr=0.105500, batch loss=0.011740, epoch loss=0.264036
Batch=1199, step=22800, lr=0.105250, batch loss=0.005126, epoch loss=0.269162
Epoch=18, step=22800, lr=0.105250, epoch loss=0.269162
Batch=59, step=22860, lr=0.105000, batch loss=0.002030, epoch loss=0.002030
Batch=119, step=22920, lr=0.104750, batch loss=0.005760, epoch loss=0.007790
Batch=179, step=22980, lr=0.104500, batch loss=0.011032, epoch loss=0.018822
Batch=239, step=23040, lr=0.104250, batch loss=0.009427, epoch loss=0.028249
Batch=299, step=23100, lr=0.104000, batch loss=0.001248, epoch loss=0.029497
Batch=359, step=23160, lr=0.103750, batch loss=0.011950, epoch loss=0.041447
Batch=419, step=23220, lr=0.103500, batch loss=0.010356, epoch loss=0.051803
Batch=479, step=23280, lr=0.103250, batch loss=0.002458, epoch loss=0.054260
Batch=539, step=23340, lr=0.103000, batch loss=0.017557, epoch loss=0.071817
Batch=599, step=23400, lr=0.102750, batch loss=0.015187, epoch loss=0.087005
Batch=659, step=23460, lr=0.102500, batch loss=0.013895, epoch loss=0.100900
Batch=719, step=23520, lr=0.102250, batch loss=0.019899, epoch loss=0.120800
Batch=779, step=23580, lr=0.102000, batch loss=0.025463, epoch loss=0.146263
Batch=839, step=23640, lr=0.101750, batch loss=0.028204, epoch loss=0.174467
Batch=899, step=23700, lr=0.101500, batch loss=0.021861, epoch loss=0.196328
Batch=959, step=23760, lr=0.101250, batch loss=0.010119, epoch loss=0.206447
Batch=1019, step=23820, lr=0.101000, batch loss=0.008635, epoch loss=0.215082
Batch=1079, step=23880, lr=0.100750, batch loss=0.001557, epoch loss=0.216639
Batch=1139, step=23940, lr=0.100500, batch loss=0.008463, epoch loss=0.225102
Batch=1199, step=24000, lr=0.100250, batch loss=0.004761, epoch loss=0.229864
Epoch=19, step=24000, lr=0.100250, epoch loss=0.229864
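
For readers scanning the numbers above: the learning rate drops by 0.000250 every 60 steps, and each logged "epoch loss" is the previous value plus the current "batch loss", resetting at every Epoch= summary line. The OCaml sketch below reproduces only that bookkeeping; it is an illustration, not ocannl's training loop, the loss value is a hypothetical stand-in, and the learning-rate formula is merely fitted to the printed values.

    (* A minimal sketch of the bookkeeping visible in the log; not ocannl code. *)
    let () =
      let batches_per_epoch = 1200 and log_every = 60 in
      (* Linear decay fitted to the log: 0.000250 less per 60 steps. *)
      let lr_at step = 0.200250 -. float_of_int step /. 240_000. in
      (* Hypothetical stand-in for the model's per-batch loss. *)
      let fake_batch_loss _step = 0.1 in
      let step = ref 0 in
      for epoch = 0 to 1 do
        let epoch_loss = ref 0. in
        for batch = 0 to batches_per_epoch - 1 do
          incr step;
          if (batch + 1) mod log_every = 0 then begin
            let loss = fake_batch_loss !step in
            epoch_loss := !epoch_loss +. loss;
            Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
              batch !step (lr_at !step) loss !epoch_loss
          end
        done;
        Printf.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n"
          epoch !step (lr_at !step) !epoch_loss
      done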

Half-moons scatterplot and decision boundary:
┌────────────────────────────────────────────────────────────────────────────────────────────────────┐
│********************************#*******************************************************************│
│**********************#*#*#######*###*#####*********************************************************│
│**********************#########################*****************************************************│
│*****************#**########*######*###########*###*************************************************│
│***************#################*###################************************************************│
│************######*#################*#################**********************************************│
│**********#*#####*########*#**************##*#########*#********************************************│
│***********########*##*#******************#*****##########******************************************│
│***********###########*************************############**************************************...│
│********######*####*********************************###*###*#**********************************.....│
│*******######**##***************.******************#*######*#*******************************........│
│*******##*##**##**********...........***************########*##***************************..........│
│*****#######************.......%...%%...***************#########*************************.........%.│
│******######***********.........%........***************##*#####***********************.......%.%.%.│
│***#########**********.........%%%.%%......*************#*#######*********************.......%.%%%%.│
│****#######**********..........%%%%.........************#########********************........%%.%%.%│
│**#######************..........%%%%%%%.......**************###*###******************.........%%%%%%.│
│*##*####************...........%%%%%%%.........***********########*****************..........%%%%%%.│
│*#######************...........%%%%%%%..........************#######**************............%%%%%%.│
│*##*####***********............%%.%%%%%..........************####***************............%%%%%%%.│
│*#####*#***********.............%%%%%%%............**********##*###************..............%%%%%..│
│#######***********.............%.%%%%%%.............*********#######**********.............%%%%.%%..│
│#####*#**********...............%%%%%%%..............********#######********...............%%%%%%%%.│
│###*#*#**********...............%%%%%%%%%..............*******######*******................%%%%%%...│
│#######*********.................%%%%%%%%...............*****###*###******................%%%%%%....│
│######**********.................%%%%%%%%%................***#*###*******...............%%%%%%%%%...│
│*#*##*#********...................%%%%%%%%%%...............***######****.................%%%%%%.....│
│#****##********....................%%%%%%%%%................***###*#**................%.%%%%%%%.....│
│**************.....................%.%%%%%%...................*******..................%.%%.%%......│
│**************.......................%..%%%%%%%................*****..............%.%%%%%%%%%.......│
│*************.........................%.%%%.%%%%................***...............%%%%%%%.%.%.......│
│************............................%..%%%%..%................................%%%%%%%%..........│
│************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........│
│***********..............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........│
│***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............│
│**********...................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............│
│**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................│
│*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................│
│********.............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................│
│********................................................%...%%%%.%%.%%%%..%.........................│
└────────────────────────────────────────────────────────────────────────────────────────────────────┘
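
The plot above shows the classified half-moons data together with the learned decision boundary. For reference, a common way to sample such a dataset is sketched below in OCaml; this is a generic recipe under the usual half-moons definition, not a claim about how this particular test builds its input.

    (* Generic half-moons sampler: two interleaved noisy arcs labelled +1 / -1. *)
    let half_moons ?(noise = 0.1) n =
      Random.self_init ();
      let gauss () =
        (* Box-Muller transform for Gaussian noise. *)
        let u1 = 1.0 -. Random.float 1.0 and u2 = Random.float 1.0 in
        sqrt (-2. *. log u1) *. cos (2. *. Float.pi *. u2)
      in
      List.init n (fun i ->
          let t = Float.pi *. float_of_int (i / 2) /. float_of_int (n / 2) in
          if i mod 2 = 0 then
            (* upper moon, class +1 *)
            ((cos t +. noise *. gauss (), sin t +. noise *. gauss ()), 1)
          else
            (* lower moon, class -1, shifted to interleave with the upper arc *)
            ((1. -. cos t +. noise *. gauss (), 0.5 -. sin t +. noise *. gauss ()), -1))

A call such as half_moons 200 yields 200 labelled points roughly matching the two interleaved clusters visible in the plot.
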
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @install @check @runtest && rm -rf _build" failed with exit status 1
2025-05-22 12:20.30: Job failed: Failed: Build failed