
debian-12-5.3_arm64_opam-2.3


Logs

2025-05-22 20:00.59: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (39741884b740497ac10065d5e464e6c70f9151f4) (linux-arm64:debian-12-5.3_arm64_opam-2.3)
Base: ocaml/opam:debian-12-ocaml-5.3@sha256:f3f7c954361569e1f7fd1e1b6ab41b5bd4f00147ee97d71ea399a22764f568d3
Opam project build


To reproduce locally:


git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 39741884
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-5.3@sha256:f3f7c954361569e1f7fd1e1b6ab41b5bd4f00147ee97d71ea399a22764f568d3
# debian-12-5.3_arm64_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build


END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
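
Note that the Dockerfile's final RUN executes the test suite, so when the tests fail (as in this job) `docker build .` stops at that layer and no image is produced. A minimal sketch for reproducing the failure interactively, assuming the final RUN line is commented out first and the image is tagged ocannl-repro (both are local conveniences, not part of the CI's own repro block):

# build everything up to (but not including) the test step
docker build -t ocannl-repro .
# open a shell in the prepared image (opam user, WORKDIR /src)
docker run --rm -it ocannl-repro bash
# inside the container, run the same command as the failing RUN step:
opam exec -- dune build @install @check @runtest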


2025-05-22 20:00.59: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-5.3@sha256:f3f7c954361569e1f7fd1e1b6ab41b5bd4f00147ee97d71ea399a22764f568d3-debian-12-5.3_arm64_opam-2.3-63d0fa7caba437c680f3f62d33f451da"
2025-05-22 20:00.59: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-5.3@sha256:f3f7c954361569e1f7fd1e1b6ab41b5bd4f00147ee97d71ea399a22764f568d3)
(comment debian-12-5.3_arm64_opam-2.3)
(user (uid 1000) (gid 1000))
(env CLICOLOR_FORCE 1)
(env OPAMCOLOR always)
(workdir /src)
(run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
(run (shell "opam init --reinit -ni"))
(run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
(workdir /src)
(run (shell "sudo chown opam /src"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u"))
(copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
(run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
(run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
(env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
(env CI true)
(env OCAMLCI true)
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
(copy (src .) (dst /src))
(run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)


2025-05-22 20:00.59: Waiting for resource in pool OCluster
2025-05-22 20:00.59: Waiting for worker…
2025-05-22 20:01.50: Got resource from pool OCluster
Building on molpadia.caelum.ci.dev
HEAD is now at 9afb61d2 In progress / broken: Format -> PPrint migration first pass by Claude
HEAD is now at 39741884 Untested: convert remaining uses of Format except where printing Sexp values


(from ocaml/opam:debian-12-ocaml-5.3@sha256:f3f7c954361569e1f7fd1e1b6ab41b5bd4f00147ee97d71ea399a22764f568d3)
Unable to find image 'ocaml/opam:debian-12-ocaml-5.3@sha256:f3f7c954361569e1f7fd1e1b6ab41b5bd4f00147ee97d71ea399a22764f568d3' locally
docker.io/ocaml/opam@sha256:f3f7c954361569e1f7fd1e1b6ab41b5bd4f00147ee97d71ea399a22764f568d3: Pulling from ocaml/opam
Digest: sha256:f3f7c954361569e1f7fd1e1b6ab41b5bd4f00147ee97d71ea399a22764f568d3
Status: Downloaded newer image for ocaml/opam@sha256:f3f7c954361569e1f7fd1e1b6ab41b5bd4f00147ee97d71ea399a22764f568d3
2025-05-22 20:01.51 ---> using "978b9672b52860448462f39ede8f8b4212c220fbb438fba1dfbc6def719b7210" from cache


/: (comment debian-12-5.3_arm64_opam-2.3)


/: (user (uid 1000) (gid 1000))


/: (env CLICOLOR_FORCE 1)


/: (env OPAMCOLOR always)


/: (workdir /src)


/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-05-22 20:01.51 ---> using "496cc35f114b05b801d3e9ee6474864c453fb63d6b47eb13a1549a1750ca635f" from cache


/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.


This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.


Continue? [y/n] y
Format upgrade done.


<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
2025-05-22 20:01.51 ---> using "7d05921eabd1fe38008746d72fe6393680f11c689b91eb2d7502bbd1bff3fb98" from cache


/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-05-22 20:01.51 ---> using "9502e2c4f53b5580308524d5b4c73de1f298163f2699b8bac50471099eae33e6" from cache


/src: (workdir /src)


/src: (run (shell "sudo chown opam /src"))
2025-05-22 20:01.51 ---> using "981e6ce16a11cbfdb1466639b83481acca0dd8d5862dfa3fd7af24a690882e61" from cache


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
* branch                  master     -> FETCH_HEAD
35eb2f107a..2df846cb67  master     -> origin/master
2df846cb67 Merge pull request #27910 from maiste/release-dune-3.19.0


<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] synchronised from git+file:///home/opam/opam-repository


Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 20:01.51 ---> using "5a64cce01cf26e9cb5fe0ca979b8fb41492277527403591f9b14714c4aeba7b8" from cache


/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
2025-05-22 20:01.51 ---> saved as "1dadfdc27f6fe779cba9d632473aa86879db7dab0d01bd7853a10a6bee38cdc6"


/src: (run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
[neural_nets_lib.dev] synchronised (file:///src)
neural_nets_lib is now pinned to file:///src (version dev)
[arrayjit.dev] synchronised (file:///src)
arrayjit is now pinned to file:///src (version dev)
2025-05-22 20:01.59 ---> saved as "e47b47c60490be33845f5ec3c29f34b0d03e25176d38230b00880539761e7b6e"


/src: (run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
2025-05-22 20:02.00 ---> saved as "21a38c2e2f6fbb337ec7225b865b4b9dadd39d9ccb06d84e4cc1fca4eb8a994f"


/src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")


/src: (env CI true)


/src: (env OCAMLCI true)


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
+ /usr/bin/sudo "apt-get" "update"
- Get:1 http://deb.debian.org/debian bookworm InRelease [151 kB]
- Get:2 http://deb.debian.org/debian bookworm-updates InRelease [55.4 kB]
- Get:3 http://deb.debian.org/debian-security bookworm-security InRelease [48.0 kB]
- Get:4 http://deb.debian.org/debian bookworm/main arm64 Packages [8693 kB]
- Get:5 http://deb.debian.org/debian-security bookworm-security/main arm64 Packages [254 kB]
- Fetched 9202 kB in 2s (4039 kB/s)
- Reading package lists...


<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[neural_nets_lib.dev] synchronised (file:///src)
[arrayjit.dev] synchronised (file:///src)


[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).


The following system packages will first need to be installed:
libffi-dev pkg-config


<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>


+ /usr/bin/sudo "apt-get" "install" "-qq" "-yy" "libffi-dev" "pkg-config"
- debconf: delaying package configuration, since apt-utils is not installed
- Selecting previously unselected package libffi-dev:arm64.
- (Reading database ... 18801 files and directories currently installed.)
- Preparing to unpack .../libffi-dev_3.4.4-1_arm64.deb ...
- Unpacking libffi-dev:arm64 (3.4.4-1) ...
- Selecting previously unselected package libpkgconf3:arm64.
- Preparing to unpack .../libpkgconf3_1.8.1-1_arm64.deb ...
- Unpacking libpkgconf3:arm64 (1.8.1-1) ...
- Selecting previously unselected package pkgconf-bin.
- Preparing to unpack .../pkgconf-bin_1.8.1-1_arm64.deb ...
- Unpacking pkgconf-bin (1.8.1-1) ...
- Selecting previously unselected package pkgconf:arm64.
- Preparing to unpack .../pkgconf_1.8.1-1_arm64.deb ...
- Unpacking pkgconf:arm64 (1.8.1-1) ...
- Selecting previously unselected package pkg-config:arm64.
- Preparing to unpack .../pkg-config_1.8.1-1_arm64.deb ...
- Unpacking pkg-config:arm64 (1.8.1-1) ...
- Setting up libffi-dev:arm64 (3.4.4-1) ...
- Setting up libpkgconf3:arm64 (1.8.1-1) ...
- Setting up pkgconf-bin (1.8.1-1) ...
- Setting up pkgconf:arm64 (1.8.1-1) ...
- Setting up pkg-config:arm64 (1.8.1-1) ...
- Processing triggers for libc-bin (2.36-9+deb12u10) ...
2025-05-22 20:02.24 ---> saved as "3d7845e9ef85ba41380c52e1cb06bf07e71c7b22d6637956effa3b96d9478c72"


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following actions will be performed:
=== install 75 packages
- install angstrom                0.16.1
- install astring                 0.8.5
- install backoff                 0.1.1
- install base                    v0.17.2
- install bigarray-compat         1.1.0
- install bigstringaf             0.10.0
- install camlp-streams           5.0.1
- install cmdliner                1.3.0
- install conf-libffi             2.0.0
- install conf-pkg-config         4
- install cppo                    1.8.0
- install csexp                   1.5.2
- install ctypes                  0.23.0
- install ctypes-foreign          0.23.0
- install dune                    3.19.0
- install dune-configurator       3.19.0
- install fieldslib               v0.17.0
- install fmt                     0.10.0
- install integers                0.7.0
- install jane-street-headers     v0.17.0
- install jst-config              v0.17.0
- install logs                    0.8.0
- install mdx                     2.5.0
- install mtime                   2.1.0
- install multicore-magic         2.3.1
- install num                     1.5-1
- install ocaml-compiler-libs     v0.17.0
- install ocaml-syntax-shims      1.0.0
- install ocaml-version           4.0.0
- install ocaml_intrinsics_kernel v0.17.1
- install ocamlbuild              0.16.1
- install ocamlfind               1.9.8
- install parsexp                 v0.17.0
- install pprint                  20230830
- install ppx_assert              v0.17.0
- install ppx_base                v0.17.0
- install ppx_cold                v0.17.0
- install ppx_compare             v0.17.0
- install ppx_derivers            1.2.1
- install ppx_deriving            6.0.3
- install ppx_enumerate           v0.17.0
- install ppx_expect              v0.17.2
- install ppx_fields_conv         v0.17.0
- install ppx_globalize           v0.17.0
- install ppx_hash                v0.17.0
- install ppx_here                v0.17.0
- install ppx_inline_test         v0.17.0
- install ppx_minidebug           2.2.0
- install ppx_optcomp             v0.17.0
- install ppx_sexp_conv           v0.17.0
- install ppx_string              v0.17.0
- install ppx_variants_conv       v0.17.0
- install ppxlib                  0.35.0
- install ppxlib_jane             v0.17.2
- install printbox                0.12
- install printbox-ext-plot       0.12
- install printbox-html           0.12
- install printbox-md             0.12
- install printbox-text           0.12
- install ptime                   1.2.0
- install re                      1.12.0
- install result                  1.5
- install saturn_lockfree         0.5.0
- install seq                     base
- install sexplib                 v0.17.0
- install sexplib0                v0.17.0
- install stdio                   v0.17.0
- install stdlib-shims            0.3.0
- install thread-local-storage    0.2
- install time_now                v0.17.0
- install topkg                   1.0.8
- install tyxml                   4.6.0
- install uucp                    16.0.0
- install uutf                    1.0.4
- install variantslib             v0.17.0


<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved backoff.0.1.1  (cached)
-> retrieved astring.0.8.5  (cached)
-> retrieved angstrom.0.16.1  (cached)
-> retrieved base.v0.17.2  (cached)
-> retrieved bigarray-compat.1.1.0  (cached)
-> retrieved bigstringaf.0.10.0  (cached)
-> retrieved camlp-streams.5.0.1  (cached)
-> installed conf-pkg-config.4
-> retrieved cmdliner.1.3.0  (cached)
-> retrieved cppo.1.8.0  (cached)
-> retrieved csexp.1.5.2  (cached)
-> installed conf-libffi.2.0.0
-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0  (cached)
-> retrieved fieldslib.v0.17.0  (cached)
-> retrieved fmt.0.10.0  (cached)
-> retrieved integers.0.7.0  (cached)
-> retrieved jane-street-headers.v0.17.0  (cached)
-> retrieved jst-config.v0.17.0  (cached)
-> retrieved logs.0.8.0  (cached)
-> retrieved mtime.2.1.0  (cached)
-> retrieved mdx.2.5.0  (cached)
-> retrieved multicore-magic.2.3.1  (cached)
-> retrieved num.1.5-1  (cached)
-> retrieved ocaml-compiler-libs.v0.17.0  (cached)
-> retrieved ocaml-syntax-shims.1.0.0  (cached)
-> retrieved ocaml-version.4.0.0  (cached)
-> retrieved ocaml_intrinsics_kernel.v0.17.1  (cached)
-> retrieved ocamlbuild.0.16.1  (cached)
-> retrieved ocamlfind.1.9.8  (cached)
-> retrieved parsexp.v0.17.0  (cached)
-> retrieved dune.3.19.0, dune-configurator.3.19.0  (cached)
-> retrieved pprint.20230830  (cached)
-> retrieved ppx_assert.v0.17.0  (cached)
-> retrieved ppx_base.v0.17.0  (cached)
-> retrieved ppx_cold.v0.17.0  (cached)
-> retrieved ppx_derivers.1.2.1  (cached)
-> retrieved ppx_compare.v0.17.0  (cached)
-> retrieved ppx_deriving.6.0.3  (cached)
-> retrieved ppx_enumerate.v0.17.0  (cached)
-> installed cmdliner.1.3.0
-> installed num.1.5-1
-> retrieved ppx_expect.v0.17.2  (cached)
-> retrieved ppx_fields_conv.v0.17.0  (cached)
-> retrieved ppx_globalize.v0.17.0  (cached)
-> retrieved ppx_hash.v0.17.0  (cached)
-> retrieved ppx_here.v0.17.0  (cached)
-> retrieved ppx_inline_test.v0.17.0  (cached)
-> retrieved ppx_optcomp.v0.17.0  (cached)
-> retrieved ppx_string.v0.17.0  (cached)
-> retrieved ppx_sexp_conv.v0.17.0  (cached)
-> retrieved ppx_variants_conv.v0.17.0  (cached)
-> retrieved ppx_minidebug.2.2.0  (cached)
-> retrieved ppxlib_jane.v0.17.2  (cached)
-> retrieved ptime.1.2.0  (cached)
-> retrieved re.1.12.0  (cached)
-> retrieved result.1.5  (cached)
-> retrieved ppxlib.0.35.0  (cached)
-> retrieved seq.base  (cached)
-> installed seq.base
-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12  (cached)
-> retrieved saturn_lockfree.0.5.0  (cached)
-> retrieved sexplib.v0.17.0  (cached)
-> retrieved sexplib0.v0.17.0  (cached)
-> retrieved stdio.v0.17.0  (cached)
-> retrieved stdlib-shims.0.3.0  (cached)
-> retrieved thread-local-storage.0.2  (cached)
-> retrieved time_now.v0.17.0  (cached)
-> retrieved topkg.1.0.8  (cached)
-> retrieved uutf.1.0.4  (cached)
-> retrieved variantslib.v0.17.0  (cached)
-> retrieved tyxml.4.6.0  (cached)
-> retrieved uucp.16.0.0  (cached)
-> installed ocamlfind.1.9.8
-> installed ocamlbuild.0.16.1
-> installed topkg.1.0.8
-> installed uutf.1.0.4
-> installed mtime.2.1.0
-> installed fmt.0.10.0
-> installed astring.0.8.5
-> installed ptime.1.2.0
-> installed logs.0.8.0
-> installed dune.3.19.0
-> installed jane-street-headers.v0.17.0
-> installed ppx_derivers.1.2.1
-> installed backoff.0.1.1
-> installed stdlib-shims.0.3.0
-> installed csexp.1.5.2
-> installed bigarray-compat.1.1.0
-> installed camlp-streams.5.0.1
-> installed multicore-magic.2.3.1
-> installed ocaml-version.4.0.0
-> installed ocaml_intrinsics_kernel.v0.17.1
-> installed pprint.20230830
-> installed printbox.0.12
-> installed result.1.5
-> installed sexplib0.v0.17.0
-> installed thread-local-storage.0.2
-> installed integers.0.7.0
-> installed cppo.1.8.0
-> installed ocaml-syntax-shims.1.0.0
-> installed ocaml-compiler-libs.v0.17.0
-> installed re.1.12.0
-> installed saturn_lockfree.0.5.0
-> installed dune-configurator.3.19.0
-> installed parsexp.v0.17.0
-> installed bigstringaf.0.10.0
-> installed sexplib.v0.17.0
-> installed angstrom.0.16.1
-> installed mdx.2.5.0
-> installed tyxml.4.6.0
-> installed printbox-html.0.12
-> installed ctypes.0.23.0
-> installed base.v0.17.2
-> installed variantslib.v0.17.0
-> installed fieldslib.v0.17.0
-> installed stdio.v0.17.0
-> installed ctypes-foreign.0.23.0
-> installed ppxlib.0.35.0
-> installed ppxlib_jane.v0.17.2
-> installed ppx_cold.v0.17.0
-> installed ppx_optcomp.v0.17.0
-> installed ppx_here.v0.17.0
-> installed ppx_variants_conv.v0.17.0
-> installed ppx_fields_conv.v0.17.0
-> installed ppx_globalize.v0.17.0
-> installed ppx_compare.v0.17.0
-> installed ppx_deriving.6.0.3
-> installed ppx_enumerate.v0.17.0
-> installed uucp.16.0.0
-> installed printbox-text.0.12
-> installed ppx_sexp_conv.v0.17.0
-> installed printbox-md.0.12
-> installed printbox-ext-plot.0.12
-> installed ppx_hash.v0.17.0
-> installed ppx_assert.v0.17.0
-> installed ppx_base.v0.17.0
-> installed ppx_minidebug.2.2.0
-> installed jst-config.v0.17.0
-> installed ppx_string.v0.17.0
-> installed time_now.v0.17.0
-> installed ppx_inline_test.v0.17.0
-> installed ppx_expect.v0.17.2
Done.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 20:04.46 ---> saved as "30839f749f033f33eb44885ad63bbb2bd84d70cff684831cb97e2ad922f9f312"


/src: (copy (src .) (dst /src))
2025-05-22 20:04.47 ---> saved as "92c89ac893d693bab01ed5dd49659c07fdfe350f3b845ea62791db6f812edae9"


/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
(cd _build/default/test/config && ../../arrayjit/bin/read_config.exe --read=backend)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/config/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Wrote value of 'backend' to ocannl_backend.txt
(cd _build/default/test_ppx && ./test_ppx_op.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/dune", lines 30-40, characters 0-281:
30 | (rule
31 |  (alias runtest)
32 |  (target
33 |   (dir log_files))
34 |  (action
35 |   (run
36 |    %{dep:micrograd_demo_logging.exe}
37 |    "--ocannl_debug_backend=text"
38 |    "--ocannl_log_file_stem=micrograd_demo_logging"
39 |    "--ocannl_log_main_domain_to_stdout=false"
40 |    "--ocannl_debug_log_to_stream_files=true")))
(cd _build/default/test && ./micrograd_demo_logging.exe --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_stream_files=true)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_cd_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Found true, commandline --ocannl_debug_log_to_stream_files=true
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Fatal error: exception File "src/printbox-text/PrintBox_text.ml", line 212, characters 6-12: Assertion failed
Raised at PrintBox_text.Output.Make_out.to_buf_aux_ in file "src/printbox-text/PrintBox_text.ml", line 212, characters 6-50
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 19-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from PrintBox_text.Output.Make_out.render in file "src/printbox-text/PrintBox_text.ml", line 242, characters 14-64
Called from PrintBox_text.output in file "src/printbox-text/PrintBox_text.ml", line 851, characters 2-31
Called from Minidebug_runtime.PrintBox.output_box in file "minidebug_runtime.ml", line 1527, characters 19-59
Called from Minidebug_runtime.PrintBox.close_log_impl.close_tree in file "minidebug_runtime.ml", line 1572, characters 6-38
Called from Backends.Add_buffer_retrieval_and_syncing.sync_routine in file "arrayjit/lib/backends.ml", lines 144-172, characters 31-82
Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 454-455, characters 4-92
Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 441-455, characters 23-92
Called from Dune__exe__Micrograd_demo_logging in file "test/micrograd_demo_logging.ml", line 34, characters 13-77
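
The assertion above is raised while ppx_minidebug renders a log tree through printbox-text. To iterate on it outside the CI harness, the failing command (quoted just above the trace) can be re-run directly through dune; a minimal sketch, assuming a local checkout of commit 39741884 with the same opam switch as this job:

# re-run only the failing logging test with the exact flags from the log
opam exec -- dune exec test/micrograd_demo_logging.exe -- \
  --ocannl_debug_backend=text \
  --ocannl_log_file_stem=micrograd_demo_logging \
  --ocannl_log_main_domain_to_stdout=false \
  --ocannl_debug_log_to_stream_files=true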
(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/8f1a37a959f740720f15a4fa034fcfb3/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/micrograd_demo.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/micrograd_demo.ml _build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/micrograd_demo.ml.corrected
diff --git a/_build/default/test/micrograd_demo.ml b/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/micrograd_demo.ml.corrected
index 77e46c6..ab81526 100644
--- a/_build/default/test/micrograd_demo.ml
+++ b/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/micrograd_demo.ml.corrected
@@ -52,15 +52,14 @@ let%expect_test "Micrograd README basic example" =
│├┼───────┤       │
│││ -4.00 │       │
│└┴───────┘       │
-    └─────────────────┘
-    ┌────────────────────────┐
-                                                              │[0]: a shape 0:1  grad_a│
-                                                              │┌┬─────────┐            │
-                                                              │││axis 0   │            │
-                                                              │├┼─────────┤            │
-                                                              │││ 1.38e+2 │            │
-                                                              │└┴─────────┘            │
-                                                              └────────────────────────┘
+    └─────────────────┘┌────────────────────────┐
+    │[0]: a shape 0:1  grad_a│
+    │┌┬─────────┐            │
+    │││axis 0   │            │
+    │├┼─────────┤            │
+    │││ 1.38e+2 │            │
+    │└┴─────────┘            │
+    └────────────────────────┘
|}];
Tensor.print ~with_code:false ~with_grad:true `Default b;
[%expect
@@ -72,15 +71,14 @@ let%expect_test "Micrograd README basic example" =
│├┼──────┤        │
│││ 2.00 │        │
│└┴──────┘        │
-    └─────────────────┘
-    ┌────────────────────────┐
-                                                              │[2]: b shape 0:1  grad_b│
-                                                              │┌┬─────────┐            │
-                                                              │││axis 0   │            │
-                                                              │├┼─────────┤            │
-                                                              │││ 6.45e+2 │            │
-                                                              │└┴─────────┘            │
-                                                              └────────────────────────┘
+    └─────────────────┘┌────────────────────────┐
+    │[2]: b shape 0:1  grad_b│
+    │┌┬─────────┐            │
+    │││axis 0   │            │
+    │├┼─────────┤            │
+    │││ 6.45e+2 │            │
+    │└┴─────────┘            │
+    └────────────────────────┘
|}]


let%expect_test "Micrograd half-moons example" =
File "test/hello_world_op.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/hello_world_op.ml _build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/hello_world_op.ml.corrected
diff --git a/_build/default/test/hello_world_op.ml b/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/hello_world_op.ml.corrected
index ba9d7ef..6bfa309 100644
--- a/_build/default/test/hello_world_op.ml
+++ b/_build/.sandbox/76866dfabc827e0637e14ecf7efaf3df/default/test/hello_world_op.ml.corrected
@@ -102,36 +102,46 @@ let%expect_test "Print constant tensor" =
let%op hey = [ (1, 2, 3); (4, 5, 6) ] in
Train.forward_and_forget backend ctx hey;
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey;
-  [%expect {| [1.00, 2.00, 3.00; 4.00, 5.00, 6.00] |}];
+  [%expect {|
+    [0]: [  1.00 , 2.00 , 3.00  ;  4.00 , 5.00 , 6.00  ]_hey shape 1:3->0:2  [
+       1.00 , 2.00 , 3.00
+      ;  4.00 , 5.00 , 6.00
+    ]
+    |}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;
[%expect
{|
-    ┌─────────────────────────────────────────────────────────────┐
-    │[0]: [1.00, 2.00, 3.00; 4.00, 5.00, 6.00]_hey shape 1:3->0:2 │
-    │┌──────┬──────────────────┐                                  │
-    ││      │axis 1            │                                  │
-    │├──────┼──────────────────┤                                  │
-    ││axis 0│ 1.00  2.00  3.00 │                                  │
-    ││      │ 4.00  5.00  6.00 │                                  │
-    │└──────┴──────────────────┘                                  │
-    └─────────────────────────────────────────────────────────────┘
+    ┌────────────────────────────────────────────────────────────────────────┐
+    │[0]: [  1.00 , 2.00 , 3.00  ;  4.00 , 5.00 , 6.00  ]_hey shape 1:3->0:2 │
+    │┌──────┬──────────────────┐                                             │
+    ││      │axis 1            │                                             │
+    │├──────┼──────────────────┤                                             │
+    ││axis 0│ 1.00  2.00  3.00 │                                             │
+    ││      │ 4.00  5.00  6.00 │                                             │
+    │└──────┴──────────────────┘                                             │
+    └────────────────────────────────────────────────────────────────────────┘
|}];
let%op hoo = [| [ 1; 2; 3 ]; [ 4; 5; 6 ] |] in
Train.forward_and_forget backend ctx hoo;
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo;
-  [%expect {| [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|] |}];
+  [%expect {|
+    [1]: [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]_hoo shape 0:2|1:3  [|
+      [ 1.00 ; 2.00 ; 3.00 ]
+      ; [ 4.00 ; 5.00 ; 6.00 ]
+    |]
+    |}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo;
[%expect
{|
-    ┌──────────────────────────────────────────────────────────────────┐
-    │[1]: [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|]_hoo shape 0:2|1:3 │
-    │┌──────┬──────────────────┐                                       │
-    ││      │axis 1            │                                       │
-    │├──────┼──────────────────┤                                       │
-    ││axis 0│ 1.00  2.00  3.00 │                                       │
-    ││      │ 4.00  5.00  6.00 │                                       │
-    │└──────┴──────────────────┘                                       │
-    └──────────────────────────────────────────────────────────────────┘
+    ┌─────────────────────────────────────────────────────────────────────────────┐
+    │[1]: [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]_hoo shape 0:2|1:3 │
+    │┌──────┬──────────────────┐                                                  │
+    ││      │axis 1            │                                                  │
+    │├──────┼──────────────────┤                                                  │
+    ││axis 0│ 1.00  2.00  3.00 │                                                  │
+    ││      │ 4.00  5.00  6.00 │                                                  │
+    │└──────┴──────────────────┘                                                  │
+    └─────────────────────────────────────────────────────────────────────────────┘
|}];
let%op hey2 =
[
@@ -145,10 +155,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey2;
[%expect
{|
-    [(1.00, 2.00, 3.00), (4.00, 5.00, 6.00);
-      (7.00, 8.00, 9.00), (10.00, 11.00, 12.00);
-      (13.00, 14.00, 15.00), (16.00, 17.00, 18.00);
-      (19.00, 20.00, 21.00), (22.00, 23.00, 24.00)]
+    [2]: c4x2x3_hey2 shape 1:2,2:3->0:4  [
+       ( 1.00 , 2.00 , 3.00 ) , ( 4.00 , 5.00 , 6.00 )
+      ;  ( 7.00 , 8.00 , 9.00 ) , ( 10.00 , 11.00 , 12.00 )
+      ;  ( 13.00 , 14.00 , 15.00 ) , ( 16.00 , 17.00 , 18.00 )
+      ;  ( 19.00 , 20.00 , 21.00 ) , ( 22.00 , 23.00 , 24.00 )
+    ]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2;
[%expect
@@ -178,10 +190,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo2;
[%expect
{|
-    [|[[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]];
-      [[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]];
-      [[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]];
-      [[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]]|]
+    [3]: c4x2x3_hoo2 shape 0:4|1:2,2:3  [|
+      [ [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] ]
+      ; [ [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] ]
+      ; [ [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] ]
+      ; [ [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] ]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo2;
[%expect
@@ -209,10 +223,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo;
[%expect
{|
-    [|[|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|];
-      [|[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]|];
-      [|[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]|];
-      [|[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]|]|]
+    [4]: c4x2x3_heyhoo shape 0:4,1:2|2:3  [|
+      [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]
+      ; [| [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] |]
+      ; [| [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] |]
+      ; [| [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] |]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo;
[%expect
@@ -240,15 +256,24 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo2;
[%expect
{|
-    [|
-      [|[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];
-        [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]|];
-      [|[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];
-        [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]|];
-      [|[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];
-        [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]|];
-      [|[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];
-        [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]|]|]
+    [5]: c4x2x3x2_heyhoo2 shape 0:4,1:2|2:3,3:2  [|
+      [|
+        [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]
+        ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]
+      |]
+      ; [|
+        [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]
+        ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]
+      |]
+      ; [|
+        [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]
+        ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]
+      |]
+      ; [|
+        [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]
+        ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]
+      |]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo2;
[%expect
@@ -293,17 +318,28 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo3;
[%expect
{|
-    [|
+    [6]: c2x2x2x3x2_heyhoo3 shape 0:2,1:2|2:2,3:3,4:2  [|
[|
-        [[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];
-          [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]];
-        [[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];
-          [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]]|];
-      [|
-        [[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];
-          [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]];
-        [[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];
-          [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]]|]|]
+        [
+          [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]
+          ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]
+        ]
+        ; [
+          [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]
+          ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]
+        ]
+      |]
+      ; [|
+        [
+          [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]
+          ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]
+        ]
+        ; [
+          [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]
+          ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]
+        ]
+      |]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo3;
[%expect
@@ -353,17 +389,28 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo4;
[%expect
{|
-    [|
-      [
-        [[1.00, 31.00; 2.00, 32.00; 3.00, 33.00];
-          [4.00, 34.00; 5.00, 35.00; 6.00, 36.00]];
-        [[7.00, 37.00; 8.00, 38.00; 9.00, 39.00];
-          [10.00, 40.00; 11.00, 41.00; 12.00, 42.00]]];
+    [7]: c2x2x2x3x2_heyhoo4 shape 0:2|4:2->1:2,2:2,3:3  [|
[
-        [[13.00, 43.00; 14.00, 44.00; 15.00, 45.00];
-          [16.00, 46.00; 17.00, 47.00; 18.00, 48.00]];
-        [[19.00, 49.00; 20.00, 50.00; 21.00, 51.00];
-          [22.00, 52.00; 23.00, 53.00; 24.00, 54.00]]]|]
+        [
+          [  1.00 , 31.00  ;  2.00 , 32.00  ;  3.00 , 33.00  ]
+          ; [  4.00 , 34.00  ;  5.00 , 35.00  ;  6.00 , 36.00  ]
+        ]
+        ; [
+          [  7.00 , 37.00  ;  8.00 , 38.00  ;  9.00 , 39.00  ]
+          ; [  10.00 , 40.00  ;  11.00 , 41.00  ;  12.00 , 42.00  ]
+        ]
+      ]
+      ; [
+        [
+          [  13.00 , 43.00  ;  14.00 , 44.00  ;  15.00 , 45.00  ]
+          ; [  16.00 , 46.00  ;  17.00 , 47.00  ;  18.00 , 48.00  ]
+        ]
+        ; [
+          [  19.00 , 49.00  ;  20.00 , 50.00  ;  21.00 , 51.00  ]
+          ; [  22.00 , 52.00  ;  23.00 , 53.00  ;  24.00 , 54.00  ]
+        ]
+      ]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo4;
[%expect
@@ -462,8 +509,29 @@ let%expect_test "Big matrix" =
Tensor.print ~with_code:false ~with_grad:false `Inline zero_to_twenty;
[%expect
{|
-    [0.00; 1.00; 2.00; 3.00; 4.00; 5.00; 6.00; 7.00; 8.00; 9.00; 10.00; 11.00;
-      12.00; 13.00; 14.00; 15.00; 16.00; 17.00; 18.00; 19.00; 20.00]
+    [2]: 0...20 shape 0:21  [
+      0.00
+      ; 1.00
+      ; 2.00
+      ; 3.00
+      ; 4.00
+      ; 5.00
+      ; 6.00
+      ; 7.00
+      ; 8.00
+      ; 9.00
+      ; 10.00
+      ; 11.00
+      ; 12.00
+      ; 13.00
+      ; 14.00
+      ; 15.00
+      ; 16.00
+      ; 17.00
+      ; 18.00
+      ; 19.00
+      ; 20.00
+    ]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default zero_to_twenty;
[%expect
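
The diffs above are dune's expect-test corrections: each failing [%expect] block gets a matching .corrected file under _build. If the new PPrint-based rendering is the intended output, the expectations can be promoted locally instead of edited by hand; a minimal sketch, assuming a checkout where these are the only intended changes:

# regenerate the .corrected files
opam exec -- dune runtest
# copy the corrections back over the source files
opam exec -- dune promote
# or promote only specific files, e.g.:
opam exec -- dune promote test/micrograd_demo.ml test/hello_world_op.ml

Promotion overwrites the listed sources with their .corrected counterparts; the PrintBox_text assertion failure reported earlier is a separate problem and is not fixed by promoting expectations.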
(cd _build/default/test && ./moons_demo_parallel_run.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
("Set log_level to" 1)
└─{orphaned from #2}
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Properties of devices:
(multicore_devices
(device ((device_name CPU) (device_ordinal 0) (num_domains 80))))
@!Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Not found, using default false
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout
Not found, using default false
Batch=59, step=60, lr=0.199750, batch loss=23.609453, epoch loss=23.609453
Batch=119, step=120, lr=0.199750, batch loss=8.516926, epoch loss=32.126379
Batch=179, step=180, lr=0.199500, batch loss=2.644440, epoch loss=34.770819
Batch=239, step=240, lr=0.199250, batch loss=0.855278, epoch loss=35.626097
Batch=299, step=300, lr=0.199000, batch loss=1.437143, epoch loss=37.063240
Batch=359, step=360, lr=0.198750, batch loss=1.338851, epoch loss=38.402091
Batch=419, step=420, lr=0.198500, batch loss=0.611320, epoch loss=39.013412
Batch=479, step=480, lr=0.198250, batch loss=0.745907, epoch loss=39.759319
Batch=539, step=540, lr=0.198000, batch loss=0.690502, epoch loss=40.449821
Batch=599, step=600, lr=0.197750, batch loss=1.101582, epoch loss=41.551403
Batch=659, step=660, lr=0.197500, batch loss=0.481828, epoch loss=42.033231
Batch=719, step=720, lr=0.197250, batch loss=0.410623, epoch loss=42.443854
Batch=779, step=780, lr=0.197000, batch loss=0.469027, epoch loss=42.912882
Batch=839, step=840, lr=0.196750, batch loss=0.450368, epoch loss=43.363250
Batch=899, step=900, lr=0.196500, batch loss=0.383530, epoch loss=43.746780
Batch=959, step=960, lr=0.196250, batch loss=0.267841, epoch loss=44.014621
Batch=1019, step=1020, lr=0.196000, batch loss=0.487679, epoch loss=44.502300
Batch=1079, step=1080, lr=0.195750, batch loss=0.264822, epoch loss=44.767122
Batch=1139, step=1140, lr=0.195500, batch loss=0.331032, epoch loss=45.098154
Batch=1199, step=1200, lr=0.195250, batch loss=0.265091, epoch loss=45.363245
Epoch=0, step=1200, lr=0.195250, epoch loss=45.363245
Batch=59, step=1260, lr=0.195000, batch loss=0.263180, epoch loss=0.263180
Batch=119, step=1320, lr=0.194750, batch loss=0.204640, epoch loss=0.467820
Batch=179, step=1380, lr=0.194500, batch loss=0.245275, epoch loss=0.713095
Batch=239, step=1440, lr=0.194250, batch loss=0.349908, epoch loss=1.063003
Batch=299, step=1500, lr=0.194000, batch loss=0.239475, epoch loss=1.302479
Batch=359, step=1560, lr=0.193750, batch loss=0.319191, epoch loss=1.621670
Batch=419, step=1620, lr=0.193500, batch loss=0.312608, epoch loss=1.934277
Batch=479, step=1680, lr=0.193250, batch loss=0.276740, epoch loss=2.211017
Batch=539, step=1740, lr=0.192750, batch loss=0.209530, epoch loss=2.420547
Batch=599, step=1800, lr=0.192750, batch loss=0.247516, epoch loss=2.668064
Batch=659, step=1860, lr=0.192500, batch loss=0.367108, epoch loss=3.035172
Batch=719, step=1920, lr=0.192250, batch loss=0.359126, epoch loss=3.394298
Batch=779, step=1980, lr=0.192000, batch loss=0.389531, epoch loss=3.783828
Batch=839, step=2040, lr=0.191750, batch loss=0.347071, epoch loss=4.130899
Batch=899, step=2100, lr=0.191500, batch loss=0.323429, epoch loss=4.454328
Batch=959, step=2160, lr=0.191250, batch loss=0.256368, epoch loss=4.710695
Batch=1019, step=2220, lr=0.191000, batch loss=0.391408, epoch loss=5.102103
Batch=1079, step=2280, lr=0.190750, batch loss=0.219939, epoch loss=5.322041
Batch=1139, step=2340, lr=0.190500, batch loss=0.265484, epoch loss=5.587525
Batch=1199, step=2400, lr=0.190250, batch loss=0.214216, epoch loss=5.801742
Epoch=1, step=2400, lr=0.190250, epoch loss=5.801742
Batch=59, step=2460, lr=0.190000, batch loss=0.230890, epoch loss=0.230890
Batch=119, step=2520, lr=0.189750, batch loss=0.190200, epoch loss=0.421090
Batch=179, step=2580, lr=0.189500, batch loss=0.221042, epoch loss=0.642131
Batch=239, step=2640, lr=0.189250, batch loss=0.330167, epoch loss=0.972298
Batch=299, step=2700, lr=0.188750, batch loss=0.206074, epoch loss=1.178373
Batch=359, step=2760, lr=0.188750, batch loss=0.290320, epoch loss=1.468692
Batch=419, step=2820, lr=0.188250, batch loss=0.282655, epoch loss=1.751347
Batch=479, step=2880, lr=0.188250, batch loss=0.254400, epoch loss=2.005747
Batch=539, step=2940, lr=0.188000, batch loss=0.193605, epoch loss=2.199352
Batch=599, step=3000, lr=0.187750, batch loss=0.227999, epoch loss=2.427352
Batch=659, step=3060, lr=0.187500, batch loss=0.339419, epoch loss=2.766770
Batch=719, step=3120, lr=0.187000, batch loss=0.332492, epoch loss=3.099262
Batch=779, step=3180, lr=0.186750, batch loss=0.355463, epoch loss=3.454725
Batch=839, step=3240, lr=0.186750, batch loss=0.329384, epoch loss=3.784109
Batch=899, step=3300, lr=0.186500, batch loss=0.295415, epoch loss=4.079524
Batch=959, step=3360, lr=0.186250, batch loss=0.236205, epoch loss=4.315729
Batch=1019, step=3420, lr=0.186000, batch loss=0.353871, epoch loss=4.669600
Batch=1079, step=3480, lr=0.185750, batch loss=0.218022, epoch loss=4.887622
Batch=1139, step=3540, lr=0.185500, batch loss=0.265630, epoch loss=5.153252
Batch=1199, step=3600, lr=0.185250, batch loss=0.200744, epoch loss=5.353996
Epoch=2, step=3600, lr=0.185250, epoch loss=5.353996
Batch=59, step=3660, lr=0.184750, batch loss=0.221724, epoch loss=0.221724
Batch=119, step=3720, lr=0.184750, batch loss=0.186371, epoch loss=0.408096
Batch=179, step=3780, lr=0.184500, batch loss=0.212479, epoch loss=0.620574
Batch=239, step=3840, lr=0.184250, batch loss=0.317758, epoch loss=0.938333
Batch=299, step=3900, lr=0.184000, batch loss=0.205085, epoch loss=1.143418
Batch=359, step=3960, lr=0.183750, batch loss=0.282281, epoch loss=1.425699
Batch=419, step=4020, lr=0.183500, batch loss=0.273820, epoch loss=1.699519
Batch=479, step=4080, lr=0.183250, batch loss=0.249069, epoch loss=1.948588
Batch=539, step=4140, lr=0.183000, batch loss=0.196284, epoch loss=2.144872
Batch=599, step=4200, lr=0.182750, batch loss=0.240915, epoch loss=2.385787
Batch=659, step=4260, lr=0.182500, batch loss=0.330625, epoch loss=2.716412
Batch=719, step=4320, lr=0.182250, batch loss=0.330401, epoch loss=3.046813
Batch=779, step=4380, lr=0.182000, batch loss=0.351427, epoch loss=3.398240
Batch=839, step=4440, lr=0.181750, batch loss=0.320510, epoch loss=3.718750
Batch=899, step=4500, lr=0.181500, batch loss=0.291589, epoch loss=4.010339
Batch=959, step=4560, lr=0.181250, batch loss=0.242666, epoch loss=4.253005
Batch=1019, step=4620, lr=0.181000, batch loss=0.338492, epoch loss=4.591497
Batch=1079, step=4680, lr=0.180750, batch loss=0.199368, epoch loss=4.790865
Batch=1139, step=4740, lr=0.180500, batch loss=0.237932, epoch loss=5.028797
Batch=1199, step=4800, lr=0.180250, batch loss=0.192788, epoch loss=5.221584
Epoch=3, step=4800, lr=0.180250, epoch loss=5.221584
Batch=59, step=4860, lr=0.180000, batch loss=0.224119, epoch loss=0.224119
Batch=119, step=4920, lr=0.179750, batch loss=0.186725, epoch loss=0.410844
Batch=179, step=4980, lr=0.179500, batch loss=0.206509, epoch loss=0.617353
Batch=239, step=5040, lr=0.179250, batch loss=0.309763, epoch loss=0.927116
Batch=299, step=5100, lr=0.179000, batch loss=0.205492, epoch loss=1.132608
Batch=359, step=5160, lr=0.178750, batch loss=0.271928, epoch loss=1.404536
Batch=419, step=5220, lr=0.178500, batch loss=0.265656, epoch loss=1.670192
Batch=479, step=5280, lr=0.178250, batch loss=0.240881, epoch loss=1.911073
Batch=539, step=5340, lr=0.178000, batch loss=0.192563, epoch loss=2.103635
Batch=599, step=5400, lr=0.177750, batch loss=0.228707, epoch loss=2.332342
Batch=659, step=5460, lr=0.177500, batch loss=0.327325, epoch loss=2.659667
Batch=719, step=5520, lr=0.177250, batch loss=0.332384, epoch loss=2.992051
Batch=779, step=5580, lr=0.177000, batch loss=0.345216, epoch loss=3.337266
Batch=839, step=5640, lr=0.176750, batch loss=0.308787, epoch loss=3.646053
Batch=899, step=5700, lr=0.176500, batch loss=0.273142, epoch loss=3.919195
Batch=959, step=5760, lr=0.176250, batch loss=0.214566, epoch loss=4.133761
Batch=1019, step=5820, lr=0.176000, batch loss=0.340796, epoch loss=4.474558
Batch=1079, step=5880, lr=0.175750, batch loss=0.207244, epoch loss=4.681802
Batch=1139, step=5940, lr=0.175500, batch loss=0.239059, epoch loss=4.920861
Batch=1199, step=6000, lr=0.175250, batch loss=0.188349, epoch loss=5.109211
Epoch=4, step=6000, lr=0.175250, epoch loss=5.109211
Batch=59, step=6060, lr=0.175000, batch loss=0.231325, epoch loss=0.231325
Batch=119, step=6120, lr=0.174750, batch loss=0.193393, epoch loss=0.424719
Batch=179, step=6180, lr=0.174500, batch loss=0.201789, epoch loss=0.626507
Batch=239, step=6240, lr=0.174250, batch loss=0.300230, epoch loss=0.926738
Batch=299, step=6300, lr=0.174000, batch loss=0.203828, epoch loss=1.130566
Batch=359, step=6360, lr=0.173750, batch loss=0.266885, epoch loss=1.397451
Batch=419, step=6420, lr=0.173500, batch loss=0.261497, epoch loss=1.658948
Batch=479, step=6480, lr=0.173250, batch loss=0.237753, epoch loss=1.896701
Batch=539, step=6540, lr=0.173000, batch loss=0.188731, epoch loss=2.085432
Batch=599, step=6600, lr=0.172750, batch loss=0.225776, epoch loss=2.311208
Batch=659, step=6660, lr=0.172500, batch loss=0.316658, epoch loss=2.627866
Batch=719, step=6720, lr=0.172250, batch loss=0.315650, epoch loss=2.943516
Batch=779, step=6780, lr=0.172000, batch loss=0.333036, epoch loss=3.276552
Batch=839, step=6840, lr=0.171750, batch loss=0.304527, epoch loss=3.581080
Batch=899, step=6900, lr=0.171500, batch loss=0.267170, epoch loss=3.848249
Batch=959, step=6960, lr=0.171250, batch loss=0.212679, epoch loss=4.060928
Batch=1019, step=7020, lr=0.171000, batch loss=0.323611, epoch loss=4.384539
Batch=1079, step=7080, lr=0.170750, batch loss=0.176831, epoch loss=4.561371
Batch=1139, step=7140, lr=0.170500, batch loss=0.213013, epoch loss=4.774384
Batch=1199, step=7200, lr=0.170250, batch loss=0.190962, epoch loss=4.965346
Epoch=5, step=7200, lr=0.170250, epoch loss=4.965346
Batch=59, step=7260, lr=0.170000, batch loss=0.225328, epoch loss=0.225328
Batch=119, step=7320, lr=0.169750, batch loss=0.190165, epoch loss=0.415493
Batch=179, step=7380, lr=0.169500, batch loss=0.195152, epoch loss=0.610645
Batch=239, step=7440, lr=0.169250, batch loss=0.291526, epoch loss=0.902171
Batch=299, step=7500, lr=0.169000, batch loss=0.209017, epoch loss=1.111188
Batch=359, step=7560, lr=0.168750, batch loss=0.261530, epoch loss=1.372718
Batch=419, step=7620, lr=0.168500, batch loss=0.255826, epoch loss=1.628544
Batch=479, step=7680, lr=0.168250, batch loss=0.239153, epoch loss=1.867696
Batch=539, step=7740, lr=0.168000, batch loss=0.186191, epoch loss=2.053887
Batch=599, step=7800, lr=0.167750, batch loss=0.225071, epoch loss=2.278958
Batch=659, step=7860, lr=0.167500, batch loss=0.305921, epoch loss=2.584880
Batch=719, step=7920, lr=0.167250, batch loss=0.308617, epoch loss=2.893496
Batch=779, step=7980, lr=0.167000, batch loss=0.323084, epoch loss=3.216580
Batch=839, step=8040, lr=0.166500, batch loss=0.298533, epoch loss=3.515113
Batch=899, step=8100, lr=0.166500, batch loss=0.262716, epoch loss=3.777829
Batch=959, step=8160, lr=0.166000, batch loss=0.202244, epoch loss=3.980074
Batch=1019, step=8220, lr=0.166000, batch loss=0.322021, epoch loss=4.302095
Batch=1079, step=8280, lr=0.165500, batch loss=0.189128, epoch loss=4.491223
Batch=1139, step=8340, lr=0.165500, batch loss=0.218746, epoch loss=4.709969
Batch=1199, step=8400, lr=0.165250, batch loss=0.173287, epoch loss=4.883256
Epoch=6, step=8400, lr=0.165250, epoch loss=4.883256
Batch=59, step=8460, lr=0.165000, batch loss=0.210654, epoch loss=0.210654
Batch=119, step=8520, lr=0.164750, batch loss=0.173594, epoch loss=0.384248
Batch=179, step=8580, lr=0.164500, batch loss=0.188991, epoch loss=0.573239
Batch=239, step=8640, lr=0.164250, batch loss=0.281056, epoch loss=0.854296
Batch=299, step=8700, lr=0.164000, batch loss=0.192348, epoch loss=1.046643
Batch=359, step=8760, lr=0.163750, batch loss=0.248594, epoch loss=1.295238
Batch=419, step=8820, lr=0.163250, batch loss=0.245066, epoch loss=1.540304
Batch=479, step=8880, lr=0.163250, batch loss=0.230239, epoch loss=1.770543
Batch=539, step=8940, lr=0.163000, batch loss=0.177796, epoch loss=1.948339
Batch=599, step=9000, lr=0.162750, batch loss=0.219454, epoch loss=2.167793
Batch=659, step=9060, lr=0.162250, batch loss=0.294280, epoch loss=2.462074
Batch=719, step=9120, lr=0.162250, batch loss=0.297985, epoch loss=2.760059
Batch=779, step=9180, lr=0.162000, batch loss=0.315500, epoch loss=3.075559
Batch=839, step=9240, lr=0.161500, batch loss=0.282503, epoch loss=3.358062
Batch=899, step=9300, lr=0.161500, batch loss=0.252930, epoch loss=3.610992
Batch=959, step=9360, lr=0.161250, batch loss=0.187864, epoch loss=3.798856
Batch=1019, step=9420, lr=0.161000, batch loss=0.303497, epoch loss=4.102353
Batch=1079, step=9480, lr=0.160750, batch loss=0.173502, epoch loss=4.275855
Batch=1139, step=9540, lr=0.160500, batch loss=0.201027, epoch loss=4.476881
Batch=1199, step=9600, lr=0.160250, batch loss=0.165916, epoch loss=4.642798
Epoch=7, step=9600, lr=0.160250, epoch loss=4.642798
Batch=59, step=9660, lr=0.159750, batch loss=0.189558, epoch loss=0.189558
Batch=119, step=9720, lr=0.159750, batch loss=0.159888, epoch loss=0.349447
Batch=179, step=9780, lr=0.159500, batch loss=0.179131, epoch loss=0.528577
Batch=239, step=9840, lr=0.159250, batch loss=0.263286, epoch loss=0.791864
Batch=299, step=9900, lr=0.159000, batch loss=0.183932, epoch loss=0.975796
Batch=359, step=9960, lr=0.158750, batch loss=0.241180, epoch loss=1.216976
Batch=419, step=10020, lr=0.158500, batch loss=0.233025, epoch loss=1.450001
Batch=479, step=10080, lr=0.158250, batch loss=0.215316, epoch loss=1.665317
Batch=539, step=10140, lr=0.158000, batch loss=0.171018, epoch loss=1.836335
Batch=599, step=10200, lr=0.157750, batch loss=0.203664, epoch loss=2.040000
Batch=659, step=10260, lr=0.157500, batch loss=0.281909, epoch loss=2.321908
Batch=719, step=10320, lr=0.157250, batch loss=0.287446, epoch loss=2.609354
Batch=779, step=10380, lr=0.157000, batch loss=0.295583, epoch loss=2.904938
Batch=839, step=10440, lr=0.156750, batch loss=0.267568, epoch loss=3.172506
Batch=899, step=10500, lr=0.156500, batch loss=0.245042, epoch loss=3.417547
Batch=959, step=10560, lr=0.156250, batch loss=0.198123, epoch loss=3.615671
Batch=1019, step=10620, lr=0.156000, batch loss=0.281408, epoch loss=3.897078
Batch=1079, step=10680, lr=0.155750, batch loss=0.168077, epoch loss=4.065155
Batch=1139, step=10740, lr=0.155500, batch loss=0.195190, epoch loss=4.260345
Batch=1199, step=10800, lr=0.155250, batch loss=0.155020, epoch loss=4.415365
Epoch=8, step=10800, lr=0.155250, epoch loss=4.415365
Batch=59, step=10860, lr=0.155000, batch loss=0.180623, epoch loss=0.180623
Batch=119, step=10920, lr=0.154750, batch loss=0.151741, epoch loss=0.332364
Batch=179, step=10980, lr=0.154500, batch loss=0.166752, epoch loss=0.499116
Batch=239, step=11040, lr=0.154250, batch loss=0.241768, epoch loss=0.740884
Batch=299, step=11100, lr=0.154000, batch loss=0.173751, epoch loss=0.914635
Batch=359, step=11160, lr=0.153750, batch loss=0.220565, epoch loss=1.135199
Batch=419, step=11220, lr=0.153500, batch loss=0.217202, epoch loss=1.352401
Batch=479, step=11280, lr=0.153250, batch loss=0.213587, epoch loss=1.565988
Batch=539, step=11340, lr=0.153000, batch loss=0.169986, epoch loss=1.735974
Batch=599, step=11400, lr=0.152750, batch loss=0.175914, epoch loss=1.911888
Batch=659, step=11460, lr=0.152500, batch loss=0.262603, epoch loss=2.174491
Batch=719, step=11520, lr=0.152250, batch loss=0.263801, epoch loss=2.438292
Batch=779, step=11580, lr=0.152000, batch loss=0.272026, epoch loss=2.710318
Batch=839, step=11640, lr=0.151750, batch loss=0.251726, epoch loss=2.962045
Batch=899, step=11700, lr=0.151250, batch loss=0.219245, epoch loss=3.181289
Batch=959, step=11760, lr=0.151250, batch loss=0.184904, epoch loss=3.366194
Batch=1019, step=11820, lr=0.151000, batch loss=0.275739, epoch loss=3.641932
Batch=1079, step=11880, lr=0.150750, batch loss=0.150114, epoch loss=3.792046
Batch=1139, step=11940, lr=0.150500, batch loss=0.179928, epoch loss=3.971974
Batch=1199, step=12000, lr=0.150250, batch loss=0.140113, epoch loss=4.112087
Epoch=9, step=12000, lr=0.150250, epoch loss=4.112087
Batch=59, step=12060, lr=0.149750, batch loss=0.160606, epoch loss=0.160606
Batch=119, step=12120, lr=0.149750, batch loss=0.138588, epoch loss=0.299195
Batch=179, step=12180, lr=0.149500, batch loss=0.152194, epoch loss=0.451388
Batch=239, step=12240, lr=0.149250, batch loss=0.221573, epoch loss=0.672962
Batch=299, step=12300, lr=0.149000, batch loss=0.142135, epoch loss=0.815097
Batch=359, step=12360, lr=0.148750, batch loss=0.196372, epoch loss=1.011470
Batch=419, step=12420, lr=0.148500, batch loss=0.201021, epoch loss=1.212491
Batch=479, step=12480, lr=0.148250, batch loss=0.178842, epoch loss=1.391332
Batch=539, step=12540, lr=0.148000, batch loss=0.144284, epoch loss=1.535616
Batch=599, step=12600, lr=0.147750, batch loss=0.150480, epoch loss=1.686096
Batch=659, step=12660, lr=0.147500, batch loss=0.228025, epoch loss=1.914122
Batch=719, step=12720, lr=0.147250, batch loss=0.236424, epoch loss=2.150545
Batch=779, step=12780, lr=0.147000, batch loss=0.263120, epoch loss=2.413666
Batch=839, step=12840, lr=0.146750, batch loss=0.234590, epoch loss=2.648256
Batch=899, step=12900, lr=0.146500, batch loss=0.207807, epoch loss=2.856063
Batch=959, step=12960, lr=0.146250, batch loss=0.143983, epoch loss=3.000046
Batch=1019, step=13020, lr=0.146000, batch loss=0.233611, epoch loss=3.233657
Batch=1079, step=13080, lr=0.145750, batch loss=0.110055, epoch loss=3.343711
Batch=1139, step=13140, lr=0.145500, batch loss=0.144817, epoch loss=3.488529
Batch=1199, step=13200, lr=0.145250, batch loss=0.118031, epoch loss=3.606560
Epoch=10, step=13200, lr=0.145250, epoch loss=3.606560
Batch=59, step=13260, lr=0.145000, batch loss=0.147710, epoch loss=0.147710
Batch=119, step=13320, lr=0.144750, batch loss=0.133502, epoch loss=0.281212
Batch=179, step=13380, lr=0.144500, batch loss=0.131346, epoch loss=0.412558
Batch=239, step=13440, lr=0.144250, batch loss=0.190286, epoch loss=0.602844
Batch=299, step=13500, lr=0.144000, batch loss=0.114759, epoch loss=0.717603
Batch=359, step=13560, lr=0.143750, batch loss=0.162632, epoch loss=0.880235
Batch=419, step=13620, lr=0.143500, batch loss=0.162770, epoch loss=1.043005
Batch=479, step=13680, lr=0.143000, batch loss=0.148827, epoch loss=1.191832
Batch=539, step=13740, lr=0.143000, batch loss=0.120495, epoch loss=1.312327
Batch=599, step=13800, lr=0.142750, batch loss=0.121596, epoch loss=1.433923
Batch=659, step=13860, lr=0.142500, batch loss=0.176658, epoch loss=1.610581
Batch=719, step=13920, lr=0.142250, batch loss=0.178069, epoch loss=1.788650
Batch=779, step=13980, lr=0.142000, batch loss=0.199674, epoch loss=1.988324
Batch=839, step=14040, lr=0.141750, batch loss=0.189547, epoch loss=2.177871
Batch=899, step=14100, lr=0.141500, batch loss=0.180927, epoch loss=2.358798
Batch=959, step=14160, lr=0.141250, batch loss=0.142714, epoch loss=2.501512
Batch=1019, step=14220, lr=0.141000, batch loss=0.302674, epoch loss=2.804186
Batch=1079, step=14280, lr=0.140750, batch loss=0.076990, epoch loss=2.881176
Batch=1139, step=14340, lr=0.140500, batch loss=0.125536, epoch loss=3.006713
Batch=1199, step=14400, lr=0.140250, batch loss=0.091389, epoch loss=3.098102
Epoch=11, step=14400, lr=0.140250, epoch loss=3.098102
Batch=59, step=14460, lr=0.140000, batch loss=0.113596, epoch loss=0.113596
Batch=119, step=14520, lr=0.139750, batch loss=0.110859, epoch loss=0.224455
Batch=179, step=14580, lr=0.139500, batch loss=0.107441, epoch loss=0.331896
Batch=239, step=14640, lr=0.139250, batch loss=0.140258, epoch loss=0.472154
Batch=299, step=14700, lr=0.139000, batch loss=0.085278, epoch loss=0.557433
Batch=359, step=14760, lr=0.138750, batch loss=0.129171, epoch loss=0.686603
Batch=419, step=14820, lr=0.138500, batch loss=0.126635, epoch loss=0.813238
Batch=479, step=14880, lr=0.138250, batch loss=0.108501, epoch loss=0.921739
Batch=539, step=14940, lr=0.138000, batch loss=0.111778, epoch loss=1.033517
Batch=599, step=15000, lr=0.137750, batch loss=0.087808, epoch loss=1.121324
Batch=659, step=15060, lr=0.137500, batch loss=0.130551, epoch loss=1.251875
Batch=719, step=15120, lr=0.137250, batch loss=0.127437, epoch loss=1.379312
Batch=779, step=15180, lr=0.137000, batch loss=0.130862, epoch loss=1.510174
Batch=839, step=15240, lr=0.136750, batch loss=0.148676, epoch loss=1.658849
Batch=899, step=15300, lr=0.136500, batch loss=0.158526, epoch loss=1.817375
Batch=959, step=15360, lr=0.136250, batch loss=0.113909, epoch loss=1.931284
Batch=1019, step=15420, lr=0.136000, batch loss=0.260940, epoch loss=2.192224
Batch=1079, step=15480, lr=0.135750, batch loss=0.031459, epoch loss=2.223683
Batch=1139, step=15540, lr=0.135500, batch loss=0.070987, epoch loss=2.294670
Batch=1199, step=15600, lr=0.135250, batch loss=0.058898, epoch loss=2.353568
Epoch=12, step=15600, lr=0.135250, epoch loss=2.353568
Batch=59, step=15660, lr=0.135000, batch loss=0.094213, epoch loss=0.094213
Batch=119, step=15720, lr=0.134750, batch loss=0.154703, epoch loss=0.248916
Batch=179, step=15780, lr=0.134500, batch loss=0.106490, epoch loss=0.355407
Batch=239, step=15840, lr=0.134000, batch loss=0.102018, epoch loss=0.457425
Batch=299, step=15900, lr=0.134000, batch loss=0.043406, epoch loss=0.500831
Batch=359, step=15960, lr=0.133750, batch loss=0.082490, epoch loss=0.583322
Batch=419, step=16020, lr=0.133500, batch loss=0.080854, epoch loss=0.664175
Batch=479, step=16080, lr=0.133250, batch loss=0.063260, epoch loss=0.727435
Batch=539, step=16140, lr=0.133000, batch loss=0.058449, epoch loss=0.785884
Batch=599, step=16200, lr=0.132750, batch loss=0.113338, epoch loss=0.899222
Batch=659, step=16260, lr=0.132500, batch loss=0.079460, epoch loss=0.978682
Batch=719, step=16320, lr=0.132000, batch loss=0.087154, epoch loss=1.065836
Batch=779, step=16380, lr=0.131750, batch loss=0.101572, epoch loss=1.167408
Batch=839, step=16440, lr=0.131500, batch loss=0.121740, epoch loss=1.289148
Batch=899, step=16500, lr=0.131250, batch loss=0.141463, epoch loss=1.430611
Batch=959, step=16560, lr=0.131000, batch loss=0.057127, epoch loss=1.487737
Batch=1019, step=16620, lr=0.131000, batch loss=0.146535, epoch loss=1.634272
Batch=1079, step=16680, lr=0.130500, batch loss=0.020905, epoch loss=1.655176
Batch=1139, step=16740, lr=0.130250, batch loss=0.043169, epoch loss=1.698346
Batch=1199, step=16800, lr=0.130250, batch loss=0.025734, epoch loss=1.724080
Epoch=13, step=16800, lr=0.130250, epoch loss=1.724080
Batch=59, step=16860, lr=0.130000, batch loss=0.037388, epoch loss=0.037388
Batch=119, step=16920, lr=0.129750, batch loss=0.065258, epoch loss=0.102646
Batch=179, step=16980, lr=0.129500, batch loss=0.053214, epoch loss=0.155860
Batch=239, step=17040, lr=0.129250, batch loss=0.067815, epoch loss=0.223675
Batch=299, step=17100, lr=0.129000, batch loss=0.039286, epoch loss=0.262961
Batch=359, step=17160, lr=0.128750, batch loss=0.048808, epoch loss=0.311769
Batch=419, step=17220, lr=0.128500, batch loss=0.049497, epoch loss=0.361266
Batch=479, step=17280, lr=0.128250, batch loss=0.033160, epoch loss=0.394426
Batch=539, step=17340, lr=0.128000, batch loss=0.062186, epoch loss=0.456612
Batch=599, step=17400, lr=0.127750, batch loss=0.036272, epoch loss=0.492884
Batch=659, step=17460, lr=0.127500, batch loss=0.051646, epoch loss=0.544530
Batch=719, step=17520, lr=0.127250, batch loss=0.057519, epoch loss=0.602049
Batch=779, step=17580, lr=0.127000, batch loss=0.059090, epoch loss=0.661140
Batch=839, step=17640, lr=0.126750, batch loss=0.092095, epoch loss=0.753235
Batch=899, step=17700, lr=0.126500, batch loss=0.050999, epoch loss=0.804234
Batch=959, step=17760, lr=0.126250, batch loss=0.018435, epoch loss=0.822669
Batch=1019, step=17820, lr=0.126000, batch loss=0.027551, epoch loss=0.850220
Batch=1079, step=17880, lr=0.125750, batch loss=0.020886, epoch loss=0.871106
Batch=1139, step=17940, lr=0.125500, batch loss=0.050613, epoch loss=0.921719
Batch=1199, step=18000, lr=0.125250, batch loss=0.016272, epoch loss=0.937991
Epoch=14, step=18000, lr=0.125250, epoch loss=0.937991
Batch=59, step=18060, lr=0.125000, batch loss=0.013869, epoch loss=0.013869
Batch=119, step=18120, lr=0.124750, batch loss=0.022987, epoch loss=0.036856
Batch=179, step=18180, lr=0.124500, batch loss=0.029254, epoch loss=0.066109
Batch=239, step=18240, lr=0.124250, batch loss=0.033214, epoch loss=0.099323
Batch=299, step=18300, lr=0.124000, batch loss=0.009448, epoch loss=0.108771
Batch=359, step=18360, lr=0.123750, batch loss=0.023396, epoch loss=0.132166
Batch=419, step=18420, lr=0.123500, batch loss=0.030172, epoch loss=0.162338
Batch=479, step=18480, lr=0.123250, batch loss=0.018497, epoch loss=0.180835
Batch=539, step=18540, lr=0.123000, batch loss=0.028790, epoch loss=0.209625
Batch=599, step=18600, lr=0.122750, batch loss=0.025246, epoch loss=0.234871
Batch=659, step=18660, lr=0.122500, batch loss=0.033494, epoch loss=0.268365
Batch=719, step=18720, lr=0.122250, batch loss=0.038540, epoch loss=0.306905
Batch=779, step=18780, lr=0.122000, batch loss=0.124206, epoch loss=0.431111
Batch=839, step=18840, lr=0.121750, batch loss=0.058949, epoch loss=0.490060
Batch=899, step=18900, lr=0.121500, batch loss=0.078595, epoch loss=0.568655
Batch=959, step=18960, lr=0.121250, batch loss=0.013766, epoch loss=0.582421
Batch=1019, step=19020, lr=0.121000, batch loss=0.022810, epoch loss=0.605232
Batch=1079, step=19080, lr=0.120750, batch loss=0.008829, epoch loss=0.614060
Batch=1139, step=19140, lr=0.120500, batch loss=0.023991, epoch loss=0.638052
Batch=1199, step=19200, lr=0.120250, batch loss=0.009948, epoch loss=0.648000
Epoch=15, step=19200, lr=0.120250, epoch loss=0.648000
Batch=59, step=19260, lr=0.120000, batch loss=0.005323, epoch loss=0.005323
Batch=119, step=19320, lr=0.119750, batch loss=0.019136, epoch loss=0.024460
Batch=179, step=19380, lr=0.119500, batch loss=0.048845, epoch loss=0.073305
Batch=239, step=19440, lr=0.119250, batch loss=0.021105, epoch loss=0.094410
Batch=299, step=19500, lr=0.119000, batch loss=0.021160, epoch loss=0.115569
Batch=359, step=19560, lr=0.118750, batch loss=0.038914, epoch loss=0.154484
Batch=419, step=19620, lr=0.118500, batch loss=0.020544, epoch loss=0.175027
Batch=479, step=19680, lr=0.118250, batch loss=0.008468, epoch loss=0.183495
Batch=539, step=19740, lr=0.118000, batch loss=0.017337, epoch loss=0.200832
Batch=599, step=19800, lr=0.117750, batch loss=0.023708, epoch loss=0.224540
Batch=659, step=19860, lr=0.117500, batch loss=0.021727, epoch loss=0.246267
Batch=719, step=19920, lr=0.117250, batch loss=0.052505, epoch loss=0.298772
Batch=779, step=19980, lr=0.117000, batch loss=0.080862, epoch loss=0.379634
Batch=839, step=20040, lr=0.116750, batch loss=0.030962, epoch loss=0.410597
Batch=899, step=20100, lr=0.116500, batch loss=0.034293, epoch loss=0.444889
Batch=959, step=20160, lr=0.116250, batch loss=0.011666, epoch loss=0.456556
Batch=1019, step=20220, lr=0.116000, batch loss=0.015404, epoch loss=0.471960
Batch=1079, step=20280, lr=0.115750, batch loss=0.003062, epoch loss=0.475022
Batch=1139, step=20340, lr=0.115500, batch loss=0.016609, epoch loss=0.491631
Batch=1199, step=20400, lr=0.115250, batch loss=0.006607, epoch loss=0.498238
Epoch=16, step=20400, lr=0.115250, epoch loss=0.498238
Batch=59, step=20460, lr=0.115000, batch loss=0.003037, epoch loss=0.003037
Batch=119, step=20520, lr=0.114750, batch loss=0.009909, epoch loss=0.012946
Batch=179, step=20580, lr=0.114500, batch loss=0.022892, epoch loss=0.035837
Batch=239, step=20640, lr=0.114250, batch loss=0.016796, epoch loss=0.052633
Batch=299, step=20700, lr=0.114000, batch loss=0.006138, epoch loss=0.058771
Batch=359, step=20760, lr=0.113750, batch loss=0.013082, epoch loss=0.071853
Batch=419, step=20820, lr=0.113500, batch loss=0.017315, epoch loss=0.089169
Batch=479, step=20880, lr=0.113250, batch loss=0.004448, epoch loss=0.093617
Batch=539, step=20940, lr=0.113000, batch loss=0.015652, epoch loss=0.109268
Batch=599, step=21000, lr=0.112750, batch loss=0.018026, epoch loss=0.127294
Batch=659, step=21060, lr=0.112500, batch loss=0.013473, epoch loss=0.140768
Batch=719, step=21120, lr=0.112250, batch loss=0.032787, epoch loss=0.173555
Batch=779, step=21180, lr=0.112000, batch loss=0.042922, epoch loss=0.216477
Batch=839, step=21240, lr=0.111750, batch loss=0.031711, epoch loss=0.248188
Batch=899, step=21300, lr=0.111500, batch loss=0.033660, epoch loss=0.281848
Batch=959, step=21360, lr=0.111250, batch loss=0.011080, epoch loss=0.292928
Batch=1019, step=21420, lr=0.111000, batch loss=0.011716, epoch loss=0.304643
Batch=1079, step=21480, lr=0.110750, batch loss=0.002675, epoch loss=0.307318
Batch=1139, step=21540, lr=0.110500, batch loss=0.012058, epoch loss=0.319377
Batch=1199, step=21600, lr=0.110250, batch loss=0.005005, epoch loss=0.324381
Epoch=17, step=21600, lr=0.110250, epoch loss=0.324381
Batch=59, step=21660, lr=0.110000, batch loss=0.002247, epoch loss=0.002247
Batch=119, step=21720, lr=0.109750, batch loss=0.006854, epoch loss=0.009101
Batch=179, step=21780, lr=0.109500, batch loss=0.012724, epoch loss=0.021825
Batch=239, step=21840, lr=0.109250, batch loss=0.010556, epoch loss=0.032381
Batch=299, step=21900, lr=0.109000, batch loss=0.012513, epoch loss=0.044893
Batch=359, step=21960, lr=0.108750, batch loss=0.013096, epoch loss=0.057989
Batch=419, step=22020, lr=0.108500, batch loss=0.012318, epoch loss=0.070308
Batch=479, step=22080, lr=0.108250, batch loss=0.002781, epoch loss=0.073089
Batch=539, step=22140, lr=0.108000, batch loss=0.017491, epoch loss=0.090580
Batch=599, step=22200, lr=0.107500, batch loss=0.017336, epoch loss=0.107916
Batch=659, step=22260, lr=0.107500, batch loss=0.016025, epoch loss=0.123941
Batch=719, step=22320, lr=0.107250, batch loss=0.022069, epoch loss=0.146009
Batch=779, step=22380, lr=0.107000, batch loss=0.029476, epoch loss=0.175485
Batch=839, step=22440, lr=0.106750, batch loss=0.029676, epoch loss=0.205162
Batch=899, step=22500, lr=0.106500, batch loss=0.023071, epoch loss=0.228232
Batch=959, step=22560, lr=0.106250, batch loss=0.010583, epoch loss=0.238815
Batch=1019, step=22620, lr=0.106000, batch loss=0.008140, epoch loss=0.246955
Batch=1079, step=22680, lr=0.105750, batch loss=0.002207, epoch loss=0.249162
Batch=1139, step=22740, lr=0.105500, batch loss=0.010393, epoch loss=0.259555
Batch=1199, step=22800, lr=0.105250, batch loss=0.005292, epoch loss=0.264847
Epoch=18, step=22800, lr=0.105250, epoch loss=0.264847
Batch=59, step=22860, lr=0.105000, batch loss=0.002877, epoch loss=0.002877
Batch=119, step=22920, lr=0.104750, batch loss=0.004663, epoch loss=0.007540
Batch=179, step=22980, lr=0.104500, batch loss=0.012761, epoch loss=0.020301
Batch=239, step=23040, lr=0.104250, batch loss=0.008284, epoch loss=0.028584
Batch=299, step=23100, lr=0.104000, batch loss=0.003869, epoch loss=0.032453
Batch=359, step=23160, lr=0.103750, batch loss=0.012112, epoch loss=0.044565
Batch=419, step=23220, lr=0.103500, batch loss=0.011402, epoch loss=0.055967
Batch=479, step=23280, lr=0.103250, batch loss=0.002702, epoch loss=0.058669
Batch=539, step=23340, lr=0.103000, batch loss=0.016654, epoch loss=0.075323
Batch=599, step=23400, lr=0.102750, batch loss=0.014615, epoch loss=0.089937
Batch=659, step=23460, lr=0.102500, batch loss=0.011930, epoch loss=0.101867
Batch=719, step=23520, lr=0.102250, batch loss=0.016462, epoch loss=0.118328
Batch=779, step=23580, lr=0.102000, batch loss=0.021122, epoch loss=0.139450
Batch=839, step=23640, lr=0.101750, batch loss=0.022220, epoch loss=0.161671
Batch=899, step=23700, lr=0.101500, batch loss=0.025779, epoch loss=0.187449
Batch=959, step=23760, lr=0.101250, batch loss=0.007794, epoch loss=0.195243
Batch=1019, step=23820, lr=0.101000, batch loss=0.007651, epoch loss=0.202894
Batch=1079, step=23880, lr=0.100750, batch loss=0.000852, epoch loss=0.203746
Batch=1139, step=23940, lr=0.100500, batch loss=0.008538, epoch loss=0.212284
Batch=1199, step=24000, lr=0.100250, batch loss=0.004289, epoch loss=0.216573
Epoch=19, step=24000, lr=0.100250, epoch loss=0.216573
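
For orientation only (not part of the job output): the lines above follow a fixed pattern, with a learning rate that decays roughly linearly from about 0.2 to about 0.1 over the 24000 optimizer steps, a per-epoch loss accumulated as a running sum of minibatch losses, and one log line every 60 minibatches. A minimal OCaml sketch of such a logging loop is shown below; all constants and the placeholder loss are assumptions for illustration, not the OCANNL test code.

(* Illustrative reconstruction only -- not the OCANNL test code. It mirrors
   the shape of the log above: linear lr decay from ~0.2 to ~0.1 over
   24_000 steps, a running per-epoch loss, one log line per 60 minibatches. *)
let () =
  let base_lr = 0.2 and final_lr = 0.1 in
  let epochs = 20 and batches_per_epoch = 1_200 and log_every = 60 in
  let total_steps = epochs * batches_per_epoch in
  let step = ref 0 in
  for epoch = 0 to epochs - 1 do
    let epoch_loss = ref 0.0 in
    for batch = 0 to batches_per_epoch - 1 do
      incr step;
      let lr =
        base_lr
        -. (base_lr -. final_lr) *. float_of_int !step /. float_of_int total_steps
      in
      (* Placeholder: in the real run this is the minibatch loss from the model. *)
      let batch_loss = 0.0 in
      epoch_loss := !epoch_loss +. batch_loss;
      if (batch + 1) mod log_every = 0 then
        Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
          batch !step lr batch_loss !epoch_loss
    done;
    Printf.printf "Epoch=%d, step=%d, epoch loss=%f\n" epoch !step !epoch_loss
  done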


Half-moons scatterplot and decision boundary:
┌────────────────────────────────────────────────────────────────────────────────────────────────────┐
│********************************#*******************************************************************│
│**********************#*#*#######*###*#####*********************************************************│
│**********************#########################*****************************************************│
│*****************#**########*######*###########*###*************************************************│
│***************#################*###################************************************************│
│************######*#################*#################**********************************************│
│**********#*#####*########*#**************##*#########*#********************************************│
│***********########*##*#******************#*****##########******************************************│
│***********###########*************************############***************************************..│
│********######*####*********************************###*###*#***********************************....│
│*******######**##*************....*****************#*######*#********************************.......│
│*******##*##**##**********...........***************########*##****************************.........│
│*****#######************.......%...%%...***************#########**************************........%.│
│******######**********..........%........***************##*#####************************......%.%.%.│
│***#########**********.........%%%.%%......*************#*#######**********************......%.%%%%.│
│****#######**********..........%%%%.........************#########*********************.......%%.%%.%│
│**#######************..........%%%%%%%.......**************###*###******************.........%%%%%%.│
│*##*####************...........%%%%%%%.........***********########*****************..........%%%%%%.│
│*#######************...........%%%%%%%..........************#######***************...........%%%%%%.│
│*##*####***********............%%.%%%%%...........***********####****************...........%%%%%%%.│
│*#####*#**********..............%%%%%%%............**********##*###************..............%%%%%..│
│#######***********.............%.%%%%%%.............*********#######**********.............%%%%.%%..│
│#####*#**********...............%%%%%%%...............*******#######*********..............%%%%%%%%.│
│###*#*#**********...............%%%%%%%%%..............*******######********...............%%%%%%...│
│#######*********.................%%%%%%%%...............*****###*###******................%%%%%%....│
│######**********.................%%%%%%%%%................***#*###*******...............%%%%%%%%%...│
│*#*##*#********...................%%%%%%%%%%...............***######****.................%%%%%%.....│
│#****##********....................%%%%%%%%%................***###*#**................%.%%%%%%%.....│
│**************.....................%.%%%%%%...................*******..................%.%%.%%......│
│*************........................%..%%%%%%%................*****..............%.%%%%%%%%%.......│
│*************.........................%.%%%.%%%%.................**...............%%%%%%%.%.%.......│
│************............................%..%%%%..%................................%%%%%%%%..........│
│************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........│
│***********..............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........│
│***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............│
│**********...................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............│
│**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................│
│*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................│
│********.............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................│
│********................................................%...%%%%.%%.%%%%..%.........................│
└────────────────────────────────────────────────────────────────────────────────────────────────────┘
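
For context only (not part of the job output): the two interleaved point clouds in the plot above are the classic half-moons toy dataset, with the learned decision boundary rendered around them. A minimal OCaml sketch of generating such data is given below; the parameterization, noise level, and sample count are assumptions for illustration, not OCANNL's actual data generator.

(* Illustrative half-moons generator (assumed parameterization, not OCANNL's
   data code): two interleaved, noisy semicircles with labels +1 / -1. *)
let half_moons ?(n = 1_200) ?(noise = 0.1) () =
  Random.self_init ();
  List.init n (fun i ->
      let t = Random.float Float.pi in
      let jx = noise *. (Random.float 2.0 -. 1.0)
      and jy = noise *. (Random.float 2.0 -. 1.0) in
      if i mod 2 = 0 then
        (* upper moon, class +1 *)
        (cos t +. jx, sin t +. jy, 1.0)
      else
        (* lower moon, shifted and flipped, class -1 *)
        (1.0 -. cos t +. jx, 0.5 -. sin t +. jy, -1.0))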
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @install @check @runtest && rm -rf _build" failed with exit status 1
2025-05-22 20:05.22: Job failed: Failed: Build failed