
debian-12-5.2_opam-2.3


Logs

2025-05-22 20:00.59: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (39741884b740497ac10065d5e464e6c70f9151f4) (linux-x86_64:debian-12-5.2_opam-2.3)
Base: ocaml/opam:debian-12-ocaml-5.2@sha256:a17317e9abe385dc16b4390c64a374046d6dd562e80aea838d91c6c1335da357
Opam project build


To reproduce locally:


git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 39741884
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-5.2@sha256:a17317e9abe385dc16b4390c64a374046d6dd562e80aea838d91c6c1335da357
# debian-12-5.2_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.2.1 ocaml-base-compiler.5.2.1 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.0 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build


END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
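
A possible follow-up to the reproduction block above (a sketch, not part of the CI output): since the final RUN step is the one that fails in this job, the docker build itself stops there. One way to iterate is to delete that last RUN line from the generated Dockerfile, build a tagged image, and run the test command interactively; the tag ocannl-repro is an arbitrary local name.

# Build the image without the failing test step, then run the tests by hand.
docker build -t ocannl-repro .
docker run --rm -it ocannl-repro opam exec -- dune build @install @check @runtest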


2025-05-22 20:00.59: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-5.2@sha256:a17317e9abe385dc16b4390c64a374046d6dd562e80aea838d91c6c1335da357-debian-12-5.2_opam-2.3-f1d22c9a39b51ed645a4a58d29fbcce5"
2025-05-22 20:00.59: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-5.2@sha256:a17317e9abe385dc16b4390c64a374046d6dd562e80aea838d91c6c1335da357)
(comment debian-12-5.2_opam-2.3)
(user (uid 1000) (gid 1000))
(env CLICOLOR_FORCE 1)
(env OPAMCOLOR always)
(workdir /src)
(run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
(run (shell "opam init --reinit -ni"))
(run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
(workdir /src)
(run (shell "sudo chown opam /src"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u"))
(copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
(run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
(run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
(env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.2.1 ocaml-base-compiler.5.2.1 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.0 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
(env CI true)
(env OCAMLCI true)
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
(copy (src .) (dst /src))
(run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)


2025-05-22 20:00.59: Waiting for resource in pool OCluster
2025-05-22 20:00.59: Waiting for worker…
2025-05-22 20:01.13: Got resource from pool OCluster
Building on toxis.caelum.ci.dev
All commits already cached
HEAD is now at 39741884 Untested: convert remaining uses of Format except where printing Sexp values


(from ocaml/opam:debian-12-ocaml-5.2@sha256:a17317e9abe385dc16b4390c64a374046d6dd562e80aea838d91c6c1335da357)
2025-05-22 20:01.18 ---> using "54ec013a6f149facc0d7142b09647559c040b9be9f10f3b28aedc92eb0aa3eda" from cache


/: (comment debian-12-5.2_opam-2.3)


/: (user (uid 1000) (gid 1000))


/: (env CLICOLOR_FORCE 1)


/: (env OPAMCOLOR always)


/: (workdir /src)


/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-05-22 20:01.18 ---> using "c2927949a690e79730c631d7a8829146d4d2b9a42c6543f46b7f5eb020a6256d" from cache


/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.


This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.


Continue? [y/n] y
[NOTE] The 'jobs' option was reset, its value was 39 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using:
opam option jobs=39 --global
Format upgrade done.


<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
2025-05-22 20:01.18 ---> using "a7fd5cacf3665fbb975eef60f5b8ef985d00ea4b339eac782682eead64b75f01" from cache


/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.2.1
2.3.0
2025-05-22 20:01.18 ---> using "7adba0ccf89541c88b795935f2449fd37f815bccab093d57590be9ba50021241" from cache


/src: (workdir /src)


/src: (run (shell "sudo chown opam /src"))
2025-05-22 20:01.18 ---> using "b491f791e6cf8593356bc2c9bd1da33ed7889986fb1cee97a62247171886e89a" from cache


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
* branch                  master     -> FETCH_HEAD
0d013e603b..2df846cb67  master     -> origin/master
2df846cb67 Merge pull request #27910 from maiste/release-dune-3.19.0


<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] synchronised from git+file:///home/opam/opam-repository


Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 20:02.01 ---> saved as "e899542b767f8b08f9affbbfd4b98bf8872e1d0f269d4d0826246c95c1050a82"


/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
2025-05-22 20:02.01 ---> saved as "8ba6a4a9a08333968e5b77dd3f74a94abc810cc426a4cc29c4f5f3e8b2d8ef4b"


/src: (run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
[neural_nets_lib.dev] synchronised (file:///src)
neural_nets_lib is now pinned to file:///src (version dev)
[arrayjit.dev] synchronised (file:///src)
arrayjit is now pinned to file:///src (version dev)
2025-05-22 20:02.06 ---> saved as "6fba3d151fabf1ee013f84f1d399864e8772fd39fa46bbb1722a22137c9346bc"


/src: (run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
2025-05-22 20:02.06 ---> saved as "8859150331e1220b6123ff33db8bbea75670be329c38591444c3c05d78a3658d"


/src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.2.1 ocaml-base-compiler.5.2.1 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.0 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")


/src: (env CI true)


/src: (env OCAMLCI true)


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
+ /usr/bin/sudo "apt-get" "update"
- Get:1 http://deb.debian.org/debian bookworm InRelease [151 kB]
- Get:2 http://deb.debian.org/debian bookworm-updates InRelease [55.4 kB]
- Get:3 http://deb.debian.org/debian-security bookworm-security InRelease [48.0 kB]
- Get:4 http://deb.debian.org/debian bookworm/main amd64 Packages [8793 kB]
- Get:5 http://deb.debian.org/debian-security bookworm-security/main amd64 Packages [259 kB]
- Fetched 9306 kB in 2s (3816 kB/s)
- Reading package lists...
- 


<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[arrayjit.dev] synchronised (file:///src)
[neural_nets_lib.dev] synchronised (file:///src)


[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.2.1).
[NOTE] Package ocaml is already installed (current version is 5.2.1).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).


The following system packages will first need to be installed:
libffi-dev pkg-config


<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>


+ /usr/bin/sudo "apt-get" "install" "-qq" "-yy" "libffi-dev" "pkg-config"
- debconf: delaying package configuration, since apt-utils is not installed
- Selecting previously unselected package libffi-dev:amd64.
- (Reading database ... 18778 files and directories currently installed.)
- Preparing to unpack .../libffi-dev_3.4.4-1_amd64.deb ...
- Unpacking libffi-dev:amd64 (3.4.4-1) ...
- Selecting previously unselected package libpkgconf3:amd64.
- Preparing to unpack .../libpkgconf3_1.8.1-1_amd64.deb ...
- Unpacking libpkgconf3:amd64 (1.8.1-1) ...
- Selecting previously unselected package pkgconf-bin.
- Preparing to unpack .../pkgconf-bin_1.8.1-1_amd64.deb ...
- Unpacking pkgconf-bin (1.8.1-1) ...
- Selecting previously unselected package pkgconf:amd64.
- Preparing to unpack .../pkgconf_1.8.1-1_amd64.deb ...
- Unpacking pkgconf:amd64 (1.8.1-1) ...
- Selecting previously unselected package pkg-config:amd64.
- Preparing to unpack .../pkg-config_1.8.1-1_amd64.deb ...
- Unpacking pkg-config:amd64 (1.8.1-1) ...
- Setting up libffi-dev:amd64 (3.4.4-1) ...
- Setting up libpkgconf3:amd64 (1.8.1-1) ...
- Setting up pkgconf-bin (1.8.1-1) ...
- Setting up pkgconf:amd64 (1.8.1-1) ...
- Setting up pkg-config:amd64 (1.8.1-1) ...
- Processing triggers for libc-bin (2.36-9+deb12u10) ...
2025-05-22 20:02.40 ---> saved as "d17e8dcf88ff5735ace1e1e2b6e94d6585fcb7bec92fe58c6fe7d65d3a74846d"


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.2.1).
[NOTE] Package ocaml is already installed (current version is 5.2.1).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following actions will be performed:
=== install 75 packages
- install angstrom                0.16.1
- install astring                 0.8.5
- install backoff                 0.1.1
- install base                    v0.17.2
- install bigarray-compat         1.1.0
- install bigstringaf             0.10.0
- install camlp-streams           5.0.1
- install cmdliner                1.3.0
- install conf-libffi             2.0.0
- install conf-pkg-config         4
- install cppo                    1.8.0
- install csexp                   1.5.2
- install ctypes                  0.23.0
- install ctypes-foreign          0.23.0
- install dune                    3.19.0
- install dune-configurator       3.19.0
- install fieldslib               v0.17.0
- install fmt                     0.10.0
- install integers                0.7.0
- install jane-street-headers     v0.17.0
- install jst-config              v0.17.0
- install logs                    0.8.0
- install mdx                     2.5.0
- install mtime                   2.1.0
- install multicore-magic         2.3.1
- install num                     1.5-1
- install ocaml-compiler-libs     v0.17.0
- install ocaml-syntax-shims      1.0.0
- install ocaml-version           4.0.0
- install ocaml_intrinsics_kernel v0.17.1
- install ocamlbuild              0.16.1
- install ocamlfind               1.9.8
- install parsexp                 v0.17.0
- install pprint                  20230830
- install ppx_assert              v0.17.0
- install ppx_base                v0.17.0
- install ppx_cold                v0.17.0
- install ppx_compare             v0.17.0
- install ppx_derivers            1.2.1
- install ppx_deriving            6.0.3
- install ppx_enumerate           v0.17.0
- install ppx_expect              v0.17.2
- install ppx_fields_conv         v0.17.0
- install ppx_globalize           v0.17.0
- install ppx_hash                v0.17.0
- install ppx_here                v0.17.0
- install ppx_inline_test         v0.17.0
- install ppx_minidebug           2.2.0
- install ppx_optcomp             v0.17.0
- install ppx_sexp_conv           v0.17.0
- install ppx_string              v0.17.0
- install ppx_variants_conv       v0.17.0
- install ppxlib                  0.35.0
- install ppxlib_jane             v0.17.0
- install printbox                0.12
- install printbox-ext-plot       0.12
- install printbox-html           0.12
- install printbox-md             0.12
- install printbox-text           0.12
- install ptime                   1.2.0
- install re                      1.12.0
- install result                  1.5
- install saturn_lockfree         0.5.0
- install seq                     base
- install sexplib                 v0.17.0
- install sexplib0                v0.17.0
- install stdio                   v0.17.0
- install stdlib-shims            0.3.0
- install thread-local-storage    0.2
- install time_now                v0.17.0
- install topkg                   1.0.8
- install tyxml                   4.6.0
- install uucp                    16.0.0
- install uutf                    1.0.4
- install variantslib             v0.17.0


<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved backoff.0.1.1  (cached)
-> retrieved angstrom.0.16.1  (cached)
-> retrieved astring.0.8.5  (cached)
-> retrieved base.v0.17.2  (cached)
-> retrieved bigarray-compat.1.1.0  (cached)
-> retrieved bigstringaf.0.10.0  (cached)
-> retrieved camlp-streams.5.0.1  (cached)
-> retrieved cmdliner.1.3.0  (cached)
-> retrieved cppo.1.8.0  (cached)
-> installed conf-pkg-config.4
-> retrieved csexp.1.5.2  (cached)
-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0  (cached)
-> installed conf-libffi.2.0.0
-> retrieved fieldslib.v0.17.0  (cached)
-> retrieved integers.0.7.0  (cached)
-> retrieved fmt.0.10.0  (cached)
-> retrieved jane-street-headers.v0.17.0  (cached)
-> retrieved jst-config.v0.17.0  (cached)
-> retrieved logs.0.8.0  (cached)
-> retrieved mtime.2.1.0  (cached)
-> retrieved mdx.2.5.0  (cached)
-> retrieved multicore-magic.2.3.1  (cached)
-> retrieved num.1.5-1  (cached)
-> retrieved ocaml-compiler-libs.v0.17.0  (cached)
-> retrieved ocaml-syntax-shims.1.0.0  (cached)
-> retrieved ocaml-version.4.0.0  (cached)
-> retrieved ocaml_intrinsics_kernel.v0.17.1  (cached)
-> retrieved ocamlfind.1.9.8  (cached)
-> retrieved ocamlbuild.0.16.1  (cached)
-> retrieved parsexp.v0.17.0  (cached)
-> retrieved pprint.20230830  (cached)
-> retrieved ppx_assert.v0.17.0  (cached)
-> retrieved ppx_base.v0.17.0  (cached)
-> retrieved ppx_cold.v0.17.0  (cached)
-> retrieved ppx_compare.v0.17.0  (cached)
-> retrieved ppx_derivers.1.2.1  (cached)
-> retrieved ppx_deriving.6.0.3  (cached)
-> retrieved ppx_enumerate.v0.17.0  (cached)
-> retrieved ppx_expect.v0.17.2  (cached)
-> retrieved ppx_fields_conv.v0.17.0  (cached)
-> retrieved ppx_globalize.v0.17.0  (cached)
-> retrieved ppx_hash.v0.17.0  (cached)
-> retrieved ppx_here.v0.17.0  (cached)
-> retrieved ppx_inline_test.v0.17.0  (cached)
-> retrieved ppx_optcomp.v0.17.0  (cached)
-> retrieved ppx_sexp_conv.v0.17.0  (cached)
-> retrieved ppx_string.v0.17.0  (cached)
-> retrieved ppx_variants_conv.v0.17.0  (cached)
-> retrieved ppx_minidebug.2.2.0  (cached)
-> retrieved ppxlib_jane.v0.17.0  (cached)
-> retrieved dune.3.19.0, dune-configurator.3.19.0  (cached)
-> retrieved ppxlib.0.35.0  (cached)
-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12  (cached)
-> retrieved ptime.1.2.0  (cached)
-> retrieved re.1.12.0  (cached)
-> retrieved seq.base  (cached)
-> retrieved result.1.5  (cached)
-> retrieved sexplib.v0.17.0  (cached)
-> retrieved saturn_lockfree.0.5.0  (cached)
-> installed cmdliner.1.3.0
-> installed num.1.5-1
-> installed seq.base
-> retrieved sexplib0.v0.17.0  (cached)
-> retrieved stdio.v0.17.0  (cached)
-> retrieved stdlib-shims.0.3.0  (cached)
-> retrieved time_now.v0.17.0  (cached)
-> retrieved thread-local-storage.0.2  (cached)
-> retrieved topkg.1.0.8  (cached)
-> retrieved tyxml.4.6.0  (cached)
-> retrieved uutf.1.0.4  (cached)
-> retrieved variantslib.v0.17.0  (cached)
-> retrieved uucp.16.0.0  (cached)
-> installed ocamlbuild.0.16.1
-> installed ocamlfind.1.9.8
-> installed topkg.1.0.8
-> installed uutf.1.0.4
-> installed mtime.2.1.0
-> installed fmt.0.10.0
-> installed ptime.1.2.0
-> installed astring.0.8.5
-> installed logs.0.8.0
-> installed dune.3.19.0
-> installed jane-street-headers.v0.17.0
-> installed ppx_derivers.1.2.1
-> installed csexp.1.5.2
-> installed backoff.0.1.1
-> installed bigarray-compat.1.1.0
-> installed camlp-streams.5.0.1
-> installed multicore-magic.2.3.1
-> installed ocaml-version.4.0.0
-> installed ocaml_intrinsics_kernel.v0.17.1
-> installed pprint.20230830
-> installed printbox.0.12
-> installed result.1.5
-> installed sexplib0.v0.17.0
-> installed stdlib-shims.0.3.0
-> installed ocaml-syntax-shims.1.0.0
-> installed thread-local-storage.0.2
-> installed ocaml-compiler-libs.v0.17.0
-> installed cppo.1.8.0
-> installed re.1.12.0
-> installed integers.0.7.0
-> installed saturn_lockfree.0.5.0
-> installed parsexp.v0.17.0
-> installed dune-configurator.3.19.0
-> installed bigstringaf.0.10.0
-> installed sexplib.v0.17.0
-> installed mdx.2.5.0
-> installed angstrom.0.16.1
-> installed tyxml.4.6.0
-> installed printbox-html.0.12
-> installed ctypes.0.23.0
-> installed ctypes-foreign.0.23.0
-> installed base.v0.17.2
-> installed fieldslib.v0.17.0
-> installed variantslib.v0.17.0
-> installed stdio.v0.17.0
-> installed uucp.16.0.0
-> installed printbox-text.0.12
-> installed printbox-md.0.12
-> installed printbox-ext-plot.0.12
-> installed ppxlib.0.35.0
-> installed ppxlib_jane.v0.17.0
-> installed ppx_optcomp.v0.17.0
-> installed ppx_cold.v0.17.0
-> installed ppx_here.v0.17.0
-> installed ppx_variants_conv.v0.17.0
-> installed ppx_fields_conv.v0.17.0
-> installed ppx_enumerate.v0.17.0
-> installed ppx_globalize.v0.17.0
-> installed ppx_compare.v0.17.0
-> installed ppx_deriving.6.0.3
-> installed ppx_sexp_conv.v0.17.0
-> installed ppx_hash.v0.17.0
-> installed ppx_assert.v0.17.0
-> installed ppx_base.v0.17.0
-> installed ppx_minidebug.2.2.0
-> installed jst-config.v0.17.0
-> installed ppx_string.v0.17.0
-> installed time_now.v0.17.0
-> installed ppx_inline_test.v0.17.0
-> installed ppx_expect.v0.17.2
Done.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 20:05.12 ---> saved as "6ff7e024a11b4589bd6972ecb1f4f25ee35a2227e23f2272266ab68ee843f4b4"


/src: (copy (src .) (dst /src))
2025-05-22 20:05.13 ---> saved as "cb39b0ac4fcbc74079aefdcc9d48519dff3c2d9fa002c53976c4a9ef0485b03c"


/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
(cd _build/default/test/config && ../../arrayjit/bin/read_config.exe --read=backend)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/config/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Wrote value of 'backend' to ocannl_backend.txt
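
The lookup order reported above (commandline, then environment, then the ocannl_config file) also allows per-invocation overrides. A hedged sketch, assuming read_config accepts the same --ocannl_* commandline flags that the test rules further down pass to other executables:

opam exec -- dune exec arrayjit/bin/read_config.exe -- --read=backend --ocannl_backend=multicore_cc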
(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test_ppx && ./test_ppx_op.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/6222ca0f1fd23b1c64112c5d2659a276/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/dune", lines 30-40, characters 0-281:
30 | (rule
31 |  (alias runtest)
32 |  (target
33 |   (dir log_files))
34 |  (action
35 |   (run
36 |    %{dep:micrograd_demo_logging.exe}
37 |    "--ocannl_debug_backend=text"
38 |    "--ocannl_log_file_stem=micrograd_demo_logging"
39 |    "--ocannl_log_main_domain_to_stdout=false"
40 |    "--ocannl_debug_log_to_stream_files=true")))
(cd _build/default/test && ./micrograd_demo_logging.exe --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_stream_files=true)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_cd_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Found true, commandline --ocannl_debug_log_to_stream_files=true
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Fatal error: exception File "src/printbox-text/PrintBox_text.ml", line 212, characters 6-12: Assertion failed
Raised at PrintBox_text.Output.Make_out.to_buf_aux_ in file "src/printbox-text/PrintBox_text.ml", line 212, characters 6-50
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 19-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from PrintBox_text.Output.Make_out.render in file "src/printbox-text/PrintBox_text.ml", line 242, characters 14-64
Called from PrintBox_text.output in file "src/printbox-text/PrintBox_text.ml", line 851, characters 2-31
Called from Minidebug_runtime.PrintBox.output_box in file "minidebug_runtime.ml", line 1527, characters 19-59
Called from Minidebug_runtime.PrintBox.close_log_impl.close_tree in file "minidebug_runtime.ml", line 1572, characters 6-38
Called from Backends.Add_buffer_retrieval_and_syncing.sync_routine in file "arrayjit/lib/backends.ml", lines 144-172, characters 31-82
Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 454-455, characters 4-92
Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 441-455, characters 23-92
Called from Dune__exe__Micrograd_demo_logging in file "test/micrograd_demo_logging.ml", line 34, characters 13-77
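
To isolate this assertion failure from the rest of the @runtest alias, the failing executable can be re-run on its own with the same flags the dune rule passes (a sketch; the flags are copied from the command above, and dune exec resolves the executable by its build path):

opam exec -- dune exec test/micrograd_demo_logging.exe -- \
  --ocannl_debug_backend=text \
  --ocannl_log_file_stem=micrograd_demo_logging \
  --ocannl_log_main_domain_to_stdout=false \
  --ocannl_debug_log_to_stream_files=true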
(cd _build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/micrograd_demo.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/micrograd_demo.ml _build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/micrograd_demo.ml.corrected
diff --git a/_build/default/test/micrograd_demo.ml b/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/micrograd_demo.ml.corrected
index 77e46c6..ab81526 100644
--- a/_build/default/test/micrograd_demo.ml
+++ b/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/micrograd_demo.ml.corrected
@@ -52,15 +52,14 @@ let%expect_test "Micrograd README basic example" =
│├┼───────┤       │
│││ -4.00 │       │
│└┴───────┘       │
-    └─────────────────┘
-    ┌────────────────────────┐
-                                                              │[0]: a shape 0:1  grad_a│
-                                                              │┌┬─────────┐            │
-                                                              │││axis 0   │            │
-                                                              │├┼─────────┤            │
-                                                              │││ 1.38e+2 │            │
-                                                              │└┴─────────┘            │
-                                                              └────────────────────────┘
+    └─────────────────┘┌────────────────────────┐
+    │[0]: a shape 0:1  grad_a│
+    │┌┬─────────┐            │
+    │││axis 0   │            │
+    │├┼─────────┤            │
+    │││ 1.38e+2 │            │
+    │└┴─────────┘            │
+    └────────────────────────┘
|}];
Tensor.print ~with_code:false ~with_grad:true `Default b;
[%expect
@@ -72,15 +71,14 @@ let%expect_test "Micrograd README basic example" =
│├┼──────┤        │
│││ 2.00 │        │
│└┴──────┘        │
-    └─────────────────┘
-    ┌────────────────────────┐
-                                                              │[2]: b shape 0:1  grad_b│
-                                                              │┌┬─────────┐            │
-                                                              │││axis 0   │            │
-                                                              │├┼─────────┤            │
-                                                              │││ 6.45e+2 │            │
-                                                              │└┴─────────┘            │
-                                                              └────────────────────────┘
+    └─────────────────┘┌────────────────────────┐
+    │[2]: b shape 0:1  grad_b│
+    │┌┬─────────┐            │
+    │││axis 0   │            │
+    │├┼─────────┤            │
+    │││ 6.45e+2 │            │
+    │└┴─────────┘            │
+    └────────────────────────┘
|}]


let%expect_test "Micrograd half-moons example" =
File "test/hello_world_op.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/hello_world_op.ml _build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/hello_world_op.ml.corrected
diff --git a/_build/default/test/hello_world_op.ml b/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/hello_world_op.ml.corrected
index ba9d7ef..6bfa309 100644
--- a/_build/default/test/hello_world_op.ml
+++ b/_build/.sandbox/b12ed6cdc190b115c071468af82e0450/default/test/hello_world_op.ml.corrected
@@ -102,36 +102,46 @@ let%expect_test "Print constant tensor" =
let%op hey = [ (1, 2, 3); (4, 5, 6) ] in
Train.forward_and_forget backend ctx hey;
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey;
-  [%expect {| [1.00, 2.00, 3.00; 4.00, 5.00, 6.00] |}];
+  [%expect {|
+    [0]: [  1.00 , 2.00 , 3.00  ;  4.00 , 5.00 , 6.00  ]_hey shape 1:3->0:2  [
+       1.00 , 2.00 , 3.00
+      ;  4.00 , 5.00 , 6.00
+    ]
+    |}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;
[%expect
{|
-    ┌─────────────────────────────────────────────────────────────┐
-    │[0]: [1.00, 2.00, 3.00; 4.00, 5.00, 6.00]_hey shape 1:3->0:2 │
-    │┌──────┬──────────────────┐                                  │
-    ││      │axis 1            │                                  │
-    │├──────┼──────────────────┤                                  │
-    ││axis 0│ 1.00  2.00  3.00 │                                  │
-    ││      │ 4.00  5.00  6.00 │                                  │
-    │└──────┴──────────────────┘                                  │
-    └─────────────────────────────────────────────────────────────┘
+    ┌────────────────────────────────────────────────────────────────────────┐
+    │[0]: [  1.00 , 2.00 , 3.00  ;  4.00 , 5.00 , 6.00  ]_hey shape 1:3->0:2 │
+    │┌──────┬──────────────────┐                                             │
+    ││      │axis 1            │                                             │
+    │├──────┼──────────────────┤                                             │
+    ││axis 0│ 1.00  2.00  3.00 │                                             │
+    ││      │ 4.00  5.00  6.00 │                                             │
+    │└──────┴──────────────────┘                                             │
+    └────────────────────────────────────────────────────────────────────────┘
|}];
let%op hoo = [| [ 1; 2; 3 ]; [ 4; 5; 6 ] |] in
Train.forward_and_forget backend ctx hoo;
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo;
-  [%expect {| [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|] |}];
+  [%expect {|
+    [1]: [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]_hoo shape 0:2|1:3  [|
+      [ 1.00 ; 2.00 ; 3.00 ]
+      ; [ 4.00 ; 5.00 ; 6.00 ]
+    |]
+    |}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo;
[%expect
{|
-    ┌──────────────────────────────────────────────────────────────────┐
-    │[1]: [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|]_hoo shape 0:2|1:3 │
-    │┌──────┬──────────────────┐                                       │
-    ││      │axis 1            │                                       │
-    │├──────┼──────────────────┤                                       │
-    ││axis 0│ 1.00  2.00  3.00 │                                       │
-    ││      │ 4.00  5.00  6.00 │                                       │
-    │└──────┴──────────────────┘                                       │
-    └──────────────────────────────────────────────────────────────────┘
+    ┌─────────────────────────────────────────────────────────────────────────────┐
+    │[1]: [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]_hoo shape 0:2|1:3 │
+    │┌──────┬──────────────────┐                                                  │
+    ││      │axis 1            │                                                  │
+    │├──────┼──────────────────┤                                                  │
+    ││axis 0│ 1.00  2.00  3.00 │                                                  │
+    ││      │ 4.00  5.00  6.00 │                                                  │
+    │└──────┴──────────────────┘                                                  │
+    └─────────────────────────────────────────────────────────────────────────────┘
|}];
let%op hey2 =
[
@@ -145,10 +155,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey2;
[%expect
{|
-    [(1.00, 2.00, 3.00), (4.00, 5.00, 6.00);
-      (7.00, 8.00, 9.00), (10.00, 11.00, 12.00);
-      (13.00, 14.00, 15.00), (16.00, 17.00, 18.00);
-      (19.00, 20.00, 21.00), (22.00, 23.00, 24.00)]
+    [2]: c4x2x3_hey2 shape 1:2,2:3->0:4  [
+       ( 1.00 , 2.00 , 3.00 ) , ( 4.00 , 5.00 , 6.00 )
+      ;  ( 7.00 , 8.00 , 9.00 ) , ( 10.00 , 11.00 , 12.00 )
+      ;  ( 13.00 , 14.00 , 15.00 ) , ( 16.00 , 17.00 , 18.00 )
+      ;  ( 19.00 , 20.00 , 21.00 ) , ( 22.00 , 23.00 , 24.00 )
+    ]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2;
[%expect
@@ -178,10 +190,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo2;
[%expect
{|
-    [|[[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]];
-      [[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]];
-      [[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]];
-      [[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]]|]
+    [3]: c4x2x3_hoo2 shape 0:4|1:2,2:3  [|
+      [ [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] ]
+      ; [ [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] ]
+      ; [ [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] ]
+      ; [ [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] ]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo2;
[%expect
@@ -209,10 +223,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo;
[%expect
{|
-    [|[|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|];
-      [|[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]|];
-      [|[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]|];
-      [|[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]|]|]
+    [4]: c4x2x3_heyhoo shape 0:4,1:2|2:3  [|
+      [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]
+      ; [| [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] |]
+      ; [| [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] |]
+      ; [| [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] |]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo;
[%expect
@@ -240,15 +256,24 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo2;
[%expect
{|
-    [|
-      [|[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];
-        [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]|];
-      [|[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];
-        [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]|];
-      [|[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];
-        [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]|];
-      [|[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];
-        [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]|]|]
+    [5]: c4x2x3x2_heyhoo2 shape 0:4,1:2|2:3,3:2  [|
+      [|
+        [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]
+        ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]
+      |]
+      ; [|
+        [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]
+        ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]
+      |]
+      ; [|
+        [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]
+        ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]
+      |]
+      ; [|
+        [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]
+        ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]
+      |]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo2;
[%expect
@@ -293,17 +318,28 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo3;
[%expect
{|
-    [|
+    [6]: c2x2x2x3x2_heyhoo3 shape 0:2,1:2|2:2,3:3,4:2  [|
[|
-        [[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];
-          [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]];
-        [[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];
-          [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]]|];
-      [|
-        [[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];
-          [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]];
-        [[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];
-          [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]]|]|]
+        [
+          [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]
+          ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]
+        ]
+        ; [
+          [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]
+          ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]
+        ]
+      |]
+      ; [|
+        [
+          [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]
+          ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]
+        ]
+        ; [
+          [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]
+          ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]
+        ]
+      |]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo3;
[%expect
@@ -353,17 +389,28 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo4;
[%expect
{|
-    [|
-      [
-        [[1.00, 31.00; 2.00, 32.00; 3.00, 33.00];
-          [4.00, 34.00; 5.00, 35.00; 6.00, 36.00]];
-        [[7.00, 37.00; 8.00, 38.00; 9.00, 39.00];
-          [10.00, 40.00; 11.00, 41.00; 12.00, 42.00]]];
+    [7]: c2x2x2x3x2_heyhoo4 shape 0:2|4:2->1:2,2:2,3:3  [|
[
-        [[13.00, 43.00; 14.00, 44.00; 15.00, 45.00];
-          [16.00, 46.00; 17.00, 47.00; 18.00, 48.00]];
-        [[19.00, 49.00; 20.00, 50.00; 21.00, 51.00];
-          [22.00, 52.00; 23.00, 53.00; 24.00, 54.00]]]|]
+        [
+          [  1.00 , 31.00  ;  2.00 , 32.00  ;  3.00 , 33.00  ]
+          ; [  4.00 , 34.00  ;  5.00 , 35.00  ;  6.00 , 36.00  ]
+        ]
+        ; [
+          [  7.00 , 37.00  ;  8.00 , 38.00  ;  9.00 , 39.00  ]
+          ; [  10.00 , 40.00  ;  11.00 , 41.00  ;  12.00 , 42.00  ]
+        ]
+      ]
+      ; [
+        [
+          [  13.00 , 43.00  ;  14.00 , 44.00  ;  15.00 , 45.00  ]
+          ; [  16.00 , 46.00  ;  17.00 , 47.00  ;  18.00 , 48.00  ]
+        ]
+        ; [
+          [  19.00 , 49.00  ;  20.00 , 50.00  ;  21.00 , 51.00  ]
+          ; [  22.00 , 52.00  ;  23.00 , 53.00  ;  24.00 , 54.00  ]
+        ]
+      ]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo4;
[%expect
@@ -462,8 +509,29 @@ let%expect_test "Big matrix" =
Tensor.print ~with_code:false ~with_grad:false `Inline zero_to_twenty;
[%expect
{|
-    [0.00; 1.00; 2.00; 3.00; 4.00; 5.00; 6.00; 7.00; 8.00; 9.00; 10.00; 11.00;
-      12.00; 13.00; 14.00; 15.00; 16.00; 17.00; 18.00; 19.00; 20.00]
+    [2]: 0...20 shape 0:21  [
+      0.00
+      ; 1.00
+      ; 2.00
+      ; 3.00
+      ; 4.00
+      ; 5.00
+      ; 6.00
+      ; 7.00
+      ; 8.00
+      ; 9.00
+      ; 10.00
+      ; 11.00
+      ; 12.00
+      ; 13.00
+      ; 14.00
+      ; 15.00
+      ; 16.00
+      ; 17.00
+      ; 18.00
+      ; 19.00
+      ; 20.00
+    ]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default zero_to_twenty;
[%expect
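
The two diffs above are expect-test corrections: the .corrected files hold the output the tests actually produced. If the new PrintBox renderings are intended rather than a regression, dune's promotion workflow can rewrite the [%expect] blocks in place; whether that is the right call here is a judgement for the maintainers, but the commands themselves are standard dune:

opam exec -- dune runtest
opam exec -- dune promote test/micrograd_demo.ml test/hello_world_op.ml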
(cd _build/default/test && ./moons_demo_parallel_run.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
("Set log_level to" 1)
└─{orphaned from #2}
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Properties of devices:
(multicore_devices
(device ((device_name CPU) (device_ordinal 0) (num_domains 72))))
@!Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Not found, using default false
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout
Not found, using default false
Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453
Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087
Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382
Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039
Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216
Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512
Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081
Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141
Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385
Batch=599, step=600, lr=0.197500, batch loss=1.063878, epoch loss=41.596263
Batch=659, step=660, lr=0.197250, batch loss=0.483159, epoch loss=42.079422
Batch=719, step=720, lr=0.197000, batch loss=0.411291, epoch loss=42.490714
Batch=779, step=780, lr=0.196750, batch loss=0.468697, epoch loss=42.959411
Batch=839, step=840, lr=0.196500, batch loss=0.443342, epoch loss=43.402753
Batch=899, step=900, lr=0.196250, batch loss=0.383518, epoch loss=43.786271
Batch=959, step=960, lr=0.196000, batch loss=0.243127, epoch loss=44.029398
Batch=1019, step=1020, lr=0.195750, batch loss=0.454392, epoch loss=44.483790
Batch=1079, step=1080, lr=0.195500, batch loss=0.254356, epoch loss=44.738146
Batch=1139, step=1140, lr=0.195250, batch loss=0.335735, epoch loss=45.073882
Batch=1199, step=1200, lr=0.195250, batch loss=0.262306, epoch loss=45.336188
Epoch=0, step=1200, lr=0.195250, epoch loss=45.336188
Batch=59, step=1260, lr=0.195000, batch loss=0.261125, epoch loss=0.261125
Batch=119, step=1320, lr=0.194750, batch loss=0.206825, epoch loss=0.467950
Batch=179, step=1380, lr=0.194500, batch loss=0.246683, epoch loss=0.714633
Batch=239, step=1440, lr=0.194250, batch loss=0.352130, epoch loss=1.066763
Batch=299, step=1500, lr=0.194000, batch loss=0.235771, epoch loss=1.302534
Batch=359, step=1560, lr=0.193750, batch loss=0.312398, epoch loss=1.614932
Batch=419, step=1620, lr=0.193250, batch loss=0.309011, epoch loss=1.923943
Batch=479, step=1680, lr=0.193000, batch loss=0.273480, epoch loss=2.197423
Batch=539, step=1740, lr=0.192750, batch loss=0.210916, epoch loss=2.408340
Batch=599, step=1800, lr=0.192750, batch loss=0.250444, epoch loss=2.658784
Batch=659, step=1860, lr=0.192500, batch loss=0.369694, epoch loss=3.028478
Batch=719, step=1920, lr=0.192250, batch loss=0.365337, epoch loss=3.393815
Batch=779, step=1980, lr=0.192000, batch loss=0.384071, epoch loss=3.777886
Batch=839, step=2040, lr=0.191750, batch loss=0.341756, epoch loss=4.119642
Batch=899, step=2100, lr=0.191500, batch loss=0.302667, epoch loss=4.422308
Batch=959, step=2160, lr=0.191250, batch loss=0.234160, epoch loss=4.656469
Batch=1019, step=2220, lr=0.191000, batch loss=0.355501, epoch loss=5.011969
Batch=1079, step=2280, lr=0.190750, batch loss=0.237096, epoch loss=5.249066
Batch=1139, step=2340, lr=0.190500, batch loss=0.282087, epoch loss=5.531153
Batch=1199, step=2400, lr=0.190250, batch loss=0.218490, epoch loss=5.749643
Epoch=1, step=2400, lr=0.190250, epoch loss=5.749643
Batch=59, step=2460, lr=0.190000, batch loss=0.230366, epoch loss=0.230366
Batch=119, step=2520, lr=0.189750, batch loss=0.188721, epoch loss=0.419087
Batch=179, step=2580, lr=0.189500, batch loss=0.220992, epoch loss=0.640080
Batch=239, step=2640, lr=0.189250, batch loss=0.335059, epoch loss=0.975138
Batch=299, step=2700, lr=0.189000, batch loss=0.215251, epoch loss=1.190390
Batch=359, step=2760, lr=0.188500, batch loss=0.298966, epoch loss=1.489356
Batch=419, step=2820, lr=0.188500, batch loss=0.287067, epoch loss=1.776423
Batch=479, step=2880, lr=0.188250, batch loss=0.261263, epoch loss=2.037685
Batch=539, step=2940, lr=0.188000, batch loss=0.196766, epoch loss=2.234451
Batch=599, step=3000, lr=0.187750, batch loss=0.239101, epoch loss=2.473552
Batch=659, step=3060, lr=0.187500, batch loss=0.346441, epoch loss=2.819993
Batch=719, step=3120, lr=0.187250, batch loss=0.349298, epoch loss=3.169291
Batch=779, step=3180, lr=0.186750, batch loss=0.363549, epoch loss=3.532839
Batch=839, step=3240, lr=0.186750, batch loss=0.325904, epoch loss=3.858744
Batch=899, step=3300, lr=0.186500, batch loss=0.295177, epoch loss=4.153921
Batch=959, step=3360, lr=0.186250, batch loss=0.231043, epoch loss=4.384964
Batch=1019, step=3420, lr=0.186000, batch loss=0.342050, epoch loss=4.727015
Batch=1079, step=3480, lr=0.185750, batch loss=0.188653, epoch loss=4.915668
Batch=1139, step=3540, lr=0.185250, batch loss=0.232545, epoch loss=5.148213
Batch=1199, step=3600, lr=0.185250, batch loss=0.199158, epoch loss=5.347371
Epoch=2, step=3600, lr=0.185250, epoch loss=5.347371
Batch=59, step=3660, lr=0.185000, batch loss=0.231475, epoch loss=0.231475
Batch=119, step=3720, lr=0.184500, batch loss=0.196898, epoch loss=0.428373
Batch=179, step=3780, lr=0.184500, batch loss=0.212976, epoch loss=0.641349
Batch=239, step=3840, lr=0.184250, batch loss=0.319342, epoch loss=0.960691
Batch=299, step=3900, lr=0.183750, batch loss=0.207216, epoch loss=1.167907
Batch=359, step=3960, lr=0.183500, batch loss=0.286826, epoch loss=1.454733
Batch=419, step=4020, lr=0.183250, batch loss=0.274745, epoch loss=1.729478
Batch=479, step=4080, lr=0.183000, batch loss=0.257201, epoch loss=1.986679
Batch=539, step=4140, lr=0.182750, batch loss=0.197440, epoch loss=2.184119
Batch=599, step=4200, lr=0.182750, batch loss=0.238735, epoch loss=2.422854
Batch=659, step=4260, lr=0.182500, batch loss=0.330054, epoch loss=2.752908
Batch=719, step=4320, lr=0.182250, batch loss=0.328687, epoch loss=3.081595
Batch=779, step=4380, lr=0.182000, batch loss=0.351134, epoch loss=3.432729
Batch=839, step=4440, lr=0.181750, batch loss=0.319240, epoch loss=3.751968
Batch=899, step=4500, lr=0.181500, batch loss=0.291437, epoch loss=4.043405
Batch=959, step=4560, lr=0.181250, batch loss=0.241015, epoch loss=4.284420
Batch=1019, step=4620, lr=0.181000, batch loss=0.340057, epoch loss=4.624477
Batch=1079, step=4680, lr=0.180750, batch loss=0.198429, epoch loss=4.822905
Batch=1139, step=4740, lr=0.180500, batch loss=0.228382, epoch loss=5.051287
Batch=1199, step=4800, lr=0.180000, batch loss=0.192079, epoch loss=5.243366
Epoch=3, step=4800, lr=0.180000, epoch loss=5.243366
Batch=59, step=4860, lr=0.180000, batch loss=0.229817, epoch loss=0.229817
Batch=119, step=4920, lr=0.179750, batch loss=0.188595, epoch loss=0.418412
Batch=179, step=4980, lr=0.179500, batch loss=0.205878, epoch loss=0.624289
Batch=239, step=5040, lr=0.179250, batch loss=0.307707, epoch loss=0.931996
Batch=299, step=5100, lr=0.179000, batch loss=0.208623, epoch loss=1.140619
Batch=359, step=5160, lr=0.178750, batch loss=0.274423, epoch loss=1.415042
Batch=419, step=5220, lr=0.178500, batch loss=0.266338, epoch loss=1.681379
Batch=479, step=5280, lr=0.178250, batch loss=0.241050, epoch loss=1.922429
Batch=539, step=5340, lr=0.178000, batch loss=0.189866, epoch loss=2.112296
Batch=599, step=5400, lr=0.177500, batch loss=0.231233, epoch loss=2.343529
Batch=659, step=5460, lr=0.177500, batch loss=0.324245, epoch loss=2.667774
Batch=719, step=5520, lr=0.177250, batch loss=0.325535, epoch loss=2.993309
Batch=779, step=5580, lr=0.177000, batch loss=0.339473, epoch loss=3.332782
Batch=839, step=5640, lr=0.176500, batch loss=0.313036, epoch loss=3.645818
Batch=899, step=5700, lr=0.176250, batch loss=0.276423, epoch loss=3.922241
Batch=959, step=5760, lr=0.176000, batch loss=0.209790, epoch loss=4.132032
Batch=1019, step=5820, lr=0.176000, batch loss=0.337963, epoch loss=4.469995
Batch=1079, step=5880, lr=0.175500, batch loss=0.191396, epoch loss=4.661391
Batch=1139, step=5940, lr=0.175500, batch loss=0.219878, epoch loss=4.881269
Batch=1199, step=6000, lr=0.175250, batch loss=0.190166, epoch loss=5.071435
Epoch=4, step=6000, lr=0.175250, epoch loss=5.071435
Batch=59, step=6060, lr=0.175000, batch loss=0.234250, epoch loss=0.234250
Batch=119, step=6120, lr=0.174750, batch loss=0.190816, epoch loss=0.425066
Batch=179, step=6180, lr=0.174250, batch loss=0.200947, epoch loss=0.626013
Batch=239, step=6240, lr=0.174000, batch loss=0.300531, epoch loss=0.926544
Batch=299, step=6300, lr=0.174000, batch loss=0.205213, epoch loss=1.131757
Batch=359, step=6360, lr=0.173750, batch loss=0.268123, epoch loss=1.399880
Batch=419, step=6420, lr=0.173500, batch loss=0.266315, epoch loss=1.666195
Batch=479, step=6480, lr=0.173250, batch loss=0.242813, epoch loss=1.909008
Batch=539, step=6540, lr=0.173000, batch loss=0.193870, epoch loss=2.102877
Batch=599, step=6600, lr=0.172750, batch loss=0.234248, epoch loss=2.337126
Batch=659, step=6660, lr=0.172500, batch loss=0.313184, epoch loss=2.650310
Batch=719, step=6720, lr=0.172250, batch loss=0.318126, epoch loss=2.968436
Batch=779, step=6780, lr=0.171750, batch loss=0.336604, epoch loss=3.305040
Batch=839, step=6840, lr=0.171750, batch loss=0.302124, epoch loss=3.607164
Batch=899, step=6900, lr=0.171500, batch loss=0.269504, epoch loss=3.876669
Batch=959, step=6960, lr=0.171000, batch loss=0.206425, epoch loss=4.083093
Batch=1019, step=7020, lr=0.171000, batch loss=0.330980, epoch loss=4.414073
Batch=1079, step=7080, lr=0.170750, batch loss=0.179607, epoch loss=4.593680
Batch=1139, step=7140, lr=0.170500, batch loss=0.214157, epoch loss=4.807837
Batch=1199, step=7200, lr=0.170250, batch loss=0.184020, epoch loss=4.991857
Epoch=5, step=7200, lr=0.170250, epoch loss=4.991857
Batch=59, step=7260, lr=0.170000, batch loss=0.238523, epoch loss=0.238523
Batch=119, step=7320, lr=0.169750, batch loss=0.180329, epoch loss=0.418852
Batch=179, step=7380, lr=0.169500, batch loss=0.194959, epoch loss=0.613811
Batch=239, step=7440, lr=0.169250, batch loss=0.289512, epoch loss=0.903322
Batch=299, step=7500, lr=0.169000, batch loss=0.199192, epoch loss=1.102514
Batch=359, step=7560, lr=0.168750, batch loss=0.253876, epoch loss=1.356390
Batch=419, step=7620, lr=0.168500, batch loss=0.249711, epoch loss=1.606101
Batch=479, step=7680, lr=0.168250, batch loss=0.224450, epoch loss=1.830551
Batch=539, step=7740, lr=0.168000, batch loss=0.185013, epoch loss=2.015563
Batch=599, step=7800, lr=0.167750, batch loss=0.218139, epoch loss=2.233702
Batch=659, step=7860, lr=0.167500, batch loss=0.306521, epoch loss=2.540223
Batch=719, step=7920, lr=0.167250, batch loss=0.303717, epoch loss=2.843940
Batch=779, step=7980, lr=0.167000, batch loss=0.323018, epoch loss=3.166957
Batch=839, step=8040, lr=0.166750, batch loss=0.293648, epoch loss=3.460605
Batch=899, step=8100, lr=0.166500, batch loss=0.261224, epoch loss=3.721830
Batch=959, step=8160, lr=0.166250, batch loss=0.201388, epoch loss=3.923218
Batch=1019, step=8220, lr=0.166000, batch loss=0.327523, epoch loss=4.250741
Batch=1079, step=8280, lr=0.165750, batch loss=0.199075, epoch loss=4.449816
Batch=1139, step=8340, lr=0.165500, batch loss=0.220107, epoch loss=4.669924
Batch=1199, step=8400, lr=0.165250, batch loss=0.174761, epoch loss=4.844685
Epoch=6, step=8400, lr=0.165250, epoch loss=4.844685
Batch=59, step=8460, lr=0.165000, batch loss=0.206109, epoch loss=0.206109
Batch=119, step=8520, lr=0.164750, batch loss=0.175278, epoch loss=0.381386
Batch=179, step=8580, lr=0.164500, batch loss=0.187779, epoch loss=0.569165
Batch=239, step=8640, lr=0.164250, batch loss=0.275952, epoch loss=0.845117
Batch=299, step=8700, lr=0.164000, batch loss=0.184659, epoch loss=1.029776
Batch=359, step=8760, lr=0.163750, batch loss=0.244196, epoch loss=1.273972
Batch=419, step=8820, lr=0.163500, batch loss=0.238134, epoch loss=1.512106
Batch=479, step=8880, lr=0.163250, batch loss=0.213322, epoch loss=1.725427
Batch=539, step=8940, lr=0.163000, batch loss=0.178182, epoch loss=1.903609
Batch=599, step=9000, lr=0.162750, batch loss=0.218081, epoch loss=2.121690
Batch=659, step=9060, lr=0.162500, batch loss=0.293041, epoch loss=2.414731
Batch=719, step=9120, lr=0.162250, batch loss=0.294347, epoch loss=2.709078
Batch=779, step=9180, lr=0.162000, batch loss=0.314843, epoch loss=3.023921
Batch=839, step=9240, lr=0.161750, batch loss=0.286580, epoch loss=3.310501
Batch=899, step=9300, lr=0.161500, batch loss=0.251369, epoch loss=3.561870
Batch=959, step=9360, lr=0.161250, batch loss=0.190441, epoch loss=3.752311
Batch=1019, step=9420, lr=0.161000, batch loss=0.316397, epoch loss=4.068707
Batch=1079, step=9480, lr=0.160750, batch loss=0.201300, epoch loss=4.270008
Batch=1139, step=9540, lr=0.160500, batch loss=0.211184, epoch loss=4.481191
Batch=1199, step=9600, lr=0.160250, batch loss=0.167491, epoch loss=4.648683
Epoch=7, step=9600, lr=0.160250, epoch loss=4.648683
Batch=59, step=9660, lr=0.160000, batch loss=0.200551, epoch loss=0.200551
Batch=119, step=9720, lr=0.159750, batch loss=0.164912, epoch loss=0.365463
Batch=179, step=9780, lr=0.159500, batch loss=0.178022, epoch loss=0.543485
Batch=239, step=9840, lr=0.159250, batch loss=0.262513, epoch loss=0.805999
Batch=299, step=9900, lr=0.159000, batch loss=0.181444, epoch loss=0.987443
Batch=359, step=9960, lr=0.158750, batch loss=0.241469, epoch loss=1.228911
Batch=419, step=10020, lr=0.158500, batch loss=0.233045, epoch loss=1.461957
Batch=479, step=10080, lr=0.158250, batch loss=0.213299, epoch loss=1.675256
Batch=539, step=10140, lr=0.158000, batch loss=0.171895, epoch loss=1.847151
Batch=599, step=10200, lr=0.157750, batch loss=0.201595, epoch loss=2.048746
Batch=659, step=10260, lr=0.157500, batch loss=0.281348, epoch loss=2.330094
Batch=719, step=10320, lr=0.157250, batch loss=0.285377, epoch loss=2.615472
Batch=779, step=10380, lr=0.157000, batch loss=0.295715, epoch loss=2.911187
Batch=839, step=10440, lr=0.156750, batch loss=0.266351, epoch loss=3.177538
Batch=899, step=10500, lr=0.156500, batch loss=0.237318, epoch loss=3.414856
Batch=959, step=10560, lr=0.156000, batch loss=0.179050, epoch loss=3.593906
Batch=1019, step=10620, lr=0.156000, batch loss=0.299303, epoch loss=3.893209
Batch=1079, step=10680, lr=0.155750, batch loss=0.175352, epoch loss=4.068561
Batch=1139, step=10740, lr=0.155500, batch loss=0.199812, epoch loss=4.268373
Batch=1199, step=10800, lr=0.155250, batch loss=0.155210, epoch loss=4.423583
Epoch=8, step=10800, lr=0.155250, epoch loss=4.423583
Batch=59, step=10860, lr=0.155000, batch loss=0.177359, epoch loss=0.177359
Batch=119, step=10920, lr=0.154750, batch loss=0.152914, epoch loss=0.330273
Batch=179, step=10980, lr=0.154500, batch loss=0.167663, epoch loss=0.497936
Batch=239, step=11040, lr=0.154250, batch loss=0.252419, epoch loss=0.750355
Batch=299, step=11100, lr=0.154000, batch loss=0.162587, epoch loss=0.912942
Batch=359, step=11160, lr=0.153500, batch loss=0.226998, epoch loss=1.139940
Batch=419, step=11220, lr=0.153500, batch loss=0.226751, epoch loss=1.366691
Batch=479, step=11280, lr=0.153250, batch loss=0.204336, epoch loss=1.571027
Batch=539, step=11340, lr=0.153000, batch loss=0.158011, epoch loss=1.729038
Batch=599, step=11400, lr=0.152750, batch loss=0.181204, epoch loss=1.910242
Batch=659, step=11460, lr=0.152250, batch loss=0.262899, epoch loss=2.173141
Batch=719, step=11520, lr=0.152000, batch loss=0.256425, epoch loss=2.429566
Batch=779, step=11580, lr=0.152000, batch loss=0.271629, epoch loss=2.701195
Batch=839, step=11640, lr=0.151750, batch loss=0.256466, epoch loss=2.957661
Batch=899, step=11700, lr=0.151500, batch loss=0.217579, epoch loss=3.175241
Batch=959, step=11760, lr=0.151250, batch loss=0.173484, epoch loss=3.348725
Batch=1019, step=11820, lr=0.151000, batch loss=0.260130, epoch loss=3.608855
Batch=1079, step=11880, lr=0.150750, batch loss=0.140082, epoch loss=3.748936
Batch=1139, step=11940, lr=0.150500, batch loss=0.173722, epoch loss=3.922658
Batch=1199, step=12000, lr=0.150250, batch loss=0.141873, epoch loss=4.064531
Epoch=9, step=12000, lr=0.150250, epoch loss=4.064531
Batch=59, step=12060, lr=0.150000, batch loss=0.174650, epoch loss=0.174650
Batch=119, step=12120, lr=0.149750, batch loss=0.148354, epoch loss=0.323004
Batch=179, step=12180, lr=0.149250, batch loss=0.151603, epoch loss=0.474607
Batch=239, step=12240, lr=0.149250, batch loss=0.221107, epoch loss=0.695715
Batch=299, step=12300, lr=0.148750, batch loss=0.138602, epoch loss=0.834317
Batch=359, step=12360, lr=0.148750, batch loss=0.198882, epoch loss=1.033199
Batch=419, step=12420, lr=0.148250, batch loss=0.208068, epoch loss=1.241267
Batch=479, step=12480, lr=0.148250, batch loss=0.177616, epoch loss=1.418883
Batch=539, step=12540, lr=0.148000, batch loss=0.143903, epoch loss=1.562785
Batch=599, step=12600, lr=0.147500, batch loss=0.148448, epoch loss=1.711233
Batch=659, step=12660, lr=0.147250, batch loss=0.222557, epoch loss=1.933790
Batch=719, step=12720, lr=0.147250, batch loss=0.230652, epoch loss=2.164441
Batch=779, step=12780, lr=0.147000, batch loss=0.235778, epoch loss=2.400219
Batch=839, step=12840, lr=0.146750, batch loss=0.232887, epoch loss=2.633106
Batch=899, step=12900, lr=0.146250, batch loss=0.220776, epoch loss=2.853882
Batch=959, step=12960, lr=0.146000, batch loss=0.153183, epoch loss=3.007065
Batch=1019, step=13020, lr=0.145750, batch loss=0.261430, epoch loss=3.268496
Batch=1079, step=13080, lr=0.145500, batch loss=0.113545, epoch loss=3.382041
Batch=1139, step=13140, lr=0.145250, batch loss=0.156175, epoch loss=3.538216
Batch=1199, step=13200, lr=0.145250, batch loss=0.119197, epoch loss=3.657413
Epoch=10, step=13200, lr=0.145250, epoch loss=3.657413
Batch=59, step=13260, lr=0.145000, batch loss=0.143357, epoch loss=0.143357
Batch=119, step=13320, lr=0.144750, batch loss=0.119634, epoch loss=0.262991
Batch=179, step=13380, lr=0.144500, batch loss=0.128698, epoch loss=0.391689
Batch=239, step=13440, lr=0.144000, batch loss=0.187838, epoch loss=0.579527
Batch=299, step=13500, lr=0.144000, batch loss=0.114891, epoch loss=0.694417
Batch=359, step=13560, lr=0.143750, batch loss=0.161027, epoch loss=0.855444
Batch=419, step=13620, lr=0.143500, batch loss=0.161225, epoch loss=1.016669
Batch=479, step=13680, lr=0.143250, batch loss=0.146232, epoch loss=1.162901
Batch=539, step=13740, lr=0.143000, batch loss=0.118737, epoch loss=1.281637
Batch=599, step=13800, lr=0.142500, batch loss=0.120643, epoch loss=1.402281
Batch=659, step=13860, lr=0.142250, batch loss=0.176146, epoch loss=1.578427
Batch=719, step=13920, lr=0.142250, batch loss=0.178942, epoch loss=1.757369
Batch=779, step=13980, lr=0.142000, batch loss=0.196804, epoch loss=1.954172
Batch=839, step=14040, lr=0.141750, batch loss=0.188041, epoch loss=2.142213
Batch=899, step=14100, lr=0.141500, batch loss=0.157394, epoch loss=2.299607
Batch=959, step=14160, lr=0.141250, batch loss=0.136123, epoch loss=2.435731
Batch=1019, step=14220, lr=0.141000, batch loss=0.288862, epoch loss=2.724593
Batch=1079, step=14280, lr=0.140750, batch loss=0.084950, epoch loss=2.809544
Batch=1139, step=14340, lr=0.140250, batch loss=0.133412, epoch loss=2.942956
Batch=1199, step=14400, lr=0.140000, batch loss=0.095241, epoch loss=3.038197
Epoch=11, step=14400, lr=0.140000, epoch loss=3.038197
Batch=59, step=14460, lr=0.139750, batch loss=0.115037, epoch loss=0.115037
Batch=119, step=14520, lr=0.139500, batch loss=0.108232, epoch loss=0.223269
Batch=179, step=14580, lr=0.139500, batch loss=0.105677, epoch loss=0.328946
Batch=239, step=14640, lr=0.139250, batch loss=0.139482, epoch loss=0.468428
Batch=299, step=14700, lr=0.138750, batch loss=0.082672, epoch loss=0.551100
Batch=359, step=14760, lr=0.138750, batch loss=0.129524, epoch loss=0.680624
Batch=419, step=14820, lr=0.138250, batch loss=0.140005, epoch loss=0.820629
Batch=479, step=14880, lr=0.138250, batch loss=0.091670, epoch loss=0.912299
Batch=539, step=14940, lr=0.138000, batch loss=0.089208, epoch loss=1.001508
Batch=599, step=15000, lr=0.137750, batch loss=0.085526, epoch loss=1.087034
Batch=659, step=15060, lr=0.137500, batch loss=0.137779, epoch loss=1.224813
Batch=719, step=15120, lr=0.137250, batch loss=0.174011, epoch loss=1.398823
Batch=779, step=15180, lr=0.137000, batch loss=0.324749, epoch loss=1.723573
Batch=839, step=15240, lr=0.136750, batch loss=0.135903, epoch loss=1.859476
Batch=899, step=15300, lr=0.136500, batch loss=0.111476, epoch loss=1.970952
Batch=959, step=15360, lr=0.136250, batch loss=0.101452, epoch loss=2.072404
Batch=1019, step=15420, lr=0.136000, batch loss=0.165397, epoch loss=2.237801
Batch=1079, step=15480, lr=0.135750, batch loss=0.041442, epoch loss=2.279242
Batch=1139, step=15540, lr=0.135250, batch loss=0.093866, epoch loss=2.373108
Batch=1199, step=15600, lr=0.135250, batch loss=0.059561, epoch loss=2.432669
Epoch=12, step=15600, lr=0.135250, epoch loss=2.432669
Batch=59, step=15660, lr=0.135000, batch loss=0.076747, epoch loss=0.076747
Batch=119, step=15720, lr=0.134750, batch loss=0.106001, epoch loss=0.182748
Batch=179, step=15780, lr=0.134500, batch loss=0.088408, epoch loss=0.271156
Batch=239, step=15840, lr=0.134250, batch loss=0.093964, epoch loss=0.365120
Batch=299, step=15900, lr=0.133750, batch loss=0.036018, epoch loss=0.401138
Batch=359, step=15960, lr=0.133750, batch loss=0.073311, epoch loss=0.474448
Batch=419, step=16020, lr=0.133500, batch loss=0.076718, epoch loss=0.551166
Batch=479, step=16080, lr=0.133250, batch loss=0.046040, epoch loss=0.597206
Batch=539, step=16140, lr=0.133000, batch loss=0.049489, epoch loss=0.646695
Batch=599, step=16200, lr=0.132750, batch loss=0.077777, epoch loss=0.724472
Batch=659, step=16260, lr=0.132250, batch loss=0.235213, epoch loss=0.959685
Batch=719, step=16320, lr=0.132250, batch loss=0.072938, epoch loss=1.032623
Batch=779, step=16380, lr=0.132000, batch loss=0.078215, epoch loss=1.110838
Batch=839, step=16440, lr=0.131750, batch loss=0.124480, epoch loss=1.235319
Batch=899, step=16500, lr=0.131500, batch loss=0.150216, epoch loss=1.385534
Batch=959, step=16560, lr=0.131250, batch loss=0.048718, epoch loss=1.434252
Batch=1019, step=16620, lr=0.131000, batch loss=0.131577, epoch loss=1.565829
Batch=1079, step=16680, lr=0.130750, batch loss=0.021536, epoch loss=1.587365
Batch=1139, step=16740, lr=0.130500, batch loss=0.046382, epoch loss=1.633748
Batch=1199, step=16800, lr=0.130250, batch loss=0.026382, epoch loss=1.660130
Epoch=13, step=16800, lr=0.130250, epoch loss=1.660130
Batch=59, step=16860, lr=0.130000, batch loss=0.038255, epoch loss=0.038255
Batch=119, step=16920, lr=0.129750, batch loss=0.065791, epoch loss=0.104046
Batch=179, step=16980, lr=0.129500, batch loss=0.053167, epoch loss=0.157213
Batch=239, step=17040, lr=0.129250, batch loss=0.063764, epoch loss=0.220977
Batch=299, step=17100, lr=0.129000, batch loss=0.029326, epoch loss=0.250304
Batch=359, step=17160, lr=0.128750, batch loss=0.047415, epoch loss=0.297718
Batch=419, step=17220, lr=0.128500, batch loss=0.048657, epoch loss=0.346376
Batch=479, step=17280, lr=0.128250, batch loss=0.028885, epoch loss=0.375261
Batch=539, step=17340, lr=0.128000, batch loss=0.057397, epoch loss=0.432657
Batch=599, step=17400, lr=0.127750, batch loss=0.035436, epoch loss=0.468093
Batch=659, step=17460, lr=0.127500, batch loss=0.048247, epoch loss=0.516341
Batch=719, step=17520, lr=0.127250, batch loss=0.044969, epoch loss=0.561310
Batch=779, step=17580, lr=0.127000, batch loss=0.078956, epoch loss=0.640265
Batch=839, step=17640, lr=0.126750, batch loss=0.175941, epoch loss=0.816206
Batch=899, step=17700, lr=0.126500, batch loss=0.059909, epoch loss=0.876115
Batch=959, step=17760, lr=0.126250, batch loss=0.021326, epoch loss=0.897441
Batch=1019, step=17820, lr=0.126000, batch loss=0.032484, epoch loss=0.929926
Batch=1079, step=17880, lr=0.125750, batch loss=0.012434, epoch loss=0.942359
Batch=1139, step=17940, lr=0.125500, batch loss=0.033607, epoch loss=0.975967
Batch=1199, step=18000, lr=0.125250, batch loss=0.014450, epoch loss=0.990417
Epoch=14, step=18000, lr=0.125250, epoch loss=0.990417
Batch=59, step=18060, lr=0.125000, batch loss=0.012182, epoch loss=0.012182
Batch=119, step=18120, lr=0.124750, batch loss=0.020520, epoch loss=0.032702
Batch=179, step=18180, lr=0.124500, batch loss=0.046749, epoch loss=0.079451
Batch=239, step=18240, lr=0.124250, batch loss=0.036405, epoch loss=0.115856
Batch=299, step=18300, lr=0.124000, batch loss=0.011546, epoch loss=0.127402
Batch=359, step=18360, lr=0.123750, batch loss=0.024256, epoch loss=0.151658
Batch=419, step=18420, lr=0.123500, batch loss=0.031178, epoch loss=0.182836
Batch=479, step=18480, lr=0.123250, batch loss=0.033495, epoch loss=0.216331
Batch=539, step=18540, lr=0.123000, batch loss=0.055238, epoch loss=0.271569
Batch=599, step=18600, lr=0.122750, batch loss=0.027328, epoch loss=0.298897
Batch=659, step=18660, lr=0.122500, batch loss=0.032432, epoch loss=0.331329
Batch=719, step=18720, lr=0.122250, batch loss=0.041459, epoch loss=0.372787
Batch=779, step=18780, lr=0.122000, batch loss=0.103310, epoch loss=0.476098
Batch=839, step=18840, lr=0.121750, batch loss=0.056842, epoch loss=0.532940
Batch=899, step=18900, lr=0.121500, batch loss=0.057731, epoch loss=0.590671
Batch=959, step=18960, lr=0.121250, batch loss=0.013968, epoch loss=0.604639
Batch=1019, step=19020, lr=0.121000, batch loss=0.025534, epoch loss=0.630173
Batch=1079, step=19080, lr=0.120750, batch loss=0.014646, epoch loss=0.644819
Batch=1139, step=19140, lr=0.120500, batch loss=0.030642, epoch loss=0.675461
Batch=1199, step=19200, lr=0.120250, batch loss=0.012625, epoch loss=0.688086
Epoch=15, step=19200, lr=0.120250, epoch loss=0.688086
Batch=59, step=19260, lr=0.120000, batch loss=0.011173, epoch loss=0.011173
Batch=119, step=19320, lr=0.119750, batch loss=0.022789, epoch loss=0.033962
Batch=179, step=19380, lr=0.119500, batch loss=0.051211, epoch loss=0.085174
Batch=239, step=19440, lr=0.119250, batch loss=0.023436, epoch loss=0.108610
Batch=299, step=19500, lr=0.119000, batch loss=0.015751, epoch loss=0.124360
Batch=359, step=19560, lr=0.118750, batch loss=0.033271, epoch loss=0.157631
Batch=419, step=19620, lr=0.118500, batch loss=0.020430, epoch loss=0.178061
Batch=479, step=19680, lr=0.118250, batch loss=0.007600, epoch loss=0.185661
Batch=539, step=19740, lr=0.118000, batch loss=0.018190, epoch loss=0.203851
Batch=599, step=19800, lr=0.117750, batch loss=0.021967, epoch loss=0.225818
Batch=659, step=19860, lr=0.117500, batch loss=0.019206, epoch loss=0.245024
Batch=719, step=19920, lr=0.117000, batch loss=0.035601, epoch loss=0.280625
Batch=779, step=19980, lr=0.116750, batch loss=0.080348, epoch loss=0.360973
Batch=839, step=20040, lr=0.116500, batch loss=0.031029, epoch loss=0.392003
Batch=899, step=20100, lr=0.116250, batch loss=0.030745, epoch loss=0.422748
Batch=959, step=20160, lr=0.116250, batch loss=0.011197, epoch loss=0.433944
Batch=1019, step=20220, lr=0.116000, batch loss=0.017048, epoch loss=0.450992
Batch=1079, step=20280, lr=0.115500, batch loss=0.003144, epoch loss=0.454136
Batch=1139, step=20340, lr=0.115250, batch loss=0.015524, epoch loss=0.469660
Batch=1199, step=20400, lr=0.115250, batch loss=0.006082, epoch loss=0.475743
Epoch=16, step=20400, lr=0.115250, epoch loss=0.475743
Batch=59, step=20460, lr=0.114750, batch loss=0.002815, epoch loss=0.002815
Batch=119, step=20520, lr=0.114500, batch loss=0.008271, epoch loss=0.011086
Batch=179, step=20580, lr=0.114500, batch loss=0.015160, epoch loss=0.026246
Batch=239, step=20640, lr=0.114250, batch loss=0.015272, epoch loss=0.041518
Batch=299, step=20700, lr=0.114000, batch loss=0.004737, epoch loss=0.046255
Batch=359, step=20760, lr=0.113750, batch loss=0.014061, epoch loss=0.060317
Batch=419, step=20820, lr=0.113500, batch loss=0.014475, epoch loss=0.074791
Batch=479, step=20880, lr=0.113000, batch loss=0.004113, epoch loss=0.078904
Batch=539, step=20940, lr=0.112750, batch loss=0.017444, epoch loss=0.096348
Batch=599, step=21000, lr=0.112500, batch loss=0.021024, epoch loss=0.117372
Batch=659, step=21060, lr=0.112500, batch loss=0.014083, epoch loss=0.131455
Batch=719, step=21120, lr=0.112000, batch loss=0.039837, epoch loss=0.171292
Batch=779, step=21180, lr=0.112000, batch loss=0.068729, epoch loss=0.240021
Batch=839, step=21240, lr=0.111750, batch loss=0.025483, epoch loss=0.265504
Batch=899, step=21300, lr=0.111500, batch loss=0.035923, epoch loss=0.301427
Batch=959, step=21360, lr=0.111000, batch loss=0.010848, epoch loss=0.312275
Batch=1019, step=21420, lr=0.110750, batch loss=0.011481, epoch loss=0.323756
Batch=1079, step=21480, lr=0.110750, batch loss=0.000632, epoch loss=0.324387
Batch=1139, step=21540, lr=0.110250, batch loss=0.013682, epoch loss=0.338069
Batch=1199, step=21600, lr=0.110250, batch loss=0.005314, epoch loss=0.343383
Epoch=17, step=21600, lr=0.110250, epoch loss=0.343383
Batch=59, step=21660, lr=0.110000, batch loss=0.002613, epoch loss=0.002613
Batch=119, step=21720, lr=0.109750, batch loss=0.006638, epoch loss=0.009251
Batch=179, step=21780, lr=0.109500, batch loss=0.012669, epoch loss=0.021920
Batch=239, step=21840, lr=0.109250, batch loss=0.009562, epoch loss=0.031482
Batch=299, step=21900, lr=0.109000, batch loss=0.002279, epoch loss=0.033761
Batch=359, step=21960, lr=0.108750, batch loss=0.016801, epoch loss=0.050562
Batch=419, step=22020, lr=0.108500, batch loss=0.012193, epoch loss=0.062755
Batch=479, step=22080, lr=0.108250, batch loss=0.002761, epoch loss=0.065517
Batch=539, step=22140, lr=0.108000, batch loss=0.018258, epoch loss=0.083774
Batch=599, step=22200, lr=0.107750, batch loss=0.017042, epoch loss=0.100817
Batch=659, step=22260, lr=0.107500, batch loss=0.013921, epoch loss=0.114738
Batch=719, step=22320, lr=0.107250, batch loss=0.026199, epoch loss=0.140937
Batch=779, step=22380, lr=0.107000, batch loss=0.041619, epoch loss=0.182555
Batch=839, step=22440, lr=0.106750, batch loss=0.021396, epoch loss=0.203952
Batch=899, step=22500, lr=0.106500, batch loss=0.021254, epoch loss=0.225206
Batch=959, step=22560, lr=0.106250, batch loss=0.010477, epoch loss=0.235682
Batch=1019, step=22620, lr=0.106000, batch loss=0.009570, epoch loss=0.245253
Batch=1079, step=22680, lr=0.105750, batch loss=0.000518, epoch loss=0.245771
Batch=1139, step=22740, lr=0.105500, batch loss=0.011336, epoch loss=0.257107
Batch=1199, step=22800, lr=0.105250, batch loss=0.004220, epoch loss=0.261327
Epoch=18, step=22800, lr=0.105250, epoch loss=0.261327
Batch=59, step=22860, lr=0.104750, batch loss=0.001435, epoch loss=0.001435
Batch=119, step=22920, lr=0.104750, batch loss=0.005450, epoch loss=0.006885
Batch=179, step=22980, lr=0.104500, batch loss=0.010680, epoch loss=0.017565
Batch=239, step=23040, lr=0.104250, batch loss=0.009057, epoch loss=0.026622
Batch=299, step=23100, lr=0.104000, batch loss=0.007918, epoch loss=0.034540
Batch=359, step=23160, lr=0.103750, batch loss=0.011337, epoch loss=0.045878
Batch=419, step=23220, lr=0.103500, batch loss=0.010065, epoch loss=0.055943
Batch=479, step=23280, lr=0.103250, batch loss=0.002426, epoch loss=0.058369
Batch=539, step=23340, lr=0.103000, batch loss=0.017299, epoch loss=0.075669
Batch=599, step=23400, lr=0.102750, batch loss=0.013442, epoch loss=0.089111
Batch=659, step=23460, lr=0.102500, batch loss=0.011494, epoch loss=0.100605
Batch=719, step=23520, lr=0.102000, batch loss=0.009376, epoch loss=0.109981
Batch=779, step=23580, lr=0.102000, batch loss=0.010281, epoch loss=0.120262
Batch=839, step=23640, lr=0.101500, batch loss=0.018320, epoch loss=0.138582
Batch=899, step=23700, lr=0.101500, batch loss=0.020185, epoch loss=0.158768
Batch=959, step=23760, lr=0.101250, batch loss=0.008518, epoch loss=0.167286
Batch=1019, step=23820, lr=0.100750, batch loss=0.009621, epoch loss=0.176907
Batch=1079, step=23880, lr=0.100500, batch loss=0.000825, epoch loss=0.177731
Batch=1139, step=23940, lr=0.100250, batch loss=0.008460, epoch loss=0.186191
Batch=1199, step=24000, lr=0.100250, batch loss=0.004220, epoch loss=0.190412
Epoch=19, step=24000, lr=0.100250, epoch loss=0.190412


Half-moons scatterplot and decision boundary:
┌────────────────────────────────────────────────────────────────────────────────────────────────────┐
│********************************#*******************************************************************│
│**********************#*#*#######*###*#####*********************************************************│
│**********************#########################*****************************************************│
│*****************#**########*######*###########*###*************************************************│
│***************#################*###################************************************************│
│************######*#################*#################**********************************************│
│**********#*#####*########*#**************##*#########*#********************************************│
│***********########*##*#******************#*****##########******************************************│
│***********###########*************************############****************************************.│
│********######*####*********************************###*###*#***********************************....│
│*******######**##*************....*****************#*######*#********************************.......│
│*******##*##**##**********...........***************########*##****************************.........│
│*****#######************.......%...%%...***************#########**************************........%.│
│******######***********.........%.........**************##*#####************************......%.%.%.│
│***#########**********.........%%%.%%......*************#*#######**********************......%.%%%%.│
│****#######**********..........%%%%.........************#########*********************.......%%.%%.%│
│**#######************..........%%%%%%%........*************###*###******************.........%%%%%%.│
│*##*####************...........%%%%%%%.........***********########*****************..........%%%%%%.│
│*#######************...........%%%%%%%..........************#######***************...........%%%%%%.│
│*##*####***********............%%.%%%%%...........***********####****************...........%%%%%%%.│
│*#####*#***********.............%%%%%%%............**********##*###************..............%%%%%..│
│#######***********.............%.%%%%%%.............*********#######**********.............%%%%.%%..│
│#####*#**********...............%%%%%%%...............*******#######*********..............%%%%%%%%.│
│###*#*#**********...............%%%%%%%%%..............*******######*******................%%%%%%...│
│#######*********.................%%%%%%%%...............*****###*###******................%%%%%%....│
│######**********.................%%%%%%%%%................***#*###*******...............%%%%%%%%%...│
│*#*##*#********...................%%%%%%%%%%...............***######****.................%%%%%%.....│
│#****##********....................%%%%%%%%%................***###*#**................%.%%%%%%%.....│
│**************.....................%.%%%%%%...................*******..................%.%%.%%......│
│**************.......................%..%%%%%%%................*****..............%.%%%%%%%%%.......│
│*************.........................%.%%%.%%%%.................*................%%%%%%%.%.%.......│
│************............................%..%%%%..%................................%%%%%%%%..........│
│************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........│
│***********..............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........│
│***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............│
│**********...................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............│
│**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................│
│*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................│
│********.............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................│
│********................................................%...%%%%.%%.%%%%..%.........................│
└────────────────────────────────────────────────────────────────────────────────────────────────────┘
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @install @check @runtest && rm -rf _build" failed with exit status 1
2025-05-22 20:06.28: Job failed: Failed: Build failed