debian-12-5.3+flambda_opam-2.3


Logs

2025-05-22 20:00.59: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (39741884b740497ac10065d5e464e6c70f9151f4) (linux-x86_64:debian-12-5.3+flambda_opam-2.3)
Base: ocaml/opam:debian-12-ocaml-5.3-flambda@sha256:102e61eadd02c8453b955d72f3a495806f17f5de549d3643cc924c320fabd8f4
Opam project build


To reproduce locally:


git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 39741884
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-5.3-flambda@sha256:102e61eadd02c8453b955d72f3a495806f17f5de549d3643cc924c320fabd8f4
# debian-12-5.3+flambda_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-syntax-shims.1.0.0 ocaml-variants.5.3.0+options ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build


END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
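(Note, not part of the captured log: a hedged convenience for the reproduction above. Tagging the image and teeing the output make it easier to compare a local run against this log; the tag name "ocannl-repro" and the log file name are arbitrary choices.)

docker build -t ocannl-repro . 2>&1 | tee local-build.log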


2025-05-22 20:00.59: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-5.3-flambda@sha256:102e61eadd02c8453b955d72f3a495806f17f5de549d3643cc924c320fabd8f4-debian-12-5.3+flambda_opam-2.3-5221748b47df38784e4032b8eb1be8cf"
2025-05-22 20:00.59: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-5.3-flambda@sha256:102e61eadd02c8453b955d72f3a495806f17f5de549d3643cc924c320fabd8f4)
(comment debian-12-5.3+flambda_opam-2.3)
(user (uid 1000) (gid 1000))
(env CLICOLOR_FORCE 1)
(env OPAMCOLOR always)
(workdir /src)
(run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
(run (shell "opam init --reinit -ni"))
(run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
(workdir /src)
(run (shell "sudo chown opam /src"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u"))
(copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
(run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
(run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
(env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-syntax-shims.1.0.0 ocaml-variants.5.3.0+options ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
(env CI true)
(env OCAMLCI true)
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
(copy (src .) (dst /src))
(run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)
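(Note, not part of the original log: the (cache (opam-archives ...)) directives in the OBuilder spec above have no counterpart in the plain Dockerfile from the reproduction block, which simply skips the download cache. A rough local approximation, assuming BuildKit is available, is a cache mount on the corresponding RUN lines; the uid/gid values below are an assumption matching the "USER 1000:1000" line.)

# syntax=docker/dockerfile:1
RUN --mount=type=cache,target=/home/opam/.opam/download-cache,uid=1000,gid=1000 \
    opam install $DEPS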


2025-05-22 20:00.59: Waiting for resource in pool OCluster
2025-05-22 20:00.59: Waiting for worker…
2025-05-22 20:01.12: Got resource from pool OCluster
Building on toxis.caelum.ci.dev
All commits already cached
HEAD is now at 39741884 Untested: convert remaining uses of Format except where printing Sexp values


(from ocaml/opam:debian-12-ocaml-5.3-flambda@sha256:102e61eadd02c8453b955d72f3a495806f17f5de549d3643cc924c320fabd8f4)
2025-05-22 20:01.17 ---> using "21ca48779d7c8771accdf60eb888012ef48685ba9d539046f9f2e6ce2db9408d" from cache


/: (comment debian-12-5.3+flambda_opam-2.3)


/: (user (uid 1000) (gid 1000))


/: (env CLICOLOR_FORCE 1)


/: (env OPAMCOLOR always)


/: (workdir /src)


/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-05-22 20:01.17 ---> using "c3f3aa93c3355a64826565104635ed2c75e43663faf3bb1239283e2d0babab60" from cache


/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.


This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.


Continue? [y/n] y
[NOTE] The 'jobs' option was reset, its value was 39 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using:
opam option jobs=39 --global
Format upgrade done.
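(Note, not part of the original log: when reproducing this step interactively, the backup that opam suggests before the irreversible 2.0 to 2.2 layout upgrade can be taken with a plain copy before answering "y", e.g.:)

cp -a ~/.opam ~/.opam-2.0-backup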


<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
2025-05-22 20:01.17 ---> using "292270fbfb1491d8dafae52ea17d8b381a33866b2212919c484f3fdce286b697" from cache


/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-05-22 20:01.17 ---> using "3a794d0297d6d3662e91621faf2070f604c02f28ce3386d50957daf0a3b38339" from cache


/src: (workdir /src)


/src: (run (shell "sudo chown opam /src"))
2025-05-22 20:01.17 ---> using "ec8f95289b8846ee13cf13cb3aea5f3aff51a6aaee7942d4311969425d84ec7c" from cache


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
* branch                  master     -> FETCH_HEAD
35eb2f107a..2df846cb67  master     -> origin/master
2df846cb67 Merge pull request #27910 from maiste/release-dune-3.19.0


<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] synchronised from git+file:///home/opam/opam-repository


Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 20:01.17 ---> using "6f1ee2c0f5dfc21336f120bd54934d4d2d62fe2e6c274ae35a9d37cd0d991d98" from cache


/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
2025-05-22 20:01.17 ---> saved as "084a8e7e59581c591ee9b6a43c489d22ca4db919bade071f24b069436f27f375"


/src: (run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
[neural_nets_lib.dev] synchronised (file:///src)
neural_nets_lib is now pinned to file:///src (version dev)
[arrayjit.dev] synchronised (file:///src)
arrayjit is now pinned to file:///src (version dev)
2025-05-22 20:01.24 ---> saved as "bfc575b4ef73ba744844b72fee301ed2043b6f85d2f2a53a761788f4b5d3cb46"


/src: (run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
2025-05-22 20:01.24 ---> saved as "0f6915cbf4b3fd363df4c6ad1e5ca66fc09b6a196977b8d60d10760efabfc457"


/src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-syntax-shims.1.0.0 ocaml-variants.5.3.0+options ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")


/src: (env CI true)


/src: (env OCAMLCI true)


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
+ /usr/bin/sudo "apt-get" "update"
- Get:1 http://deb.debian.org/debian bookworm InRelease [151 kB]
- Get:2 http://deb.debian.org/debian bookworm-updates InRelease [55.4 kB]
- Get:3 http://deb.debian.org/debian-security bookworm-security InRelease [48.0 kB]
- Get:4 http://deb.debian.org/debian bookworm/main amd64 Packages [8793 kB]
- Get:5 http://deb.debian.org/debian-security bookworm-security/main amd64 Packages [259 kB]
- Fetched 9306 kB in 3s (3627 kB/s)
- Reading package lists...
- 


<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[arrayjit.dev] synchronised (file:///src)
[neural_nets_lib.dev] synchronised (file:///src)


[NOTE] Package ocaml-variants is already installed (current version is 5.3.0+options).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).


The following system packages will first need to be installed:
libffi-dev pkg-config


<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>


+ /usr/bin/sudo "apt-get" "install" "-qq" "-yy" "libffi-dev" "pkg-config"
- debconf: delaying package configuration, since apt-utils is not installed
- Selecting previously unselected package libffi-dev:amd64.
- (Reading database ... 18778 files and directories currently installed.)
- Preparing to unpack .../libffi-dev_3.4.4-1_amd64.deb ...
- Unpacking libffi-dev:amd64 (3.4.4-1) ...
- Selecting previously unselected package libpkgconf3:amd64.
- Preparing to unpack .../libpkgconf3_1.8.1-1_amd64.deb ...
- Unpacking libpkgconf3:amd64 (1.8.1-1) ...
- Selecting previously unselected package pkgconf-bin.
- Preparing to unpack .../pkgconf-bin_1.8.1-1_amd64.deb ...
- Unpacking pkgconf-bin (1.8.1-1) ...
- Selecting previously unselected package pkgconf:amd64.
- Preparing to unpack .../pkgconf_1.8.1-1_amd64.deb ...
- Unpacking pkgconf:amd64 (1.8.1-1) ...
- Selecting previously unselected package pkg-config:amd64.
- Preparing to unpack .../pkg-config_1.8.1-1_amd64.deb ...
- Unpacking pkg-config:amd64 (1.8.1-1) ...
- Setting up libffi-dev:amd64 (3.4.4-1) ...
- Setting up libpkgconf3:amd64 (1.8.1-1) ...
- Setting up pkgconf-bin (1.8.1-1) ...
- Setting up pkgconf:amd64 (1.8.1-1) ...
- Setting up pkg-config:amd64 (1.8.1-1) ...
- Processing triggers for libc-bin (2.36-9+deb12u10) ...
2025-05-22 20:01.50 ---> saved as "8c89f0144f2ff5cc8be816175fea38540f0aaed6835082bbb36fa26796b8d1f8"


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
[NOTE] Package ocaml-variants is already installed (current version is 5.3.0+options).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following actions will be performed:
=== install 75 packages
- install angstrom                0.16.1
- install astring                 0.8.5
- install backoff                 0.1.1
- install base                    v0.17.2
- install bigarray-compat         1.1.0
- install bigstringaf             0.10.0
- install camlp-streams           5.0.1
- install cmdliner                1.3.0
- install conf-libffi             2.0.0
- install conf-pkg-config         4
- install cppo                    1.8.0
- install csexp                   1.5.2
- install ctypes                  0.23.0
- install ctypes-foreign          0.23.0
- install dune                    3.19.0
- install dune-configurator       3.19.0
- install fieldslib               v0.17.0
- install fmt                     0.10.0
- install integers                0.7.0
- install jane-street-headers     v0.17.0
- install jst-config              v0.17.0
- install logs                    0.8.0
- install mdx                     2.5.0
- install mtime                   2.1.0
- install multicore-magic         2.3.1
- install num                     1.5-1
- install ocaml-compiler-libs     v0.17.0
- install ocaml-syntax-shims      1.0.0
- install ocaml-version           4.0.0
- install ocaml_intrinsics_kernel v0.17.1
- install ocamlbuild              0.16.1
- install ocamlfind               1.9.8
- install parsexp                 v0.17.0
- install pprint                  20230830
- install ppx_assert              v0.17.0
- install ppx_base                v0.17.0
- install ppx_cold                v0.17.0
- install ppx_compare             v0.17.0
- install ppx_derivers            1.2.1
- install ppx_deriving            6.0.3
- install ppx_enumerate           v0.17.0
- install ppx_expect              v0.17.2
- install ppx_fields_conv         v0.17.0
- install ppx_globalize           v0.17.0
- install ppx_hash                v0.17.0
- install ppx_here                v0.17.0
- install ppx_inline_test         v0.17.0
- install ppx_minidebug           2.2.0
- install ppx_optcomp             v0.17.0
- install ppx_sexp_conv           v0.17.0
- install ppx_string              v0.17.0
- install ppx_variants_conv       v0.17.0
- install ppxlib                  0.35.0
- install ppxlib_jane             v0.17.2
- install printbox                0.12
- install printbox-ext-plot       0.12
- install printbox-html           0.12
- install printbox-md             0.12
- install printbox-text           0.12
- install ptime                   1.2.0
- install re                      1.12.0
- install result                  1.5
- install saturn_lockfree         0.5.0
- install seq                     base
- install sexplib                 v0.17.0
- install sexplib0                v0.17.0
- install stdio                   v0.17.0
- install stdlib-shims            0.3.0
- install thread-local-storage    0.2
- install time_now                v0.17.0
- install topkg                   1.0.8
- install tyxml                   4.6.0
- install uucp                    16.0.0
- install uutf                    1.0.4
- install variantslib             v0.17.0


<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved backoff.0.1.1  (cached)
-> retrieved astring.0.8.5  (cached)
-> retrieved angstrom.0.16.1  (cached)
-> retrieved bigarray-compat.1.1.0  (cached)
-> retrieved base.v0.17.2  (cached)
-> retrieved bigstringaf.0.10.0  (cached)
-> retrieved camlp-streams.5.0.1  (cached)
-> installed conf-pkg-config.4
-> retrieved cmdliner.1.3.0  (cached)
-> retrieved cppo.1.8.0  (cached)
-> retrieved csexp.1.5.2  (cached)
-> installed conf-libffi.2.0.0
-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0  (cached)
-> retrieved fieldslib.v0.17.0  (cached)
-> retrieved fmt.0.10.0  (cached)
-> retrieved integers.0.7.0  (cached)
-> retrieved jane-street-headers.v0.17.0  (cached)
-> retrieved jst-config.v0.17.0  (cached)
-> retrieved logs.0.8.0  (cached)
-> retrieved mtime.2.1.0  (cached)
-> retrieved mdx.2.5.0  (cached)
-> retrieved multicore-magic.2.3.1  (cached)
-> retrieved num.1.5-1  (cached)
-> retrieved ocaml-compiler-libs.v0.17.0  (cached)
-> retrieved ocaml-syntax-shims.1.0.0  (cached)
-> retrieved ocaml-version.4.0.0  (cached)
-> retrieved ocaml_intrinsics_kernel.v0.17.1  (cached)
-> retrieved ocamlbuild.0.16.1  (cached)
-> retrieved ocamlfind.1.9.8  (cached)
-> retrieved parsexp.v0.17.0  (cached)
-> retrieved pprint.20230830  (cached)
-> retrieved ppx_assert.v0.17.0  (cached)
-> retrieved ppx_base.v0.17.0  (cached)
-> retrieved ppx_cold.v0.17.0  (cached)
-> retrieved ppx_compare.v0.17.0  (cached)
-> retrieved ppx_derivers.1.2.1  (cached)
-> retrieved ppx_enumerate.v0.17.0  (cached)
-> retrieved ppx_deriving.6.0.3  (cached)
-> retrieved ppx_expect.v0.17.2  (cached)
-> retrieved ppx_fields_conv.v0.17.0  (cached)
-> retrieved ppx_globalize.v0.17.0  (cached)
-> retrieved ppx_here.v0.17.0  (cached)
-> retrieved ppx_hash.v0.17.0  (cached)
-> retrieved ppx_inline_test.v0.17.0  (cached)
-> retrieved ppx_optcomp.v0.17.0  (cached)
-> retrieved ppx_sexp_conv.v0.17.0  (cached)
-> retrieved ppx_string.v0.17.0  (cached)
-> retrieved ppx_variants_conv.v0.17.0  (cached)
-> retrieved ppx_minidebug.2.2.0  (cached)
-> retrieved ppxlib_jane.v0.17.2  (cached)
-> retrieved dune.3.19.0, dune-configurator.3.19.0  (cached)
-> installed num.1.5-1
-> retrieved ppxlib.0.35.0  (cached)
-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12  (cached)
-> retrieved ptime.1.2.0  (cached)
-> retrieved re.1.12.0  (cached)
-> retrieved seq.base  (cached)
-> retrieved result.1.5  (cached)
-> retrieved sexplib.v0.17.0  (cached)
-> retrieved saturn_lockfree.0.5.0  (cached)
-> installed cmdliner.1.3.0
-> installed seq.base
-> retrieved sexplib0.v0.17.0  (cached)
-> retrieved stdio.v0.17.0  (cached)
-> retrieved stdlib-shims.0.3.0  (cached)
-> retrieved thread-local-storage.0.2  (cached)
-> retrieved time_now.v0.17.0  (cached)
-> retrieved topkg.1.0.8  (cached)
-> retrieved tyxml.4.6.0  (cached)
-> retrieved uutf.1.0.4  (cached)
-> retrieved variantslib.v0.17.0  (cached)
-> retrieved uucp.16.0.0  (cached)
-> installed ocamlfind.1.9.8
-> installed ocamlbuild.0.16.1
-> installed topkg.1.0.8
-> installed mtime.2.1.0
-> installed uutf.1.0.4
-> installed ptime.1.2.0
-> installed fmt.0.10.0
-> installed astring.0.8.5
-> installed logs.0.8.0
-> installed dune.3.19.0
-> installed jane-street-headers.v0.17.0
-> installed ppx_derivers.1.2.1
-> installed ocaml-version.4.0.0
-> installed csexp.1.5.2
-> installed backoff.0.1.1
-> installed bigarray-compat.1.1.0
-> installed camlp-streams.5.0.1
-> installed multicore-magic.2.3.1
-> installed ocaml_intrinsics_kernel.v0.17.1
-> installed pprint.20230830
-> installed printbox.0.12
-> installed result.1.5
-> installed sexplib0.v0.17.0
-> installed stdlib-shims.0.3.0
-> installed thread-local-storage.0.2
-> installed cppo.1.8.0
-> installed ocaml-syntax-shims.1.0.0
-> installed ocaml-compiler-libs.v0.17.0
-> installed re.1.12.0
-> installed integers.0.7.0
-> installed saturn_lockfree.0.5.0
-> installed parsexp.v0.17.0
-> installed dune-configurator.3.19.0
-> installed bigstringaf.0.10.0
-> installed uucp.16.0.0
-> installed sexplib.v0.17.0
-> installed angstrom.0.16.1
-> installed printbox-text.0.12
-> installed printbox-md.0.12
-> installed mdx.2.5.0
-> installed tyxml.4.6.0
-> installed printbox-html.0.12
-> installed printbox-ext-plot.0.12
-> installed ctypes.0.23.0
-> installed ctypes-foreign.0.23.0
-> installed base.v0.17.2
-> installed variantslib.v0.17.0
-> installed fieldslib.v0.17.0
-> installed stdio.v0.17.0
-> installed ppxlib.0.35.0
-> installed ppxlib_jane.v0.17.2
-> installed ppx_optcomp.v0.17.0
-> installed ppx_cold.v0.17.0
-> installed ppx_here.v0.17.0
-> installed ppx_variants_conv.v0.17.0
-> installed ppx_fields_conv.v0.17.0
-> installed ppx_enumerate.v0.17.0
-> installed ppx_globalize.v0.17.0
-> installed ppx_deriving.6.0.3
-> installed ppx_compare.v0.17.0
-> installed ppx_sexp_conv.v0.17.0
-> installed ppx_hash.v0.17.0
-> installed ppx_assert.v0.17.0
-> installed ppx_minidebug.2.2.0
-> installed ppx_base.v0.17.0
-> installed jst-config.v0.17.0
-> installed time_now.v0.17.0
-> installed ppx_string.v0.17.0
-> installed ppx_inline_test.v0.17.0
-> installed ppx_expect.v0.17.2
Done.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 20:05.17 ---> saved as "b65c9d54f99ef608cdadff87d253d3d15651307a52646b07ef225308226bcb56"


/src: (copy (src .) (dst /src))
2025-05-22 20:05.19 ---> saved as "88a8211f0ab260f00b79733b0dffc249497856321348d940a0ee788c8c128402"


/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
(cd _build/default/test/config && ../../arrayjit/bin/read_config.exe --read=backend)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/config/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Wrote value of 'backend' to ocannl_backend.txt
(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test_ppx && ./test_ppx_op.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/de170ae2001296851a4a3886e3db499e/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/dune", lines 30-40, characters 0-281:
30 | (rule
31 |  (alias runtest)
32 |  (target
33 |   (dir log_files))
34 |  (action
35 |   (run
36 |    %{dep:micrograd_demo_logging.exe}
37 |    "--ocannl_debug_backend=text"
38 |    "--ocannl_log_file_stem=micrograd_demo_logging"
39 |    "--ocannl_log_main_domain_to_stdout=false"
40 |    "--ocannl_debug_log_to_stream_files=true")))
(cd _build/default/test && ./micrograd_demo_logging.exe --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_stream_files=true)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_cd_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Found true, commandline --ocannl_debug_log_to_stream_files=true
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Fatal error: exception File "src/printbox-text/PrintBox_text.ml", line 212, characters 6-12: Assertion failed
Raised at PrintBox_text.Output.Make_out.to_buf_aux_ in file "src/printbox-text/PrintBox_text.ml", line 212, characters 6-50
Called from PrintBox_text.Output.Make_out.render in file "src/printbox-text/PrintBox_text.ml" (inlined), line 242, characters 21-46
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 19-42
Called from Stdlib__Map.Make.fold in file "map.ml" (inlined), lines 325-329, characters 17-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml" (inlined), lines 325-329, characters 17-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml" (inlined), lines 325-329, characters 17-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml" (inlined), lines 325-329, characters 17-42
Called from PrintBox_text.Output.Make_out.render in file "src/printbox-text/PrintBox_text.ml", line 242, characters 14-64
Called from PrintBox_text.Output.to_chan in file "src/printbox-text/PrintBox_text.ml" (inlined), line 269, characters 39-70
Called from PrintBox_text.output in file "src/printbox-text/PrintBox_text.ml", line 851, characters 2-31
Called from Minidebug_runtime.PrintBox.output_box in file "minidebug_runtime.ml", line 1527, characters 19-59
Called from Minidebug_runtime.PrintBox.close_log_impl.close_tree in file "minidebug_runtime.ml", line 1572, characters 6-38
Called from Backends.Add_buffer_retrieval_and_syncing.sync_routine in file "arrayjit/lib/backends.ml", lines 144-172, characters 31-82
Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 454-455, characters 4-92
Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 441-455, characters 23-92
Called from Dune__exe__Micrograd_demo_logging in file "test/micrograd_demo_logging.ml", line 34, characters 13-77
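(Note, not part of the original log: a hedged sketch for iterating on just this failure inside the reproduction checkout, assuming the opam switch from the Dockerfile is active. It builds only the test executable and replays the command dune printed above.)

opam exec -- dune build test/micrograd_demo_logging.exe
(cd _build/default/test && ./micrograd_demo_logging.exe --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_stream_files=true)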
(cd _build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/micrograd_demo.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/micrograd_demo.ml _build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/micrograd_demo.ml.corrected
diff --git a/_build/default/test/micrograd_demo.ml b/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/micrograd_demo.ml.corrected
index 77e46c6..ab81526 100644
--- a/_build/default/test/micrograd_demo.ml
+++ b/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/micrograd_demo.ml.corrected
@@ -52,15 +52,14 @@ let%expect_test "Micrograd README basic example" =
│├┼───────┤       │
│││ -4.00 │       │
│└┴───────┘       │
-    └─────────────────┘
-    ┌────────────────────────┐
-                                                              │[0]: a shape 0:1  grad_a│
-                                                              │┌┬─────────┐            │
-                                                              │││axis 0   │            │
-                                                              │├┼─────────┤            │
-                                                              │││ 1.38e+2 │            │
-                                                              │└┴─────────┘            │
-                                                              └────────────────────────┘
+    └─────────────────┘┌────────────────────────┐
+    │[0]: a shape 0:1  grad_a│
+    │┌┬─────────┐            │
+    │││axis 0   │            │
+    │├┼─────────┤            │
+    │││ 1.38e+2 │            │
+    │└┴─────────┘            │
+    └────────────────────────┘
|}];
Tensor.print ~with_code:false ~with_grad:true `Default b;
[%expect
@@ -72,15 +71,14 @@ let%expect_test "Micrograd README basic example" =
│├┼──────┤        │
│││ 2.00 │        │
│└┴──────┘        │
-    └─────────────────┘
-    ┌────────────────────────┐
-                                                              │[2]: b shape 0:1  grad_b│
-                                                              │┌┬─────────┐            │
-                                                              │││axis 0   │            │
-                                                              │├┼─────────┤            │
-                                                              │││ 6.45e+2 │            │
-                                                              │└┴─────────┘            │
-                                                              └────────────────────────┘
+    └─────────────────┘┌────────────────────────┐
+    │[2]: b shape 0:1  grad_b│
+    │┌┬─────────┐            │
+    │││axis 0   │            │
+    │├┼─────────┤            │
+    │││ 6.45e+2 │            │
+    │└┴─────────┘            │
+    └────────────────────────┘
|}]


let%expect_test "Micrograd half-moons example" =
File "test/hello_world_op.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/hello_world_op.ml _build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/hello_world_op.ml.corrected
diff --git a/_build/default/test/hello_world_op.ml b/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/hello_world_op.ml.corrected
index ba9d7ef..6bfa309 100644
--- a/_build/default/test/hello_world_op.ml
+++ b/_build/.sandbox/e241391d469389a7da52306c1c22bcfb/default/test/hello_world_op.ml.corrected
@@ -102,36 +102,46 @@ let%expect_test "Print constant tensor" =
let%op hey = [ (1, 2, 3); (4, 5, 6) ] in
Train.forward_and_forget backend ctx hey;
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey;
-  [%expect {| [1.00, 2.00, 3.00; 4.00, 5.00, 6.00] |}];
+  [%expect {|
+    [0]: [  1.00 , 2.00 , 3.00  ;  4.00 , 5.00 , 6.00  ]_hey shape 1:3->0:2  [
+       1.00 , 2.00 , 3.00
+      ;  4.00 , 5.00 , 6.00
+    ]
+    |}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;
[%expect
{|
-    ┌─────────────────────────────────────────────────────────────┐
-    │[0]: [1.00, 2.00, 3.00; 4.00, 5.00, 6.00]_hey shape 1:3->0:2 │
-    │┌──────┬──────────────────┐                                  │
-    ││      │axis 1            │                                  │
-    │├──────┼──────────────────┤                                  │
-    ││axis 0│ 1.00  2.00  3.00 │                                  │
-    ││      │ 4.00  5.00  6.00 │                                  │
-    │└──────┴──────────────────┘                                  │
-    └─────────────────────────────────────────────────────────────┘
+    ┌────────────────────────────────────────────────────────────────────────┐
+    │[0]: [  1.00 , 2.00 , 3.00  ;  4.00 , 5.00 , 6.00  ]_hey shape 1:3->0:2 │
+    │┌──────┬──────────────────┐                                             │
+    ││      │axis 1            │                                             │
+    │├──────┼──────────────────┤                                             │
+    ││axis 0│ 1.00  2.00  3.00 │                                             │
+    ││      │ 4.00  5.00  6.00 │                                             │
+    │└──────┴──────────────────┘                                             │
+    └────────────────────────────────────────────────────────────────────────┘
|}];
let%op hoo = [| [ 1; 2; 3 ]; [ 4; 5; 6 ] |] in
Train.forward_and_forget backend ctx hoo;
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo;
-  [%expect {| [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|] |}];
+  [%expect {|
+    [1]: [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]_hoo shape 0:2|1:3  [|
+      [ 1.00 ; 2.00 ; 3.00 ]
+      ; [ 4.00 ; 5.00 ; 6.00 ]
+    |]
+    |}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo;
[%expect
{|
-    ┌──────────────────────────────────────────────────────────────────┐
-    │[1]: [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|]_hoo shape 0:2|1:3 │
-    │┌──────┬──────────────────┐                                       │
-    ││      │axis 1            │                                       │
-    │├──────┼──────────────────┤                                       │
-    ││axis 0│ 1.00  2.00  3.00 │                                       │
-    ││      │ 4.00  5.00  6.00 │                                       │
-    │└──────┴──────────────────┘                                       │
-    └──────────────────────────────────────────────────────────────────┘
+    ┌─────────────────────────────────────────────────────────────────────────────┐
+    │[1]: [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]_hoo shape 0:2|1:3 │
+    │┌──────┬──────────────────┐                                                  │
+    ││      │axis 1            │                                                  │
+    │├──────┼──────────────────┤                                                  │
+    ││axis 0│ 1.00  2.00  3.00 │                                                  │
+    ││      │ 4.00  5.00  6.00 │                                                  │
+    │└──────┴──────────────────┘                                                  │
+    └─────────────────────────────────────────────────────────────────────────────┘
|}];
let%op hey2 =
[
@@ -145,10 +155,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey2;
[%expect
{|
-    [(1.00, 2.00, 3.00), (4.00, 5.00, 6.00);
-      (7.00, 8.00, 9.00), (10.00, 11.00, 12.00);
-      (13.00, 14.00, 15.00), (16.00, 17.00, 18.00);
-      (19.00, 20.00, 21.00), (22.00, 23.00, 24.00)]
+    [2]: c4x2x3_hey2 shape 1:2,2:3->0:4  [
+       ( 1.00 , 2.00 , 3.00 ) , ( 4.00 , 5.00 , 6.00 )
+      ;  ( 7.00 , 8.00 , 9.00 ) , ( 10.00 , 11.00 , 12.00 )
+      ;  ( 13.00 , 14.00 , 15.00 ) , ( 16.00 , 17.00 , 18.00 )
+      ;  ( 19.00 , 20.00 , 21.00 ) , ( 22.00 , 23.00 , 24.00 )
+    ]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2;
[%expect
@@ -178,10 +190,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo2;
[%expect
{|
-    [|[[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]];
-      [[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]];
-      [[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]];
-      [[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]]|]
+    [3]: c4x2x3_hoo2 shape 0:4|1:2,2:3  [|
+      [ [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] ]
+      ; [ [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] ]
+      ; [ [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] ]
+      ; [ [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] ]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo2;
[%expect
@@ -209,10 +223,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo;
[%expect
{|
-    [|[|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|];
-      [|[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]|];
-      [|[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]|];
-      [|[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]|]|]
+    [4]: c4x2x3_heyhoo shape 0:4,1:2|2:3  [|
+      [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]
+      ; [| [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] |]
+      ; [| [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] |]
+      ; [| [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] |]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo;
[%expect
@@ -240,15 +256,24 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo2;
[%expect
{|
-    [|
-      [|[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];
-        [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]|];
-      [|[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];
-        [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]|];
-      [|[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];
-        [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]|];
-      [|[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];
-        [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]|]|]
+    [5]: c4x2x3x2_heyhoo2 shape 0:4,1:2|2:3,3:2  [|
+      [|
+        [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]
+        ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]
+      |]
+      ; [|
+        [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]
+        ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]
+      |]
+      ; [|
+        [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]
+        ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]
+      |]
+      ; [|
+        [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]
+        ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]
+      |]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo2;
[%expect
@@ -293,17 +318,28 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo3;
[%expect
{|
-    [|
+    [6]: c2x2x2x3x2_heyhoo3 shape 0:2,1:2|2:2,3:3,4:2  [|
[|
-        [[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];
-          [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]];
-        [[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];
-          [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]]|];
-      [|
-        [[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];
-          [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]];
-        [[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];
-          [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]]|]|]
+        [
+          [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]
+          ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]
+        ]
+        ; [
+          [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]
+          ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]
+        ]
+      |]
+      ; [|
+        [
+          [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]
+          ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]
+        ]
+        ; [
+          [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]
+          ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]
+        ]
+      |]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo3;
[%expect
@@ -353,17 +389,28 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo4;
[%expect
{|
-    [|
-      [
-        [[1.00, 31.00; 2.00, 32.00; 3.00, 33.00];
-          [4.00, 34.00; 5.00, 35.00; 6.00, 36.00]];
-        [[7.00, 37.00; 8.00, 38.00; 9.00, 39.00];
-          [10.00, 40.00; 11.00, 41.00; 12.00, 42.00]]];
+    [7]: c2x2x2x3x2_heyhoo4 shape 0:2|4:2->1:2,2:2,3:3  [|
[
-        [[13.00, 43.00; 14.00, 44.00; 15.00, 45.00];
-          [16.00, 46.00; 17.00, 47.00; 18.00, 48.00]];
-        [[19.00, 49.00; 20.00, 50.00; 21.00, 51.00];
-          [22.00, 52.00; 23.00, 53.00; 24.00, 54.00]]]|]
+        [
+          [  1.00 , 31.00  ;  2.00 , 32.00  ;  3.00 , 33.00  ]
+          ; [  4.00 , 34.00  ;  5.00 , 35.00  ;  6.00 , 36.00  ]
+        ]
+        ; [
+          [  7.00 , 37.00  ;  8.00 , 38.00  ;  9.00 , 39.00  ]
+          ; [  10.00 , 40.00  ;  11.00 , 41.00  ;  12.00 , 42.00  ]
+        ]
+      ]
+      ; [
+        [
+          [  13.00 , 43.00  ;  14.00 , 44.00  ;  15.00 , 45.00  ]
+          ; [  16.00 , 46.00  ;  17.00 , 47.00  ;  18.00 , 48.00  ]
+        ]
+        ; [
+          [  19.00 , 49.00  ;  20.00 , 50.00  ;  21.00 , 51.00  ]
+          ; [  22.00 , 52.00  ;  23.00 , 53.00  ;  24.00 , 54.00  ]
+        ]
+      ]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo4;
[%expect
@@ -462,8 +509,29 @@ let%expect_test "Big matrix" =
Tensor.print ~with_code:false ~with_grad:false `Inline zero_to_twenty;
[%expect
{|
-    [0.00; 1.00; 2.00; 3.00; 4.00; 5.00; 6.00; 7.00; 8.00; 9.00; 10.00; 11.00;
-      12.00; 13.00; 14.00; 15.00; 16.00; 17.00; 18.00; 19.00; 20.00]
+    [2]: 0...20 shape 0:21  [
+      0.00
+      ; 1.00
+      ; 2.00
+      ; 3.00
+      ; 4.00
+      ; 5.00
+      ; 6.00
+      ; 7.00
+      ; 8.00
+      ; 9.00
+      ; 10.00
+      ; 11.00
+      ; 12.00
+      ; 13.00
+      ; 14.00
+      ; 15.00
+      ; 16.00
+      ; 17.00
+      ; 18.00
+      ; 19.00
+      ; 20.00
+    ]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default zero_to_twenty;
[%expect
(cd _build/default/test && ./moons_demo_parallel_run.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
("Set log_level to" 1)
└─{orphaned from #2}
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Properties of devices:
(multicore_devices
(device ((device_name CPU) (device_ordinal 0) (num_domains 72))))
@!Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Not found, using default false
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout
Not found, using default false
Batch=59, step=60, lr=0.199750, batch loss=23.609453, epoch loss=23.609453
Batch=119, step=120, lr=0.199750, batch loss=8.516926, epoch loss=32.126379
Batch=179, step=180, lr=0.199500, batch loss=2.644440, epoch loss=34.770819
Batch=239, step=240, lr=0.199250, batch loss=0.855278, epoch loss=35.626096
Batch=299, step=300, lr=0.199000, batch loss=1.437143, epoch loss=37.063240
Batch=359, step=360, lr=0.198750, batch loss=1.338851, epoch loss=38.402091
Batch=419, step=420, lr=0.198500, batch loss=0.611321, epoch loss=39.013412
Batch=479, step=480, lr=0.198250, batch loss=0.745907, epoch loss=39.759319
Batch=539, step=540, lr=0.197750, batch loss=0.690502, epoch loss=40.449821
Batch=599, step=600, lr=0.197500, batch loss=1.100005, epoch loss=41.549826
Batch=659, step=660, lr=0.197250, batch loss=0.481906, epoch loss=42.031732
Batch=719, step=720, lr=0.197250, batch loss=0.410651, epoch loss=42.442383
Batch=779, step=780, lr=0.196750, batch loss=0.469012, epoch loss=42.911395
Batch=839, step=840, lr=0.196500, batch loss=0.450444, epoch loss=43.361839
Batch=899, step=900, lr=0.196250, batch loss=0.383619, epoch loss=43.745458
Batch=959, step=960, lr=0.196250, batch loss=0.267798, epoch loss=44.013256
Batch=1019, step=1020, lr=0.196000, batch loss=0.488032, epoch loss=44.501287
Batch=1079, step=1080, lr=0.195750, batch loss=0.264843, epoch loss=44.766131
Batch=1139, step=1140, lr=0.195500, batch loss=0.331246, epoch loss=45.097377
Batch=1199, step=1200, lr=0.195000, batch loss=0.265148, epoch loss=45.362525
Epoch=0, step=1200, lr=0.195000, epoch loss=45.362525
Batch=59, step=1260, lr=0.195000, batch loss=0.263255, epoch loss=0.263255
Batch=119, step=1320, lr=0.194750, batch loss=0.204773, epoch loss=0.468029
Batch=179, step=1380, lr=0.194500, batch loss=0.245273, epoch loss=0.713302
Batch=239, step=1440, lr=0.194000, batch loss=0.349884, epoch loss=1.063186
Batch=299, step=1500, lr=0.193750, batch loss=0.239716, epoch loss=1.302902
Batch=359, step=1560, lr=0.193500, batch loss=0.319113, epoch loss=1.622015
Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.934750
Batch=479, step=1680, lr=0.193250, batch loss=0.276751, epoch loss=2.211501
Batch=539, step=1740, lr=0.193000, batch loss=0.209649, epoch loss=2.421150
Batch=599, step=1800, lr=0.192750, batch loss=0.247605, epoch loss=2.668755
Batch=659, step=1860, lr=0.192500, batch loss=0.367394, epoch loss=3.036150
Batch=719, step=1920, lr=0.192250, batch loss=0.358710, epoch loss=3.394860
Batch=779, step=1980, lr=0.192000, batch loss=0.389832, epoch loss=3.784692
Batch=839, step=2040, lr=0.191750, batch loss=0.346711, epoch loss=4.131402
Batch=899, step=2100, lr=0.191500, batch loss=0.323615, epoch loss=4.455018
Batch=959, step=2160, lr=0.191250, batch loss=0.256138, epoch loss=4.711156
Batch=1019, step=2220, lr=0.191000, batch loss=0.391744, epoch loss=5.102900
Batch=1079, step=2280, lr=0.190750, batch loss=0.219769, epoch loss=5.322669
Batch=1139, step=2340, lr=0.190500, batch loss=0.265683, epoch loss=5.588352
Batch=1199, step=2400, lr=0.190000, batch loss=0.214211, epoch loss=5.802563
Epoch=1, step=2400, lr=0.190000, epoch loss=5.802563
Batch=59, step=2460, lr=0.190000, batch loss=0.231008, epoch loss=0.231008
Batch=119, step=2520, lr=0.189750, batch loss=0.190269, epoch loss=0.421277
Batch=179, step=2580, lr=0.189500, batch loss=0.221097, epoch loss=0.642374
Batch=239, step=2640, lr=0.189250, batch loss=0.330475, epoch loss=0.972849
Batch=299, step=2700, lr=0.189000, batch loss=0.210726, epoch loss=1.183575
Batch=359, step=2760, lr=0.188750, batch loss=0.293430, epoch loss=1.477005
Batch=419, step=2820, lr=0.188500, batch loss=0.285028, epoch loss=1.762032
Batch=479, step=2880, lr=0.188250, batch loss=0.262970, epoch loss=2.025002
Batch=539, step=2940, lr=0.187750, batch loss=0.196668, epoch loss=2.221670
Batch=599, step=3000, lr=0.187500, batch loss=0.235756, epoch loss=2.457426
Batch=659, step=3060, lr=0.187500, batch loss=0.341640, epoch loss=2.799066
Batch=719, step=3120, lr=0.187250, batch loss=0.337887, epoch loss=3.136953
Batch=779, step=3180, lr=0.187000, batch loss=0.359974, epoch loss=3.496927
Batch=839, step=3240, lr=0.186750, batch loss=0.331364, epoch loss=3.828291
Batch=899, step=3300, lr=0.186500, batch loss=0.304476, epoch loss=4.132767
Batch=959, step=3360, lr=0.186000, batch loss=0.243870, epoch loss=4.376637
Batch=1019, step=3420, lr=0.186000, batch loss=0.363149, epoch loss=4.739786
Batch=1079, step=3480, lr=0.185750, batch loss=0.216401, epoch loss=4.956187
Batch=1139, step=3540, lr=0.185500, batch loss=0.266090, epoch loss=5.222277
Batch=1199, step=3600, lr=0.185250, batch loss=0.201399, epoch loss=5.423676
Epoch=2, step=3600, lr=0.185250, epoch loss=5.423676
Batch=59, step=3660, lr=0.185000, batch loss=0.225373, epoch loss=0.225373
Batch=119, step=3720, lr=0.184750, batch loss=0.185067, epoch loss=0.410440
Batch=179, step=3780, lr=0.184500, batch loss=0.213756, epoch loss=0.624196
Batch=239, step=3840, lr=0.184250, batch loss=0.317331, epoch loss=0.941526
Batch=299, step=3900, lr=0.184000, batch loss=0.208381, epoch loss=1.149907
Batch=359, step=3960, lr=0.183500, batch loss=0.286283, epoch loss=1.436190
Batch=419, step=4020, lr=0.183250, batch loss=0.273614, epoch loss=1.709804
Batch=479, step=4080, lr=0.183250, batch loss=0.249291, epoch loss=1.959095
Batch=539, step=4140, lr=0.183000, batch loss=0.194613, epoch loss=2.153708
Batch=599, step=4200, lr=0.182500, batch loss=0.238428, epoch loss=2.392136
Batch=659, step=4260, lr=0.182500, batch loss=0.332415, epoch loss=2.724551
Batch=719, step=4320, lr=0.182000, batch loss=0.331021, epoch loss=3.055572
Batch=779, step=4380, lr=0.182000, batch loss=0.353542, epoch loss=3.409115
Batch=839, step=4440, lr=0.181750, batch loss=0.320620, epoch loss=3.729735
Batch=899, step=4500, lr=0.181250, batch loss=0.293850, epoch loss=4.023585
Batch=959, step=4560, lr=0.181250, batch loss=0.240337, epoch loss=4.263921
Batch=1019, step=4620, lr=0.181000, batch loss=0.340795, epoch loss=4.604717
Batch=1079, step=4680, lr=0.180750, batch loss=0.212376, epoch loss=4.817093
Batch=1139, step=4740, lr=0.180500, batch loss=0.248072, epoch loss=5.065165
Batch=1199, step=4800, lr=0.180250, batch loss=0.192422, epoch loss=5.257587
Epoch=3, step=4800, lr=0.180250, epoch loss=5.257587
Batch=59, step=4860, lr=0.179750, batch loss=0.221809, epoch loss=0.221809
Batch=119, step=4920, lr=0.179750, batch loss=0.188886, epoch loss=0.410695
Batch=179, step=4980, lr=0.179500, batch loss=0.208224, epoch loss=0.618918
Batch=239, step=5040, lr=0.179250, batch loss=0.308826, epoch loss=0.927744
Batch=299, step=5100, lr=0.179000, batch loss=0.198199, epoch loss=1.125943
Batch=359, step=5160, lr=0.178750, batch loss=0.275551, epoch loss=1.401494
Batch=419, step=5220, lr=0.178500, batch loss=0.265934, epoch loss=1.667428
Batch=479, step=5280, lr=0.178250, batch loss=0.241131, epoch loss=1.908559
Batch=539, step=5340, lr=0.178000, batch loss=0.191786, epoch loss=2.100345
Batch=599, step=5400, lr=0.177750, batch loss=0.228293, epoch loss=2.328637
Batch=659, step=5460, lr=0.177500, batch loss=0.326466, epoch loss=2.655103
Batch=719, step=5520, lr=0.177250, batch loss=0.328491, epoch loss=2.983594
Batch=779, step=5580, lr=0.177000, batch loss=0.349485, epoch loss=3.333079
Batch=839, step=5640, lr=0.176500, batch loss=0.307160, epoch loss=3.640238
Batch=899, step=5700, lr=0.176250, batch loss=0.267629, epoch loss=3.907867
Batch=959, step=5760, lr=0.176250, batch loss=0.201659, epoch loss=4.109526
Batch=1019, step=5820, lr=0.176000, batch loss=0.300184, epoch loss=4.409710
Batch=1079, step=5880, lr=0.175750, batch loss=0.178298, epoch loss=4.588008
Batch=1139, step=5940, lr=0.175500, batch loss=0.219668, epoch loss=4.807676
Batch=1199, step=6000, lr=0.175250, batch loss=0.190557, epoch loss=4.998233
Epoch=4, step=6000, lr=0.175250, epoch loss=4.998233
Batch=59, step=6060, lr=0.175000, batch loss=0.234160, epoch loss=0.234160
Batch=119, step=6120, lr=0.174750, batch loss=0.193702, epoch loss=0.427862
Batch=179, step=6180, lr=0.174500, batch loss=0.200390, epoch loss=0.628252
Batch=239, step=6240, lr=0.174250, batch loss=0.301193, epoch loss=0.929445
Batch=299, step=6300, lr=0.174000, batch loss=0.209946, epoch loss=1.139391
Batch=359, step=6360, lr=0.173750, batch loss=0.273400, epoch loss=1.412791
Batch=419, step=6420, lr=0.173500, batch loss=0.268071, epoch loss=1.680862
Batch=479, step=6480, lr=0.173250, batch loss=0.242530, epoch loss=1.923392
Batch=539, step=6540, lr=0.173000, batch loss=0.196726, epoch loss=2.120118
Batch=599, step=6600, lr=0.172750, batch loss=0.233271, epoch loss=2.353389
Batch=659, step=6660, lr=0.172500, batch loss=0.313699, epoch loss=2.667089
Batch=719, step=6720, lr=0.172250, batch loss=0.318337, epoch loss=2.985425
Batch=779, step=6780, lr=0.172000, batch loss=0.329777, epoch loss=3.315203
Batch=839, step=6840, lr=0.171750, batch loss=0.301757, epoch loss=3.616960
Batch=899, step=6900, lr=0.171500, batch loss=0.271181, epoch loss=3.888141
Batch=959, step=6960, lr=0.171250, batch loss=0.205724, epoch loss=4.093866
Batch=1019, step=7020, lr=0.171000, batch loss=0.331624, epoch loss=4.425490
Batch=1079, step=7080, lr=0.170750, batch loss=0.193727, epoch loss=4.619217
Batch=1139, step=7140, lr=0.170500, batch loss=0.232255, epoch loss=4.851472
Batch=1199, step=7200, lr=0.170250, batch loss=0.181396, epoch loss=5.032868
Epoch=5, step=7200, lr=0.170250, epoch loss=5.032868
Batch=59, step=7260, lr=0.170000, batch loss=0.221280, epoch loss=0.221280
Batch=119, step=7320, lr=0.169750, batch loss=0.179538, epoch loss=0.400818
Batch=179, step=7380, lr=0.169500, batch loss=0.196143, epoch loss=0.596960
Batch=239, step=7440, lr=0.169250, batch loss=0.292797, epoch loss=0.889758
Batch=299, step=7500, lr=0.169000, batch loss=0.198834, epoch loss=1.088591
Batch=359, step=7560, lr=0.168750, batch loss=0.260664, epoch loss=1.349255
Batch=419, step=7620, lr=0.168500, batch loss=0.256205, epoch loss=1.605460
Batch=479, step=7680, lr=0.168000, batch loss=0.238060, epoch loss=1.843520
Batch=539, step=7740, lr=0.168000, batch loss=0.186515, epoch loss=2.030035
Batch=599, step=7800, lr=0.167500, batch loss=0.223903, epoch loss=2.253938
Batch=659, step=7860, lr=0.167250, batch loss=0.305774, epoch loss=2.559712
Batch=719, step=7920, lr=0.167250, batch loss=0.312220, epoch loss=2.871932
Batch=779, step=7980, lr=0.167000, batch loss=0.329911, epoch loss=3.201843
Batch=839, step=8040, lr=0.166750, batch loss=0.292670, epoch loss=3.494513
Batch=899, step=8100, lr=0.166500, batch loss=0.263888, epoch loss=3.758401
Batch=959, step=8160, lr=0.166250, batch loss=0.197921, epoch loss=3.956322
Batch=1019, step=8220, lr=0.166000, batch loss=0.321995, epoch loss=4.278317
Batch=1079, step=8280, lr=0.165500, batch loss=0.178643, epoch loss=4.456961
Batch=1139, step=8340, lr=0.165250, batch loss=0.209099, epoch loss=4.666060
Batch=1199, step=8400, lr=0.165250, batch loss=0.177436, epoch loss=4.843496
Epoch=6, step=8400, lr=0.165250, epoch loss=4.843496
Batch=59, step=8460, lr=0.165000, batch loss=0.225582, epoch loss=0.225582
Batch=119, step=8520, lr=0.164750, batch loss=0.178077, epoch loss=0.403659
Batch=179, step=8580, lr=0.164250, batch loss=0.188140, epoch loss=0.591798
Batch=239, step=8640, lr=0.164000, batch loss=0.277838, epoch loss=0.869637
Batch=299, step=8700, lr=0.164000, batch loss=0.187592, epoch loss=1.057228
Batch=359, step=8760, lr=0.163500, batch loss=0.246672, epoch loss=1.303901
Batch=419, step=8820, lr=0.163250, batch loss=0.242528, epoch loss=1.546429
Batch=479, step=8880, lr=0.163250, batch loss=0.219890, epoch loss=1.766319
Batch=539, step=8940, lr=0.162750, batch loss=0.175627, epoch loss=1.941946
Batch=599, step=9000, lr=0.162750, batch loss=0.213443, epoch loss=2.155388
Batch=659, step=9060, lr=0.162500, batch loss=0.297844, epoch loss=2.453233
Batch=719, step=9120, lr=0.162000, batch loss=0.295442, epoch loss=2.748675
Batch=779, step=9180, lr=0.161750, batch loss=0.320228, epoch loss=3.068903
Batch=839, step=9240, lr=0.161500, batch loss=0.286723, epoch loss=3.355626
Batch=899, step=9300, lr=0.161250, batch loss=0.253943, epoch loss=3.609568
Batch=959, step=9360, lr=0.161000, batch loss=0.210446, epoch loss=3.820014
Batch=1019, step=9420, lr=0.161000, batch loss=0.298788, epoch loss=4.118803
Batch=1079, step=9480, lr=0.160750, batch loss=0.174418, epoch loss=4.293221
Batch=1139, step=9540, lr=0.160250, batch loss=0.201114, epoch loss=4.494335
Batch=1199, step=9600, lr=0.160000, batch loss=0.165249, epoch loss=4.659584
Epoch=7, step=9600, lr=0.160000, epoch loss=4.659584
Batch=59, step=9660, lr=0.160000, batch loss=0.190561, epoch loss=0.190561
Batch=119, step=9720, lr=0.159500, batch loss=0.163830, epoch loss=0.354391
Batch=179, step=9780, lr=0.159250, batch loss=0.179697, epoch loss=0.534088
Batch=239, step=9840, lr=0.159250, batch loss=0.263420, epoch loss=0.797508
Batch=299, step=9900, lr=0.158750, batch loss=0.181572, epoch loss=0.979080
Batch=359, step=9960, lr=0.158500, batch loss=0.243853, epoch loss=1.222933
Batch=419, step=10020, lr=0.158500, batch loss=0.233260, epoch loss=1.456193
Batch=479, step=10080, lr=0.158000, batch loss=0.215399, epoch loss=1.671592
Batch=539, step=10140, lr=0.157750, batch loss=0.170962, epoch loss=1.842554
Batch=599, step=10200, lr=0.157500, batch loss=0.203261, epoch loss=2.045816
Batch=659, step=10260, lr=0.157500, batch loss=0.282454, epoch loss=2.328270
Batch=719, step=10320, lr=0.157000, batch loss=0.280719, epoch loss=2.608989
Batch=779, step=10380, lr=0.156750, batch loss=0.300302, epoch loss=2.909291
Batch=839, step=10440, lr=0.156750, batch loss=0.273108, epoch loss=3.182399
Batch=899, step=10500, lr=0.156250, batch loss=0.239398, epoch loss=3.421797
Batch=959, step=10560, lr=0.156250, batch loss=0.202670, epoch loss=3.624467
Batch=1019, step=10620, lr=0.156000, batch loss=0.293132, epoch loss=3.917599
Batch=1079, step=10680, lr=0.155500, batch loss=0.185126, epoch loss=4.102725
Batch=1139, step=10740, lr=0.155250, batch loss=0.203688, epoch loss=4.306413
Batch=1199, step=10800, lr=0.155000, batch loss=0.155806, epoch loss=4.462219
Epoch=8, step=10800, lr=0.155000, epoch loss=4.462219
Batch=59, step=10860, lr=0.154750, batch loss=0.177644, epoch loss=0.177644
Batch=119, step=10920, lr=0.154500, batch loss=0.153412, epoch loss=0.331056
Batch=179, step=10980, lr=0.154250, batch loss=0.167246, epoch loss=0.498302
Batch=239, step=11040, lr=0.154000, batch loss=0.243207, epoch loss=0.741509
Batch=299, step=11100, lr=0.154000, batch loss=0.172972, epoch loss=0.914481
Batch=359, step=11160, lr=0.153750, batch loss=0.222557, epoch loss=1.137039
Batch=419, step=11220, lr=0.153500, batch loss=0.218276, epoch loss=1.355314
Batch=479, step=11280, lr=0.153250, batch loss=0.213812, epoch loss=1.569126
Batch=539, step=11340, lr=0.153000, batch loss=0.171160, epoch loss=1.740286
Batch=599, step=11400, lr=0.152750, batch loss=0.176663, epoch loss=1.916949
Batch=659, step=11460, lr=0.152500, batch loss=0.263391, epoch loss=2.180340
Batch=719, step=11520, lr=0.152250, batch loss=0.264309, epoch loss=2.444649
Batch=779, step=11580, lr=0.151750, batch loss=0.273414, epoch loss=2.718064
Batch=839, step=11640, lr=0.151500, batch loss=0.252840, epoch loss=2.970904
Batch=899, step=11700, lr=0.151250, batch loss=0.219849, epoch loss=3.190753
Batch=959, step=11760, lr=0.151250, batch loss=0.186586, epoch loss=3.377339
Batch=1019, step=11820, lr=0.151000, batch loss=0.276725, epoch loss=3.654063
Batch=1079, step=11880, lr=0.150500, batch loss=0.151042, epoch loss=3.805105
Batch=1139, step=11940, lr=0.150500, batch loss=0.180056, epoch loss=3.985161
Batch=1199, step=12000, lr=0.150250, batch loss=0.141088, epoch loss=4.126249
Epoch=9, step=12000, lr=0.150250, epoch loss=4.126249
Batch=59, step=12060, lr=0.149750, batch loss=0.162225, epoch loss=0.162225
Batch=119, step=12120, lr=0.149500, batch loss=0.138975, epoch loss=0.301200
Batch=179, step=12180, lr=0.149250, batch loss=0.152704, epoch loss=0.453904
Batch=239, step=12240, lr=0.149250, batch loss=0.222586, epoch loss=0.676491
Batch=299, step=12300, lr=0.149000, batch loss=0.143521, epoch loss=0.820011
Batch=359, step=12360, lr=0.148750, batch loss=0.201886, epoch loss=1.021897
Batch=419, step=12420, lr=0.148500, batch loss=0.207311, epoch loss=1.229208
Batch=479, step=12480, lr=0.148250, batch loss=0.182257, epoch loss=1.411465
Batch=539, step=12540, lr=0.148000, batch loss=0.143172, epoch loss=1.554636
Batch=599, step=12600, lr=0.147500, batch loss=0.151570, epoch loss=1.706206
Batch=659, step=12660, lr=0.147500, batch loss=0.228695, epoch loss=1.934901
Batch=719, step=12720, lr=0.147250, batch loss=0.246134, epoch loss=2.181035
Batch=779, step=12780, lr=0.147000, batch loss=0.264017, epoch loss=2.445052
Batch=839, step=12840, lr=0.146750, batch loss=0.240469, epoch loss=2.685520
Batch=899, step=12900, lr=0.146500, batch loss=0.214990, epoch loss=2.900510
Batch=959, step=12960, lr=0.146250, batch loss=0.155812, epoch loss=3.056322
Batch=1019, step=13020, lr=0.146000, batch loss=0.269035, epoch loss=3.325357
Batch=1079, step=13080, lr=0.145750, batch loss=0.110762, epoch loss=3.436120
Batch=1139, step=13140, lr=0.145500, batch loss=0.146380, epoch loss=3.582499
Batch=1199, step=13200, lr=0.145250, batch loss=0.124174, epoch loss=3.706674
Epoch=10, step=13200, lr=0.145250, epoch loss=3.706674
Batch=59, step=13260, lr=0.145000, batch loss=0.150047, epoch loss=0.150047
Batch=119, step=13320, lr=0.144750, batch loss=0.133927, epoch loss=0.283975
Batch=179, step=13380, lr=0.144500, batch loss=0.134362, epoch loss=0.418336
Batch=239, step=13440, lr=0.144250, batch loss=0.191266, epoch loss=0.609602
Batch=299, step=13500, lr=0.144000, batch loss=0.119017, epoch loss=0.728619
Batch=359, step=13560, lr=0.143750, batch loss=0.167647, epoch loss=0.896266
Batch=419, step=13620, lr=0.143500, batch loss=0.163839, epoch loss=1.060104
Batch=479, step=13680, lr=0.143250, batch loss=0.150244, epoch loss=1.210349
Batch=539, step=13740, lr=0.143000, batch loss=0.121639, epoch loss=1.331988
Batch=599, step=13800, lr=0.142750, batch loss=0.123502, epoch loss=1.455489
Batch=659, step=13860, lr=0.142500, batch loss=0.180223, epoch loss=1.635712
Batch=719, step=13920, lr=0.142250, batch loss=0.181612, epoch loss=1.817324
Batch=779, step=13980, lr=0.142000, batch loss=0.201285, epoch loss=2.018609
Batch=839, step=14040, lr=0.141750, batch loss=0.190617, epoch loss=2.209226
Batch=899, step=14100, lr=0.141500, batch loss=0.161315, epoch loss=2.370541
Batch=959, step=14160, lr=0.141250, batch loss=0.135433, epoch loss=2.505974
Batch=1019, step=14220, lr=0.141000, batch loss=0.284101, epoch loss=2.790075
Batch=1079, step=14280, lr=0.140750, batch loss=0.088409, epoch loss=2.878484
Batch=1139, step=14340, lr=0.140500, batch loss=0.134866, epoch loss=3.013350
Batch=1199, step=14400, lr=0.140250, batch loss=0.093598, epoch loss=3.106948
Epoch=11, step=14400, lr=0.140250, epoch loss=3.106948
Batch=59, step=14460, lr=0.140000, batch loss=0.115158, epoch loss=0.115158
Batch=119, step=14520, lr=0.139750, batch loss=0.111266, epoch loss=0.226425
Batch=179, step=14580, lr=0.139500, batch loss=0.107994, epoch loss=0.334419
Batch=239, step=14640, lr=0.139250, batch loss=0.145117, epoch loss=0.479535
Batch=299, step=14700, lr=0.139000, batch loss=0.083805, epoch loss=0.563340
Batch=359, step=14760, lr=0.138750, batch loss=0.121713, epoch loss=0.685053
Batch=419, step=14820, lr=0.138500, batch loss=0.129940, epoch loss=0.814993
Batch=479, step=14880, lr=0.138250, batch loss=0.106485, epoch loss=0.921478
Batch=539, step=14940, lr=0.138000, batch loss=0.100583, epoch loss=1.022061
Batch=599, step=15000, lr=0.137500, batch loss=0.087711, epoch loss=1.109772
Batch=659, step=15060, lr=0.137250, batch loss=0.136303, epoch loss=1.246075
Batch=719, step=15120, lr=0.137250, batch loss=0.166490, epoch loss=1.412566
Batch=779, step=15180, lr=0.137000, batch loss=0.262513, epoch loss=1.675078
Batch=839, step=15240, lr=0.136750, batch loss=0.143240, epoch loss=1.818318
Batch=899, step=15300, lr=0.136500, batch loss=0.152069, epoch loss=1.970387
Batch=959, step=15360, lr=0.136250, batch loss=0.081232, epoch loss=2.051619
Batch=1019, step=15420, lr=0.136000, batch loss=0.165864, epoch loss=2.217483
Batch=1079, step=15480, lr=0.135750, batch loss=0.044373, epoch loss=2.261856
Batch=1139, step=15540, lr=0.135500, batch loss=0.102027, epoch loss=2.363884
Batch=1199, step=15600, lr=0.135000, batch loss=0.063356, epoch loss=2.427239
Epoch=12, step=15600, lr=0.135000, epoch loss=2.427239
Batch=59, step=15660, lr=0.135000, batch loss=0.088859, epoch loss=0.088859
Batch=119, step=15720, lr=0.134750, batch loss=0.152624, epoch loss=0.241483
Batch=179, step=15780, lr=0.134500, batch loss=0.107822, epoch loss=0.349304
Batch=239, step=15840, lr=0.134250, batch loss=0.105278, epoch loss=0.454582
Batch=299, step=15900, lr=0.134000, batch loss=0.044088, epoch loss=0.498671
Batch=359, step=15960, lr=0.133750, batch loss=0.086229, epoch loss=0.584899
Batch=419, step=16020, lr=0.133500, batch loss=0.084160, epoch loss=0.669060
Batch=479, step=16080, lr=0.133250, batch loss=0.063740, epoch loss=0.732800
Batch=539, step=16140, lr=0.133000, batch loss=0.063834, epoch loss=0.796634
Batch=599, step=16200, lr=0.132750, batch loss=0.137791, epoch loss=0.934425
Batch=659, step=16260, lr=0.132500, batch loss=0.088463, epoch loss=1.022888
Batch=719, step=16320, lr=0.132250, batch loss=0.102661, epoch loss=1.125549
Batch=779, step=16380, lr=0.131750, batch loss=0.186078, epoch loss=1.311627
Batch=839, step=16440, lr=0.131750, batch loss=0.093508, epoch loss=1.405135
Batch=899, step=16500, lr=0.131500, batch loss=0.082340, epoch loss=1.487475
Batch=959, step=16560, lr=0.131250, batch loss=0.045916, epoch loss=1.533391
Batch=1019, step=16620, lr=0.130750, batch loss=0.117463, epoch loss=1.650854
Batch=1079, step=16680, lr=0.130500, batch loss=0.022333, epoch loss=1.673187
Batch=1139, step=16740, lr=0.130500, batch loss=0.057586, epoch loss=1.730773
Batch=1199, step=16800, lr=0.130000, batch loss=0.028298, epoch loss=1.759070
Epoch=13, step=16800, lr=0.130000, epoch loss=1.759070
Batch=59, step=16860, lr=0.130000, batch loss=0.040251, epoch loss=0.040251
Batch=119, step=16920, lr=0.129500, batch loss=0.065418, epoch loss=0.105668
Batch=179, step=16980, lr=0.129250, batch loss=0.051634, epoch loss=0.157302
Batch=239, step=17040, lr=0.129000, batch loss=0.068433, epoch loss=0.225735
Batch=299, step=17100, lr=0.128750, batch loss=0.028699, epoch loss=0.254434
Batch=359, step=17160, lr=0.128750, batch loss=0.050337, epoch loss=0.304771
Batch=419, step=17220, lr=0.128250, batch loss=0.064464, epoch loss=0.369235
Batch=479, step=17280, lr=0.128000, batch loss=0.027446, epoch loss=0.396681
Batch=539, step=17340, lr=0.127750, batch loss=0.028675, epoch loss=0.425356
Batch=599, step=17400, lr=0.127500, batch loss=0.040655, epoch loss=0.466011
Batch=659, step=17460, lr=0.127500, batch loss=0.058138, epoch loss=0.524149
Batch=719, step=17520, lr=0.127250, batch loss=0.077911, epoch loss=0.602060
Batch=779, step=17580, lr=0.126750, batch loss=0.061491, epoch loss=0.663551
Batch=839, step=17640, lr=0.126750, batch loss=0.093908, epoch loss=0.757459
Batch=899, step=17700, lr=0.126250, batch loss=0.054078, epoch loss=0.811537
Batch=959, step=17760, lr=0.126250, batch loss=0.020256, epoch loss=0.831793
Batch=1019, step=17820, lr=0.126000, batch loss=0.029644, epoch loss=0.861438
Batch=1079, step=17880, lr=0.125500, batch loss=0.021975, epoch loss=0.883413
Batch=1139, step=17940, lr=0.125500, batch loss=0.048155, epoch loss=0.931568
Batch=1199, step=18000, lr=0.125000, batch loss=0.019092, epoch loss=0.950660
Epoch=14, step=18000, lr=0.125000, epoch loss=0.950660
Batch=59, step=18060, lr=0.124750, batch loss=0.013600, epoch loss=0.013600
Batch=119, step=18120, lr=0.124500, batch loss=0.021008, epoch loss=0.034608
Batch=179, step=18180, lr=0.124250, batch loss=0.030555, epoch loss=0.065163
Batch=239, step=18240, lr=0.124000, batch loss=0.035067, epoch loss=0.100229
Batch=299, step=18300, lr=0.124000, batch loss=0.012027, epoch loss=0.112257
Batch=359, step=18360, lr=0.123750, batch loss=0.025834, epoch loss=0.138091
Batch=419, step=18420, lr=0.123500, batch loss=0.030085, epoch loss=0.168175
Batch=479, step=18480, lr=0.123250, batch loss=0.015342, epoch loss=0.183517
Batch=539, step=18540, lr=0.123000, batch loss=0.022015, epoch loss=0.205532
Batch=599, step=18600, lr=0.122750, batch loss=0.029467, epoch loss=0.234999
Batch=659, step=18660, lr=0.122500, batch loss=0.028914, epoch loss=0.263913
Batch=719, step=18720, lr=0.122250, batch loss=0.029357, epoch loss=0.293270
Batch=779, step=18780, lr=0.122000, batch loss=0.077632, epoch loss=0.370901
Batch=839, step=18840, lr=0.121750, batch loss=0.205536, epoch loss=0.576438
Batch=899, step=18900, lr=0.121500, batch loss=0.042083, epoch loss=0.618521
Batch=959, step=18960, lr=0.121250, batch loss=0.016682, epoch loss=0.635204
Batch=1019, step=19020, lr=0.121000, batch loss=0.028473, epoch loss=0.663676
Batch=1079, step=19080, lr=0.120750, batch loss=0.006669, epoch loss=0.670345
Batch=1139, step=19140, lr=0.120500, batch loss=0.025444, epoch loss=0.695789
Batch=1199, step=19200, lr=0.120250, batch loss=0.011860, epoch loss=0.707649
Epoch=15, step=19200, lr=0.120250, epoch loss=0.707649
Batch=59, step=19260, lr=0.120000, batch loss=0.005929, epoch loss=0.005929
Batch=119, step=19320, lr=0.119750, batch loss=0.012035, epoch loss=0.017964
Batch=179, step=19380, lr=0.119500, batch loss=0.021525, epoch loss=0.039489
Batch=239, step=19440, lr=0.119250, batch loss=0.026872, epoch loss=0.066361
Batch=299, step=19500, lr=0.119000, batch loss=0.010407, epoch loss=0.076769
Batch=359, step=19560, lr=0.118500, batch loss=0.017082, epoch loss=0.093851
Batch=419, step=19620, lr=0.118250, batch loss=0.020432, epoch loss=0.114282
Batch=479, step=19680, lr=0.118000, batch loss=0.010150, epoch loss=0.124432
Batch=539, step=19740, lr=0.118000, batch loss=0.018827, epoch loss=0.143259
Batch=599, step=19800, lr=0.117750, batch loss=0.020695, epoch loss=0.163954
Batch=659, step=19860, lr=0.117250, batch loss=0.019067, epoch loss=0.183021
Batch=719, step=19920, lr=0.117000, batch loss=0.021722, epoch loss=0.204743
Batch=779, step=19980, lr=0.116750, batch loss=0.035020, epoch loss=0.239762
Batch=839, step=20040, lr=0.116500, batch loss=0.036519, epoch loss=0.276282
Batch=899, step=20100, lr=0.116250, batch loss=0.032601, epoch loss=0.308882
Batch=959, step=20160, lr=0.116000, batch loss=0.021628, epoch loss=0.330510
Batch=1019, step=20220, lr=0.115750, batch loss=0.036735, epoch loss=0.367245
Batch=1079, step=20280, lr=0.115500, batch loss=0.006886, epoch loss=0.374132
Batch=1139, step=20340, lr=0.115250, batch loss=0.018652, epoch loss=0.392784
Batch=1199, step=20400, lr=0.115000, batch loss=0.007730, epoch loss=0.400513
Epoch=16, step=20400, lr=0.115000, epoch loss=0.400513
Batch=59, step=20460, lr=0.115000, batch loss=0.003687, epoch loss=0.003687
Batch=119, step=20520, lr=0.114500, batch loss=0.010533, epoch loss=0.014220
Batch=179, step=20580, lr=0.114250, batch loss=0.021128, epoch loss=0.035348
Batch=239, step=20640, lr=0.114000, batch loss=0.021098, epoch loss=0.056446
Batch=299, step=20700, lr=0.113750, batch loss=0.010549, epoch loss=0.066995
Batch=359, step=20760, lr=0.113500, batch loss=0.014509, epoch loss=0.081504
Batch=419, step=20820, lr=0.113250, batch loss=0.015970, epoch loss=0.097474
Batch=479, step=20880, lr=0.113000, batch loss=0.004196, epoch loss=0.101670
Batch=539, step=20940, lr=0.112750, batch loss=0.015494, epoch loss=0.117164
Batch=599, step=21000, lr=0.112500, batch loss=0.020662, epoch loss=0.137826
Batch=659, step=21060, lr=0.112250, batch loss=0.016019, epoch loss=0.153845
Batch=719, step=21120, lr=0.112250, batch loss=0.042634, epoch loss=0.196479
Batch=779, step=21180, lr=0.111750, batch loss=0.079353, epoch loss=0.275832
Batch=839, step=21240, lr=0.111750, batch loss=0.026885, epoch loss=0.302717
Batch=899, step=21300, lr=0.111250, batch loss=0.037276, epoch loss=0.339994
Batch=959, step=21360, lr=0.111000, batch loss=0.011637, epoch loss=0.351631
Batch=1019, step=21420, lr=0.110750, batch loss=0.013035, epoch loss=0.364666
Batch=1079, step=21480, lr=0.110500, batch loss=0.001855, epoch loss=0.366522
Batch=1139, step=21540, lr=0.110250, batch loss=0.013169, epoch loss=0.379691
Batch=1199, step=21600, lr=0.110000, batch loss=0.005359, epoch loss=0.385050
Epoch=17, step=21600, lr=0.110000, epoch loss=0.385050
Batch=59, step=21660, lr=0.109750, batch loss=0.002053, epoch loss=0.002053
Batch=119, step=21720, lr=0.109500, batch loss=0.007202, epoch loss=0.009255
Batch=179, step=21780, lr=0.109500, batch loss=0.013193, epoch loss=0.022448
Batch=239, step=21840, lr=0.109250, batch loss=0.013616, epoch loss=0.036065
Batch=299, step=21900, lr=0.109000, batch loss=0.002478, epoch loss=0.038543
Batch=359, step=21960, lr=0.108500, batch loss=0.012608, epoch loss=0.051151
Batch=419, step=22020, lr=0.108250, batch loss=0.012204, epoch loss=0.063355
Batch=479, step=22080, lr=0.108250, batch loss=0.003107, epoch loss=0.066462
Batch=539, step=22140, lr=0.108000, batch loss=0.017933, epoch loss=0.084395
Batch=599, step=22200, lr=0.107750, batch loss=0.017052, epoch loss=0.101446
Batch=659, step=22260, lr=0.107500, batch loss=0.014893, epoch loss=0.116339
Batch=719, step=22320, lr=0.107250, batch loss=0.029713, epoch loss=0.146052
Batch=779, step=22380, lr=0.107000, batch loss=0.045042, epoch loss=0.191095
Batch=839, step=22440, lr=0.106750, batch loss=0.021904, epoch loss=0.212999
Batch=899, step=22500, lr=0.106500, batch loss=0.022516, epoch loss=0.235514
Batch=959, step=22560, lr=0.106250, batch loss=0.011390, epoch loss=0.246904
Batch=1019, step=22620, lr=0.106000, batch loss=0.008986, epoch loss=0.255890
Batch=1079, step=22680, lr=0.105750, batch loss=0.000070, epoch loss=0.255960
Batch=1139, step=22740, lr=0.105250, batch loss=0.010166, epoch loss=0.266126
Batch=1199, step=22800, lr=0.105250, batch loss=0.004542, epoch loss=0.270668
Epoch=18, step=22800, lr=0.105250, epoch loss=0.270668
Batch=59, step=22860, lr=0.105000, batch loss=0.001546, epoch loss=0.001546
Batch=119, step=22920, lr=0.104750, batch loss=0.005432, epoch loss=0.006977
Batch=179, step=22980, lr=0.104500, batch loss=0.011040, epoch loss=0.018017
Batch=239, step=23040, lr=0.104000, batch loss=0.009120, epoch loss=0.027137
Batch=299, step=23100, lr=0.104000, batch loss=0.012097, epoch loss=0.039234
Batch=359, step=23160, lr=0.103750, batch loss=0.011510, epoch loss=0.050744
Batch=419, step=23220, lr=0.103500, batch loss=0.010579, epoch loss=0.061323
Batch=479, step=23280, lr=0.103250, batch loss=0.002350, epoch loss=0.063673
Batch=539, step=23340, lr=0.102750, batch loss=0.016814, epoch loss=0.080486
Batch=599, step=23400, lr=0.102750, batch loss=0.013843, epoch loss=0.094330
Batch=659, step=23460, lr=0.102250, batch loss=0.010445, epoch loss=0.104775
Batch=719, step=23520, lr=0.102250, batch loss=0.016752, epoch loss=0.121527
Batch=779, step=23580, lr=0.102000, batch loss=0.021624, epoch loss=0.143151
Batch=839, step=23640, lr=0.101750, batch loss=0.024651, epoch loss=0.167801
Batch=899, step=23700, lr=0.101500, batch loss=0.024423, epoch loss=0.192224
Batch=959, step=23760, lr=0.101250, batch loss=0.008117, epoch loss=0.200342
Batch=1019, step=23820, lr=0.101000, batch loss=0.007434, epoch loss=0.207776
Batch=1079, step=23880, lr=0.100750, batch loss=0.000611, epoch loss=0.208387
Batch=1139, step=23940, lr=0.100500, batch loss=0.009140, epoch loss=0.217526
Batch=1199, step=24000, lr=0.100250, batch loss=0.004453, epoch loss=0.221980
Epoch=19, step=24000, lr=0.100250, epoch loss=0.221980
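
The run above logs every 60th batch over 1200 batches per epoch; the learning rate decays roughly linearly from about 0.2 towards 0.1 over the ~24000 steps, and the reported epoch loss is the running sum of the logged batch losses. A minimal OCaml sketch of a driver loop that would produce log lines of this shape (illustrative only, assuming a linear schedule and a synthetic loss value; this is not the ocannl test code):

(* Illustrative driver loop: reproduces the shape of the log above.
   Assumptions: linear lr decay from 0.2 to 0.1, synthetic batch loss. *)
let () =
  let epochs = 20 and batches_per_epoch = 1200 and log_every = 60 in
  let total_steps = epochs * batches_per_epoch in
  let step = ref 0 and last_lr = ref 0.2 in
  for epoch = 0 to epochs - 1 do
    let epoch_loss = ref 0.0 in
    for batch = 0 to batches_per_epoch - 1 do
      incr step;
      (* Assumed schedule: linear decay over the whole run. *)
      last_lr := 0.2 -. 0.1 *. float_of_int !step /. float_of_int total_steps;
      (* Synthetic stand-in for the loss returned by a real training step. *)
      let batch_loss =
        0.3 *. (1.0 -. float_of_int !step /. float_of_int total_steps)
      in
      if (batch + 1) mod log_every = 0 then begin
        epoch_loss := !epoch_loss +. batch_loss;
        Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
          batch !step !last_lr batch_loss !epoch_loss
      end
    done;
    Printf.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n"
      epoch !step !last_lr !epoch_loss
  done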


Half-moons scatterplot and decision boundary:
┌────────────────────────────────────────────────────────────────────────────────────────────────────┐
│********************************#*******************************************************************│
│**********************#*#*#######*###*#####*********************************************************│
│**********************#########################*****************************************************│
│*****************#**########*######*###########*###*************************************************│
│***************#################*###################************************************************│
│************######*#################*#################**********************************************│
│**********#*#####*########*#**************##*#########*#********************************************│
│***********########*##*#******************#*****##########******************************************│
│***********###########*************************############***************************************..│
│********######*####*********************************###*###*#**********************************.....│
│*******######**##**********************************#*######*#********************************.......│
│*******##*##**##***********..........***************########*##****************************.........│
│*****#######************.......%...%%...***************#########*************************.........%.│
│******######***********.........%........***************##*#####************************......%.%.%.│
│***#########**********.........%%%.%%......*************#*#######**********************......%.%%%%.│
│****#######**********..........%%%%.........************#########*********************.......%%.%%.%│
│**#######************..........%%%%%%%.......**************###*###******************.........%%%%%%.│
│*##*####************...........%%%%%%%.........***********########*****************..........%%%%%%.│
│*#######************...........%%%%%%%..........************#######***************...........%%%%%%.│
│*##*####***********............%%.%%%%%...........***********####****************...........%%%%%%%.│
│*#####*#***********.............%%%%%%%............**********##*###************..............%%%%%..│
│#######***********.............%.%%%%%%.............*********#######**********.............%%%%.%%..│
│#####*#**********...............%%%%%%%...............*******#######*********..............%%%%%%%%.│
│###*#*#**********...............%%%%%%%%%..............*******######********...............%%%%%%...│
│#######*********.................%%%%%%%%...............*****###*###******................%%%%%%....│
│######**********.................%%%%%%%%%................***#*###*******...............%%%%%%%%%...│
│*#*##*#********...................%%%%%%%%%%...............***######****.................%%%%%%.....│
│#****##********....................%%%%%%%%%................***###*#***...............%.%%%%%%%.....│
│**************.....................%.%%%%%%...................********.................%.%%.%%......│
│**************.......................%..%%%%%%%................*****..............%.%%%%%%%%%.......│
│*************.........................%.%%%.%%%%.................**...............%%%%%%%.%.%.......│
│************............................%..%%%%..%................................%%%%%%%%..........│
│************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........│
│***********..............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........│
│***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............│
│**********...................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............│
│**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................│
│*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................│
│*********............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................│
│********................................................%...%%%%.%%.%%%%..%.........................│
└────────────────────────────────────────────────────────────────────────────────────────────────────┘
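
The plot appears to render the two data classes as '#' and '%' and the model's two decision regions as '*' and '.'; the interleaved arcs are the classic half-moons benchmark. A hypothetical generator for such data, for reference only (the test's actual dataset construction, scaling and noise may differ):

(* Hypothetical half-moons generator; the test's actual data code may differ. *)
let half_moons ?(n = 1200) ?(noise = 0.1) () =
  let pi = 4.0 *. atan 1.0 in
  Array.init n (fun i ->
      let t = pi *. Random.float 1.0 in
      let jitter () = noise *. (Random.float 2.0 -. 1.0) in
      if i mod 2 = 0 then
        (* upper moon: class 0 *)
        (cos t +. jitter (), sin t +. jitter (), 0)
      else
        (* lower moon: class 1, offset so the two moons interleave *)
        (1.0 -. cos t +. jitter (), 0.5 -. sin t +. jitter (), 1))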
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @install @check @runtest && rm -rf _build" failed with exit status 1
2025-05-22 20:06.27: Job failed: Failed: Build failed