
ubuntu-24.04-5.3_opam-2.3


Logs

2025-05-22 20:00.59: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (39741884b740497ac10065d5e464e6c70f9151f4) (linux-x86_64:ubuntu-24.04-5.3_opam-2.3)
Base: ocaml/opam:ubuntu-24.04-ocaml-5.3@sha256:b6ac059a3005d298c3948e374b5fba1871ea7c1979a96018feb417d440b27ef0
Opam project build


To reproduce locally:


git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 39741884
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:ubuntu-24.04-ocaml-5.3@sha256:b6ac059a3005d298c3948e374b5fba1871ea7c1979a96018feb417d440b27ef0
# ubuntu-24.04-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build


END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
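
A minimal usage sketch (an editorial addition, not part of the CI output): assuming Docker and git are available locally, the reproduction block above can be run with an image tag so the resulting build is easy to inspect afterwards. The tag name "ocannl-ci-repro" is illustrative.

docker build -t ocannl-ci-repro .            # same build as above, but tagged for reuse
docker run --rm -it ocannl-ci-repro opam list  # inspect the installed opam switch inside the image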


2025-05-22 20:00.59: Using cache hint "ahrefs/ocannl-ocaml/opam:ubuntu-24.04-ocaml-5.3@sha256:b6ac059a3005d298c3948e374b5fba1871ea7c1979a96018feb417d440b27ef0-ubuntu-24.04-5.3_opam-2.3-63d0fa7caba437c680f3f62d33f451da"
2025-05-22 20:00.59: Using OBuilder spec:
((from ocaml/opam:ubuntu-24.04-ocaml-5.3@sha256:b6ac059a3005d298c3948e374b5fba1871ea7c1979a96018feb417d440b27ef0)
(comment ubuntu-24.04-5.3_opam-2.3)
(user (uid 1000) (gid 1000))
(env CLICOLOR_FORCE 1)
(env OPAMCOLOR always)
(workdir /src)
(run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
(run (shell "opam init --reinit -ni"))
(run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
(workdir /src)
(run (shell "sudo chown opam /src"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u"))
(copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
(run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
(run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
(env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
(env CI true)
(env OCAMLCI true)
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
(copy (src .) (dst /src))
(run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)


2025-05-22 20:00.59: Waiting for resource in pool OCluster
2025-05-22 20:00.59: Waiting for worker…
2025-05-22 20:01.03: Got resource from pool OCluster
Building on laodoke.caelum.ci.dev
All commits already cached
HEAD is now at 39741884 Untested: convert remaining uses of Format except where printing Sexp values


(from ocaml/opam:ubuntu-24.04-ocaml-5.3@sha256:b6ac059a3005d298c3948e374b5fba1871ea7c1979a96018feb417d440b27ef0)
2025-05-22 20:12.22 ---> using "10c823be6d6df8b8b953a1da7016d5c0a852e2e87d76746b4c929b5ce9cf0ea7" from cache


/: (comment ubuntu-24.04-5.3_opam-2.3)


/: (user (uid 1000) (gid 1000))


/: (env CLICOLOR_FORCE 1)


/: (env OPAMCOLOR always)


/: (workdir /src)


/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-05-22 20:12.22 ---> using "7e4e95fbdd960f07cc45e7caadc95ccc19437cf1fa3ac4cf9ffe38fb71ba29a6" from cache


/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.


This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.


Continue? [y/n] y
[NOTE] The 'jobs' option was reset, its value was 255 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using:
opam option jobs=255 --global
Format upgrade done.


<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
2025-05-22 20:12.22 ---> using "165aa512348c81560d0d9826f796b98dc48f4c9a7c0b4e56e3102119a9192813" from cache


/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-139-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-05-22 20:12.22 ---> using "df15c08d3bd246393f4369064e82160ecd55ec0511fc0edfa706fbdbbf9fc3b6" from cache


/src: (workdir /src)


/src: (run (shell "sudo chown opam /src"))
2025-05-22 20:12.22 ---> using "b48a5f00b86917e79b6c3f17793c62a1c228f36dd2ae72f3747baabaf72144bf" from cache


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 2df846cb67d6f96ae4fced111519ff4ae27d19ae || git fetch origin master) && git reset -q --hard 2df846cb67d6f96ae4fced111519ff4ae27d19ae && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
* branch                  master     -> FETCH_HEAD
35eb2f107a..2df846cb67  master     -> origin/master
2df846cb67 Merge pull request #27910 from maiste/release-dune-3.19.0


<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] synchronised from git+file:///home/opam/opam-repository


Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 20:13.04 ---> saved as "533b9167ed3fb393306be973817b8647cc52675a4f55cc554b66ee6dbb7542a9"


/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
2025-05-22 20:13.05 ---> saved as "32c6851dab32be5bb65f855cfdc4ae3ff8ffa379992835b8a952c0ff2de22164"


/src: (run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
[neural_nets_lib.dev] synchronised (file:///src)
neural_nets_lib is now pinned to file:///src (version dev)
[arrayjit.dev] synchronised (file:///src)
arrayjit is now pinned to file:///src (version dev)
2025-05-22 20:13.11 ---> saved as "afe9d4fdecc0350e428cb8b1c5cceb1f5db864601aa544b39edf60f8c5f18e35"


/src: (run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
2025-05-22 20:13.11 ---> saved as "e889d681eded8642b0519e5daa652520adba6260b784b29d56451b7b8ade4429"


/src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.19.0 dune-configurator.3.19.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")


/src: (env CI true)


/src: (env OCAMLCI true)


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
+ /usr/bin/sudo "apt-get" "update"
- Get:1 http://security.ubuntu.com/ubuntu noble-security InRelease [126 kB]
- Get:2 http://security.ubuntu.com/ubuntu noble-security/universe amd64 Packages [1086 kB]
- Get:3 http://security.ubuntu.com/ubuntu noble-security/restricted amd64 Packages [1434 kB]
- Get:4 http://security.ubuntu.com/ubuntu noble-security/main amd64 Packages [1081 kB]
- Get:5 http://security.ubuntu.com/ubuntu noble-security/multiverse amd64 Packages [22.1 kB]
- Hit:6 http://archive.ubuntu.com/ubuntu noble InRelease
- Get:7 http://archive.ubuntu.com/ubuntu noble-updates InRelease [126 kB]
- Get:8 http://archive.ubuntu.com/ubuntu noble-backports InRelease [126 kB]
- Get:9 http://archive.ubuntu.com/ubuntu noble-updates/universe amd64 Packages [1382 kB]
- Get:10 http://archive.ubuntu.com/ubuntu noble-updates/main amd64 Packages [1399 kB]
- Get:11 http://archive.ubuntu.com/ubuntu noble-updates/restricted amd64 Packages [1478 kB]
- Get:12 http://archive.ubuntu.com/ubuntu noble-updates/multiverse amd64 Packages [26.7 kB]
- Get:13 http://archive.ubuntu.com/ubuntu noble-backports/universe amd64 Packages [31.8 kB]
- Get:14 http://archive.ubuntu.com/ubuntu noble-backports/main amd64 Packages [48.0 kB]
- Fetched 8367 kB in 1s (10.7 MB/s)
- Reading package lists...
- 


<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[arrayjit.dev] synchronised (file:///src)
[neural_nets_lib.dev] synchronised (file:///src)


[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).


The following system packages will first need to be installed:
libffi-dev pkg-config


<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>


+ /usr/bin/sudo "apt-get" "install" "-qq" "-yy" "libffi-dev" "pkg-config"
- debconf: delaying package configuration, since apt-utils is not installed
- Selecting previously unselected package libpkgconf3:amd64.
- (Reading database ... 16365 files and directories currently installed.)
- Preparing to unpack .../libpkgconf3_1.8.1-2build1_amd64.deb ...
- Unpacking libpkgconf3:amd64 (1.8.1-2build1) ...
- Selecting previously unselected package pkgconf-bin.
- Preparing to unpack .../pkgconf-bin_1.8.1-2build1_amd64.deb ...
- Unpacking pkgconf-bin (1.8.1-2build1) ...
- Selecting previously unselected package pkgconf:amd64.
- Preparing to unpack .../pkgconf_1.8.1-2build1_amd64.deb ...
- Unpacking pkgconf:amd64 (1.8.1-2build1) ...
- Selecting previously unselected package pkg-config:amd64.
- Preparing to unpack .../pkg-config_1.8.1-2build1_amd64.deb ...
- Unpacking pkg-config:amd64 (1.8.1-2build1) ...
- Selecting previously unselected package libffi-dev:amd64.
- Preparing to unpack .../libffi-dev_3.4.6-1build1_amd64.deb ...
- Unpacking libffi-dev:amd64 (3.4.6-1build1) ...
- Setting up libffi-dev:amd64 (3.4.6-1build1) ...
- Setting up libpkgconf3:amd64 (1.8.1-2build1) ...
- Setting up pkgconf-bin (1.8.1-2build1) ...
- Setting up pkgconf:amd64 (1.8.1-2build1) ...
- Setting up pkg-config:amd64 (1.8.1-2build1) ...
- Processing triggers for libc-bin (2.39-0ubuntu8.4) ...
2025-05-22 20:13.47 ---> saved as "0f3a74b2e6fea44d7a6f53bee0286ee6dc1b4b761f1dded97ccee4dd149514b2"


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following actions will be performed:
=== install 75 packages
- install angstrom                0.16.1
- install astring                 0.8.5
- install backoff                 0.1.1
- install base                    v0.17.2
- install bigarray-compat         1.1.0
- install bigstringaf             0.10.0
- install camlp-streams           5.0.1
- install cmdliner                1.3.0
- install conf-libffi             2.0.0
- install conf-pkg-config         4
- install cppo                    1.8.0
- install csexp                   1.5.2
- install ctypes                  0.23.0
- install ctypes-foreign          0.23.0
- install dune                    3.19.0
- install dune-configurator       3.19.0
- install fieldslib               v0.17.0
- install fmt                     0.10.0
- install integers                0.7.0
- install jane-street-headers     v0.17.0
- install jst-config              v0.17.0
- install logs                    0.8.0
- install mdx                     2.5.0
- install mtime                   2.1.0
- install multicore-magic         2.3.1
- install num                     1.5-1
- install ocaml-compiler-libs     v0.17.0
- install ocaml-syntax-shims      1.0.0
- install ocaml-version           4.0.0
- install ocaml_intrinsics_kernel v0.17.1
- install ocamlbuild              0.16.1
- install ocamlfind               1.9.8
- install parsexp                 v0.17.0
- install pprint                  20230830
- install ppx_assert              v0.17.0
- install ppx_base                v0.17.0
- install ppx_cold                v0.17.0
- install ppx_compare             v0.17.0
- install ppx_derivers            1.2.1
- install ppx_deriving            6.0.3
- install ppx_enumerate           v0.17.0
- install ppx_expect              v0.17.2
- install ppx_fields_conv         v0.17.0
- install ppx_globalize           v0.17.0
- install ppx_hash                v0.17.0
- install ppx_here                v0.17.0
- install ppx_inline_test         v0.17.0
- install ppx_minidebug           2.2.0
- install ppx_optcomp             v0.17.0
- install ppx_sexp_conv           v0.17.0
- install ppx_string              v0.17.0
- install ppx_variants_conv       v0.17.0
- install ppxlib                  0.35.0
- install ppxlib_jane             v0.17.2
- install printbox                0.12
- install printbox-ext-plot       0.12
- install printbox-html           0.12
- install printbox-md             0.12
- install printbox-text           0.12
- install ptime                   1.2.0
- install re                      1.12.0
- install result                  1.5
- install saturn_lockfree         0.5.0
- install seq                     base
- install sexplib                 v0.17.0
- install sexplib0                v0.17.0
- install stdio                   v0.17.0
- install stdlib-shims            0.3.0
- install thread-local-storage    0.2
- install time_now                v0.17.0
- install topkg                   1.0.8
- install tyxml                   4.6.0
- install uucp                    16.0.0
- install uutf                    1.0.4
- install variantslib             v0.17.0


<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved backoff.0.1.1  (cached)
-> retrieved angstrom.0.16.1  (cached)
-> retrieved astring.0.8.5  (cached)
-> retrieved bigarray-compat.1.1.0  (cached)
-> retrieved bigstringaf.0.10.0  (cached)
-> retrieved base.v0.17.2  (cached)
-> retrieved camlp-streams.5.0.1  (cached)
-> retrieved cmdliner.1.3.0  (cached)
-> retrieved cppo.1.8.0  (cached)
-> installed conf-pkg-config.4
-> retrieved csexp.1.5.2  (cached)
-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0  (cached)
-> installed conf-libffi.2.0.0
-> retrieved fieldslib.v0.17.0  (cached)
-> retrieved fmt.0.10.0  (cached)
-> retrieved integers.0.7.0  (cached)
-> retrieved jane-street-headers.v0.17.0  (cached)
-> retrieved jst-config.v0.17.0  (cached)
-> retrieved logs.0.8.0  (cached)
-> retrieved mtime.2.1.0  (cached)
-> retrieved mdx.2.5.0  (cached)
-> retrieved multicore-magic.2.3.1  (cached)
-> retrieved num.1.5-1  (cached)
-> retrieved ocaml-compiler-libs.v0.17.0  (cached)
-> retrieved ocaml-syntax-shims.1.0.0  (cached)
-> retrieved ocaml-version.4.0.0  (cached)
-> retrieved ocaml_intrinsics_kernel.v0.17.1  (cached)
-> retrieved ocamlbuild.0.16.1  (cached)
-> retrieved ocamlfind.1.9.8  (cached)
-> retrieved parsexp.v0.17.0  (cached)
-> retrieved pprint.20230830  (cached)
-> retrieved ppx_assert.v0.17.0  (cached)
-> retrieved ppx_base.v0.17.0  (cached)
-> retrieved ppx_compare.v0.17.0  (cached)
-> retrieved ppx_cold.v0.17.0  (cached)
-> retrieved ppx_derivers.1.2.1  (cached)
-> retrieved ppx_enumerate.v0.17.0  (cached)
-> retrieved ppx_deriving.6.0.3  (cached)
-> retrieved dune.3.19.0, dune-configurator.3.19.0  (cached)
-> retrieved ppx_expect.v0.17.2  (cached)
-> retrieved ppx_fields_conv.v0.17.0  (cached)
-> retrieved ppx_globalize.v0.17.0  (cached)
-> retrieved ppx_hash.v0.17.0  (cached)
-> retrieved ppx_here.v0.17.0  (cached)
-> retrieved ppx_inline_test.v0.17.0  (cached)
-> retrieved ppx_optcomp.v0.17.0  (cached)
-> retrieved ppx_minidebug.2.2.0  (cached)
-> installed cmdliner.1.3.0
-> installed num.1.5-1
-> retrieved ppx_sexp_conv.v0.17.0  (cached)
-> retrieved ppx_string.v0.17.0  (cached)
-> retrieved ppx_variants_conv.v0.17.0  (cached)
-> retrieved ppxlib_jane.v0.17.2  (cached)
-> retrieved ptime.1.2.0  (cached)
-> retrieved re.1.12.0  (cached)
-> retrieved result.1.5  (cached)
-> retrieved saturn_lockfree.0.5.0  (cached)
-> retrieved seq.base  (cached)
-> installed seq.base
-> retrieved sexplib.v0.17.0  (cached)
-> retrieved sexplib0.v0.17.0  (cached)
-> retrieved stdio.v0.17.0  (cached)
-> retrieved ppxlib.0.35.0  (cached)
-> retrieved stdlib-shims.0.3.0  (cached)
-> retrieved thread-local-storage.0.2  (cached)
-> retrieved time_now.v0.17.0  (cached)
-> retrieved topkg.1.0.8  (cached)
-> retrieved tyxml.4.6.0  (cached)
-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12  (cached)
-> retrieved uutf.1.0.4  (cached)
-> retrieved variantslib.v0.17.0  (cached)
-> retrieved uucp.16.0.0  (cached)
-> installed ocamlbuild.0.16.1
-> installed ocamlfind.1.9.8
-> installed topkg.1.0.8
-> installed uutf.1.0.4
-> installed mtime.2.1.0
-> installed fmt.0.10.0
-> installed ptime.1.2.0
-> installed astring.0.8.5
-> installed logs.0.8.0
-> installed dune.3.19.0
-> installed jane-street-headers.v0.17.0
-> installed ppx_derivers.1.2.1
-> installed csexp.1.5.2
-> installed backoff.0.1.1
-> installed bigarray-compat.1.1.0
-> installed camlp-streams.5.0.1
-> installed multicore-magic.2.3.1
-> installed ocaml-syntax-shims.1.0.0
-> installed ocaml-version.4.0.0
-> installed ocaml_intrinsics_kernel.v0.17.1
-> installed pprint.20230830
-> installed printbox.0.12
-> installed result.1.5
-> installed sexplib0.v0.17.0
-> installed stdlib-shims.0.3.0
-> installed thread-local-storage.0.2
-> installed ocaml-compiler-libs.v0.17.0
-> installed re.1.12.0
-> installed cppo.1.8.0
-> installed integers.0.7.0
-> installed saturn_lockfree.0.5.0
-> installed parsexp.v0.17.0
-> installed dune-configurator.3.19.0
-> installed bigstringaf.0.10.0
-> installed sexplib.v0.17.0
-> installed mdx.2.5.0
-> installed angstrom.0.16.1
-> installed tyxml.4.6.0
-> installed printbox-html.0.12
-> installed uucp.16.0.0
-> installed printbox-text.0.12
-> installed ctypes.0.23.0
-> installed printbox-md.0.12
-> installed printbox-ext-plot.0.12
-> installed base.v0.17.2
-> installed fieldslib.v0.17.0
-> installed variantslib.v0.17.0
-> installed stdio.v0.17.0
-> installed ctypes-foreign.0.23.0
-> installed ppxlib.0.35.0
-> installed ppx_optcomp.v0.17.0
-> installed ppxlib_jane.v0.17.2
-> installed ppx_here.v0.17.0
-> installed ppx_cold.v0.17.0
-> installed ppx_variants_conv.v0.17.0
-> installed ppx_fields_conv.v0.17.0
-> installed ppx_globalize.v0.17.0
-> installed ppx_enumerate.v0.17.0
-> installed ppx_deriving.6.0.3
-> installed ppx_compare.v0.17.0
-> installed ppx_sexp_conv.v0.17.0
-> installed ppx_hash.v0.17.0
-> installed ppx_assert.v0.17.0
-> installed ppx_base.v0.17.0
-> installed ppx_minidebug.2.2.0
-> installed jst-config.v0.17.0
-> installed ppx_string.v0.17.0
-> installed time_now.v0.17.0
-> installed ppx_inline_test.v0.17.0
-> installed ppx_expect.v0.17.2
Done.
# To update the current shell environment, run: eval $(opam env)
2025-05-22 20:16.54 ---> saved as "46755afc4c92b6140007a045587686a4c0d1f6b11d5d0cd761382351cee9db50"


/src: (copy (src .) (dst /src))
2025-05-22 20:16.54 ---> saved as "4eb2eb87461e823cad22b73eb6eac4006e6c64c1a82246b0c82286ed6c4b294d"


/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
(cd _build/default/test/config && ../../arrayjit/bin/read_config.exe --read=backend)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/config/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Wrote value of 'backend' to ocannl_backend.txt
(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/dune", lines 30-40, characters 0-281:
30 | (rule
31 |  (alias runtest)
32 |  (target
33 |   (dir log_files))
34 |  (action
35 |   (run
36 |    %{dep:micrograd_demo_logging.exe}
37 |    "--ocannl_debug_backend=text"
38 |    "--ocannl_log_file_stem=micrograd_demo_logging"
39 |    "--ocannl_log_main_domain_to_stdout=false"
40 |    "--ocannl_debug_log_to_stream_files=true")))
(cd _build/default/test && ./micrograd_demo_logging.exe --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_stream_files=true)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Fatal error: exception Sys_error("log_files/micrograd_demo_logging.log: No such file or directory")
Raised by primitive operation at Stdlib.open_out_gen in file "stdlib.ml", line 331, characters 29-55
Called from Stdlib.open_out in file "stdlib.ml" (inlined), line 336, characters 2-74
Called from Minidebug_runtime.shared_config.Result.find_ch in file "minidebug_runtime.ml", line 98, characters 17-34
Called from Minidebug_runtime.shared_config.Result.current_ch in file "minidebug_runtime.ml", line 125, characters 34-44
Called from CamlinternalLazy.do_force_val_block in file "camlinternalLazy.ml", line 63, characters 15-25
Called from Stdlib__Lazy.force_val in file "lazy.ml" (inlined), line 56, characters 18-61
Called from Minidebug_runtime.shared_config.Result.(!!) in file "minidebug_runtime.ml" (inlined), line 126, characters 21-40
Called from Minidebug_runtime.shared_config.Result.debug_ch in file "minidebug_runtime.ml", line 142, characters 6-18
Called from Minidebug_runtime.PrintBox.close_log_impl.close_tree in file "minidebug_runtime.ml", line 1570, characters 15-26
Called from Minidebug_runtime.PrintBox.log_value_sexp in file "minidebug_runtime.ml", lines 1985-1986, characters 9-73
Called from Utils.set_log_level in file "arrayjit/lib/utils.ml", line 418, characters 2-42
Re-raised at Utils.set_log_level in file "arrayjit/lib/utils.ml", lines 415-418, characters 29-42
Called from Dune__exe__Micrograd_demo_logging in file "test/micrograd_demo_logging.ml", line 16, characters 2-23
(cd _build/default/test_ppx && ./test_ppx_op.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/da4f699ee42e1c93691db104da29b8ef/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/micrograd_demo.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/micrograd_demo.ml _build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/micrograd_demo.ml.corrected
diff --git a/_build/default/test/micrograd_demo.ml b/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/micrograd_demo.ml.corrected
index 77e46c6..ab81526 100644
--- a/_build/default/test/micrograd_demo.ml
+++ b/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/micrograd_demo.ml.corrected
@@ -52,15 +52,14 @@ let%expect_test "Micrograd README basic example" =
│├┼───────┤       │
│││ -4.00 │       │
│└┴───────┘       │
-    └─────────────────┘
-    ┌────────────────────────┐
-                                                              │[0]: a shape 0:1  grad_a│
-                                                              │┌┬─────────┐            │
-                                                              │││axis 0   │            │
-                                                              │├┼─────────┤            │
-                                                              │││ 1.38e+2 │            │
-                                                              │└┴─────────┘            │
-                                                              └────────────────────────┘
+    └─────────────────┘┌────────────────────────┐
+    │[0]: a shape 0:1  grad_a│
+    │┌┬─────────┐            │
+    │││axis 0   │            │
+    │├┼─────────┤            │
+    │││ 1.38e+2 │            │
+    │└┴─────────┘            │
+    └────────────────────────┘
|}];
Tensor.print ~with_code:false ~with_grad:true `Default b;
[%expect
@@ -72,15 +71,14 @@ let%expect_test "Micrograd README basic example" =
│├┼──────┤        │
│││ 2.00 │        │
│└┴──────┘        │
-    └─────────────────┘
-    ┌────────────────────────┐
-                                                              │[2]: b shape 0:1  grad_b│
-                                                              │┌┬─────────┐            │
-                                                              │││axis 0   │            │
-                                                              │├┼─────────┤            │
-                                                              │││ 6.45e+2 │            │
-                                                              │└┴─────────┘            │
-                                                              └────────────────────────┘
+    └─────────────────┘┌────────────────────────┐
+    │[2]: b shape 0:1  grad_b│
+    │┌┬─────────┐            │
+    │││axis 0   │            │
+    │├┼─────────┤            │
+    │││ 6.45e+2 │            │
+    │└┴─────────┘            │
+    └────────────────────────┘
|}]


let%expect_test "Micrograd half-moons example" =
File "test/hello_world_op.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/hello_world_op.ml _build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/hello_world_op.ml.corrected
diff --git a/_build/default/test/hello_world_op.ml b/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/hello_world_op.ml.corrected
index ba9d7ef..6bfa309 100644
--- a/_build/default/test/hello_world_op.ml
+++ b/_build/.sandbox/fcf396dbab86ff3fb93a9f41553dc58b/default/test/hello_world_op.ml.corrected
@@ -102,36 +102,46 @@ let%expect_test "Print constant tensor" =
let%op hey = [ (1, 2, 3); (4, 5, 6) ] in
Train.forward_and_forget backend ctx hey;
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey;
-  [%expect {| [1.00, 2.00, 3.00; 4.00, 5.00, 6.00] |}];
+  [%expect {|
+    [0]: [  1.00 , 2.00 , 3.00  ;  4.00 , 5.00 , 6.00  ]_hey shape 1:3->0:2  [
+       1.00 , 2.00 , 3.00
+      ;  4.00 , 5.00 , 6.00
+    ]
+    |}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;
[%expect
{|
-    ┌─────────────────────────────────────────────────────────────┐
-    │[0]: [1.00, 2.00, 3.00; 4.00, 5.00, 6.00]_hey shape 1:3->0:2 │
-    │┌──────┬──────────────────┐                                  │
-    ││      │axis 1            │                                  │
-    │├──────┼──────────────────┤                                  │
-    ││axis 0│ 1.00  2.00  3.00 │                                  │
-    ││      │ 4.00  5.00  6.00 │                                  │
-    │└──────┴──────────────────┘                                  │
-    └─────────────────────────────────────────────────────────────┘
+    ┌────────────────────────────────────────────────────────────────────────┐
+    │[0]: [  1.00 , 2.00 , 3.00  ;  4.00 , 5.00 , 6.00  ]_hey shape 1:3->0:2 │
+    │┌──────┬──────────────────┐                                             │
+    ││      │axis 1            │                                             │
+    │├──────┼──────────────────┤                                             │
+    ││axis 0│ 1.00  2.00  3.00 │                                             │
+    ││      │ 4.00  5.00  6.00 │                                             │
+    │└──────┴──────────────────┘                                             │
+    └────────────────────────────────────────────────────────────────────────┘
|}];
let%op hoo = [| [ 1; 2; 3 ]; [ 4; 5; 6 ] |] in
Train.forward_and_forget backend ctx hoo;
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo;
-  [%expect {| [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|] |}];
+  [%expect {|
+    [1]: [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]_hoo shape 0:2|1:3  [|
+      [ 1.00 ; 2.00 ; 3.00 ]
+      ; [ 4.00 ; 5.00 ; 6.00 ]
+    |]
+    |}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo;
[%expect
{|
-    ┌──────────────────────────────────────────────────────────────────┐
-    │[1]: [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|]_hoo shape 0:2|1:3 │
-    │┌──────┬──────────────────┐                                       │
-    ││      │axis 1            │                                       │
-    │├──────┼──────────────────┤                                       │
-    ││axis 0│ 1.00  2.00  3.00 │                                       │
-    ││      │ 4.00  5.00  6.00 │                                       │
-    │└──────┴──────────────────┘                                       │
-    └──────────────────────────────────────────────────────────────────┘
+    ┌─────────────────────────────────────────────────────────────────────────────┐
+    │[1]: [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]_hoo shape 0:2|1:3 │
+    │┌──────┬──────────────────┐                                                  │
+    ││      │axis 1            │                                                  │
+    │├──────┼──────────────────┤                                                  │
+    ││axis 0│ 1.00  2.00  3.00 │                                                  │
+    ││      │ 4.00  5.00  6.00 │                                                  │
+    │└──────┴──────────────────┘                                                  │
+    └─────────────────────────────────────────────────────────────────────────────┘
|}];
let%op hey2 =
[
@@ -145,10 +155,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey2;
[%expect
{|
-    [(1.00, 2.00, 3.00), (4.00, 5.00, 6.00);
-      (7.00, 8.00, 9.00), (10.00, 11.00, 12.00);
-      (13.00, 14.00, 15.00), (16.00, 17.00, 18.00);
-      (19.00, 20.00, 21.00), (22.00, 23.00, 24.00)]
+    [2]: c4x2x3_hey2 shape 1:2,2:3->0:4  [
+       ( 1.00 , 2.00 , 3.00 ) , ( 4.00 , 5.00 , 6.00 )
+      ;  ( 7.00 , 8.00 , 9.00 ) , ( 10.00 , 11.00 , 12.00 )
+      ;  ( 13.00 , 14.00 , 15.00 ) , ( 16.00 , 17.00 , 18.00 )
+      ;  ( 19.00 , 20.00 , 21.00 ) , ( 22.00 , 23.00 , 24.00 )
+    ]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2;
[%expect
@@ -178,10 +190,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo2;
[%expect
{|
-    [|[[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]];
-      [[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]];
-      [[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]];
-      [[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]]|]
+    [3]: c4x2x3_hoo2 shape 0:4|1:2,2:3  [|
+      [ [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] ]
+      ; [ [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] ]
+      ; [ [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] ]
+      ; [ [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] ]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo2;
[%expect
@@ -209,10 +223,12 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo;
[%expect
{|
-    [|[|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|];
-      [|[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]|];
-      [|[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]|];
-      [|[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]|]|]
+    [4]: c4x2x3_heyhoo shape 0:4,1:2|2:3  [|
+      [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]
+      ; [| [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] |]
+      ; [| [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] |]
+      ; [| [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] |]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo;
[%expect
@@ -240,15 +256,24 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo2;
[%expect
{|
-    [|
-      [|[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];
-        [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]|];
-      [|[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];
-        [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]|];
-      [|[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];
-        [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]|];
-      [|[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];
-        [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]|]|]
+    [5]: c4x2x3x2_heyhoo2 shape 0:4,1:2|2:3,3:2  [|
+      [|
+        [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]
+        ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]
+      |]
+      ; [|
+        [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]
+        ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]
+      |]
+      ; [|
+        [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]
+        ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]
+      |]
+      ; [|
+        [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]
+        ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]
+      |]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo2;
[%expect
@@ -293,17 +318,28 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo3;
[%expect
{|
-    [|
+    [6]: c2x2x2x3x2_heyhoo3 shape 0:2,1:2|2:2,3:3,4:2  [|
[|
-        [[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]];
-          [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]];
-        [[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]];
-          [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]]|];
-      [|
-        [[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]];
-          [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]];
-        [[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]];
-          [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]]|]|]
+        [
+          [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ]
+          ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ]
+        ]
+        ; [
+          [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ]
+          ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ]
+        ]
+      |]
+      ; [|
+        [
+          [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ]
+          ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ]
+        ]
+        ; [
+          [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ]
+          ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ]
+        ]
+      |]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo3;
[%expect
@@ -353,17 +389,28 @@ let%expect_test "Print constant tensor" =
Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo4;
[%expect
{|
-    [|
-      [
-        [[1.00, 31.00; 2.00, 32.00; 3.00, 33.00];
-          [4.00, 34.00; 5.00, 35.00; 6.00, 36.00]];
-        [[7.00, 37.00; 8.00, 38.00; 9.00, 39.00];
-          [10.00, 40.00; 11.00, 41.00; 12.00, 42.00]]];
+    [7]: c2x2x2x3x2_heyhoo4 shape 0:2|4:2->1:2,2:2,3:3  [|
[
-        [[13.00, 43.00; 14.00, 44.00; 15.00, 45.00];
-          [16.00, 46.00; 17.00, 47.00; 18.00, 48.00]];
-        [[19.00, 49.00; 20.00, 50.00; 21.00, 51.00];
-          [22.00, 52.00; 23.00, 53.00; 24.00, 54.00]]]|]
+        [
+          [  1.00 , 31.00  ;  2.00 , 32.00  ;  3.00 , 33.00  ]
+          ; [  4.00 , 34.00  ;  5.00 , 35.00  ;  6.00 , 36.00  ]
+        ]
+        ; [
+          [  7.00 , 37.00  ;  8.00 , 38.00  ;  9.00 , 39.00  ]
+          ; [  10.00 , 40.00  ;  11.00 , 41.00  ;  12.00 , 42.00  ]
+        ]
+      ]
+      ; [
+        [
+          [  13.00 , 43.00  ;  14.00 , 44.00  ;  15.00 , 45.00  ]
+          ; [  16.00 , 46.00  ;  17.00 , 47.00  ;  18.00 , 48.00  ]
+        ]
+        ; [
+          [  19.00 , 49.00  ;  20.00 , 50.00  ;  21.00 , 51.00  ]
+          ; [  22.00 , 52.00  ;  23.00 , 53.00  ;  24.00 , 54.00  ]
+        ]
+      ]
+    |]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo4;
[%expect
@@ -462,8 +509,29 @@ let%expect_test "Big matrix" =
Tensor.print ~with_code:false ~with_grad:false `Inline zero_to_twenty;
[%expect
{|
-    [0.00; 1.00; 2.00; 3.00; 4.00; 5.00; 6.00; 7.00; 8.00; 9.00; 10.00; 11.00;
-      12.00; 13.00; 14.00; 15.00; 16.00; 17.00; 18.00; 19.00; 20.00]
+    [2]: 0...20 shape 0:21  [
+      0.00
+      ; 1.00
+      ; 2.00
+      ; 3.00
+      ; 4.00
+      ; 5.00
+      ; 6.00
+      ; 7.00
+      ; 8.00
+      ; 9.00
+      ; 10.00
+      ; 11.00
+      ; 12.00
+      ; 13.00
+      ; 14.00
+      ; 15.00
+      ; 16.00
+      ; 17.00
+      ; 18.00
+      ; 19.00
+      ; 20.00
+    ]
|}];
Tensor.print ~with_code:false ~with_grad:false `Default zero_to_twenty;
[%expect
(cd _build/default/test && ./moons_demo_parallel_run.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
("Set log_level to" 1)
└─{orphaned from #2}
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Properties of devices:
(multicore_devices
(device ((device_name CPU) (device_ordinal 0) (num_domains 72))))
@!Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Not found, using default false
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout
Not found, using default false
Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453
Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087
Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382
Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039
Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216
Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512
Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081
Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141
Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385
Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263
Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603
Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902
Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024
Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685
Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407
Batch=959, step=960, lr=0.196250, batch loss=0.245136, epoch loss=44.035543
Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049
Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829
Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269
Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952
Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952
Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138
Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381
Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025
Batch=239, step=1440, lr=0.194250, batch loss=0.347897, epoch loss=1.058921
Batch=299, step=1500, lr=0.194000, batch loss=0.247348, epoch loss=1.306269
Batch=359, step=1560, lr=0.193750, batch loss=0.316559, epoch loss=1.622828
Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.935563
Batch=479, step=1680, lr=0.193250, batch loss=0.276268, epoch loss=2.211831
Batch=539, step=1740, lr=0.193000, batch loss=0.209826, epoch loss=2.421657
Batch=599, step=1800, lr=0.192750, batch loss=0.250384, epoch loss=2.672042
Batch=659, step=1860, lr=0.192500, batch loss=0.367201, epoch loss=3.039243
Batch=719, step=1920, lr=0.192250, batch loss=0.354917, epoch loss=3.394160
Batch=779, step=1980, lr=0.192000, batch loss=0.381382, epoch loss=3.775542
Batch=839, step=2040, lr=0.191750, batch loss=0.339637, epoch loss=4.115179
Batch=899, step=2100, lr=0.191500, batch loss=0.295234, epoch loss=4.410413
Batch=959, step=2160, lr=0.191250, batch loss=0.214033, epoch loss=4.624446
Batch=1019, step=2220, lr=0.191000, batch loss=0.330972, epoch loss=4.955419
Batch=1079, step=2280, lr=0.190750, batch loss=0.208236, epoch loss=5.163654
Batch=1139, step=2340, lr=0.190500, batch loss=0.278374, epoch loss=5.442028
Batch=1199, step=2400, lr=0.190250, batch loss=0.220793, epoch loss=5.662821
Epoch=1, step=2400, lr=0.190250, epoch loss=5.662821
Batch=59, step=2460, lr=0.190000, batch loss=0.230363, epoch loss=0.230363
Batch=119, step=2520, lr=0.189750, batch loss=0.195962, epoch loss=0.426325
Batch=179, step=2580, lr=0.189500, batch loss=0.221156, epoch loss=0.647481
Batch=239, step=2640, lr=0.189250, batch loss=0.328098, epoch loss=0.975578
Batch=299, step=2700, lr=0.189000, batch loss=0.202947, epoch loss=1.178525
Batch=359, step=2760, lr=0.188750, batch loss=0.289890, epoch loss=1.468415
Batch=419, step=2820, lr=0.188500, batch loss=0.281744, epoch loss=1.750160
Batch=479, step=2880, lr=0.188250, batch loss=0.264844, epoch loss=2.015004
Batch=539, step=2940, lr=0.188000, batch loss=0.203798, epoch loss=2.218802
Batch=599, step=3000, lr=0.187750, batch loss=0.248079, epoch loss=2.466881
Batch=659, step=3060, lr=0.187500, batch loss=0.345056, epoch loss=2.811937
Batch=719, step=3120, lr=0.187250, batch loss=0.343542, epoch loss=3.155479
Batch=779, step=3180, lr=0.187000, batch loss=0.366989, epoch loss=3.522468
Batch=839, step=3240, lr=0.186750, batch loss=0.321779, epoch loss=3.844248
Batch=899, step=3300, lr=0.186500, batch loss=0.283865, epoch loss=4.128112
Batch=959, step=3360, lr=0.186250, batch loss=0.214411, epoch loss=4.342523
Batch=1019, step=3420, lr=0.186000, batch loss=0.306400, epoch loss=4.648923
Batch=1079, step=3480, lr=0.185750, batch loss=0.177313, epoch loss=4.826236
Batch=1139, step=3540, lr=0.185500, batch loss=0.235576, epoch loss=5.061812
Batch=1199, step=3600, lr=0.185250, batch loss=0.197911, epoch loss=5.259723
Epoch=2, step=3600, lr=0.185250, epoch loss=5.259723
Batch=59, step=3660, lr=0.185000, batch loss=0.226539, epoch loss=0.226539
Batch=119, step=3720, lr=0.184750, batch loss=0.191802, epoch loss=0.418341
Batch=179, step=3780, lr=0.184500, batch loss=0.210712, epoch loss=0.629053
Batch=239, step=3840, lr=0.184250, batch loss=0.314104, epoch loss=0.943157
Batch=299, step=3900, lr=0.184000, batch loss=0.206011, epoch loss=1.149168
Batch=359, step=3960, lr=0.183750, batch loss=0.291253, epoch loss=1.440420
Batch=419, step=4020, lr=0.183500, batch loss=0.297434, epoch loss=1.737854
Batch=479, step=4080, lr=0.183250, batch loss=0.260511, epoch loss=1.998365
Batch=539, step=4140, lr=0.183000, batch loss=0.195229, epoch loss=2.193593
Batch=599, step=4200, lr=0.182750, batch loss=0.230395, epoch loss=2.423989
Batch=659, step=4260, lr=0.182500, batch loss=0.337132, epoch loss=2.761121
Batch=719, step=4320, lr=0.182250, batch loss=0.347122, epoch loss=3.108243
Batch=779, step=4380, lr=0.182000, batch loss=0.346320, epoch loss=3.454563
Batch=839, step=4440, lr=0.181750, batch loss=0.317770, epoch loss=3.772333
Batch=899, step=4500, lr=0.181500, batch loss=0.283906, epoch loss=4.056239
Batch=959, step=4560, lr=0.181250, batch loss=0.238371, epoch loss=4.294610
Batch=1019, step=4620, lr=0.181000, batch loss=0.336891, epoch loss=4.631501
Batch=1079, step=4680, lr=0.180750, batch loss=0.208592, epoch loss=4.840094
Batch=1139, step=4740, lr=0.180500, batch loss=0.249212, epoch loss=5.089306
Batch=1199, step=4800, lr=0.180250, batch loss=0.191768, epoch loss=5.281074
Epoch=3, step=4800, lr=0.180250, epoch loss=5.281074
Batch=59, step=4860, lr=0.180000, batch loss=0.228079, epoch loss=0.228079
Batch=119, step=4920, lr=0.179750, batch loss=0.190219, epoch loss=0.418298
Batch=179, step=4980, lr=0.179500, batch loss=0.205905, epoch loss=0.624203
Batch=239, step=5040, lr=0.179250, batch loss=0.309067, epoch loss=0.933269
Batch=299, step=5100, lr=0.179000, batch loss=0.204593, epoch loss=1.137862
Batch=359, step=5160, lr=0.178750, batch loss=0.271139, epoch loss=1.409001
Batch=419, step=5220, lr=0.178500, batch loss=0.264055, epoch loss=1.673056
Batch=479, step=5280, lr=0.178250, batch loss=0.239692, epoch loss=1.912748
Batch=539, step=5340, lr=0.178000, batch loss=0.189445, epoch loss=2.102194
Batch=599, step=5400, lr=0.177750, batch loss=0.231019, epoch loss=2.333213
Batch=659, step=5460, lr=0.177500, batch loss=0.323592, epoch loss=2.656805
Batch=719, step=5520, lr=0.177250, batch loss=0.325867, epoch loss=2.982672
Batch=779, step=5580, lr=0.177000, batch loss=0.343179, epoch loss=3.325851
Batch=839, step=5640, lr=0.176750, batch loss=0.309345, epoch loss=3.635195
Batch=899, step=5700, lr=0.176500, batch loss=0.273171, epoch loss=3.908366
Batch=959, step=5760, lr=0.176250, batch loss=0.214921, epoch loss=4.123287
Batch=1019, step=5820, lr=0.176000, batch loss=0.339150, epoch loss=4.462436
Batch=1079, step=5880, lr=0.175750, batch loss=0.207828, epoch loss=4.670264
Batch=1139, step=5940, lr=0.175500, batch loss=0.240055, epoch loss=4.910319
Batch=1199, step=6000, lr=0.175250, batch loss=0.186862, epoch loss=5.097180
Epoch=4, step=6000, lr=0.175250, epoch loss=5.097180
Batch=59, step=6060, lr=0.175000, batch loss=0.230529, epoch loss=0.230529
Batch=119, step=6120, lr=0.174750, batch loss=0.194291, epoch loss=0.424820
Batch=179, step=6180, lr=0.174500, batch loss=0.201541, epoch loss=0.626361
Batch=239, step=6240, lr=0.174250, batch loss=0.302380, epoch loss=0.928741
Batch=299, step=6300, lr=0.174000, batch loss=0.203930, epoch loss=1.132671
Batch=359, step=6360, lr=0.173750, batch loss=0.266115, epoch loss=1.398786
Batch=419, step=6420, lr=0.173500, batch loss=0.265282, epoch loss=1.664067
Batch=479, step=6480, lr=0.173250, batch loss=0.243319, epoch loss=1.907387
Batch=539, step=6540, lr=0.173000, batch loss=0.192969, epoch loss=2.100355
Batch=599, step=6600, lr=0.172750, batch loss=0.234500, epoch loss=2.334855
Batch=659, step=6660, lr=0.172500, batch loss=0.312107, epoch loss=2.646963
Batch=719, step=6720, lr=0.172250, batch loss=0.314221, epoch loss=2.961184
Batch=779, step=6780, lr=0.172000, batch loss=0.333223, epoch loss=3.294407
Batch=839, step=6840, lr=0.171750, batch loss=0.303757, epoch loss=3.598164
Batch=899, step=6900, lr=0.171500, batch loss=0.268468, epoch loss=3.866633
Batch=959, step=6960, lr=0.171250, batch loss=0.211039, epoch loss=4.077671
Batch=1019, step=7020, lr=0.171000, batch loss=0.330462, epoch loss=4.408133
Batch=1079, step=7080, lr=0.170750, batch loss=0.180866, epoch loss=4.588999
Batch=1139, step=7140, lr=0.170500, batch loss=0.216313, epoch loss=4.805312
Batch=1199, step=7200, lr=0.170250, batch loss=0.181911, epoch loss=4.987223
Epoch=5, step=7200, lr=0.170250, epoch loss=4.987223
Batch=59, step=7260, lr=0.170000, batch loss=0.232877, epoch loss=0.232877
Batch=119, step=7320, lr=0.169750, batch loss=0.184424, epoch loss=0.417301
Batch=179, step=7380, lr=0.169500, batch loss=0.196292, epoch loss=0.613593
Batch=239, step=7440, lr=0.169250, batch loss=0.290823, epoch loss=0.904416
Batch=299, step=7500, lr=0.169000, batch loss=0.200837, epoch loss=1.105253
Batch=359, step=7560, lr=0.168750, batch loss=0.258435, epoch loss=1.363689
Batch=419, step=7620, lr=0.168500, batch loss=0.256808, epoch loss=1.620497
Batch=479, step=7680, lr=0.168250, batch loss=0.235998, epoch loss=1.856495
Batch=539, step=7740, lr=0.168000, batch loss=0.187895, epoch loss=2.044390
Batch=599, step=7800, lr=0.167750, batch loss=0.223924, epoch loss=2.268314
Batch=659, step=7860, lr=0.167500, batch loss=0.305915, epoch loss=2.574229
Batch=719, step=7920, lr=0.167250, batch loss=0.309289, epoch loss=2.883518
Batch=779, step=7980, lr=0.167000, batch loss=0.329942, epoch loss=3.213460
Batch=839, step=8040, lr=0.166750, batch loss=0.292425, epoch loss=3.505885
Batch=899, step=8100, lr=0.166500, batch loss=0.261775, epoch loss=3.767660
Batch=959, step=8160, lr=0.166250, batch loss=0.193295, epoch loss=3.960955
Batch=1019, step=8220, lr=0.166000, batch loss=0.314033, epoch loss=4.274988
Batch=1079, step=8280, lr=0.165750, batch loss=0.172099, epoch loss=4.447087
Batch=1139, step=8340, lr=0.165500, batch loss=0.209742, epoch loss=4.656829
Batch=1199, step=8400, lr=0.165250, batch loss=0.178275, epoch loss=4.835103
Epoch=6, step=8400, lr=0.165250, epoch loss=4.835103
Batch=59, step=8460, lr=0.165000, batch loss=0.229725, epoch loss=0.229725
Batch=119, step=8520, lr=0.164750, batch loss=0.175017, epoch loss=0.404742
Batch=179, step=8580, lr=0.164500, batch loss=0.187817, epoch loss=0.592559
Batch=239, step=8640, lr=0.164250, batch loss=0.278203, epoch loss=0.870762
Batch=299, step=8700, lr=0.164000, batch loss=0.191994, epoch loss=1.062755
Batch=359, step=8760, lr=0.163750, batch loss=0.248632, epoch loss=1.311388
Batch=419, step=8820, lr=0.163500, batch loss=0.245601, epoch loss=1.556988
Batch=479, step=8880, lr=0.163250, batch loss=0.228591, epoch loss=1.785580
Batch=539, step=8940, lr=0.163000, batch loss=0.178132, epoch loss=1.963712
Batch=599, step=9000, lr=0.162750, batch loss=0.217388, epoch loss=2.181101
Batch=659, step=9060, lr=0.162500, batch loss=0.294814, epoch loss=2.475915
Batch=719, step=9120, lr=0.162250, batch loss=0.296433, epoch loss=2.772348
Batch=779, step=9180, lr=0.162000, batch loss=0.316728, epoch loss=3.089075
Batch=839, step=9240, lr=0.161750, batch loss=0.287243, epoch loss=3.376318
Batch=899, step=9300, lr=0.161500, batch loss=0.251060, epoch loss=3.627378
Batch=959, step=9360, lr=0.161250, batch loss=0.190532, epoch loss=3.817911
Batch=1019, step=9420, lr=0.161000, batch loss=0.311728, epoch loss=4.129639
Batch=1079, step=9480, lr=0.160750, batch loss=0.191595, epoch loss=4.321234
Batch=1139, step=9540, lr=0.160500, batch loss=0.215772, epoch loss=4.537006
Batch=1199, step=9600, lr=0.160250, batch loss=0.165620, epoch loss=4.702626
Epoch=7, step=9600, lr=0.160250, epoch loss=4.702626
Batch=59, step=9660, lr=0.160000, batch loss=0.197217, epoch loss=0.197217
Batch=119, step=9720, lr=0.159750, batch loss=0.165467, epoch loss=0.362684
Batch=179, step=9780, lr=0.159500, batch loss=0.179286, epoch loss=0.541970
Batch=239, step=9840, lr=0.159250, batch loss=0.263837, epoch loss=0.805807
Batch=299, step=9900, lr=0.159000, batch loss=0.182187, epoch loss=0.987994
Batch=359, step=9960, lr=0.158750, batch loss=0.240842, epoch loss=1.228836
Batch=419, step=10020, lr=0.158500, batch loss=0.232979, epoch loss=1.461815
Batch=479, step=10080, lr=0.158250, batch loss=0.213194, epoch loss=1.675009
Batch=539, step=10140, lr=0.158000, batch loss=0.170694, epoch loss=1.845703
Batch=599, step=10200, lr=0.157750, batch loss=0.200247, epoch loss=2.045950
Batch=659, step=10260, lr=0.157500, batch loss=0.283032, epoch loss=2.328982
Batch=719, step=10320, lr=0.157250, batch loss=0.288754, epoch loss=2.617735
Batch=779, step=10380, lr=0.157000, batch loss=0.296880, epoch loss=2.914615
Batch=839, step=10440, lr=0.156750, batch loss=0.267657, epoch loss=3.182272
Batch=899, step=10500, lr=0.156500, batch loss=0.242699, epoch loss=3.424972
Batch=959, step=10560, lr=0.156250, batch loss=0.198668, epoch loss=3.623639
Batch=1019, step=10620, lr=0.156000, batch loss=0.295119, epoch loss=3.918758
Batch=1079, step=10680, lr=0.155750, batch loss=0.178662, epoch loss=4.097421
Batch=1139, step=10740, lr=0.155500, batch loss=0.205425, epoch loss=4.302846
Batch=1199, step=10800, lr=0.155250, batch loss=0.156138, epoch loss=4.458984
Epoch=8, step=10800, lr=0.155250, epoch loss=4.458984
Batch=59, step=10860, lr=0.155000, batch loss=0.177430, epoch loss=0.177430
Batch=119, step=10920, lr=0.154750, batch loss=0.152366, epoch loss=0.329795
Batch=179, step=10980, lr=0.154500, batch loss=0.167114, epoch loss=0.496909
Batch=239, step=11040, lr=0.154250, batch loss=0.242622, epoch loss=0.739531
Batch=299, step=11100, lr=0.154000, batch loss=0.169984, epoch loss=0.909515
Batch=359, step=11160, lr=0.153750, batch loss=0.222140, epoch loss=1.131654
Batch=419, step=11220, lr=0.153500, batch loss=0.229250, epoch loss=1.360905
Batch=479, step=11280, lr=0.153250, batch loss=0.202871, epoch loss=1.563775
Batch=539, step=11340, lr=0.153000, batch loss=0.159118, epoch loss=1.722894
Batch=599, step=11400, lr=0.152750, batch loss=0.178498, epoch loss=1.901392
Batch=659, step=11460, lr=0.152500, batch loss=0.264724, epoch loss=2.166116
Batch=719, step=11520, lr=0.152250, batch loss=0.256959, epoch loss=2.423075
Batch=779, step=11580, lr=0.152000, batch loss=0.273281, epoch loss=2.696355
Batch=839, step=11640, lr=0.151750, batch loss=0.255710, epoch loss=2.952065
Batch=899, step=11700, lr=0.151500, batch loss=0.213014, epoch loss=3.165080
Batch=959, step=11760, lr=0.151250, batch loss=0.168047, epoch loss=3.333127
Batch=1019, step=11820, lr=0.151000, batch loss=0.266229, epoch loss=3.599356
Batch=1079, step=11880, lr=0.150750, batch loss=0.149586, epoch loss=3.748942
Batch=1139, step=11940, lr=0.150500, batch loss=0.185657, epoch loss=3.934599
Batch=1199, step=12000, lr=0.150250, batch loss=0.139466, epoch loss=4.074064
Epoch=9, step=12000, lr=0.150250, epoch loss=4.074064
Batch=59, step=12060, lr=0.150000, batch loss=0.157966, epoch loss=0.157966
Batch=119, step=12120, lr=0.149750, batch loss=0.128883, epoch loss=0.286849
Batch=179, step=12180, lr=0.149500, batch loss=0.150425, epoch loss=0.437274
Batch=239, step=12240, lr=0.149250, batch loss=0.223219, epoch loss=0.660494
Batch=299, step=12300, lr=0.149000, batch loss=0.142706, epoch loss=0.803199
Batch=359, step=12360, lr=0.148750, batch loss=0.195349, epoch loss=0.998549
Batch=419, step=12420, lr=0.148500, batch loss=0.206310, epoch loss=1.204859
Batch=479, step=12480, lr=0.148250, batch loss=0.178675, epoch loss=1.383534
Batch=539, step=12540, lr=0.148000, batch loss=0.142649, epoch loss=1.526183
Batch=599, step=12600, lr=0.147750, batch loss=0.150398, epoch loss=1.676582
Batch=659, step=12660, lr=0.147500, batch loss=0.224937, epoch loss=1.901519
Batch=719, step=12720, lr=0.147250, batch loss=0.235244, epoch loss=2.136763
Batch=779, step=12780, lr=0.147000, batch loss=0.252750, epoch loss=2.389513
Batch=839, step=12840, lr=0.146750, batch loss=0.224930, epoch loss=2.614443
Batch=899, step=12900, lr=0.146500, batch loss=0.185356, epoch loss=2.799799
Batch=959, step=12960, lr=0.146250, batch loss=0.148862, epoch loss=2.948661
Batch=1019, step=13020, lr=0.146000, batch loss=0.268134, epoch loss=3.216795
Batch=1079, step=13080, lr=0.145750, batch loss=0.115262, epoch loss=3.332058
Batch=1139, step=13140, lr=0.145500, batch loss=0.155448, epoch loss=3.487506
Batch=1199, step=13200, lr=0.145250, batch loss=0.118524, epoch loss=3.606030
Epoch=10, step=13200, lr=0.145250, epoch loss=3.606030
Batch=59, step=13260, lr=0.145000, batch loss=0.143320, epoch loss=0.143320
Batch=119, step=13320, lr=0.144750, batch loss=0.119740, epoch loss=0.263060
Batch=179, step=13380, lr=0.144500, batch loss=0.127202, epoch loss=0.390262
Batch=239, step=13440, lr=0.144250, batch loss=0.186770, epoch loss=0.577032
Batch=299, step=13500, lr=0.144000, batch loss=0.112788, epoch loss=0.689820
Batch=359, step=13560, lr=0.143750, batch loss=0.161562, epoch loss=0.851382
Batch=419, step=13620, lr=0.143500, batch loss=0.160625, epoch loss=1.012006
Batch=479, step=13680, lr=0.143250, batch loss=0.147292, epoch loss=1.159299
Batch=539, step=13740, lr=0.143000, batch loss=0.118094, epoch loss=1.277393
Batch=599, step=13800, lr=0.142750, batch loss=0.119989, epoch loss=1.397382
Batch=659, step=13860, lr=0.142500, batch loss=0.175919, epoch loss=1.573301
Batch=719, step=13920, lr=0.142250, batch loss=0.172503, epoch loss=1.745804
Batch=779, step=13980, lr=0.142000, batch loss=0.178552, epoch loss=1.924355
Batch=839, step=14040, lr=0.141750, batch loss=0.186017, epoch loss=2.110372
Batch=899, step=14100, lr=0.141500, batch loss=0.178347, epoch loss=2.288719
Batch=959, step=14160, lr=0.141250, batch loss=0.145930, epoch loss=2.434648
Batch=1019, step=14220, lr=0.141000, batch loss=0.330391, epoch loss=2.765040
Batch=1079, step=14280, lr=0.140750, batch loss=0.082584, epoch loss=2.847624
Batch=1139, step=14340, lr=0.140500, batch loss=0.122383, epoch loss=2.970007
Batch=1199, step=14400, lr=0.140250, batch loss=0.091772, epoch loss=3.061779
Epoch=11, step=14400, lr=0.140250, epoch loss=3.061779
Batch=59, step=14460, lr=0.140000, batch loss=0.112708, epoch loss=0.112708
Batch=119, step=14520, lr=0.139750, batch loss=0.102827, epoch loss=0.215534
Batch=179, step=14580, lr=0.139500, batch loss=0.105866, epoch loss=0.321400
Batch=239, step=14640, lr=0.139250, batch loss=0.140926, epoch loss=0.462326
Batch=299, step=14700, lr=0.139000, batch loss=0.081139, epoch loss=0.543465
Batch=359, step=14760, lr=0.138750, batch loss=0.125301, epoch loss=0.668766
Batch=419, step=14820, lr=0.138500, batch loss=0.129287, epoch loss=0.798053
Batch=479, step=14880, lr=0.138250, batch loss=0.101388, epoch loss=0.899441
Batch=539, step=14940, lr=0.138000, batch loss=0.090746, epoch loss=0.990187
Batch=599, step=15000, lr=0.137750, batch loss=0.084918, epoch loss=1.075105
Batch=659, step=15060, lr=0.137500, batch loss=0.129189, epoch loss=1.204294
Batch=719, step=15120, lr=0.137250, batch loss=0.121462, epoch loss=1.325756
Batch=779, step=15180, lr=0.137000, batch loss=0.134528, epoch loss=1.460283
Batch=839, step=15240, lr=0.136750, batch loss=0.174514, epoch loss=1.634797
Batch=899, step=15300, lr=0.136500, batch loss=0.294599, epoch loss=1.929396
Batch=959, step=15360, lr=0.136250, batch loss=0.059325, epoch loss=1.988722
Batch=1019, step=15420, lr=0.136000, batch loss=0.136264, epoch loss=2.124985
Batch=1079, step=15480, lr=0.135750, batch loss=0.049528, epoch loss=2.174513
Batch=1139, step=15540, lr=0.135500, batch loss=0.106152, epoch loss=2.280666
Batch=1199, step=15600, lr=0.135250, batch loss=0.062301, epoch loss=2.342967
Epoch=12, step=15600, lr=0.135250, epoch loss=2.342967
Batch=59, step=15660, lr=0.135000, batch loss=0.077216, epoch loss=0.077216
Batch=119, step=15720, lr=0.134750, batch loss=0.100726, epoch loss=0.177942
Batch=179, step=15780, lr=0.134500, batch loss=0.088270, epoch loss=0.266212
Batch=239, step=15840, lr=0.134250, batch loss=0.090862, epoch loss=0.357074
Batch=299, step=15900, lr=0.134000, batch loss=0.041372, epoch loss=0.398446
Batch=359, step=15960, lr=0.133750, batch loss=0.077603, epoch loss=0.476049
Batch=419, step=16020, lr=0.133500, batch loss=0.083799, epoch loss=0.559848
Batch=479, step=16080, lr=0.133250, batch loss=0.076951, epoch loss=0.636799
Batch=539, step=16140, lr=0.133000, batch loss=0.052615, epoch loss=0.689414
Batch=599, step=16200, lr=0.132750, batch loss=0.097668, epoch loss=0.787082
Batch=659, step=16260, lr=0.132500, batch loss=0.075914, epoch loss=0.862995
Batch=719, step=16320, lr=0.132250, batch loss=0.086903, epoch loss=0.949898
Batch=779, step=16380, lr=0.132000, batch loss=0.127548, epoch loss=1.077446
Batch=839, step=16440, lr=0.131750, batch loss=0.121445, epoch loss=1.198891
Batch=899, step=16500, lr=0.131500, batch loss=0.132593, epoch loss=1.331483
Batch=959, step=16560, lr=0.131250, batch loss=0.042506, epoch loss=1.373990
Batch=1019, step=16620, lr=0.131000, batch loss=0.101950, epoch loss=1.475940
Batch=1079, step=16680, lr=0.130750, batch loss=0.028003, epoch loss=1.503943
Batch=1139, step=16740, lr=0.130500, batch loss=0.056153, epoch loss=1.560096
Batch=1199, step=16800, lr=0.130250, batch loss=0.029377, epoch loss=1.589473
Epoch=13, step=16800, lr=0.130250, epoch loss=1.589473
Batch=59, step=16860, lr=0.130000, batch loss=0.039048, epoch loss=0.039048
Batch=119, step=16920, lr=0.129750, batch loss=0.065305, epoch loss=0.104353
Batch=179, step=16980, lr=0.129500, batch loss=0.053335, epoch loss=0.157688
Batch=239, step=17040, lr=0.129250, batch loss=0.063669, epoch loss=0.221357
Batch=299, step=17100, lr=0.129000, batch loss=0.025505, epoch loss=0.246862
Batch=359, step=17160, lr=0.128750, batch loss=0.046181, epoch loss=0.293043
Batch=419, step=17220, lr=0.128500, batch loss=0.047755, epoch loss=0.340798
Batch=479, step=17280, lr=0.128250, batch loss=0.027239, epoch loss=0.368037
Batch=539, step=17340, lr=0.128000, batch loss=0.054245, epoch loss=0.422281
Batch=599, step=17400, lr=0.127750, batch loss=0.034581, epoch loss=0.456862
Batch=659, step=17460, lr=0.127500, batch loss=0.045897, epoch loss=0.502759
Batch=719, step=17520, lr=0.127250, batch loss=0.040187, epoch loss=0.542946
Batch=779, step=17580, lr=0.127000, batch loss=0.072503, epoch loss=0.615450
Batch=839, step=17640, lr=0.126750, batch loss=0.086975, epoch loss=0.702425
Batch=899, step=17700, lr=0.126500, batch loss=0.116829, epoch loss=0.819253
Batch=959, step=17760, lr=0.126250, batch loss=0.028207, epoch loss=0.847461
Batch=1019, step=17820, lr=0.126000, batch loss=0.047849, epoch loss=0.895310
Batch=1079, step=17880, lr=0.125750, batch loss=0.012697, epoch loss=0.908007
Batch=1139, step=17940, lr=0.125500, batch loss=0.030645, epoch loss=0.938652
Batch=1199, step=18000, lr=0.125250, batch loss=0.014422, epoch loss=0.953074
Epoch=14, step=18000, lr=0.125250, epoch loss=0.953074
Batch=59, step=18060, lr=0.125000, batch loss=0.010754, epoch loss=0.010754
Batch=119, step=18120, lr=0.124750, batch loss=0.041100, epoch loss=0.051854
Batch=179, step=18180, lr=0.124500, batch loss=0.100186, epoch loss=0.152041
Batch=239, step=18240, lr=0.124250, batch loss=0.046401, epoch loss=0.198442
Batch=299, step=18300, lr=0.124000, batch loss=0.009684, epoch loss=0.208126
Batch=359, step=18360, lr=0.123750, batch loss=0.026792, epoch loss=0.234918
Batch=419, step=18420, lr=0.123500, batch loss=0.028259, epoch loss=0.263178
Batch=479, step=18480, lr=0.123250, batch loss=0.015583, epoch loss=0.278760
Batch=539, step=18540, lr=0.123000, batch loss=0.020628, epoch loss=0.299388
Batch=599, step=18600, lr=0.122750, batch loss=0.025931, epoch loss=0.325319
Batch=659, step=18660, lr=0.122500, batch loss=0.028644, epoch loss=0.353963
Batch=719, step=18720, lr=0.122250, batch loss=0.040003, epoch loss=0.393965
Batch=779, step=18780, lr=0.122000, batch loss=0.097780, epoch loss=0.491745
Batch=839, step=18840, lr=0.121750, batch loss=0.058392, epoch loss=0.550137
Batch=899, step=18900, lr=0.121500, batch loss=0.071568, epoch loss=0.621705
Batch=959, step=18960, lr=0.121250, batch loss=0.014288, epoch loss=0.635993
Batch=1019, step=19020, lr=0.121000, batch loss=0.021214, epoch loss=0.657207
Batch=1079, step=19080, lr=0.120750, batch loss=0.007994, epoch loss=0.665201
Batch=1139, step=19140, lr=0.120500, batch loss=0.024108, epoch loss=0.689309
Batch=1199, step=19200, lr=0.120250, batch loss=0.009652, epoch loss=0.698961
Epoch=15, step=19200, lr=0.120250, epoch loss=0.698961
Batch=59, step=19260, lr=0.120000, batch loss=0.005098, epoch loss=0.005098
Batch=119, step=19320, lr=0.119750, batch loss=0.017396, epoch loss=0.022494
Batch=179, step=19380, lr=0.119500, batch loss=0.046105, epoch loss=0.068599
Batch=239, step=19440, lr=0.119250, batch loss=0.021218, epoch loss=0.089817
Batch=299, step=19500, lr=0.119000, batch loss=0.011918, epoch loss=0.101736
Batch=359, step=19560, lr=0.118750, batch loss=0.022883, epoch loss=0.124619
Batch=419, step=19620, lr=0.118500, batch loss=0.018936, epoch loss=0.143554
Batch=479, step=19680, lr=0.118250, batch loss=0.006487, epoch loss=0.150042
Batch=539, step=19740, lr=0.118000, batch loss=0.018652, epoch loss=0.168694
Batch=599, step=19800, lr=0.117750, batch loss=0.023898, epoch loss=0.192592
Batch=659, step=19860, lr=0.117500, batch loss=0.019934, epoch loss=0.212526
Batch=719, step=19920, lr=0.117250, batch loss=0.049988, epoch loss=0.262514
Batch=779, step=19980, lr=0.117000, batch loss=0.081055, epoch loss=0.343570
Batch=839, step=20040, lr=0.116750, batch loss=0.031265, epoch loss=0.374835
Batch=899, step=20100, lr=0.116500, batch loss=0.028647, epoch loss=0.403482
Batch=959, step=20160, lr=0.116250, batch loss=0.018368, epoch loss=0.421850
Batch=1019, step=20220, lr=0.116000, batch loss=0.022910, epoch loss=0.444760
Batch=1079, step=20280, lr=0.115750, batch loss=0.002500, epoch loss=0.447261
Batch=1139, step=20340, lr=0.115500, batch loss=0.014546, epoch loss=0.461807
Batch=1199, step=20400, lr=0.115250, batch loss=0.005835, epoch loss=0.467642
Epoch=16, step=20400, lr=0.115250, epoch loss=0.467642
Batch=59, step=20460, lr=0.115000, batch loss=0.003155, epoch loss=0.003155
Batch=119, step=20520, lr=0.114750, batch loss=0.008583, epoch loss=0.011738
Batch=179, step=20580, lr=0.114500, batch loss=0.014741, epoch loss=0.026479
Batch=239, step=20640, lr=0.114250, batch loss=0.013847, epoch loss=0.040326
Batch=299, step=20700, lr=0.114000, batch loss=0.004876, epoch loss=0.045202
Batch=359, step=20760, lr=0.113750, batch loss=0.014042, epoch loss=0.059244
Batch=419, step=20820, lr=0.113500, batch loss=0.014663, epoch loss=0.073907
Batch=479, step=20880, lr=0.113250, batch loss=0.004883, epoch loss=0.078791
Batch=539, step=20940, lr=0.113000, batch loss=0.015622, epoch loss=0.094413
Batch=599, step=21000, lr=0.112750, batch loss=0.018484, epoch loss=0.112897
Batch=659, step=21060, lr=0.112500, batch loss=0.015348, epoch loss=0.128245
Batch=719, step=21120, lr=0.112250, batch loss=0.039749, epoch loss=0.167994
Batch=779, step=21180, lr=0.112000, batch loss=0.073167, epoch loss=0.241161
Batch=839, step=21240, lr=0.111750, batch loss=0.026053, epoch loss=0.267215
Batch=899, step=21300, lr=0.111500, batch loss=0.035571, epoch loss=0.302785
Batch=959, step=21360, lr=0.111250, batch loss=0.009723, epoch loss=0.312509
Batch=1019, step=21420, lr=0.111000, batch loss=0.011428, epoch loss=0.323937
Batch=1079, step=21480, lr=0.110750, batch loss=0.000665, epoch loss=0.324602
Batch=1139, step=21540, lr=0.110500, batch loss=0.012942, epoch loss=0.337544
Batch=1199, step=21600, lr=0.110250, batch loss=0.005295, epoch loss=0.342839
Epoch=17, step=21600, lr=0.110250, epoch loss=0.342839
Batch=59, step=21660, lr=0.110000, batch loss=0.002236, epoch loss=0.002236
Batch=119, step=21720, lr=0.109750, batch loss=0.006276, epoch loss=0.008512
Batch=179, step=21780, lr=0.109500, batch loss=0.012855, epoch loss=0.021366
Batch=239, step=21840, lr=0.109250, batch loss=0.009924, epoch loss=0.031290
Batch=299, step=21900, lr=0.109000, batch loss=0.015074, epoch loss=0.046364
Batch=359, step=21960, lr=0.108750, batch loss=0.017044, epoch loss=0.063407
Batch=419, step=22020, lr=0.108500, batch loss=0.013212, epoch loss=0.076619
Batch=479, step=22080, lr=0.108250, batch loss=0.003612, epoch loss=0.080231
Batch=539, step=22140, lr=0.108000, batch loss=0.014890, epoch loss=0.095121
Batch=599, step=22200, lr=0.107750, batch loss=0.015977, epoch loss=0.111098
Batch=659, step=22260, lr=0.107500, batch loss=0.014971, epoch loss=0.126068
Batch=719, step=22320, lr=0.107250, batch loss=0.027280, epoch loss=0.153348
Batch=779, step=22380, lr=0.107000, batch loss=0.035681, epoch loss=0.189030
Batch=839, step=22440, lr=0.106750, batch loss=0.026060, epoch loss=0.215090
Batch=899, step=22500, lr=0.106500, batch loss=0.026342, epoch loss=0.241432
Batch=959, step=22560, lr=0.106250, batch loss=0.008992, epoch loss=0.250424
Batch=1019, step=22620, lr=0.106000, batch loss=0.008982, epoch loss=0.259406
Batch=1079, step=22680, lr=0.105750, batch loss=0.000337, epoch loss=0.259743
Batch=1139, step=22740, lr=0.105500, batch loss=0.010766, epoch loss=0.270508
Batch=1199, step=22800, lr=0.105250, batch loss=0.004566, epoch loss=0.275075
Epoch=18, step=22800, lr=0.105250, epoch loss=0.275075
Batch=59, step=22860, lr=0.105000, batch loss=0.001691, epoch loss=0.001691
Batch=119, step=22920, lr=0.104750, batch loss=0.005324, epoch loss=0.007015
Batch=179, step=22980, lr=0.104500, batch loss=0.010688, epoch loss=0.017703
Batch=239, step=23040, lr=0.104250, batch loss=0.009209, epoch loss=0.026912
Batch=299, step=23100, lr=0.104000, batch loss=0.011529, epoch loss=0.038441
Batch=359, step=23160, lr=0.103750, batch loss=0.011145, epoch loss=0.049586
Batch=419, step=23220, lr=0.103500, batch loss=0.011337, epoch loss=0.060922
Batch=479, step=23280, lr=0.103250, batch loss=0.003223, epoch loss=0.064145
Batch=539, step=23340, lr=0.103000, batch loss=0.016608, epoch loss=0.080753
Batch=599, step=23400, lr=0.102750, batch loss=0.013587, epoch loss=0.094340
Batch=659, step=23460, lr=0.102500, batch loss=0.011177, epoch loss=0.105517
Batch=719, step=23520, lr=0.102250, batch loss=0.012485, epoch loss=0.118003
Batch=779, step=23580, lr=0.102000, batch loss=0.022977, epoch loss=0.140979
Batch=839, step=23640, lr=0.101750, batch loss=0.026084, epoch loss=0.167064
Batch=899, step=23700, lr=0.101500, batch loss=0.022498, epoch loss=0.189562
Batch=959, step=23760, lr=0.101250, batch loss=0.008053, epoch loss=0.197614
Batch=1019, step=23820, lr=0.101000, batch loss=0.008340, epoch loss=0.205954
Batch=1079, step=23880, lr=0.100750, batch loss=0.000382, epoch loss=0.206336
Batch=1139, step=23940, lr=0.100500, batch loss=0.010108, epoch loss=0.216445
Batch=1199, step=24000, lr=0.100250, batch loss=0.004755, epoch loss=0.221200
Epoch=19, step=24000, lr=0.100250, epoch loss=0.221200
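
Note: the learning-rate values logged above follow a simple linear decay, dropping by 0.000250 every 60 steps from roughly 0.2 at the start of training to 0.100250 at step 24000. A minimal OCaml sketch consistent with the logged values (the function name lr_at_step and the constants are inferred from this log, not taken from the ocannl sources):

(* Linear learning-rate decay matching the logged schedule:
   lr decreases by 0.000250 once per 60 steps. *)
let lr_at_step step = 0.200250 -. 0.000250 *. float_of_int (step / 60)

let () =
  (* Prints 0.150250, the value logged at step 12000 above. *)
  Printf.printf "%f\n" (lr_at_step 12000)
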


Half-moons scatterplot and decision boundary:
┌────────────────────────────────────────────────────────────────────────────────────────────────────┐
│********************************#*******************************************************************│
│**********************#*#*#######*###*#####*********************************************************│
│**********************#########################*****************************************************│
│*****************#**########*######*###########*###*************************************************│
│***************#################*###################************************************************│
│************######*#################*#################**********************************************│
│**********#*#####*########*#**************##*#########*#********************************************│
│***********########*##*#******************#*****##########*****************************************.│
│***********###########*************************############*************************************....│
│********######*####*********************************###*###*#*********************************......│
│*******######**##**********************************#*######*#******************************.........│
│*******##*##**##***********..........***************########*##***************************..........│
│*****#######************.......%...%%...***************#########************************..........%.│
│******######***********.........%........***************##*#####***********************.......%.%.%.│
│***#########**********.........%%%.%%......*************#*#######*********************.......%.%%%%.│
│****#######**********..........%%%%.........************#########********************........%%.%%.%│
│**#######************..........%%%%%%%.......**************###*###******************.........%%%%%%.│
│*##*####************...........%%%%%%%.........***********########****************...........%%%%%%.│
│*#######************...........%%%%%%%..........************#######**************............%%%%%%.│
│*##*####***********............%%.%%%%%..........************####***************............%%%%%%%.│
│*#####*#***********.............%%%%%%%............**********##*###************..............%%%%%..│
│#######***********.............%.%%%%%%.............*********#######*********..............%%%%.%%..│
│#####*#***********..............%%%%%%%...............*******#######********...............%%%%%%%%.│
│###*#*#**********...............%%%%%%%%%..............*******######*******................%%%%%%...│
│#######*********.................%%%%%%%%...............*****###*###******................%%%%%%....│
│######**********.................%%%%%%%%%................***#*###*******...............%%%%%%%%%...│
│*#*##*#********...................%%%%%%%%%%...............***######***..................%%%%%%.....│
│#****##********....................%%%%%%%%%................***###*#**................%.%%%%%%%.....│
│**************.....................%.%%%%%%...................*******..................%.%%.%%......│
│**************.......................%..%%%%%%%................*****..............%.%%%%%%%%%.......│
│*************.........................%.%%%.%%%%................***...............%%%%%%%.%.%.......│
│*************...........................%..%%%%..%................................%%%%%%%%..........│
│************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........│
│***********..............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........│
│***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............│
│**********...................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............│
│**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................│
│*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................│
│*********............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................│
│********................................................%...%%%%.%%.%%%%..%.........................│
└────────────────────────────────────────────────────────────────────────────────────────────────────┘
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @install @check @runtest && rm -rf _build" failed with exit status 1
2025-05-22 20:17.23: Job failed: Failed: Build failed