
fedora-41-5.3_opam-2.3


Logs

2025-03-21 16:19.55: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (151162601bc8b3df560578aa1056d12d2dcb7acf) (linux-x86_64:fedora-41-5.3_opam-2.3)
Base: ocaml/opam:fedora-41-ocaml-5.3@sha256:fdc2429ee5b38ad9a34e9df8dcce7161f1b807433a983120453cb37a7b29e925
Opam project build


To reproduce locally:


git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 15116260
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:fedora-41-ocaml-5.3@sha256:fdc2429ee5b38ad9a34e9df8dcce7161f1b807433a983120453cb37a7b29e925
# fedora-41-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo dnf install -y findutils
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build


END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK


2025-03-21 16:19.55: Using cache hint "ahrefs/ocannl-ocaml/opam:fedora-41-ocaml-5.3@sha256:fdc2429ee5b38ad9a34e9df8dcce7161f1b807433a983120453cb37a7b29e925-fedora-41-5.3_opam-2.3-3fcdf15be1e8f7dcae915b4cdb940fd5"
2025-03-21 16:19.55: Using OBuilder spec:
((from ocaml/opam:fedora-41-ocaml-5.3@sha256:fdc2429ee5b38ad9a34e9df8dcce7161f1b807433a983120453cb37a7b29e925)
(comment fedora-41-5.3_opam-2.3)
(user (uid 1000) (gid 1000))
(env CLICOLOR_FORCE 1)
(env OPAMCOLOR always)
(workdir /src)
(run (network host)
(shell "sudo dnf install -y findutils"))
(run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
(run (shell "opam init --reinit -ni"))
(run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
(workdir /src)
(run (shell "sudo chown opam /src"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
(copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
(run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
(run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
(env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
(env CI true)
(env OCAMLCI true)
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
(copy (src .) (dst /src))
(run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)


2025-03-21 16:19.55: Waiting for resource in pool OCluster
2025-03-21 16:19.55: Waiting for worker…
2025-03-21 16:20.06: Got resource from pool OCluster
Building on toxis.caelum.ci.dev
HEAD is now at f6ea3750 Untested: revert the Cmpne primitive op: can be used to test for NaN (x <> x ==> x = NaN)
HEAD is now at 15116260 Differentiable conditional -> piecewise-defined functions
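
(Aside: the first commit message above leans on the IEEE 754 fact that NaN is the only float value unequal to itself, so self-inequality is a NaN test. A minimal OCaml illustration of that check, unrelated to OCANNL's own Cmpne primitive:)

(* NaN is the only float for which x <> x holds. *)
let is_nan (x : float) = x <> x
let () = assert (is_nan Float.nan && not (is_nan 1.0))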


(from ocaml/opam:fedora-41-ocaml-5.3@sha256:fdc2429ee5b38ad9a34e9df8dcce7161f1b807433a983120453cb37a7b29e925)
2025-03-21 16:20.28 ---> saved as "189447b5abf4f84b3141564fa16299d77370e888a929cebc6495802de6bce562"


/: (comment fedora-41-5.3_opam-2.3)


/: (user (uid 1000) (gid 1000))


/: (env CLICOLOR_FORCE 1)


/: (env OPAMCOLOR always)


/: (workdir /src)


/src: (run (network host)
(shell "sudo dnf install -y findutils"))
Updating and loading repositories:
Fedora 41 - x86_64 - Updates           100% | 187.5 KiB/s |  24.2 KiB |  00m00s
Fedora 41 - x86_64                     100% | 150.5 KiB/s |  26.5 KiB |  00m00s
Fedora 41 - x86_64 - Updates           100% |   4.2 MiB/s |   4.4 MiB |  00m01s
Repositories loaded.
Package "findutils-1:4.10.0-4.fc41.x86_64" is already installed.


Nothing to do.
2025-03-21 16:20.33 ---> saved as "947881be467bcf2fd69517dfbab49f9c08bd1fde805595ee55ebf2224778ca57"


/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-03-21 16:20.33 ---> saved as "92a7f2480fc833b8b6e6f7c055e96edecd7a23b7a1425abe59b46f58f5769a58"


/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.


This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.


Continue? [y/n] y
Format upgrade done.


<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[ERROR] Could not update repository "opam-repository-archive": "/usr/bin/git fetch -q" exited with code 128 "fatal: unable to access 'https://github.com/ocaml/opam-repository-archive/': Could not resolve host: github.com"
[default] synchronised from file:///home/opam/opam-repository
2025-03-21 16:22.00 ---> saved as "22323ebe734803b688970a8a4b44abf93aa58863ccedfb0ae4ead813beb5698c"


/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-03-21 16:22.00 ---> saved as "2024869fea33faf19369ee49d7e2bfcec68103aae198063a36b6d1df2607faff"


/src: (workdir /src)


/src: (run (shell "sudo chown opam /src"))
2025-03-21 16:22.00 ---> saved as "b766911cfcef799c6c2bb8aec82e30e2bd662c95ed64a05054ff7a173457d537"


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
* branch                  master     -> FETCH_HEAD
862a7640b1..acfb0e6e94  master     -> origin/master
4e25d0cf5f Merge pull request #27651 from lukstafi/opam-publish-ppx_minidebug.2.1.0


<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[opam-repository-archive] synchronised from git+https://github.com/ocaml/opam-repository-archive
[default] synchronised from file:///home/opam/opam-repository


Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run: eval $(opam env)
2025-03-21 16:23.28 ---> saved as "ece3a959e2ef17442b29b6ddfc99e8140b85a2f969c4a3b7d543a174bcd5db3d"


/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
2025-03-21 16:23.29 ---> saved as "7e95f06579daf2f8a9f219d2425e561a61d328ab09c027200da74538bbb47901"


/src: (run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
[neural_nets_lib.dev] synchronised (file:///src)
neural_nets_lib is now pinned to file:///src (version dev)
[arrayjit.dev] synchronised (file:///src)
arrayjit is now pinned to file:///src (version dev)
2025-03-21 16:23.33 ---> saved as "c662d6f34b7cc78ea7f9596fd8633c27c714706696abf0deb230179c4f1adb49"


/src: (run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
2025-03-21 16:23.33 ---> saved as "555e2af3f1f6b4e33264116930934cc11b3a68e741eb24b68ea3c25a73746872"


/src: (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")


/src: (env CI true)


/src: (env OCAMLCI true)


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
+ /usr/bin/sudo "yum" "makecache"
- Updating and loading repositories:
- Repositories loaded.
- Metadata cache created.


<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[arrayjit.dev] synchronised (file:///src)
[neural_nets_lib.dev] synchronised (file:///src)


[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).


The following system packages will first need to be installed:
libffi-devel


<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>


+ /usr/bin/sudo "yum" "install" "-y" "libffi-devel"
- Updating and loading repositories:
- Repositories loaded.
- Package       Arch   Version      Repository      Size
- Installing:
-  libffi-devel x86_64 3.4.6-3.fc41 fedora      33.1 KiB
- 
- Transaction Summary:
-  Installing:         1 package
- 
- Total size of inbound packages is 29 KiB. Need to download 29 KiB.
- After this operation, 33 KiB extra will be used (install 33 KiB, remove 0 B).
- [1/1] libffi-devel-0:3.4.6-3.fc41.x86_6 100% | 487.5 KiB/s |  28.8 KiB |  00m00s
- --------------------------------------------------------------------------------
- [1/1] Total                             100% | 137.0 KiB/s |  28.8 KiB |  00m00s
- Running transaction
- [1/3] Verify package files              100% |   1.0 KiB/s |   1.0   B |  00m00s
- [2/3] Prepare transaction               100% |  20.0   B/s |   1.0   B |  00m00s
- [3/3] Installing libffi-devel-0:3.4.6-3 100% | 328.2 KiB/s |  34.8 KiB |  00m00s
- Complete!
+ /usr/bin/rpm "-q" "--whatprovides" "libffi-devel"
- libffi-devel-3.4.6-3.fc41.x86_64
2025-03-21 16:24.10 ---> saved as "abdc4c5564966ea384be2de700573543d19e3544adba4f09ce4a49d721c13cd0"


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following actions will be performed:
=== install 65 packages
- install angstrom                0.16.1
- install backoff                 0.1.1
- install base                    v0.17.1
- install bigarray-compat         1.1.0
- install bigstringaf             0.10.0
- install conf-libffi             2.0.0
- install conf-pkg-config         4
- install cppo                    1.8.0
- install csexp                   1.5.2
- install ctypes                  0.23.0
- install ctypes-foreign          0.23.0
- install dune                    3.17.2
- install dune-configurator       3.17.2
- install fieldslib               v0.17.0
- install integers                0.7.0
- install jane-street-headers     v0.17.0
- install jst-config              v0.17.0
- install mtime                   2.1.0
- install multicore-magic         2.3.1
- install num                     1.5-1
- install ocaml-compiler-libs     v0.17.0
- install ocaml-syntax-shims      1.0.0
- install ocaml_intrinsics_kernel v0.17.1
- install ocamlbuild              0.16.1
- install ocamlfind               1.9.8
- install parsexp                 v0.17.0
- install ppx_assert              v0.17.0
- install ppx_base                v0.17.0
- install ppx_cold                v0.17.0
- install ppx_compare             v0.17.0
- install ppx_derivers            1.2.1
- install ppx_deriving            6.0.3
- install ppx_enumerate           v0.17.0
- install ppx_expect              v0.17.2
- install ppx_fields_conv         v0.17.0
- install ppx_globalize           v0.17.0
- install ppx_hash                v0.17.0
- install ppx_here                v0.17.0
- install ppx_inline_test         v0.17.0
- install ppx_minidebug           2.1.0
- install ppx_optcomp             v0.17.0
- install ppx_sexp_conv           v0.17.0
- install ppx_string              v0.17.0
- install ppx_variants_conv       v0.17.0
- install ppxlib                  0.35.0
- install ppxlib_jane             v0.17.2
- install printbox                0.12
- install printbox-ext-plot       0.12
- install printbox-html           0.12
- install printbox-md             0.12
- install printbox-text           0.12
- install ptime                   1.2.0
- install re                      1.12.0
- install saturn_lockfree         0.5.0
- install seq                     base
- install sexplib                 v0.17.0
- install sexplib0                v0.17.0
- install stdio                   v0.17.0
- install stdlib-shims            0.3.0
- install time_now                v0.17.0
- install topkg                   1.0.8
- install tyxml                   4.6.0
- install uucp                    16.0.0
- install uutf                    1.0.4
- install variantslib             v0.17.0


<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved backoff.0.1.1  (cached)
-> retrieved bigarray-compat.1.1.0  (cached)
-> retrieved angstrom.0.16.1  (cached)
-> retrieved base.v0.17.1  (cached)
-> retrieved bigstringaf.0.10.0  (cached)
-> retrieved cppo.1.8.0  (cached)
-> installed conf-pkg-config.4
-> retrieved csexp.1.5.2  (cached)
-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0  (cached)
-> installed conf-libffi.2.0.0
-> retrieved fieldslib.v0.17.0  (cached)
-> retrieved integers.0.7.0  (cached)
-> retrieved jane-street-headers.v0.17.0  (cached)
-> retrieved jst-config.v0.17.0  (cached)
-> retrieved mtime.2.1.0  (cached)
-> retrieved multicore-magic.2.3.1  (cached)
-> retrieved num.1.5-1  (cached)
-> retrieved ocaml-compiler-libs.v0.17.0  (cached)
-> retrieved ocaml-syntax-shims.1.0.0  (cached)
-> retrieved ocaml_intrinsics_kernel.v0.17.1  (cached)
-> retrieved ocamlbuild.0.16.1  (cached)
-> retrieved ocamlfind.1.9.8  (cached)
-> retrieved parsexp.v0.17.0  (cached)
-> retrieved ppx_assert.v0.17.0  (cached)
-> retrieved ppx_base.v0.17.0  (cached)
-> retrieved ppx_cold.v0.17.0  (cached)
-> retrieved ppx_compare.v0.17.0  (cached)
-> retrieved ppx_derivers.1.2.1  (cached)
-> retrieved ppx_enumerate.v0.17.0  (cached)
-> retrieved ppx_deriving.6.0.3  (cached)
-> retrieved ppx_expect.v0.17.2  (cached)
-> retrieved ppx_fields_conv.v0.17.0  (cached)
-> retrieved ppx_globalize.v0.17.0  (cached)
-> retrieved ppx_hash.v0.17.0  (cached)
-> retrieved ppx_here.v0.17.0  (cached)
-> retrieved ppx_inline_test.v0.17.0  (cached)
-> retrieved ppx_optcomp.v0.17.0  (cached)
-> retrieved ppx_sexp_conv.v0.17.0  (cached)
-> retrieved ppx_string.v0.17.0  (cached)
-> retrieved ppx_variants_conv.v0.17.0  (cached)
-> retrieved ppx_minidebug.2.1.0  (cached)
-> retrieved ppxlib_jane.v0.17.2  (cached)
-> retrieved ppxlib.0.35.0  (cached)
-> retrieved dune.3.17.2, dune-configurator.3.17.2  (cached)
-> installed num.1.5-1
-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12  (cached)
-> retrieved ptime.1.2.0  (cached)
-> retrieved seq.base  (cached)
-> installed seq.base
-> retrieved re.1.12.0  (cached)
-> retrieved sexplib.v0.17.0  (cached)
-> retrieved saturn_lockfree.0.5.0  (cached)
-> retrieved sexplib0.v0.17.0  (cached)
-> retrieved stdio.v0.17.0  (cached)
-> retrieved stdlib-shims.0.3.0  (cached)
-> retrieved time_now.v0.17.0  (cached)
-> retrieved topkg.1.0.8  (cached)
-> retrieved tyxml.4.6.0  (cached)
-> retrieved uutf.1.0.4  (cached)
-> retrieved variantslib.v0.17.0  (cached)
-> retrieved uucp.16.0.0  (cached)
-> installed ocamlbuild.0.16.1
-> installed ocamlfind.1.9.8
-> installed topkg.1.0.8
-> installed uutf.1.0.4
-> installed mtime.2.1.0
-> installed ptime.1.2.0
-> installed dune.3.17.2
-> installed jane-street-headers.v0.17.0
-> installed printbox.0.12
-> installed backoff.0.1.1
-> installed ppx_derivers.1.2.1
-> installed csexp.1.5.2
-> installed bigarray-compat.1.1.0
-> installed multicore-magic.2.3.1
-> installed ocaml-syntax-shims.1.0.0
-> installed ocaml_intrinsics_kernel.v0.17.1
-> installed sexplib0.v0.17.0
-> installed stdlib-shims.0.3.0
-> installed re.1.12.0
-> installed cppo.1.8.0
-> installed ocaml-compiler-libs.v0.17.0
-> installed integers.0.7.0
-> installed saturn_lockfree.0.5.0
-> installed dune-configurator.3.17.2
-> installed parsexp.v0.17.0
-> installed bigstringaf.0.10.0
-> installed angstrom.0.16.1
-> installed sexplib.v0.17.0
-> installed tyxml.4.6.0
-> installed printbox-html.0.12
-> installed ctypes.0.23.0
-> installed base.v0.17.1
-> installed fieldslib.v0.17.0
-> installed variantslib.v0.17.0
-> installed stdio.v0.17.0
-> installed uucp.16.0.0
-> installed ctypes-foreign.0.23.0
-> installed printbox-text.0.12
-> installed printbox-md.0.12
-> installed printbox-ext-plot.0.12
-> installed ppxlib.0.35.0
-> installed ppx_optcomp.v0.17.0
-> installed ppxlib_jane.v0.17.2
-> installed ppx_cold.v0.17.0
-> installed ppx_here.v0.17.0
-> installed ppx_variants_conv.v0.17.0
-> installed ppx_fields_conv.v0.17.0
-> installed ppx_deriving.6.0.3
-> installed ppx_globalize.v0.17.0
-> installed ppx_enumerate.v0.17.0
-> installed ppx_compare.v0.17.0
-> installed ppx_sexp_conv.v0.17.0
-> installed ppx_hash.v0.17.0
-> installed ppx_assert.v0.17.0
-> installed ppx_base.v0.17.0
-> installed ppx_minidebug.2.1.0
-> installed jst-config.v0.17.0
-> installed ppx_string.v0.17.0
-> installed time_now.v0.17.0
-> installed ppx_inline_test.v0.17.0
-> installed ppx_expect.v0.17.2
Done.
# To update the current shell environment, run: eval $(opam env)
2025-03-21 16:27.37 ---> saved as "1baad0712f2c318dde6ffa8b7fd7a0bf7af1810e6f813cb83095005a7b19bbed"


/src: (copy (src .) (dst /src))
2025-03-21 16:27.38 ---> saved as "d1f26551036ef95e9317a8e1978c8b139e3a940b2c2b0f458612bdd191083b52"


/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
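
The "Retrieving commandline, environment, or config file variable ..." lines show the settings lookup order: command line first, then the environment, then the ocannl_config file, finally a built-in default. A rough sketch of that precedence (illustrative helper only, not OCANNL's actual code):

(* Illustrative sketch of the lookup order suggested by the log
   ("commandline, environment, or config file"); not OCANNL's real code. *)
let retrieve ~cli ~config_file key ~default =
  (* 1. command-line argument, e.g. --ocannl_log_level=0 *)
  let from_cli =
    List.find_map
      (fun arg ->
        match String.split_on_char '=' arg with
        | [ k; v ] when k = "--" ^ key -> Some v
        | _ -> None)
      cli
  in
  match from_cli with
  | Some v -> v
  | None -> (
      (* 2. environment variable, e.g. OCANNL_LOG_LEVEL *)
      match Sys.getenv_opt (String.uppercase_ascii key) with
      | Some v -> v
      | None -> (
          (* 3. key=value entry in the ocannl_config file *)
          match List.assoc_opt key config_file with
          | Some v -> v (* the "Found 0, in the config file" case *)
          | None -> default (* the "Not found, using default ..." case *)))

let () =
  let config_file = [ ("ocannl_log_level", "0"); ("ocannl_backend", "cc") ] in
  print_endline (retrieve ~cli:[] ~config_file "ocannl_log_level" ~default:"1")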
(cd _build/default/test_ppx && ./test_ppx_op.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
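
The inline_test_runner invocations that follow come from dune's ppx_inline_test/ppx_expect integration, which runs each partition of the test library separately. A minimal, generic expect-test of the kind this runner executes (not taken from the OCANNL test suite; it assumes a dune stanza with (inline_tests) and (preprocess (pps ppx_expect))):

(* Generic ppx_expect example: the runner captures stdout and compares it
   against the [%expect] block, reporting a diff on mismatch. *)
let%expect_test "hello" =
  print_string "Hello, world!";
  [%expect {| Hello, world! |}]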
(cd _build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/0e5d4b9a9a3ac81031e44a899e72e550/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/ed88700ce8eca66c2eaa807eb038958a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test && ./moons_demo_parallel_run.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
("Set log_level to" 1)
└─{orphaned from #2}
Retrieving commandline, environment, or config file variable ocannl_backend
Found cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout
Not found, using default false
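
In the training output below, "batch loss" is the loss of the most recently reported batch, "epoch loss" is the running sum of those reported batch losses within the current epoch (it resets after each "Epoch=..." summary line), and "lr" decays gradually from 0.2 over the run. A small bookkeeping sketch that mirrors the printed fields (illustrative only, not OCANNL's training loop):

(* Illustrative bookkeeping matching the printed fields; not OCANNL's code. *)
type progress = { mutable step : int; mutable epoch_loss : float }

let report p ~batch ~lr ~batch_loss =
  p.epoch_loss <- p.epoch_loss +. batch_loss;
  Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
    batch p.step lr batch_loss p.epoch_loss

let end_of_epoch p ~epoch ~lr =
  Printf.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n" epoch p.step lr
    p.epoch_loss;
  p.epoch_loss <- 0.0

let () =
  let p = { step = 60; epoch_loss = 0.0 } in
  report p ~batch:59 ~lr:0.2 ~batch_loss:23.609453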
Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453
Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087
Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382
Batch=239, step=240, lr=0.199000, batch loss=0.849657, epoch loss=35.625039
Batch=299, step=300, lr=0.199000, batch loss=1.445660, epoch loss=37.070699
Batch=359, step=360, lr=0.198500, batch loss=1.330503, epoch loss=38.401201
Batch=419, step=420, lr=0.198250, batch loss=0.617268, epoch loss=39.018469
Batch=479, step=480, lr=0.198250, batch loss=0.806908, epoch loss=39.825377
Batch=539, step=540, lr=0.197750, batch loss=0.690578, epoch loss=40.515955
Batch=599, step=600, lr=0.197500, batch loss=1.070360, epoch loss=41.586315
Batch=659, step=660, lr=0.197500, batch loss=0.483038, epoch loss=42.069353
Batch=719, step=720, lr=0.197000, batch loss=0.411283, epoch loss=42.480636
Batch=779, step=780, lr=0.196750, batch loss=0.468928, epoch loss=42.949564
Batch=839, step=840, lr=0.196750, batch loss=0.443355, epoch loss=43.392918
Batch=899, step=900, lr=0.196500, batch loss=0.384157, epoch loss=43.777075
Batch=959, step=960, lr=0.196000, batch loss=0.238502, epoch loss=44.015577
Batch=1019, step=1020, lr=0.195750, batch loss=0.440489, epoch loss=44.456066
Batch=1079, step=1080, lr=0.195500, batch loss=0.230881, epoch loss=44.686947
Batch=1139, step=1140, lr=0.195250, batch loss=0.311797, epoch loss=44.998745
Batch=1199, step=1200, lr=0.195250, batch loss=0.265636, epoch loss=45.264380
Epoch=0, step=1200, lr=0.195250, epoch loss=45.264380
Batch=59, step=1260, lr=0.194750, batch loss=0.261094, epoch loss=0.261094
Batch=119, step=1320, lr=0.194750, batch loss=0.203156, epoch loss=0.464251
Batch=179, step=1380, lr=0.194500, batch loss=0.244214, epoch loss=0.708465
Batch=239, step=1440, lr=0.194250, batch loss=0.346879, epoch loss=1.055344
Batch=299, step=1500, lr=0.194000, batch loss=0.240687, epoch loss=1.296031
Batch=359, step=1560, lr=0.193750, batch loss=0.311923, epoch loss=1.607954
Batch=419, step=1620, lr=0.193500, batch loss=0.310901, epoch loss=1.918855
Batch=479, step=1680, lr=0.193250, batch loss=0.276594, epoch loss=2.195449
Batch=539, step=1740, lr=0.193000, batch loss=0.213248, epoch loss=2.408696
Batch=599, step=1800, lr=0.192750, batch loss=0.261532, epoch loss=2.670228
Batch=659, step=1860, lr=0.192500, batch loss=0.380305, epoch loss=3.050533
Batch=719, step=1920, lr=0.192250, batch loss=0.355996, epoch loss=3.406529
Batch=779, step=1980, lr=0.191750, batch loss=0.379285, epoch loss=3.785814
Batch=839, step=2040, lr=0.191750, batch loss=0.341328, epoch loss=4.127143
Batch=899, step=2100, lr=0.191500, batch loss=0.295716, epoch loss=4.422858
Batch=959, step=2160, lr=0.191250, batch loss=0.209167, epoch loss=4.632025
Batch=1019, step=2220, lr=0.191000, batch loss=0.331370, epoch loss=4.963395
Batch=1079, step=2280, lr=0.190500, batch loss=0.195640, epoch loss=5.159035
Batch=1139, step=2340, lr=0.190500, batch loss=0.271870, epoch loss=5.430905
Batch=1199, step=2400, lr=0.190250, batch loss=0.219280, epoch loss=5.650184
Epoch=1, step=2400, lr=0.190250, epoch loss=5.650184
Batch=59, step=2460, lr=0.190000, batch loss=0.229395, epoch loss=0.229395
Batch=119, step=2520, lr=0.189500, batch loss=0.193216, epoch loss=0.422611
Batch=179, step=2580, lr=0.189500, batch loss=0.222174, epoch loss=0.644785
Batch=239, step=2640, lr=0.189250, batch loss=0.327770, epoch loss=0.972554
Batch=299, step=2700, lr=0.189000, batch loss=0.203821, epoch loss=1.176375
Batch=359, step=2760, lr=0.188750, batch loss=0.288334, epoch loss=1.464710
Batch=419, step=2820, lr=0.188500, batch loss=0.280359, epoch loss=1.745069
Batch=479, step=2880, lr=0.188000, batch loss=0.252151, epoch loss=1.997219
Batch=539, step=2940, lr=0.188000, batch loss=0.192004, epoch loss=2.189224
Batch=599, step=3000, lr=0.187750, batch loss=0.224918, epoch loss=2.414141
Batch=659, step=3060, lr=0.187500, batch loss=0.331834, epoch loss=2.745975
Batch=719, step=3120, lr=0.187250, batch loss=0.331880, epoch loss=3.077855
Batch=779, step=3180, lr=0.187000, batch loss=0.359246, epoch loss=3.437101
Batch=839, step=3240, lr=0.186750, batch loss=0.325125, epoch loss=3.762226
Batch=899, step=3300, lr=0.186500, batch loss=0.294858, epoch loss=4.057084
Batch=959, step=3360, lr=0.186250, batch loss=0.237969, epoch loss=4.295053
Batch=1019, step=3420, lr=0.186000, batch loss=0.358904, epoch loss=4.653957
Batch=1079, step=3480, lr=0.185750, batch loss=0.236064, epoch loss=4.890021
Batch=1139, step=3540, lr=0.185500, batch loss=0.263354, epoch loss=5.153375
Batch=1199, step=3600, lr=0.185250, batch loss=0.201455, epoch loss=5.354830
Epoch=2, step=3600, lr=0.185250, epoch loss=5.354830
Batch=59, step=3660, lr=0.185000, batch loss=0.222333, epoch loss=0.222333
Batch=119, step=3720, lr=0.184750, batch loss=0.184472, epoch loss=0.406805
Batch=179, step=3780, lr=0.184500, batch loss=0.211175, epoch loss=0.617979
Batch=239, step=3840, lr=0.184250, batch loss=0.315750, epoch loss=0.933729
Batch=299, step=3900, lr=0.184000, batch loss=0.204940, epoch loss=1.138669
Batch=359, step=3960, lr=0.183750, batch loss=0.281489, epoch loss=1.420157
Batch=419, step=4020, lr=0.183500, batch loss=0.272499, epoch loss=1.692657
Batch=479, step=4080, lr=0.183250, batch loss=0.248854, epoch loss=1.941511
Batch=539, step=4140, lr=0.183000, batch loss=0.196142, epoch loss=2.137653
Batch=599, step=4200, lr=0.182750, batch loss=0.242073, epoch loss=2.379726
Batch=659, step=4260, lr=0.182500, batch loss=0.328630, epoch loss=2.708356
Batch=719, step=4320, lr=0.182000, batch loss=0.327972, epoch loss=3.036327
Batch=779, step=4380, lr=0.182000, batch loss=0.351676, epoch loss=3.388003
Batch=839, step=4440, lr=0.181750, batch loss=0.317980, epoch loss=3.705983
Batch=899, step=4500, lr=0.181500, batch loss=0.293111, epoch loss=3.999095
Batch=959, step=4560, lr=0.181250, batch loss=0.243094, epoch loss=4.242188
Batch=1019, step=4620, lr=0.181000, batch loss=0.344490, epoch loss=4.586679
Batch=1079, step=4680, lr=0.180750, batch loss=0.221282, epoch loss=4.807961
Batch=1139, step=4740, lr=0.180500, batch loss=0.248848, epoch loss=5.056809
Batch=1199, step=4800, lr=0.180250, batch loss=0.191093, epoch loss=5.247903
Epoch=3, step=4800, lr=0.180250, epoch loss=5.247903
Batch=59, step=4860, lr=0.180000, batch loss=0.228511, epoch loss=0.228511
Batch=119, step=4920, lr=0.179750, batch loss=0.189871, epoch loss=0.418382
Batch=179, step=4980, lr=0.179500, batch loss=0.206802, epoch loss=0.625184
Batch=239, step=5040, lr=0.179250, batch loss=0.309923, epoch loss=0.935107
Batch=299, step=5100, lr=0.179000, batch loss=0.205388, epoch loss=1.140495
Batch=359, step=5160, lr=0.178750, batch loss=0.276029, epoch loss=1.416523
Batch=419, step=5220, lr=0.178500, batch loss=0.273037, epoch loss=1.689560
Batch=479, step=5280, lr=0.178000, batch loss=0.248660, epoch loss=1.938221
Batch=539, step=5340, lr=0.178000, batch loss=0.197203, epoch loss=2.135424
Batch=599, step=5400, lr=0.177750, batch loss=0.232692, epoch loss=2.368116
Batch=659, step=5460, lr=0.177500, batch loss=0.324309, epoch loss=2.692425
Batch=719, step=5520, lr=0.177250, batch loss=0.323534, epoch loss=3.015959
Batch=779, step=5580, lr=0.177000, batch loss=0.341322, epoch loss=3.357281
Batch=839, step=5640, lr=0.176750, batch loss=0.313300, epoch loss=3.670581
Batch=899, step=5700, lr=0.176500, batch loss=0.275983, epoch loss=3.946564
Batch=959, step=5760, lr=0.176250, batch loss=0.211167, epoch loss=4.157731
Batch=1019, step=5820, lr=0.176000, batch loss=0.338323, epoch loss=4.496054
Batch=1079, step=5880, lr=0.175750, batch loss=0.187932, epoch loss=4.683986
Batch=1139, step=5940, lr=0.175500, batch loss=0.225511, epoch loss=4.909497
Batch=1199, step=6000, lr=0.175250, batch loss=0.185919, epoch loss=5.095416
Epoch=4, step=6000, lr=0.175250, epoch loss=5.095416
Batch=59, step=6060, lr=0.175000, batch loss=0.226289, epoch loss=0.226289
Batch=119, step=6120, lr=0.174750, batch loss=0.184898, epoch loss=0.411187
Batch=179, step=6180, lr=0.174500, batch loss=0.201902, epoch loss=0.613089
Batch=239, step=6240, lr=0.174250, batch loss=0.301584, epoch loss=0.914673
Batch=299, step=6300, lr=0.174000, batch loss=0.203677, epoch loss=1.118350
Batch=359, step=6360, lr=0.173750, batch loss=0.266986, epoch loss=1.385336
Batch=419, step=6420, lr=0.173250, batch loss=0.265325, epoch loss=1.650661
Batch=479, step=6480, lr=0.173250, batch loss=0.243999, epoch loss=1.894660
Batch=539, step=6540, lr=0.173000, batch loss=0.192962, epoch loss=2.087622
Batch=599, step=6600, lr=0.172500, batch loss=0.229625, epoch loss=2.317247
Batch=659, step=6660, lr=0.172500, batch loss=0.315885, epoch loss=2.633131
Batch=719, step=6720, lr=0.172250, batch loss=0.316256, epoch loss=2.949387
Batch=779, step=6780, lr=0.172000, batch loss=0.332031, epoch loss=3.281419
Batch=839, step=6840, lr=0.171750, batch loss=0.305334, epoch loss=3.586753
Batch=899, step=6900, lr=0.171500, batch loss=0.266579, epoch loss=3.853332
Batch=959, step=6960, lr=0.171250, batch loss=0.215924, epoch loss=4.069256
Batch=1019, step=7020, lr=0.171000, batch loss=0.325934, epoch loss=4.395190
Batch=1079, step=7080, lr=0.170750, batch loss=0.176182, epoch loss=4.571372
Batch=1139, step=7140, lr=0.170500, batch loss=0.214268, epoch loss=4.785639
Batch=1199, step=7200, lr=0.170250, batch loss=0.183345, epoch loss=4.968985
Epoch=5, step=7200, lr=0.170250, epoch loss=4.968985
Batch=59, step=7260, lr=0.170000, batch loss=0.239418, epoch loss=0.239418
Batch=119, step=7320, lr=0.169750, batch loss=0.180753, epoch loss=0.420171
Batch=179, step=7380, lr=0.169500, batch loss=0.195005, epoch loss=0.615177
Batch=239, step=7440, lr=0.169250, batch loss=0.291831, epoch loss=0.907008
Batch=299, step=7500, lr=0.169000, batch loss=0.207454, epoch loss=1.114462
Batch=359, step=7560, lr=0.168500, batch loss=0.264439, epoch loss=1.378901
Batch=419, step=7620, lr=0.168500, batch loss=0.259051, epoch loss=1.637952
Batch=479, step=7680, lr=0.168000, batch loss=0.234429, epoch loss=1.872381
Batch=539, step=7740, lr=0.168000, batch loss=0.191509, epoch loss=2.063889
Batch=599, step=7800, lr=0.167750, batch loss=0.228909, epoch loss=2.292799
Batch=659, step=7860, lr=0.167500, batch loss=0.305049, epoch loss=2.597848
Batch=719, step=7920, lr=0.167250, batch loss=0.310281, epoch loss=2.908129
Batch=779, step=7980, lr=0.167000, batch loss=0.329515, epoch loss=3.237644
Batch=839, step=8040, lr=0.166750, batch loss=0.293454, epoch loss=3.531098
Batch=899, step=8100, lr=0.166250, batch loss=0.263143, epoch loss=3.794241
Batch=959, step=8160, lr=0.166250, batch loss=0.198679, epoch loss=3.992921
Batch=1019, step=8220, lr=0.165750, batch loss=0.326641, epoch loss=4.319562
Batch=1079, step=8280, lr=0.165500, batch loss=0.195955, epoch loss=4.515517
Batch=1139, step=8340, lr=0.165500, batch loss=0.222325, epoch loss=4.737842
Batch=1199, step=8400, lr=0.165250, batch loss=0.174977, epoch loss=4.912819
Epoch=6, step=8400, lr=0.165250, epoch loss=4.912819
Batch=59, step=8460, lr=0.165000, batch loss=0.219428, epoch loss=0.219428
Batch=119, step=8520, lr=0.164750, batch loss=0.181235, epoch loss=0.400663
Batch=179, step=8580, lr=0.164250, batch loss=0.189044, epoch loss=0.589708
Batch=239, step=8640, lr=0.164000, batch loss=0.277140, epoch loss=0.866847
Batch=299, step=8700, lr=0.164000, batch loss=0.187354, epoch loss=1.054202
Batch=359, step=8760, lr=0.163500, batch loss=0.244351, epoch loss=1.298552
Batch=419, step=8820, lr=0.163500, batch loss=0.239149, epoch loss=1.537701
Batch=479, step=8880, lr=0.163000, batch loss=0.217071, epoch loss=1.754772
Batch=539, step=8940, lr=0.162750, batch loss=0.179203, epoch loss=1.933975
Batch=599, step=9000, lr=0.162750, batch loss=0.218089, epoch loss=2.152064
Batch=659, step=9060, lr=0.162250, batch loss=0.294692, epoch loss=2.446756
Batch=719, step=9120, lr=0.162250, batch loss=0.295476, epoch loss=2.742232
Batch=779, step=9180, lr=0.161750, batch loss=0.316554, epoch loss=3.058786
Batch=839, step=9240, lr=0.161500, batch loss=0.287434, epoch loss=3.346220
Batch=899, step=9300, lr=0.161250, batch loss=0.251951, epoch loss=3.598171
Batch=959, step=9360, lr=0.161250, batch loss=0.190526, epoch loss=3.788697
Batch=1019, step=9420, lr=0.161000, batch loss=0.318253, epoch loss=4.106950
Batch=1079, step=9480, lr=0.160500, batch loss=0.200463, epoch loss=4.307413
Batch=1139, step=9540, lr=0.160500, batch loss=0.212018, epoch loss=4.519431
Batch=1199, step=9600, lr=0.160250, batch loss=0.167611, epoch loss=4.687042
Epoch=7, step=9600, lr=0.160250, epoch loss=4.687042
Batch=59, step=9660, lr=0.159750, batch loss=0.200573, epoch loss=0.200573
Batch=119, step=9720, lr=0.159750, batch loss=0.163937, epoch loss=0.364510
Batch=179, step=9780, lr=0.159500, batch loss=0.178738, epoch loss=0.543248
Batch=239, step=9840, lr=0.159250, batch loss=0.263211, epoch loss=0.806460
Batch=299, step=9900, lr=0.159000, batch loss=0.182944, epoch loss=0.989403
Batch=359, step=9960, lr=0.158750, batch loss=0.240648, epoch loss=1.230052
Batch=419, step=10020, lr=0.158500, batch loss=0.232260, epoch loss=1.462312
Batch=479, step=10080, lr=0.158250, batch loss=0.215853, epoch loss=1.678165
Batch=539, step=10140, lr=0.158000, batch loss=0.171065, epoch loss=1.849230
Batch=599, step=10200, lr=0.157750, batch loss=0.204472, epoch loss=2.053702
Batch=659, step=10260, lr=0.157250, batch loss=0.281158, epoch loss=2.334859
Batch=719, step=10320, lr=0.157250, batch loss=0.284908, epoch loss=2.619767
Batch=779, step=10380, lr=0.156750, batch loss=0.295004, epoch loss=2.914771
Batch=839, step=10440, lr=0.156750, batch loss=0.273778, epoch loss=3.188549
Batch=899, step=10500, lr=0.156250, batch loss=0.236680, epoch loss=3.425229
Batch=959, step=10560, lr=0.156000, batch loss=0.182370, epoch loss=3.607599
Batch=1019, step=10620, lr=0.155750, batch loss=0.295763, epoch loss=3.903362
Batch=1079, step=10680, lr=0.155750, batch loss=0.182073, epoch loss=4.085435
Batch=1139, step=10740, lr=0.155500, batch loss=0.198464, epoch loss=4.283899
Batch=1199, step=10800, lr=0.155000, batch loss=0.155502, epoch loss=4.439401
Epoch=8, step=10800, lr=0.155000, epoch loss=4.439401
Batch=59, step=10860, lr=0.155000, batch loss=0.180353, epoch loss=0.180353
Batch=119, step=10920, lr=0.154500, batch loss=0.151978, epoch loss=0.332331
Batch=179, step=10980, lr=0.154250, batch loss=0.166355, epoch loss=0.498686
Batch=239, step=11040, lr=0.154250, batch loss=0.244075, epoch loss=0.742760
Batch=299, step=11100, lr=0.154000, batch loss=0.169353, epoch loss=0.912113
Batch=359, step=11160, lr=0.153750, batch loss=0.221415, epoch loss=1.133528
Batch=419, step=11220, lr=0.153500, batch loss=0.216563, epoch loss=1.350090
Batch=479, step=11280, lr=0.153250, batch loss=0.204198, epoch loss=1.554289
Batch=539, step=11340, lr=0.153000, batch loss=0.158607, epoch loss=1.712896
Batch=599, step=11400, lr=0.152500, batch loss=0.180304, epoch loss=1.893200
Batch=659, step=11460, lr=0.152500, batch loss=0.265227, epoch loss=2.158427
Batch=719, step=11520, lr=0.152250, batch loss=0.261381, epoch loss=2.419808
Batch=779, step=11580, lr=0.152000, batch loss=0.272475, epoch loss=2.692283
Batch=839, step=11640, lr=0.151750, batch loss=0.258293, epoch loss=2.950576
Batch=899, step=11700, lr=0.151250, batch loss=0.212798, epoch loss=3.163374
Batch=959, step=11760, lr=0.151250, batch loss=0.173140, epoch loss=3.336514
Batch=1019, step=11820, lr=0.151000, batch loss=0.264291, epoch loss=3.600804
Batch=1079, step=11880, lr=0.150500, batch loss=0.144555, epoch loss=3.745359
Batch=1139, step=11940, lr=0.150500, batch loss=0.183211, epoch loss=3.928570
Batch=1199, step=12000, lr=0.150000, batch loss=0.140116, epoch loss=4.068686
Epoch=9, step=12000, lr=0.150000, epoch loss=4.068686
Batch=59, step=12060, lr=0.150000, batch loss=0.160647, epoch loss=0.160647
Batch=119, step=12120, lr=0.149750, batch loss=0.132794, epoch loss=0.293441
Batch=179, step=12180, lr=0.149250, batch loss=0.151682, epoch loss=0.445123
Batch=239, step=12240, lr=0.149250, batch loss=0.222476, epoch loss=0.667600
Batch=299, step=12300, lr=0.149000, batch loss=0.138031, epoch loss=0.805630
Batch=359, step=12360, lr=0.148750, batch loss=0.202538, epoch loss=1.008168
Batch=419, step=12420, lr=0.148250, batch loss=0.211909, epoch loss=1.220078
Batch=479, step=12480, lr=0.148250, batch loss=0.175669, epoch loss=1.395747
Batch=539, step=12540, lr=0.148000, batch loss=0.145504, epoch loss=1.541251
Batch=599, step=12600, lr=0.147500, batch loss=0.148877, epoch loss=1.690128
Batch=659, step=12660, lr=0.147500, batch loss=0.220219, epoch loss=1.910347
Batch=719, step=12720, lr=0.147250, batch loss=0.217376, epoch loss=2.127723
Batch=779, step=12780, lr=0.146750, batch loss=0.239068, epoch loss=2.366792
Batch=839, step=12840, lr=0.146500, batch loss=0.231593, epoch loss=2.598385
Batch=899, step=12900, lr=0.146500, batch loss=0.212720, epoch loss=2.811105
Batch=959, step=12960, lr=0.146000, batch loss=0.160881, epoch loss=2.971986
Batch=1019, step=13020, lr=0.146000, batch loss=0.265449, epoch loss=3.237435
Batch=1079, step=13080, lr=0.145500, batch loss=0.118909, epoch loss=3.356344
Batch=1139, step=13140, lr=0.145250, batch loss=0.161118, epoch loss=3.517462
Batch=1199, step=13200, lr=0.145000, batch loss=0.120459, epoch loss=3.637921
Epoch=10, step=13200, lr=0.145000, epoch loss=3.637921
Batch=59, step=13260, lr=0.145000, batch loss=0.142472, epoch loss=0.142472
Batch=119, step=13320, lr=0.144750, batch loss=0.121943, epoch loss=0.264415
Batch=179, step=13380, lr=0.144500, batch loss=0.130062, epoch loss=0.394477
Batch=239, step=13440, lr=0.144000, batch loss=0.196132, epoch loss=0.590610
Batch=299, step=13500, lr=0.144000, batch loss=0.117319, epoch loss=0.707929
Batch=359, step=13560, lr=0.143750, batch loss=0.161062, epoch loss=0.868991
Batch=419, step=13620, lr=0.143250, batch loss=0.163452, epoch loss=1.032443
Batch=479, step=13680, lr=0.143250, batch loss=0.148497, epoch loss=1.180940
Batch=539, step=13740, lr=0.142750, batch loss=0.120308, epoch loss=1.301248
Batch=599, step=13800, lr=0.142750, batch loss=0.121877, epoch loss=1.423125
Batch=659, step=13860, lr=0.142500, batch loss=0.177595, epoch loss=1.600720
Batch=719, step=13920, lr=0.142250, batch loss=0.179555, epoch loss=1.780274
Batch=779, step=13980, lr=0.141750, batch loss=0.200151, epoch loss=1.980425
Batch=839, step=14040, lr=0.141500, batch loss=0.188690, epoch loss=2.169115
Batch=899, step=14100, lr=0.141250, batch loss=0.160213, epoch loss=2.329328
Batch=959, step=14160, lr=0.141250, batch loss=0.134855, epoch loss=2.464184
Batch=1019, step=14220, lr=0.141000, batch loss=0.285752, epoch loss=2.749936
Batch=1079, step=14280, lr=0.140750, batch loss=0.090528, epoch loss=2.840464
Batch=1139, step=14340, lr=0.140250, batch loss=0.132108, epoch loss=2.972572
Batch=1199, step=14400, lr=0.140250, batch loss=0.094687, epoch loss=3.067259
Epoch=11, step=14400, lr=0.140250, epoch loss=3.067259
Batch=59, step=14460, lr=0.140000, batch loss=0.121599, epoch loss=0.121599
Batch=119, step=14520, lr=0.139750, batch loss=0.105879, epoch loss=0.227477
Batch=179, step=14580, lr=0.139250, batch loss=0.104827, epoch loss=0.332304
Batch=239, step=14640, lr=0.139250, batch loss=0.143986, epoch loss=0.476291
Batch=299, step=14700, lr=0.139000, batch loss=0.080540, epoch loss=0.556831
Batch=359, step=14760, lr=0.138500, batch loss=0.120252, epoch loss=0.677083
Batch=419, step=14820, lr=0.138250, batch loss=0.123767, epoch loss=0.800850
Batch=479, step=14880, lr=0.138250, batch loss=0.111669, epoch loss=0.912519
Batch=539, step=14940, lr=0.138000, batch loss=0.121343, epoch loss=1.033862
Batch=599, step=15000, lr=0.137750, batch loss=0.089349, epoch loss=1.123211
Batch=659, step=15060, lr=0.137500, batch loss=0.133048, epoch loss=1.256259
Batch=719, step=15120, lr=0.137250, batch loss=0.137788, epoch loss=1.394047
Batch=779, step=15180, lr=0.137000, batch loss=0.179936, epoch loss=1.573983
Batch=839, step=15240, lr=0.136750, batch loss=0.151166, epoch loss=1.725148
Batch=899, step=15300, lr=0.136500, batch loss=0.147237, epoch loss=1.872386
Batch=959, step=15360, lr=0.136250, batch loss=0.092091, epoch loss=1.964476
Batch=1019, step=15420, lr=0.136000, batch loss=0.171996, epoch loss=2.136472
Batch=1079, step=15480, lr=0.135750, batch loss=0.045950, epoch loss=2.182422
Batch=1139, step=15540, lr=0.135250, batch loss=0.106699, epoch loss=2.289121
Batch=1199, step=15600, lr=0.135000, batch loss=0.059018, epoch loss=2.348139
Epoch=12, step=15600, lr=0.135000, epoch loss=2.348139
Batch=59, step=15660, lr=0.135000, batch loss=0.090203, epoch loss=0.090203
Batch=119, step=15720, lr=0.134750, batch loss=0.156876, epoch loss=0.247079
Batch=179, step=15780, lr=0.134500, batch loss=0.107852, epoch loss=0.354932
Batch=239, step=15840, lr=0.134250, batch loss=0.104283, epoch loss=0.459215
Batch=299, step=15900, lr=0.134000, batch loss=0.051365, epoch loss=0.510580
Batch=359, step=15960, lr=0.133500, batch loss=0.102163, epoch loss=0.612743
Batch=419, step=16020, lr=0.133500, batch loss=0.081140, epoch loss=0.693883
Batch=479, step=16080, lr=0.133250, batch loss=0.059535, epoch loss=0.753419
Batch=539, step=16140, lr=0.133000, batch loss=0.065235, epoch loss=0.818654
Batch=599, step=16200, lr=0.132750, batch loss=0.146772, epoch loss=0.965426
Batch=659, step=16260, lr=0.132500, batch loss=0.091046, epoch loss=1.056472
Batch=719, step=16320, lr=0.132250, batch loss=0.132754, epoch loss=1.189226
Batch=779, step=16380, lr=0.132000, batch loss=0.283590, epoch loss=1.472816
Batch=839, step=16440, lr=0.131750, batch loss=0.094470, epoch loss=1.567286
Batch=899, step=16500, lr=0.131250, batch loss=0.088331, epoch loss=1.655617
Batch=959, step=16560, lr=0.131250, batch loss=0.034626, epoch loss=1.690243
Batch=1019, step=16620, lr=0.131000, batch loss=0.063677, epoch loss=1.753920
Batch=1079, step=16680, lr=0.130500, batch loss=0.053747, epoch loss=1.807667
Batch=1139, step=16740, lr=0.130500, batch loss=0.095196, epoch loss=1.902863
Batch=1199, step=16800, lr=0.130250, batch loss=0.047304, epoch loss=1.950167
Epoch=13, step=16800, lr=0.130250, epoch loss=1.950167
Batch=59, step=16860, lr=0.129750, batch loss=0.037385, epoch loss=0.037385
Batch=119, step=16920, lr=0.129750, batch loss=0.038006, epoch loss=0.075391
Batch=179, step=16980, lr=0.129500, batch loss=0.045067, epoch loss=0.120457
Batch=239, step=17040, lr=0.129000, batch loss=0.061308, epoch loss=0.181765
Batch=299, step=17100, lr=0.128750, batch loss=0.021899, epoch loss=0.203664
Batch=359, step=17160, lr=0.128500, batch loss=0.047314, epoch loss=0.250978
Batch=419, step=17220, lr=0.128500, batch loss=0.080450, epoch loss=0.331428
Batch=479, step=17280, lr=0.128250, batch loss=0.026265, epoch loss=0.357693
Batch=539, step=17340, lr=0.128000, batch loss=0.027740, epoch loss=0.385433
Batch=599, step=17400, lr=0.127750, batch loss=0.037632, epoch loss=0.423065
Batch=659, step=17460, lr=0.127500, batch loss=0.047054, epoch loss=0.470119
Batch=719, step=17520, lr=0.127250, batch loss=0.041950, epoch loss=0.512068
Batch=779, step=17580, lr=0.127000, batch loss=0.039462, epoch loss=0.551530
Batch=839, step=17640, lr=0.126750, batch loss=0.054907, epoch loss=0.606437
Batch=899, step=17700, lr=0.126500, batch loss=0.056380, epoch loss=0.662817
Batch=959, step=17760, lr=0.126250, batch loss=0.040993, epoch loss=0.703810
Batch=1019, step=17820, lr=0.126000, batch loss=0.116117, epoch loss=0.819927
Batch=1079, step=17880, lr=0.125750, batch loss=0.023453, epoch loss=0.843381
Batch=1139, step=17940, lr=0.125500, batch loss=0.053330, epoch loss=0.896711
Batch=1199, step=18000, lr=0.125250, batch loss=0.022054, epoch loss=0.918766
Epoch=14, step=18000, lr=0.125250, epoch loss=0.918766
Batch=59, step=18060, lr=0.125000, batch loss=0.012952, epoch loss=0.012952
Batch=119, step=18120, lr=0.124750, batch loss=0.034972, epoch loss=0.047924
Batch=179, step=18180, lr=0.124250, batch loss=0.082701, epoch loss=0.130625
Batch=239, step=18240, lr=0.124250, batch loss=0.043135, epoch loss=0.173760
Batch=299, step=18300, lr=0.124000, batch loss=0.010648, epoch loss=0.184408
Batch=359, step=18360, lr=0.123500, batch loss=0.028717, epoch loss=0.213126
Batch=419, step=18420, lr=0.123500, batch loss=0.030506, epoch loss=0.243632
Batch=479, step=18480, lr=0.123250, batch loss=0.014288, epoch loss=0.257920
Batch=539, step=18540, lr=0.123000, batch loss=0.033662, epoch loss=0.291581
Batch=599, step=18600, lr=0.122750, batch loss=0.036809, epoch loss=0.328390
Batch=659, step=18660, lr=0.122500, batch loss=0.027611, epoch loss=0.356001
Batch=719, step=18720, lr=0.122250, batch loss=0.040215, epoch loss=0.396216
Batch=779, step=18780, lr=0.122000, batch loss=0.104289, epoch loss=0.500504
Batch=839, step=18840, lr=0.121750, batch loss=0.068320, epoch loss=0.568824
Batch=899, step=18900, lr=0.121500, batch loss=0.084050, epoch loss=0.652874
Batch=959, step=18960, lr=0.121250, batch loss=0.014727, epoch loss=0.667602
Batch=1019, step=19020, lr=0.121000, batch loss=0.020961, epoch loss=0.688563
Batch=1079, step=19080, lr=0.120500, batch loss=0.008323, epoch loss=0.696886
Batch=1139, step=19140, lr=0.120250, batch loss=0.025191, epoch loss=0.722076
Batch=1199, step=19200, lr=0.120250, batch loss=0.010836, epoch loss=0.732912
Epoch=15, step=19200, lr=0.120250, epoch loss=0.732912
Batch=59, step=19260, lr=0.120000, batch loss=0.005215, epoch loss=0.005215
Batch=119, step=19320, lr=0.119750, batch loss=0.021672, epoch loss=0.026887
Batch=179, step=19380, lr=0.119500, batch loss=0.053549, epoch loss=0.080437
Batch=239, step=19440, lr=0.119250, batch loss=0.024822, epoch loss=0.105259
Batch=299, step=19500, lr=0.118750, batch loss=0.011803, epoch loss=0.117062
Batch=359, step=19560, lr=0.118750, batch loss=0.023629, epoch loss=0.140691
Batch=419, step=19620, lr=0.118500, batch loss=0.020487, epoch loss=0.161178
Batch=479, step=19680, lr=0.118250, batch loss=0.008656, epoch loss=0.169834
Batch=539, step=19740, lr=0.118000, batch loss=0.016865, epoch loss=0.186699
Batch=599, step=19800, lr=0.117750, batch loss=0.022551, epoch loss=0.209250
Batch=659, step=19860, lr=0.117500, batch loss=0.017894, epoch loss=0.227143
Batch=719, step=19920, lr=0.117000, batch loss=0.039635, epoch loss=0.266778
Batch=779, step=19980, lr=0.117000, batch loss=0.080864, epoch loss=0.347642
Batch=839, step=20040, lr=0.116750, batch loss=0.032023, epoch loss=0.379665
Batch=899, step=20100, lr=0.116500, batch loss=0.030127, epoch loss=0.409792
Batch=959, step=20160, lr=0.116250, batch loss=0.012412, epoch loss=0.422204
Batch=1019, step=20220, lr=0.115750, batch loss=0.014525, epoch loss=0.436728
Batch=1079, step=20280, lr=0.115750, batch loss=0.001610, epoch loss=0.438339
Batch=1139, step=20340, lr=0.115500, batch loss=0.015668, epoch loss=0.454007
Batch=1199, step=20400, lr=0.115000, batch loss=0.006478, epoch loss=0.460485
Epoch=16, step=20400, lr=0.115000, epoch loss=0.460485
Batch=59, step=20460, lr=0.114750, batch loss=0.003141, epoch loss=0.003141
Batch=119, step=20520, lr=0.114500, batch loss=0.010039, epoch loss=0.013180
Batch=179, step=20580, lr=0.114500, batch loss=0.026988, epoch loss=0.040168
Batch=239, step=20640, lr=0.114000, batch loss=0.014977, epoch loss=0.055145
Batch=299, step=20700, lr=0.114000, batch loss=0.004015, epoch loss=0.059161
Batch=359, step=20760, lr=0.113750, batch loss=0.015994, epoch loss=0.075154
Batch=419, step=20820, lr=0.113500, batch loss=0.015293, epoch loss=0.090448
Batch=479, step=20880, lr=0.113250, batch loss=0.003612, epoch loss=0.094059
Batch=539, step=20940, lr=0.112750, batch loss=0.017684, epoch loss=0.111743
Batch=599, step=21000, lr=0.112500, batch loss=0.021819, epoch loss=0.133562
Batch=659, step=21060, lr=0.112500, batch loss=0.019599, epoch loss=0.153160
Batch=719, step=21120, lr=0.112250, batch loss=0.047587, epoch loss=0.200748
Batch=779, step=21180, lr=0.112000, batch loss=0.066610, epoch loss=0.267357
Batch=839, step=21240, lr=0.111750, batch loss=0.025330, epoch loss=0.292687
Batch=899, step=21300, lr=0.111500, batch loss=0.028724, epoch loss=0.321411
Batch=959, step=21360, lr=0.111250, batch loss=0.011233, epoch loss=0.332644
Batch=1019, step=21420, lr=0.111000, batch loss=0.013598, epoch loss=0.346242
Batch=1079, step=21480, lr=0.110750, batch loss=0.001178, epoch loss=0.347421
Batch=1139, step=21540, lr=0.110500, batch loss=0.012726, epoch loss=0.360147
Batch=1199, step=21600, lr=0.110250, batch loss=0.004739, epoch loss=0.364886
Epoch=17, step=21600, lr=0.110250, epoch loss=0.364886
Batch=59, step=21660, lr=0.110000, batch loss=0.001768, epoch loss=0.001768
Batch=119, step=21720, lr=0.109750, batch loss=0.007866, epoch loss=0.009634
Batch=179, step=21780, lr=0.109500, batch loss=0.018880, epoch loss=0.028515
Batch=239, step=21840, lr=0.109000, batch loss=0.012280, epoch loss=0.040795
Batch=299, step=21900, lr=0.109000, batch loss=0.002441, epoch loss=0.043236
Batch=359, step=21960, lr=0.108500, batch loss=0.011769, epoch loss=0.055004
Batch=419, step=22020, lr=0.108500, batch loss=0.012743, epoch loss=0.067747
Batch=479, step=22080, lr=0.108000, batch loss=0.003192, epoch loss=0.070939
Batch=539, step=22140, lr=0.107750, batch loss=0.016922, epoch loss=0.087861
Batch=599, step=22200, lr=0.107750, batch loss=0.014973, epoch loss=0.102834
Batch=659, step=22260, lr=0.107500, batch loss=0.010840, epoch loss=0.113674
Batch=719, step=22320, lr=0.107250, batch loss=0.021711, epoch loss=0.135385
Batch=779, step=22380, lr=0.107000, batch loss=0.029741, epoch loss=0.165126
Batch=839, step=22440, lr=0.106500, batch loss=0.027911, epoch loss=0.193037
Batch=899, step=22500, lr=0.106500, batch loss=0.024837, epoch loss=0.217874
Batch=959, step=22560, lr=0.106250, batch loss=0.009276, epoch loss=0.227150
Batch=1019, step=22620, lr=0.105750, batch loss=0.009834, epoch loss=0.236984
Batch=1079, step=22680, lr=0.105500, batch loss=0.001921, epoch loss=0.238906
Batch=1139, step=22740, lr=0.105250, batch loss=0.011713, epoch loss=0.250619
Batch=1199, step=22800, lr=0.105250, batch loss=0.005320, epoch loss=0.255939
Epoch=18, step=22800, lr=0.105250, epoch loss=0.255939
Batch=59, step=22860, lr=0.104750, batch loss=0.002380, epoch loss=0.002380
Batch=119, step=22920, lr=0.104500, batch loss=0.005545, epoch loss=0.007924
Batch=179, step=22980, lr=0.104500, batch loss=0.012304, epoch loss=0.020229
Batch=239, step=23040, lr=0.104250, batch loss=0.008415, epoch loss=0.028644
Batch=299, step=23100, lr=0.104000, batch loss=0.004401, epoch loss=0.033045
Batch=359, step=23160, lr=0.103750, batch loss=0.011800, epoch loss=0.044845
Batch=419, step=23220, lr=0.103500, batch loss=0.010446, epoch loss=0.055291
Batch=479, step=23280, lr=0.103250, batch loss=0.002736, epoch loss=0.058027
Batch=539, step=23340, lr=0.102750, batch loss=0.016092, epoch loss=0.074119
Batch=599, step=23400, lr=0.102750, batch loss=0.013487, epoch loss=0.087606
Batch=659, step=23460, lr=0.102500, batch loss=0.011480, epoch loss=0.099086
Batch=719, step=23520, lr=0.102250, batch loss=0.010692, epoch loss=0.109778
Batch=779, step=23580, lr=0.102000, batch loss=0.018544, epoch loss=0.128322
Batch=839, step=23640, lr=0.101500, batch loss=0.026831, epoch loss=0.155154
Batch=899, step=23700, lr=0.101500, batch loss=0.021234, epoch loss=0.176388
Batch=959, step=23760, lr=0.101250, batch loss=0.008174, epoch loss=0.184562
Batch=1019, step=23820, lr=0.101000, batch loss=0.008793, epoch loss=0.193355
Batch=1079, step=23880, lr=0.100750, batch loss=0.001057, epoch loss=0.194412
Batch=1139, step=23940, lr=0.100250, batch loss=0.008375, epoch loss=0.202787
Batch=1199, step=24000, lr=0.100250, batch loss=0.004675, epoch loss=0.207462
Epoch=19, step=24000, lr=0.100250, epoch loss=0.207462
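
The pattern in the log above is a linear learning-rate decay: the logged lr drops by roughly 0.00025 every 60 steps, and the logged "epoch loss" is the running sum of the batch losses within the current epoch (e.g. 0.012952 +. 0.034972 = 0.047924 at steps 18060 and 18120). A minimal stand-alone OCaml sketch of both quantities follows; the constants are read off the log lines, not taken from the test's training script, so treat them as assumptions.

(* Stand-alone sketch; NOT the project's training code.  The decay rate
   (~0.00025 per 60 steps) and the anchor point (step 24000, lr ~0.10025)
   are read off the log above; the real script may compute lr differently. *)
let lr_at step =
  0.10025 +. (0.00025 /. 60.) *. float_of_int (24_000 - step)

(* The logged "epoch loss" is a running sum of batch losses within the
   current epoch, reset at each epoch boundary. *)
let epoch_loss batch_losses = List.fold_left ( +. ) 0. batch_losses

let () =
  Printf.printf "lr at step 18060 ~ %.6f (logged: 0.125000)\n" (lr_at 18_060);
  Printf.printf "epoch loss after two batches = %.6f (logged: 0.047924)\n"
    (epoch_loss [ 0.012952; 0.034972 ])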


Half-moons scatterplot and decision boundary:
┌────────────────────────────────────────────────────────────────────────────────────────────────────┐
│********************************#*******************************************************************│
│**********************#*#*#######*###*#####*********************************************************│
│**********************#########################*****************************************************│
│*****************#**########*######*###########*###*************************************************│
│***************#################*###################************************************************│
│************######*#################*#################**********************************************│
│**********#*#####*########*#**************##*#########*#********************************************│
│***********########*##*#******************#*****##########******************************************│
│***********###########*************************############***************************************..│
│********######*####*********************************###*###*#**********************************.....│
│*******######**##*************....*****************#*######*#*******************************........│
│*******##*##**##**********...........***************########*##***************************..........│
│*****#######************.......%...%%...***************#########*************************.........%.│
│******######***********.........%.........**************##*#####************************......%.%.%.│
│***#########**********.........%%%.%%......*************#*#######*********************.......%.%%%%.│
│****#######**********..........%%%%.........************#########********************........%%.%%.%│
│**#######************..........%%%%%%%........*************###*###******************.........%%%%%%.│
│*##*####************...........%%%%%%%.........***********########*****************..........%%%%%%.│
│*#######************...........%%%%%%%..........************#######**************............%%%%%%.│
│*##*####***********............%%.%%%%%...........***********####***************............%%%%%%%.│
│*#####*#***********.............%%%%%%%............**********##*###************..............%%%%%..│
│#######***********.............%.%%%%%%.............*********#######**********.............%%%%.%%..│
│#####*#***********..............%%%%%%%...............*******#######********...............%%%%%%%%.│
│###*#*#**********...............%%%%%%%%%..............*******######*******................%%%%%%...│
│#######*********.................%%%%%%%%...............*****###*###******................%%%%%%....│
│######**********.................%%%%%%%%%................***#*###*******...............%%%%%%%%%...│
│*#*##*#********...................%%%%%%%%%%...............***######***..................%%%%%%.....│
│#****##********....................%%%%%%%%%.................**###*#**................%.%%%%%%%.....│
│**************.....................%.%%%%%%...................*******..................%.%%.%%......│
│**************.......................%..%%%%%%%................*****..............%.%%%%%%%%%.......│
│*************.........................%.%%%.%%%%.................*................%%%%%%%.%.%.......│
│************............................%..%%%%..%................................%%%%%%%%..........│
│************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........│
│***********..............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........│
│***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............│
│**********...................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............│
│**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................│
│*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................│
│*********............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................│
│********................................................%...%%%%.%%.%%%%..%.........................│
└────────────────────────────────────────────────────────────────────────────────────────────────────┘
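In the plot above, '#' and '%' appear to mark sample points of the two half-moon classes, while '*' and '.' shade the learned decision regions. The data follow the standard two-interleaving-crescents construction; below is a generic OCaml sketch of such a generator (the textbook recipe, with assumed function names and noise level, not necessarily the dataset code used by this test).

(* Generic half-moons construction (as in scikit-learn's make_moons):
   class 0 lies on the arc (cos t, sin t), class 1 on an arc shifted to
   interleave with it; Gaussian noise is added to both coordinates.
   This is an illustrative sketch, not this test's data pipeline. *)
let make_moons ?(n_per_class = 100) ?(noise = 0.1) () =
  let pi = 4. *. atan 1. in
  (* Box-Muller transform for Gaussian noise. *)
  let gauss () =
    let u1 = Random.float 1. +. epsilon_float and u2 = Random.float 1. in
    sqrt (-2. *. log u1) *. cos (2. *. pi *. u2)
  in
  let point cls i =
    let t = pi *. float_of_int i /. float_of_int (n_per_class - 1) in
    let x, y =
      if cls = 0 then (cos t, sin t) else (1. -. cos t, 0.5 -. sin t)
    in
    ((x +. noise *. gauss (), y +. noise *. gauss ()), cls)
  in
  List.init n_per_class (point 0) @ List.init n_per_class (point 1)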
2025-03-21 16:28.57 ---> saved as "1ee31f14a02d79a7fef80418b0a4e682dab3dbe7f0208736abc88c400e826988"
Job succeeded
2025-03-21 16:28.59: Job succeeded