
alpine-3.21-5.3_opam-2.3


Logs

2025-04-10 12:25.59: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (a65dd8955db6894ca41a3c2080e5dda2dd1bf41f) (linux-x86_64:alpine-3.21-5.3_opam-2.3)
Base: ocaml/opam:alpine-3.21-ocaml-5.3@sha256:e2ee63eda00d030de85e2df8292477e3b768ff41108d47f21b9146fd8250d962
Opam project build


To reproduce locally:


git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard a65dd895
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:alpine-3.21-ocaml-5.3@sha256:e2ee63eda00d030de85e2df8292477e3b768ff41108d47f21b9146fd8250d962
# alpine-3.21-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 598efb46ec7387aed249220ca6a2bc39eb4b94e9 || git fetch origin master) && git reset -q --hard 598efb46ec7387aed249220ca6a2bc39eb4b94e9 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.0 dune-configurator.3.18.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build


END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
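
For convenience, the image produced by the reproduction block can be tagged and the final test step re-run interactively. This is a minimal sketch, assuming Docker is available locally and that the commands above have already been run; the tag name ocannl-ci is illustrative and not part of the original instructions:

# Build with a tag so the resulting image is easy to reference; the tag is an arbitrary example.
docker build -t ocannl-ci .

# Re-run the CI test step inside the built image (the project sources live in /src).
docker run --rm ocannl-ci opam exec -- dune build @install @check @runtest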


2025-04-10 12:25.59: Using cache hint "ahrefs/ocannl-ocaml/opam:alpine-3.21-ocaml-5.3@sha256:e2ee63eda00d030de85e2df8292477e3b768ff41108d47f21b9146fd8250d962-alpine-3.21-5.3_opam-2.3-ab22fb8412356c04fa0386e1ea5b2a04"
2025-04-10 12:25.59: Using OBuilder spec:
((from ocaml/opam:alpine-3.21-ocaml-5.3@sha256:e2ee63eda00d030de85e2df8292477e3b768ff41108d47f21b9146fd8250d962)
(comment alpine-3.21-5.3_opam-2.3)
(user (uid 1000) (gid 1000))
(env CLICOLOR_FORCE 1)
(env OPAMCOLOR always)
(workdir /src)
(run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
(run (shell "opam init --reinit -ni"))
(run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
(workdir /src)
(run (shell "sudo chown opam /src"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 598efb46ec7387aed249220ca6a2bc39eb4b94e9 || git fetch origin master) && git reset -q --hard 598efb46ec7387aed249220ca6a2bc39eb4b94e9 && git log --no-decorate -n1 --oneline && opam update -u"))
(copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
(run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
(run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
(env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.0 dune-configurator.3.18.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
(env CI true)
(env OCAMLCI true)
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
(copy (src .) (dst /src))
(run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)


2025-04-10 12:25.59: Waiting for resource in pool OCluster
2025-04-10 12:29.08: Waiting for worker…
2025-04-10 12:31.10: Got resource from pool OCluster
Building on asteria.caelum.ci.dev
All commits already cached
HEAD is now at a65dd895 Tiny formatting


(from ocaml/opam:alpine-3.21-ocaml-5.3@sha256:e2ee63eda00d030de85e2df8292477e3b768ff41108d47f21b9146fd8250d962)
2025-04-10 12:31.11 ---> using "f154ab12a6d575cd956c81477efd0302c0ef4dccea7e9c1952fd02140b057756" from cache


/: (comment alpine-3.21-5.3_opam-2.3)


/: (user (uid 1000) (gid 1000))


/: (env CLICOLOR_FORCE 1)


/: (env OPAMCOLOR always)


/: (workdir /src)


/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-04-10 12:31.11 ---> using "6310931d2bf5fef99ec3c0d979621049ed21e3351bd07029cd06bddb57173709" from cache


/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.


This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.


Continue? [y/n] y
Format upgrade done.


<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
2025-04-10 12:31.43 ---> saved as "675c7208637c697a7a3366c064d560f88c87ccaec86349f5914d7ac40053bae7"


/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-04-10 12:31.43 ---> saved as "6021679194ffece7289cd5c5b221b2f3071db6c856a010fda1808be99b3a5c12"


/src: (workdir /src)


/src: (run (shell "sudo chown opam /src"))
2025-04-10 12:31.43 ---> saved as "91e9c49728c8d128b944322e480ab1d9f5be9d0ae301c2ee057902eeb7ca9559"


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 598efb46ec7387aed249220ca6a2bc39eb4b94e9 || git fetch origin master) && git reset -q --hard 598efb46ec7387aed249220ca6a2bc39eb4b94e9 && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
* branch                  master     -> FETCH_HEAD
da74d7829f..fa0e8c74bf  master     -> origin/master
598efb46ec Merge pull request #27716 from avsm/fix-mpopcnt


<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] synchronised from git+file:///home/opam/opam-repository


Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run: eval $(opam env)
2025-04-10 12:32.15 ---> saved as "a58d8a9d6da26289dd42a9d03d610280a3bbd72bf39ff3d9dcb3a5d2e9248972"


/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
2025-04-10 12:32.16 ---> saved as "bd9e6634308be37a6e9d7c4c230607ce0524551ece0562ec5f02711731e77807"


/src: (run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
[neural_nets_lib.dev] synchronised (file:///src)
neural_nets_lib is now pinned to file:///src (version dev)
[arrayjit.dev] synchronised (file:///src)
arrayjit is now pinned to file:///src (version dev)
2025-04-10 12:32.21 ---> saved as "46f31418243325eda33ababd5576a3ab33f11afe293a6a9b2536b80b108c3658"


/src: (run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
2025-04-10 12:32.21 ---> saved as "295e128d3951c1a99ed61e410c335645157b6d0fcc07f984d050e48de086fc86"


/src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.0 dune-configurator.3.18.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")


/src: (env CI true)


/src: (env OCAMLCI true)


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
+ /usr/bin/sudo "apk" "update"
- fetch https://dl-cdn.alpinelinux.org/alpine/v3.21/main/x86_64/APKINDEX.tar.gz
- fetch https://dl-cdn.alpinelinux.org/alpine/v3.21/community/x86_64/APKINDEX.tar.gz
- fetch https://dl-cdn.alpinelinux.org/alpine/edge/main/x86_64/APKINDEX.tar.gz
- fetch https://dl-cdn.alpinelinux.org/alpine/edge/community/x86_64/APKINDEX.tar.gz
- fetch https://dl-cdn.alpinelinux.org/alpine/edge/testing/x86_64/APKINDEX.tar.gz
- v3.21.3-301-ge7fe61bc1a5 [https://dl-cdn.alpinelinux.org/alpine/v3.21/main]
- v3.21.3-294-g2e5a2ba3499 [https://dl-cdn.alpinelinux.org/alpine/v3.21/community]
- v20250108-5432-gef5183603bf [https://dl-cdn.alpinelinux.org/alpine/edge/main]
- v20250108-5432-gef5183603bf [https://dl-cdn.alpinelinux.org/alpine/edge/community]
- v20250108-5432-gef5183603bf [https://dl-cdn.alpinelinux.org/alpine/edge/testing]
- OK: 57564 distinct packages available


<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[arrayjit.dev] synchronised (file:///src)
[neural_nets_lib.dev] synchronised (file:///src)


[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).


The following system packages will first need to be installed:
libffi-dev


<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>


+ /usr/bin/sudo "apk" "add" "libffi-dev"
- (1/2) Installing linux-headers (6.6-r1)
- (2/2) Installing libffi-dev (3.4.7-r0)
- OK: 312 MiB in 104 packages
2025-04-10 12:32.35 ---> saved as "bafd27aceb88f1c144a5a11a84a9176a2ada1f1616a9c574eb8abe54889dca56"


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following actions will be performed:
=== install 74 packages
- install angstrom                0.16.1
- install astring                 0.8.5
- install backoff                 0.1.1
- install base                    v0.17.1
- install bigarray-compat         1.1.0
- install bigstringaf             0.10.0
- install camlp-streams           5.0.1
- install cmdliner                1.3.0
- install conf-libffi             2.0.0
- install conf-pkg-config         4
- install cppo                    1.8.0
- install csexp                   1.5.2
- install ctypes                  0.23.0
- install ctypes-foreign          0.23.0
- install dune                    3.18.0
- install dune-configurator       3.18.0
- install fieldslib               v0.17.0
- install fmt                     0.10.0
- install integers                0.7.0
- install jane-street-headers     v0.17.0
- install jst-config              v0.17.0
- install logs                    0.8.0
- install mdx                     2.5.0
- install mtime                   2.1.0
- install multicore-magic         2.3.1
- install num                     1.5-1
- install ocaml-compiler-libs     v0.17.0
- install ocaml-syntax-shims      1.0.0
- install ocaml-version           4.0.0
- install ocaml_intrinsics_kernel v0.17.1
- install ocamlbuild              0.16.1
- install ocamlfind               1.9.8
- install parsexp                 v0.17.0
- install ppx_assert              v0.17.0
- install ppx_base                v0.17.0
- install ppx_cold                v0.17.0
- install ppx_compare             v0.17.0
- install ppx_derivers            1.2.1
- install ppx_deriving            6.0.3
- install ppx_enumerate           v0.17.0
- install ppx_expect              v0.17.2
- install ppx_fields_conv         v0.17.0
- install ppx_globalize           v0.17.0
- install ppx_hash                v0.17.0
- install ppx_here                v0.17.0
- install ppx_inline_test         v0.17.0
- install ppx_minidebug           2.2.0
- install ppx_optcomp             v0.17.0
- install ppx_sexp_conv           v0.17.0
- install ppx_string              v0.17.0
- install ppx_variants_conv       v0.17.0
- install ppxlib                  0.35.0
- install ppxlib_jane             v0.17.2
- install printbox                0.12
- install printbox-ext-plot       0.12
- install printbox-html           0.12
- install printbox-md             0.12
- install printbox-text           0.12
- install ptime                   1.2.0
- install re                      1.12.0
- install result                  1.5
- install saturn_lockfree         0.5.0
- install seq                     base
- install sexplib                 v0.17.0
- install sexplib0                v0.17.0
- install stdio                   v0.17.0
- install stdlib-shims            0.3.0
- install thread-local-storage    0.2
- install time_now                v0.17.0
- install topkg                   1.0.8
- install tyxml                   4.6.0
- install uucp                    16.0.0
- install uutf                    1.0.4
- install variantslib             v0.17.0


<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved backoff.0.1.1  (cached)
-> retrieved astring.0.8.5  (cached)
-> retrieved angstrom.0.16.1  (cached)
-> retrieved bigarray-compat.1.1.0  (cached)
-> retrieved bigstringaf.0.10.0  (cached)
-> retrieved base.v0.17.1  (cached)
-> retrieved camlp-streams.5.0.1  (cached)
-> retrieved cmdliner.1.3.0  (cached)
-> retrieved cppo.1.8.0  (cached)
-> installed conf-pkg-config.4
-> retrieved csexp.1.5.2  (cached)
-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0  (cached)
-> installed conf-libffi.2.0.0
-> retrieved fieldslib.v0.17.0  (cached)
-> retrieved fmt.0.10.0  (cached)
-> retrieved integers.0.7.0  (cached)
-> retrieved jane-street-headers.v0.17.0  (cached)
-> retrieved jst-config.v0.17.0  (cached)
-> retrieved logs.0.8.0  (cached)
-> retrieved mtime.2.1.0  (cached)
-> retrieved mdx.2.5.0  (cached)
-> retrieved multicore-magic.2.3.1  (cached)
-> retrieved num.1.5-1  (cached)
-> retrieved ocaml-compiler-libs.v0.17.0  (cached)
-> retrieved ocaml-syntax-shims.1.0.0  (cached)
-> retrieved ocaml-version.4.0.0  (cached)
-> retrieved ocaml_intrinsics_kernel.v0.17.1  (cached)
-> retrieved ocamlfind.1.9.8  (cached)
-> retrieved ocamlbuild.0.16.1  (cached)
-> retrieved dune.3.18.0, dune-configurator.3.18.0  (cached)
-> retrieved parsexp.v0.17.0  (cached)
-> retrieved ppx_assert.v0.17.0  (cached)
-> retrieved ppx_base.v0.17.0  (cached)
-> retrieved ppx_cold.v0.17.0  (cached)
-> retrieved ppx_compare.v0.17.0  (cached)
-> retrieved ppx_derivers.1.2.1  (cached)
-> retrieved ppx_enumerate.v0.17.0  (cached)
-> retrieved ppx_deriving.6.0.3  (cached)
-> retrieved ppx_expect.v0.17.2  (cached)
-> retrieved ppx_fields_conv.v0.17.0  (cached)
-> retrieved ppx_globalize.v0.17.0  (cached)
-> installed cmdliner.1.3.0
-> installed num.1.5-1
-> retrieved ppx_hash.v0.17.0  (cached)
-> retrieved ppx_here.v0.17.0  (cached)
-> retrieved ppx_inline_test.v0.17.0  (cached)
-> retrieved ppx_optcomp.v0.17.0  (cached)
-> retrieved ppx_sexp_conv.v0.17.0  (cached)
-> retrieved ppx_string.v0.17.0  (cached)
-> retrieved ppx_minidebug.2.2.0  (cached)
-> retrieved ppx_variants_conv.v0.17.0  (cached)
-> retrieved ppxlib_jane.v0.17.2  (cached)
-> retrieved ptime.1.2.0  (cached)
-> retrieved ppxlib.0.35.0  (cached)
-> retrieved re.1.12.0  (cached)
-> retrieved result.1.5  (cached)
-> retrieved seq.base  (cached)
-> installed seq.base
-> retrieved sexplib.v0.17.0  (cached)
-> retrieved saturn_lockfree.0.5.0  (cached)
-> retrieved sexplib0.v0.17.0  (cached)
-> retrieved stdio.v0.17.0  (cached)
-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12  (cached)
-> retrieved stdlib-shims.0.3.0  (cached)
-> retrieved thread-local-storage.0.2  (cached)
-> retrieved time_now.v0.17.0  (cached)
-> retrieved topkg.1.0.8  (cached)
-> retrieved tyxml.4.6.0  (cached)
-> retrieved uutf.1.0.4  (cached)
-> retrieved variantslib.v0.17.0  (cached)
-> retrieved uucp.16.0.0  (cached)
-> installed ocamlfind.1.9.8
-> installed ocamlbuild.0.16.1
-> installed topkg.1.0.8
-> installed uutf.1.0.4
-> installed mtime.2.1.0
-> installed fmt.0.10.0
-> installed ptime.1.2.0
-> installed astring.0.8.5
-> installed logs.0.8.0
-> installed dune.3.18.0
-> installed jane-street-headers.v0.17.0
-> installed ppx_derivers.1.2.1
-> installed csexp.1.5.2
-> installed backoff.0.1.1
-> installed bigarray-compat.1.1.0
-> installed camlp-streams.5.0.1
-> installed cppo.1.8.0
-> installed multicore-magic.2.3.1
-> installed ocaml-compiler-libs.v0.17.0
-> installed ocaml-syntax-shims.1.0.0
-> installed ocaml-version.4.0.0
-> installed ocaml_intrinsics_kernel.v0.17.1
-> installed printbox.0.12
-> installed re.1.12.0
-> installed result.1.5
-> installed sexplib0.v0.17.0
-> installed stdlib-shims.0.3.0
-> installed thread-local-storage.0.2
-> installed saturn_lockfree.0.5.0
-> installed integers.0.7.0
-> installed parsexp.v0.17.0
-> installed dune-configurator.3.18.0
-> installed bigstringaf.0.10.0
-> installed mdx.2.5.0
-> installed sexplib.v0.17.0
-> installed angstrom.0.16.1
-> installed tyxml.4.6.0
-> installed printbox-html.0.12
-> installed ctypes.0.23.0
-> installed base.v0.17.1
-> installed variantslib.v0.17.0
-> installed fieldslib.v0.17.0
-> installed stdio.v0.17.0
-> installed ctypes-foreign.0.23.0
-> installed uucp.16.0.0
-> installed printbox-text.0.12
-> installed printbox-md.0.12
-> installed printbox-ext-plot.0.12
-> installed ppxlib.0.35.0
-> installed ppxlib_jane.v0.17.2
-> installed ppx_optcomp.v0.17.0
-> installed ppx_cold.v0.17.0
-> installed ppx_here.v0.17.0
-> installed ppx_variants_conv.v0.17.0
-> installed ppx_fields_conv.v0.17.0
-> installed ppx_enumerate.v0.17.0
-> installed ppx_globalize.v0.17.0
-> installed ppx_compare.v0.17.0
-> installed ppx_deriving.6.0.3
-> installed ppx_sexp_conv.v0.17.0
-> installed ppx_hash.v0.17.0
-> installed ppx_assert.v0.17.0
-> installed ppx_minidebug.2.2.0
-> installed ppx_base.v0.17.0
-> installed jst-config.v0.17.0
-> installed ppx_string.v0.17.0
-> installed time_now.v0.17.0
-> installed ppx_inline_test.v0.17.0
-> installed ppx_expect.v0.17.2
Done.
# To update the current shell environment, run: eval $(opam env)
2025-04-10 12:33.49 ---> saved as "98b979ac8cb250efaaa3dbb19b51c37733707fa0f8285cd510c84f2969e8cded"


/src: (copy (src .) (dst /src))
2025-04-10 12:33.49 ---> saved as "c3f802f0fd208e430e8469639466aaff650ef5961ed039914792c2a4e0269870"


/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test_ppx && ./test_ppx_op.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/68707c8f29a4341b664a251e941440c5/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/8477309b65574c1f0f4b36d24eb668f9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test && ./moons_demo_parallel_run.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
("Set log_level to" 1)
└─{orphaned from #2}
Retrieving commandline, environment, or config file variable ocannl_backend
Found cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout
Not found, using default false
Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453
Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087
Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382
Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039
Batch=299, step=300, lr=0.198750, batch loss=1.447177, epoch loss=37.072216
Batch=359, step=360, lr=0.198750, batch loss=1.327597, epoch loss=38.399812
Batch=419, step=420, lr=0.198500, batch loss=0.619847, epoch loss=39.019659
Batch=479, step=480, lr=0.198000, batch loss=0.820624, epoch loss=39.840283
Batch=539, step=540, lr=0.198000, batch loss=0.691717, epoch loss=40.532000
Batch=599, step=600, lr=0.197750, batch loss=1.063421, epoch loss=41.595421
Batch=659, step=660, lr=0.197500, batch loss=0.483577, epoch loss=42.078997
Batch=719, step=720, lr=0.197250, batch loss=0.411400, epoch loss=42.490398
Batch=779, step=780, lr=0.197000, batch loss=0.470142, epoch loss=42.960540
Batch=839, step=840, lr=0.196750, batch loss=0.446832, epoch loss=43.407372
Batch=899, step=900, lr=0.196500, batch loss=0.382818, epoch loss=43.790190
Batch=959, step=960, lr=0.196250, batch loss=0.245059, epoch loss=44.035250
Batch=1019, step=1020, lr=0.196000, batch loss=0.466917, epoch loss=44.502167
Batch=1079, step=1080, lr=0.195750, batch loss=0.248594, epoch loss=44.750761
Batch=1139, step=1140, lr=0.195500, batch loss=0.317784, epoch loss=45.068546
Batch=1199, step=1200, lr=0.195250, batch loss=0.263718, epoch loss=45.332264
Epoch=0, step=1200, lr=0.195250, epoch loss=45.332264
Batch=59, step=1260, lr=0.195000, batch loss=0.262173, epoch loss=0.262173
Batch=119, step=1320, lr=0.194500, batch loss=0.205325, epoch loss=0.467497
Batch=179, step=1380, lr=0.194250, batch loss=0.243626, epoch loss=0.711123
Batch=239, step=1440, lr=0.194250, batch loss=0.347802, epoch loss=1.058926
Batch=299, step=1500, lr=0.194000, batch loss=0.247544, epoch loss=1.306470
Batch=359, step=1560, lr=0.193750, batch loss=0.316175, epoch loss=1.622644
Batch=419, step=1620, lr=0.193250, batch loss=0.308949, epoch loss=1.931593
Batch=479, step=1680, lr=0.193250, batch loss=0.274869, epoch loss=2.206462
Batch=539, step=1740, lr=0.193000, batch loss=0.210322, epoch loss=2.416784
Batch=599, step=1800, lr=0.192750, batch loss=0.252092, epoch loss=2.668876
Batch=659, step=1860, lr=0.192500, batch loss=0.366985, epoch loss=3.035861
Batch=719, step=1920, lr=0.192250, batch loss=0.355089, epoch loss=3.390950
Batch=779, step=1980, lr=0.192000, batch loss=0.380673, epoch loss=3.771623
Batch=839, step=2040, lr=0.191750, batch loss=0.340189, epoch loss=4.111812
Batch=899, step=2100, lr=0.191500, batch loss=0.294885, epoch loss=4.406696
Batch=959, step=2160, lr=0.191250, batch loss=0.214206, epoch loss=4.620902
Batch=1019, step=2220, lr=0.191000, batch loss=0.329885, epoch loss=4.950787
Batch=1079, step=2280, lr=0.190500, batch loss=0.196035, epoch loss=5.146822
Batch=1139, step=2340, lr=0.190250, batch loss=0.253823, epoch loss=5.400645
Batch=1199, step=2400, lr=0.190250, batch loss=0.211211, epoch loss=5.611856
Epoch=1, step=2400, lr=0.190250, epoch loss=5.611856
Batch=59, step=2460, lr=0.190000, batch loss=0.228248, epoch loss=0.228248
Batch=119, step=2520, lr=0.189750, batch loss=0.198862, epoch loss=0.427110
Batch=179, step=2580, lr=0.189500, batch loss=0.218434, epoch loss=0.645544
Batch=239, step=2640, lr=0.189250, batch loss=0.327186, epoch loss=0.972730
Batch=299, step=2700, lr=0.188750, batch loss=0.219893, epoch loss=1.192623
Batch=359, step=2760, lr=0.188750, batch loss=0.299249, epoch loss=1.491872
Batch=419, step=2820, lr=0.188250, batch loss=0.288864, epoch loss=1.780736
Batch=479, step=2880, lr=0.188250, batch loss=0.266530, epoch loss=2.047266
Batch=539, step=2940, lr=0.187750, batch loss=0.206228, epoch loss=2.253494
Batch=599, step=3000, lr=0.187750, batch loss=0.259891, epoch loss=2.513385
Batch=659, step=3060, lr=0.187500, batch loss=0.359795, epoch loss=2.873180
Batch=719, step=3120, lr=0.187000, batch loss=0.360988, epoch loss=3.234168
Batch=779, step=3180, lr=0.187000, batch loss=0.358972, epoch loss=3.593140
Batch=839, step=3240, lr=0.186750, batch loss=0.329325, epoch loss=3.922465
Batch=899, step=3300, lr=0.186500, batch loss=0.295882, epoch loss=4.218347
Batch=959, step=3360, lr=0.186250, batch loss=0.230283, epoch loss=4.448630
Batch=1019, step=3420, lr=0.186000, batch loss=0.333899, epoch loss=4.782530
Batch=1079, step=3480, lr=0.185750, batch loss=0.193382, epoch loss=4.975912
Batch=1139, step=3540, lr=0.185500, batch loss=0.249460, epoch loss=5.225373
Batch=1199, step=3600, lr=0.185250, batch loss=0.199779, epoch loss=5.425152
Epoch=2, step=3600, lr=0.185250, epoch loss=5.425152
Batch=59, step=3660, lr=0.185000, batch loss=0.230337, epoch loss=0.230337
Batch=119, step=3720, lr=0.184750, batch loss=0.193193, epoch loss=0.423530
Batch=179, step=3780, lr=0.184500, batch loss=0.212019, epoch loss=0.635549
Batch=239, step=3840, lr=0.184250, batch loss=0.317463, epoch loss=0.953012
Batch=299, step=3900, lr=0.184000, batch loss=0.208917, epoch loss=1.161930
Batch=359, step=3960, lr=0.183750, batch loss=0.285191, epoch loss=1.447121
Batch=419, step=4020, lr=0.183500, batch loss=0.276516, epoch loss=1.723637
Batch=479, step=4080, lr=0.183000, batch loss=0.258549, epoch loss=1.982186
Batch=539, step=4140, lr=0.183000, batch loss=0.197613, epoch loss=2.179798
Batch=599, step=4200, lr=0.182750, batch loss=0.237122, epoch loss=2.416921
Batch=659, step=4260, lr=0.182500, batch loss=0.331251, epoch loss=2.748171
Batch=719, step=4320, lr=0.182250, batch loss=0.339480, epoch loss=3.087651
Batch=779, step=4380, lr=0.182000, batch loss=0.352114, epoch loss=3.439764
Batch=839, step=4440, lr=0.181750, batch loss=0.317095, epoch loss=3.756859
Batch=899, step=4500, lr=0.181500, batch loss=0.285306, epoch loss=4.042165
Batch=959, step=4560, lr=0.181250, batch loss=0.238545, epoch loss=4.280710
Batch=1019, step=4620, lr=0.180750, batch loss=0.330777, epoch loss=4.611486
Batch=1079, step=4680, lr=0.180750, batch loss=0.197499, epoch loss=4.808986
Batch=1139, step=4740, lr=0.180500, batch loss=0.234434, epoch loss=5.043419
Batch=1199, step=4800, lr=0.180250, batch loss=0.191649, epoch loss=5.235069
Epoch=3, step=4800, lr=0.180250, epoch loss=5.235069
Batch=59, step=4860, lr=0.180000, batch loss=0.228450, epoch loss=0.228450
Batch=119, step=4920, lr=0.179750, batch loss=0.190219, epoch loss=0.418668
Batch=179, step=4980, lr=0.179500, batch loss=0.205685, epoch loss=0.624354
Batch=239, step=5040, lr=0.179250, batch loss=0.306359, epoch loss=0.930713
Batch=299, step=5100, lr=0.179000, batch loss=0.203404, epoch loss=1.134116
Batch=359, step=5160, lr=0.178750, batch loss=0.272232, epoch loss=1.406348
Batch=419, step=5220, lr=0.178250, batch loss=0.264584, epoch loss=1.670932
Batch=479, step=5280, lr=0.178250, batch loss=0.240429, epoch loss=1.911361
Batch=539, step=5340, lr=0.177750, batch loss=0.191059, epoch loss=2.102420
Batch=599, step=5400, lr=0.177500, batch loss=0.231230, epoch loss=2.333650
Batch=659, step=5460, lr=0.177500, batch loss=0.324029, epoch loss=2.657679
Batch=719, step=5520, lr=0.177250, batch loss=0.332538, epoch loss=2.990217
Batch=779, step=5580, lr=0.177000, batch loss=0.341622, epoch loss=3.331839
Batch=839, step=5640, lr=0.176750, batch loss=0.309542, epoch loss=3.641381
Batch=899, step=5700, lr=0.176500, batch loss=0.272148, epoch loss=3.913529
Batch=959, step=5760, lr=0.176250, batch loss=0.215356, epoch loss=4.128884
Batch=1019, step=5820, lr=0.176000, batch loss=0.335446, epoch loss=4.464330
Batch=1079, step=5880, lr=0.175750, batch loss=0.193113, epoch loss=4.657443
Batch=1139, step=5940, lr=0.175500, batch loss=0.224662, epoch loss=4.882105
Batch=1199, step=6000, lr=0.175250, batch loss=0.189246, epoch loss=5.071351
Epoch=4, step=6000, lr=0.175250, epoch loss=5.071351
Batch=59, step=6060, lr=0.175000, batch loss=0.234318, epoch loss=0.234318
Batch=119, step=6120, lr=0.174750, batch loss=0.192077, epoch loss=0.426395
Batch=179, step=6180, lr=0.174500, batch loss=0.200845, epoch loss=0.627240
Batch=239, step=6240, lr=0.174250, batch loss=0.298696, epoch loss=0.925936
Batch=299, step=6300, lr=0.173750, batch loss=0.198289, epoch loss=1.124225
Batch=359, step=6360, lr=0.173500, batch loss=0.265136, epoch loss=1.389361
Batch=419, step=6420, lr=0.173500, batch loss=0.258806, epoch loss=1.648167
Batch=479, step=6480, lr=0.173250, batch loss=0.234835, epoch loss=1.883002
Batch=539, step=6540, lr=0.173000, batch loss=0.189655, epoch loss=2.072657
Batch=599, step=6600, lr=0.172500, batch loss=0.232858, epoch loss=2.305516
Batch=659, step=6660, lr=0.172500, batch loss=0.313454, epoch loss=2.618970
Batch=719, step=6720, lr=0.172250, batch loss=0.318345, epoch loss=2.937315
Batch=779, step=6780, lr=0.172000, batch loss=0.330610, epoch loss=3.267925
Batch=839, step=6840, lr=0.171750, batch loss=0.305514, epoch loss=3.573439
Batch=899, step=6900, lr=0.171250, batch loss=0.269913, epoch loss=3.843353
Batch=959, step=6960, lr=0.171250, batch loss=0.207282, epoch loss=4.050635
Batch=1019, step=7020, lr=0.171000, batch loss=0.328487, epoch loss=4.379122
Batch=1079, step=7080, lr=0.170750, batch loss=0.184021, epoch loss=4.563143
Batch=1139, step=7140, lr=0.170500, batch loss=0.215842, epoch loss=4.778985
Batch=1199, step=7200, lr=0.170250, batch loss=0.181940, epoch loss=4.960924
Epoch=5, step=7200, lr=0.170250, epoch loss=4.960924
Batch=59, step=7260, lr=0.170000, batch loss=0.235882, epoch loss=0.235882
Batch=119, step=7320, lr=0.169750, batch loss=0.182253, epoch loss=0.418136
Batch=179, step=7380, lr=0.169500, batch loss=0.195514, epoch loss=0.613650
Batch=239, step=7440, lr=0.169250, batch loss=0.290300, epoch loss=0.903951
Batch=299, step=7500, lr=0.169000, batch loss=0.198024, epoch loss=1.101975
Batch=359, step=7560, lr=0.168500, batch loss=0.261183, epoch loss=1.363158
Batch=419, step=7620, lr=0.168500, batch loss=0.255014, epoch loss=1.618172
Batch=479, step=7680, lr=0.168250, batch loss=0.238548, epoch loss=1.856720
Batch=539, step=7740, lr=0.168000, batch loss=0.186373, epoch loss=2.043093
Batch=599, step=7800, lr=0.167750, batch loss=0.224585, epoch loss=2.267678
Batch=659, step=7860, lr=0.167250, batch loss=0.304887, epoch loss=2.572565
Batch=719, step=7920, lr=0.167000, batch loss=0.307956, epoch loss=2.880521
Batch=779, step=7980, lr=0.167000, batch loss=0.325033, epoch loss=3.205554
Batch=839, step=8040, lr=0.166750, batch loss=0.292498, epoch loss=3.498052
Batch=899, step=8100, lr=0.166500, batch loss=0.261170, epoch loss=3.759222
Batch=959, step=8160, lr=0.166250, batch loss=0.201917, epoch loss=3.961138
Batch=1019, step=8220, lr=0.166000, batch loss=0.323336, epoch loss=4.284475
Batch=1079, step=8280, lr=0.165500, batch loss=0.188716, epoch loss=4.473191
Batch=1139, step=8340, lr=0.165250, batch loss=0.219543, epoch loss=4.692734
Batch=1199, step=8400, lr=0.165000, batch loss=0.173073, epoch loss=4.865807
Epoch=6, step=8400, lr=0.165000, epoch loss=4.865807
Batch=59, step=8460, lr=0.165000, batch loss=0.211890, epoch loss=0.211890
Batch=119, step=8520, lr=0.164500, batch loss=0.171969, epoch loss=0.383859
Batch=179, step=8580, lr=0.164250, batch loss=0.188099, epoch loss=0.571958
Batch=239, step=8640, lr=0.164250, batch loss=0.277155, epoch loss=0.849113
Batch=299, step=8700, lr=0.163750, batch loss=0.193720, epoch loss=1.042833
Batch=359, step=8760, lr=0.163750, batch loss=0.254683, epoch loss=1.297515
Batch=419, step=8820, lr=0.163500, batch loss=0.245530, epoch loss=1.543045
Batch=479, step=8880, lr=0.163250, batch loss=0.228489, epoch loss=1.771534
Batch=539, step=8940, lr=0.163000, batch loss=0.178722, epoch loss=1.950256
Batch=599, step=9000, lr=0.162750, batch loss=0.217523, epoch loss=2.167779
Batch=659, step=9060, lr=0.162500, batch loss=0.292986, epoch loss=2.460766
Batch=719, step=9120, lr=0.162250, batch loss=0.295877, epoch loss=2.756643
Batch=779, step=9180, lr=0.162000, batch loss=0.313845, epoch loss=3.070488
Batch=839, step=9240, lr=0.161750, batch loss=0.280972, epoch loss=3.351460
Batch=899, step=9300, lr=0.161250, batch loss=0.251710, epoch loss=3.603170
Batch=959, step=9360, lr=0.161250, batch loss=0.185919, epoch loss=3.789089
Batch=1019, step=9420, lr=0.161000, batch loss=0.318140, epoch loss=4.107229
Batch=1079, step=9480, lr=0.160750, batch loss=0.185935, epoch loss=4.293163
Batch=1139, step=9540, lr=0.160500, batch loss=0.213022, epoch loss=4.506186
Batch=1199, step=9600, lr=0.160250, batch loss=0.166985, epoch loss=4.673171
Epoch=7, step=9600, lr=0.160250, epoch loss=4.673171
Batch=59, step=9660, lr=0.160000, batch loss=0.210220, epoch loss=0.210220
Batch=119, step=9720, lr=0.159750, batch loss=0.170948, epoch loss=0.381168
Batch=179, step=9780, lr=0.159500, batch loss=0.179080, epoch loss=0.560248
Batch=239, step=9840, lr=0.159250, batch loss=0.261350, epoch loss=0.821599
Batch=299, step=9900, lr=0.159000, batch loss=0.184366, epoch loss=1.005964
Batch=359, step=9960, lr=0.158750, batch loss=0.237918, epoch loss=1.243882
Batch=419, step=10020, lr=0.158500, batch loss=0.234171, epoch loss=1.478053
Batch=479, step=10080, lr=0.158250, batch loss=0.219316, epoch loss=1.697369
Batch=539, step=10140, lr=0.158000, batch loss=0.164597, epoch loss=1.861966
Batch=599, step=10200, lr=0.157750, batch loss=0.202133, epoch loss=2.064099
Batch=659, step=10260, lr=0.157500, batch loss=0.280647, epoch loss=2.344747
Batch=719, step=10320, lr=0.157250, batch loss=0.284097, epoch loss=2.628843
Batch=779, step=10380, lr=0.157000, batch loss=0.294097, epoch loss=2.922940
Batch=839, step=10440, lr=0.156500, batch loss=0.265573, epoch loss=3.188513
Batch=899, step=10500, lr=0.156500, batch loss=0.241374, epoch loss=3.429888
Batch=959, step=10560, lr=0.156250, batch loss=0.196215, epoch loss=3.626103
Batch=1019, step=10620, lr=0.156000, batch loss=0.279550, epoch loss=3.905652
Batch=1079, step=10680, lr=0.155500, batch loss=0.167019, epoch loss=4.072672
Batch=1139, step=10740, lr=0.155500, batch loss=0.200570, epoch loss=4.273242
Batch=1199, step=10800, lr=0.155000, batch loss=0.154838, epoch loss=4.428079
Epoch=8, step=10800, lr=0.155000, epoch loss=4.428079
Batch=59, step=10860, lr=0.154750, batch loss=0.174735, epoch loss=0.174735
Batch=119, step=10920, lr=0.154500, batch loss=0.142983, epoch loss=0.317718
Batch=179, step=10980, lr=0.154250, batch loss=0.164616, epoch loss=0.482334
Batch=239, step=11040, lr=0.154250, batch loss=0.242519, epoch loss=0.724852
Batch=299, step=11100, lr=0.153750, batch loss=0.164983, epoch loss=0.889835
Batch=359, step=11160, lr=0.153750, batch loss=0.222356, epoch loss=1.112191
Batch=419, step=11220, lr=0.153500, batch loss=0.228295, epoch loss=1.340485
Batch=479, step=11280, lr=0.153000, batch loss=0.202875, epoch loss=1.543360
Batch=539, step=11340, lr=0.152750, batch loss=0.158955, epoch loss=1.702314
Batch=599, step=11400, lr=0.152500, batch loss=0.185189, epoch loss=1.887504
Batch=659, step=11460, lr=0.152500, batch loss=0.261205, epoch loss=2.148709
Batch=719, step=11520, lr=0.152250, batch loss=0.253208, epoch loss=2.401917
Batch=779, step=11580, lr=0.152000, batch loss=0.267880, epoch loss=2.669797
Batch=839, step=11640, lr=0.151750, batch loss=0.249167, epoch loss=2.918964
Batch=899, step=11700, lr=0.151500, batch loss=0.215306, epoch loss=3.134270
Batch=959, step=11760, lr=0.151250, batch loss=0.161252, epoch loss=3.295522
Batch=1019, step=11820, lr=0.151000, batch loss=0.274117, epoch loss=3.569640
Batch=1079, step=11880, lr=0.150750, batch loss=0.148298, epoch loss=3.717938
Batch=1139, step=11940, lr=0.150500, batch loss=0.187195, epoch loss=3.905133
Batch=1199, step=12000, lr=0.150250, batch loss=0.138365, epoch loss=4.043498
Epoch=9, step=12000, lr=0.150250, epoch loss=4.043498
Batch=59, step=12060, lr=0.150000, batch loss=0.161934, epoch loss=0.161934
Batch=119, step=12120, lr=0.149750, batch loss=0.134704, epoch loss=0.296638
Batch=179, step=12180, lr=0.149500, batch loss=0.149788, epoch loss=0.446425
Batch=239, step=12240, lr=0.149250, batch loss=0.217153, epoch loss=0.663579
Batch=299, step=12300, lr=0.149000, batch loss=0.141423, epoch loss=0.805002
Batch=359, step=12360, lr=0.148750, batch loss=0.196074, epoch loss=1.001076
Batch=419, step=12420, lr=0.148500, batch loss=0.205346, epoch loss=1.206422
Batch=479, step=12480, lr=0.148250, batch loss=0.177293, epoch loss=1.383715
Batch=539, step=12540, lr=0.148000, batch loss=0.141545, epoch loss=1.525260
Batch=599, step=12600, lr=0.147750, batch loss=0.147296, epoch loss=1.672556
Batch=659, step=12660, lr=0.147500, batch loss=0.226007, epoch loss=1.898562
Batch=719, step=12720, lr=0.147250, batch loss=0.237857, epoch loss=2.136419
Batch=779, step=12780, lr=0.147000, batch loss=0.263163, epoch loss=2.399582
Batch=839, step=12840, lr=0.146750, batch loss=0.234055, epoch loss=2.633637
Batch=899, step=12900, lr=0.146500, batch loss=0.202620, epoch loss=2.836257
Batch=959, step=12960, lr=0.146250, batch loss=0.146702, epoch loss=2.982959
Batch=1019, step=13020, lr=0.145750, batch loss=0.253075, epoch loss=3.236034
Batch=1079, step=13080, lr=0.145500, batch loss=0.118125, epoch loss=3.354160
Batch=1139, step=13140, lr=0.145250, batch loss=0.158171, epoch loss=3.512331
Batch=1199, step=13200, lr=0.145000, batch loss=0.118007, epoch loss=3.630338
Epoch=10, step=13200, lr=0.145000, epoch loss=3.630338
Batch=59, step=13260, lr=0.144750, batch loss=0.140347, epoch loss=0.140347
Batch=119, step=13320, lr=0.144750, batch loss=0.119779, epoch loss=0.260126
Batch=179, step=13380, lr=0.144500, batch loss=0.127894, epoch loss=0.388020
Batch=239, step=13440, lr=0.144250, batch loss=0.192964, epoch loss=0.580985
Batch=299, step=13500, lr=0.144000, batch loss=0.113527, epoch loss=0.694511
Batch=359, step=13560, lr=0.143750, batch loss=0.158928, epoch loss=0.853439
Batch=419, step=13620, lr=0.143500, batch loss=0.160537, epoch loss=1.013977
Batch=479, step=13680, lr=0.143250, batch loss=0.143068, epoch loss=1.157045
Batch=539, step=13740, lr=0.143000, batch loss=0.117831, epoch loss=1.274876
Batch=599, step=13800, lr=0.142500, batch loss=0.118603, epoch loss=1.393479
Batch=659, step=13860, lr=0.142250, batch loss=0.173618, epoch loss=1.567097
Batch=719, step=13920, lr=0.142000, batch loss=0.174542, epoch loss=1.741639
Batch=779, step=13980, lr=0.142000, batch loss=0.195633, epoch loss=1.937272
Batch=839, step=14040, lr=0.141750, batch loss=0.201963, epoch loss=2.139236
Batch=899, step=14100, lr=0.141500, batch loss=0.223721, epoch loss=2.362956
Batch=959, step=14160, lr=0.141250, batch loss=0.100226, epoch loss=2.463182
Batch=1019, step=14220, lr=0.140750, batch loss=0.197786, epoch loss=2.660968
Batch=1079, step=14280, lr=0.140750, batch loss=0.076936, epoch loss=2.737904
Batch=1139, step=14340, lr=0.140500, batch loss=0.128643, epoch loss=2.866546
Batch=1199, step=14400, lr=0.140250, batch loss=0.084962, epoch loss=2.951508
Epoch=11, step=14400, lr=0.140250, epoch loss=2.951508
Batch=59, step=14460, lr=0.140000, batch loss=0.105451, epoch loss=0.105451
Batch=119, step=14520, lr=0.139750, batch loss=0.103223, epoch loss=0.208673
Batch=179, step=14580, lr=0.139250, batch loss=0.100025, epoch loss=0.308699
Batch=239, step=14640, lr=0.139250, batch loss=0.139101, epoch loss=0.447800
Batch=299, step=14700, lr=0.139000, batch loss=0.074367, epoch loss=0.522167
Batch=359, step=14760, lr=0.138750, batch loss=0.119299, epoch loss=0.641465
Batch=419, step=14820, lr=0.138500, batch loss=0.128995, epoch loss=0.770461
Batch=479, step=14880, lr=0.138250, batch loss=0.100124, epoch loss=0.870585
Batch=539, step=14940, lr=0.137750, batch loss=0.119233, epoch loss=0.989819
Batch=599, step=15000, lr=0.137750, batch loss=0.085374, epoch loss=1.075192
Batch=659, step=15060, lr=0.137500, batch loss=0.138521, epoch loss=1.213713
Batch=719, step=15120, lr=0.137250, batch loss=0.160660, epoch loss=1.374373
Batch=779, step=15180, lr=0.137000, batch loss=0.266586, epoch loss=1.640958
Batch=839, step=15240, lr=0.136750, batch loss=0.132103, epoch loss=1.773062
Batch=899, step=15300, lr=0.136500, batch loss=0.138269, epoch loss=1.911331
Batch=959, step=15360, lr=0.136250, batch loss=0.079336, epoch loss=1.990667
Batch=1019, step=15420, lr=0.136000, batch loss=0.158293, epoch loss=2.148961
Batch=1079, step=15480, lr=0.135750, batch loss=0.043651, epoch loss=2.192612
Batch=1139, step=15540, lr=0.135500, batch loss=0.099377, epoch loss=2.291989
Batch=1199, step=15600, lr=0.135000, batch loss=0.059695, epoch loss=2.351684
Epoch=12, step=15600, lr=0.135000, epoch loss=2.351684
Batch=59, step=15660, lr=0.135000, batch loss=0.080001, epoch loss=0.080001
Batch=119, step=15720, lr=0.134750, batch loss=0.135768, epoch loss=0.215768
Batch=179, step=15780, lr=0.134250, batch loss=0.096594, epoch loss=0.312362
Batch=239, step=15840, lr=0.134250, batch loss=0.095148, epoch loss=0.407510
Batch=299, step=15900, lr=0.133750, batch loss=0.044262, epoch loss=0.451772
Batch=359, step=15960, lr=0.133750, batch loss=0.082375, epoch loss=0.534146
Batch=419, step=16020, lr=0.133250, batch loss=0.076909, epoch loss=0.611055
Batch=479, step=16080, lr=0.133250, batch loss=0.057073, epoch loss=0.668128
Batch=539, step=16140, lr=0.132750, batch loss=0.064167, epoch loss=0.732295
Batch=599, step=16200, lr=0.132750, batch loss=0.158113, epoch loss=0.890408
Batch=659, step=16260, lr=0.132500, batch loss=0.090391, epoch loss=0.980799
Batch=719, step=16320, lr=0.132000, batch loss=0.118136, epoch loss=1.098936
Batch=779, step=16380, lr=0.132000, batch loss=0.277941, epoch loss=1.376877
Batch=839, step=16440, lr=0.131750, batch loss=0.089633, epoch loss=1.466510
Batch=899, step=16500, lr=0.131500, batch loss=0.075748, epoch loss=1.542258
Batch=959, step=16560, lr=0.131250, batch loss=0.030790, epoch loss=1.573048
Batch=1019, step=16620, lr=0.131000, batch loss=0.056050, epoch loss=1.629098
Batch=1079, step=16680, lr=0.130750, batch loss=0.067481, epoch loss=1.696579
Batch=1139, step=16740, lr=0.130250, batch loss=0.123996, epoch loss=1.820574
Batch=1199, step=16800, lr=0.130250, batch loss=0.054430, epoch loss=1.875004
Epoch=13, step=16800, lr=0.130250, epoch loss=1.875004
Batch=59, step=16860, lr=0.130000, batch loss=0.033699, epoch loss=0.033699
Batch=119, step=16920, lr=0.129750, batch loss=0.033595, epoch loss=0.067294
Batch=179, step=16980, lr=0.129500, batch loss=0.043943, epoch loss=0.111237
Batch=239, step=17040, lr=0.129000, batch loss=0.058851, epoch loss=0.170087
Batch=299, step=17100, lr=0.129000, batch loss=0.019124, epoch loss=0.189211
Batch=359, step=17160, lr=0.128750, batch loss=0.043419, epoch loss=0.232631
Batch=419, step=17220, lr=0.128500, batch loss=0.044684, epoch loss=0.277314
Batch=479, step=17280, lr=0.128250, batch loss=0.021141, epoch loss=0.298456
Batch=539, step=17340, lr=0.128000, batch loss=0.026701, epoch loss=0.325157
Batch=599, step=17400, lr=0.127750, batch loss=0.037262, epoch loss=0.362419
Batch=659, step=17460, lr=0.127500, batch loss=0.051205, epoch loss=0.413624
Batch=719, step=17520, lr=0.127250, batch loss=0.061928, epoch loss=0.475551
Batch=779, step=17580, lr=0.127000, batch loss=0.064681, epoch loss=0.540233
Batch=839, step=17640, lr=0.126750, batch loss=0.100606, epoch loss=0.640839
Batch=899, step=17700, lr=0.126500, batch loss=0.048231, epoch loss=0.689071
Batch=959, step=17760, lr=0.126250, batch loss=0.019032, epoch loss=0.708103
Batch=1019, step=17820, lr=0.126000, batch loss=0.047482, epoch loss=0.755585
Batch=1079, step=17880, lr=0.125750, batch loss=0.033036, epoch loss=0.788621
Batch=1139, step=17940, lr=0.125500, batch loss=0.062926, epoch loss=0.851547
Batch=1199, step=18000, lr=0.125250, batch loss=0.021103, epoch loss=0.872650
Epoch=14, step=18000, lr=0.125250, epoch loss=0.872650
Batch=59, step=18060, lr=0.125000, batch loss=0.012290, epoch loss=0.012290
Batch=119, step=18120, lr=0.124750, batch loss=0.017916, epoch loss=0.030206
Batch=179, step=18180, lr=0.124500, batch loss=0.028446, epoch loss=0.058652
Batch=239, step=18240, lr=0.124000, batch loss=0.032978, epoch loss=0.091630
Batch=299, step=18300, lr=0.124000, batch loss=0.009573, epoch loss=0.101203
Batch=359, step=18360, lr=0.123750, batch loss=0.023015, epoch loss=0.124218
Batch=419, step=18420, lr=0.123500, batch loss=0.031140, epoch loss=0.155358
Batch=479, step=18480, lr=0.123250, batch loss=0.022210, epoch loss=0.177568
Batch=539, step=18540, lr=0.123000, batch loss=0.042260, epoch loss=0.219828
Batch=599, step=18600, lr=0.122750, batch loss=0.026153, epoch loss=0.245980
Batch=659, step=18660, lr=0.122500, batch loss=0.032452, epoch loss=0.278433
Batch=719, step=18720, lr=0.122250, batch loss=0.044982, epoch loss=0.323415
Batch=779, step=18780, lr=0.121750, batch loss=0.119711, epoch loss=0.443125
Batch=839, step=18840, lr=0.121750, batch loss=0.054701, epoch loss=0.497827
Batch=899, step=18900, lr=0.121500, batch loss=0.054252, epoch loss=0.552079
Batch=959, step=18960, lr=0.121250, batch loss=0.014614, epoch loss=0.566692
Batch=1019, step=19020, lr=0.121000, batch loss=0.022344, epoch loss=0.589036
Batch=1079, step=19080, lr=0.120750, batch loss=0.008994, epoch loss=0.598030
Batch=1139, step=19140, lr=0.120500, batch loss=0.023416, epoch loss=0.621446
Batch=1199, step=19200, lr=0.120250, batch loss=0.009314, epoch loss=0.630760
Epoch=15, step=19200, lr=0.120250, epoch loss=0.630760
Batch=59, step=19260, lr=0.120000, batch loss=0.004144, epoch loss=0.004144
Batch=119, step=19320, lr=0.119750, batch loss=0.014192, epoch loss=0.018335
Batch=179, step=19380, lr=0.119500, batch loss=0.030262, epoch loss=0.048597
Batch=239, step=19440, lr=0.119250, batch loss=0.022854, epoch loss=0.071451
Batch=299, step=19500, lr=0.119000, batch loss=0.006143, epoch loss=0.077594
Batch=359, step=19560, lr=0.118750, batch loss=0.016731, epoch loss=0.094325
Batch=419, step=19620, lr=0.118500, batch loss=0.019726, epoch loss=0.114051
Batch=479, step=19680, lr=0.118250, batch loss=0.009423, epoch loss=0.123473
Batch=539, step=19740, lr=0.118000, batch loss=0.020510, epoch loss=0.143983
Batch=599, step=19800, lr=0.117750, batch loss=0.019853, epoch loss=0.163836
Batch=659, step=19860, lr=0.117500, batch loss=0.020167, epoch loss=0.184003
Batch=719, step=19920, lr=0.117250, batch loss=0.015687, epoch loss=0.199690
Batch=779, step=19980, lr=0.117000, batch loss=0.018164, epoch loss=0.217854
Batch=839, step=20040, lr=0.116750, batch loss=0.030711, epoch loss=0.248565
Batch=899, step=20100, lr=0.116500, batch loss=0.026576, epoch loss=0.275141
Batch=959, step=20160, lr=0.116250, batch loss=0.012180, epoch loss=0.287321
Batch=1019, step=20220, lr=0.115750, batch loss=0.016197, epoch loss=0.303517
Batch=1079, step=20280, lr=0.115750, batch loss=0.003039, epoch loss=0.306556
Batch=1139, step=20340, lr=0.115500, batch loss=0.016924, epoch loss=0.323480
Batch=1199, step=20400, lr=0.115250, batch loss=0.008357, epoch loss=0.331837
Epoch=16, step=20400, lr=0.115250, epoch loss=0.331837
Batch=59, step=20460, lr=0.115000, batch loss=0.004314, epoch loss=0.004314
Batch=119, step=20520, lr=0.114750, batch loss=0.011483, epoch loss=0.015798
Batch=179, step=20580, lr=0.114500, batch loss=0.025678, epoch loss=0.041475
Batch=239, step=20640, lr=0.114250, batch loss=0.014779, epoch loss=0.056255
Batch=299, step=20700, lr=0.114000, batch loss=0.003282, epoch loss=0.059537
Batch=359, step=20760, lr=0.113750, batch loss=0.015139, epoch loss=0.074676
Batch=419, step=20820, lr=0.113500, batch loss=0.014741, epoch loss=0.089417
Batch=479, step=20880, lr=0.113250, batch loss=0.003935, epoch loss=0.093353
Batch=539, step=20940, lr=0.113000, batch loss=0.016654, epoch loss=0.110007
Batch=599, step=21000, lr=0.112750, batch loss=0.018083, epoch loss=0.128090
Batch=659, step=21060, lr=0.112500, batch loss=0.013370, epoch loss=0.141459
Batch=719, step=21120, lr=0.112250, batch loss=0.036666, epoch loss=0.178125
Batch=779, step=21180, lr=0.111750, batch loss=0.074252, epoch loss=0.252377
Batch=839, step=21240, lr=0.111500, batch loss=0.025763, epoch loss=0.278140
Batch=899, step=21300, lr=0.111250, batch loss=0.028808, epoch loss=0.306948
Batch=959, step=21360, lr=0.111000, batch loss=0.011050, epoch loss=0.317998
Batch=1019, step=21420, lr=0.111000, batch loss=0.013832, epoch loss=0.331830
Batch=1079, step=21480, lr=0.110750, batch loss=0.001641, epoch loss=0.333471
Batch=1139, step=21540, lr=0.110500, batch loss=0.013054, epoch loss=0.346525
Batch=1199, step=21600, lr=0.110250, batch loss=0.005238, epoch loss=0.351763
Epoch=17, step=21600, lr=0.110250, epoch loss=0.351763
Batch=59, step=21660, lr=0.110000, batch loss=0.002817, epoch loss=0.002817
Batch=119, step=21720, lr=0.109750, batch loss=0.006652, epoch loss=0.009469
Batch=179, step=21780, lr=0.109500, batch loss=0.013292, epoch loss=0.022761
Batch=239, step=21840, lr=0.109250, batch loss=0.011172, epoch loss=0.033933
Batch=299, step=21900, lr=0.108750, batch loss=0.001874, epoch loss=0.035808
Batch=359, step=21960, lr=0.108750, batch loss=0.010741, epoch loss=0.046549
Batch=419, step=22020, lr=0.108500, batch loss=0.011719, epoch loss=0.058267
Batch=479, step=22080, lr=0.108250, batch loss=0.004315, epoch loss=0.062582
Batch=539, step=22140, lr=0.108000, batch loss=0.015112, epoch loss=0.077695
Batch=599, step=22200, lr=0.107750, batch loss=0.016293, epoch loss=0.093987
Batch=659, step=22260, lr=0.107500, batch loss=0.014503, epoch loss=0.108490
Batch=719, step=22320, lr=0.107250, batch loss=0.033799, epoch loss=0.142289
Batch=779, step=22380, lr=0.107000, batch loss=0.040695, epoch loss=0.182984
Batch=839, step=22440, lr=0.106750, batch loss=0.022296, epoch loss=0.205280
Batch=899, step=22500, lr=0.106500, batch loss=0.021275, epoch loss=0.226555
Batch=959, step=22560, lr=0.106250, batch loss=0.015935, epoch loss=0.242490
Batch=1019, step=22620, lr=0.106000, batch loss=0.011160, epoch loss=0.253650
Batch=1079, step=22680, lr=0.105750, batch loss=0.000425, epoch loss=0.254075
Batch=1139, step=22740, lr=0.105500, batch loss=0.012119, epoch loss=0.266194
Batch=1199, step=22800, lr=0.105250, batch loss=0.004883, epoch loss=0.271077
Epoch=18, step=22800, lr=0.105250, epoch loss=0.271077
Batch=59, step=22860, lr=0.105000, batch loss=0.002066, epoch loss=0.002066
Batch=119, step=22920, lr=0.104750, batch loss=0.005814, epoch loss=0.007880
Batch=179, step=22980, lr=0.104500, batch loss=0.011116, epoch loss=0.018996
Batch=239, step=23040, lr=0.104000, batch loss=0.012091, epoch loss=0.031087
Batch=299, step=23100, lr=0.103750, batch loss=0.008218, epoch loss=0.039305
Batch=359, step=23160, lr=0.103750, batch loss=0.011430, epoch loss=0.050734
Batch=419, step=23220, lr=0.103250, batch loss=0.011039, epoch loss=0.061773
Batch=479, step=23280, lr=0.103250, batch loss=0.002549, epoch loss=0.064323
Batch=539, step=23340, lr=0.103000, batch loss=0.017224, epoch loss=0.081547
Batch=599, step=23400, lr=0.102750, batch loss=0.014309, epoch loss=0.095856
Batch=659, step=23460, lr=0.102500, batch loss=0.010741, epoch loss=0.106597
Batch=719, step=23520, lr=0.102250, batch loss=0.015109, epoch loss=0.121706
Batch=779, step=23580, lr=0.102000, batch loss=0.022469, epoch loss=0.144176
Batch=839, step=23640, lr=0.101750, batch loss=0.025372, epoch loss=0.169548
Batch=899, step=23700, lr=0.101500, batch loss=0.024005, epoch loss=0.193553
Batch=959, step=23760, lr=0.101250, batch loss=0.009030, epoch loss=0.202582
Batch=1019, step=23820, lr=0.101000, batch loss=0.008170, epoch loss=0.210753
Batch=1079, step=23880, lr=0.100750, batch loss=0.000988, epoch loss=0.211741
Batch=1139, step=23940, lr=0.100500, batch loss=0.009689, epoch loss=0.221429
Batch=1199, step=24000, lr=0.100250, batch loss=0.004845, epoch loss=0.226275
Epoch=19, step=24000, lr=0.100250, epoch loss=0.226275
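
Reading the log above: "Batch" is the 0-based minibatch index within the current epoch (1200 batches per epoch, printed every 60), "step" is the global update count, "lr" decreases roughly linearly across the run, "batch loss" is the loss of the just-processed minibatch, and "epoch loss" accumulates batch losses since the start of the epoch. The following is a minimal, hypothetical OCaml sketch of a loop that would emit lines in this format; it is not ocannl's training loop, and the schedule constants and the placeholder train_step are assumptions fitted by eye to the printed values.

(* Hypothetical sketch (not ocannl's API) of the logging pattern above. *)
let () =
  let batches_per_epoch = 1200 and log_every = 60 in
  (* Assumed linear decay; constants only approximate the printed lr values. *)
  let lr_at step = 0.20025 -. float_of_int step /. 240_000. in
  (* Placeholder for the real forward/backward/update step. *)
  let train_step ~lr:_ _batch = Random.float 0.05 in
  for epoch = 15 to 19 do
    let epoch_loss = ref 0.0 in
    for batch = 0 to batches_per_epoch - 1 do
      let step = (epoch * batches_per_epoch) + batch + 1 in
      let lr = lr_at step in
      let batch_loss = train_step ~lr batch in
      epoch_loss := !epoch_loss +. batch_loss;
      if (batch + 1) mod log_every = 0 then
        Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
          batch step lr batch_loss !epoch_loss
    done;
    let last_step = (epoch + 1) * batches_per_epoch in
    Printf.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n"
      epoch last_step (lr_at last_step) !epoch_loss
  done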


Half-moons scatterplot and decision boundary:
┌────────────────────────────────────────────────────────────────────────────────────────────────────┐
│********************************#*******************************************************************│
│**********************#*#*#######*###*#####*********************************************************│
│**********************#########################*****************************************************│
│*****************#**########*######*###########*###*************************************************│
│***************#################*###################************************************************│
│************######*#################*#################**********************************************│
│**********#*#####*########*#**************##*#########*#********************************************│
│***********########*##*#******************#*****##########****************************************..│
│***********###########*************************############*************************************....│
│********######*####*********************************###*###*#********************************.......│
│*******######**##*************....*****************#*######*#******************************.........│
│*******##*##**##**********...........***************########*##**************************...........│
│*****#######************.......%...%%...***************#########***********************...........%.│
│******######***********.........%.........**************##*#####**********************........%.%.%.│
│***#########**********.........%%%.%%......*************#*#######********************........%.%%%%.│
│****#######**********..........%%%%.........************#########*******************.........%%.%%.%│
│**#######************..........%%%%%%%........*************###*###*****************..........%%%%%%.│
│*##*####************...........%%%%%%%.........***********########***************............%%%%%%.│
│*#######************...........%%%%%%%...........***********#######*************.............%%%%%%.│
│*##*####***********............%%.%%%%%...........***********####**************.............%%%%%%%.│
│*#####*#***********.............%%%%%%%............**********##*###***********...............%%%%%..│
│#######***********.............%.%%%%%%..............********#######*********..............%%%%.%%..│
│#####*#**********...............%%%%%%%...............*******#######*******................%%%%%%%%.│
│###*#*#**********...............%%%%%%%%%..............*******######******.................%%%%%%...│
│#######*********.................%%%%%%%%................****###*###*****.................%%%%%%....│
│######**********.................%%%%%%%%%................***#*###******................%%%%%%%%%...│
│*#*##*#********...................%%%%%%%%%%................**######***..................%%%%%%.....│
│#****##********....................%%%%%%%%%.................**###*#*.................%.%%%%%%%.....│
│**************.....................%.%%%%%%...................******...................%.%%.%%......│
│*************........................%..%%%%%%%.................***...............%.%%%%%%%%%.......│
│*************.........................%.%%%.%%%%.................*................%%%%%%%.%.%.......│
│************............................%..%%%%..%................................%%%%%%%%..........│
│************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........│
│***********..............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........│
│***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............│
│**********...................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............│
│**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................│
│*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................│
│********.............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................│
│********................................................%...%%%%.%%.%%%%..%.........................│
└────────────────────────────────────────────────────────────────────────────────────────────────────┘
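
The plot above is the test's own ASCII rendering of the half-moons dataset together with the learned decision regions. For context, the half-moons data is the classic synthetic benchmark of two interleaving semicircles with added noise; the sketch below generates such points in plain OCaml. The function name, parameters, and noise model are illustrative assumptions, not the generator used by the ocannl example.

(* Hypothetical half-moons generator: two interleaving semicircles plus noise. *)
let half_moons ?(n = 1200) ?(noise = 0.1) () =
  let pi = 4.0 *. atan 1.0 in
  List.init n (fun i ->
      let t = pi *. Random.float 1.0 in
      let jitter () = noise *. (Random.float 2.0 -. 1.0) in
      if i mod 2 = 0 then
        (* upper moon, label +1 *)
        (cos t +. jitter (), sin t +. jitter (), 1.0)
      else
        (* lower moon, shifted and flipped, label -1 *)
        (1.0 -. cos t +. jitter (), 0.5 -. sin t +. jitter (), -1.0))

let () =
  let points = half_moons ~n:8 () in
  List.iter (fun (x, y, c) -> Printf.printf "% .3f % .3f  %+.0f\n" x y c) points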
2025-04-10 12:34.32 ---> saved as "6ad287c98f5ae7eecb0f63461fbe2a8df65cf419ffe211c1b5e98bca8d6af63d"
Job succeeded
2025-04-10 12:34.32: Job succeeded