
alpine-3.21-5.3_opam-2.3


Logs

2025-05-09 15:39.37: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (e3497ade7ab3c160dfa7da252b570cf008023498) (linux-x86_64:alpine-3.21-5.3_opam-2.3)
Base: ocaml/opam:alpine-3.21-ocaml-5.3@sha256:e2ee63eda00d030de85e2df8292477e3b768ff41108d47f21b9146fd8250d962
Opam project build


To reproduce locally:


git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard e3497ade
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:alpine-3.21-ocaml-5.3@sha256:e2ee63eda00d030de85e2df8292477e3b768ff41108d47f21b9146fd8250d962
# alpine-3.21-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 || git fetch origin master) && git reset -q --hard 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build


END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
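
If you want to keep and poke at the resulting image rather than just check that it builds, tagging the build and opening a shell in it works with standard Docker flags (the image name ocannl-ci-repro below is only a placeholder):

docker build -t ocannl-ci-repro .
docker run --rm -it ocannl-ci-repro sh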


2025-05-09 15:39.37: Using cache hint "ahrefs/ocannl-ocaml/opam:alpine-3.21-ocaml-5.3@sha256:e2ee63eda00d030de85e2df8292477e3b768ff41108d47f21b9146fd8250d962-alpine-3.21-5.3_opam-2.3-cfc8860510df1264e4dceb092b0ce2dc"
2025-05-09 15:39.37: Using OBuilder spec:
((from ocaml/opam:alpine-3.21-ocaml-5.3@sha256:e2ee63eda00d030de85e2df8292477e3b768ff41108d47f21b9146fd8250d962)
(comment alpine-3.21-5.3_opam-2.3)
(user (uid 1000) (gid 1000))
(env CLICOLOR_FORCE 1)
(env OPAMCOLOR always)
(workdir /src)
(run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
(run (shell "opam init --reinit -ni"))
(run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
(workdir /src)
(run (shell "sudo chown opam /src"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 || git fetch origin master) && git reset -q --hard 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 && git log --no-decorate -n1 --oneline && opam update -u"))
(copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
(run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
(run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
(env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
(env CI true)
(env OCAMLCI true)
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
(copy (src .) (dst /src))
(run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)


2025-05-09 15:39.37: Waiting for resource in pool OCluster
2025-05-09 15:39.38: Waiting for worker…
2025-05-09 15:39.38: Got resource from pool OCluster
Building on asteria.caelum.ci.dev
HEAD is now at 86bf625e cuda_backend.ml tiny refactoring fixes
HEAD is now at e3497ade gcc_backend.ml: tiny fixes to recent refactorings


(from ocaml/opam:alpine-3.21-ocaml-5.3@sha256:e2ee63eda00d030de85e2df8292477e3b768ff41108d47f21b9146fd8250d962)
2025-05-09 15:39.39 ---> using "f154ab12a6d575cd956c81477efd0302c0ef4dccea7e9c1952fd02140b057756" from cache


/: (comment alpine-3.21-5.3_opam-2.3)


/: (user (uid 1000) (gid 1000))


/: (env CLICOLOR_FORCE 1)


/: (env OPAMCOLOR always)


/: (workdir /src)


/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-05-09 15:39.39 ---> using "6310931d2bf5fef99ec3c0d979621049ed21e3351bd07029cd06bddb57173709" from cache


/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.


This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.


Continue? [y/n] y
Format upgrade done.


<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
2025-05-09 15:39.39 ---> using "675c7208637c697a7a3366c064d560f88c87ccaec86349f5914d7ac40053bae7" from cache


/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-05-09 15:39.39 ---> using "6021679194ffece7289cd5c5b221b2f3071db6c856a010fda1808be99b3a5c12" from cache


/src: (workdir /src)


/src: (run (shell "sudo chown opam /src"))
2025-05-09 15:39.39 ---> using "91e9c49728c8d128b944322e480ab1d9f5be9d0ae301c2ee057902eeb7ca9559" from cache


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 || git fetch origin master) && git reset -q --hard 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
* branch                  master     -> FETCH_HEAD
da74d7829f..e28c86445c  master     -> origin/master
997e4758ac Merge pull request #27839 from public-release/opam-publish-base.v0.17.2


<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] synchronised from git+file:///home/opam/opam-repository


Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run: eval $(opam env)
2025-05-09 15:39.39 ---> using "51888d037c9675ad285c6f5a848dd1a42259a464610778c5b4536c5ef63eff86" from cache


/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
2025-05-09 15:39.39 ---> using "de001837541ad7d214f8939012d3b4faa6f14ddfe64705dd4ec95c45b64de330" from cache


/src: (run (network host)
(shell  "opam pin add -yn neural_nets_lib.dev './' && \
\nopam pin add -yn arrayjit.dev './'"))
[neural_nets_lib.dev] synchronised (file:///src)
neural_nets_lib is now pinned to file:///src (version dev)
[arrayjit.dev] synchronised (file:///src)
arrayjit is now pinned to file:///src (version dev)
2025-05-09 15:39.39 ---> using "416ff8eacf6a1880f8d8253a54cc85444b5bba5afca0f583900d49c2004705f8" from cache


/src: (run (network host)
(shell "echo '(lang dune 3.0)' > './dune-project'"))
2025-05-09 15:39.39 ---> using "ac8b8bf2b5a98120c2d60c29184602e7589875f9ca39ea26192783f8a0f1e752" from cache


/src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")


/src: (env CI true)


/src: (env OCAMLCI true)


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
+ /usr/bin/sudo "apk" "update"
- fetch https://dl-cdn.alpinelinux.org/alpine/v3.21/main/x86_64/APKINDEX.tar.gz
- fetch https://dl-cdn.alpinelinux.org/alpine/v3.21/community/x86_64/APKINDEX.tar.gz
- fetch https://dl-cdn.alpinelinux.org/alpine/edge/main/x86_64/APKINDEX.tar.gz
- fetch https://dl-cdn.alpinelinux.org/alpine/edge/community/x86_64/APKINDEX.tar.gz
- fetch https://dl-cdn.alpinelinux.org/alpine/edge/testing/x86_64/APKINDEX.tar.gz
- v3.21.3-452-gee5ce66b724 [https://dl-cdn.alpinelinux.org/alpine/v3.21/main]
- v3.21.3-459-gb091f834ac8 [https://dl-cdn.alpinelinux.org/alpine/v3.21/community]
- v20250108-7778-g00577f87725 [https://dl-cdn.alpinelinux.org/alpine/edge/main]
- v20250108-7443-g8323c36c8cb [https://dl-cdn.alpinelinux.org/alpine/edge/community]
- v20250108-7443-g8323c36c8cb [https://dl-cdn.alpinelinux.org/alpine/edge/testing]
- OK: 58155 distinct packages available


<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[arrayjit.dev] synchronised (file:///src)
[neural_nets_lib.dev] synchronised (file:///src)


[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).


The following system packages will first need to be installed:
libffi-dev


<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>


+ /usr/bin/sudo "apk" "add" "libffi-dev"
- (1/2) Installing linux-headers (6.6-r1)
- (2/2) Installing libffi-dev (3.4.7-r0)
- OK: 312 MiB in 104 packages
2025-05-09 15:39.39 ---> using "f9f03a7a121e0d923fd5b471ca89a4a497a822abf7f75b584e6fd3c406b5e664" from cache


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam install $DEPS"))
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following actions will be performed:
=== install 74 packages
- install angstrom                0.16.1
- install astring                 0.8.5
- install backoff                 0.1.1
- install base                    v0.17.2
- install bigarray-compat         1.1.0
- install bigstringaf             0.10.0
- install camlp-streams           5.0.1
- install cmdliner                1.3.0
- install conf-libffi             2.0.0
- install conf-pkg-config         4
- install cppo                    1.8.0
- install csexp                   1.5.2
- install ctypes                  0.23.0
- install ctypes-foreign          0.23.0
- install dune                    3.18.2
- install dune-configurator       3.18.2
- install fieldslib               v0.17.0
- install fmt                     0.10.0
- install integers                0.7.0
- install jane-street-headers     v0.17.0
- install jst-config              v0.17.0
- install logs                    0.8.0
- install mdx                     2.5.0
- install mtime                   2.1.0
- install multicore-magic         2.3.1
- install num                     1.5-1
- install ocaml-compiler-libs     v0.17.0
- install ocaml-syntax-shims      1.0.0
- install ocaml-version           4.0.0
- install ocaml_intrinsics_kernel v0.17.1
- install ocamlbuild              0.16.1
- install ocamlfind               1.9.8
- install parsexp                 v0.17.0
- install ppx_assert              v0.17.0
- install ppx_base                v0.17.0
- install ppx_cold                v0.17.0
- install ppx_compare             v0.17.0
- install ppx_derivers            1.2.1
- install ppx_deriving            6.0.3
- install ppx_enumerate           v0.17.0
- install ppx_expect              v0.17.2
- install ppx_fields_conv         v0.17.0
- install ppx_globalize           v0.17.0
- install ppx_hash                v0.17.0
- install ppx_here                v0.17.0
- install ppx_inline_test         v0.17.0
- install ppx_minidebug           2.2.0
- install ppx_optcomp             v0.17.0
- install ppx_sexp_conv           v0.17.0
- install ppx_string              v0.17.0
- install ppx_variants_conv       v0.17.0
- install ppxlib                  0.35.0
- install ppxlib_jane             v0.17.2
- install printbox                0.12
- install printbox-ext-plot       0.12
- install printbox-html           0.12
- install printbox-md             0.12
- install printbox-text           0.12
- install ptime                   1.2.0
- install re                      1.12.0
- install result                  1.5
- install saturn_lockfree         0.5.0
- install seq                     base
- install sexplib                 v0.17.0
- install sexplib0                v0.17.0
- install stdio                   v0.17.0
- install stdlib-shims            0.3.0
- install thread-local-storage    0.2
- install time_now                v0.17.0
- install topkg                   1.0.8
- install tyxml                   4.6.0
- install uucp                    16.0.0
- install uutf                    1.0.4
- install variantslib             v0.17.0


<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved backoff.0.1.1  (cached)
-> retrieved angstrom.0.16.1  (cached)
-> retrieved astring.0.8.5  (cached)
-> retrieved bigarray-compat.1.1.0  (cached)
-> retrieved bigstringaf.0.10.0  (cached)
-> retrieved base.v0.17.2  (cached)
-> retrieved camlp-streams.5.0.1  (cached)
-> retrieved cmdliner.1.3.0  (cached)
-> retrieved cppo.1.8.0  (cached)
-> installed conf-pkg-config.4
-> retrieved csexp.1.5.2  (cached)
-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0  (cached)
-> installed conf-libffi.2.0.0
-> retrieved fieldslib.v0.17.0  (cached)
-> retrieved fmt.0.10.0  (cached)
-> retrieved integers.0.7.0  (cached)
-> retrieved jane-street-headers.v0.17.0  (cached)
-> retrieved jst-config.v0.17.0  (cached)
-> retrieved logs.0.8.0  (cached)
-> retrieved mtime.2.1.0  (cached)
-> retrieved mdx.2.5.0  (cached)
-> retrieved multicore-magic.2.3.1  (cached)
-> retrieved num.1.5-1  (cached)
-> retrieved ocaml-compiler-libs.v0.17.0  (cached)
-> retrieved ocaml-syntax-shims.1.0.0  (cached)
-> retrieved ocaml-version.4.0.0  (cached)
-> retrieved ocaml_intrinsics_kernel.v0.17.1  (cached)
-> retrieved ocamlbuild.0.16.1  (cached)
-> retrieved ocamlfind.1.9.8  (cached)
-> retrieved dune.3.18.2, dune-configurator.3.18.2  (cached)
-> retrieved parsexp.v0.17.0  (cached)
-> retrieved ppx_assert.v0.17.0  (cached)
-> retrieved ppx_base.v0.17.0  (cached)
-> retrieved ppx_cold.v0.17.0  (cached)
-> retrieved ppx_compare.v0.17.0  (cached)
-> retrieved ppx_derivers.1.2.1  (cached)
-> retrieved ppx_enumerate.v0.17.0  (cached)
-> retrieved ppx_deriving.6.0.3  (cached)
-> retrieved ppx_fields_conv.v0.17.0  (cached)
-> retrieved ppx_expect.v0.17.2  (cached)
-> retrieved ppx_globalize.v0.17.0  (cached)
-> retrieved ppx_hash.v0.17.0  (cached)
-> installed cmdliner.1.3.0
-> installed num.1.5-1
-> retrieved ppx_here.v0.17.0  (cached)
-> retrieved ppx_inline_test.v0.17.0  (cached)
-> retrieved ppx_optcomp.v0.17.0  (cached)
-> retrieved ppx_sexp_conv.v0.17.0  (cached)
-> retrieved ppx_minidebug.2.2.0  (cached)
-> retrieved ppx_string.v0.17.0  (cached)
-> retrieved ppx_variants_conv.v0.17.0  (cached)
-> retrieved ppxlib_jane.v0.17.2  (cached)
-> retrieved ptime.1.2.0  (cached)
-> retrieved re.1.12.0  (cached)
-> retrieved result.1.5  (cached)
-> retrieved ppxlib.0.35.0  (cached)
-> retrieved seq.base  (cached)
-> installed seq.base
-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12  (cached)
-> retrieved saturn_lockfree.0.5.0  (cached)
-> retrieved sexplib.v0.17.0  (cached)
-> retrieved sexplib0.v0.17.0  (cached)
-> retrieved stdio.v0.17.0  (cached)
-> retrieved stdlib-shims.0.3.0  (cached)
-> retrieved thread-local-storage.0.2  (cached)
-> retrieved time_now.v0.17.0  (cached)
-> retrieved topkg.1.0.8  (cached)
-> retrieved tyxml.4.6.0  (cached)
-> retrieved uutf.1.0.4  (cached)
-> retrieved variantslib.v0.17.0  (cached)
-> retrieved uucp.16.0.0  (cached)
-> installed ocamlfind.1.9.8
-> installed ocamlbuild.0.16.1
-> installed topkg.1.0.8
-> installed uutf.1.0.4
-> installed mtime.2.1.0
-> installed fmt.0.10.0
-> installed ptime.1.2.0
-> installed astring.0.8.5
-> installed logs.0.8.0
-> installed dune.3.18.2
-> installed jane-street-headers.v0.17.0
-> installed ppx_derivers.1.2.1
-> installed csexp.1.5.2
-> installed backoff.0.1.1
-> installed bigarray-compat.1.1.0
-> installed camlp-streams.5.0.1
-> installed cppo.1.8.0
-> installed multicore-magic.2.3.1
-> installed ocaml-compiler-libs.v0.17.0
-> installed ocaml-syntax-shims.1.0.0
-> installed ocaml-version.4.0.0
-> installed ocaml_intrinsics_kernel.v0.17.1
-> installed printbox.0.12
-> installed re.1.12.0
-> installed result.1.5
-> installed sexplib0.v0.17.0
-> installed stdlib-shims.0.3.0
-> installed thread-local-storage.0.2
-> installed saturn_lockfree.0.5.0
-> installed integers.0.7.0
-> installed dune-configurator.3.18.2
-> installed parsexp.v0.17.0
-> installed bigstringaf.0.10.0
-> installed mdx.2.5.0
-> installed sexplib.v0.17.0
-> installed angstrom.0.16.1
-> installed tyxml.4.6.0
-> installed printbox-html.0.12
-> installed ctypes.0.23.0
-> installed base.v0.17.2
-> installed fieldslib.v0.17.0
-> installed variantslib.v0.17.0
-> installed stdio.v0.17.0
-> installed ctypes-foreign.0.23.0
-> installed uucp.16.0.0
-> installed printbox-text.0.12
-> installed printbox-md.0.12
-> installed printbox-ext-plot.0.12
-> installed ppxlib.0.35.0
-> installed ppx_optcomp.v0.17.0
-> installed ppxlib_jane.v0.17.2
-> installed ppx_cold.v0.17.0
-> installed ppx_here.v0.17.0
-> installed ppx_variants_conv.v0.17.0
-> installed ppx_fields_conv.v0.17.0
-> installed ppx_globalize.v0.17.0
-> installed ppx_enumerate.v0.17.0
-> installed ppx_deriving.6.0.3
-> installed ppx_compare.v0.17.0
-> installed ppx_sexp_conv.v0.17.0
-> installed ppx_hash.v0.17.0
-> installed ppx_assert.v0.17.0
-> installed ppx_minidebug.2.2.0
-> installed ppx_base.v0.17.0
-> installed jst-config.v0.17.0
-> installed ppx_string.v0.17.0
-> installed time_now.v0.17.0
-> installed ppx_inline_test.v0.17.0
-> installed ppx_expect.v0.17.2
Done.
# To update the current shell environment, run: eval $(opam env)
2025-05-09 15:39.39 ---> using "656d70ee67c6b2615b09150c0b61326d63fa0e071aad3cce442b9a9642071ed3" from cache


/src: (copy (src .) (dst /src))
2025-05-09 15:39.39 ---> saved as "062a2d20a1eb1acd6bf2dbd99d4ff7bacc4e3fc26012ec3b4560bbc495afdbcc"


/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
(cd _build/default/test_ppx && ./test_ppx_op.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/f0404a487fffcfc0e2b324ec31af886f/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)


Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/024793de964f7c9ba4abe9edc48d50e9/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test && ./moons_demo_parallel_run.exe)


Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
("Set log_level to" 1)
└─{orphaned from #2}
Retrieving commandline, environment, or config file variable ocannl_backend
Found cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout
Not found, using default false
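
The "Retrieving commandline, environment, or config file variable ..." lines above describe a three-stage lookup: the command line is consulted first, then the environment, then the ocannl_config file, and finally a printed default. A minimal OCaml sketch of that order, for orientation only (the helper names and the inline config table below are hypothetical, not OCANNL's actual code):

(* Hypothetical sketch of the lookup order implied by the log messages:
   command line, then environment, then config file, then default. *)
let find_arg key =
  (* look for "--<key>=<value>" on the command line *)
  let prefix = "--" ^ key ^ "=" in
  let plen = String.length prefix in
  Array.to_list Sys.argv
  |> List.find_map (fun a ->
         if String.length a > plen && String.sub a 0 plen = prefix then
           Some (String.sub a plen (String.length a - plen))
         else None)

(* stand-in for parsing the ocannl_config file *)
let config_file = [ ("ocannl_log_level", "0"); ("ocannl_backend", "cc") ]

let retrieve ~default key =
  Printf.printf
    "Retrieving commandline, environment, or config file variable %s\n" key;
  match find_arg key with
  | Some v -> Printf.printf "Found %s, on the command line\n" v; v
  | None ->
    (match Sys.getenv_opt (String.uppercase_ascii key) with
     | Some v -> Printf.printf "Found %s, in the environment\n" v; v
     | None ->
       (match List.assoc_opt key config_file with
        | Some v -> Printf.printf "Found %s, in the config file\n" v; v
        | None -> Printf.printf "Not found, using default %s\n" default; default))

let () = ignore (retrieve ~default:"0" "ocannl_log_level")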
Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453
Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087
Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382
Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039
Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216
Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512
Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081
Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141
Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385
Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263
Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603
Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902
Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024
Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685
Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407
Batch=959, step=960, lr=0.196250, batch loss=0.245136, epoch loss=44.035543
Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049
Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829
Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269
Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952
Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952
Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138
Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381
Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025
Batch=239, step=1440, lr=0.194250, batch loss=0.347897, epoch loss=1.058921
Batch=299, step=1500, lr=0.194000, batch loss=0.247348, epoch loss=1.306269
Batch=359, step=1560, lr=0.193750, batch loss=0.316559, epoch loss=1.622828
Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.935563
Batch=479, step=1680, lr=0.193250, batch loss=0.276268, epoch loss=2.211831
Batch=539, step=1740, lr=0.193000, batch loss=0.209826, epoch loss=2.421657
Batch=599, step=1800, lr=0.192750, batch loss=0.250384, epoch loss=2.672042
Batch=659, step=1860, lr=0.192500, batch loss=0.367201, epoch loss=3.039243
Batch=719, step=1920, lr=0.192250, batch loss=0.354917, epoch loss=3.394160
Batch=779, step=1980, lr=0.192000, batch loss=0.381382, epoch loss=3.775542
Batch=839, step=2040, lr=0.191750, batch loss=0.339637, epoch loss=4.115179
Batch=899, step=2100, lr=0.191500, batch loss=0.295234, epoch loss=4.410413
Batch=959, step=2160, lr=0.191250, batch loss=0.214033, epoch loss=4.624446
Batch=1019, step=2220, lr=0.191000, batch loss=0.330972, epoch loss=4.955419
Batch=1079, step=2280, lr=0.190750, batch loss=0.208236, epoch loss=5.163654
Batch=1139, step=2340, lr=0.190500, batch loss=0.278374, epoch loss=5.442028
Batch=1199, step=2400, lr=0.190250, batch loss=0.220793, epoch loss=5.662821
Epoch=1, step=2400, lr=0.190250, epoch loss=5.662821
Batch=59, step=2460, lr=0.190000, batch loss=0.230363, epoch loss=0.230363
Batch=119, step=2520, lr=0.189750, batch loss=0.195962, epoch loss=0.426325
Batch=179, step=2580, lr=0.189500, batch loss=0.221156, epoch loss=0.647481
Batch=239, step=2640, lr=0.189250, batch loss=0.328098, epoch loss=0.975578
Batch=299, step=2700, lr=0.189000, batch loss=0.202947, epoch loss=1.178525
Batch=359, step=2760, lr=0.188750, batch loss=0.289890, epoch loss=1.468415
Batch=419, step=2820, lr=0.188500, batch loss=0.281744, epoch loss=1.750160
Batch=479, step=2880, lr=0.188250, batch loss=0.264844, epoch loss=2.015004
Batch=539, step=2940, lr=0.188000, batch loss=0.203798, epoch loss=2.218802
Batch=599, step=3000, lr=0.187750, batch loss=0.248079, epoch loss=2.466881
Batch=659, step=3060, lr=0.187500, batch loss=0.345056, epoch loss=2.811937
Batch=719, step=3120, lr=0.187250, batch loss=0.343542, epoch loss=3.155479
Batch=779, step=3180, lr=0.187000, batch loss=0.366989, epoch loss=3.522468
Batch=839, step=3240, lr=0.186750, batch loss=0.321779, epoch loss=3.844248
Batch=899, step=3300, lr=0.186500, batch loss=0.283865, epoch loss=4.128112
Batch=959, step=3360, lr=0.186250, batch loss=0.214411, epoch loss=4.342523
Batch=1019, step=3420, lr=0.186000, batch loss=0.306400, epoch loss=4.648923
Batch=1079, step=3480, lr=0.185750, batch loss=0.177313, epoch loss=4.826236
Batch=1139, step=3540, lr=0.185500, batch loss=0.235576, epoch loss=5.061812
Batch=1199, step=3600, lr=0.185250, batch loss=0.197911, epoch loss=5.259723
Epoch=2, step=3600, lr=0.185250, epoch loss=5.259723
Batch=59, step=3660, lr=0.185000, batch loss=0.226539, epoch loss=0.226539
Batch=119, step=3720, lr=0.184750, batch loss=0.191802, epoch loss=0.418341
Batch=179, step=3780, lr=0.184500, batch loss=0.210712, epoch loss=0.629053
Batch=239, step=3840, lr=0.184250, batch loss=0.314104, epoch loss=0.943157
Batch=299, step=3900, lr=0.184000, batch loss=0.206011, epoch loss=1.149168
Batch=359, step=3960, lr=0.183750, batch loss=0.291253, epoch loss=1.440420
Batch=419, step=4020, lr=0.183500, batch loss=0.297434, epoch loss=1.737854
Batch=479, step=4080, lr=0.183250, batch loss=0.260511, epoch loss=1.998365
Batch=539, step=4140, lr=0.183000, batch loss=0.195229, epoch loss=2.193593
Batch=599, step=4200, lr=0.182750, batch loss=0.230395, epoch loss=2.423989
Batch=659, step=4260, lr=0.182500, batch loss=0.337132, epoch loss=2.761121
Batch=719, step=4320, lr=0.182250, batch loss=0.347122, epoch loss=3.108243
Batch=779, step=4380, lr=0.182000, batch loss=0.346320, epoch loss=3.454563
Batch=839, step=4440, lr=0.181750, batch loss=0.317770, epoch loss=3.772333
Batch=899, step=4500, lr=0.181500, batch loss=0.283906, epoch loss=4.056239
Batch=959, step=4560, lr=0.181250, batch loss=0.238371, epoch loss=4.294610
Batch=1019, step=4620, lr=0.181000, batch loss=0.336891, epoch loss=4.631501
Batch=1079, step=4680, lr=0.180750, batch loss=0.208592, epoch loss=4.840094
Batch=1139, step=4740, lr=0.180500, batch loss=0.249212, epoch loss=5.089306
Batch=1199, step=4800, lr=0.180250, batch loss=0.191768, epoch loss=5.281074
Epoch=3, step=4800, lr=0.180250, epoch loss=5.281074
Batch=59, step=4860, lr=0.180000, batch loss=0.228079, epoch loss=0.228079
Batch=119, step=4920, lr=0.179750, batch loss=0.190219, epoch loss=0.418298
Batch=179, step=4980, lr=0.179500, batch loss=0.205905, epoch loss=0.624203
Batch=239, step=5040, lr=0.179250, batch loss=0.309067, epoch loss=0.933269
Batch=299, step=5100, lr=0.179000, batch loss=0.204593, epoch loss=1.137862
Batch=359, step=5160, lr=0.178750, batch loss=0.271139, epoch loss=1.409001
Batch=419, step=5220, lr=0.178500, batch loss=0.264055, epoch loss=1.673056
Batch=479, step=5280, lr=0.178250, batch loss=0.239692, epoch loss=1.912748
Batch=539, step=5340, lr=0.178000, batch loss=0.189445, epoch loss=2.102194
Batch=599, step=5400, lr=0.177750, batch loss=0.231019, epoch loss=2.333213
Batch=659, step=5460, lr=0.177500, batch loss=0.323592, epoch loss=2.656805
Batch=719, step=5520, lr=0.177250, batch loss=0.325867, epoch loss=2.982672
Batch=779, step=5580, lr=0.177000, batch loss=0.343179, epoch loss=3.325851
Batch=839, step=5640, lr=0.176750, batch loss=0.309345, epoch loss=3.635195
Batch=899, step=5700, lr=0.176500, batch loss=0.273171, epoch loss=3.908366
Batch=959, step=5760, lr=0.176250, batch loss=0.214921, epoch loss=4.123287
Batch=1019, step=5820, lr=0.176000, batch loss=0.339150, epoch loss=4.462436
Batch=1079, step=5880, lr=0.175750, batch loss=0.207828, epoch loss=4.670264
Batch=1139, step=5940, lr=0.175500, batch loss=0.240055, epoch loss=4.910319
Batch=1199, step=6000, lr=0.175250, batch loss=0.186862, epoch loss=5.097180
Epoch=4, step=6000, lr=0.175250, epoch loss=5.097180
Batch=59, step=6060, lr=0.175000, batch loss=0.230529, epoch loss=0.230529
Batch=119, step=6120, lr=0.174750, batch loss=0.194291, epoch loss=0.424820
Batch=179, step=6180, lr=0.174500, batch loss=0.201541, epoch loss=0.626361
Batch=239, step=6240, lr=0.174250, batch loss=0.302380, epoch loss=0.928741
Batch=299, step=6300, lr=0.174000, batch loss=0.203930, epoch loss=1.132671
Batch=359, step=6360, lr=0.173750, batch loss=0.266115, epoch loss=1.398786
Batch=419, step=6420, lr=0.173500, batch loss=0.265282, epoch loss=1.664067
Batch=479, step=6480, lr=0.173250, batch loss=0.243319, epoch loss=1.907387
Batch=539, step=6540, lr=0.173000, batch loss=0.192969, epoch loss=2.100355
Batch=599, step=6600, lr=0.172750, batch loss=0.234500, epoch loss=2.334855
Batch=659, step=6660, lr=0.172500, batch loss=0.312107, epoch loss=2.646963
Batch=719, step=6720, lr=0.172250, batch loss=0.314221, epoch loss=2.961184
Batch=779, step=6780, lr=0.172000, batch loss=0.333223, epoch loss=3.294407
Batch=839, step=6840, lr=0.171750, batch loss=0.303757, epoch loss=3.598164
Batch=899, step=6900, lr=0.171500, batch loss=0.268468, epoch loss=3.866633
Batch=959, step=6960, lr=0.171250, batch loss=0.211039, epoch loss=4.077671
Batch=1019, step=7020, lr=0.171000, batch loss=0.330462, epoch loss=4.408133
Batch=1079, step=7080, lr=0.170750, batch loss=0.180866, epoch loss=4.588999
Batch=1139, step=7140, lr=0.170500, batch loss=0.216313, epoch loss=4.805312
Batch=1199, step=7200, lr=0.170250, batch loss=0.181911, epoch loss=4.987223
Epoch=5, step=7200, lr=0.170250, epoch loss=4.987223
Batch=59, step=7260, lr=0.170000, batch loss=0.232877, epoch loss=0.232877
Batch=119, step=7320, lr=0.169750, batch loss=0.184424, epoch loss=0.417301
Batch=179, step=7380, lr=0.169500, batch loss=0.196292, epoch loss=0.613593
Batch=239, step=7440, lr=0.169250, batch loss=0.290823, epoch loss=0.904416
Batch=299, step=7500, lr=0.169000, batch loss=0.200837, epoch loss=1.105253
Batch=359, step=7560, lr=0.168750, batch loss=0.258435, epoch loss=1.363689
Batch=419, step=7620, lr=0.168500, batch loss=0.256808, epoch loss=1.620497
Batch=479, step=7680, lr=0.168250, batch loss=0.235998, epoch loss=1.856495
Batch=539, step=7740, lr=0.168000, batch loss=0.187895, epoch loss=2.044390
Batch=599, step=7800, lr=0.167750, batch loss=0.223924, epoch loss=2.268314
Batch=659, step=7860, lr=0.167500, batch loss=0.305915, epoch loss=2.574229
Batch=719, step=7920, lr=0.167250, batch loss=0.309289, epoch loss=2.883518
Batch=779, step=7980, lr=0.167000, batch loss=0.329942, epoch loss=3.213460
Batch=839, step=8040, lr=0.166750, batch loss=0.292425, epoch loss=3.505885
Batch=899, step=8100, lr=0.166500, batch loss=0.261775, epoch loss=3.767660
Batch=959, step=8160, lr=0.166250, batch loss=0.193295, epoch loss=3.960955
Batch=1019, step=8220, lr=0.166000, batch loss=0.314033, epoch loss=4.274988
Batch=1079, step=8280, lr=0.165750, batch loss=0.172099, epoch loss=4.447087
Batch=1139, step=8340, lr=0.165500, batch loss=0.209742, epoch loss=4.656829
Batch=1199, step=8400, lr=0.165250, batch loss=0.178275, epoch loss=4.835103
Epoch=6, step=8400, lr=0.165250, epoch loss=4.835103
Batch=59, step=8460, lr=0.165000, batch loss=0.229725, epoch loss=0.229725
Batch=119, step=8520, lr=0.164750, batch loss=0.175017, epoch loss=0.404742
Batch=179, step=8580, lr=0.164500, batch loss=0.187817, epoch loss=0.592559
Batch=239, step=8640, lr=0.164250, batch loss=0.278203, epoch loss=0.870762
Batch=299, step=8700, lr=0.164000, batch loss=0.191994, epoch loss=1.062755
Batch=359, step=8760, lr=0.163750, batch loss=0.248632, epoch loss=1.311388
Batch=419, step=8820, lr=0.163500, batch loss=0.245601, epoch loss=1.556988
Batch=479, step=8880, lr=0.163250, batch loss=0.228591, epoch loss=1.785580
Batch=539, step=8940, lr=0.163000, batch loss=0.178132, epoch loss=1.963712
Batch=599, step=9000, lr=0.162750, batch loss=0.217388, epoch loss=2.181101
Batch=659, step=9060, lr=0.162500, batch loss=0.294814, epoch loss=2.475915
Batch=719, step=9120, lr=0.162250, batch loss=0.296433, epoch loss=2.772348
Batch=779, step=9180, lr=0.162000, batch loss=0.316728, epoch loss=3.089075
Batch=839, step=9240, lr=0.161750, batch loss=0.287243, epoch loss=3.376318
Batch=899, step=9300, lr=0.161500, batch loss=0.251060, epoch loss=3.627378
Batch=959, step=9360, lr=0.161250, batch loss=0.190532, epoch loss=3.817911
Batch=1019, step=9420, lr=0.161000, batch loss=0.311728, epoch loss=4.129639
Batch=1079, step=9480, lr=0.160750, batch loss=0.191595, epoch loss=4.321234
Batch=1139, step=9540, lr=0.160500, batch loss=0.215772, epoch loss=4.537006
Batch=1199, step=9600, lr=0.160250, batch loss=0.165620, epoch loss=4.702626
Epoch=7, step=9600, lr=0.160250, epoch loss=4.702626
Batch=59, step=9660, lr=0.160000, batch loss=0.197217, epoch loss=0.197217
Batch=119, step=9720, lr=0.159750, batch loss=0.165467, epoch loss=0.362684
Batch=179, step=9780, lr=0.159500, batch loss=0.179286, epoch loss=0.541970
Batch=239, step=9840, lr=0.159250, batch loss=0.263837, epoch loss=0.805807
Batch=299, step=9900, lr=0.159000, batch loss=0.182187, epoch loss=0.987994
Batch=359, step=9960, lr=0.158750, batch loss=0.240842, epoch loss=1.228836
Batch=419, step=10020, lr=0.158500, batch loss=0.232979, epoch loss=1.461815
Batch=479, step=10080, lr=0.158250, batch loss=0.213194, epoch loss=1.675009
Batch=539, step=10140, lr=0.158000, batch loss=0.170694, epoch loss=1.845703
Batch=599, step=10200, lr=0.157750, batch loss=0.200247, epoch loss=2.045950
Batch=659, step=10260, lr=0.157500, batch loss=0.283032, epoch loss=2.328982
Batch=719, step=10320, lr=0.157250, batch loss=0.288754, epoch loss=2.617735
Batch=779, step=10380, lr=0.157000, batch loss=0.296880, epoch loss=2.914615
Batch=839, step=10440, lr=0.156750, batch loss=0.267657, epoch loss=3.182272
Batch=899, step=10500, lr=0.156500, batch loss=0.242699, epoch loss=3.424972
Batch=959, step=10560, lr=0.156250, batch loss=0.198668, epoch loss=3.623639
Batch=1019, step=10620, lr=0.156000, batch loss=0.295119, epoch loss=3.918758
Batch=1079, step=10680, lr=0.155750, batch loss=0.178662, epoch loss=4.097421
Batch=1139, step=10740, lr=0.155500, batch loss=0.205425, epoch loss=4.302846
Batch=1199, step=10800, lr=0.155250, batch loss=0.156138, epoch loss=4.458984
Epoch=8, step=10800, lr=0.155250, epoch loss=4.458984
Batch=59, step=10860, lr=0.155000, batch loss=0.177430, epoch loss=0.177430
Batch=119, step=10920, lr=0.154750, batch loss=0.152366, epoch loss=0.329795
Batch=179, step=10980, lr=0.154500, batch loss=0.167114, epoch loss=0.496909
Batch=239, step=11040, lr=0.154250, batch loss=0.242622, epoch loss=0.739531
Batch=299, step=11100, lr=0.154000, batch loss=0.169984, epoch loss=0.909515
Batch=359, step=11160, lr=0.153750, batch loss=0.222140, epoch loss=1.131654
Batch=419, step=11220, lr=0.153500, batch loss=0.229250, epoch loss=1.360905
Batch=479, step=11280, lr=0.153250, batch loss=0.202871, epoch loss=1.563775
Batch=539, step=11340, lr=0.153000, batch loss=0.159118, epoch loss=1.722894
Batch=599, step=11400, lr=0.152750, batch loss=0.178498, epoch loss=1.901392
Batch=659, step=11460, lr=0.152500, batch loss=0.264724, epoch loss=2.166116
Batch=719, step=11520, lr=0.152250, batch loss=0.256959, epoch loss=2.423075
Batch=779, step=11580, lr=0.152000, batch loss=0.273281, epoch loss=2.696355
Batch=839, step=11640, lr=0.151750, batch loss=0.255783, epoch loss=2.952138
Batch=899, step=11700, lr=0.151500, batch loss=0.212942, epoch loss=3.165080
Batch=959, step=11760, lr=0.151250, batch loss=0.168195, epoch loss=3.333275
Batch=1019, step=11820, lr=0.151000, batch loss=0.266032, epoch loss=3.599307
Batch=1079, step=11880, lr=0.150750, batch loss=0.149776, epoch loss=3.749083
Batch=1139, step=11940, lr=0.150500, batch loss=0.185522, epoch loss=3.934605
Batch=1199, step=12000, lr=0.150250, batch loss=0.139483, epoch loss=4.074088
Epoch=9, step=12000, lr=0.150250, epoch loss=4.074088
Batch=59, step=12060, lr=0.150000, batch loss=0.158027, epoch loss=0.158027
Batch=119, step=12120, lr=0.149750, batch loss=0.128834, epoch loss=0.286861
Batch=179, step=12180, lr=0.149500, batch loss=0.150389, epoch loss=0.437250
Batch=239, step=12240, lr=0.149250, batch loss=0.223284, epoch loss=0.660534
Batch=299, step=12300, lr=0.149000, batch loss=0.142595, epoch loss=0.803130
Batch=359, step=12360, lr=0.148750, batch loss=0.195474, epoch loss=0.998603
Batch=419, step=12420, lr=0.148500, batch loss=0.206189, epoch loss=1.204793
Batch=479, step=12480, lr=0.148250, batch loss=0.178781, epoch loss=1.383574
Batch=539, step=12540, lr=0.148000, batch loss=0.142576, epoch loss=1.526150
Batch=599, step=12600, lr=0.147750, batch loss=0.150450, epoch loss=1.676600
Batch=659, step=12660, lr=0.147500, batch loss=0.224849, epoch loss=1.901449
Batch=719, step=12720, lr=0.147250, batch loss=0.235381, epoch loss=2.136830
Batch=779, step=12780, lr=0.147000, batch loss=0.252610, epoch loss=2.389441
Batch=839, step=12840, lr=0.146750, batch loss=0.225004, epoch loss=2.614445
Batch=899, step=12900, lr=0.146500, batch loss=0.185276, epoch loss=2.799721
Batch=959, step=12960, lr=0.146250, batch loss=0.149057, epoch loss=2.948777
Batch=1019, step=13020, lr=0.146000, batch loss=0.267918, epoch loss=3.216695
Batch=1079, step=13080, lr=0.145750, batch loss=0.115425, epoch loss=3.332120
Batch=1139, step=13140, lr=0.145500, batch loss=0.155344, epoch loss=3.487464
Batch=1199, step=13200, lr=0.145250, batch loss=0.118495, epoch loss=3.605959
Epoch=10, step=13200, lr=0.145250, epoch loss=3.605959
Batch=59, step=13260, lr=0.145000, batch loss=0.143436, epoch loss=0.143436
Batch=119, step=13320, lr=0.144750, batch loss=0.119649, epoch loss=0.263085
Batch=179, step=13380, lr=0.144500, batch loss=0.127185, epoch loss=0.390270
Batch=239, step=13440, lr=0.144250, batch loss=0.186827, epoch loss=0.577097
Batch=299, step=13500, lr=0.144000, batch loss=0.112687, epoch loss=0.689784
Batch=359, step=13560, lr=0.143750, batch loss=0.161652, epoch loss=0.851436
Batch=419, step=13620, lr=0.143500, batch loss=0.160664, epoch loss=1.012101
Batch=479, step=13680, lr=0.143250, batch loss=0.147468, epoch loss=1.159569
Batch=539, step=13740, lr=0.143000, batch loss=0.118071, epoch loss=1.277640
Batch=599, step=13800, lr=0.142500, batch loss=0.120059, epoch loss=1.397699
Batch=659, step=13860, lr=0.142500, batch loss=0.175819, epoch loss=1.573519
Batch=719, step=13920, lr=0.142250, batch loss=0.172365, epoch loss=1.745884
Batch=779, step=13980, lr=0.142000, batch loss=0.178497, epoch loss=1.924381
Batch=839, step=14040, lr=0.141750, batch loss=0.186000, epoch loss=2.110381
Batch=899, step=14100, lr=0.141500, batch loss=0.178086, epoch loss=2.288466
Batch=959, step=14160, lr=0.141250, batch loss=0.146486, epoch loss=2.434953
Batch=1019, step=14220, lr=0.141000, batch loss=0.329923, epoch loss=2.764876
Batch=1079, step=14280, lr=0.140750, batch loss=0.082765, epoch loss=2.847640
Batch=1139, step=14340, lr=0.140500, batch loss=0.122222, epoch loss=2.969862
Batch=1199, step=14400, lr=0.140250, batch loss=0.091695, epoch loss=3.061557
Epoch=11, step=14400, lr=0.140250, epoch loss=3.061557
Batch=59, step=14460, lr=0.140000, batch loss=0.112955, epoch loss=0.112955
Batch=119, step=14520, lr=0.139750, batch loss=0.102678, epoch loss=0.215632
Batch=179, step=14580, lr=0.139500, batch loss=0.106383, epoch loss=0.322015
Batch=239, step=14640, lr=0.139250, batch loss=0.140587, epoch loss=0.462602
Batch=299, step=14700, lr=0.139000, batch loss=0.080914, epoch loss=0.543516
Batch=359, step=14760, lr=0.138750, batch loss=0.125447, epoch loss=0.668963
Batch=419, step=14820, lr=0.138500, batch loss=0.129192, epoch loss=0.798155
Batch=479, step=14880, lr=0.138250, batch loss=0.101558, epoch loss=0.899713
Batch=539, step=14940, lr=0.138000, batch loss=0.098473, epoch loss=0.998186
Batch=599, step=15000, lr=0.137750, batch loss=0.084857, epoch loss=1.083043
Batch=659, step=15060, lr=0.137500, batch loss=0.129047, epoch loss=1.212090
Batch=719, step=15120, lr=0.137250, batch loss=0.124777, epoch loss=1.336868
Batch=779, step=15180, lr=0.137000, batch loss=0.128628, epoch loss=1.465495
Batch=839, step=15240, lr=0.136750, batch loss=0.147606, epoch loss=1.613102
Batch=899, step=15300, lr=0.136500, batch loss=0.162006, epoch loss=1.775107
Batch=959, step=15360, lr=0.136250, batch loss=0.102805, epoch loss=1.877912
Batch=1019, step=15420, lr=0.136000, batch loss=0.217278, epoch loss=2.095190
Batch=1079, step=15480, lr=0.135750, batch loss=0.035492, epoch loss=2.130683
Batch=1139, step=15540, lr=0.135500, batch loss=0.086358, epoch loss=2.217040
Batch=1199, step=15600, lr=0.135250, batch loss=0.053701, epoch loss=2.270742
Epoch=12, step=15600, lr=0.135250, epoch loss=2.270742
Batch=59, step=15660, lr=0.135000, batch loss=0.088131, epoch loss=0.088131
Batch=119, step=15720, lr=0.134750, batch loss=0.146550, epoch loss=0.234681
Batch=179, step=15780, lr=0.134500, batch loss=0.102598, epoch loss=0.337279
Batch=239, step=15840, lr=0.134250, batch loss=0.097197, epoch loss=0.434477
Batch=299, step=15900, lr=0.134000, batch loss=0.043585, epoch loss=0.478062
Batch=359, step=15960, lr=0.133750, batch loss=0.079582, epoch loss=0.557643
Batch=419, step=16020, lr=0.133500, batch loss=0.078812, epoch loss=0.636455
Batch=479, step=16080, lr=0.133250, batch loss=0.068074, epoch loss=0.704529
Batch=539, step=16140, lr=0.133000, batch loss=0.053052, epoch loss=0.757581
Batch=599, step=16200, lr=0.132750, batch loss=0.097602, epoch loss=0.855183
Batch=659, step=16260, lr=0.132500, batch loss=0.074574, epoch loss=0.929757
Batch=719, step=16320, lr=0.132250, batch loss=0.089683, epoch loss=1.019440
Batch=779, step=16380, lr=0.132000, batch loss=0.151304, epoch loss=1.170745
Batch=839, step=16440, lr=0.131750, batch loss=0.122575, epoch loss=1.293319
Batch=899, step=16500, lr=0.131500, batch loss=0.143590, epoch loss=1.436909
Batch=959, step=16560, lr=0.131250, batch loss=0.033282, epoch loss=1.470191
Batch=1019, step=16620, lr=0.131000, batch loss=0.083778, epoch loss=1.553969
Batch=1079, step=16680, lr=0.130750, batch loss=0.035172, epoch loss=1.589141
Batch=1139, step=16740, lr=0.130500, batch loss=0.071049, epoch loss=1.660191
Batch=1199, step=16800, lr=0.130250, batch loss=0.033423, epoch loss=1.693614
Epoch=13, step=16800, lr=0.130250, epoch loss=1.693614
Batch=59, step=16860, lr=0.130000, batch loss=0.032976, epoch loss=0.032976
Batch=119, step=16920, lr=0.129750, batch loss=0.035602, epoch loss=0.068578
Batch=179, step=16980, lr=0.129500, batch loss=0.040931, epoch loss=0.109509
Batch=239, step=17040, lr=0.129250, batch loss=0.053951, epoch loss=0.163460
Batch=299, step=17100, lr=0.129000, batch loss=0.018064, epoch loss=0.181524
Batch=359, step=17160, lr=0.128750, batch loss=0.041140, epoch loss=0.222664
Batch=419, step=17220, lr=0.128500, batch loss=0.050687, epoch loss=0.273351
Batch=479, step=17280, lr=0.128250, batch loss=0.033652, epoch loss=0.307003
Batch=539, step=17340, lr=0.128000, batch loss=0.060030, epoch loss=0.367033
Batch=599, step=17400, lr=0.127750, batch loss=0.034694, epoch loss=0.401727
Batch=659, step=17460, lr=0.127500, batch loss=0.047880, epoch loss=0.449607
Batch=719, step=17520, lr=0.127250, batch loss=0.048843, epoch loss=0.498449
Batch=779, step=17580, lr=0.127000, batch loss=0.071303, epoch loss=0.569752
Batch=839, step=17640, lr=0.126750, batch loss=0.169785, epoch loss=0.739537
Batch=899, step=17700, lr=0.126500, batch loss=0.059222, epoch loss=0.798758
Batch=959, step=17760, lr=0.126250, batch loss=0.020928, epoch loss=0.819686
Batch=1019, step=17820, lr=0.126000, batch loss=0.031669, epoch loss=0.851355
Batch=1079, step=17880, lr=0.125750, batch loss=0.010902, epoch loss=0.862257
Batch=1139, step=17940, lr=0.125500, batch loss=0.029314, epoch loss=0.891571
Batch=1199, step=18000, lr=0.125250, batch loss=0.013316, epoch loss=0.904887
Epoch=14, step=18000, lr=0.125250, epoch loss=0.904887
Batch=59, step=18060, lr=0.125000, batch loss=0.010363, epoch loss=0.010363
Batch=119, step=18120, lr=0.124750, batch loss=0.017617, epoch loss=0.027980
Batch=179, step=18180, lr=0.124500, batch loss=0.031093, epoch loss=0.059073
Batch=239, step=18240, lr=0.124250, batch loss=0.033118, epoch loss=0.092191
Batch=299, step=18300, lr=0.124000, batch loss=0.011134, epoch loss=0.103325
Batch=359, step=18360, lr=0.123750, batch loss=0.021511, epoch loss=0.124836
Batch=419, step=18420, lr=0.123500, batch loss=0.026544, epoch loss=0.151380
Batch=479, step=18480, lr=0.123250, batch loss=0.025739, epoch loss=0.177118
Batch=539, step=18540, lr=0.123000, batch loss=0.055978, epoch loss=0.233096
Batch=599, step=18600, lr=0.122750, batch loss=0.027317, epoch loss=0.260413
Batch=659, step=18660, lr=0.122500, batch loss=0.031417, epoch loss=0.291830
Batch=719, step=18720, lr=0.122250, batch loss=0.040523, epoch loss=0.332353
Batch=779, step=18780, lr=0.122000, batch loss=0.115124, epoch loss=0.447477
Batch=839, step=18840, lr=0.121750, batch loss=0.059338, epoch loss=0.506815
Batch=899, step=18900, lr=0.121500, batch loss=0.074621, epoch loss=0.581437
Batch=959, step=18960, lr=0.121250, batch loss=0.013333, epoch loss=0.594769
Batch=1019, step=19020, lr=0.121000, batch loss=0.021059, epoch loss=0.615829
Batch=1079, step=19080, lr=0.120750, batch loss=0.009287, epoch loss=0.625116
Batch=1139, step=19140, lr=0.120500, batch loss=0.023160, epoch loss=0.648276
Batch=1199, step=19200, lr=0.120250, batch loss=0.009801, epoch loss=0.658076
Epoch=15, step=19200, lr=0.120250, epoch loss=0.658076
Batch=59, step=19260, lr=0.120000, batch loss=0.004556, epoch loss=0.004556
Batch=119, step=19320, lr=0.119750, batch loss=0.018799, epoch loss=0.023355
Batch=179, step=19380, lr=0.119500, batch loss=0.053791, epoch loss=0.077146
Batch=239, step=19440, lr=0.119250, batch loss=0.022462, epoch loss=0.099608
Batch=299, step=19500, lr=0.119000, batch loss=0.017023, epoch loss=0.116631
Batch=359, step=19560, lr=0.118750, batch loss=0.032207, epoch loss=0.148838
Batch=419, step=19620, lr=0.118500, batch loss=0.020380, epoch loss=0.169218
Batch=479, step=19680, lr=0.118250, batch loss=0.007093, epoch loss=0.176311
Batch=539, step=19740, lr=0.118000, batch loss=0.017825, epoch loss=0.194137
Batch=599, step=19800, lr=0.117750, batch loss=0.021709, epoch loss=0.215845
Batch=659, step=19860, lr=0.117500, batch loss=0.018261, epoch loss=0.234106
Batch=719, step=19920, lr=0.117250, batch loss=0.037504, epoch loss=0.271610
Batch=779, step=19980, lr=0.117000, batch loss=0.079358, epoch loss=0.350968
Batch=839, step=20040, lr=0.116750, batch loss=0.030589, epoch loss=0.381557
Batch=899, step=20100, lr=0.116500, batch loss=0.032203, epoch loss=0.413760
Batch=959, step=20160, lr=0.116250, batch loss=0.011993, epoch loss=0.425753
Batch=1019, step=20220, lr=0.116000, batch loss=0.016190, epoch loss=0.441943
Batch=1079, step=20280, lr=0.115750, batch loss=0.002367, epoch loss=0.444310
Batch=1139, step=20340, lr=0.115500, batch loss=0.014735, epoch loss=0.459045
Batch=1199, step=20400, lr=0.115250, batch loss=0.006729, epoch loss=0.465774
Epoch=16, step=20400, lr=0.115250, epoch loss=0.465774
Batch=59, step=20460, lr=0.115000, batch loss=0.003155, epoch loss=0.003155
Batch=119, step=20520, lr=0.114750, batch loss=0.008850, epoch loss=0.012004
Batch=179, step=20580, lr=0.114500, batch loss=0.020029, epoch loss=0.032034
Batch=239, step=20640, lr=0.114250, batch loss=0.019474, epoch loss=0.051508
Batch=299, step=20700, lr=0.114000, batch loss=0.010100, epoch loss=0.061608
Batch=359, step=20760, lr=0.113750, batch loss=0.014508, epoch loss=0.076116
Batch=419, step=20820, lr=0.113500, batch loss=0.014357, epoch loss=0.090473
Batch=479, step=20880, lr=0.113250, batch loss=0.003144, epoch loss=0.093617
Batch=539, step=20940, lr=0.113000, batch loss=0.016876, epoch loss=0.110493
Batch=599, step=21000, lr=0.112750, batch loss=0.019629, epoch loss=0.130122
Batch=659, step=21060, lr=0.112500, batch loss=0.016162, epoch loss=0.146284
Batch=719, step=21120, lr=0.112250, batch loss=0.035047, epoch loss=0.181330
Batch=779, step=21180, lr=0.112000, batch loss=0.050482, epoch loss=0.231812
Batch=839, step=21240, lr=0.111750, batch loss=0.025407, epoch loss=0.257219
Batch=899, step=21300, lr=0.111500, batch loss=0.023657, epoch loss=0.280876
Batch=959, step=21360, lr=0.111250, batch loss=0.018090, epoch loss=0.298966
Batch=1019, step=21420, lr=0.111000, batch loss=0.016705, epoch loss=0.315671
Batch=1079, step=21480, lr=0.110750, batch loss=0.000666, epoch loss=0.316337
Batch=1139, step=21540, lr=0.110500, batch loss=0.012611, epoch loss=0.328948
Batch=1199, step=21600, lr=0.110250, batch loss=0.005160, epoch loss=0.334108
Epoch=17, step=21600, lr=0.110250, epoch loss=0.334108
Batch=59, step=21660, lr=0.110000, batch loss=0.002883, epoch loss=0.002883
Batch=119, step=21720, lr=0.109750, batch loss=0.006808, epoch loss=0.009691
Batch=179, step=21780, lr=0.109500, batch loss=0.011879, epoch loss=0.021570
Batch=239, step=21840, lr=0.109250, batch loss=0.009435, epoch loss=0.031005
Batch=299, step=21900, lr=0.109000, batch loss=0.009598, epoch loss=0.040603
Batch=359, step=21960, lr=0.108750, batch loss=0.013813, epoch loss=0.054416
Batch=419, step=22020, lr=0.108500, batch loss=0.012937, epoch loss=0.067353
Batch=479, step=22080, lr=0.108250, batch loss=0.003294, epoch loss=0.070647
Batch=539, step=22140, lr=0.108000, batch loss=0.017532, epoch loss=0.088179
Batch=599, step=22200, lr=0.107750, batch loss=0.015818, epoch loss=0.103998
Batch=659, step=22260, lr=0.107500, batch loss=0.011978, epoch loss=0.115976
Batch=719, step=22320, lr=0.107250, batch loss=0.014767, epoch loss=0.130743
Batch=779, step=22380, lr=0.107000, batch loss=0.022472, epoch loss=0.153215
Batch=839, step=22440, lr=0.106750, batch loss=0.026224, epoch loss=0.179440
Batch=899, step=22500, lr=0.106500, batch loss=0.025571, epoch loss=0.205011
Batch=959, step=22560, lr=0.106250, batch loss=0.008877, epoch loss=0.213888
Batch=1019, step=22620, lr=0.106000, batch loss=0.009870, epoch loss=0.223758
Batch=1079, step=22680, lr=0.105750, batch loss=0.001567, epoch loss=0.225325
Batch=1139, step=22740, lr=0.105500, batch loss=0.011099, epoch loss=0.236424
Batch=1199, step=22800, lr=0.105250, batch loss=0.005152, epoch loss=0.241576
Epoch=18, step=22800, lr=0.105250, epoch loss=0.241576
Batch=59, step=22860, lr=0.105000, batch loss=0.002313, epoch loss=0.002313
Batch=119, step=22920, lr=0.104750, batch loss=0.005363, epoch loss=0.007675
Batch=179, step=22980, lr=0.104500, batch loss=0.012119, epoch loss=0.019794
Batch=239, step=23040, lr=0.104250, batch loss=0.007965, epoch loss=0.027760
Batch=299, step=23100, lr=0.104000, batch loss=0.008479, epoch loss=0.036238
Batch=359, step=23160, lr=0.103750, batch loss=0.012081, epoch loss=0.048319
Batch=419, step=23220, lr=0.103500, batch loss=0.010486, epoch loss=0.058805
Batch=479, step=23280, lr=0.103250, batch loss=0.004095, epoch loss=0.062900
Batch=539, step=23340, lr=0.103000, batch loss=0.014546, epoch loss=0.077446
Batch=599, step=23400, lr=0.102750, batch loss=0.014205, epoch loss=0.091651
Batch=659, step=23460, lr=0.102500, batch loss=0.013251, epoch loss=0.104902
Batch=719, step=23520, lr=0.102250, batch loss=0.013736, epoch loss=0.118639
Batch=779, step=23580, lr=0.102000, batch loss=0.022458, epoch loss=0.141097
Batch=839, step=23640, lr=0.101750, batch loss=0.026165, epoch loss=0.167261
Batch=899, step=23700, lr=0.101500, batch loss=0.020832, epoch loss=0.188093
Batch=959, step=23760, lr=0.101250, batch loss=0.007873, epoch loss=0.195966
Batch=1019, step=23820, lr=0.101000, batch loss=0.006282, epoch loss=0.202247
Batch=1079, step=23880, lr=0.100750, batch loss=0.002386, epoch loss=0.204633
Batch=1139, step=23940, lr=0.100500, batch loss=0.008003, epoch loss=0.212636
Batch=1199, step=24000, lr=0.100250, batch loss=0.004351, epoch loss=0.216987
Epoch=19, step=24000, lr=0.100250, epoch loss=0.216987
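
Editor's note on the trace above: the learning rate decays linearly, dropping by 0.00025 at every 60-step logging interval (0.005 per 1200-step epoch), and the "epoch loss" column is the running sum of the logged batch losses, reset at each epoch boundary; the final epoch loss falls from 0.905 (epoch 14) to 0.217 (epoch 19). A minimal OCaml sketch of that bookkeeping follows. The schedule formula below is an inference that happens to reproduce the logged values (e.g. step 18060 -> 0.125000); it is not necessarily how the test itself expresses the schedule, and all names are illustrative, not OCannl's API.

(* Minimal sketch, assuming a linear LR schedule and a per-epoch running
   loss with the same logging cadence as the trace above. *)
let lr_at step =
  (* assumed schedule, inferred from the log; reproduces the printed values *)
  0.2 -. float_of_int (step - 60) /. 240_000.

let () =
  let step = ref 0 in
  for epoch = 0 to 19 do
    let epoch_loss = ref 0.0 in
    for batch = 0 to 1199 do
      incr step;
      let lr = lr_at !step in
      (* an optimizer step on the current minibatch would go here;
         batch_loss is a placeholder for the loss it returns *)
      let batch_loss = 0.0 in
      epoch_loss := !epoch_loss +. batch_loss;
      if (batch + 1) mod 60 = 0 then
        Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
          batch !step lr batch_loss !epoch_loss
    done;
    Printf.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n"
      epoch !step (lr_at !step) !epoch_loss
  done
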


Half-moons scatterplot and decision boundary:
┌────────────────────────────────────────────────────────────────────────────────────────────────────┐
│********************************#*******************************************************************│
│**********************#*#*#######*###*#####*********************************************************│
│**********************#########################*****************************************************│
│*****************#**########*######*###########*###*************************************************│
│***************#################*###################************************************************│
│************######*#################*#################**********************************************│
│**********#*#####*########*#**************##*#########*#********************************************│
│***********########*##*#******************#*****##########******************************************│
│***********###########*************************############***************************************..│
│********######*####*********************************###*###*#**********************************.....│
│*******######**##*************....*****************#*######*#*******************************........│
│*******##*##**##**********...........***************########*##***************************..........│
│*****#######************.......%...%%...***************#########*************************.........%.│
│******######**********..........%........***************##*#####************************......%.%.%.│
│***#########**********.........%%%.%%......*************#*#######*********************.......%.%%%%.│
│****#######**********..........%%%%.........************#########********************........%%.%%.%│
│**#######************..........%%%%%%%........*************###*###******************.........%%%%%%.│
│*##*####************...........%%%%%%%.........***********########*****************..........%%%%%%.│
│*#######************...........%%%%%%%..........************#######**************............%%%%%%.│
│*##*####***********............%%.%%%%%...........***********####***************............%%%%%%%.│
│*#####*#***********.............%%%%%%%............**********##*###************..............%%%%%..│
│#######***********.............%.%%%%%%.............*********#######**********.............%%%%.%%..│
│#####*#**********...............%%%%%%%...............*******#######********...............%%%%%%%%.│
│###*#*#**********...............%%%%%%%%%..............*******######*******................%%%%%%...│
│#######*********.................%%%%%%%%...............*****###*###******................%%%%%%....│
│######**********.................%%%%%%%%%................***#*###*******...............%%%%%%%%%...│
│*#*##*#********...................%%%%%%%%%%...............***######***..................%%%%%%.....│
│#****##********....................%%%%%%%%%................***###*#**................%.%%%%%%%.....│
│**************.....................%.%%%%%%...................*******..................%.%%.%%......│
│**************.......................%..%%%%%%%................*****..............%.%%%%%%%%%.......│
│*************.........................%.%%%.%%%%.................*................%%%%%%%.%.%.......│
│************............................%..%%%%..%................................%%%%%%%%..........│
│************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........│
│***********..............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........│
│***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............│
│**********...................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............│
│**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................│
│*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................│
│*********............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................│
│********................................................%...%%%%.%%.%%%%..%.........................│
└────────────────────────────────────────────────────────────────────────────────────────────────────┘
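
Editor's note on the plot above: it is the test's own text rendering of the two half-moon point clouds over the learned decision regions; the two glyph families presumably mark the two classes of data points, and the '*' / '.' background marks the predicted class over a grid. For reference, a generic half-moons generator looks roughly like the sketch below. This is the standard synthetic dataset construction, not necessarily the exact generator this test uses; the noise level and all names are assumptions.

(* Minimal sketch of a half-moons generator: two interleaving crescents
   with additive jitter. Not necessarily the generator used by this test. *)
let half_moons ?(noise = 0.1) n =
  let pi = 4.0 *. atan 1.0 in
  List.init n (fun i ->
      let t = pi *. Random.float 1.0 in
      let jitter () = noise *. (Random.float 2.0 -. 1.0) in
      if i mod 2 = 0 then
        (* upper crescent: class 0 *)
        (cos t +. jitter (), sin t +. jitter (), 0)
      else
        (* lower crescent, shifted right and down: class 1 *)
        (1.0 -. cos t +. jitter (), 0.5 -. sin t +. jitter (), 1))

Calling half_moons 2400 yields a list of (x, y, class) triples suitable as input to a binary classifier like the one trained above.
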
2025-05-09 15:40.03 ---> saved as "7854e3591e22176c0f1cb792c15d687f585995f6b08ed39152f16431a9d21c35"
Job succeeded
2025-05-09 15:40.03: Job succeeded