2025-04-07 01:09.40: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (463c57039dce5e7ebd23e4739665cbc647a282e4) (linux-x86_64:fedora-40-5.3_opam-2.3)
Base: ocaml/opam:fedora-40-ocaml-5.3@sha256:9a3e2a687015db2704b91431286f4f84f9c6e8d286761ed5417eadf8288bcc9e

Opam project build

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 463c5703
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:fedora-40-ocaml-5.3@sha256:9a3e2a687015db2704b91431286f4f84f9c6e8d286761ed5417eadf8288bcc9e
# fedora-40-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo dnf install -y findutils
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 598efb46ec7387aed249220ca6a2bc39eb4b94e9 || git fetch origin master) && git reset -q --hard 598efb46ec7387aed249220ca6a2bc39eb4b94e9 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
    opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.0 dune-configurator.3.18.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK 2025-04-07 01:09.40: Using cache hint "ahrefs/ocannl-ocaml/opam:fedora-40-ocaml-5.3@sha256:9a3e2a687015db2704b91431286f4f84f9c6e8d286761ed5417eadf8288bcc9e-fedora-40-5.3_opam-2.3-ab22fb8412356c04fa0386e1ea5b2a04" 2025-04-07 01:09.40: Using OBuilder spec: ((from ocaml/opam:fedora-40-ocaml-5.3@sha256:9a3e2a687015db2704b91431286f4f84f9c6e8d286761ed5417eadf8288bcc9e) (comment fedora-40-5.3_opam-2.3) (user (uid 1000) (gid 1000)) (env CLICOLOR_FORCE 1) (env OPAMCOLOR always) (workdir /src) (run (network host) (shell "sudo dnf install -y findutils")) (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam")) (run (shell "opam init --reinit -ni")) (run (shell "uname -rs && opam exec -- ocaml -version && opam --version")) (workdir /src) (run (shell "sudo chown opam /src")) (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 598efb46ec7387aed249220ca6a2bc39eb4b94e9 || git fetch origin master) && git reset -q --hard 598efb46ec7387aed249220ca6a2bc39eb4b94e9 && git log --no-decorate -n1 --oneline && opam update -u")) (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./)) (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'")) (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'")) (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.0 dune-configurator.3.18.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0") (env CI true) (env OCAMLCI true) (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS")) (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS")) (copy (src .) 
(dst /src)) (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")) ) 2025-04-07 01:09.40: Waiting for resource in pool OCluster 2025-04-07 01:09.40: Waiting for worker… 2025-04-07 01:09.42: Got resource from pool OCluster Building on doris.caelum.ci.dev All commits already cached HEAD is now at 463c5703 Fix ppx_minidebug version, update cudajit version (from ocaml/opam:fedora-40-ocaml-5.3@sha256:9a3e2a687015db2704b91431286f4f84f9c6e8d286761ed5417eadf8288bcc9e) 2025-04-07 01:09.42 ---> using "1476c452edd56a53215a301540bb2009c8d2a15dba423047d40300e18599671f" from cache /: (comment fedora-40-5.3_opam-2.3) /: (user (uid 1000) (gid 1000)) /: (env CLICOLOR_FORCE 1) /: (env OPAMCOLOR always) /: (workdir /src) /src: (run (network host) (shell "sudo dnf install -y findutils")) Fedora 40 - x86_64 11 kB/s | 27 kB 00:02 Fedora 40 openh264 (From Cisco) - x86_64 695 B/s | 989 B 00:01 Fedora 40 - x86_64 - Updates 19 kB/s | 8.4 kB 00:00 Fedora 40 - x86_64 - Updates 1.6 MB/s | 6.4 MB 00:03 Package findutils-1:4.9.0-9.fc40.x86_64 is already installed. Dependencies resolved. Nothing to do. Complete! 2025-04-07 01:09.42 ---> using "94cbd80bcc330ce623803fbb520a85cab1400e88c96764ea5c2e02cd7e19ad53" from cache /src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam")) 2025-04-07 01:09.42 ---> using "3cf8028e21344054f79e658239efeb83a6d7aca5695c51333e87cfafa85482ca" from cache /src: (run (shell "opam init --reinit -ni")) Configuring from /home/opam/.opamrc and then from built-in defaults. Checking for available remotes: rsync and local, git. - you won't be able to use mercurial repositories unless you install the hg command on your system. - you won't be able to use darcs repositories unless you install the darcs command on your system. This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted. You may want to back it up before going further. Continue? [y/n] y [NOTE] The 'jobs' option was reset, its value was 39 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using: opam option jobs=39 --global Format upgrade done. 
<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><> [ERROR] Could not update repository "opam-repository-archive": "/usr/bin/git fetch -q" exited with code 128 "fatal: unable to access 'https://github.com/ocaml/opam-repository-archive/': Could not resolve host: github.com" [default] synchronised from file:///home/opam/opam-repository 2025-04-07 01:09.42 ---> using "e8183cd9ba1a23c4d7ddb3cf8b2d2350a9e0a7713ccd1e3ac8d0c1e7fffb4080" from cache /src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version")) Linux 5.15.0-134-generic The OCaml toplevel, version 5.3.0 2.3.0 2025-04-07 01:09.42 ---> using "e1cd23eff74270a484a5911f8cc469d53dbcdcfafe79312b85310d9254a411b6" from cache /src: (workdir /src) /src: (run (shell "sudo chown opam /src")) 2025-04-07 01:09.42 ---> using "c4ed4798c67e57846c83ac2aba073c0c9239aa56a5e2c21f39faf8e0638895ed" from cache /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 598efb46ec7387aed249220ca6a2bc39eb4b94e9 || git fetch origin master) && git reset -q --hard 598efb46ec7387aed249220ca6a2bc39eb4b94e9 && git log --no-decorate -n1 --oneline && opam update -u")) From https://github.com/ocaml/opam-repository * branch master -> FETCH_HEAD 862a7640b1..598efb46ec master -> origin/master 598efb46ec Merge pull request #27716 from avsm/fix-mpopcnt <><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><> [opam-repository-archive] synchronised from git+https://github.com/ocaml/opam-repository-archive [default] synchronised from file:///home/opam/opam-repository Everything as up-to-date as possible (run with --verbose to show unavailable upgrades). However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages. Nothing to do. 
# To update the current shell environment, run: eval $(opam env) 2025-04-07 01:10.44 ---> saved as "3355a3df8a5ac95e8bc5dea05ca00df1d5f5fafebc3b47f94eb5cac2a609e3ed" /src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./)) 2025-04-07 01:10.45 ---> saved as "13e012d15506a44e583e4a460668a2406a723dff8101c52844ec46f2b6ae7e52" /src: (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'")) [neural_nets_lib.dev] synchronised (file:///src) neural_nets_lib is now pinned to file:///src (version dev) [arrayjit.dev] synchronised (file:///src) arrayjit is now pinned to file:///src (version dev) 2025-04-07 01:10.47 ---> saved as "3366b9fd0e0e5b6a4c28e365e99db5db31aba38117570adcd868d2adee84cddc" /src: (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'")) 2025-04-07 01:10.48 ---> saved as "b06a80310bf31f90599a9b4752ffa4c362ff4e92b332ea7fbd42b69856c19838" /src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.0 dune-configurator.3.18.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0") /src: (env CI true) /src: (env OCAMLCI true) /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS")) + /usr/bin/sudo "yum" "makecache" - Fedora 40 - x86_64 22 kB/s | 27 kB 00:01 - Fedora 40 openh264 (From Cisco) - x86_64 1.2 kB/s | 989 B 00:00 - Fedora 40 - x86_64 - Updates 15 kB/s | 24 kB 00:01 - Metadata cache created. <><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><> [arrayjit.dev] synchronised (file:///src) [neural_nets_lib.dev] synchronised (file:///src) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). [NOTE] Package ocaml-config is already installed (current version is 3). [NOTE] Package ocaml-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml is already installed (current version is 5.3.0). 
[NOTE] Package base-unix is already installed (current version is base). [NOTE] Package base-threads is already installed (current version is base). [NOTE] Package base-nnp is already installed (current version is base). [NOTE] Package base-effects is already installed (current version is base). [NOTE] Package base-domains is already installed (current version is base). [NOTE] Package base-bigarray is already installed (current version is base). The following system packages will first need to be installed: libffi-devel <><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><> + /usr/bin/sudo "yum" "install" "-y" "libffi-devel" - Last metadata expiration check: 0:00:13 ago on Mon Apr 7 01:10:52 2025. - Dependencies resolved. - ================================================================================ - Package Architecture Version Repository Size - ================================================================================ - Installing: - libffi-devel x86_64 3.4.4-7.fc40 fedora 28 k - - Transaction Summary - ================================================================================ - Install 1 Package - - Total download size: 28 k - Installed size: 33 k - Downloading Packages: - libffi-devel-3.4.4-7.fc40.x86_64.rpm 33 kB/s | 28 kB 00:00 - -------------------------------------------------------------------------------- - Total 4.3 kB/s | 28 kB 00:06 - Running transaction check - Transaction check succeeded. - Running transaction test - Transaction test succeeded. - Running transaction - Preparing : 1/1 - Installing : libffi-devel-3.4.4-7.fc40.x86_64 1/1 - Running scriptlet: libffi-devel-3.4.4-7.fc40.x86_64 1/1 - - Installed: - libffi-devel-3.4.4-7.fc40.x86_64 - - Complete! + /usr/bin/rpm "-q" "--whatprovides" "libffi-devel" - libffi-devel-3.4.4-7.fc40.x86_64 2025-04-07 01:11.13 ---> saved as "5289a9616ad86064a792a0e67a128e63dbb3ddbff4b2c84fd39169644de1cc95" /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS")) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). [NOTE] Package ocaml-config is already installed (current version is 3). [NOTE] Package ocaml-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml is already installed (current version is 5.3.0). [NOTE] Package base-unix is already installed (current version is base). [NOTE] Package base-threads is already installed (current version is base). [NOTE] Package base-nnp is already installed (current version is base). [NOTE] Package base-effects is already installed (current version is base). [NOTE] Package base-domains is already installed (current version is base). [NOTE] Package base-bigarray is already installed (current version is base). 
The following actions will be performed: === install 74 packages - install angstrom 0.16.1 - install astring 0.8.5 - install backoff 0.1.1 - install base v0.17.1 - install bigarray-compat 1.1.0 - install bigstringaf 0.10.0 - install camlp-streams 5.0.1 - install cmdliner 1.3.0 - install conf-libffi 2.0.0 - install conf-pkg-config 4 - install cppo 1.8.0 - install csexp 1.5.2 - install ctypes 0.23.0 - install ctypes-foreign 0.23.0 - install dune 3.18.0 - install dune-configurator 3.18.0 - install fieldslib v0.17.0 - install fmt 0.10.0 - install integers 0.7.0 - install jane-street-headers v0.17.0 - install jst-config v0.17.0 - install logs 0.8.0 - install mdx 2.5.0 - install mtime 2.1.0 - install multicore-magic 2.3.1 - install num 1.5-1 - install ocaml-compiler-libs v0.17.0 - install ocaml-syntax-shims 1.0.0 - install ocaml-version 4.0.0 - install ocaml_intrinsics_kernel v0.17.1 - install ocamlbuild 0.16.1 - install ocamlfind 1.9.8 - install parsexp v0.17.0 - install ppx_assert v0.17.0 - install ppx_base v0.17.0 - install ppx_cold v0.17.0 - install ppx_compare v0.17.0 - install ppx_derivers 1.2.1 - install ppx_deriving 6.0.3 - install ppx_enumerate v0.17.0 - install ppx_expect v0.17.2 - install ppx_fields_conv v0.17.0 - install ppx_globalize v0.17.0 - install ppx_hash v0.17.0 - install ppx_here v0.17.0 - install ppx_inline_test v0.17.0 - install ppx_minidebug 2.2.0 - install ppx_optcomp v0.17.0 - install ppx_sexp_conv v0.17.0 - install ppx_string v0.17.0 - install ppx_variants_conv v0.17.0 - install ppxlib 0.35.0 - install ppxlib_jane v0.17.2 - install printbox 0.12 - install printbox-ext-plot 0.12 - install printbox-html 0.12 - install printbox-md 0.12 - install printbox-text 0.12 - install ptime 1.2.0 - install re 1.12.0 - install result 1.5 - install saturn_lockfree 0.5.0 - install seq base - install sexplib v0.17.0 - install sexplib0 v0.17.0 - install stdio v0.17.0 - install stdlib-shims 0.3.0 - install thread-local-storage 0.2 - install time_now v0.17.0 - install topkg 1.0.8 - install tyxml 4.6.0 - install uucp 16.0.0 - install uutf 1.0.4 - install variantslib v0.17.0 <><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><> -> retrieved backoff.0.1.1 (cached) -> retrieved angstrom.0.16.1 (cached) -> retrieved astring.0.8.5 (cached) -> retrieved bigarray-compat.1.1.0 (cached) -> retrieved base.v0.17.1 (cached) -> retrieved bigstringaf.0.10.0 (cached) -> retrieved camlp-streams.5.0.1 (cached) -> retrieved cppo.1.8.0 (cached) -> retrieved cmdliner.1.3.0 (cached) -> installed conf-pkg-config.4 -> retrieved csexp.1.5.2 (cached) -> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached) -> installed conf-libffi.2.0.0 -> retrieved fieldslib.v0.17.0 (cached) -> retrieved fmt.0.10.0 (cached) -> retrieved integers.0.7.0 (cached) -> retrieved jane-street-headers.v0.17.0 (cached) -> retrieved jst-config.v0.17.0 (cached) -> retrieved logs.0.8.0 (cached) -> retrieved mtime.2.1.0 (cached) -> retrieved multicore-magic.2.3.1 (cached) -> retrieved mdx.2.5.0 (cached) -> retrieved num.1.5-1 (cached) -> retrieved ocaml-compiler-libs.v0.17.0 (cached) -> retrieved ocaml-syntax-shims.1.0.0 (cached) -> retrieved ocaml-version.4.0.0 (cached) -> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached) -> retrieved ocamlbuild.0.16.1 (cached) -> retrieved dune.3.18.0, dune-configurator.3.18.0 (cached) -> retrieved ocamlfind.1.9.8 (cached) -> retrieved parsexp.v0.17.0 (cached) -> retrieved ppx_assert.v0.17.0 (cached) -> retrieved ppx_base.v0.17.0 (cached) -> retrieved ppx_cold.v0.17.0 (cached) -> 
retrieved ppx_compare.v0.17.0 (cached) -> retrieved ppx_derivers.1.2.1 (cached) -> retrieved ppx_enumerate.v0.17.0 (cached) -> retrieved ppx_deriving.6.0.3 (cached) -> retrieved ppx_expect.v0.17.2 (cached) -> installed cmdliner.1.3.0 -> installed num.1.5-1 -> retrieved ppx_fields_conv.v0.17.0 (cached) -> retrieved ppx_globalize.v0.17.0 (cached) -> retrieved ppx_hash.v0.17.0 (cached) -> retrieved ppx_here.v0.17.0 (cached) -> retrieved ppx_inline_test.v0.17.0 (cached) -> retrieved ppx_optcomp.v0.17.0 (cached) -> retrieved ppx_sexp_conv.v0.17.0 (cached) -> retrieved ppx_string.v0.17.0 (cached) -> retrieved ppx_variants_conv.v0.17.0 (cached) -> retrieved ppx_minidebug.2.2.0 (cached) -> retrieved ppxlib_jane.v0.17.2 (cached) -> retrieved ptime.1.2.0 (cached) -> retrieved ppxlib.0.35.0 (cached) -> retrieved re.1.12.0 (cached) -> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached) -> retrieved seq.base (cached) -> installed seq.base -> retrieved result.1.5 (cached) -> retrieved saturn_lockfree.0.5.0 (cached) -> retrieved sexplib.v0.17.0 (cached) -> retrieved sexplib0.v0.17.0 (cached) -> retrieved stdio.v0.17.0 (cached) -> retrieved stdlib-shims.0.3.0 (cached) -> retrieved thread-local-storage.0.2 (cached) -> retrieved time_now.v0.17.0 (cached) -> retrieved topkg.1.0.8 (cached) -> retrieved tyxml.4.6.0 (cached) -> retrieved uutf.1.0.4 (cached) -> retrieved variantslib.v0.17.0 (cached) -> retrieved uucp.16.0.0 (cached) -> installed ocamlfind.1.9.8 -> installed ocamlbuild.0.16.1 -> installed topkg.1.0.8 -> installed uutf.1.0.4 -> installed mtime.2.1.0 -> installed fmt.0.10.0 -> installed ptime.1.2.0 -> installed astring.0.8.5 -> installed logs.0.8.0 -> installed dune.3.18.0 -> installed jane-street-headers.v0.17.0 -> installed ppx_derivers.1.2.1 -> installed csexp.1.5.2 -> installed backoff.0.1.1 -> installed bigarray-compat.1.1.0 -> installed camlp-streams.5.0.1 -> installed cppo.1.8.0 -> installed multicore-magic.2.3.1 -> installed ocaml-compiler-libs.v0.17.0 -> installed ocaml-syntax-shims.1.0.0 -> installed ocaml-version.4.0.0 -> installed ocaml_intrinsics_kernel.v0.17.1 -> installed printbox.0.12 -> installed re.1.12.0 -> installed result.1.5 -> installed sexplib0.v0.17.0 -> installed stdlib-shims.0.3.0 -> installed thread-local-storage.0.2 -> installed saturn_lockfree.0.5.0 -> installed integers.0.7.0 -> installed parsexp.v0.17.0 -> installed dune-configurator.3.18.0 -> installed bigstringaf.0.10.0 -> installed mdx.2.5.0 -> installed sexplib.v0.17.0 -> installed angstrom.0.16.1 -> installed tyxml.4.6.0 -> installed printbox-html.0.12 -> installed ctypes.0.23.0 -> installed base.v0.17.1 -> installed fieldslib.v0.17.0 -> installed variantslib.v0.17.0 -> installed stdio.v0.17.0 -> installed ctypes-foreign.0.23.0 -> installed uucp.16.0.0 -> installed printbox-text.0.12 -> installed printbox-md.0.12 -> installed printbox-ext-plot.0.12 -> installed ppxlib.0.35.0 -> installed ppxlib_jane.v0.17.2 -> installed ppx_optcomp.v0.17.0 -> installed ppx_here.v0.17.0 -> installed ppx_cold.v0.17.0 -> installed ppx_variants_conv.v0.17.0 -> installed ppx_fields_conv.v0.17.0 -> installed ppx_globalize.v0.17.0 -> installed ppx_enumerate.v0.17.0 -> installed ppx_deriving.6.0.3 -> installed ppx_compare.v0.17.0 -> installed ppx_sexp_conv.v0.17.0 -> installed ppx_hash.v0.17.0 -> installed ppx_assert.v0.17.0 -> installed ppx_base.v0.17.0 -> installed ppx_minidebug.2.2.0 -> installed jst-config.v0.17.0 -> installed ppx_string.v0.17.0 -> installed 
time_now.v0.17.0 -> installed ppx_inline_test.v0.17.0 -> installed ppx_expect.v0.17.2 Done. # To update the current shell environment, run: eval $(opam env) 2025-04-07 01:12.33 ---> saved as "25365da6223daf3c21971d5bc012b43c3bae23141bc69d31f565af20d4b786c2" /src: (copy (src .) (dst /src)) 2025-04-07 01:12.33 ---> saved as "470c30dcaf7b1f9e0e94b8e2464abb3e7f6fe5099baf8bf4717a271e861e6686" /src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")) (cd _build/default/test_ppx && ./test_ppx_op_expected.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/default/test_ppx && ./test_ppx_op.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/79eb33c5a4dd880b36baff8fb0e75166/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test/ocannl_config. 
Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bc70219a184f66fb3459bbcea439c0e4/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/default/test && ./moons_demo_parallel_run.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config. 
Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file ("Set log_level to" 1) └─{orphaned from #2} Retrieving commandline, environment, or config file variable ocannl_backend Found cc, in the config file Retrieving commandline, environment, or config file variable ocannl_ll_ident_style Not found, using default heuristic Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level Not found, using default 3 Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command Not found, using default gcc Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout Not found, using default false Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453 Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087 Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382 Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039 Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216 Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512 Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081 Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141 Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385 Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263 Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603 Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902 Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024 Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685 Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407 Batch=959, step=960, lr=0.196250, batch loss=0.245136, epoch loss=44.035543 Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049 Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829 Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269 Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952 Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952 Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138 Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381 Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025 Batch=239, step=1440, lr=0.194250, batch loss=0.347897, epoch loss=1.058921 Batch=299, step=1500, lr=0.194000, batch loss=0.247348, epoch loss=1.306269 Batch=359, step=1560, lr=0.193750, batch loss=0.316559, epoch loss=1.622828 Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.935563 Batch=479, step=1680, lr=0.193250, batch loss=0.276268, epoch loss=2.211831 Batch=539, step=1740, lr=0.193000, batch loss=0.209826, epoch loss=2.421657 Batch=599, step=1800, lr=0.192750, batch loss=0.250384, epoch loss=2.672042 Batch=659, step=1860, lr=0.192500, batch loss=0.367201, epoch loss=3.039243 Batch=719, step=1920, lr=0.192250, batch loss=0.354917, epoch loss=3.394160 Batch=779, step=1980, lr=0.192000, batch loss=0.381382, epoch loss=3.775542 Batch=839, step=2040, lr=0.191750, batch loss=0.339637, epoch loss=4.115179 Batch=899, step=2100, lr=0.191500, batch loss=0.295234, epoch loss=4.410413 Batch=959, step=2160, lr=0.191250, batch loss=0.214033, epoch loss=4.624446 Batch=1019, 
step=2220, lr=0.191000, batch loss=0.330972, epoch loss=4.955419 Batch=1079, step=2280, lr=0.190750, batch loss=0.208236, epoch loss=5.163654 Batch=1139, step=2340, lr=0.190500, batch loss=0.278374, epoch loss=5.442028 Batch=1199, step=2400, lr=0.190250, batch loss=0.220793, epoch loss=5.662821 Epoch=1, step=2400, lr=0.190250, epoch loss=5.662821 Batch=59, step=2460, lr=0.190000, batch loss=0.230363, epoch loss=0.230363 Batch=119, step=2520, lr=0.189750, batch loss=0.195962, epoch loss=0.426325 Batch=179, step=2580, lr=0.189500, batch loss=0.221156, epoch loss=0.647481 Batch=239, step=2640, lr=0.189250, batch loss=0.328098, epoch loss=0.975578 Batch=299, step=2700, lr=0.189000, batch loss=0.202947, epoch loss=1.178525 Batch=359, step=2760, lr=0.188750, batch loss=0.289890, epoch loss=1.468415 Batch=419, step=2820, lr=0.188500, batch loss=0.281744, epoch loss=1.750160 Batch=479, step=2880, lr=0.188250, batch loss=0.264844, epoch loss=2.015004 Batch=539, step=2940, lr=0.188000, batch loss=0.203798, epoch loss=2.218802 Batch=599, step=3000, lr=0.187750, batch loss=0.248079, epoch loss=2.466881 Batch=659, step=3060, lr=0.187500, batch loss=0.345056, epoch loss=2.811937 Batch=719, step=3120, lr=0.187250, batch loss=0.343542, epoch loss=3.155479 Batch=779, step=3180, lr=0.187000, batch loss=0.366989, epoch loss=3.522468 Batch=839, step=3240, lr=0.186750, batch loss=0.321779, epoch loss=3.844248 Batch=899, step=3300, lr=0.186500, batch loss=0.283865, epoch loss=4.128112 Batch=959, step=3360, lr=0.186250, batch loss=0.214411, epoch loss=4.342523 Batch=1019, step=3420, lr=0.186000, batch loss=0.306400, epoch loss=4.648923 Batch=1079, step=3480, lr=0.185750, batch loss=0.177313, epoch loss=4.826236 Batch=1139, step=3540, lr=0.185500, batch loss=0.235576, epoch loss=5.061812 Batch=1199, step=3600, lr=0.185250, batch loss=0.197911, epoch loss=5.259723 Epoch=2, step=3600, lr=0.185250, epoch loss=5.259723 Batch=59, step=3660, lr=0.185000, batch loss=0.226539, epoch loss=0.226539 Batch=119, step=3720, lr=0.184750, batch loss=0.191802, epoch loss=0.418341 Batch=179, step=3780, lr=0.184500, batch loss=0.210712, epoch loss=0.629053 Batch=239, step=3840, lr=0.184250, batch loss=0.314104, epoch loss=0.943157 Batch=299, step=3900, lr=0.184000, batch loss=0.206011, epoch loss=1.149168 Batch=359, step=3960, lr=0.183750, batch loss=0.291253, epoch loss=1.440420 Batch=419, step=4020, lr=0.183500, batch loss=0.297434, epoch loss=1.737854 Batch=479, step=4080, lr=0.183250, batch loss=0.260511, epoch loss=1.998365 Batch=539, step=4140, lr=0.183000, batch loss=0.195229, epoch loss=2.193593 Batch=599, step=4200, lr=0.182750, batch loss=0.230395, epoch loss=2.423989 Batch=659, step=4260, lr=0.182500, batch loss=0.337132, epoch loss=2.761121 Batch=719, step=4320, lr=0.182250, batch loss=0.347122, epoch loss=3.108243 Batch=779, step=4380, lr=0.182000, batch loss=0.346320, epoch loss=3.454563 Batch=839, step=4440, lr=0.181750, batch loss=0.317770, epoch loss=3.772333 Batch=899, step=4500, lr=0.181500, batch loss=0.283906, epoch loss=4.056239 Batch=959, step=4560, lr=0.181250, batch loss=0.238371, epoch loss=4.294610 Batch=1019, step=4620, lr=0.181000, batch loss=0.336891, epoch loss=4.631501 Batch=1079, step=4680, lr=0.180750, batch loss=0.208592, epoch loss=4.840094 Batch=1139, step=4740, lr=0.180500, batch loss=0.249212, epoch loss=5.089306 Batch=1199, step=4800, lr=0.180250, batch loss=0.191768, epoch loss=5.281074 Epoch=3, step=4800, lr=0.180250, epoch loss=5.281074 Batch=59, step=4860, lr=0.180000, batch 
loss=0.228079, epoch loss=0.228079 Batch=119, step=4920, lr=0.179750, batch loss=0.190219, epoch loss=0.418298 Batch=179, step=4980, lr=0.179500, batch loss=0.205905, epoch loss=0.624203 Batch=239, step=5040, lr=0.179250, batch loss=0.309067, epoch loss=0.933269 Batch=299, step=5100, lr=0.179000, batch loss=0.204593, epoch loss=1.137862 Batch=359, step=5160, lr=0.178750, batch loss=0.271139, epoch loss=1.409001 Batch=419, step=5220, lr=0.178500, batch loss=0.264055, epoch loss=1.673056 Batch=479, step=5280, lr=0.178250, batch loss=0.239692, epoch loss=1.912748 Batch=539, step=5340, lr=0.178000, batch loss=0.189445, epoch loss=2.102194 Batch=599, step=5400, lr=0.177750, batch loss=0.231019, epoch loss=2.333213 Batch=659, step=5460, lr=0.177500, batch loss=0.323592, epoch loss=2.656805 Batch=719, step=5520, lr=0.177250, batch loss=0.325867, epoch loss=2.982672 Batch=779, step=5580, lr=0.177000, batch loss=0.343179, epoch loss=3.325851 Batch=839, step=5640, lr=0.176750, batch loss=0.309345, epoch loss=3.635195 Batch=899, step=5700, lr=0.176500, batch loss=0.273171, epoch loss=3.908366 Batch=959, step=5760, lr=0.176250, batch loss=0.214921, epoch loss=4.123287 Batch=1019, step=5820, lr=0.176000, batch loss=0.339150, epoch loss=4.462436 Batch=1079, step=5880, lr=0.175750, batch loss=0.207828, epoch loss=4.670264 Batch=1139, step=5940, lr=0.175500, batch loss=0.240055, epoch loss=4.910319 Batch=1199, step=6000, lr=0.175250, batch loss=0.186862, epoch loss=5.097180 Epoch=4, step=6000, lr=0.175250, epoch loss=5.097180 Batch=59, step=6060, lr=0.175000, batch loss=0.230529, epoch loss=0.230529 Batch=119, step=6120, lr=0.174750, batch loss=0.194291, epoch loss=0.424820 Batch=179, step=6180, lr=0.174500, batch loss=0.201541, epoch loss=0.626361 Batch=239, step=6240, lr=0.174250, batch loss=0.302380, epoch loss=0.928741 Batch=299, step=6300, lr=0.174000, batch loss=0.203930, epoch loss=1.132671 Batch=359, step=6360, lr=0.173750, batch loss=0.266115, epoch loss=1.398786 Batch=419, step=6420, lr=0.173500, batch loss=0.265282, epoch loss=1.664067 Batch=479, step=6480, lr=0.173250, batch loss=0.243319, epoch loss=1.907387 Batch=539, step=6540, lr=0.173000, batch loss=0.192969, epoch loss=2.100355 Batch=599, step=6600, lr=0.172750, batch loss=0.234500, epoch loss=2.334855 Batch=659, step=6660, lr=0.172500, batch loss=0.312107, epoch loss=2.646963 Batch=719, step=6720, lr=0.172250, batch loss=0.314221, epoch loss=2.961184 Batch=779, step=6780, lr=0.172000, batch loss=0.333223, epoch loss=3.294407 Batch=839, step=6840, lr=0.171750, batch loss=0.303757, epoch loss=3.598164 Batch=899, step=6900, lr=0.171500, batch loss=0.268468, epoch loss=3.866633 Batch=959, step=6960, lr=0.171250, batch loss=0.211039, epoch loss=4.077671 Batch=1019, step=7020, lr=0.171000, batch loss=0.330462, epoch loss=4.408133 Batch=1079, step=7080, lr=0.170750, batch loss=0.180866, epoch loss=4.588999 Batch=1139, step=7140, lr=0.170500, batch loss=0.216313, epoch loss=4.805312 Batch=1199, step=7200, lr=0.170250, batch loss=0.181911, epoch loss=4.987223 Epoch=5, step=7200, lr=0.170250, epoch loss=4.987223 Batch=59, step=7260, lr=0.170000, batch loss=0.232877, epoch loss=0.232877 Batch=119, step=7320, lr=0.169750, batch loss=0.184424, epoch loss=0.417301 Batch=179, step=7380, lr=0.169500, batch loss=0.196292, epoch loss=0.613593 Batch=239, step=7440, lr=0.169250, batch loss=0.290823, epoch loss=0.904416 Batch=299, step=7500, lr=0.169000, batch loss=0.200837, epoch loss=1.105253 Batch=359, step=7560, lr=0.168750, batch loss=0.258435, epoch 
loss=1.363689 Batch=419, step=7620, lr=0.168500, batch loss=0.256808, epoch loss=1.620497 Batch=479, step=7680, lr=0.168250, batch loss=0.235998, epoch loss=1.856495 Batch=539, step=7740, lr=0.168000, batch loss=0.187895, epoch loss=2.044390 Batch=599, step=7800, lr=0.167750, batch loss=0.223924, epoch loss=2.268314 Batch=659, step=7860, lr=0.167500, batch loss=0.305915, epoch loss=2.574229 Batch=719, step=7920, lr=0.167250, batch loss=0.309289, epoch loss=2.883518 Batch=779, step=7980, lr=0.167000, batch loss=0.329942, epoch loss=3.213460 Batch=839, step=8040, lr=0.166750, batch loss=0.292425, epoch loss=3.505885 Batch=899, step=8100, lr=0.166500, batch loss=0.261775, epoch loss=3.767660 Batch=959, step=8160, lr=0.166250, batch loss=0.193295, epoch loss=3.960955 Batch=1019, step=8220, lr=0.166000, batch loss=0.314033, epoch loss=4.274988 Batch=1079, step=8280, lr=0.165750, batch loss=0.172099, epoch loss=4.447087 Batch=1139, step=8340, lr=0.165500, batch loss=0.209742, epoch loss=4.656829 Batch=1199, step=8400, lr=0.165250, batch loss=0.178275, epoch loss=4.835103 Epoch=6, step=8400, lr=0.165250, epoch loss=4.835103 Batch=59, step=8460, lr=0.165000, batch loss=0.229725, epoch loss=0.229725 Batch=119, step=8520, lr=0.164750, batch loss=0.175017, epoch loss=0.404742 Batch=179, step=8580, lr=0.164500, batch loss=0.187817, epoch loss=0.592559 Batch=239, step=8640, lr=0.164250, batch loss=0.278203, epoch loss=0.870762 Batch=299, step=8700, lr=0.163750, batch loss=0.191994, epoch loss=1.062755 Batch=359, step=8760, lr=0.163750, batch loss=0.248543, epoch loss=1.311299 Batch=419, step=8820, lr=0.163500, batch loss=0.245673, epoch loss=1.556971 Batch=479, step=8880, lr=0.163250, batch loss=0.228519, epoch loss=1.785490 Batch=539, step=8940, lr=0.163000, batch loss=0.178226, epoch loss=1.963716 Batch=599, step=9000, lr=0.162750, batch loss=0.217296, epoch loss=2.181012 Batch=659, step=9060, lr=0.162500, batch loss=0.294862, epoch loss=2.475874 Batch=719, step=9120, lr=0.162250, batch loss=0.296342, epoch loss=2.772216 Batch=779, step=9180, lr=0.162000, batch loss=0.316821, epoch loss=3.089037 Batch=839, step=9240, lr=0.161750, batch loss=0.287173, epoch loss=3.376211 Batch=899, step=9300, lr=0.161500, batch loss=0.251128, epoch loss=3.627338 Batch=959, step=9360, lr=0.161250, batch loss=0.190405, epoch loss=3.817743 Batch=1019, step=9420, lr=0.161000, batch loss=0.311928, epoch loss=4.129670 Batch=1079, step=9480, lr=0.160750, batch loss=0.191415, epoch loss=4.321085 Batch=1139, step=9540, lr=0.160500, batch loss=0.215877, epoch loss=4.536962 Batch=1199, step=9600, lr=0.160250, batch loss=0.165629, epoch loss=4.702591 Epoch=7, step=9600, lr=0.160250, epoch loss=4.702591 Batch=59, step=9660, lr=0.159750, batch loss=0.197124, epoch loss=0.197124 Batch=119, step=9720, lr=0.159750, batch loss=0.165474, epoch loss=0.362598 Batch=179, step=9780, lr=0.159500, batch loss=0.179284, epoch loss=0.541882 Batch=239, step=9840, lr=0.159000, batch loss=0.263828, epoch loss=0.805710 Batch=299, step=9900, lr=0.159000, batch loss=0.182147, epoch loss=0.987857 Batch=359, step=9960, lr=0.158750, batch loss=0.240882, epoch loss=1.228739 Batch=419, step=10020, lr=0.158500, batch loss=0.232954, epoch loss=1.461692 Batch=479, step=10080, lr=0.158250, batch loss=0.213229, epoch loss=1.674922 Batch=539, step=10140, lr=0.158000, batch loss=0.170538, epoch loss=1.845459 Batch=599, step=10200, lr=0.157750, batch loss=0.200270, epoch loss=2.045729 Batch=659, step=10260, lr=0.157500, batch loss=0.283039, epoch loss=2.328768 
Batch=719, step=10320, lr=0.157250, batch loss=0.279698, epoch loss=2.608466 Batch=779, step=10380, lr=0.157000, batch loss=0.300731, epoch loss=2.909197 Batch=839, step=10440, lr=0.156750, batch loss=0.271014, epoch loss=3.180211 Batch=899, step=10500, lr=0.156500, batch loss=0.239747, epoch loss=3.419958 Batch=959, step=10560, lr=0.156250, batch loss=0.198125, epoch loss=3.618084 Batch=1019, step=10620, lr=0.156000, batch loss=0.293644, epoch loss=3.911728 Batch=1079, step=10680, lr=0.155750, batch loss=0.179929, epoch loss=4.091657 Batch=1139, step=10740, lr=0.155500, batch loss=0.204219, epoch loss=4.295875 Batch=1199, step=10800, lr=0.155250, batch loss=0.154717, epoch loss=4.450593 Epoch=8, step=10800, lr=0.155250, epoch loss=4.450593 Batch=59, step=10860, lr=0.155000, batch loss=0.176369, epoch loss=0.176369 Batch=119, step=10920, lr=0.154750, batch loss=0.147164, epoch loss=0.323533 Batch=179, step=10980, lr=0.154500, batch loss=0.166011, epoch loss=0.489544 Batch=239, step=11040, lr=0.154250, batch loss=0.241473, epoch loss=0.731017 Batch=299, step=11100, lr=0.154000, batch loss=0.170471, epoch loss=0.901487 Batch=359, step=11160, lr=0.153750, batch loss=0.221276, epoch loss=1.122763 Batch=419, step=11220, lr=0.153500, batch loss=0.218353, epoch loss=1.341116 Batch=479, step=11280, lr=0.153250, batch loss=0.209255, epoch loss=1.550371 Batch=539, step=11340, lr=0.153000, batch loss=0.158902, epoch loss=1.709273 Batch=599, step=11400, lr=0.152750, batch loss=0.177408, epoch loss=1.886681 Batch=659, step=11460, lr=0.152500, batch loss=0.265252, epoch loss=2.151933 Batch=719, step=11520, lr=0.152250, batch loss=0.260150, epoch loss=2.412083 Batch=779, step=11580, lr=0.152000, batch loss=0.272845, epoch loss=2.684928 Batch=839, step=11640, lr=0.151750, batch loss=0.256176, epoch loss=2.941104 Batch=899, step=11700, lr=0.151500, batch loss=0.212604, epoch loss=3.153707 Batch=959, step=11760, lr=0.151250, batch loss=0.169073, epoch loss=3.322780 Batch=1019, step=11820, lr=0.151000, batch loss=0.266263, epoch loss=3.589044 Batch=1079, step=11880, lr=0.150750, batch loss=0.149095, epoch loss=3.738139 Batch=1139, step=11940, lr=0.150500, batch loss=0.184222, epoch loss=3.922361 Batch=1199, step=12000, lr=0.150250, batch loss=0.138547, epoch loss=4.060908 Epoch=9, step=12000, lr=0.150250, epoch loss=4.060908 Batch=59, step=12060, lr=0.150000, batch loss=0.157994, epoch loss=0.157994 Batch=119, step=12120, lr=0.149750, batch loss=0.129039, epoch loss=0.287033 Batch=179, step=12180, lr=0.149500, batch loss=0.153541, epoch loss=0.440574 Batch=239, step=12240, lr=0.149250, batch loss=0.218309, epoch loss=0.658883 Batch=299, step=12300, lr=0.149000, batch loss=0.141634, epoch loss=0.800517 Batch=359, step=12360, lr=0.148750, batch loss=0.197463, epoch loss=0.997980 Batch=419, step=12420, lr=0.148500, batch loss=0.204504, epoch loss=1.202484 Batch=479, step=12480, lr=0.148250, batch loss=0.179937, epoch loss=1.382421 Batch=539, step=12540, lr=0.148000, batch loss=0.141341, epoch loss=1.523761 Batch=599, step=12600, lr=0.147750, batch loss=0.150485, epoch loss=1.674246 Batch=659, step=12660, lr=0.147500, batch loss=0.224038, epoch loss=1.898284 Batch=719, step=12720, lr=0.147250, batch loss=0.237101, epoch loss=2.135385 Batch=779, step=12780, lr=0.147000, batch loss=0.258476, epoch loss=2.393861 Batch=839, step=12840, lr=0.146750, batch loss=0.237665, epoch loss=2.631526 Batch=899, step=12900, lr=0.146500, batch loss=0.229461, epoch loss=2.860986 Batch=959, step=12960, lr=0.146250, batch 
loss=0.139596, epoch loss=3.000582 Batch=1019, step=13020, lr=0.146000, batch loss=0.209285, epoch loss=3.209868 Batch=1079, step=13080, lr=0.145750, batch loss=0.118359, epoch loss=3.328227 Batch=1139, step=13140, lr=0.145500, batch loss=0.156564, epoch loss=3.484791 Batch=1199, step=13200, lr=0.145250, batch loss=0.117177, epoch loss=3.601968 Epoch=10, step=13200, lr=0.145250, epoch loss=3.601968 Batch=59, step=13260, lr=0.145000, batch loss=0.138014, epoch loss=0.138014 Batch=119, step=13320, lr=0.144750, batch loss=0.116822, epoch loss=0.254836 Batch=179, step=13380, lr=0.144250, batch loss=0.127997, epoch loss=0.382833 Batch=239, step=13440, lr=0.144250, batch loss=0.185218, epoch loss=0.568051 Batch=299, step=13500, lr=0.144000, batch loss=0.118519, epoch loss=0.686569 Batch=359, step=13560, lr=0.143750, batch loss=0.162977, epoch loss=0.849547 Batch=419, step=13620, lr=0.143500, batch loss=0.161388, epoch loss=1.010934 Batch=479, step=13680, lr=0.143250, batch loss=0.146726, epoch loss=1.157660 Batch=539, step=13740, lr=0.143000, batch loss=0.119550, epoch loss=1.277210 Batch=599, step=13800, lr=0.142750, batch loss=0.120067, epoch loss=1.397277 Batch=659, step=13860, lr=0.142500, batch loss=0.176211, epoch loss=1.573488 Batch=719, step=13920, lr=0.142250, batch loss=0.177821, epoch loss=1.751309 Batch=779, step=13980, lr=0.142000, batch loss=0.195126, epoch loss=1.946435 Batch=839, step=14040, lr=0.141750, batch loss=0.185731, epoch loss=2.132165 Batch=899, step=14100, lr=0.141500, batch loss=0.158306, epoch loss=2.290471 Batch=959, step=14160, lr=0.141250, batch loss=0.134044, epoch loss=2.424515 Batch=1019, step=14220, lr=0.140750, batch loss=0.282996, epoch loss=2.707511 Batch=1079, step=14280, lr=0.140750, batch loss=0.088703, epoch loss=2.796214 Batch=1139, step=14340, lr=0.140500, batch loss=0.130230, epoch loss=2.926444 Batch=1199, step=14400, lr=0.140250, batch loss=0.092646, epoch loss=3.019090 Epoch=11, step=14400, lr=0.140250, epoch loss=3.019090 Batch=59, step=14460, lr=0.140000, batch loss=0.118849, epoch loss=0.118849 Batch=119, step=14520, lr=0.139750, batch loss=0.105374, epoch loss=0.224223 Batch=179, step=14580, lr=0.139500, batch loss=0.103803, epoch loss=0.328026 Batch=239, step=14640, lr=0.139250, batch loss=0.140415, epoch loss=0.468441 Batch=299, step=14700, lr=0.139000, batch loss=0.078777, epoch loss=0.547217 Batch=359, step=14760, lr=0.138750, batch loss=0.117778, epoch loss=0.664996 Batch=419, step=14820, lr=0.138500, batch loss=0.123653, epoch loss=0.788649 Batch=479, step=14880, lr=0.138250, batch loss=0.104424, epoch loss=0.893073 Batch=539, step=14940, lr=0.138000, batch loss=0.115156, epoch loss=1.008230 Batch=599, step=15000, lr=0.137750, batch loss=0.087054, epoch loss=1.095283 Batch=659, step=15060, lr=0.137500, batch loss=0.125383, epoch loss=1.220666 Batch=719, step=15120, lr=0.137250, batch loss=0.130994, epoch loss=1.351660 Batch=779, step=15180, lr=0.137000, batch loss=0.162351, epoch loss=1.514011 Batch=839, step=15240, lr=0.136750, batch loss=0.173071, epoch loss=1.687082 Batch=899, step=15300, lr=0.136500, batch loss=0.291595, epoch loss=1.978677 Batch=959, step=15360, lr=0.136250, batch loss=0.058765, epoch loss=2.037442 Batch=1019, step=15420, lr=0.136000, batch loss=0.140763, epoch loss=2.178205 Batch=1079, step=15480, lr=0.135500, batch loss=0.039456, epoch loss=2.217660 Batch=1139, step=15540, lr=0.135500, batch loss=0.088854, epoch loss=2.306515 Batch=1199, step=15600, lr=0.135250, batch loss=0.055496, epoch loss=2.362011 Epoch=12, 
step=15600, lr=0.135250, epoch loss=2.362011 Batch=59, step=15660, lr=0.135000, batch loss=0.084149, epoch loss=0.084149 Batch=119, step=15720, lr=0.134750, batch loss=0.124686, epoch loss=0.208835 Batch=179, step=15780, lr=0.134500, batch loss=0.096740, epoch loss=0.305575 Batch=239, step=15840, lr=0.134250, batch loss=0.097591, epoch loss=0.403165 Batch=299, step=15900, lr=0.134000, batch loss=0.038960, epoch loss=0.442126 Batch=359, step=15960, lr=0.133750, batch loss=0.080935, epoch loss=0.523061 Batch=419, step=16020, lr=0.133500, batch loss=0.080361, epoch loss=0.603421 Batch=479, step=16080, lr=0.133250, batch loss=0.064688, epoch loss=0.668109 Batch=539, step=16140, lr=0.133000, batch loss=0.055554, epoch loss=0.723663 Batch=599, step=16200, lr=0.132750, batch loss=0.102058, epoch loss=0.825720 Batch=659, step=16260, lr=0.132500, batch loss=0.077244, epoch loss=0.902964 Batch=719, step=16320, lr=0.132250, batch loss=0.104647, epoch loss=1.007611 Batch=779, step=16380, lr=0.132000, batch loss=0.228621, epoch loss=1.236232 Batch=839, step=16440, lr=0.131750, batch loss=0.091528, epoch loss=1.327759 Batch=899, step=16500, lr=0.131500, batch loss=0.080964, epoch loss=1.408723 Batch=959, step=16560, lr=0.131250, batch loss=0.035324, epoch loss=1.444047 Batch=1019, step=16620, lr=0.131000, batch loss=0.101005, epoch loss=1.545052 Batch=1079, step=16680, lr=0.130750, batch loss=0.027451, epoch loss=1.572503 Batch=1139, step=16740, lr=0.130500, batch loss=0.058338, epoch loss=1.630841 Batch=1199, step=16800, lr=0.130250, batch loss=0.028245, epoch loss=1.659086 Epoch=13, step=16800, lr=0.130250, epoch loss=1.659086 Batch=59, step=16860, lr=0.130000, batch loss=0.035383, epoch loss=0.035383 Batch=119, step=16920, lr=0.129750, batch loss=0.047869, epoch loss=0.083252 Batch=179, step=16980, lr=0.129500, batch loss=0.047671, epoch loss=0.130923 Batch=239, step=17040, lr=0.129250, batch loss=0.070718, epoch loss=0.201641 Batch=299, step=17100, lr=0.129000, batch loss=0.069119, epoch loss=0.270761 Batch=359, step=17160, lr=0.128750, batch loss=0.055845, epoch loss=0.326605 Batch=419, step=17220, lr=0.128500, batch loss=0.097534, epoch loss=0.424139 Batch=479, step=17280, lr=0.128250, batch loss=0.024313, epoch loss=0.448452 Batch=539, step=17340, lr=0.128000, batch loss=0.032001, epoch loss=0.480453 Batch=599, step=17400, lr=0.127750, batch loss=0.049615, epoch loss=0.530068 Batch=659, step=17460, lr=0.127500, batch loss=0.045154, epoch loss=0.575222 Batch=719, step=17520, lr=0.127250, batch loss=0.042588, epoch loss=0.617810 Batch=779, step=17580, lr=0.127000, batch loss=0.076017, epoch loss=0.693827 Batch=839, step=17640, lr=0.126750, batch loss=0.067812, epoch loss=0.761638 Batch=899, step=17700, lr=0.126500, batch loss=0.086796, epoch loss=0.848435 Batch=959, step=17760, lr=0.126250, batch loss=0.039866, epoch loss=0.888301 Batch=1019, step=17820, lr=0.126000, batch loss=0.068208, epoch loss=0.956509 Batch=1079, step=17880, lr=0.125750, batch loss=0.019535, epoch loss=0.976044 Batch=1139, step=17940, lr=0.125500, batch loss=0.038048, epoch loss=1.014092 Batch=1199, step=18000, lr=0.125250, batch loss=0.018557, epoch loss=1.032649 Epoch=14, step=18000, lr=0.125250, epoch loss=1.032649 Batch=59, step=18060, lr=0.125000, batch loss=0.012268, epoch loss=0.012268 Batch=119, step=18120, lr=0.124750, batch loss=0.019830, epoch loss=0.032098 Batch=179, step=18180, lr=0.124500, batch loss=0.029246, epoch loss=0.061344 Batch=239, step=18240, lr=0.124250, batch loss=0.033108, epoch loss=0.094452 
Batch=299, step=18300, lr=0.124000, batch loss=0.009892, epoch loss=0.104345
Batch=359, step=18360, lr=0.123750, batch loss=0.028450, epoch loss=0.132795
Batch=419, step=18420, lr=0.123500, batch loss=0.031047, epoch loss=0.163842
Batch=479, step=18480, lr=0.123250, batch loss=0.013829, epoch loss=0.177671
Batch=539, step=18540, lr=0.123000, batch loss=0.033232, epoch loss=0.210903
Batch=599, step=18600, lr=0.122750, batch loss=0.039040, epoch loss=0.249943
Batch=659, step=18660, lr=0.122500, batch loss=0.026057, epoch loss=0.276000
Batch=719, step=18720, lr=0.122250, batch loss=0.046504, epoch loss=0.322504
Batch=779, step=18780, lr=0.122000, batch loss=0.094489, epoch loss=0.416993
Batch=839, step=18840, lr=0.121750, batch loss=0.063047, epoch loss=0.480040
Batch=899, step=18900, lr=0.121500, batch loss=0.089446, epoch loss=0.569486
Batch=959, step=18960, lr=0.121250, batch loss=0.014348, epoch loss=0.583834
Batch=1019, step=19020, lr=0.121000, batch loss=0.018304, epoch loss=0.602138
Batch=1079, step=19080, lr=0.120750, batch loss=0.003238, epoch loss=0.605376
Batch=1139, step=19140, lr=0.120250, batch loss=0.020213, epoch loss=0.625590
Batch=1199, step=19200, lr=0.120000, batch loss=0.010368, epoch loss=0.635957
Epoch=15, step=19200, lr=0.120000, epoch loss=0.635957
Batch=59, step=19260, lr=0.120000, batch loss=0.005075, epoch loss=0.005075
Batch=119, step=19320, lr=0.119750, batch loss=0.020387, epoch loss=0.025463
Batch=179, step=19380, lr=0.119500, batch loss=0.053126, epoch loss=0.078589
Batch=239, step=19440, lr=0.119250, batch loss=0.023285, epoch loss=0.101874
Batch=299, step=19500, lr=0.119000, batch loss=0.013090, epoch loss=0.114965
Batch=359, step=19560, lr=0.118750, batch loss=0.027079, epoch loss=0.142044
Batch=419, step=19620, lr=0.118500, batch loss=0.020027, epoch loss=0.162071
Batch=479, step=19680, lr=0.118250, batch loss=0.008306, epoch loss=0.170376
Batch=539, step=19740, lr=0.118000, batch loss=0.016668, epoch loss=0.187045
Batch=599, step=19800, lr=0.117750, batch loss=0.020852, epoch loss=0.207896
Batch=659, step=19860, lr=0.117500, batch loss=0.017170, epoch loss=0.225066
Batch=719, step=19920, lr=0.117250, batch loss=0.036500, epoch loss=0.261566
Batch=779, step=19980, lr=0.117000, batch loss=0.077040, epoch loss=0.338606
Batch=839, step=20040, lr=0.116500, batch loss=0.030356, epoch loss=0.368961
Batch=899, step=20100, lr=0.116500, batch loss=0.036523, epoch loss=0.405484
Batch=959, step=20160, lr=0.116250, batch loss=0.012716, epoch loss=0.418200
Batch=1019, step=20220, lr=0.116000, batch loss=0.014946, epoch loss=0.433146
Batch=1079, step=20280, lr=0.115750, batch loss=0.002167, epoch loss=0.435313
Batch=1139, step=20340, lr=0.115500, batch loss=0.015474, epoch loss=0.450787
Batch=1199, step=20400, lr=0.115250, batch loss=0.008239, epoch loss=0.459026
Epoch=16, step=20400, lr=0.115250, epoch loss=0.459026
Batch=59, step=20460, lr=0.115000, batch loss=0.004102, epoch loss=0.004102
Batch=119, step=20520, lr=0.114750, batch loss=0.011706, epoch loss=0.015808
Batch=179, step=20580, lr=0.114500, batch loss=0.024141, epoch loss=0.039949
Batch=239, step=20640, lr=0.114250, batch loss=0.015067, epoch loss=0.055016
Batch=299, step=20700, lr=0.114000, batch loss=0.002549, epoch loss=0.057565
Batch=359, step=20760, lr=0.113750, batch loss=0.014289, epoch loss=0.071854
Batch=419, step=20820, lr=0.113500, batch loss=0.015600, epoch loss=0.087454
Batch=479, step=20880, lr=0.113000, batch loss=0.005287, epoch loss=0.092741
Batch=539, step=20940, lr=0.113000, batch loss=0.016020, epoch loss=0.108761
Batch=599, step=21000, lr=0.112750, batch loss=0.018349, epoch loss=0.127110
Batch=659, step=21060, lr=0.112500, batch loss=0.014928, epoch loss=0.142038
Batch=719, step=21120, lr=0.112250, batch loss=0.039186, epoch loss=0.181224
Batch=779, step=21180, lr=0.112000, batch loss=0.071773, epoch loss=0.252997
Batch=839, step=21240, lr=0.111750, batch loss=0.026067, epoch loss=0.279063
Batch=899, step=21300, lr=0.111500, batch loss=0.036532, epoch loss=0.315595
Batch=959, step=21360, lr=0.111250, batch loss=0.010234, epoch loss=0.325830
Batch=1019, step=21420, lr=0.111000, batch loss=0.011834, epoch loss=0.337664
Batch=1079, step=21480, lr=0.110750, batch loss=0.000080, epoch loss=0.337744
Batch=1139, step=21540, lr=0.110500, batch loss=0.011798, epoch loss=0.349542
Batch=1199, step=21600, lr=0.110250, batch loss=0.005134, epoch loss=0.354675
Epoch=17, step=21600, lr=0.110250, epoch loss=0.354675
Batch=59, step=21660, lr=0.110000, batch loss=0.002323, epoch loss=0.002323
Batch=119, step=21720, lr=0.109750, batch loss=0.006677, epoch loss=0.009000
Batch=179, step=21780, lr=0.109500, batch loss=0.013198, epoch loss=0.022198
Batch=239, step=21840, lr=0.109250, batch loss=0.015121, epoch loss=0.037319
Batch=299, step=21900, lr=0.108750, batch loss=0.002502, epoch loss=0.039821
Batch=359, step=21960, lr=0.108750, batch loss=0.012016, epoch loss=0.051837
Batch=419, step=22020, lr=0.108500, batch loss=0.011676, epoch loss=0.063513
Batch=479, step=22080, lr=0.108250, batch loss=0.002567, epoch loss=0.066079
Batch=539, step=22140, lr=0.108000, batch loss=0.018065, epoch loss=0.084145
Batch=599, step=22200, lr=0.107750, batch loss=0.016500, epoch loss=0.100645
Batch=659, step=22260, lr=0.107500, batch loss=0.016526, epoch loss=0.117171
Batch=719, step=22320, lr=0.107250, batch loss=0.021045, epoch loss=0.138216
Batch=779, step=22380, lr=0.107000, batch loss=0.028831, epoch loss=0.167048
Batch=839, step=22440, lr=0.106750, batch loss=0.029179, epoch loss=0.196227
Batch=899, step=22500, lr=0.106500, batch loss=0.023808, epoch loss=0.220035
Batch=959, step=22560, lr=0.106250, batch loss=0.008553, epoch loss=0.228587
Batch=1019, step=22620, lr=0.106000, batch loss=0.007343, epoch loss=0.235930
Batch=1079, step=22680, lr=0.105750, batch loss=0.001913, epoch loss=0.237843
Batch=1139, step=22740, lr=0.105500, batch loss=0.010232, epoch loss=0.248075
Batch=1199, step=22800, lr=0.105250, batch loss=0.004924, epoch loss=0.252999
Epoch=18, step=22800, lr=0.105250, epoch loss=0.252999
Batch=59, step=22860, lr=0.105000, batch loss=0.002508, epoch loss=0.002508
Batch=119, step=22920, lr=0.104750, batch loss=0.004359, epoch loss=0.006867
Batch=179, step=22980, lr=0.104500, batch loss=0.012181, epoch loss=0.019047
Batch=239, step=23040, lr=0.104250, batch loss=0.008397, epoch loss=0.027445
Batch=299, step=23100, lr=0.104000, batch loss=0.004152, epoch loss=0.031597
Batch=359, step=23160, lr=0.103750, batch loss=0.011764, epoch loss=0.043361
Batch=419, step=23220, lr=0.103500, batch loss=0.010618, epoch loss=0.053979
Batch=479, step=23280, lr=0.103250, batch loss=0.002866, epoch loss=0.056845
Batch=539, step=23340, lr=0.103000, batch loss=0.016366, epoch loss=0.073212
Batch=599, step=23400, lr=0.102750, batch loss=0.013327, epoch loss=0.086539
Batch=659, step=23460, lr=0.102500, batch loss=0.010431, epoch loss=0.096970
Batch=719, step=23520, lr=0.102250, batch loss=0.014971, epoch loss=0.111941
Batch=779, step=23580, lr=0.102000, batch loss=0.022079, epoch loss=0.134021
Batch=839, step=23640, lr=0.101750, batch loss=0.024341, epoch loss=0.158361
Batch=899, step=23700, lr=0.101500, batch loss=0.023362, epoch loss=0.181724
Batch=959, step=23760, lr=0.101250, batch loss=0.007533, epoch loss=0.189257
Batch=1019, step=23820, lr=0.101000, batch loss=0.007682, epoch loss=0.196939
Batch=1079, step=23880, lr=0.100750, batch loss=0.000732, epoch loss=0.197671
Batch=1139, step=23940, lr=0.100500, batch loss=0.008440, epoch loss=0.206111
Batch=1199, step=24000, lr=0.100250, batch loss=0.004624, epoch loss=0.210735
Epoch=19, step=24000, lr=0.100250, epoch loss=0.210735
Half-moons scatterplot and decision boundary:
┌────────────────────────────────────────────────────────────────────────────────────────────────────┐
│********************************#*******************************************************************│
│**********************#*#*#######*###*#####*********************************************************│
│**********************#########################*****************************************************│
│*****************#**########*######*###########*###*************************************************│
│***************#################*###################************************************************│
│************######*#################*#################**********************************************│
│**********#*#####*########*#**************##*#########*#********************************************│
│***********########*##*#******************#*****##########******************************************│
│***********###########*************************############***************************************..│
│********######*####*********************************###*###*#**********************************.....│
│*******######**##***************.******************#*######*#*******************************........│
│*******##*##**##***********..........***************########*##****************************.........│
│*****#######************.......%...%%...***************#########*************************.........%.│
│******######***********.........%........***************##*#####************************......%.%.%.│
│***#########**********.........%%%.%%......*************#*#######**********************......%.%%%%.│
│****#######**********..........%%%%.........************#########*********************.......%%.%%.%│
│**#######************..........%%%%%%%.......**************###*###******************.........%%%%%%.│
│*##*####************...........%%%%%%%.........***********########*****************..........%%%%%%.│
│*#######************...........%%%%%%%..........************#######***************...........%%%%%%.│
│*##*####***********............%%.%%%%%..........************####***************............%%%%%%%.│
│*#####*#***********.............%%%%%%%............**********##*###************..............%%%%%..│
│#######***********.............%.%%%%%%.............*********#######**********.............%%%%.%%..│
│#####*#**********...............%%%%%%%...............*******#######*********..............%%%%%%%%.│
│###*#*#**********...............%%%%%%%%%..............*******######*******................%%%%%%...│
│#######*********.................%%%%%%%%...............*****###*###******................%%%%%%....│
│######**********.................%%%%%%%%%................***#*###*******...............%%%%%%%%%...│
│*#*##*#********...................%%%%%%%%%%...............***######***..................%%%%%%.....│
│#****##********....................%%%%%%%%%................***###*#**................%.%%%%%%%.....│
│**************.....................%.%%%%%%...................*******..................%.%%.%%......│
│**************.......................%..%%%%%%%................*****..............%.%%%%%%%%%.......│
│*************.........................%.%%%.%%%%................**................%%%%%%%.%.%.......│
│************............................%..%%%%..%................................%%%%%%%%..........│
│************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........│
│***********..............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........│
│***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............│
│**********...................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............│
│**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................│
│*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................│
│*********............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................│
│********................................................%...%%%%.%%.%%%%..%.........................│
└────────────────────────────────────────────────────────────────────────────────────────────────────┘
2025-04-07 01:12.54 ---> saved as "a323295aa11f8cfd5e456b65e7e47c650607f6f8ea5ab3f8c53edecd484b244a"
Job succeeded
2025-04-07 01:12.55: Job succeeded
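Two regularities are visible in the training log above: the learning rate drops by roughly 0.00025 at each 60-step reporting interval (0.005 per 1200-step epoch), and the reported epoch loss is the cumulative sum of the reported batch losses. The OCaml sketch below reproduces a few of the logged lines under those assumptions; it is an illustration inferred from the printed values, not OCANNL's actual training API, and the constant 0.200250 is an inferred intercept (a handful of log lines deviate from the linear schedule by one interval).

(* A minimal sketch, not OCANNL's API: it mimics the bookkeeping visible in
   the training log above, assuming a roughly linear learning-rate decay of
   0.00025 per 60-step report and an epoch loss that is the running sum of the
   reported batch losses. *)
let report_interval = 60

(* Inferred schedule: lr = 0.100250 at step 24000, increasing by 0.00025 per
   interval going backwards in time. *)
let lr_at ~step = 0.200250 -. 0.000250 *. Float.of_int (step / report_interval)

let report_epoch ~first_step batch_losses =
  ignore
    (List.fold_left
       (fun (i, epoch_loss) batch_loss ->
         let step = first_step + ((i + 1) * report_interval) in
         let epoch_loss = epoch_loss +. batch_loss in
         Printf.printf
           "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
           (((i + 1) * report_interval) - 1)
           step (lr_at ~step) batch_loss epoch_loss;
         (i + 1, epoch_loss))
       (0, 0.0) batch_losses)

let () =
  (* First three reports of the epoch that starts after step 16800; the output
     matches the corresponding lines of the log above. *)
  report_epoch ~first_step:16800 [ 0.035383; 0.047869; 0.047671 ]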