2025-04-10 12:26.18: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (a65dd8955db6894ca41a3c2080e5dda2dd1bf41f) (linux-x86_64:fedora-40-5.3_opam-2.3)
Base: ocaml/opam:fedora-40-ocaml-5.3@sha256:085ca0b50661db0cd2ae05f976d29b98de93e440abe3822ae5440449c154f2e9

Opam project build

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard a65dd895
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:fedora-40-ocaml-5.3@sha256:085ca0b50661db0cd2ae05f976d29b98de93e440abe3822ae5440449c154f2e9
# fedora-40-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo dnf install -y findutils
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 598efb46ec7387aed249220ca6a2bc39eb4b94e9 || git fetch origin master) && git reset -q --hard 598efb46ec7387aed249220ca6a2bc39eb4b94e9 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
    opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.0 dune-configurator.3.18.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
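The reproduction commands above can be replayed as one small script; a minimal sketch follows, assuming a local image tag ocannl-ci-repro and an optional re-run of the test step inside the built image (both the tag and the final docker run are illustrative additions, not part of the CI job):

#!/usr/bin/env sh
set -eu
# Check out the exact commit this job tested.
git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master"
cd "ocannl"
git reset --hard a65dd895
# Write the Dockerfile exactly as emitted above (the heredoc between the
# END-OF-DOCKERFILE markers), then build it under a reusable tag.
docker build -t ocannl-ci-repro .
# Optionally repeat the final build/test step inside the resulting image.
docker run --rm ocannl-ci-repro opam exec -- dune build @install @check @runtest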
2025-04-10 12:26.18: Using cache hint "ahrefs/ocannl-ocaml/opam:fedora-40-ocaml-5.3@sha256:085ca0b50661db0cd2ae05f976d29b98de93e440abe3822ae5440449c154f2e9-fedora-40-5.3_opam-2.3-ab22fb8412356c04fa0386e1ea5b2a04"
2025-04-10 12:26.18: Using OBuilder spec:
((from ocaml/opam:fedora-40-ocaml-5.3@sha256:085ca0b50661db0cd2ae05f976d29b98de93e440abe3822ae5440449c154f2e9)
 (comment fedora-40-5.3_opam-2.3)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (network host) (shell "sudo dnf install -y findutils"))
 (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
      (shell "cd ~/opam-repository && (git cat-file -e 598efb46ec7387aed249220ca6a2bc39eb4b94e9 || git fetch origin master) && git reset -q --hard 598efb46ec7387aed249220ca6a2bc39eb4b94e9 && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
 (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'"))
 (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'"))
 (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.0 dune-configurator.3.18.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
      (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
      (shell "opam install $DEPS"))
 (copy (src .)
(dst /src)) (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")) ) 2025-04-10 12:26.18: Waiting for resource in pool OCluster 2025-04-10 12:45.44: Waiting for worker… 2025-04-10 12:47.52: Got resource from pool OCluster Building on phoebe.caelum.ci.dev HEAD is now at 2032408f More %cd flexibility: derive projections for `!.` and `!..` HEAD is now at a65dd895 Tiny formatting (from ocaml/opam:fedora-40-ocaml-5.3@sha256:085ca0b50661db0cd2ae05f976d29b98de93e440abe3822ae5440449c154f2e9) 2025-04-10 12:47.56 ---> using "21461d52a42a3c1f560b945187f02d66a4c3fb0129681d4a4c38150df990fcac" from cache /: (comment fedora-40-5.3_opam-2.3) /: (user (uid 1000) (gid 1000)) /: (env CLICOLOR_FORCE 1) /: (env OPAMCOLOR always) /: (workdir /src) /src: (run (network host) (shell "sudo dnf install -y findutils")) Fedora 40 - x86_64 - Updates 93 kB/s | 25 kB 00:00 Fedora 40 - x86_64 - Updates 3.0 MB/s | 4.6 MB 00:01 Last metadata expiration check: 0:00:08 ago on Thu Apr 10 12:47:59 2025. Package findutils-1:4.9.0-9.fc40.x86_64 is already installed. Dependencies resolved. Nothing to do. Complete! 2025-04-10 12:48.11 ---> saved as "c8d3453e3df4379bb3a42b996ce71b8f47f067d0514c64547d802d5a7ccae45c" /src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam")) 2025-04-10 12:48.12 ---> saved as "c264a6e325f77f530b918cc14676f703be70eb587b7425d9924cd48a7c60d8a2" /src: (run (shell "opam init --reinit -ni")) Configuring from /home/opam/.opamrc and then from built-in defaults. Checking for available remotes: rsync and local, git. - you won't be able to use mercurial repositories unless you install the hg command on your system. - you won't be able to use darcs repositories unless you install the darcs command on your system. This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted. You may want to back it up before going further. Continue? [y/n] y Format upgrade done. [NOTE] The 'jobs' option was reset, its value was 39 and its new value will vary according to the current number of cores on your machine. 
You can restore the fixed value using: opam option jobs=39 --global <><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><> [default] Initialised 2025-04-10 12:49.08 ---> saved as "6b021592e775df94471d4b558b9050a2ecffa9c933f0c2d9589796692be64eee" /src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version")) Linux 5.15.0-134-generic The OCaml toplevel, version 5.3.0 2.3.0 2025-04-10 12:49.08 ---> saved as "1954af2adf48b2b06ae74ce279b035e8c369502e52b863979cd038a334ece1a5" /src: (workdir /src) /src: (run (shell "sudo chown opam /src")) 2025-04-10 12:49.09 ---> saved as "ce6f481f81ba3a86c17c9b8fdf7b92acb5e212833de78278fccbfb8d2ef3a9c4" /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 598efb46ec7387aed249220ca6a2bc39eb4b94e9 || git fetch origin master) && git reset -q --hard 598efb46ec7387aed249220ca6a2bc39eb4b94e9 && git log --no-decorate -n1 --oneline && opam update -u")) From https://github.com/ocaml/opam-repository * branch master -> FETCH_HEAD da74d7829f..fa0e8c74bf master -> origin/master 598efb46ec Merge pull request #27716 from avsm/fix-mpopcnt <><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><> [default] synchronised from git+file:///home/opam/opam-repository Everything as up-to-date as possible (run with --verbose to show unavailable upgrades). However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages. Nothing to do. # To update the current shell environment, run: eval $(opam env) 2025-04-10 12:49.55 ---> saved as "788214a1685cb5691059e74206a50cef9beccdb8ca5a67957668fc21539e4c68" /src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./)) 2025-04-10 12:49.55 ---> saved as "724d60f6c50c32610eea7a2a322e5d9ea56c5ae0b700f5bb28105a98158dfdc1" /src: (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'")) [neural_nets_lib.dev] synchronised (file:///src) neural_nets_lib is now pinned to file:///src (version dev) [arrayjit.dev] synchronised (file:///src) arrayjit is now pinned to file:///src (version dev) 2025-04-10 12:49.59 ---> saved as "55b163ca8e443a660c5129bb19ab0cff4c78d4da05400f0f21ae3b01fe70f404" /src: (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'")) 2025-04-10 12:50.00 ---> saved as "2f16b2a0f4700db2647b566c47b915254a4c387e422672c29fe92d7caffc1e7e" /src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.0 dune-configurator.3.18.0 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 
ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0") /src: (env CI true) /src: (env OCAMLCI true) /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS")) + /usr/bin/sudo "yum" "makecache" - Fedora 40 - x86_64 194 kB/s | 27 kB 00:00 - Fedora 40 openh264 (From Cisco) - x86_64 9.2 kB/s | 989 B 00:00 - Fedora 40 - x86_64 - Updates 167 kB/s | 25 kB 00:00 - Metadata cache created. <><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><> [arrayjit.dev] synchronised (file:///src) [neural_nets_lib.dev] synchronised (file:///src) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). [NOTE] Package ocaml-config is already installed (current version is 3). [NOTE] Package ocaml-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml is already installed (current version is 5.3.0). [NOTE] Package base-unix is already installed (current version is base). [NOTE] Package base-threads is already installed (current version is base). [NOTE] Package base-nnp is already installed (current version is base). [NOTE] Package base-effects is already installed (current version is base). [NOTE] Package base-domains is already installed (current version is base). [NOTE] Package base-bigarray is already installed (current version is base). The following system packages will first need to be installed: libffi-devel <><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><> + /usr/bin/sudo "yum" "install" "-y" "libffi-devel" - Last metadata expiration check: 0:00:26 ago on Thu Apr 10 12:50:02 2025. - Dependencies resolved. - ================================================================================ - Package Architecture Version Repository Size - ================================================================================ - Installing: - libffi-devel x86_64 3.4.4-7.fc40 fedora 28 k - - Transaction Summary - ================================================================================ - Install 1 Package - - Total download size: 28 k - Installed size: 33 k - Downloading Packages: - libffi-devel-3.4.4-7.fc40.x86_64.rpm 471 kB/s | 28 kB 00:00 - -------------------------------------------------------------------------------- - Total 117 kB/s | 28 kB 00:00 - Running transaction check - Transaction check succeeded. - Running transaction test - Transaction test succeeded. - Running transaction - Preparing : 1/1 - Installing : libffi-devel-3.4.4-7.fc40.x86_64 1/1 - Running scriptlet: libffi-devel-3.4.4-7.fc40.x86_64 1/1 - - Installed: - libffi-devel-3.4.4-7.fc40.x86_64 - - Complete! 
+ /usr/bin/rpm "-q" "--whatprovides" "libffi-devel" - libffi-devel-3.4.4-7.fc40.x86_64 2025-04-10 12:50.31 ---> saved as "452e4a1b6b7845c14b4d3d35e3b8f0f429c5bb408be23a4f7ecb33c028d0e877" /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS")) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). [NOTE] Package ocaml-config is already installed (current version is 3). [NOTE] Package ocaml-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml is already installed (current version is 5.3.0). [NOTE] Package base-unix is already installed (current version is base). [NOTE] Package base-threads is already installed (current version is base). [NOTE] Package base-nnp is already installed (current version is base). [NOTE] Package base-effects is already installed (current version is base). [NOTE] Package base-domains is already installed (current version is base). [NOTE] Package base-bigarray is already installed (current version is base). The following actions will be performed: === install 74 packages - install angstrom 0.16.1 - install astring 0.8.5 - install backoff 0.1.1 - install base v0.17.1 - install bigarray-compat 1.1.0 - install bigstringaf 0.10.0 - install camlp-streams 5.0.1 - install cmdliner 1.3.0 - install conf-libffi 2.0.0 - install conf-pkg-config 4 - install cppo 1.8.0 - install csexp 1.5.2 - install ctypes 0.23.0 - install ctypes-foreign 0.23.0 - install dune 3.18.0 - install dune-configurator 3.18.0 - install fieldslib v0.17.0 - install fmt 0.10.0 - install integers 0.7.0 - install jane-street-headers v0.17.0 - install jst-config v0.17.0 - install logs 0.8.0 - install mdx 2.5.0 - install mtime 2.1.0 - install multicore-magic 2.3.1 - install num 1.5-1 - install ocaml-compiler-libs v0.17.0 - install ocaml-syntax-shims 1.0.0 - install ocaml-version 4.0.0 - install ocaml_intrinsics_kernel v0.17.1 - install ocamlbuild 0.16.1 - install ocamlfind 1.9.8 - install parsexp v0.17.0 - install ppx_assert v0.17.0 - install ppx_base v0.17.0 - install ppx_cold v0.17.0 - install ppx_compare v0.17.0 - install ppx_derivers 1.2.1 - install ppx_deriving 6.0.3 - install ppx_enumerate v0.17.0 - install ppx_expect v0.17.2 - install ppx_fields_conv v0.17.0 - install ppx_globalize v0.17.0 - install ppx_hash v0.17.0 - install ppx_here v0.17.0 - install ppx_inline_test v0.17.0 - install ppx_minidebug 2.2.0 - install ppx_optcomp v0.17.0 - install ppx_sexp_conv v0.17.0 - install ppx_string v0.17.0 - install ppx_variants_conv v0.17.0 - install ppxlib 0.35.0 - install ppxlib_jane v0.17.2 - install printbox 0.12 - install printbox-ext-plot 0.12 - install printbox-html 0.12 - install printbox-md 0.12 - install printbox-text 0.12 - install ptime 1.2.0 - install re 1.12.0 - install result 1.5 - install saturn_lockfree 0.5.0 - install seq base - install sexplib v0.17.0 - install sexplib0 v0.17.0 - install stdio v0.17.0 - install stdlib-shims 0.3.0 - install thread-local-storage 0.2 - install time_now v0.17.0 - install topkg 1.0.8 - install tyxml 4.6.0 - install uucp 16.0.0 - install uutf 1.0.4 - install variantslib v0.17.0 <><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><> -> retrieved backoff.0.1.1 (cached) -> retrieved astring.0.8.5 (cached) -> retrieved angstrom.0.16.1 (cached) -> retrieved base.v0.17.1 (cached) -> retrieved bigarray-compat.1.1.0 (cached) -> retrieved 
bigstringaf.0.10.0 (cached) -> retrieved camlp-streams.5.0.1 (cached) -> retrieved cmdliner.1.3.0 (cached) -> retrieved cppo.1.8.0 (cached) -> installed conf-pkg-config.4 -> retrieved csexp.1.5.2 (cached) -> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached) -> installed conf-libffi.2.0.0 -> retrieved fieldslib.v0.17.0 (cached) -> retrieved fmt.0.10.0 (cached) -> retrieved integers.0.7.0 (cached) -> retrieved jane-street-headers.v0.17.0 (cached) -> retrieved jst-config.v0.17.0 (cached) -> retrieved logs.0.8.0 (cached) -> retrieved mtime.2.1.0 (cached) -> retrieved multicore-magic.2.3.1 (cached) -> retrieved mdx.2.5.0 (cached) -> retrieved num.1.5-1 (cached) -> retrieved ocaml-compiler-libs.v0.17.0 (cached) -> retrieved ocaml-syntax-shims.1.0.0 (cached) -> retrieved ocaml-version.4.0.0 (cached) -> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached) -> retrieved ocamlbuild.0.16.1 (cached) -> retrieved ocamlfind.1.9.8 (cached) -> retrieved parsexp.v0.17.0 (cached) -> retrieved ppx_assert.v0.17.0 (cached) -> retrieved ppx_base.v0.17.0 (cached) -> retrieved ppx_cold.v0.17.0 (cached) -> retrieved ppx_compare.v0.17.0 (cached) -> retrieved ppx_derivers.1.2.1 (cached) -> retrieved ppx_deriving.6.0.3 (cached) -> retrieved ppx_enumerate.v0.17.0 (cached) -> retrieved ppx_expect.v0.17.2 (cached) -> retrieved ppx_fields_conv.v0.17.0 (cached) -> retrieved dune.3.18.0, dune-configurator.3.18.0 (cached) -> retrieved ppx_globalize.v0.17.0 (cached) -> retrieved ppx_hash.v0.17.0 (cached) -> retrieved ppx_here.v0.17.0 (cached) -> retrieved ppx_inline_test.v0.17.0 (cached) -> retrieved ppx_optcomp.v0.17.0 (cached) -> retrieved ppx_sexp_conv.v0.17.0 (cached) -> retrieved ppx_string.v0.17.0 (cached) -> retrieved ppx_minidebug.2.2.0 (cached) -> installed cmdliner.1.3.0 -> installed num.1.5-1 -> retrieved ppx_variants_conv.v0.17.0 (cached) -> retrieved ppxlib_jane.v0.17.2 (cached) -> retrieved ptime.1.2.0 (cached) -> retrieved ppxlib.0.35.0 (cached) -> retrieved re.1.12.0 (cached) -> retrieved result.1.5 (cached) -> retrieved seq.base (cached) -> installed seq.base -> retrieved sexplib.v0.17.0 (cached) -> retrieved sexplib0.v0.17.0 (cached) -> retrieved saturn_lockfree.0.5.0 (cached) -> retrieved stdio.v0.17.0 (cached) -> retrieved stdlib-shims.0.3.0 (cached) -> retrieved thread-local-storage.0.2 (cached) -> retrieved time_now.v0.17.0 (cached) -> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached) -> retrieved topkg.1.0.8 (cached) -> retrieved tyxml.4.6.0 (cached) -> retrieved uutf.1.0.4 (cached) -> retrieved variantslib.v0.17.0 (cached) -> retrieved uucp.16.0.0 (cached) -> installed ocamlbuild.0.16.1 -> installed ocamlfind.1.9.8 -> installed topkg.1.0.8 -> installed uutf.1.0.4 -> installed mtime.2.1.0 -> installed fmt.0.10.0 -> installed ptime.1.2.0 -> installed astring.0.8.5 -> installed logs.0.8.0 -> installed dune.3.18.0 -> installed jane-street-headers.v0.17.0 -> installed ppx_derivers.1.2.1 -> installed csexp.1.5.2 -> installed backoff.0.1.1 -> installed bigarray-compat.1.1.0 -> installed camlp-streams.5.0.1 -> installed multicore-magic.2.3.1 -> installed ocaml-version.4.0.0 -> installed ocaml_intrinsics_kernel.v0.17.1 -> installed printbox.0.12 -> installed result.1.5 -> installed sexplib0.v0.17.0 -> installed stdlib-shims.0.3.0 -> installed thread-local-storage.0.2 -> installed ocaml-syntax-shims.1.0.0 -> installed ocaml-compiler-libs.v0.17.0 -> installed cppo.1.8.0 -> installed re.1.12.0 -> installed integers.0.7.0 -> installed 
saturn_lockfree.0.5.0 -> installed dune-configurator.3.18.0 -> installed parsexp.v0.17.0 -> installed bigstringaf.0.10.0 -> installed sexplib.v0.17.0 -> installed angstrom.0.16.1 -> installed mdx.2.5.0 -> installed tyxml.4.6.0 -> installed printbox-html.0.12 -> installed uucp.16.0.0 -> installed printbox-text.0.12 -> installed ctypes.0.23.0 -> installed printbox-md.0.12 -> installed printbox-ext-plot.0.12 -> installed base.v0.17.1 -> installed variantslib.v0.17.0 -> installed fieldslib.v0.17.0 -> installed stdio.v0.17.0 -> installed ctypes-foreign.0.23.0 -> installed ppxlib.0.35.0 -> installed ppx_optcomp.v0.17.0 -> installed ppxlib_jane.v0.17.2 -> installed ppx_cold.v0.17.0 -> installed ppx_here.v0.17.0 -> installed ppx_variants_conv.v0.17.0 -> installed ppx_fields_conv.v0.17.0 -> installed ppx_deriving.6.0.3 -> installed ppx_globalize.v0.17.0 -> installed ppx_enumerate.v0.17.0 -> installed ppx_compare.v0.17.0 -> installed ppx_sexp_conv.v0.17.0 -> installed ppx_hash.v0.17.0 -> installed ppx_assert.v0.17.0 -> installed ppx_base.v0.17.0 -> installed ppx_minidebug.2.2.0 -> installed jst-config.v0.17.0 -> installed ppx_string.v0.17.0 -> installed time_now.v0.17.0 -> installed ppx_inline_test.v0.17.0 -> installed ppx_expect.v0.17.2 Done. # To update the current shell environment, run: eval $(opam env) 2025-04-10 12:53.39 ---> saved as "1058e2449499bbe76c41cc2023d98e145be14dfe2e016d935fad8000a2b908d4" /src: (copy (src .) (dst /src)) 2025-04-10 12:53.39 ---> saved as "998eb35cd2b485b90d75442932bcf13e1a4a471e046b6650832a42dd0250e43e" /src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")) (cd _build/default/test_ppx && ./test_ppx_op_expected.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/default/test_ppx && ./test_ppx_op.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test/ocannl_config. 
Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/35ef9ab18ff3dcb4a992aa1342d31755/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d902926e437f2d8e152336add0fe65b6/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/default/test && ./moons_demo_parallel_run.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config. 
Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file ("Set log_level to" 1) └─{orphaned from #2} Retrieving commandline, environment, or config file variable ocannl_backend Found cc, in the config file Retrieving commandline, environment, or config file variable ocannl_ll_ident_style Not found, using default heuristic Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level Not found, using default 3 Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command Not found, using default gcc Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout Not found, using default false Batch=59, step=60, lr=0.199750, batch loss=23.609453, epoch loss=23.609453 Batch=119, step=120, lr=0.199500, batch loss=8.516926, epoch loss=32.126379 Batch=179, step=180, lr=0.199250, batch loss=2.639251, epoch loss=34.765630 Batch=239, step=240, lr=0.199000, batch loss=0.852304, epoch loss=35.617934 Batch=299, step=300, lr=0.198750, batch loss=1.444952, epoch loss=37.062886 Batch=359, step=360, lr=0.198500, batch loss=1.336864, epoch loss=38.399751 Batch=419, step=420, lr=0.198250, batch loss=0.614359, epoch loss=39.014109 Batch=479, step=480, lr=0.198000, batch loss=0.794791, epoch loss=39.808900 Batch=539, step=540, lr=0.197750, batch loss=0.651963, epoch loss=40.460863 Batch=599, step=600, lr=0.197500, batch loss=1.084678, epoch loss=41.545541 Batch=659, step=660, lr=0.197250, batch loss=0.482492, epoch loss=42.028033 Batch=719, step=720, lr=0.197000, batch loss=0.412679, epoch loss=42.440712 Batch=779, step=780, lr=0.196750, batch loss=0.470773, epoch loss=42.911485 Batch=839, step=840, lr=0.196500, batch loss=0.445245, epoch loss=43.356730 Batch=899, step=900, lr=0.196250, batch loss=0.386279, epoch loss=43.743009 Batch=959, step=960, lr=0.196250, batch loss=0.251556, epoch loss=43.994565 Batch=1019, step=1020, lr=0.196000, batch loss=0.471304, epoch loss=44.465869 Batch=1079, step=1080, lr=0.195750, batch loss=0.248082, epoch loss=44.713951 Batch=1139, step=1140, lr=0.195250, batch loss=0.318781, epoch loss=45.032732 Batch=1199, step=1200, lr=0.195000, batch loss=0.266006, epoch loss=45.298738 Epoch=0, step=1200, lr=0.195000, epoch loss=45.298738 Batch=59, step=1260, lr=0.194750, batch loss=0.264453, epoch loss=0.264453 Batch=119, step=1320, lr=0.194500, batch loss=0.206068, epoch loss=0.470521 Batch=179, step=1380, lr=0.194250, batch loss=0.245619, epoch loss=0.716141 Batch=239, step=1440, lr=0.194000, batch loss=0.348740, epoch loss=1.064881 Batch=299, step=1500, lr=0.193750, batch loss=0.241127, epoch loss=1.306008 Batch=359, step=1560, lr=0.193500, batch loss=0.315445, epoch loss=1.621453 Batch=419, step=1620, lr=0.193250, batch loss=0.310192, epoch loss=1.931645 Batch=479, step=1680, lr=0.193250, batch loss=0.277747, epoch loss=2.209392 Batch=539, step=1740, lr=0.193000, batch loss=0.210504, epoch loss=2.419897 Batch=599, step=1800, lr=0.192750, batch loss=0.244113, epoch loss=2.664010 Batch=659, step=1860, lr=0.192250, batch loss=0.363831, epoch loss=3.027841 Batch=719, step=1920, lr=0.192000, batch loss=0.354941, epoch loss=3.382782 Batch=779, step=1980, lr=0.192000, batch loss=0.378550, epoch loss=3.761332 Batch=839, step=2040, lr=0.191750, batch loss=0.341054, epoch loss=4.102386 Batch=899, step=2100, lr=0.191500, batch loss=0.298542, epoch loss=4.400928 Batch=959, step=2160, lr=0.191000, batch loss=0.207640, epoch loss=4.608568 Batch=1019, 
step=2220, lr=0.190750, batch loss=0.334578, epoch loss=4.943146 Batch=1079, step=2280, lr=0.190750, batch loss=0.191913, epoch loss=5.135059 Batch=1139, step=2340, lr=0.190500, batch loss=0.257983, epoch loss=5.393042 Batch=1199, step=2400, lr=0.190250, batch loss=0.211220, epoch loss=5.604262 Epoch=1, step=2400, lr=0.190250, epoch loss=5.604262 Batch=59, step=2460, lr=0.189750, batch loss=0.228894, epoch loss=0.228894 Batch=119, step=2520, lr=0.189750, batch loss=0.201015, epoch loss=0.429909 Batch=179, step=2580, lr=0.189250, batch loss=0.222473, epoch loss=0.652382 Batch=239, step=2640, lr=0.189250, batch loss=0.328699, epoch loss=0.981081 Batch=299, step=2700, lr=0.189000, batch loss=0.209088, epoch loss=1.190168 Batch=359, step=2760, lr=0.188500, batch loss=0.294018, epoch loss=1.484187 Batch=419, step=2820, lr=0.188250, batch loss=0.281814, epoch loss=1.766001 Batch=479, step=2880, lr=0.188000, batch loss=0.256306, epoch loss=2.022307 Batch=539, step=2940, lr=0.188000, batch loss=0.196983, epoch loss=2.219290 Batch=599, step=3000, lr=0.187750, batch loss=0.242345, epoch loss=2.461635 Batch=659, step=3060, lr=0.187500, batch loss=0.349505, epoch loss=2.811140 Batch=719, step=3120, lr=0.187250, batch loss=0.346417, epoch loss=3.157557 Batch=779, step=3180, lr=0.186750, batch loss=0.367436, epoch loss=3.524993 Batch=839, step=3240, lr=0.186750, batch loss=0.322998, epoch loss=3.847991 Batch=899, step=3300, lr=0.186250, batch loss=0.285541, epoch loss=4.133531 Batch=959, step=3360, lr=0.186000, batch loss=0.214168, epoch loss=4.347700 Batch=1019, step=3420, lr=0.186000, batch loss=0.329021, epoch loss=4.676721 Batch=1079, step=3480, lr=0.185750, batch loss=0.202058, epoch loss=4.878778 Batch=1139, step=3540, lr=0.185250, batch loss=0.260634, epoch loss=5.139413 Batch=1199, step=3600, lr=0.185250, batch loss=0.202170, epoch loss=5.341583 Epoch=2, step=3600, lr=0.185250, epoch loss=5.341583 Batch=59, step=3660, lr=0.184750, batch loss=0.224982, epoch loss=0.224982 Batch=119, step=3720, lr=0.184500, batch loss=0.194728, epoch loss=0.419710 Batch=179, step=3780, lr=0.184500, batch loss=0.212434, epoch loss=0.632144 Batch=239, step=3840, lr=0.184250, batch loss=0.315460, epoch loss=0.947603 Batch=299, step=3900, lr=0.184000, batch loss=0.205806, epoch loss=1.153409 Batch=359, step=3960, lr=0.183750, batch loss=0.289800, epoch loss=1.443209 Batch=419, step=4020, lr=0.183500, batch loss=0.280211, epoch loss=1.723420 Batch=479, step=4080, lr=0.183000, batch loss=0.254690, epoch loss=1.978110 Batch=539, step=4140, lr=0.182750, batch loss=0.202715, epoch loss=2.180825 Batch=599, step=4200, lr=0.182750, batch loss=0.242265, epoch loss=2.423089 Batch=659, step=4260, lr=0.182500, batch loss=0.330083, epoch loss=2.753172 Batch=719, step=4320, lr=0.182250, batch loss=0.327882, epoch loss=3.081054 Batch=779, step=4380, lr=0.182000, batch loss=0.352328, epoch loss=3.433382 Batch=839, step=4440, lr=0.181500, batch loss=0.318397, epoch loss=3.751779 Batch=899, step=4500, lr=0.181250, batch loss=0.293711, epoch loss=4.045490 Batch=959, step=4560, lr=0.181000, batch loss=0.241244, epoch loss=4.286733 Batch=1019, step=4620, lr=0.181000, batch loss=0.352392, epoch loss=4.639125 Batch=1079, step=4680, lr=0.180750, batch loss=0.214409, epoch loss=4.853535 Batch=1139, step=4740, lr=0.180500, batch loss=0.253577, epoch loss=5.107112 Batch=1199, step=4800, lr=0.180250, batch loss=0.193086, epoch loss=5.300198 Epoch=3, step=4800, lr=0.180250, epoch loss=5.300198 Batch=59, step=4860, lr=0.180000, batch 
loss=0.220964, epoch loss=0.220964 Batch=119, step=4920, lr=0.179500, batch loss=0.190075, epoch loss=0.411038 Batch=179, step=4980, lr=0.179250, batch loss=0.207172, epoch loss=0.618211 Batch=239, step=5040, lr=0.179250, batch loss=0.310746, epoch loss=0.928957 Batch=299, step=5100, lr=0.178750, batch loss=0.205576, epoch loss=1.134533 Batch=359, step=5160, lr=0.178750, batch loss=0.276963, epoch loss=1.411496 Batch=419, step=5220, lr=0.178500, batch loss=0.273234, epoch loss=1.684730 Batch=479, step=5280, lr=0.178250, batch loss=0.249320, epoch loss=1.934051 Batch=539, step=5340, lr=0.178000, batch loss=0.197832, epoch loss=2.131883 Batch=599, step=5400, lr=0.177500, batch loss=0.240016, epoch loss=2.371899 Batch=659, step=5460, lr=0.177500, batch loss=0.320464, epoch loss=2.692364 Batch=719, step=5520, lr=0.177250, batch loss=0.328840, epoch loss=3.021204 Batch=779, step=5580, lr=0.177000, batch loss=0.348935, epoch loss=3.370139 Batch=839, step=5640, lr=0.176750, batch loss=0.306788, epoch loss=3.676928 Batch=899, step=5700, lr=0.176500, batch loss=0.270764, epoch loss=3.947692 Batch=959, step=5760, lr=0.176000, batch loss=0.219834, epoch loss=4.167525 Batch=1019, step=5820, lr=0.176000, batch loss=0.335934, epoch loss=4.503459 Batch=1079, step=5880, lr=0.175750, batch loss=0.191814, epoch loss=4.695272 Batch=1139, step=5940, lr=0.175500, batch loss=0.224979, epoch loss=4.920251 Batch=1199, step=6000, lr=0.175250, batch loss=0.189524, epoch loss=5.109775 Epoch=4, step=6000, lr=0.175250, epoch loss=5.109775 Batch=59, step=6060, lr=0.175000, batch loss=0.238230, epoch loss=0.238230 Batch=119, step=6120, lr=0.174750, batch loss=0.189949, epoch loss=0.428178 Batch=179, step=6180, lr=0.174500, batch loss=0.201120, epoch loss=0.629298 Batch=239, step=6240, lr=0.174000, batch loss=0.300279, epoch loss=0.929578 Batch=299, step=6300, lr=0.174000, batch loss=0.206999, epoch loss=1.136577 Batch=359, step=6360, lr=0.173750, batch loss=0.267150, epoch loss=1.403726 Batch=419, step=6420, lr=0.173500, batch loss=0.267542, epoch loss=1.671268 Batch=479, step=6480, lr=0.173250, batch loss=0.242572, epoch loss=1.913840 Batch=539, step=6540, lr=0.173000, batch loss=0.195895, epoch loss=2.109735 Batch=599, step=6600, lr=0.172750, batch loss=0.233937, epoch loss=2.343672 Batch=659, step=6660, lr=0.172250, batch loss=0.314178, epoch loss=2.657849 Batch=719, step=6720, lr=0.172250, batch loss=0.319359, epoch loss=2.977208 Batch=779, step=6780, lr=0.172000, batch loss=0.341195, epoch loss=3.318403 Batch=839, step=6840, lr=0.171750, batch loss=0.299640, epoch loss=3.618042 Batch=899, step=6900, lr=0.171500, batch loss=0.263119, epoch loss=3.881161 Batch=959, step=6960, lr=0.171250, batch loss=0.199181, epoch loss=4.080342 Batch=1019, step=7020, lr=0.170750, batch loss=0.292450, epoch loss=4.372792 Batch=1079, step=7080, lr=0.170750, batch loss=0.185516, epoch loss=4.558309 Batch=1139, step=7140, lr=0.170500, batch loss=0.217321, epoch loss=4.775630 Batch=1199, step=7200, lr=0.170250, batch loss=0.181689, epoch loss=4.957319 Epoch=5, step=7200, lr=0.170250, epoch loss=4.957319 Batch=59, step=7260, lr=0.170000, batch loss=0.228376, epoch loss=0.228376 Batch=119, step=7320, lr=0.169500, batch loss=0.188704, epoch loss=0.417080 Batch=179, step=7380, lr=0.169500, batch loss=0.194917, epoch loss=0.611997 Batch=239, step=7440, lr=0.169250, batch loss=0.288800, epoch loss=0.900797 Batch=299, step=7500, lr=0.169000, batch loss=0.194540, epoch loss=1.095338 Batch=359, step=7560, lr=0.168750, batch loss=0.258459, epoch 
loss=1.353797 Batch=419, step=7620, lr=0.168500, batch loss=0.258324, epoch loss=1.612121 Batch=479, step=7680, lr=0.168250, batch loss=0.235589, epoch loss=1.847710 Batch=539, step=7740, lr=0.168000, batch loss=0.190216, epoch loss=2.037927 Batch=599, step=7800, lr=0.167750, batch loss=0.230524, epoch loss=2.268451 Batch=659, step=7860, lr=0.167500, batch loss=0.304480, epoch loss=2.572931 Batch=719, step=7920, lr=0.167250, batch loss=0.310167, epoch loss=2.883098 Batch=779, step=7980, lr=0.167000, batch loss=0.328295, epoch loss=3.211393 Batch=839, step=8040, lr=0.166750, batch loss=0.293168, epoch loss=3.504560 Batch=899, step=8100, lr=0.166500, batch loss=0.262246, epoch loss=3.766806 Batch=959, step=8160, lr=0.166250, batch loss=0.196505, epoch loss=3.963311 Batch=1019, step=8220, lr=0.166000, batch loss=0.326464, epoch loss=4.289776 Batch=1079, step=8280, lr=0.165750, batch loss=0.194768, epoch loss=4.484544 Batch=1139, step=8340, lr=0.165500, batch loss=0.224093, epoch loss=4.708637 Batch=1199, step=8400, lr=0.165250, batch loss=0.174131, epoch loss=4.882768 Epoch=6, step=8400, lr=0.165250, epoch loss=4.882768 Batch=59, step=8460, lr=0.165000, batch loss=0.209127, epoch loss=0.209127 Batch=119, step=8520, lr=0.164750, batch loss=0.174742, epoch loss=0.383869 Batch=179, step=8580, lr=0.164250, batch loss=0.188439, epoch loss=0.572308 Batch=239, step=8640, lr=0.164250, batch loss=0.278685, epoch loss=0.850993 Batch=299, step=8700, lr=0.163750, batch loss=0.193882, epoch loss=1.044876 Batch=359, step=8760, lr=0.163750, batch loss=0.255454, epoch loss=1.300330 Batch=419, step=8820, lr=0.163500, batch loss=0.244876, epoch loss=1.545206 Batch=479, step=8880, lr=0.163250, batch loss=0.229948, epoch loss=1.775154 Batch=539, step=8940, lr=0.163000, batch loss=0.177663, epoch loss=1.952817 Batch=599, step=9000, lr=0.162750, batch loss=0.219563, epoch loss=2.172380 Batch=659, step=9060, lr=0.162250, batch loss=0.293830, epoch loss=2.466209 Batch=719, step=9120, lr=0.162000, batch loss=0.297303, epoch loss=2.763512 Batch=779, step=9180, lr=0.162000, batch loss=0.315602, epoch loss=3.079114 Batch=839, step=9240, lr=0.161500, batch loss=0.281923, epoch loss=3.361037 Batch=899, step=9300, lr=0.161500, batch loss=0.253152, epoch loss=3.614189 Batch=959, step=9360, lr=0.161000, batch loss=0.199550, epoch loss=3.813739 Batch=1019, step=9420, lr=0.161000, batch loss=0.310234, epoch loss=4.123973 Batch=1079, step=9480, lr=0.160500, batch loss=0.197017, epoch loss=4.320990 Batch=1139, step=9540, lr=0.160250, batch loss=0.214856, epoch loss=4.535846 Batch=1199, step=9600, lr=0.160250, batch loss=0.167646, epoch loss=4.703492 Epoch=7, step=9600, lr=0.160250, epoch loss=4.703492 Batch=59, step=9660, lr=0.159750, batch loss=0.214478, epoch loss=0.214478 Batch=119, step=9720, lr=0.159500, batch loss=0.170018, epoch loss=0.384496 Batch=179, step=9780, lr=0.159500, batch loss=0.179212, epoch loss=0.563708 Batch=239, step=9840, lr=0.159000, batch loss=0.262797, epoch loss=0.826505 Batch=299, step=9900, lr=0.158750, batch loss=0.183345, epoch loss=1.009851 Batch=359, step=9960, lr=0.158750, batch loss=0.241265, epoch loss=1.251115 Batch=419, step=10020, lr=0.158250, batch loss=0.232547, epoch loss=1.483663 Batch=479, step=10080, lr=0.158250, batch loss=0.215558, epoch loss=1.699220 Batch=539, step=10140, lr=0.158000, batch loss=0.170999, epoch loss=1.870219 Batch=599, step=10200, lr=0.157750, batch loss=0.203587, epoch loss=2.073806 Batch=659, step=10260, lr=0.157500, batch loss=0.281665, epoch loss=2.355472 
Batch=719, step=10320, lr=0.157250, batch loss=0.286174, epoch loss=2.641645 Batch=779, step=10380, lr=0.157000, batch loss=0.295267, epoch loss=2.936913 Batch=839, step=10440, lr=0.156500, batch loss=0.267223, epoch loss=3.204135 Batch=899, step=10500, lr=0.156500, batch loss=0.245072, epoch loss=3.449207 Batch=959, step=10560, lr=0.156250, batch loss=0.194486, epoch loss=3.643693 Batch=1019, step=10620, lr=0.155750, batch loss=0.280734, epoch loss=3.924427 Batch=1079, step=10680, lr=0.155750, batch loss=0.168662, epoch loss=4.093089 Batch=1139, step=10740, lr=0.155500, batch loss=0.195192, epoch loss=4.288281 Batch=1199, step=10800, lr=0.155250, batch loss=0.154556, epoch loss=4.442838 Epoch=8, step=10800, lr=0.155250, epoch loss=4.442838 Batch=59, step=10860, lr=0.155000, batch loss=0.175887, epoch loss=0.175887 Batch=119, step=10920, lr=0.154500, batch loss=0.147305, epoch loss=0.323192 Batch=179, step=10980, lr=0.154500, batch loss=0.165999, epoch loss=0.489191 Batch=239, step=11040, lr=0.154250, batch loss=0.242845, epoch loss=0.732036 Batch=299, step=11100, lr=0.154000, batch loss=0.169973, epoch loss=0.902009 Batch=359, step=11160, lr=0.153750, batch loss=0.223966, epoch loss=1.125975 Batch=419, step=11220, lr=0.153250, batch loss=0.227023, epoch loss=1.352999 Batch=479, step=11280, lr=0.153250, batch loss=0.206554, epoch loss=1.559553 Batch=539, step=11340, lr=0.153000, batch loss=0.157558, epoch loss=1.717111 Batch=599, step=11400, lr=0.152750, batch loss=0.182782, epoch loss=1.899893 Batch=659, step=11460, lr=0.152250, batch loss=0.261940, epoch loss=2.161833 Batch=719, step=11520, lr=0.152250, batch loss=0.259020, epoch loss=2.420853 Batch=779, step=11580, lr=0.151750, batch loss=0.270824, epoch loss=2.691677 Batch=839, step=11640, lr=0.151750, batch loss=0.250764, epoch loss=2.942441 Batch=899, step=11700, lr=0.151250, batch loss=0.218530, epoch loss=3.160971 Batch=959, step=11760, lr=0.151000, batch loss=0.172685, epoch loss=3.333656 Batch=1019, step=11820, lr=0.150750, batch loss=0.262814, epoch loss=3.596470 Batch=1079, step=11880, lr=0.150750, batch loss=0.159122, epoch loss=3.755592 Batch=1139, step=11940, lr=0.150250, batch loss=0.192684, epoch loss=3.948276 Batch=1199, step=12000, lr=0.150250, batch loss=0.140138, epoch loss=4.088414 Epoch=9, step=12000, lr=0.150250, epoch loss=4.088414 Batch=59, step=12060, lr=0.149750, batch loss=0.163834, epoch loss=0.163834 Batch=119, step=12120, lr=0.149500, batch loss=0.135904, epoch loss=0.299738 Batch=179, step=12180, lr=0.149250, batch loss=0.151026, epoch loss=0.450764 Batch=239, step=12240, lr=0.149000, batch loss=0.219188, epoch loss=0.669953 Batch=299, step=12300, lr=0.148750, batch loss=0.144762, epoch loss=0.814715 Batch=359, step=12360, lr=0.148750, batch loss=0.196755, epoch loss=1.011470 Batch=419, step=12420, lr=0.148500, batch loss=0.207948, epoch loss=1.219419 Batch=479, step=12480, lr=0.148000, batch loss=0.179999, epoch loss=1.399418 Batch=539, step=12540, lr=0.147750, batch loss=0.143736, epoch loss=1.543154 Batch=599, step=12600, lr=0.147500, batch loss=0.150542, epoch loss=1.693695 Batch=659, step=12660, lr=0.147500, batch loss=0.227046, epoch loss=1.920741 Batch=719, step=12720, lr=0.147000, batch loss=0.234908, epoch loss=2.155649 Batch=779, step=12780, lr=0.147000, batch loss=0.257691, epoch loss=2.413340 Batch=839, step=12840, lr=0.146750, batch loss=0.235857, epoch loss=2.649197 Batch=899, step=12900, lr=0.146500, batch loss=0.223824, epoch loss=2.873020 Batch=959, step=12960, lr=0.146250, batch 
loss=0.148641, epoch loss=3.021662 Batch=1019, step=13020, lr=0.146000, batch loss=0.270369, epoch loss=3.292031 Batch=1079, step=13080, lr=0.145750, batch loss=0.110327, epoch loss=3.402357 Batch=1139, step=13140, lr=0.145500, batch loss=0.147169, epoch loss=3.549526 Batch=1199, step=13200, lr=0.145250, batch loss=0.118415, epoch loss=3.667942 Epoch=10, step=13200, lr=0.145250, epoch loss=3.667942 Batch=59, step=13260, lr=0.145000, batch loss=0.135879, epoch loss=0.135879 Batch=119, step=13320, lr=0.144750, batch loss=0.114692, epoch loss=0.250571 Batch=179, step=13380, lr=0.144250, batch loss=0.127793, epoch loss=0.378365 Batch=239, step=13440, lr=0.144250, batch loss=0.184291, epoch loss=0.562656 Batch=299, step=13500, lr=0.144000, batch loss=0.121483, epoch loss=0.684139 Batch=359, step=13560, lr=0.143750, batch loss=0.162993, epoch loss=0.847133 Batch=419, step=13620, lr=0.143500, batch loss=0.161071, epoch loss=1.008204 Batch=479, step=13680, lr=0.143250, batch loss=0.148794, epoch loss=1.156998 Batch=539, step=13740, lr=0.143000, batch loss=0.118580, epoch loss=1.275577 Batch=599, step=13800, lr=0.142750, batch loss=0.121812, epoch loss=1.397390 Batch=659, step=13860, lr=0.142500, batch loss=0.180898, epoch loss=1.578288 Batch=719, step=13920, lr=0.142000, batch loss=0.194895, epoch loss=1.773183 Batch=779, step=13980, lr=0.142000, batch loss=0.226557, epoch loss=1.999739 Batch=839, step=14040, lr=0.141750, batch loss=0.205436, epoch loss=2.205175 Batch=899, step=14100, lr=0.141500, batch loss=0.226698, epoch loss=2.431873 Batch=959, step=14160, lr=0.141250, batch loss=0.096946, epoch loss=2.528819 Batch=1019, step=14220, lr=0.140750, batch loss=0.191024, epoch loss=2.719843 Batch=1079, step=14280, lr=0.140750, batch loss=0.079022, epoch loss=2.798865 Batch=1139, step=14340, lr=0.140250, batch loss=0.121854, epoch loss=2.920719 Batch=1199, step=14400, lr=0.140250, batch loss=0.087257, epoch loss=3.007976 Epoch=11, step=14400, lr=0.140250, epoch loss=3.007976 Batch=59, step=14460, lr=0.140000, batch loss=0.109776, epoch loss=0.109776 Batch=119, step=14520, lr=0.139750, batch loss=0.104947, epoch loss=0.214723 Batch=179, step=14580, lr=0.139500, batch loss=0.102402, epoch loss=0.317125 Batch=239, step=14640, lr=0.139000, batch loss=0.143825, epoch loss=0.460950 Batch=299, step=14700, lr=0.139000, batch loss=0.077174, epoch loss=0.538124 Batch=359, step=14760, lr=0.138750, batch loss=0.121792, epoch loss=0.659916 Batch=419, step=14820, lr=0.138500, batch loss=0.131358, epoch loss=0.791275 Batch=479, step=14880, lr=0.138250, batch loss=0.103806, epoch loss=0.895080 Batch=539, step=14940, lr=0.138000, batch loss=0.105725, epoch loss=1.000806 Batch=599, step=15000, lr=0.137750, batch loss=0.085086, epoch loss=1.085892 Batch=659, step=15060, lr=0.137500, batch loss=0.133565, epoch loss=1.219457 Batch=719, step=15120, lr=0.137000, batch loss=0.145717, epoch loss=1.365174 Batch=779, step=15180, lr=0.137000, batch loss=0.217295, epoch loss=1.582469 Batch=839, step=15240, lr=0.136750, batch loss=0.149612, epoch loss=1.732081 Batch=899, step=15300, lr=0.136500, batch loss=0.149077, epoch loss=1.881158 Batch=959, step=15360, lr=0.136250, batch loss=0.092395, epoch loss=1.973554 Batch=1019, step=15420, lr=0.135750, batch loss=0.165357, epoch loss=2.138911 Batch=1079, step=15480, lr=0.135750, batch loss=0.042891, epoch loss=2.181802 Batch=1139, step=15540, lr=0.135500, batch loss=0.103678, epoch loss=2.285480 Batch=1199, step=15600, lr=0.135000, batch loss=0.060572, epoch loss=2.346052 Epoch=12, 
step=15600, lr=0.135000, epoch loss=2.346052
Batch=59, step=15660, lr=0.134750, batch loss=0.084285, epoch loss=0.084285
Batch=119, step=15720, lr=0.134750, batch loss=0.138422, epoch loss=0.222707
Batch=179, step=15780, lr=0.134250, batch loss=0.097450, epoch loss=0.320157
Batch=239, step=15840, lr=0.134000, batch loss=0.098120, epoch loss=0.418276
Batch=299, step=15900, lr=0.134000, batch loss=0.046822, epoch loss=0.465098
Batch=359, step=15960, lr=0.133750, batch loss=0.087479, epoch loss=0.552577
Batch=419, step=16020, lr=0.133500, batch loss=0.079529, epoch loss=0.632106
Batch=479, step=16080, lr=0.133250, batch loss=0.060282, epoch loss=0.692388
Batch=539, step=16140, lr=0.132750, batch loss=0.065818, epoch loss=0.758207
Batch=599, step=16200, lr=0.132750, batch loss=0.149648, epoch loss=0.907855
Batch=659, step=16260, lr=0.132500, batch loss=0.088749, epoch loss=0.996604
Batch=719, step=16320, lr=0.132250, batch loss=0.132933, epoch loss=1.129537
Batch=779, step=16380, lr=0.132000, batch loss=0.289484, epoch loss=1.419021
Batch=839, step=16440, lr=0.131500, batch loss=0.092561, epoch loss=1.511582
Batch=899, step=16500, lr=0.131500, batch loss=0.079929, epoch loss=1.591511
Batch=959, step=16560, lr=0.131250, batch loss=0.031972, epoch loss=1.623483
Batch=1019, step=16620, lr=0.131000, batch loss=0.064491, epoch loss=1.687974
Batch=1079, step=16680, lr=0.130750, batch loss=0.056073, epoch loss=1.744048
Batch=1139, step=16740, lr=0.130500, batch loss=0.107894, epoch loss=1.851942
Batch=1199, step=16800, lr=0.130250, batch loss=0.050921, epoch loss=1.902863
Epoch=13, step=16800, lr=0.130250, epoch loss=1.902863
Batch=59, step=16860, lr=0.130000, batch loss=0.034918, epoch loss=0.034918
Batch=119, step=16920, lr=0.129750, batch loss=0.034912, epoch loss=0.069830
Batch=179, step=16980, lr=0.129500, batch loss=0.043940, epoch loss=0.113770
Batch=239, step=17040, lr=0.129000, batch loss=0.061417, epoch loss=0.175188
Batch=299, step=17100, lr=0.128750, batch loss=0.025944, epoch loss=0.201132
Batch=359, step=17160, lr=0.128750, batch loss=0.050562, epoch loss=0.251694
Batch=419, step=17220, lr=0.128500, batch loss=0.076765, epoch loss=0.328458
Batch=479, step=17280, lr=0.128000, batch loss=0.023554, epoch loss=0.352012
Batch=539, step=17340, lr=0.127750, batch loss=0.027885, epoch loss=0.379897
Batch=599, step=17400, lr=0.127750, batch loss=0.040090, epoch loss=0.419987
Batch=659, step=17460, lr=0.127250, batch loss=0.055267, epoch loss=0.475253
Batch=719, step=17520, lr=0.127000, batch loss=0.072693, epoch loss=0.547946
Batch=779, step=17580, lr=0.127000, batch loss=0.066114, epoch loss=0.614060
Batch=839, step=17640, lr=0.126500, batch loss=0.099476, epoch loss=0.713536
Batch=899, step=17700, lr=0.126250, batch loss=0.050101, epoch loss=0.763637
Batch=959, step=17760, lr=0.126250, batch loss=0.019983, epoch loss=0.783620
Batch=1019, step=17820, lr=0.125750, batch loss=0.053735, epoch loss=0.837355
Batch=1079, step=17880, lr=0.125750, batch loss=0.043980, epoch loss=0.881335
Batch=1139, step=17940, lr=0.125250, batch loss=0.075320, epoch loss=0.956654
Batch=1199, step=18000, lr=0.125250, batch loss=0.024008, epoch loss=0.980663
Epoch=14, step=18000, lr=0.125250, epoch loss=0.980663
Batch=59, step=18060, lr=0.125000, batch loss=0.017627, epoch loss=0.017627
Batch=119, step=18120, lr=0.124500, batch loss=0.020088, epoch loss=0.037715
Batch=179, step=18180, lr=0.124500, batch loss=0.029650, epoch loss=0.067365
Batch=239, step=18240, lr=0.124250, batch loss=0.042186, epoch loss=0.109550
Batch=299, step=18300, lr=0.124000, batch loss=0.018452, epoch loss=0.128002
Batch=359, step=18360, lr=0.123750, batch loss=0.025953, epoch loss=0.153955
Batch=419, step=18420, lr=0.123500, batch loss=0.033507, epoch loss=0.187462
Batch=479, step=18480, lr=0.123250, batch loss=0.024826, epoch loss=0.212289
Batch=539, step=18540, lr=0.123000, batch loss=0.052828, epoch loss=0.265116
Batch=599, step=18600, lr=0.122750, batch loss=0.027461, epoch loss=0.292577
Batch=659, step=18660, lr=0.122500, batch loss=0.033340, epoch loss=0.325917
Batch=719, step=18720, lr=0.122250, batch loss=0.039578, epoch loss=0.365495
Batch=779, step=18780, lr=0.121750, batch loss=0.113491, epoch loss=0.478986
Batch=839, step=18840, lr=0.121750, batch loss=0.053095, epoch loss=0.532081
Batch=899, step=18900, lr=0.121250, batch loss=0.049591, epoch loss=0.581672
Batch=959, step=18960, lr=0.121250, batch loss=0.013587, epoch loss=0.595259
Batch=1019, step=19020, lr=0.121000, batch loss=0.016604, epoch loss=0.611863
Batch=1079, step=19080, lr=0.120500, batch loss=0.007664, epoch loss=0.619527
Batch=1139, step=19140, lr=0.120500, batch loss=0.024226, epoch loss=0.643753
Batch=1199, step=19200, lr=0.120250, batch loss=0.009961, epoch loss=0.653714
Epoch=15, step=19200, lr=0.120250, epoch loss=0.653714
Batch=59, step=19260, lr=0.120000, batch loss=0.005146, epoch loss=0.005146
Batch=119, step=19320, lr=0.119750, batch loss=0.018706, epoch loss=0.023852
Batch=179, step=19380, lr=0.119250, batch loss=0.042779, epoch loss=0.066631
Batch=239, step=19440, lr=0.119250, batch loss=0.021800, epoch loss=0.088431
Batch=299, step=19500, lr=0.119000, batch loss=0.005189, epoch loss=0.093619
Batch=359, step=19560, lr=0.118750, batch loss=0.017323, epoch loss=0.110943
Batch=419, step=19620, lr=0.118500, batch loss=0.021137, epoch loss=0.132080
Batch=479, step=19680, lr=0.118250, batch loss=0.006424, epoch loss=0.138504
Batch=539, step=19740, lr=0.117750, batch loss=0.017294, epoch loss=0.155798
Batch=599, step=19800, lr=0.117750, batch loss=0.023126, epoch loss=0.178923
Batch=659, step=19860, lr=0.117500, batch loss=0.018875, epoch loss=0.197799
Batch=719, step=19920, lr=0.117000, batch loss=0.050398, epoch loss=0.248197
Batch=779, step=19980, lr=0.116750, batch loss=0.079443, epoch loss=0.327640
Batch=839, step=20040, lr=0.116500, batch loss=0.032322, epoch loss=0.359963
Batch=899, step=20100, lr=0.116500, batch loss=0.029845, epoch loss=0.389807
Batch=959, step=20160, lr=0.116250, batch loss=0.015936, epoch loss=0.405743
Batch=1019, step=20220, lr=0.116000, batch loss=0.019130, epoch loss=0.424873
Batch=1079, step=20280, lr=0.115750, batch loss=0.005500, epoch loss=0.430372
Batch=1139, step=20340, lr=0.115500, batch loss=0.018962, epoch loss=0.449334
Batch=1199, step=20400, lr=0.115250, batch loss=0.006841, epoch loss=0.456175
Epoch=16, step=20400, lr=0.115250, epoch loss=0.456175
Batch=59, step=20460, lr=0.115000, batch loss=0.002977, epoch loss=0.002977
Batch=119, step=20520, lr=0.114750, batch loss=0.010591, epoch loss=0.013568
Batch=179, step=20580, lr=0.114500, batch loss=0.026064, epoch loss=0.039633
Batch=239, step=20640, lr=0.114250, batch loss=0.015361, epoch loss=0.054994
Batch=299, step=20700, lr=0.114000, batch loss=0.003187, epoch loss=0.058181
Batch=359, step=20760, lr=0.113750, batch loss=0.012921, epoch loss=0.071102
Batch=419, step=20820, lr=0.113500, batch loss=0.015959, epoch loss=0.087061
Batch=479, step=20880, lr=0.113250, batch loss=0.003643, epoch loss=0.090704
Batch=539, step=20940, lr=0.113000, batch loss=0.015940, epoch loss=0.106644
Batch=599, step=21000, lr=0.112750, batch loss=0.018137, epoch loss=0.124781
Batch=659, step=21060, lr=0.112500, batch loss=0.014863, epoch loss=0.139644
Batch=719, step=21120, lr=0.112250, batch loss=0.044549, epoch loss=0.184193
Batch=779, step=21180, lr=0.112000, batch loss=0.071814, epoch loss=0.256007
Batch=839, step=21240, lr=0.111750, batch loss=0.026059, epoch loss=0.282066
Batch=899, step=21300, lr=0.111250, batch loss=0.027621, epoch loss=0.309687
Batch=959, step=21360, lr=0.111250, batch loss=0.009860, epoch loss=0.319547
Batch=1019, step=21420, lr=0.111000, batch loss=0.011366, epoch loss=0.330913
Batch=1079, step=21480, lr=0.110750, batch loss=0.001243, epoch loss=0.332155
Batch=1139, step=21540, lr=0.110500, batch loss=0.013069, epoch loss=0.345225
Batch=1199, step=21600, lr=0.110250, batch loss=0.005396, epoch loss=0.350621
Epoch=17, step=21600, lr=0.110250, epoch loss=0.350621
Batch=59, step=21660, lr=0.110000, batch loss=0.002382, epoch loss=0.002382
Batch=119, step=21720, lr=0.109750, batch loss=0.006695, epoch loss=0.009077
Batch=179, step=21780, lr=0.109500, batch loss=0.013243, epoch loss=0.022320
Batch=239, step=21840, lr=0.109000, batch loss=0.009391, epoch loss=0.031711
Batch=299, step=21900, lr=0.108750, batch loss=0.010904, epoch loss=0.042615
Batch=359, step=21960, lr=0.108500, batch loss=0.014108, epoch loss=0.056724
Batch=419, step=22020, lr=0.108250, batch loss=0.012443, epoch loss=0.069167
Batch=479, step=22080, lr=0.108000, batch loss=0.002724, epoch loss=0.071891
Batch=539, step=22140, lr=0.108000, batch loss=0.017798, epoch loss=0.089689
Batch=599, step=22200, lr=0.107750, batch loss=0.017236, epoch loss=0.106925
Batch=659, step=22260, lr=0.107500, batch loss=0.014446, epoch loss=0.121371
Batch=719, step=22320, lr=0.107250, batch loss=0.027094, epoch loss=0.148464
Batch=779, step=22380, lr=0.107000, batch loss=0.043396, epoch loss=0.191860
Batch=839, step=22440, lr=0.106750, batch loss=0.022147, epoch loss=0.214007
Batch=899, step=22500, lr=0.106500, batch loss=0.022365, epoch loss=0.236372
Batch=959, step=22560, lr=0.106250, batch loss=0.010646, epoch loss=0.247018
Batch=1019, step=22620, lr=0.106000, batch loss=0.009877, epoch loss=0.256895
Batch=1079, step=22680, lr=0.105750, batch loss=0.000537, epoch loss=0.257433
Batch=1139, step=22740, lr=0.105500, batch loss=0.010764, epoch loss=0.268197
Batch=1199, step=22800, lr=0.105250, batch loss=0.004288, epoch loss=0.272484
Epoch=18, step=22800, lr=0.105250, epoch loss=0.272484
Batch=59, step=22860, lr=0.105000, batch loss=0.001190, epoch loss=0.001190
Batch=119, step=22920, lr=0.104750, batch loss=0.005675, epoch loss=0.006865
Batch=179, step=22980, lr=0.104500, batch loss=0.010298, epoch loss=0.017163
Batch=239, step=23040, lr=0.104250, batch loss=0.008619, epoch loss=0.025782
Batch=299, step=23100, lr=0.104000, batch loss=0.007478, epoch loss=0.033261
Batch=359, step=23160, lr=0.103750, batch loss=0.013448, epoch loss=0.046709
Batch=419, step=23220, lr=0.103500, batch loss=0.010909, epoch loss=0.057617
Batch=479, step=23280, lr=0.103250, batch loss=0.002489, epoch loss=0.060106
Batch=539, step=23340, lr=0.103000, batch loss=0.017335, epoch loss=0.077441
Batch=599, step=23400, lr=0.102750, batch loss=0.013843, epoch loss=0.091283
Batch=659, step=23460, lr=0.102250, batch loss=0.010336, epoch loss=0.101620
Batch=719, step=23520, lr=0.102000, batch loss=0.014519, epoch loss=0.116139
Batch=779, step=23580, lr=0.102000, batch loss=0.021808, epoch loss=0.137947
Batch=839, step=23640, lr=0.101750, batch loss=0.027862, epoch loss=0.165810
Batch=899, step=23700, lr=0.101500, batch loss=0.022156, epoch loss=0.187965
Batch=959, step=23760, lr=0.101250, batch loss=0.010344, epoch loss=0.198309
Batch=1019, step=23820, lr=0.101000, batch loss=0.008153, epoch loss=0.206463
Batch=1079, step=23880, lr=0.100750, batch loss=0.000416, epoch loss=0.206878
Batch=1139, step=23940, lr=0.100500, batch loss=0.009855, epoch loss=0.216733
Batch=1199, step=24000, lr=0.100250, batch loss=0.004658, epoch loss=0.221391
Epoch=19, step=24000, lr=0.100250, epoch loss=0.221391
Half-moons scatterplot and decision boundary:
[ASCII plot, 100x40 character grid: the half-moon data points, drawn as '#' and '%', over the class regions predicted by the trained network, drawn as '*' and '.']
2025-04-10 12:55.00 ---> saved as "ab2dedfa7fccf0632fa7b2c8da712d98137a5b4b635fffd9f2cb4640c281b344"
Job succeeded
2025-04-10 12:55.01: Job succeeded
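Note on the training output above: its pattern is simple. The global step advances once per mini-batch, a metrics line is printed every 60 batches (1200 batches per epoch), the reported epoch loss is the running sum of the logged batch losses, and the learning rate shrinks by small increments as training proceeds. The OCaml sketch below reproduces only that logging pattern; it does not use ocannl's training API. The 1200-batch epoch and 60-batch logging interval are read off the log, while the linear decay schedule quantized to 0.00025 and the random batch_loss stand-in are illustrative assumptions.

(* Hypothetical sketch of the logging loop, not ocannl's API. *)
let () =
  let batches_per_epoch = 1200 and log_every = 60 in
  let lr_of_step step =
    (* Assumed schedule: linear decay from 0.2 towards 0.0 over 48_000 steps,
       quantized to multiples of 0.00025; inferred from the log, not confirmed. *)
    let raw = 0.2 *. (1. -. (float_of_int step /. 48_000.)) in
    Float.round (raw /. 0.00025) *. 0.00025
  in
  (* Stand-in for the real per-batch loss computation. *)
  let batch_loss () = Random.float 0.1 in
  let step = ref (13 * batches_per_epoch) in
  for epoch = 13 to 19 do
    let epoch_loss = ref 0.0 in
    for batch = 0 to batches_per_epoch - 1 do
      incr step;
      if (batch + 1) mod log_every = 0 then begin
        (* Epoch loss accumulates the logged batch losses, as in the output above. *)
        let loss = batch_loss () in
        epoch_loss := !epoch_loss +. loss;
        Printf.printf
          "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
          batch !step (lr_of_step !step) loss !epoch_loss
      end
    done;
    Printf.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n"
      epoch !step (lr_of_step !step) !epoch_loss
  done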