2025-03-20 22:29.24: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (1a4d0ebb2cb2be9bf8422ae739357d8626151cd1) (linux-x86_64:fedora-40-5.3_opam-2.3)
Base: ocaml/opam:fedora-40-ocaml-5.3@sha256:9a3e2a687015db2704b91431286f4f84f9c6e8d286761ed5417eadf8288bcc9e
Opam project build
To reproduce locally:
git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 1a4d0ebb
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:fedora-40-ocaml-5.3@sha256:9a3e2a687015db2704b91431286f4f84f9c6e8d286761ed5417eadf8288bcc9e
# fedora-40-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo dnf install -y findutils
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
    opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK 2025-03-20 22:29.24: Using cache hint "ahrefs/ocannl-ocaml/opam:fedora-40-ocaml-5.3@sha256:9a3e2a687015db2704b91431286f4f84f9c6e8d286761ed5417eadf8288bcc9e-fedora-40-5.3_opam-2.3-3fcdf15be1e8f7dcae915b4cdb940fd5" 2025-03-20 22:29.24: Using OBuilder spec: ((from ocaml/opam:fedora-40-ocaml-5.3@sha256:9a3e2a687015db2704b91431286f4f84f9c6e8d286761ed5417eadf8288bcc9e) (comment fedora-40-5.3_opam-2.3) (user (uid 1000) (gid 1000)) (env CLICOLOR_FORCE 1) (env OPAMCOLOR always) (workdir /src) (run (network host) (shell "sudo dnf install -y findutils")) (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam")) (run (shell "opam init --reinit -ni")) (run (shell "uname -rs && opam exec -- ocaml -version && opam --version")) (workdir /src) (run (shell "sudo chown opam /src")) (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u")) (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./)) (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'")) (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'")) (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0") (env CI true) (env OCAMLCI true) (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS")) (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS")) (copy (src .) 
(dst /src)) (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")) ) 2025-03-20 22:29.24: Waiting for resource in pool OCluster 2025-03-20 22:29.24: Waiting for worker… 2025-03-20 22:31.41: Got resource from pool OCluster Building on toxis.caelum.ci.dev All commits already cached HEAD is now at 1a4d0ebb fPIC for cc: but only openSUSE complained https://ocaml.ci.dev/github/ahrefs/ocannl/commit/ccaf459c55f1e1dab014a65af54e1ba2ec3b9ad0/variant/opensuse-15.6-5.3_opam-2.3 (from ocaml/opam:fedora-40-ocaml-5.3@sha256:9a3e2a687015db2704b91431286f4f84f9c6e8d286761ed5417eadf8288bcc9e) 2025-03-20 22:31.41 ---> using "1476c452edd56a53215a301540bb2009c8d2a15dba423047d40300e18599671f" from cache /: (comment fedora-40-5.3_opam-2.3) /: (user (uid 1000) (gid 1000)) /: (env CLICOLOR_FORCE 1) /: (env OPAMCOLOR always) /: (workdir /src) /src: (run (network host) (shell "sudo dnf install -y findutils")) Fedora 40 - x86_64 87 kB/s | 27 kB 00:00 Fedora 40 - x86_64 - Updates 181 kB/s | 23 kB 00:00 Fedora 40 - x86_64 - Updates 4.0 MB/s | 5.7 MB 00:01 Package findutils-1:4.9.0-9.fc40.x86_64 is already installed. Dependencies resolved. Nothing to do. Complete! 2025-03-20 22:31.41 ---> using "94cbd80bcc330ce623803fbb520a85cab1400e88c96764ea5c2e02cd7e19ad53" from cache /src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam")) 2025-03-20 22:31.41 ---> using "3cf8028e21344054f79e658239efeb83a6d7aca5695c51333e87cfafa85482ca" from cache /src: (run (shell "opam init --reinit -ni")) Configuring from /home/opam/.opamrc and then from built-in defaults. Checking for available remotes: rsync and local, git. - you won't be able to use mercurial repositories unless you install the hg command on your system. - you won't be able to use darcs repositories unless you install the darcs command on your system. Continue? [y/n] y This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted. You may want to back it up before going further. Format upgrade done. [NOTE] The 'jobs' option was reset, its value was 39 and its new value will vary according to the current number of cores on your machine. 
You can restore the fixed value using: opam option jobs=39 --global <><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><> [ERROR] Could not update repository "opam-repository-archive": "/usr/bin/git fetch -q" exited with code 128 "fatal: unable to access 'https://github.com/ocaml/opam-repository-archive/': Could not resolve host: github.com" [default] synchronised from file:///home/opam/opam-repository 2025-03-20 22:31.41 ---> using "e8183cd9ba1a23c4d7ddb3cf8b2d2350a9e0a7713ccd1e3ac8d0c1e7fffb4080" from cache /src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version")) Linux 5.15.0-134-generic The OCaml toplevel, version 5.3.0 2.3.0 2025-03-20 22:31.41 ---> using "e1cd23eff74270a484a5911f8cc469d53dbcdcfafe79312b85310d9254a411b6" from cache /src: (workdir /src) /src: (run (shell "sudo chown opam /src")) 2025-03-20 22:31.41 ---> using "c4ed4798c67e57846c83ac2aba073c0c9239aa56a5e2c21f39faf8e0638895ed" from cache /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u")) From https://github.com/ocaml/opam-repository * branch master -> FETCH_HEAD 862a7640b1..6cf83229dd master -> origin/master 4e25d0cf5f Merge pull request #27651 from lukstafi/opam-publish-ppx_minidebug.2.1.0 <><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><> [opam-repository-archive] synchronised from git+https://github.com/ocaml/opam-repository-archive [default] synchronised from file:///home/opam/opam-repository Everything as up-to-date as possible (run with --verbose to show unavailable upgrades). However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages. Nothing to do. 
# To update the current shell environment, run: eval $(opam env) 2025-03-20 22:31.41 ---> using "dab772aca77df98414fc328dd4b874f99de8a0ed482ec5e55cda5daf184a72c6" from cache /src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./)) 2025-03-20 22:31.41 ---> saved as "b3c1458d2dffc241e60bbd215faf56a55dcfb986863e21eac7f53a89b40c9287" /src: (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'")) [neural_nets_lib.dev] synchronised (file:///src) neural_nets_lib is now pinned to file:///src (version dev) [arrayjit.dev] synchronised (file:///src) arrayjit is now pinned to file:///src (version dev) 2025-03-20 22:31.44 ---> saved as "bc242b916d36f5a95dd95336431aadab2520d42f08a0a9d7cf798fdf8a85a529" /src: (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'")) 2025-03-20 22:31.44 ---> saved as "99abe2e5638a7b6021666aebe939dd0066e9cb52eacde91026c9424c3fb52584" /src: (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0") /src: (env CI true) /src: (env OCAMLCI true) /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS")) + /usr/bin/sudo "yum" "makecache" - Fedora 40 - x86_64 108 kB/s | 27 kB 00:00 - Fedora 40 openh264 (From Cisco) - x86_64 9.8 kB/s | 989 B 00:00 - Fedora 40 - x86_64 - Updates 179 kB/s | 23 kB 00:00 - Metadata cache created. <><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><> [arrayjit.dev] synchronised (file:///src) [neural_nets_lib.dev] synchronised (file:///src) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). [NOTE] Package ocaml-config is already installed (current version is 3). [NOTE] Package ocaml-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml is already installed (current version is 5.3.0). [NOTE] Package base-unix is already installed (current version is base). [NOTE] Package base-threads is already installed (current version is base). 
[NOTE] Package base-nnp is already installed (current version is base). [NOTE] Package base-effects is already installed (current version is base). [NOTE] Package base-domains is already installed (current version is base). [NOTE] Package base-bigarray is already installed (current version is base). The following system packages will first need to be installed: libffi-devel <><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><> + /usr/bin/sudo "yum" "install" "-y" "libffi-devel" - Last metadata expiration check: 0:00:18 ago on Thu Mar 20 22:31:45 2025. - Dependencies resolved. - ================================================================================ - Package Architecture Version Repository Size - ================================================================================ - Installing: - libffi-devel x86_64 3.4.4-7.fc40 fedora 28 k - - Transaction Summary - ================================================================================ - Install 1 Package - - Total download size: 28 k - Installed size: 33 k - Downloading Packages: - libffi-devel-3.4.4-7.fc40.x86_64.rpm 280 kB/s | 28 kB 00:00 - -------------------------------------------------------------------------------- - Total 122 kB/s | 28 kB 00:00 - Running transaction check - Transaction check succeeded. - Running transaction test - Transaction test succeeded. - Running transaction - Preparing : 1/1 - Installing : libffi-devel-3.4.4-7.fc40.x86_64 1/1 - Running scriptlet: libffi-devel-3.4.4-7.fc40.x86_64 1/1 - - Installed: - libffi-devel-3.4.4-7.fc40.x86_64 - - Complete! + /usr/bin/rpm "-q" "--whatprovides" "libffi-devel" - libffi-devel-3.4.4-7.fc40.x86_64 2025-03-20 22:32.04 ---> saved as "fc908412141f84d4618e32af5e50641ccc3c42263dd940e51e569bd85795e2a0" /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS")) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). [NOTE] Package ocaml-config is already installed (current version is 3). [NOTE] Package ocaml-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml is already installed (current version is 5.3.0). [NOTE] Package base-unix is already installed (current version is base). [NOTE] Package base-threads is already installed (current version is base). [NOTE] Package base-nnp is already installed (current version is base). [NOTE] Package base-effects is already installed (current version is base). [NOTE] Package base-domains is already installed (current version is base). [NOTE] Package base-bigarray is already installed (current version is base). 
The following actions will be performed: === install 65 packages - install angstrom 0.16.1 - install backoff 0.1.1 - install base v0.17.1 - install bigarray-compat 1.1.0 - install bigstringaf 0.10.0 - install conf-libffi 2.0.0 - install conf-pkg-config 4 - install cppo 1.8.0 - install csexp 1.5.2 - install ctypes 0.23.0 - install ctypes-foreign 0.23.0 - install dune 3.17.2 - install dune-configurator 3.17.2 - install fieldslib v0.17.0 - install integers 0.7.0 - install jane-street-headers v0.17.0 - install jst-config v0.17.0 - install mtime 2.1.0 - install multicore-magic 2.3.1 - install num 1.5-1 - install ocaml-compiler-libs v0.17.0 - install ocaml-syntax-shims 1.0.0 - install ocaml_intrinsics_kernel v0.17.1 - install ocamlbuild 0.16.1 - install ocamlfind 1.9.8 - install parsexp v0.17.0 - install ppx_assert v0.17.0 - install ppx_base v0.17.0 - install ppx_cold v0.17.0 - install ppx_compare v0.17.0 - install ppx_derivers 1.2.1 - install ppx_deriving 6.0.3 - install ppx_enumerate v0.17.0 - install ppx_expect v0.17.2 - install ppx_fields_conv v0.17.0 - install ppx_globalize v0.17.0 - install ppx_hash v0.17.0 - install ppx_here v0.17.0 - install ppx_inline_test v0.17.0 - install ppx_minidebug 2.1.0 - install ppx_optcomp v0.17.0 - install ppx_sexp_conv v0.17.0 - install ppx_string v0.17.0 - install ppx_variants_conv v0.17.0 - install ppxlib 0.35.0 - install ppxlib_jane v0.17.2 - install printbox 0.12 - install printbox-ext-plot 0.12 - install printbox-html 0.12 - install printbox-md 0.12 - install printbox-text 0.12 - install ptime 1.2.0 - install re 1.12.0 - install saturn_lockfree 0.5.0 - install seq base - install sexplib v0.17.0 - install sexplib0 v0.17.0 - install stdio v0.17.0 - install stdlib-shims 0.3.0 - install time_now v0.17.0 - install topkg 1.0.8 - install tyxml 4.6.0 - install uucp 16.0.0 - install uutf 1.0.4 - install variantslib v0.17.0 <><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><> -> retrieved backoff.0.1.1 (cached) -> retrieved bigarray-compat.1.1.0 (cached) -> retrieved angstrom.0.16.1 (cached) -> retrieved base.v0.17.1 (cached) -> retrieved bigstringaf.0.10.0 (cached) -> retrieved cppo.1.8.0 (cached) -> installed conf-pkg-config.4 -> retrieved csexp.1.5.2 (cached) -> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached) -> installed conf-libffi.2.0.0 -> retrieved fieldslib.v0.17.0 (cached) -> retrieved integers.0.7.0 (cached) -> retrieved jane-street-headers.v0.17.0 (cached) -> retrieved jst-config.v0.17.0 (cached) -> retrieved mtime.2.1.0 (cached) -> retrieved multicore-magic.2.3.1 (cached) -> retrieved num.1.5-1 (cached) -> retrieved ocaml-compiler-libs.v0.17.0 (cached) -> retrieved ocaml-syntax-shims.1.0.0 (cached) -> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached) -> retrieved ocamlbuild.0.16.1 (cached) -> retrieved ocamlfind.1.9.8 (cached) -> retrieved parsexp.v0.17.0 (cached) -> retrieved dune.3.17.2, dune-configurator.3.17.2 (cached) -> installed num.1.5-1 -> retrieved ppx_assert.v0.17.0 (cached) -> retrieved ppx_base.v0.17.0 (cached) -> retrieved ppx_cold.v0.17.0 (cached) -> retrieved ppx_compare.v0.17.0 (cached) -> retrieved ppx_derivers.1.2.1 (cached) -> retrieved ppx_deriving.6.0.3 (cached) -> retrieved ppx_enumerate.v0.17.0 (cached) -> retrieved ppx_expect.v0.17.2 (cached) -> retrieved ppx_fields_conv.v0.17.0 (cached) -> retrieved ppx_globalize.v0.17.0 (cached) -> retrieved ppx_hash.v0.17.0 (cached) -> retrieved ppx_here.v0.17.0 (cached) -> retrieved ppx_inline_test.v0.17.0 (cached) -> retrieved ppx_minidebug.2.1.0 
(cached) -> retrieved ppx_optcomp.v0.17.0 (cached) -> retrieved ppx_sexp_conv.v0.17.0 (cached) -> retrieved ppx_string.v0.17.0 (cached) -> retrieved ppx_variants_conv.v0.17.0 (cached) -> retrieved ppxlib_jane.v0.17.2 (cached) -> retrieved ptime.1.2.0 (cached) -> retrieved re.1.12.0 (cached) -> retrieved saturn_lockfree.0.5.0 (cached) -> retrieved seq.base (cached) -> installed seq.base -> retrieved sexplib.v0.17.0 (cached) -> retrieved ppxlib.0.35.0 (cached) -> retrieved sexplib0.v0.17.0 (cached) -> retrieved stdio.v0.17.0 (cached) -> retrieved stdlib-shims.0.3.0 (cached) -> retrieved time_now.v0.17.0 (cached) -> retrieved topkg.1.0.8 (cached) -> retrieved tyxml.4.6.0 (cached) -> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached) -> retrieved uucp.16.0.0 (cached) -> retrieved uutf.1.0.4 (cached) -> retrieved variantslib.v0.17.0 (cached) -> installed ocamlfind.1.9.8 -> installed ocamlbuild.0.16.1 -> installed topkg.1.0.8 -> installed uutf.1.0.4 -> installed mtime.2.1.0 -> installed ptime.1.2.0 -> installed dune.3.17.2 -> installed jane-street-headers.v0.17.0 -> installed ppx_derivers.1.2.1 -> installed printbox.0.12 -> installed csexp.1.5.2 -> installed backoff.0.1.1 -> installed bigarray-compat.1.1.0 -> installed multicore-magic.2.3.1 -> installed ocaml-syntax-shims.1.0.0 -> installed ocaml_intrinsics_kernel.v0.17.1 -> installed sexplib0.v0.17.0 -> installed ocaml-compiler-libs.v0.17.0 -> installed stdlib-shims.0.3.0 -> installed cppo.1.8.0 -> installed re.1.12.0 -> installed integers.0.7.0 -> installed saturn_lockfree.0.5.0 -> installed parsexp.v0.17.0 -> installed dune-configurator.3.17.2 -> installed bigstringaf.0.10.0 -> installed sexplib.v0.17.0 -> installed angstrom.0.16.1 -> installed tyxml.4.6.0 -> installed printbox-html.0.12 -> installed ctypes.0.23.0 -> installed base.v0.17.1 -> installed variantslib.v0.17.0 -> installed fieldslib.v0.17.0 -> installed stdio.v0.17.0 -> installed ctypes-foreign.0.23.0 -> installed uucp.16.0.0 -> installed printbox-text.0.12 -> installed printbox-md.0.12 -> installed printbox-ext-plot.0.12 -> installed ppxlib.0.35.0 -> installed ppxlib_jane.v0.17.2 -> installed ppx_optcomp.v0.17.0 -> installed ppx_cold.v0.17.0 -> installed ppx_here.v0.17.0 -> installed ppx_variants_conv.v0.17.0 -> installed ppx_fields_conv.v0.17.0 -> installed ppx_enumerate.v0.17.0 -> installed ppx_globalize.v0.17.0 -> installed ppx_deriving.6.0.3 -> installed ppx_compare.v0.17.0 -> installed ppx_sexp_conv.v0.17.0 -> installed ppx_hash.v0.17.0 -> installed ppx_assert.v0.17.0 -> installed ppx_base.v0.17.0 -> installed ppx_minidebug.2.1.0 -> installed jst-config.v0.17.0 -> installed ppx_string.v0.17.0 -> installed time_now.v0.17.0 -> installed ppx_inline_test.v0.17.0 -> installed ppx_expect.v0.17.2 Done. # To update the current shell environment, run: eval $(opam env) 2025-03-20 22:33.41 ---> saved as "8a6dc909c2f8092d37652e785545688022c93f063e0b881d32efb3c60109259c" /src: (copy (src .) (dst /src)) 2025-03-20 22:33.42 ---> saved as "089820badd36741250c08041ef297b2ff9b2d422759851f6476bbadc71f8bc67" /src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")) (cd _build/default/test_ppx && ./test_ppx_op_expected.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/default/test_ppx && ./test_ppx_op.exe) Welcome to OCANNL! 
Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/45faefeb2323faebc11c1688d6c324c6/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test/ocannl_config. 
Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/88092fa15afeca44bf86a9d605ef39af/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/default/test && ./moons_demo_parallel_run.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file ("Set log_level to" 1) └─{orphaned from #2} Retrieving commandline, environment, or config file variable ocannl_backend Found cc, in the config file Retrieving commandline, environment, or config file variable ocannl_ll_ident_style Not found, using default heuristic Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level Not found, using default 3 Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command Not found, using default gcc Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout Not found, using default false Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453 Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087 Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382 Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039 Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216 Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512 Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081 Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141 Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385 Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263 Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603 Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902 Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024 Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685 Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407 Batch=959, step=960, 
lr=0.196250, batch loss=0.245136, epoch loss=44.035543 Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049 Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829 Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269 Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952 Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952 Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138 Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381 Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025 Batch=239, step=1440, lr=0.194250, batch loss=0.347897, epoch loss=1.058921 Batch=299, step=1500, lr=0.194000, batch loss=0.247348, epoch loss=1.306269 Batch=359, step=1560, lr=0.193750, batch loss=0.316559, epoch loss=1.622828 Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.935563 Batch=479, step=1680, lr=0.193250, batch loss=0.276268, epoch loss=2.211831 Batch=539, step=1740, lr=0.193000, batch loss=0.209826, epoch loss=2.421657 Batch=599, step=1800, lr=0.192750, batch loss=0.250384, epoch loss=2.672042 Batch=659, step=1860, lr=0.192500, batch loss=0.367201, epoch loss=3.039243 Batch=719, step=1920, lr=0.192250, batch loss=0.354917, epoch loss=3.394160 Batch=779, step=1980, lr=0.192000, batch loss=0.381382, epoch loss=3.775542 Batch=839, step=2040, lr=0.191750, batch loss=0.339637, epoch loss=4.115179 Batch=899, step=2100, lr=0.191500, batch loss=0.295234, epoch loss=4.410413 Batch=959, step=2160, lr=0.191250, batch loss=0.214033, epoch loss=4.624446 Batch=1019, step=2220, lr=0.191000, batch loss=0.330972, epoch loss=4.955419 Batch=1079, step=2280, lr=0.190750, batch loss=0.208236, epoch loss=5.163654 Batch=1139, step=2340, lr=0.190500, batch loss=0.278374, epoch loss=5.442028 Batch=1199, step=2400, lr=0.190250, batch loss=0.220793, epoch loss=5.662821 Epoch=1, step=2400, lr=0.190250, epoch loss=5.662821 Batch=59, step=2460, lr=0.190000, batch loss=0.230363, epoch loss=0.230363 Batch=119, step=2520, lr=0.189750, batch loss=0.195962, epoch loss=0.426325 Batch=179, step=2580, lr=0.189500, batch loss=0.221156, epoch loss=0.647481 Batch=239, step=2640, lr=0.189250, batch loss=0.328098, epoch loss=0.975578 Batch=299, step=2700, lr=0.189000, batch loss=0.202947, epoch loss=1.178525 Batch=359, step=2760, lr=0.188750, batch loss=0.289890, epoch loss=1.468415 Batch=419, step=2820, lr=0.188500, batch loss=0.281744, epoch loss=1.750160 Batch=479, step=2880, lr=0.188250, batch loss=0.264844, epoch loss=2.015004 Batch=539, step=2940, lr=0.188000, batch loss=0.203798, epoch loss=2.218802 Batch=599, step=3000, lr=0.187750, batch loss=0.248079, epoch loss=2.466881 Batch=659, step=3060, lr=0.187500, batch loss=0.345056, epoch loss=2.811937 Batch=719, step=3120, lr=0.187250, batch loss=0.343542, epoch loss=3.155479 Batch=779, step=3180, lr=0.187000, batch loss=0.366989, epoch loss=3.522468 Batch=839, step=3240, lr=0.186500, batch loss=0.321779, epoch loss=3.844248 Batch=899, step=3300, lr=0.186500, batch loss=0.283850, epoch loss=4.128098 Batch=959, step=3360, lr=0.186250, batch loss=0.214439, epoch loss=4.342536 Batch=1019, step=3420, lr=0.186000, batch loss=0.306367, epoch loss=4.648904 Batch=1079, step=3480, lr=0.185750, batch loss=0.177343, epoch loss=4.826247 Batch=1139, step=3540, lr=0.185500, batch loss=0.235559, epoch loss=5.061805 Batch=1199, step=3600, lr=0.185250, batch loss=0.197910, epoch loss=5.259715 Epoch=2, step=3600, lr=0.185250, 
epoch loss=5.259715 Batch=59, step=3660, lr=0.185000, batch loss=0.226564, epoch loss=0.226564 Batch=119, step=3720, lr=0.184750, batch loss=0.191973, epoch loss=0.418537 Batch=179, step=3780, lr=0.184500, batch loss=0.210595, epoch loss=0.629133 Batch=239, step=3840, lr=0.184250, batch loss=0.316682, epoch loss=0.945815 Batch=299, step=3900, lr=0.184000, batch loss=0.207846, epoch loss=1.153661 Batch=359, step=3960, lr=0.183750, batch loss=0.289042, epoch loss=1.442704 Batch=419, step=4020, lr=0.183500, batch loss=0.290977, epoch loss=1.733681 Batch=479, step=4080, lr=0.183250, batch loss=0.259816, epoch loss=1.993497 Batch=539, step=4140, lr=0.183000, batch loss=0.197454, epoch loss=2.190951 Batch=599, step=4200, lr=0.182750, batch loss=0.236682, epoch loss=2.427633 Batch=659, step=4260, lr=0.182500, batch loss=0.332770, epoch loss=2.760403 Batch=719, step=4320, lr=0.182250, batch loss=0.337076, epoch loss=3.097479 Batch=779, step=4380, lr=0.182000, batch loss=0.349702, epoch loss=3.447180 Batch=839, step=4440, lr=0.181750, batch loss=0.315443, epoch loss=3.762623 Batch=899, step=4500, lr=0.181500, batch loss=0.285767, epoch loss=4.048390 Batch=959, step=4560, lr=0.181250, batch loss=0.251862, epoch loss=4.300252 Batch=1019, step=4620, lr=0.181000, batch loss=0.363787, epoch loss=4.664039 Batch=1079, step=4680, lr=0.180750, batch loss=0.208092, epoch loss=4.872132 Batch=1139, step=4740, lr=0.180500, batch loss=0.249695, epoch loss=5.121827 Batch=1199, step=4800, lr=0.180250, batch loss=0.191133, epoch loss=5.312960 Epoch=3, step=4800, lr=0.180250, epoch loss=5.312960 Batch=59, step=4860, lr=0.180000, batch loss=0.228281, epoch loss=0.228281 Batch=119, step=4920, lr=0.179750, batch loss=0.189892, epoch loss=0.418173 Batch=179, step=4980, lr=0.179500, batch loss=0.205852, epoch loss=0.624025 Batch=239, step=5040, lr=0.179250, batch loss=0.306115, epoch loss=0.930140 Batch=299, step=5100, lr=0.179000, batch loss=0.202595, epoch loss=1.132734 Batch=359, step=5160, lr=0.178750, batch loss=0.270753, epoch loss=1.403488 Batch=419, step=5220, lr=0.178500, batch loss=0.264295, epoch loss=1.667783 Batch=479, step=5280, lr=0.178250, batch loss=0.240352, epoch loss=1.908135 Batch=539, step=5340, lr=0.178000, batch loss=0.190292, epoch loss=2.098427 Batch=599, step=5400, lr=0.177750, batch loss=0.231517, epoch loss=2.329944 Batch=659, step=5460, lr=0.177500, batch loss=0.323149, epoch loss=2.653094 Batch=719, step=5520, lr=0.177250, batch loss=0.325023, epoch loss=2.978117 Batch=779, step=5580, lr=0.177000, batch loss=0.337550, epoch loss=3.315667 Batch=839, step=5640, lr=0.176750, batch loss=0.314058, epoch loss=3.629725 Batch=899, step=5700, lr=0.176500, batch loss=0.277496, epoch loss=3.907221 Batch=959, step=5760, lr=0.176250, batch loss=0.207570, epoch loss=4.114791 Batch=1019, step=5820, lr=0.176000, batch loss=0.343362, epoch loss=4.458153 Batch=1079, step=5880, lr=0.175750, batch loss=0.208379, epoch loss=4.666532 Batch=1139, step=5940, lr=0.175500, batch loss=0.240009, epoch loss=4.906541 Batch=1199, step=6000, lr=0.175250, batch loss=0.187897, epoch loss=5.094438 Epoch=4, step=6000, lr=0.175250, epoch loss=5.094438 Batch=59, step=6060, lr=0.175000, batch loss=0.239302, epoch loss=0.239302 Batch=119, step=6120, lr=0.174750, batch loss=0.188184, epoch loss=0.427486 Batch=179, step=6180, lr=0.174500, batch loss=0.201710, epoch loss=0.629197 Batch=239, step=6240, lr=0.174250, batch loss=0.301320, epoch loss=0.930517 Batch=299, step=6300, lr=0.174000, batch loss=0.213402, epoch loss=1.143919 
Batch=359, step=6360, lr=0.173750, batch loss=0.271046, epoch loss=1.414965 Batch=419, step=6420, lr=0.173500, batch loss=0.262999, epoch loss=1.677964 Batch=479, step=6480, lr=0.173250, batch loss=0.246353, epoch loss=1.924317 Batch=539, step=6540, lr=0.173000, batch loss=0.190610, epoch loss=2.114928 Batch=599, step=6600, lr=0.172750, batch loss=0.231787, epoch loss=2.346715 Batch=659, step=6660, lr=0.172500, batch loss=0.314267, epoch loss=2.660981 Batch=719, step=6720, lr=0.172250, batch loss=0.317751, epoch loss=2.978732 Batch=779, step=6780, lr=0.172000, batch loss=0.331159, epoch loss=3.309891 Batch=839, step=6840, lr=0.171750, batch loss=0.305284, epoch loss=3.615175 Batch=899, step=6900, lr=0.171500, batch loss=0.265807, epoch loss=3.880982 Batch=959, step=6960, lr=0.171250, batch loss=0.216048, epoch loss=4.097030 Batch=1019, step=7020, lr=0.171000, batch loss=0.324208, epoch loss=4.421238 Batch=1079, step=7080, lr=0.170750, batch loss=0.176115, epoch loss=4.597354 Batch=1139, step=7140, lr=0.170500, batch loss=0.213908, epoch loss=4.811261 Batch=1199, step=7200, lr=0.170250, batch loss=0.183382, epoch loss=4.994643 Epoch=5, step=7200, lr=0.170250, epoch loss=4.994643 Batch=59, step=7260, lr=0.170000, batch loss=0.239136, epoch loss=0.239136 Batch=119, step=7320, lr=0.169750, batch loss=0.180147, epoch loss=0.419284 Batch=179, step=7380, lr=0.169500, batch loss=0.195013, epoch loss=0.614297 Batch=239, step=7440, lr=0.169250, batch loss=0.291150, epoch loss=0.905447 Batch=299, step=7500, lr=0.169000, batch loss=0.206675, epoch loss=1.112122 Batch=359, step=7560, lr=0.168750, batch loss=0.263976, epoch loss=1.376098 Batch=419, step=7620, lr=0.168500, batch loss=0.256602, epoch loss=1.632700 Batch=479, step=7680, lr=0.168250, batch loss=0.236241, epoch loss=1.868942 Batch=539, step=7740, lr=0.168000, batch loss=0.188636, epoch loss=2.057577 Batch=599, step=7800, lr=0.167750, batch loss=0.224771, epoch loss=2.282348 Batch=659, step=7860, lr=0.167500, batch loss=0.304578, epoch loss=2.586927 Batch=719, step=7920, lr=0.167250, batch loss=0.307453, epoch loss=2.894379 Batch=779, step=7980, lr=0.167000, batch loss=0.320058, epoch loss=3.214437 Batch=839, step=8040, lr=0.166750, batch loss=0.296781, epoch loss=3.511218 Batch=899, step=8100, lr=0.166500, batch loss=0.258305, epoch loss=3.769523 Batch=959, step=8160, lr=0.166250, batch loss=0.208734, epoch loss=3.978257 Batch=1019, step=8220, lr=0.166000, batch loss=0.318497, epoch loss=4.296754 Batch=1079, step=8280, lr=0.165750, batch loss=0.176002, epoch loss=4.472756 Batch=1139, step=8340, lr=0.165500, batch loss=0.208886, epoch loss=4.681642 Batch=1199, step=8400, lr=0.165250, batch loss=0.177486, epoch loss=4.859128 Epoch=6, step=8400, lr=0.165250, epoch loss=4.859128 Batch=59, step=8460, lr=0.165000, batch loss=0.229886, epoch loss=0.229886 Batch=119, step=8520, lr=0.164750, batch loss=0.174007, epoch loss=0.403893 Batch=179, step=8580, lr=0.164500, batch loss=0.187539, epoch loss=0.591433 Batch=239, step=8640, lr=0.164250, batch loss=0.276969, epoch loss=0.868402 Batch=299, step=8700, lr=0.164000, batch loss=0.190227, epoch loss=1.058629 Batch=359, step=8760, lr=0.163750, batch loss=0.248937, epoch loss=1.307566 Batch=419, step=8820, lr=0.163500, batch loss=0.244242, epoch loss=1.551808 Batch=479, step=8880, lr=0.163250, batch loss=0.228885, epoch loss=1.780693 Batch=539, step=8940, lr=0.163000, batch loss=0.177328, epoch loss=1.958021 Batch=599, step=9000, lr=0.162750, batch loss=0.218753, epoch loss=2.176774 Batch=659, step=9060, 
lr=0.162500, batch loss=0.293403, epoch loss=2.470176 Batch=719, step=9120, lr=0.162250, batch loss=0.295796, epoch loss=2.765972 Batch=779, step=9180, lr=0.162000, batch loss=0.314413, epoch loss=3.080386 Batch=839, step=9240, lr=0.161750, batch loss=0.281006, epoch loss=3.361391 Batch=899, step=9300, lr=0.161500, batch loss=0.253097, epoch loss=3.614489 Batch=959, step=9360, lr=0.161250, batch loss=0.185342, epoch loss=3.799830 Batch=1019, step=9420, lr=0.161000, batch loss=0.319728, epoch loss=4.119559 Batch=1079, step=9480, lr=0.160750, batch loss=0.184162, epoch loss=4.303720 Batch=1139, step=9540, lr=0.160500, batch loss=0.208001, epoch loss=4.511721 Batch=1199, step=9600, lr=0.160250, batch loss=0.166302, epoch loss=4.678024 Epoch=7, step=9600, lr=0.160250, epoch loss=4.678024 Batch=59, step=9660, lr=0.160000, batch loss=0.194644, epoch loss=0.194644 Batch=119, step=9720, lr=0.159750, batch loss=0.166765, epoch loss=0.361408 Batch=179, step=9780, lr=0.159500, batch loss=0.179395, epoch loss=0.540803 Batch=239, step=9840, lr=0.159250, batch loss=0.262998, epoch loss=0.803801 Batch=299, step=9900, lr=0.159000, batch loss=0.182079, epoch loss=0.985879 Batch=359, step=9960, lr=0.158750, batch loss=0.239728, epoch loss=1.225607 Batch=419, step=10020, lr=0.158500, batch loss=0.231642, epoch loss=1.457250 Batch=479, step=10080, lr=0.158250, batch loss=0.212866, epoch loss=1.670116 Batch=539, step=10140, lr=0.158000, batch loss=0.170141, epoch loss=1.840257 Batch=599, step=10200, lr=0.157750, batch loss=0.201204, epoch loss=2.041461 Batch=659, step=10260, lr=0.157500, batch loss=0.280314, epoch loss=2.321775 Batch=719, step=10320, lr=0.157250, batch loss=0.280120, epoch loss=2.601894 Batch=779, step=10380, lr=0.157000, batch loss=0.296460, epoch loss=2.898354 Batch=839, step=10440, lr=0.156750, batch loss=0.272386, epoch loss=3.170740 Batch=899, step=10500, lr=0.156500, batch loss=0.236547, epoch loss=3.407286 Batch=959, step=10560, lr=0.156250, batch loss=0.189989, epoch loss=3.597275 Batch=1019, step=10620, lr=0.156000, batch loss=0.286409, epoch loss=3.883685 Batch=1079, step=10680, lr=0.155750, batch loss=0.174300, epoch loss=4.057985 Batch=1139, step=10740, lr=0.155500, batch loss=0.201178, epoch loss=4.259163 Batch=1199, step=10800, lr=0.155250, batch loss=0.154731, epoch loss=4.413893 Epoch=8, step=10800, lr=0.155250, epoch loss=4.413893 Batch=59, step=10860, lr=0.155000, batch loss=0.176126, epoch loss=0.176126 Batch=119, step=10920, lr=0.154750, batch loss=0.146834, epoch loss=0.322960 Batch=179, step=10980, lr=0.154500, batch loss=0.165796, epoch loss=0.488756 Batch=239, step=11040, lr=0.154250, batch loss=0.240634, epoch loss=0.729390 Batch=299, step=11100, lr=0.154000, batch loss=0.171801, epoch loss=0.901191 Batch=359, step=11160, lr=0.153750, batch loss=0.219822, epoch loss=1.121014 Batch=419, step=11220, lr=0.153500, batch loss=0.217280, epoch loss=1.338294 Batch=479, step=11280, lr=0.153250, batch loss=0.210323, epoch loss=1.548617 Batch=539, step=11340, lr=0.153000, batch loss=0.163465, epoch loss=1.712082 Batch=599, step=11400, lr=0.152750, batch loss=0.176698, epoch loss=1.888780 Batch=659, step=11460, lr=0.152500, batch loss=0.265100, epoch loss=2.153880 Batch=719, step=11520, lr=0.152250, batch loss=0.261820, epoch loss=2.415700 Batch=779, step=11580, lr=0.152000, batch loss=0.271750, epoch loss=2.687450 Batch=839, step=11640, lr=0.151750, batch loss=0.249728, epoch loss=2.937178 Batch=899, step=11700, lr=0.151500, batch loss=0.219242, epoch loss=3.156420 Batch=959, 
step=11760, lr=0.151250, batch loss=0.182527, epoch loss=3.338947 Batch=1019, step=11820, lr=0.151000, batch loss=0.276972, epoch loss=3.615920 Batch=1079, step=11880, lr=0.150750, batch loss=0.145719, epoch loss=3.761639 Batch=1139, step=11940, lr=0.150500, batch loss=0.180217, epoch loss=3.941856 Batch=1199, step=12000, lr=0.150250, batch loss=0.139373, epoch loss=4.081229 Epoch=9, step=12000, lr=0.150250, epoch loss=4.081229 Batch=59, step=12060, lr=0.150000, batch loss=0.160143, epoch loss=0.160143 Batch=119, step=12120, lr=0.149750, batch loss=0.131018, epoch loss=0.291161 Batch=179, step=12180, lr=0.149500, batch loss=0.150747, epoch loss=0.441907 Batch=239, step=12240, lr=0.149250, batch loss=0.220893, epoch loss=0.662800 Batch=299, step=12300, lr=0.149000, batch loss=0.135619, epoch loss=0.798419 Batch=359, step=12360, lr=0.148750, batch loss=0.200633, epoch loss=0.999052 Batch=419, step=12420, lr=0.148500, batch loss=0.204876, epoch loss=1.203928 Batch=479, step=12480, lr=0.148250, batch loss=0.178666, epoch loss=1.382594 Batch=539, step=12540, lr=0.148000, batch loss=0.141201, epoch loss=1.523795 Batch=599, step=12600, lr=0.147750, batch loss=0.148462, epoch loss=1.672257 Batch=659, step=12660, lr=0.147500, batch loss=0.226075, epoch loss=1.898333 Batch=719, step=12720, lr=0.147250, batch loss=0.242089, epoch loss=2.140422 Batch=779, step=12780, lr=0.147000, batch loss=0.262711, epoch loss=2.403133 Batch=839, step=12840, lr=0.146750, batch loss=0.236649, epoch loss=2.639783 Batch=899, step=12900, lr=0.146500, batch loss=0.202529, epoch loss=2.842311 Batch=959, step=12960, lr=0.146250, batch loss=0.150909, epoch loss=2.993220 Batch=1019, step=13020, lr=0.146000, batch loss=0.267995, epoch loss=3.261215 Batch=1079, step=13080, lr=0.145750, batch loss=0.108052, epoch loss=3.369267 Batch=1139, step=13140, lr=0.145500, batch loss=0.143859, epoch loss=3.513126 Batch=1199, step=13200, lr=0.145250, batch loss=0.119671, epoch loss=3.632797 Epoch=10, step=13200, lr=0.145250, epoch loss=3.632797 Batch=59, step=13260, lr=0.145000, batch loss=0.137599, epoch loss=0.137599 Batch=119, step=13320, lr=0.144750, batch loss=0.116916, epoch loss=0.254515 Batch=179, step=13380, lr=0.144500, batch loss=0.126809, epoch loss=0.381324 Batch=239, step=13440, lr=0.144250, batch loss=0.182680, epoch loss=0.564004 Batch=299, step=13500, lr=0.144000, batch loss=0.119368, epoch loss=0.683373 Batch=359, step=13560, lr=0.143750, batch loss=0.161049, epoch loss=0.844421 Batch=419, step=13620, lr=0.143500, batch loss=0.162260, epoch loss=1.006681 Batch=479, step=13680, lr=0.143250, batch loss=0.145146, epoch loss=1.151827 Batch=539, step=13740, lr=0.143000, batch loss=0.119119, epoch loss=1.270946 Batch=599, step=13800, lr=0.142750, batch loss=0.119623, epoch loss=1.390569 Batch=659, step=13860, lr=0.142500, batch loss=0.174348, epoch loss=1.564917 Batch=719, step=13920, lr=0.142250, batch loss=0.174532, epoch loss=1.739449 Batch=779, step=13980, lr=0.142000, batch loss=0.197548, epoch loss=1.936997 Batch=839, step=14040, lr=0.141750, batch loss=0.205639, epoch loss=2.142636 Batch=899, step=14100, lr=0.141500, batch loss=0.221993, epoch loss=2.364628 Batch=959, step=14160, lr=0.141250, batch loss=0.099273, epoch loss=2.463901 Batch=1019, step=14220, lr=0.141000, batch loss=0.200350, epoch loss=2.664251 Batch=1079, step=14280, lr=0.140750, batch loss=0.074341, epoch loss=2.738592 Batch=1139, step=14340, lr=0.140500, batch loss=0.116118, epoch loss=2.854710 Batch=1199, step=14400, lr=0.140250, batch loss=0.083813, 
epoch loss=2.938523 Epoch=11, step=14400, lr=0.140250, epoch loss=2.938523 Batch=59, step=14460, lr=0.140000, batch loss=0.109406, epoch loss=0.109406 Batch=119, step=14520, lr=0.139750, batch loss=0.111410, epoch loss=0.220817 Batch=179, step=14580, lr=0.139500, batch loss=0.101221, epoch loss=0.322038 Batch=239, step=14640, lr=0.139250, batch loss=0.139412, epoch loss=0.461450 Batch=299, step=14700, lr=0.139000, batch loss=0.076986, epoch loss=0.538436 Batch=359, step=14760, lr=0.138750, batch loss=0.119005, epoch loss=0.657441 Batch=419, step=14820, lr=0.138500, batch loss=0.131386, epoch loss=0.788827 Batch=479, step=14880, lr=0.138250, batch loss=0.099036, epoch loss=0.887862 Batch=539, step=14940, lr=0.138000, batch loss=0.110680, epoch loss=0.998542 Batch=599, step=15000, lr=0.137750, batch loss=0.084034, epoch loss=1.082577 Batch=659, step=15060, lr=0.137500, batch loss=0.127427, epoch loss=1.210003 Batch=719, step=15120, lr=0.137250, batch loss=0.119400, epoch loss=1.329404 Batch=779, step=15180, lr=0.137000, batch loss=0.120551, epoch loss=1.449955 Batch=839, step=15240, lr=0.136750, batch loss=0.124016, epoch loss=1.573971 Batch=899, step=15300, lr=0.136500, batch loss=0.138668, epoch loss=1.712639 Batch=959, step=15360, lr=0.136250, batch loss=0.151560, epoch loss=1.864199 Batch=1019, step=15420, lr=0.136000, batch loss=0.132828, epoch loss=1.997027 Batch=1079, step=15480, lr=0.135750, batch loss=0.058387, epoch loss=2.055414 Batch=1139, step=15540, lr=0.135500, batch loss=0.104170, epoch loss=2.159584 Batch=1199, step=15600, lr=0.135250, batch loss=0.056828, epoch loss=2.216412 Epoch=12, step=15600, lr=0.135250, epoch loss=2.216412 Batch=59, step=15660, lr=0.135000, batch loss=0.085754, epoch loss=0.085754 Batch=119, step=15720, lr=0.134750, batch loss=0.119564, epoch loss=0.205318 Batch=179, step=15780, lr=0.134500, batch loss=0.092309, epoch loss=0.297627 Batch=239, step=15840, lr=0.134250, batch loss=0.097106, epoch loss=0.394734 Batch=299, step=15900, lr=0.134000, batch loss=0.042579, epoch loss=0.437313 Batch=359, step=15960, lr=0.133750, batch loss=0.078555, epoch loss=0.515868 Batch=419, step=16020, lr=0.133500, batch loss=0.078414, epoch loss=0.594282 Batch=479, step=16080, lr=0.133250, batch loss=0.060889, epoch loss=0.655171 Batch=539, step=16140, lr=0.133000, batch loss=0.062473, epoch loss=0.717644 Batch=599, step=16200, lr=0.132750, batch loss=0.136373, epoch loss=0.854018 Batch=659, step=16260, lr=0.132500, batch loss=0.087834, epoch loss=0.941851 Batch=719, step=16320, lr=0.132250, batch loss=0.139731, epoch loss=1.081582 Batch=779, step=16380, lr=0.132000, batch loss=0.332376, epoch loss=1.413958 Batch=839, step=16440, lr=0.131750, batch loss=0.098245, epoch loss=1.512203 Batch=899, step=16500, lr=0.131500, batch loss=0.087913, epoch loss=1.600116 Batch=959, step=16560, lr=0.131250, batch loss=0.035391, epoch loss=1.635507 Batch=1019, step=16620, lr=0.131000, batch loss=0.063948, epoch loss=1.699455 Batch=1079, step=16680, lr=0.130750, batch loss=0.052758, epoch loss=1.752213 Batch=1139, step=16740, lr=0.130500, batch loss=0.088092, epoch loss=1.840305 Batch=1199, step=16800, lr=0.130250, batch loss=0.044185, epoch loss=1.884490 Epoch=13, step=16800, lr=0.130250, epoch loss=1.884490 Batch=59, step=16860, lr=0.130000, batch loss=0.037997, epoch loss=0.037997 Batch=119, step=16920, lr=0.129750, batch loss=0.037046, epoch loss=0.075043 Batch=179, step=16980, lr=0.129500, batch loss=0.044487, epoch loss=0.119530 Batch=239, step=17040, lr=0.129250, batch 
loss=0.062104, epoch loss=0.181634
Batch=299, step=17100, lr=0.129000, batch loss=0.022628, epoch loss=0.204262
Batch=359, step=17160, lr=0.128750, batch loss=0.044254, epoch loss=0.248516
Batch=419, step=17220, lr=0.128500, batch loss=0.065768, epoch loss=0.314284
Batch=479, step=17280, lr=0.128250, batch loss=0.021944, epoch loss=0.336228
Batch=539, step=17340, lr=0.128000, batch loss=0.028797, epoch loss=0.365025
Batch=599, step=17400, lr=0.127750, batch loss=0.036312, epoch loss=0.401337
Batch=659, step=17460, lr=0.127500, batch loss=0.049116, epoch loss=0.450454
Batch=719, step=17520, lr=0.127250, batch loss=0.060571, epoch loss=0.511025
Batch=779, step=17580, lr=0.127000, batch loss=0.069128, epoch loss=0.580153
Batch=839, step=17640, lr=0.126750, batch loss=0.101948, epoch loss=0.682101
Batch=899, step=17700, lr=0.126500, batch loss=0.050634, epoch loss=0.732735
Batch=959, step=17760, lr=0.126250, batch loss=0.016851, epoch loss=0.749586
Batch=1019, step=17820, lr=0.126000, batch loss=0.038577, epoch loss=0.788164
Batch=1079, step=17880, lr=0.125750, batch loss=0.036682, epoch loss=0.824846
Batch=1139, step=17940, lr=0.125500, batch loss=0.076101, epoch loss=0.900947
Batch=1199, step=18000, lr=0.125250, batch loss=0.025807, epoch loss=0.926754
Epoch=14, step=18000, lr=0.125250, epoch loss=0.926754
Batch=59, step=18060, lr=0.125000, batch loss=0.016698, epoch loss=0.016698
Batch=119, step=18120, lr=0.124750, batch loss=0.024318, epoch loss=0.041016
Batch=179, step=18180, lr=0.124500, batch loss=0.058501, epoch loss=0.099517
Batch=239, step=18240, lr=0.124250, batch loss=0.036712, epoch loss=0.136228
Batch=299, step=18300, lr=0.124000, batch loss=0.011489, epoch loss=0.147718
Batch=359, step=18360, lr=0.123750, batch loss=0.027589, epoch loss=0.175306
Batch=419, step=18420, lr=0.123500, batch loss=0.028737, epoch loss=0.204044
Batch=479, step=18480, lr=0.123250, batch loss=0.013229, epoch loss=0.217273
Batch=539, step=18540, lr=0.123000, batch loss=0.021790, epoch loss=0.239063
Batch=599, step=18600, lr=0.122750, batch loss=0.026570, epoch loss=0.265634
Batch=659, step=18660, lr=0.122500, batch loss=0.028717, epoch loss=0.294351
Batch=719, step=18720, lr=0.122250, batch loss=0.042754, epoch loss=0.337105
Batch=779, step=18780, lr=0.122000, batch loss=0.095971, epoch loss=0.433076
Batch=839, step=18840, lr=0.121750, batch loss=0.069466, epoch loss=0.502542
Batch=899, step=18900, lr=0.121500, batch loss=0.082365, epoch loss=0.584907
Batch=959, step=18960, lr=0.121250, batch loss=0.014550, epoch loss=0.599457
Batch=1019, step=19020, lr=0.121000, batch loss=0.026531, epoch loss=0.625989
Batch=1079, step=19080, lr=0.120750, batch loss=0.004414, epoch loss=0.630403
Batch=1139, step=19140, lr=0.120500, batch loss=0.025420, epoch loss=0.655823
Batch=1199, step=19200, lr=0.120250, batch loss=0.011128, epoch loss=0.666951
Epoch=15, step=19200, lr=0.120250, epoch loss=0.666951
Batch=59, step=19260, lr=0.120000, batch loss=0.005645, epoch loss=0.005645
Batch=119, step=19320, lr=0.119750, batch loss=0.014931, epoch loss=0.020577
Batch=179, step=19380, lr=0.119500, batch loss=0.032909, epoch loss=0.053486
Batch=239, step=19440, lr=0.119250, batch loss=0.022201, epoch loss=0.075686
Batch=299, step=19500, lr=0.119000, batch loss=0.006132, epoch loss=0.081819
Batch=359, step=19560, lr=0.118750, batch loss=0.017233, epoch loss=0.099051
Batch=419, step=19620, lr=0.118500, batch loss=0.020111, epoch loss=0.119163
Batch=479, step=19680, lr=0.118250, batch loss=0.010128, epoch loss=0.129291
Batch=539, step=19740, lr=0.118000, batch loss=0.020400, epoch loss=0.149691
Batch=599, step=19800, lr=0.117750, batch loss=0.020137, epoch loss=0.169828
Batch=659, step=19860, lr=0.117500, batch loss=0.020872, epoch loss=0.190700
Batch=719, step=19920, lr=0.117250, batch loss=0.022867, epoch loss=0.213567
Batch=779, step=19980, lr=0.117000, batch loss=0.048997, epoch loss=0.262564
Batch=839, step=20040, lr=0.116750, batch loss=0.045122, epoch loss=0.307686
Batch=899, step=20100, lr=0.116500, batch loss=0.050441, epoch loss=0.358127
Batch=959, step=20160, lr=0.116250, batch loss=0.011396, epoch loss=0.369523
Batch=1019, step=20220, lr=0.116000, batch loss=0.013394, epoch loss=0.382917
Batch=1079, step=20280, lr=0.115750, batch loss=0.003363, epoch loss=0.386280
Batch=1139, step=20340, lr=0.115500, batch loss=0.015324, epoch loss=0.401604
Batch=1199, step=20400, lr=0.115250, batch loss=0.008392, epoch loss=0.409996
Epoch=16, step=20400, lr=0.115250, epoch loss=0.409996
Batch=59, step=20460, lr=0.115000, batch loss=0.004823, epoch loss=0.004823
Batch=119, step=20520, lr=0.114750, batch loss=0.012313, epoch loss=0.017136
Batch=179, step=20580, lr=0.114500, batch loss=0.025218, epoch loss=0.042354
Batch=239, step=20640, lr=0.114250, batch loss=0.014786, epoch loss=0.057140
Batch=299, step=20700, lr=0.114000, batch loss=0.003070, epoch loss=0.060210
Batch=359, step=20760, lr=0.113750, batch loss=0.013444, epoch loss=0.073654
Batch=419, step=20820, lr=0.113500, batch loss=0.015633, epoch loss=0.089287
Batch=479, step=20880, lr=0.113250, batch loss=0.004899, epoch loss=0.094186
Batch=539, step=20940, lr=0.113000, batch loss=0.015758, epoch loss=0.109943
Batch=599, step=21000, lr=0.112750, batch loss=0.019025, epoch loss=0.128968
Batch=659, step=21060, lr=0.112500, batch loss=0.015833, epoch loss=0.144801
Batch=719, step=21120, lr=0.112250, batch loss=0.041613, epoch loss=0.186414
Batch=779, step=21180, lr=0.112000, batch loss=0.074155, epoch loss=0.260569
Batch=839, step=21240, lr=0.111750, batch loss=0.026720, epoch loss=0.287289
Batch=899, step=21300, lr=0.111500, batch loss=0.027883, epoch loss=0.315172
Batch=959, step=21360, lr=0.111250, batch loss=0.009843, epoch loss=0.325015
Batch=1019, step=21420, lr=0.111000, batch loss=0.012158, epoch loss=0.337174
Batch=1079, step=21480, lr=0.110750, batch loss=0.002458, epoch loss=0.339631
Batch=1139, step=21540, lr=0.110500, batch loss=0.012314, epoch loss=0.351945
Batch=1199, step=21600, lr=0.110250, batch loss=0.005161, epoch loss=0.357106
Epoch=17, step=21600, lr=0.110250, epoch loss=0.357106
Batch=59, step=21660, lr=0.110000, batch loss=0.002201, epoch loss=0.002201
Batch=119, step=21720, lr=0.109750, batch loss=0.006844, epoch loss=0.009045
Batch=179, step=21780, lr=0.109500, batch loss=0.013157, epoch loss=0.022202
Batch=239, step=21840, lr=0.109250, batch loss=0.010593, epoch loss=0.032796
Batch=299, step=21900, lr=0.109000, batch loss=0.013227, epoch loss=0.046023
Batch=359, step=21960, lr=0.108750, batch loss=0.013095, epoch loss=0.059118
Batch=419, step=22020, lr=0.108500, batch loss=0.012560, epoch loss=0.071678
Batch=479, step=22080, lr=0.108250, batch loss=0.002842, epoch loss=0.074519
Batch=539, step=22140, lr=0.108000, batch loss=0.017084, epoch loss=0.091603
Batch=599, step=22200, lr=0.107750, batch loss=0.016782, epoch loss=0.108386
Batch=659, step=22260, lr=0.107500, batch loss=0.016970, epoch loss=0.125356
Batch=719, step=22320, lr=0.107250, batch loss=0.031999, epoch loss=0.157355
Batch=779, step=22380, lr=0.107000, batch loss=0.039998, epoch loss=0.197353
Batch=839, step=22440, lr=0.106750, batch loss=0.021677, epoch loss=0.219029
Batch=899, step=22500, lr=0.106500, batch loss=0.026269, epoch loss=0.245298
Batch=959, step=22560, lr=0.106250, batch loss=0.009485, epoch loss=0.254784
Batch=1019, step=22620, lr=0.106000, batch loss=0.009175, epoch loss=0.263959
Batch=1079, step=22680, lr=0.105750, batch loss=0.000373, epoch loss=0.264332
Batch=1139, step=22740, lr=0.105500, batch loss=0.011138, epoch loss=0.275470
Batch=1199, step=22800, lr=0.105250, batch loss=0.004530, epoch loss=0.280000
Epoch=18, step=22800, lr=0.105250, epoch loss=0.280000
Batch=59, step=22860, lr=0.105000, batch loss=0.001917, epoch loss=0.001917
Batch=119, step=22920, lr=0.104750, batch loss=0.005801, epoch loss=0.007719
Batch=179, step=22980, lr=0.104500, batch loss=0.010590, epoch loss=0.018308
Batch=239, step=23040, lr=0.104250, batch loss=0.009520, epoch loss=0.027828
Batch=299, step=23100, lr=0.104000, batch loss=0.001044, epoch loss=0.028872
Batch=359, step=23160, lr=0.103750, batch loss=0.011011, epoch loss=0.039883
Batch=419, step=23220, lr=0.103500, batch loss=0.010442, epoch loss=0.050325
Batch=479, step=23280, lr=0.103250, batch loss=0.002556, epoch loss=0.052881
Batch=539, step=23340, lr=0.103000, batch loss=0.017242, epoch loss=0.070122
Batch=599, step=23400, lr=0.102750, batch loss=0.013790, epoch loss=0.083912
Batch=659, step=23460, lr=0.102500, batch loss=0.010143, epoch loss=0.094056
Batch=719, step=23520, lr=0.102250, batch loss=0.016882, epoch loss=0.110938
Batch=779, step=23580, lr=0.102000, batch loss=0.022189, epoch loss=0.133127
Batch=839, step=23640, lr=0.101750, batch loss=0.027355, epoch loss=0.160482
Batch=899, step=23700, lr=0.101500, batch loss=0.021452, epoch loss=0.181934
Batch=959, step=23760, lr=0.101250, batch loss=0.010991, epoch loss=0.192925
Batch=1019, step=23820, lr=0.101000, batch loss=0.009468, epoch loss=0.202394
Batch=1079, step=23880, lr=0.100750, batch loss=0.001035, epoch loss=0.203429
Batch=1139, step=23940, lr=0.100500, batch loss=0.009138, epoch loss=0.212567
Batch=1199, step=24000, lr=0.100250, batch loss=0.004691, epoch loss=0.217258
Epoch=19, step=24000, lr=0.100250, epoch loss=0.217258
Half-moons scatterplot and decision boundary:
┌────────────────────────────────────────────────────────────────────────────────────────────────────┐
│********************************#*******************************************************************│
│**********************#*#*#######*###*#####*********************************************************│
│**********************#########################*****************************************************│
│*****************#**########*######*###########*###*************************************************│
│***************#################*###################************************************************│
│************######*#################*#################**********************************************│
│**********#*#####*########*#**************##*#########*#********************************************│
│***********########*##*#******************#*****##########******************************************│
│***********###########*************************############**************************************...│
│********######*####*********************************###*###*#*********************************......│
│*******######**##*************....*****************#*######*#*******************************........│
│*******##*##**##**********...........***************########*##**************************...........│
│*****#######************.......%...%%...***************#########************************..........%.│
│******######***********.........%.........**************##*#####***********************.......%.%.%.│
│***#########**********.........%%%.%%......*************#*#######*********************.......%.%%%%.│
│****#######**********..........%%%%.........************#########*******************.........%%.%%.%│
│**#######************..........%%%%%%%........*************###*###*****************..........%%%%%%.│
│*##*####************...........%%%%%%%.........***********########****************...........%%%%%%.│
│*#######************...........%%%%%%%..........************#######**************............%%%%%%.│
│*##*####***********............%%.%%%%%...........***********####***************............%%%%%%%.│
│*#####*#***********.............%%%%%%%............**********##*###***********...............%%%%%..│
│#######***********.............%.%%%%%%.............*********#######*********..............%%%%.%%..│
│#####*#***********..............%%%%%%%...............*******#######********...............%%%%%%%%.│
│###*#*#**********...............%%%%%%%%%..............*******######*******................%%%%%%...│
│#######*********.................%%%%%%%%................****###*###*****.................%%%%%%....│
│######**********.................%%%%%%%%%................***#*###******................%%%%%%%%%...│
│*#*##*#********...................%%%%%%%%%%...............***######***..................%%%%%%.....│
│#****##********....................%%%%%%%%%.................**###*#**................%.%%%%%%%.....│
│**************.....................%.%%%%%%...................*******..................%.%%.%%......│
│**************.......................%..%%%%%%%................****...............%.%%%%%%%%%.......│
│*************.........................%.%%%.%%%%.................*................%%%%%%%.%.%.......│
│*************...........................%..%%%%..%................................%%%%%%%%..........│
│************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........│
│***********..............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........│
│***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............│
│**********...................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............│
│**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................│
│*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................│
│*********............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................│
│********................................................%...%%%%.%%.%%%%..%.........................│
└────────────────────────────────────────────────────────────────────────────────────────────────────┘
2025-03-20 22:34.05 ---> saved as "995ebb91db5130b313275e1f121e067f4194cf446ac5b0dfd0ef8085dbd5cbb2"
Job succeeded
2025-03-20 22:34.10: Job succeeded
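A note on the training log above, inferred from the printed values only (not a statement about OCANNL's API): the learning rate drops by exactly 0.00025 between consecutive reports, which are 60 optimizer steps apart, i.e. a linear decay of roughly 4.17e-6 per step, and "epoch loss" is the running sum of the batch losses reported so far in the current epoch (for example, in epoch 15: 0.005645 + 0.014931 = 0.020576, matching the printed 0.020577 up to display rounding). A minimal OCaml sketch of that arithmetic follows; the base rate 0.20025 is back-computed from the log and is only an assumption, and whether the decay is applied per step or per 60-step report cannot be told from the printed values (they agree at the reported steps).

  (* Illustrative sketch only, not OCANNL code: reproduces the learning-rate
     and epoch-loss arithmetic visible in the log above. *)
  let lr_at ~base_lr ~decay_per_report step =
    (* one report every 60 steps; decay is linear in the report index *)
    base_lr -. decay_per_report *. float_of_int (step / 60)

  let () =
    (* base_lr = 0.20025 is back-computed from the log (an assumption) *)
    Printf.printf "lr at step 17100 = %.6f (log prints 0.129000)\n"
      (lr_at ~base_lr:0.20025 ~decay_per_report:0.00025 17100);
    (* epoch loss = running sum of batch losses within the epoch *)
    Printf.printf "epoch loss after 2 batches of epoch 15 = %.6f\n"
      (0.005645 +. 0.014931)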
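The scatterplot above shows the two interleaved "half-moons" classes (the # and % point clouds) over the learned decision regions (the * and . backgrounds). As a self-contained illustration of how such a dataset is typically generated, a short OCaml sketch follows; this is not the test's data-loading code, and the noise level and moon offsets are illustrative assumptions.

  (* Illustrative sketch only: generate n points on two interleaved half-moons,
     labelled +1 (upper moon) and -1 (lower moon), with uniform jitter. *)
  let half_moons ?(noise = 0.1) n =
    let pi = 4.0 *. atan 1.0 in
    List.init n (fun i ->
        let t = pi *. float_of_int i /. float_of_int (n - 1) in
        let jitter () = noise *. (Random.float 2.0 -. 1.0) in
        if i mod 2 = 0 then
          (* upper moon: (cos t, sin t), label +1 *)
          (cos t +. jitter (), sin t +. jitter (), 1)
        else
          (* lower moon: shifted right and down, label -1 *)
          (1.0 -. cos t +. jitter (), 0.5 -. sin t +. jitter (), -1))

  let () =
    Random.self_init ();
    half_moons 8
    |> List.iter (fun (x, y, label) -> Printf.printf "%+.3f %+.3f %+d\n" x y label)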