2025-05-09 14:37.38: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (86bf625e6f345ccadda7a57dd16ed6a6649ff4ca) (linux-ppc64:debian-12-5.3_ppc64_opam-2.3)
Base: ocaml/opam:debian-12-ocaml-5.3@sha256:b9ad47cc37e058f3242d0343c3d6aab281880610738467625c84e9f51776617e
Opam project build
To reproduce locally:
git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 86bf625e
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-5.3@sha256:b9ad47cc37e058f3242d0343c3d6aab281880610738467625c84e9f51776617e
# debian-12-5.3_ppc64_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 || git fetch origin master) && git reset -q --hard 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
2025-05-09 14:37.38: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-5.3@sha256:b9ad47cc37e058f3242d0343c3d6aab281880610738467625c84e9f51776617e-debian-12-5.3_ppc64_opam-2.3-cfc8860510df1264e4dceb092b0ce2dc"
2025-05-09 14:37.38: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-5.3@sha256:b9ad47cc37e058f3242d0343c3d6aab281880610738467625c84e9f51776617e)
 (comment debian-12-5.3_ppc64_opam-2.3)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "cd ~/opam-repository && (git cat-file -e 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 || git fetch origin master) && git reset -q --hard 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
 (run (network host)
      (shell "opam pin add -yn neural_nets_lib.dev './' && \\nopam pin add -yn arrayjit.dev './'"))
 (run (network host)
      (shell "echo '(lang dune 3.0)' > './dune-project'"))
 (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam install $DEPS"))
 (copy (src .)
(dst /src))(run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")))2025-05-09 14:37.38: Waiting for resource in pool OCluster2025-05-09 14:37.38: Waiting for worker…2025-05-09 14:37.38: Got resource from pool OClusterBuilding on orithia.caelum.ci.devHEAD is now at a68de5eb FormattingHEAD is now at 86bf625e cuda_backend.ml tiny refactoring fixes(from ocaml/opam:debian-12-ocaml-5.3@sha256:b9ad47cc37e058f3242d0343c3d6aab281880610738467625c84e9f51776617e)Unable to find image 'ocaml/opam:debian-12-ocaml-5.3@sha256:b9ad47cc37e058f3242d0343c3d6aab281880610738467625c84e9f51776617e' locallydocker.io/ocaml/opam@sha256:b9ad47cc37e058f3242d0343c3d6aab281880610738467625c84e9f51776617e: Pulling from ocaml/opamDigest: sha256:b9ad47cc37e058f3242d0343c3d6aab281880610738467625c84e9f51776617eStatus: Downloaded newer image for ocaml/opam@sha256:b9ad47cc37e058f3242d0343c3d6aab281880610738467625c84e9f51776617e2025-05-09 14:37.39 ---> using "11564f06579ac1abfc4bc8dcbf5fe8e8e80256686f8892cdaae583cd9afa42b8" from cache/: (comment debian-12-5.3_ppc64_opam-2.3)/: (user (uid 1000) (gid 1000))/: (env CLICOLOR_FORCE 1)/: (env OPAMCOLOR always)/: (workdir /src)/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))2025-05-09 14:37.39 ---> using "7d9d83b5f12977bcf8066c022c05c5bd5be16362fd0a470da1495b1fd5537301" from cache/src: (run (shell "opam init --reinit -ni"))Configuring from /home/opam/.opamrc and then from built-in defaults.Checking for available remotes: rsync and local, git.- you won't be able to use mercurial repositories unless you install the hg command on your system.- you won't be able to use darcs repositories unless you install the darcs command on your system.This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.You may want to back it up before going further.Continue? 
[y/n] yFormat upgrade done.<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>[default] Initialised2025-05-09 14:37.39 ---> using "3356b2ee63007def5ceadc9915328f1eea8f5bf3d1e4e2f434bb4052fe0919a6" from cache/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))Linux 6.8.0-55-genericThe OCaml toplevel, version 5.3.02.3.02025-05-09 14:37.39 ---> using "0ab9777a1a6ec89dec2e09cd4f5c7ca0aae1a08bd0cf5e38d16b5986de7b61c6" from cache/src: (workdir /src)/src: (run (shell "sudo chown opam /src"))2025-05-09 14:37.39 ---> using "8b1bba9974efdc8414b2b8b2a4fed5020d231764a7da89b3dfbeaf9ab99cfe97" from cache/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "cd ~/opam-repository && (git cat-file -e 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 || git fetch origin master) && git reset -q --hard 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 && git log --no-decorate -n1 --oneline && opam update -u"))From https://github.com/ocaml/opam-repository* branch master -> FETCH_HEADda74d7829f..e28c86445c master -> origin/master997e4758ac Merge pull request #27839 from public-release/opam-publish-base.v0.17.2<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>[default] synchronised from git+file:///home/opam/opam-repositoryEverything as up-to-date as possible (run with --verbose to show unavailable upgrades).However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.Nothing to do.# To update the current shell environment, run: eval $(opam env)2025-05-09 14:37.39 ---> using "fc413ac52c2d5a89de1d48cc1a1256d0fbe953cd12f95f094c860c7a8f51d79a" from cache/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))2025-05-09 14:37.39 ---> using "edabc3aebfd87acaf4fad3eebe33e30cdaeb435901f4608ea303bf5b4a9a7634" from cache/src: (run (network host)(shell "opam pin add -yn neural_nets_lib.dev './' && \\nopam pin add -yn arrayjit.dev './'"))[neural_nets_lib.dev] synchronised (file:///src)neural_nets_lib is now pinned to file:///src (version dev)[arrayjit.dev] synchronised (file:///src)arrayjit is now pinned to file:///src (version dev)2025-05-09 14:37.39 ---> using "26beb80314b675d138bba30192f965393c8083f9cbd832ccf014196879db9bc5" from cache/src: (run (network host)(shell "echo '(lang dune 3.0)' > './dune-project'"))2025-05-09 14:37.39 ---> using "fa1f1f82c4ecfb8045e0e3c953a8d541f30b1b4e1ecbf1f5afe4a90c2fd4d4ef" from cache/src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 
ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")/src: (env CI true)/src: (env OCAMLCI true)/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))+ /usr/bin/sudo "apt-get" "update"- Hit:1 http://deb.debian.org/debian bookworm InRelease- Get:2 http://deb.debian.org/debian bookworm-updates InRelease [55.4 kB]- Get:3 http://deb.debian.org/debian-security bookworm-security InRelease [48.0 kB]- Get:4 http://deb.debian.org/debian-security bookworm-security/main ppc64el Packages [247 kB]- Fetched 350 kB in 1s (435 kB/s)- Reading package lists...-<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>[arrayjit.dev] synchronised (file:///src)[neural_nets_lib.dev] synchronised (file:///src)[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).[NOTE] Package ocaml-config is already installed (current version is 3).[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml is already installed (current version is 5.3.0).[NOTE] Package base-unix is already installed (current version is base).[NOTE] Package base-threads is already installed (current version is base).[NOTE] Package base-nnp is already installed (current version is base).[NOTE] Package base-effects is already installed (current version is base).[NOTE] Package base-domains is already installed (current version is base).[NOTE] Package base-bigarray is already installed (current version is base).The following system packages will first need to be installed:libffi-dev pkg-config<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>+ /usr/bin/sudo "apt-get" "install" "-qq" "-yy" "libffi-dev" "pkg-config"- debconf: delaying package configuration, since apt-utils is not installed- Selecting previously unselected package libffi-dev:ppc64el.- (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 
18720 files and directories currently installed.)- Preparing to unpack .../libffi-dev_3.4.4-1_ppc64el.deb ...- Unpacking libffi-dev:ppc64el (3.4.4-1) ...- Selecting previously unselected package libpkgconf3:ppc64el.- Preparing to unpack .../libpkgconf3_1.8.1-1_ppc64el.deb ...- Unpacking libpkgconf3:ppc64el (1.8.1-1) ...- Selecting previously unselected package pkgconf-bin.- Preparing to unpack .../pkgconf-bin_1.8.1-1_ppc64el.deb ...- Unpacking pkgconf-bin (1.8.1-1) ...- Selecting previously unselected package pkgconf:ppc64el.- Preparing to unpack .../pkgconf_1.8.1-1_ppc64el.deb ...- Unpacking pkgconf:ppc64el (1.8.1-1) ...- Selecting previously unselected package pkg-config:ppc64el.- Preparing to unpack .../pkg-config_1.8.1-1_ppc64el.deb ...- Unpacking pkg-config:ppc64el (1.8.1-1) ...- Setting up libffi-dev:ppc64el (3.4.4-1) ...- Setting up libpkgconf3:ppc64el (1.8.1-1) ...- Setting up pkgconf-bin (1.8.1-1) ...- Setting up pkgconf:ppc64el (1.8.1-1) ...- Setting up pkg-config:ppc64el (1.8.1-1) ...- Processing triggers for libc-bin (2.36-9+deb12u10) ...2025-05-09 14:37.39 ---> using "f551a02d497253f367f80c69ee589090f98b5d9d3d884b8dbad4ca802e6d574e" from cache/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "opam install $DEPS"))[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).[NOTE] Package ocaml-config is already installed (current version is 3).[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml is already installed (current version is 5.3.0).[NOTE] Package base-unix is already installed (current version is base).[NOTE] Package base-threads is already installed (current version is base).[NOTE] Package base-nnp is already installed (current version is base).[NOTE] Package base-effects is already installed (current version is base).[NOTE] Package base-domains is already installed (current version is base).[NOTE] Package base-bigarray is already installed (current version is base).The following actions will be performed:=== install 74 packages- install angstrom 0.16.1- install astring 0.8.5- install backoff 0.1.1- install base v0.17.2- install bigarray-compat 1.1.0- install bigstringaf 0.10.0- install camlp-streams 5.0.1- install cmdliner 1.3.0- install conf-libffi 2.0.0- install conf-pkg-config 4- install cppo 1.8.0- install csexp 1.5.2- install ctypes 0.23.0- install ctypes-foreign 0.23.0- install dune 3.18.2- install dune-configurator 3.18.2- install fieldslib v0.17.0- install fmt 0.10.0- install integers 0.7.0- install jane-street-headers v0.17.0- install jst-config v0.17.0- install logs 0.8.0- install mdx 2.5.0- install mtime 2.1.0- install multicore-magic 2.3.1- install num 1.5-1- install ocaml-compiler-libs v0.17.0- install ocaml-syntax-shims 1.0.0- install ocaml-version 4.0.0- install ocaml_intrinsics_kernel v0.17.1- install ocamlbuild 0.16.1- install ocamlfind 1.9.8- install parsexp v0.17.0- install ppx_assert v0.17.0- install ppx_base v0.17.0- install ppx_cold v0.17.0- install ppx_compare v0.17.0- install ppx_derivers 1.2.1- install ppx_deriving 6.0.3- install ppx_enumerate v0.17.0- install ppx_expect v0.17.2- install ppx_fields_conv v0.17.0- install ppx_globalize v0.17.0- install ppx_hash v0.17.0- install ppx_here v0.17.0- install ppx_inline_test v0.17.0- install ppx_minidebug 2.2.0- install ppx_optcomp v0.17.0- install ppx_sexp_conv v0.17.0- install ppx_string v0.17.0- install 
ppx_variants_conv v0.17.0- install ppxlib 0.35.0- install ppxlib_jane v0.17.2- install printbox 0.12- install printbox-ext-plot 0.12- install printbox-html 0.12- install printbox-md 0.12- install printbox-text 0.12- install ptime 1.2.0- install re 1.12.0- install result 1.5- install saturn_lockfree 0.5.0- install seq base- install sexplib v0.17.0- install sexplib0 v0.17.0- install stdio v0.17.0- install stdlib-shims 0.3.0- install thread-local-storage 0.2- install time_now v0.17.0- install topkg 1.0.8- install tyxml 4.6.0- install uucp 16.0.0- install uutf 1.0.4- install variantslib v0.17.0<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>-> retrieved backoff.0.1.1 (cached)-> retrieved angstrom.0.16.1 (cached)-> retrieved astring.0.8.5 (cached)-> retrieved base.v0.17.2 (cached)-> retrieved bigarray-compat.1.1.0 (cached)-> retrieved bigstringaf.0.10.0 (cached)-> retrieved camlp-streams.5.0.1 (cached)-> installed conf-pkg-config.4-> retrieved cmdliner.1.3.0 (cached)-> retrieved cppo.1.8.0 (cached)-> retrieved csexp.1.5.2 (cached)-> installed conf-libffi.2.0.0-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached)-> retrieved fieldslib.v0.17.0 (cached)-> retrieved integers.0.7.0 (cached)-> retrieved fmt.0.10.0 (cached)-> retrieved jane-street-headers.v0.17.0 (cached)-> retrieved jst-config.v0.17.0 (cached)-> retrieved logs.0.8.0 (cached)-> retrieved mtime.2.1.0 (cached)-> retrieved mdx.2.5.0 (cached)-> retrieved dune.3.18.2, dune-configurator.3.18.2 (cached)-> retrieved multicore-magic.2.3.1 (cached)-> retrieved num.1.5-1 (cached)-> retrieved ocaml-compiler-libs.v0.17.0 (cached)-> retrieved ocaml-syntax-shims.1.0.0 (cached)-> retrieved ocaml-version.4.0.0 (cached)-> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached)-> installed cmdliner.1.3.0-> retrieved ocamlbuild.0.16.1 (cached)-> retrieved ocamlfind.1.9.8 (cached)-> retrieved parsexp.v0.17.0 (cached)-> retrieved ppx_assert.v0.17.0 (cached)-> retrieved ppx_base.v0.17.0 (cached)-> retrieved ppx_cold.v0.17.0 (cached)-> retrieved ppx_compare.v0.17.0 (cached)-> retrieved ppx_derivers.1.2.1 (cached)-> retrieved ppx_deriving.6.0.3 (cached)-> retrieved ppx_enumerate.v0.17.0 (cached)-> retrieved ppx_expect.v0.17.2 (cached)-> retrieved ppx_fields_conv.v0.17.0 (cached)-> retrieved ppx_globalize.v0.17.0 (cached)-> retrieved ppx_hash.v0.17.0 (cached)-> retrieved ppx_here.v0.17.0 (cached)-> retrieved ppx_inline_test.v0.17.0 (cached)-> retrieved ppx_optcomp.v0.17.0 (cached)-> retrieved ppx_sexp_conv.v0.17.0 (cached)-> retrieved ppx_minidebug.2.2.0 (cached)-> retrieved ppx_string.v0.17.0 (cached)-> retrieved ppx_variants_conv.v0.17.0 (cached)-> retrieved ppxlib_jane.v0.17.2 (cached)-> retrieved ptime.1.2.0 (cached)-> retrieved re.1.12.0 (cached)-> retrieved ppxlib.0.35.0 (cached)-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached)-> retrieved seq.base (cached)-> installed seq.base-> retrieved result.1.5 (cached)-> retrieved saturn_lockfree.0.5.0 (cached)-> retrieved sexplib.v0.17.0 (cached)-> retrieved sexplib0.v0.17.0 (cached)-> retrieved stdio.v0.17.0 (cached)-> retrieved stdlib-shims.0.3.0 (cached)-> retrieved thread-local-storage.0.2 (cached)-> retrieved time_now.v0.17.0 (cached)-> retrieved topkg.1.0.8 (cached)-> retrieved tyxml.4.6.0 (cached)-> retrieved uutf.1.0.4 (cached)-> retrieved variantslib.v0.17.0 (cached)-> retrieved uucp.16.0.0 (cached)-> installed num.1.5-1-> installed ocamlfind.1.9.8-> installed ocamlbuild.0.16.1-> installed topkg.1.0.8-> 
installed mtime.2.1.0-> installed uutf.1.0.4-> installed fmt.0.10.0-> installed ptime.1.2.0-> installed astring.0.8.5-> installed logs.0.8.0-> installed dune.3.18.2-> installed jane-street-headers.v0.17.0-> installed ppx_derivers.1.2.1-> installed csexp.1.5.2-> installed backoff.0.1.1-> installed bigarray-compat.1.1.0-> installed camlp-streams.5.0.1-> installed cppo.1.8.0-> installed multicore-magic.2.3.1-> installed ocaml-compiler-libs.v0.17.0-> installed ocaml-syntax-shims.1.0.0-> installed ocaml-version.4.0.0-> installed ocaml_intrinsics_kernel.v0.17.1-> installed printbox.0.12-> installed re.1.12.0-> installed result.1.5-> installed sexplib0.v0.17.0-> installed stdlib-shims.0.3.0-> installed thread-local-storage.0.2-> installed saturn_lockfree.0.5.0-> installed integers.0.7.0-> installed parsexp.v0.17.0-> installed dune-configurator.3.18.2-> installed sexplib.v0.17.0-> installed mdx.2.5.0-> installed bigstringaf.0.10.0-> installed angstrom.0.16.1-> installed tyxml.4.6.0-> installed printbox-html.0.12-> installed ctypes.0.23.0-> installed base.v0.17.2-> installed variantslib.v0.17.0-> installed fieldslib.v0.17.0-> installed stdio.v0.17.0-> installed ctypes-foreign.0.23.0-> installed ppxlib.0.35.0-> installed ppxlib_jane.v0.17.2-> installed ppx_optcomp.v0.17.0-> installed uucp.16.0.0-> installed ppx_cold.v0.17.0-> installed ppx_here.v0.17.0-> installed ppx_variants_conv.v0.17.0-> installed printbox-text.0.12-> installed ppx_fields_conv.v0.17.0-> installed printbox-md.0.12-> installed ppx_enumerate.v0.17.0-> installed printbox-ext-plot.0.12-> installed ppx_globalize.v0.17.0-> installed ppx_deriving.6.0.3-> installed ppx_compare.v0.17.0-> installed ppx_sexp_conv.v0.17.0-> installed ppx_hash.v0.17.0-> installed ppx_assert.v0.17.0-> installed ppx_minidebug.2.2.0-> installed ppx_base.v0.17.0-> installed jst-config.v0.17.0-> installed ppx_string.v0.17.0-> installed time_now.v0.17.0-> installed ppx_inline_test.v0.17.0-> installed ppx_expect.v0.17.2Done.# To update the current shell environment, run: eval $(opam env)2025-05-09 14:37.39 ---> using "c151cf1c8cbcb6155f815e7c147004f9685dff84d11ab660ff56be22c684af09" from cache/src: (copy (src .) (dst /src))2025-05-09 14:37.43 ---> saved as "bc2d8a8b1c86cb179c1dc786e3ac474ba7735dabf9a31c5014a011985f18debb"/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/default/test_ppx && ./test_ppx_op.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/58b4e46b3df4a58eea44858ae7452854/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! 
Reading configuration defaults from /src/_build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! 
Reading configuration defaults from /src/_build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/61471110065a7f521ff95687d37e25ae/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/default/test && ./moons_demo_parallel_run.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file("Set log_level to" 1)└─{orphaned from #2}Retrieving commandline, environment, or config file variable ocannl_backendFound cc, in the config fileRetrieving commandline, environment, or config file variable ocannl_ll_ident_styleNot found, using default heuristicRetrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_levelNot found, using default 3Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_commandNot found, using default gccRetrieving commandline, environment, or config file variable ocannl_never_capture_stdoutNot found, using default falseBatch=59, step=60, lr=0.199750, batch loss=23.609453, epoch loss=23.609453Batch=119, step=120, lr=0.199750, batch loss=8.516926, epoch loss=32.126379Batch=179, step=180, lr=0.199250, batch loss=2.644440, epoch loss=34.770819Batch=239, step=240, lr=0.199250, batch loss=0.856726, epoch loss=35.627545Batch=299, step=300, lr=0.199000, batch loss=1.435283, epoch loss=37.062828Batch=359, step=360, lr=0.198750, batch loss=1.340531, epoch loss=38.403359Batch=419, step=420, lr=0.198500, batch loss=0.610731, epoch loss=39.014090Batch=479, step=480, lr=0.198250, batch loss=0.747171, epoch loss=39.761261Batch=539, step=540, lr=0.197750, batch loss=0.688775, epoch loss=40.450036Batch=599, step=600, lr=0.197500, batch loss=1.101677, epoch loss=41.551713Batch=659, step=660, lr=0.197500, batch loss=0.481680, epoch loss=42.033392Batch=719, step=720, lr=0.197000, batch loss=0.410593, epoch loss=42.443985Batch=779, step=780, lr=0.196750, batch loss=0.469305, epoch loss=42.913290Batch=839, step=840, lr=0.196500, batch loss=0.450224, epoch loss=43.363514Batch=899, step=900, lr=0.196500, batch loss=0.383468, epoch loss=43.746982Batch=959, step=960, lr=0.196000, batch loss=0.268881, epoch loss=44.015863Batch=1019, step=1020, lr=0.196000, batch loss=0.486200, epoch loss=44.502064Batch=1079, step=1080, lr=0.195750, batch loss=0.266782, epoch loss=44.768845Batch=1139, step=1140, lr=0.195250, batch loss=0.329853, epoch loss=45.098698Batch=1199, step=1200, lr=0.195000, batch loss=0.263580, epoch loss=45.362279Epoch=0, step=1200, lr=0.195000, epoch 
loss=45.362279Batch=59, step=1260, lr=0.195000, batch loss=0.263559, epoch loss=0.263559Batch=119, step=1320, lr=0.194750, batch loss=0.208087, epoch loss=0.471646Batch=179, step=1380, lr=0.194250, batch loss=0.249306, epoch loss=0.720952Batch=239, step=1440, lr=0.194000, batch loss=0.351486, epoch loss=1.072438Batch=299, step=1500, lr=0.194000, batch loss=0.236924, epoch loss=1.309362Batch=359, step=1560, lr=0.193750, batch loss=0.313200, epoch loss=1.622561Batch=419, step=1620, lr=0.193500, batch loss=0.312612, epoch loss=1.935173Batch=479, step=1680, lr=0.193250, batch loss=0.277159, epoch loss=2.212332Batch=539, step=1740, lr=0.193000, batch loss=0.209773, epoch loss=2.422105Batch=599, step=1800, lr=0.192750, batch loss=0.245687, epoch loss=2.667792Batch=659, step=1860, lr=0.192500, batch loss=0.362901, epoch loss=3.030693Batch=719, step=1920, lr=0.192000, batch loss=0.356889, epoch loss=3.387582Batch=779, step=1980, lr=0.192000, batch loss=0.382460, epoch loss=3.770042Batch=839, step=2040, lr=0.191500, batch loss=0.342061, epoch loss=4.112103Batch=899, step=2100, lr=0.191500, batch loss=0.295851, epoch loss=4.407954Batch=959, step=2160, lr=0.191250, batch loss=0.215415, epoch loss=4.623369Batch=1019, step=2220, lr=0.190750, batch loss=0.331896, epoch loss=4.955265Batch=1079, step=2280, lr=0.190750, batch loss=0.196440, epoch loss=5.151705Batch=1139, step=2340, lr=0.190250, batch loss=0.273668, epoch loss=5.425373Batch=1199, step=2400, lr=0.190250, batch loss=0.220913, epoch loss=5.646286Epoch=1, step=2400, lr=0.190250, epoch loss=5.646286Batch=59, step=2460, lr=0.190000, batch loss=0.229783, epoch loss=0.229783Batch=119, step=2520, lr=0.189500, batch loss=0.194354, epoch loss=0.424136Batch=179, step=2580, lr=0.189500, batch loss=0.222071, epoch loss=0.646208Batch=239, step=2640, lr=0.189250, batch loss=0.328146, epoch loss=0.974353Batch=299, step=2700, lr=0.188750, batch loss=0.205348, epoch loss=1.179701Batch=359, step=2760, lr=0.188750, batch loss=0.293501, epoch loss=1.473202Batch=419, step=2820, lr=0.188500, batch loss=0.281318, epoch loss=1.754520Batch=479, step=2880, lr=0.188250, batch loss=0.256262, epoch loss=2.010782Batch=539, step=2940, lr=0.188000, batch loss=0.196451, epoch loss=2.207233Batch=599, step=3000, lr=0.187500, batch loss=0.242431, epoch loss=2.449664Batch=659, step=3060, lr=0.187500, batch loss=0.347932, epoch loss=2.797595Batch=719, step=3120, lr=0.187000, batch loss=0.347011, epoch loss=3.144606Batch=779, step=3180, lr=0.187000, batch loss=0.366023, epoch loss=3.510629Batch=839, step=3240, lr=0.186500, batch loss=0.323408, epoch loss=3.834037Batch=899, step=3300, lr=0.186500, batch loss=0.291399, epoch loss=4.125437Batch=959, step=3360, lr=0.186250, batch loss=0.233344, epoch loss=4.358781Batch=1019, step=3420, lr=0.185750, batch loss=0.344987, epoch loss=4.703769Batch=1079, step=3480, lr=0.185500, batch loss=0.227337, epoch loss=4.931106Batch=1139, step=3540, lr=0.185250, batch loss=0.267202, epoch loss=5.198308Batch=1199, step=3600, lr=0.185000, batch loss=0.201650, epoch loss=5.399957Epoch=2, step=3600, lr=0.185000, epoch loss=5.399957Batch=59, step=3660, lr=0.184750, batch loss=0.225344, epoch loss=0.225344Batch=119, step=3720, lr=0.184750, batch loss=0.188200, epoch loss=0.413545Batch=179, step=3780, lr=0.184500, batch loss=0.213532, epoch loss=0.627076Batch=239, step=3840, lr=0.184000, batch loss=0.317202, epoch loss=0.944279Batch=299, step=3900, lr=0.184000, batch loss=0.205777, epoch loss=1.150055Batch=359, step=3960, lr=0.183500, batch loss=0.291413, 
epoch loss=1.441469Batch=419, step=4020, lr=0.183500, batch loss=0.290745, epoch loss=1.732213Batch=479, step=4080, lr=0.183000, batch loss=0.259067, epoch loss=1.991281Batch=539, step=4140, lr=0.182750, batch loss=0.196993, epoch loss=2.188274Batch=599, step=4200, lr=0.182750, batch loss=0.232010, epoch loss=2.420284Batch=659, step=4260, lr=0.182250, batch loss=0.337471, epoch loss=2.757755Batch=719, step=4320, lr=0.182000, batch loss=0.346242, epoch loss=3.103997Batch=779, step=4380, lr=0.182000, batch loss=0.347025, epoch loss=3.451022Batch=839, step=4440, lr=0.181500, batch loss=0.318726, epoch loss=3.769748Batch=899, step=4500, lr=0.181250, batch loss=0.285000, epoch loss=4.054748Batch=959, step=4560, lr=0.181000, batch loss=0.215494, epoch loss=4.270242Batch=1019, step=4620, lr=0.181000, batch loss=0.348742, epoch loss=4.618984Batch=1079, step=4680, lr=0.180500, batch loss=0.212014, epoch loss=4.830998Batch=1139, step=4740, lr=0.180500, batch loss=0.248273, epoch loss=5.079272Batch=1199, step=4800, lr=0.180000, batch loss=0.191126, epoch loss=5.270398Epoch=3, step=4800, lr=0.180000, epoch loss=5.270398Batch=59, step=4860, lr=0.179750, batch loss=0.226648, epoch loss=0.226648Batch=119, step=4920, lr=0.179500, batch loss=0.185808, epoch loss=0.412456Batch=179, step=4980, lr=0.179250, batch loss=0.207928, epoch loss=0.620384Batch=239, step=5040, lr=0.179000, batch loss=0.310696, epoch loss=0.931080Batch=299, step=5100, lr=0.178750, batch loss=0.205838, epoch loss=1.136918Batch=359, step=5160, lr=0.178500, batch loss=0.271569, epoch loss=1.408487Batch=419, step=5220, lr=0.178250, batch loss=0.265307, epoch loss=1.673794Batch=479, step=5280, lr=0.178250, batch loss=0.239379, epoch loss=1.913173Batch=539, step=5340, lr=0.178000, batch loss=0.191164, epoch loss=2.104337Batch=599, step=5400, lr=0.177750, batch loss=0.229110, epoch loss=2.333446Batch=659, step=5460, lr=0.177500, batch loss=0.325064, epoch loss=2.658510Batch=719, step=5520, lr=0.177250, batch loss=0.327294, epoch loss=2.985804Batch=779, step=5580, lr=0.176750, batch loss=0.349504, epoch loss=3.335307Batch=839, step=5640, lr=0.176750, batch loss=0.307283, epoch loss=3.642590Batch=899, step=5700, lr=0.176250, batch loss=0.264524, epoch loss=3.907114Batch=959, step=5760, lr=0.176000, batch loss=0.193446, epoch loss=4.100560Batch=1019, step=5820, lr=0.175750, batch loss=0.309711, epoch loss=4.410272Batch=1079, step=5880, lr=0.175500, batch loss=0.188472, epoch loss=4.598744Batch=1139, step=5940, lr=0.175250, batch loss=0.225528, epoch loss=4.824272Batch=1199, step=6000, lr=0.175000, batch loss=0.187865, epoch loss=5.012137Epoch=4, step=6000, lr=0.175000, epoch loss=5.012137Batch=59, step=6060, lr=0.175000, batch loss=0.238322, epoch loss=0.238322Batch=119, step=6120, lr=0.174500, batch loss=0.190807, epoch loss=0.429129Batch=179, step=6180, lr=0.174250, batch loss=0.201482, epoch loss=0.630612Batch=239, step=6240, lr=0.174000, batch loss=0.302408, epoch loss=0.933019Batch=299, step=6300, lr=0.173750, batch loss=0.212801, epoch loss=1.145820Batch=359, step=6360, lr=0.173750, batch loss=0.274336, epoch loss=1.420156Batch=419, step=6420, lr=0.173250, batch loss=0.268733, epoch loss=1.688889Batch=479, step=6480, lr=0.173000, batch loss=0.242569, epoch loss=1.931458Batch=539, step=6540, lr=0.172750, batch loss=0.197985, epoch loss=2.129444Batch=599, step=6600, lr=0.172500, batch loss=0.239939, epoch loss=2.369382Batch=659, step=6660, lr=0.172250, batch loss=0.315887, epoch loss=2.685269Batch=719, step=6720, lr=0.172000, batch 
loss=0.318863, epoch loss=3.004132Batch=779, step=6780, lr=0.171750, batch loss=0.332646, epoch loss=3.336778Batch=839, step=6840, lr=0.171750, batch loss=0.308156, epoch loss=3.644934Batch=899, step=6900, lr=0.171250, batch loss=0.269352, epoch loss=3.914286Batch=959, step=6960, lr=0.171250, batch loss=0.209530, epoch loss=4.123815Batch=1019, step=7020, lr=0.170750, batch loss=0.331101, epoch loss=4.454917Batch=1079, step=7080, lr=0.170500, batch loss=0.180893, epoch loss=4.635810Batch=1139, step=7140, lr=0.170500, batch loss=0.216945, epoch loss=4.852755Batch=1199, step=7200, lr=0.170250, batch loss=0.183133, epoch loss=5.035889Epoch=5, step=7200, lr=0.170250, epoch loss=5.035889Batch=59, step=7260, lr=0.169750, batch loss=0.234575, epoch loss=0.234575Batch=119, step=7320, lr=0.169500, batch loss=0.184422, epoch loss=0.418996Batch=179, step=7380, lr=0.169500, batch loss=0.197208, epoch loss=0.616204Batch=239, step=7440, lr=0.169000, batch loss=0.295566, epoch loss=0.911771Batch=299, step=7500, lr=0.169000, batch loss=0.204768, epoch loss=1.116538Batch=359, step=7560, lr=0.168500, batch loss=0.264717, epoch loss=1.381255Batch=419, step=7620, lr=0.168250, batch loss=0.259406, epoch loss=1.640661Batch=479, step=7680, lr=0.168250, batch loss=0.236281, epoch loss=1.876942Batch=539, step=7740, lr=0.167750, batch loss=0.190114, epoch loss=2.067056Batch=599, step=7800, lr=0.167500, batch loss=0.230181, epoch loss=2.297237Batch=659, step=7860, lr=0.167250, batch loss=0.305081, epoch loss=2.602317Batch=719, step=7920, lr=0.167250, batch loss=0.306108, epoch loss=2.908426Batch=779, step=7980, lr=0.166750, batch loss=0.326565, epoch loss=3.234990Batch=839, step=8040, lr=0.166750, batch loss=0.295012, epoch loss=3.530002Batch=899, step=8100, lr=0.166500, batch loss=0.261854, epoch loss=3.791856Batch=959, step=8160, lr=0.166000, batch loss=0.203723, epoch loss=3.995580Batch=1019, step=8220, lr=0.165750, batch loss=0.317980, epoch loss=4.313560Batch=1079, step=8280, lr=0.165750, batch loss=0.168912, epoch loss=4.482472Batch=1139, step=8340, lr=0.165500, batch loss=0.205879, epoch loss=4.688350Batch=1199, step=8400, lr=0.165000, batch loss=0.185393, epoch loss=4.873743Epoch=6, step=8400, lr=0.165000, epoch loss=4.873743Batch=59, step=8460, lr=0.164750, batch loss=0.216487, epoch loss=0.216487Batch=119, step=8520, lr=0.164750, batch loss=0.185447, epoch loss=0.401934Batch=179, step=8580, lr=0.164250, batch loss=0.188923, epoch loss=0.590857Batch=239, step=8640, lr=0.164000, batch loss=0.280513, epoch loss=0.871370Batch=299, step=8700, lr=0.164000, batch loss=0.192104, epoch loss=1.063474Batch=359, step=8760, lr=0.163500, batch loss=0.249940, epoch loss=1.313414Batch=419, step=8820, lr=0.163250, batch loss=0.245641, epoch loss=1.559055Batch=479, step=8880, lr=0.163250, batch loss=0.228935, epoch loss=1.787989Batch=539, step=8940, lr=0.162750, batch loss=0.179975, epoch loss=1.967964Batch=599, step=9000, lr=0.162750, batch loss=0.217316, epoch loss=2.185280Batch=659, step=9060, lr=0.162500, batch loss=0.295916, epoch loss=2.481197Batch=719, step=9120, lr=0.162000, batch loss=0.298588, epoch loss=2.779784Batch=779, step=9180, lr=0.161750, batch loss=0.317062, epoch loss=3.096847Batch=839, step=9240, lr=0.161500, batch loss=0.280757, epoch loss=3.377604Batch=899, step=9300, lr=0.161500, batch loss=0.254246, epoch loss=3.631850Batch=959, step=9360, lr=0.161000, batch loss=0.209325, epoch loss=3.841175Batch=1019, step=9420, lr=0.161000, batch loss=0.299621, epoch loss=4.140796Batch=1079, step=9480, 
lr=0.160750, batch loss=0.169283, epoch loss=4.310079Batch=1139, step=9540, lr=0.160500, batch loss=0.201504, epoch loss=4.511583Batch=1199, step=9600, lr=0.160000, batch loss=0.164654, epoch loss=4.676237Epoch=7, step=9600, lr=0.160000, epoch loss=4.676237Batch=59, step=9660, lr=0.159750, batch loss=0.189250, epoch loss=0.189250Batch=119, step=9720, lr=0.159750, batch loss=0.159594, epoch loss=0.348844Batch=179, step=9780, lr=0.159250, batch loss=0.179689, epoch loss=0.528534Batch=239, step=9840, lr=0.159000, batch loss=0.265555, epoch loss=0.794089Batch=299, step=9900, lr=0.158750, batch loss=0.186793, epoch loss=0.980882Batch=359, step=9960, lr=0.158750, batch loss=0.241953, epoch loss=1.222835Batch=419, step=10020, lr=0.158500, batch loss=0.232829, epoch loss=1.455665Batch=479, step=10080, lr=0.158000, batch loss=0.215402, epoch loss=1.671067Batch=539, step=10140, lr=0.158000, batch loss=0.170355, epoch loss=1.841422Batch=599, step=10200, lr=0.157750, batch loss=0.203620, epoch loss=2.045041Batch=659, step=10260, lr=0.157250, batch loss=0.282094, epoch loss=2.327135Batch=719, step=10320, lr=0.157250, batch loss=0.278627, epoch loss=2.605762Batch=779, step=10380, lr=0.157000, batch loss=0.300059, epoch loss=2.905820Batch=839, step=10440, lr=0.156750, batch loss=0.271801, epoch loss=3.177621Batch=899, step=10500, lr=0.156500, batch loss=0.239287, epoch loss=3.416908Batch=959, step=10560, lr=0.156250, batch loss=0.199781, epoch loss=3.616688Batch=1019, step=10620, lr=0.155750, batch loss=0.281090, epoch loss=3.897779Batch=1079, step=10680, lr=0.155500, batch loss=0.172631, epoch loss=4.070409Batch=1139, step=10740, lr=0.155250, batch loss=0.204937, epoch loss=4.275346Batch=1199, step=10800, lr=0.155250, batch loss=0.155716, epoch loss=4.431062Epoch=8, step=10800, lr=0.155250, epoch loss=4.431062Batch=59, step=10860, lr=0.155000, batch loss=0.181480, epoch loss=0.181480Batch=119, step=10920, lr=0.154750, batch loss=0.150618, epoch loss=0.332099Batch=179, step=10980, lr=0.154500, batch loss=0.167495, epoch loss=0.499594Batch=239, step=11040, lr=0.154000, batch loss=0.245278, epoch loss=0.744872Batch=299, step=11100, lr=0.154000, batch loss=0.164990, epoch loss=0.909862Batch=359, step=11160, lr=0.153750, batch loss=0.226169, epoch loss=1.136031Batch=419, step=11220, lr=0.153500, batch loss=0.226518, epoch loss=1.362549Batch=479, step=11280, lr=0.153000, batch loss=0.206933, epoch loss=1.569482Batch=539, step=11340, lr=0.152750, batch loss=0.163756, epoch loss=1.733238Batch=599, step=11400, lr=0.152500, batch loss=0.180981, epoch loss=1.914219Batch=659, step=11460, lr=0.152500, batch loss=0.264033, epoch loss=2.178252Batch=719, step=11520, lr=0.152000, batch loss=0.260537, epoch loss=2.438789Batch=779, step=11580, lr=0.152000, batch loss=0.272253, epoch loss=2.711042Batch=839, step=11640, lr=0.151500, batch loss=0.257334, epoch loss=2.968377Batch=899, step=11700, lr=0.151500, batch loss=0.217639, epoch loss=3.186015Batch=959, step=11760, lr=0.151250, batch loss=0.173535, epoch loss=3.359550Batch=1019, step=11820, lr=0.150750, batch loss=0.262090, epoch loss=3.621640Batch=1079, step=11880, lr=0.150750, batch loss=0.143276, epoch loss=3.764916Batch=1139, step=11940, lr=0.150500, batch loss=0.182467, epoch loss=3.947383Batch=1199, step=12000, lr=0.150000, batch loss=0.139778, epoch loss=4.087161Epoch=9, step=12000, lr=0.150000, epoch loss=4.087161Batch=59, step=12060, lr=0.150000, batch loss=0.160983, epoch loss=0.160983Batch=119, step=12120, lr=0.149750, batch loss=0.138182, epoch 
loss=0.299166Batch=179, step=12180, lr=0.149250, batch loss=0.151597, epoch loss=0.450763Batch=239, step=12240, lr=0.149250, batch loss=0.220198, epoch loss=0.670961Batch=299, step=12300, lr=0.148750, batch loss=0.143471, epoch loss=0.814432Batch=359, step=12360, lr=0.148500, batch loss=0.199207, epoch loss=1.013639Batch=419, step=12420, lr=0.148250, batch loss=0.207436, epoch loss=1.221075Batch=479, step=12480, lr=0.148000, batch loss=0.179231, epoch loss=1.400306Batch=539, step=12540, lr=0.147750, batch loss=0.143026, epoch loss=1.543331Batch=599, step=12600, lr=0.147500, batch loss=0.149366, epoch loss=1.692698Batch=659, step=12660, lr=0.147500, batch loss=0.227761, epoch loss=1.920459Batch=719, step=12720, lr=0.147250, batch loss=0.237073, epoch loss=2.157532Batch=779, step=12780, lr=0.146750, batch loss=0.265771, epoch loss=2.423303Batch=839, step=12840, lr=0.146500, batch loss=0.235353, epoch loss=2.658656Batch=899, step=12900, lr=0.146250, batch loss=0.204797, epoch loss=2.863453Batch=959, step=12960, lr=0.146250, batch loss=0.147251, epoch loss=3.010704Batch=1019, step=13020, lr=0.145750, batch loss=0.257534, epoch loss=3.268238Batch=1079, step=13080, lr=0.145500, batch loss=0.120589, epoch loss=3.388827Batch=1139, step=13140, lr=0.145250, batch loss=0.160210, epoch loss=3.549037Batch=1199, step=13200, lr=0.145000, batch loss=0.119645, epoch loss=3.668682Epoch=10, step=13200, lr=0.145000, epoch loss=3.668682Batch=59, step=13260, lr=0.144750, batch loss=0.140369, epoch loss=0.140369Batch=119, step=13320, lr=0.144500, batch loss=0.123704, epoch loss=0.264074Batch=179, step=13380, lr=0.144250, batch loss=0.129113, epoch loss=0.393187Batch=239, step=13440, lr=0.144250, batch loss=0.193375, epoch loss=0.586562Batch=299, step=13500, lr=0.143750, batch loss=0.118178, epoch loss=0.704740Batch=359, step=13560, lr=0.143500, batch loss=0.164993, epoch loss=0.869732Batch=419, step=13620, lr=0.143250, batch loss=0.168902, epoch loss=1.038635Batch=479, step=13680, lr=0.143250, batch loss=0.149839, epoch loss=1.188474Batch=539, step=13740, lr=0.143000, batch loss=0.117897, epoch loss=1.306371Batch=599, step=13800, lr=0.142500, batch loss=0.121061, epoch loss=1.427431Batch=659, step=13860, lr=0.142250, batch loss=0.178213, epoch loss=1.605644Batch=719, step=13920, lr=0.142000, batch loss=0.173654, epoch loss=1.779299Batch=779, step=13980, lr=0.142000, batch loss=0.180798, epoch loss=1.960096Batch=839, step=14040, lr=0.141500, batch loss=0.205729, epoch loss=2.165825Batch=899, step=14100, lr=0.141250, batch loss=0.224662, epoch loss=2.390487Batch=959, step=14160, lr=0.141000, batch loss=0.106742, epoch loss=2.497229Batch=1019, step=14220, lr=0.140750, batch loss=0.193194, epoch loss=2.690423Batch=1079, step=14280, lr=0.140500, batch loss=0.082592, epoch loss=2.773015Batch=1139, step=14340, lr=0.140500, batch loss=0.128114, epoch loss=2.901129Batch=1199, step=14400, lr=0.140000, batch loss=0.087166, epoch loss=2.988294Epoch=11, step=14400, lr=0.140000, epoch loss=2.988294Batch=59, step=14460, lr=0.139750, batch loss=0.106270, epoch loss=0.106270Batch=119, step=14520, lr=0.139750, batch loss=0.106898, epoch loss=0.213167Batch=179, step=14580, lr=0.139500, batch loss=0.101881, epoch loss=0.315048Batch=239, step=14640, lr=0.139000, batch loss=0.143698, epoch loss=0.458746Batch=299, step=14700, lr=0.139000, batch loss=0.076592, epoch loss=0.535338Batch=359, step=14760, lr=0.138750, batch loss=0.122149, epoch loss=0.657487Batch=419, step=14820, lr=0.138500, batch loss=0.130962, epoch 
loss=0.788449Batch=479, step=14880, lr=0.138250, batch loss=0.104188, epoch loss=0.892638Batch=539, step=14940, lr=0.138000, batch loss=0.109107, epoch loss=1.001745Batch=599, step=15000, lr=0.137500, batch loss=0.084657, epoch loss=1.086402Batch=659, step=15060, lr=0.137250, batch loss=0.128131, epoch loss=1.214533Batch=719, step=15120, lr=0.137250, batch loss=0.130057, epoch loss=1.344589Batch=779, step=15180, lr=0.137000, batch loss=0.169302, epoch loss=1.513891Batch=839, step=15240, lr=0.136750, batch loss=0.168836, epoch loss=1.682727Batch=899, step=15300, lr=0.136500, batch loss=0.293647, epoch loss=1.976374Batch=959, step=15360, lr=0.136250, batch loss=0.059292, epoch loss=2.035666Batch=1019, step=15420, lr=0.136000, batch loss=0.139571, epoch loss=2.175237Batch=1079, step=15480, lr=0.135500, batch loss=0.040839, epoch loss=2.216075Batch=1139, step=15540, lr=0.135250, batch loss=0.096354, epoch loss=2.312429Batch=1199, step=15600, lr=0.135250, batch loss=0.061144, epoch loss=2.373573Epoch=12, step=15600, lr=0.135250, epoch loss=2.373573Batch=59, step=15660, lr=0.134750, batch loss=0.088960, epoch loss=0.088960Batch=119, step=15720, lr=0.134750, batch loss=0.140586, epoch loss=0.229546Batch=179, step=15780, lr=0.134250, batch loss=0.101821, epoch loss=0.331367Batch=239, step=15840, lr=0.134000, batch loss=0.100913, epoch loss=0.432280Batch=299, step=15900, lr=0.133750, batch loss=0.044900, epoch loss=0.477181Batch=359, step=15960, lr=0.133750, batch loss=0.083101, epoch loss=0.560281Batch=419, step=16020, lr=0.133500, batch loss=0.079446, epoch loss=0.639727Batch=479, step=16080, lr=0.133000, batch loss=0.055493, epoch loss=0.695221Batch=539, step=16140, lr=0.132750, batch loss=0.077778, epoch loss=0.772999Batch=599, step=16200, lr=0.132750, batch loss=0.174352, epoch loss=0.947351Batch=659, step=16260, lr=0.132250, batch loss=0.100948, epoch loss=1.048299Batch=719, step=16320, lr=0.132250, batch loss=0.119845, epoch loss=1.168144Batch=779, step=16380, lr=0.131750, batch loss=0.268265, epoch loss=1.436409Batch=839, step=16440, lr=0.131500, batch loss=0.090343, epoch loss=1.526751Batch=899, step=16500, lr=0.131500, batch loss=0.075427, epoch loss=1.602178Batch=959, step=16560, lr=0.131250, batch loss=0.031488, epoch loss=1.633666Batch=1019, step=16620, lr=0.131000, batch loss=0.070173, epoch loss=1.703840Batch=1079, step=16680, lr=0.130750, batch loss=0.045064, epoch loss=1.748904Batch=1139, step=16740, lr=0.130250, batch loss=0.078643, epoch loss=1.827547Batch=1199, step=16800, lr=0.130000, batch loss=0.034848, epoch loss=1.862395Epoch=13, step=16800, lr=0.130000, epoch loss=1.862395Batch=59, step=16860, lr=0.129750, batch loss=0.037089, epoch loss=0.037089Batch=119, step=16920, lr=0.129500, batch loss=0.040121, epoch loss=0.077210Batch=179, step=16980, lr=0.129500, batch loss=0.044102, epoch loss=0.121312Batch=239, step=17040, lr=0.129000, batch loss=0.060726, epoch loss=0.182037Batch=299, step=17100, lr=0.128750, batch loss=0.024308, epoch loss=0.206346Batch=359, step=17160, lr=0.128500, batch loss=0.056052, epoch loss=0.262398Batch=419, step=17220, lr=0.128250, batch loss=0.103770, epoch loss=0.366168Batch=479, step=17280, lr=0.128250, batch loss=0.023909, epoch loss=0.390077Batch=539, step=17340, lr=0.127750, batch loss=0.028666, epoch loss=0.418743Batch=599, step=17400, lr=0.127500, batch loss=0.037427, epoch loss=0.456170Batch=659, step=17460, lr=0.127250, batch loss=0.047028, epoch loss=0.503197Batch=719, step=17520, lr=0.127000, batch loss=0.042085, epoch 
loss=0.545282
Batch=779, step=17580, lr=0.127000, batch loss=0.051611, epoch loss=0.596893
Batch=839, step=17640, lr=0.126500, batch loss=0.096225, epoch loss=0.693118
Batch=899, step=17700, lr=0.126250, batch loss=0.123382, epoch loss=0.816500
Batch=959, step=17760, lr=0.126000, batch loss=0.038650, epoch loss=0.855150
Batch=1019, step=17820, lr=0.125750, batch loss=0.080530, epoch loss=0.935681
Batch=1079, step=17880, lr=0.125500, batch loss=0.017557, epoch loss=0.953238
Batch=1139, step=17940, lr=0.125250, batch loss=0.031132, epoch loss=0.984370
Batch=1199, step=18000, lr=0.125250, batch loss=0.016075, epoch loss=1.000445
Epoch=14, step=18000, lr=0.125250, epoch loss=1.000445
Batch=59, step=18060, lr=0.124750, batch loss=0.013313, epoch loss=0.013313
Batch=119, step=18120, lr=0.124500, batch loss=0.020116, epoch loss=0.033429
Batch=179, step=18180, lr=0.124500, batch loss=0.034385, epoch loss=0.067813
Batch=239, step=18240, lr=0.124000, batch loss=0.038912, epoch loss=0.106725
Batch=299, step=18300, lr=0.123750, batch loss=0.015838, epoch loss=0.122563
Batch=359, step=18360, lr=0.123500, batch loss=0.024521, epoch loss=0.147084
Batch=419, step=18420, lr=0.123500, batch loss=0.027550, epoch loss=0.174633
Batch=479, step=18480, lr=0.123000, batch loss=0.021702, epoch loss=0.196335
Batch=539, step=18540, lr=0.123000, batch loss=0.044573, epoch loss=0.240908
Batch=599, step=18600, lr=0.122500, batch loss=0.026351, epoch loss=0.267260
Batch=659, step=18660, lr=0.122500, batch loss=0.037053, epoch loss=0.304312
Batch=719, step=18720, lr=0.122000, batch loss=0.032164, epoch loss=0.336476
Batch=779, step=18780, lr=0.122000, batch loss=0.090091, epoch loss=0.426567
Batch=839, step=18840, lr=0.121500, batch loss=0.076465, epoch loss=0.503032
Batch=899, step=18900, lr=0.121500, batch loss=0.110621, epoch loss=0.613653
Batch=959, step=18960, lr=0.121000, batch loss=0.015480, epoch loss=0.629133
Batch=1019, step=19020, lr=0.121000, batch loss=0.024062, epoch loss=0.653195
Batch=1079, step=19080, lr=0.120500, batch loss=0.004280, epoch loss=0.657475
Batch=1139, step=19140, lr=0.120250, batch loss=0.020941, epoch loss=0.678416
Batch=1199, step=19200, lr=0.120000, batch loss=0.009740, epoch loss=0.688156
Epoch=15, step=19200, lr=0.120000, epoch loss=0.688156
Batch=59, step=19260, lr=0.119750, batch loss=0.004604, epoch loss=0.004604
Batch=119, step=19320, lr=0.119500, batch loss=0.011304, epoch loss=0.015908
Batch=179, step=19380, lr=0.119500, batch loss=0.019644, epoch loss=0.035552
Batch=239, step=19440, lr=0.119000, batch loss=0.020124, epoch loss=0.055677
Batch=299, step=19500, lr=0.119000, batch loss=0.004801, epoch loss=0.060477
Batch=359, step=19560, lr=0.118750, batch loss=0.017125, epoch loss=0.077603
Batch=419, step=19620, lr=0.118250, batch loss=0.020509, epoch loss=0.098112
Batch=479, step=19680, lr=0.118250, batch loss=0.006878, epoch loss=0.104990
Batch=539, step=19740, lr=0.117750, batch loss=0.017522, epoch loss=0.122512
Batch=599, step=19800, lr=0.117750, batch loss=0.022326, epoch loss=0.144839
Batch=659, step=19860, lr=0.117250, batch loss=0.017640, epoch loss=0.162478
Batch=719, step=19920, lr=0.117250, batch loss=0.038946, epoch loss=0.201424
Batch=779, step=19980, lr=0.117000, batch loss=0.078147, epoch loss=0.279571
Batch=839, step=20040, lr=0.116750, batch loss=0.030639, epoch loss=0.310210
Batch=899, step=20100, lr=0.116500, batch loss=0.028476, epoch loss=0.338686
Batch=959, step=20160, lr=0.116000, batch loss=0.017525, epoch loss=0.356212
Batch=1019, step=20220, lr=0.116000, batch loss=0.024361, epoch loss=0.380572
Batch=1079, step=20280, lr=0.115500, batch loss=0.002465, epoch loss=0.383037
Batch=1139, step=20340, lr=0.115250, batch loss=0.014484, epoch loss=0.397522
Batch=1199, step=20400, lr=0.115000, batch loss=0.005940, epoch loss=0.403462
Epoch=16, step=20400, lr=0.115000, epoch loss=0.403462
Batch=59, step=20460, lr=0.114750, batch loss=0.002895, epoch loss=0.002895
Batch=119, step=20520, lr=0.114750, batch loss=0.009394, epoch loss=0.012289
Batch=179, step=20580, lr=0.114500, batch loss=0.022442, epoch loss=0.034731
Batch=239, step=20640, lr=0.114000, batch loss=0.016221, epoch loss=0.050952
Batch=299, step=20700, lr=0.114000, batch loss=0.006153, epoch loss=0.057106
Batch=359, step=20760, lr=0.113500, batch loss=0.012897, epoch loss=0.070003
Batch=419, step=20820, lr=0.113250, batch loss=0.015911, epoch loss=0.085913
Batch=479, step=20880, lr=0.113250, batch loss=0.006956, epoch loss=0.092869
Batch=539, step=20940, lr=0.113000, batch loss=0.014929, epoch loss=0.107799
Batch=599, step=21000, lr=0.112500, batch loss=0.018903, epoch loss=0.126702
Batch=659, step=21060, lr=0.112500, batch loss=0.015227, epoch loss=0.141929
Batch=719, step=21120, lr=0.112250, batch loss=0.038449, epoch loss=0.180378
Batch=779, step=21180, lr=0.111750, batch loss=0.073581, epoch loss=0.253959
Batch=839, step=21240, lr=0.111500, batch loss=0.026096, epoch loss=0.280055
Batch=899, step=21300, lr=0.111500, batch loss=0.035525, epoch loss=0.315579
Batch=959, step=21360, lr=0.111250, batch loss=0.009783, epoch loss=0.325363
Batch=1019, step=21420, lr=0.110750, batch loss=0.010727, epoch loss=0.336090
Batch=1079, step=21480, lr=0.110750, batch loss=0.001475, epoch loss=0.337565
Batch=1139, step=21540, lr=0.110250, batch loss=0.012785, epoch loss=0.350351
Batch=1199, step=21600, lr=0.110250, batch loss=0.005424, epoch loss=0.355775
Epoch=17, step=21600, lr=0.110250, epoch loss=0.355775
Batch=59, step=21660, lr=0.110000, batch loss=0.002437, epoch loss=0.002437
Batch=119, step=21720, lr=0.109750, batch loss=0.006329, epoch loss=0.008766
Batch=179, step=21780, lr=0.109250, batch loss=0.012552, epoch loss=0.021318
Batch=239, step=21840, lr=0.109000, batch loss=0.009184, epoch loss=0.030502
Batch=299, step=21900, lr=0.109000, batch loss=0.003045, epoch loss=0.033547
Batch=359, step=21960, lr=0.108500, batch loss=0.015405, epoch loss=0.048953
Batch=419, step=22020, lr=0.108500, batch loss=0.011991, epoch loss=0.060944
Batch=479, step=22080, lr=0.108250, batch loss=0.002677, epoch loss=0.063621
Batch=539, step=22140, lr=0.107750, batch loss=0.017413, epoch loss=0.081034
Batch=599, step=22200, lr=0.107750, batch loss=0.016994, epoch loss=0.098028
Batch=659, step=22260, lr=0.107500, batch loss=0.014212, epoch loss=0.112240
Batch=719, step=22320, lr=0.107000, batch loss=0.026234, epoch loss=0.138473
Batch=779, step=22380, lr=0.107000, batch loss=0.042371, epoch loss=0.180845
Batch=839, step=22440, lr=0.106750, batch loss=0.021941, epoch loss=0.202786
Batch=899, step=22500, lr=0.106500, batch loss=0.022587, epoch loss=0.225372
Batch=959, step=22560, lr=0.106000, batch loss=0.010977, epoch loss=0.236350
Batch=1019, step=22620, lr=0.105750, batch loss=0.009056, epoch loss=0.245406
Batch=1079, step=22680, lr=0.105500, batch loss=0.000099, epoch loss=0.245505
Batch=1139, step=22740, lr=0.105500, batch loss=0.010172, epoch loss=0.255677
Batch=1199, step=22800, lr=0.105250, batch loss=0.004659, epoch loss=0.260336
Epoch=18, step=22800, lr=0.105250, epoch loss=0.260336
Batch=59, step=22860, lr=0.105000, batch loss=0.001763, epoch loss=0.001763
Batch=119, step=22920, lr=0.104500, batch loss=0.005535, epoch loss=0.007298
Batch=179, step=22980, lr=0.104250, batch loss=0.010633, epoch loss=0.017931
Batch=239, step=23040, lr=0.104000, batch loss=0.011244, epoch loss=0.029175
Batch=299, step=23100, lr=0.104000, batch loss=0.007796, epoch loss=0.036971
Batch=359, step=23160, lr=0.103500, batch loss=0.010995, epoch loss=0.047965
Batch=419, step=23220, lr=0.103500, batch loss=0.010655, epoch loss=0.058621
Batch=479, step=23280, lr=0.103000, batch loss=0.002500, epoch loss=0.061121
Batch=539, step=23340, lr=0.102750, batch loss=0.017241, epoch loss=0.078362
Batch=599, step=23400, lr=0.102750, batch loss=0.013830, epoch loss=0.092192
Batch=659, step=23460, lr=0.102250, batch loss=0.010236, epoch loss=0.102427
Batch=719, step=23520, lr=0.102250, batch loss=0.013702, epoch loss=0.116129
Batch=779, step=23580, lr=0.101750, batch loss=0.022034, epoch loss=0.138163
Batch=839, step=23640, lr=0.101500, batch loss=0.027770, epoch loss=0.165933
Batch=899, step=23700, lr=0.101500, batch loss=0.021763, epoch loss=0.187696
Batch=959, step=23760, lr=0.101000, batch loss=0.009945, epoch loss=0.197641
Batch=1019, step=23820, lr=0.100750, batch loss=0.007138, epoch loss=0.204779
Batch=1079, step=23880, lr=0.100500, batch loss=0.001141, epoch loss=0.205920
Batch=1139, step=23940, lr=0.100500, batch loss=0.008657, epoch loss=0.214578
Batch=1199, step=24000, lr=0.100250, batch loss=0.004587, epoch loss=0.219164
Epoch=19, step=24000, lr=0.100250, epoch loss=0.219164
Half-moons scatterplot and decision boundary:
[ASCII plot: the two half-moon point classes are drawn with '#' and '%'; the learned decision regions fill the background with '*' and '.']
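
For context, the training trace above appears to come from the half-moons classification example in the test suite: each "Batch=..." line reports the loss of one minibatch together with a running per-epoch sum (e.g. 0.596893 + 0.096225 = 0.693118 across the first two lines), the sum resets at each "Epoch=..." line, and the learning rate decays as the global step advances. The following self-contained OCaml sketch reproduces only this reporting pattern; the decay schedule, the synthetic batch loss, and every constant here are assumptions made for illustration, not OCANNL's actual training loop.

(* Illustrative only: plain OCaml, no OCANNL dependency.  The schedule,
   [fake_batch_loss] and the constants are hypothetical. *)
let batches_per_epoch = 1200 (* Batch=1199 is the last reported index above. *)
let total_steps = 48_000     (* hypothetical horizon for the decay *)

(* A hypothetical linear decay; the real schedule may differ. *)
let learning_rate ~step =
  0.2 *. (1. -. float_of_int step /. float_of_int total_steps)

(* Stand-in for the loss of one minibatch; purely synthetic. *)
let fake_batch_loss ~step = 1. /. float_of_int (step + 1)

let () =
  let step = ref 0 in
  for epoch = 0 to 19 do
    let epoch_loss = ref 0. in
    for batch = 0 to batches_per_epoch - 1 do
      incr step;
      let lr = learning_rate ~step:!step in
      let batch_loss = fake_batch_loss ~step:!step in
      (* "epoch loss" accumulates the batch losses within the epoch. *)
      epoch_loss := !epoch_loss +. batch_loss;
      (* Report every 60 minibatches, like the log above. *)
      if (batch + 1) mod 60 = 0 then
        Printf.printf
          "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
          batch !step lr batch_loss !epoch_loss
    done;
    Printf.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n"
      epoch !step (learning_rate ~step:!step) !epoch_loss
  done

Running this prints lines of the same shape as the trace; the numeric values are of course unrelated to the real run.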
2025-05-09 14:38.16 ---> saved as "2f288e1cafc0b2bf1cd1c884c91396612b50093c0abe8198df2528914681ee83"
Job succeeded
2025-05-09 14:38.16: Job succeeded
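
The scatterplot summarized above depicts the classic two interleaved half-moons. As a rough idea of what such data looks like, here is a hypothetical generator in a few lines of OCaml; it is not the generator the test actually uses, and the noise level, moon offsets and sample count are arbitrary choices.

(* Hypothetical data generator for illustration only: two interleaved
   half-moons, the kind of dataset suggested by the plot above. *)
let pi = 4.0 *. atan 1.0

(* Uniform noise in [-amp, amp]; a Gaussian would also do. *)
let noise amp = amp *. (2.0 *. Random.float 1.0 -. 1.0)

(* Returns [(x, y, label); ...] with label 0 for the upper moon and
   1 for the lower, mirrored moon. *)
let half_moons ~n ~noise_amp =
  List.init n (fun i ->
      let t = pi *. float_of_int (i / 2) /. float_of_int (n / 2) in
      if i mod 2 = 0 then
        (cos t +. noise noise_amp, sin t +. noise noise_amp, 0)
      else
        (1.0 -. cos t +. noise noise_amp,
         0.5 -. sin t +. noise noise_amp, 1))

let () =
  Random.self_init ();
  half_moons ~n:20 ~noise_amp:0.1
  |> List.iter (fun (x, y, label) ->
         Printf.printf "% .3f % .3f  class=%d\n" x y label)

A classifier trained on points like these should separate the two crescents roughly along the S-shaped boundary that the '*' and '.' regions trace in the plot above.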