2025-03-20 21:04.47: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (ccaf459c55f1e1dab014a65af54e1ba2ec3b9ad0) (linux-x86_64:fedora-41-5.3_opam-2.3)
Base: ocaml/opam:fedora-41-ocaml-5.3@sha256:fdc2429ee5b38ad9a34e9df8dcce7161f1b807433a983120453cb37a7b29e925
Opam project build
To reproduce locally:
git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard ccaf459c
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:fedora-41-ocaml-5.3@sha256:fdc2429ee5b38ad9a34e9df8dcce7161f1b807433a983120453cb37a7b29e925
# fedora-41-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo dnf install -y findutils
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
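The last RUN step in the Dockerfile drives the whole test suite through dune's @install, @check and @runtest aliases, and the pinned DEPS include ppx_inline_test and ppx_expect, which is where the inline_test_runner invocations later in this log come from. As a hedged illustration only (a generic ppx_expect test, not code from the ocannl repository), the kind of inline test that @runtest executes looks like this:

(* Generic ppx_expect example, not taken from the ocannl sources.
   It needs a dune stanza with (inline_tests) and
   (preprocess (pps ppx_expect)) so that `dune build @runtest`
   runs it and diffs the captured output against [%expect]. *)
let%expect_test "addition" =
  Printf.printf "%d" (2 + 3);
  [%expect {| 5 |}]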
2025-03-20 21:04.47: Using cache hint "ahrefs/ocannl-ocaml/opam:fedora-41-ocaml-5.3@sha256:fdc2429ee5b38ad9a34e9df8dcce7161f1b807433a983120453cb37a7b29e925-fedora-41-5.3_opam-2.3-3fcdf15be1e8f7dcae915b4cdb940fd5"
2025-03-20 21:04.47: Using OBuilder spec:
((from ocaml/opam:fedora-41-ocaml-5.3@sha256:fdc2429ee5b38ad9a34e9df8dcce7161f1b807433a983120453cb37a7b29e925)
 (comment fedora-41-5.3_opam-2.3)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (network host) (shell "sudo dnf install -y findutils"))
 (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
 (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \\nopam pin add -yn arrayjit.dev './'"))
 (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'"))
 (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS"))
 (copy (src .) (dst /src))
 (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")))
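The spec passes build-time settings through (env ...) steps such as (env CI true); at run time the test binaries further down resolve OCANNL settings in a similarly layered way, logging lines like "Retrieving commandline, environment, or config file variable ocannl_log_level" and "Found 0, in the config file". A minimal sketch of such a command-line / environment / config-file lookup, with hypothetical helper names and file format rather than OCANNL's actual implementation:

(* Sketch only: layered lookup of a setting from key=value pairs on the
   command line, then the environment, then an "ocannl_config" file of
   key=value lines. All names here are assumptions for illustration. *)
let value_of_pair key s =
  let prefix = key ^ "=" in
  if String.starts_with ~prefix s then
    Some (String.sub s (String.length prefix) (String.length s - String.length prefix))
  else None

let lookup_setting key ~default =
  match List.find_map (value_of_pair key) (Array.to_list Sys.argv) with
  | Some v -> v
  | None -> (
      match Sys.getenv_opt key with
      | Some v -> v
      | None ->
          let from_file =
            if Sys.file_exists "ocannl_config" then
              In_channel.with_open_text "ocannl_config" In_channel.input_lines
              |> List.find_map (value_of_pair key)
            else None
          in
          Option.value from_file ~default)

(* e.g. mirroring the "Found 0, in the config file" lines below: *)
let () = print_endline (lookup_setting "ocannl_log_level" ~default:"0")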
2025-03-20 21:04.47: Waiting for resource in pool OCluster
2025-03-20 21:04.47: Waiting for worker…
2025-03-20 21:14.15: Got resource from pool OCluster
Building on toxis.caelum.ci.dev
HEAD is now at 367be265 Load log_level earlier to consistently log all settings
HEAD is now at ccaf459c Missing from previous commit: test update
(from ocaml/opam:fedora-41-ocaml-5.3@sha256:fdc2429ee5b38ad9a34e9df8dcce7161f1b807433a983120453cb37a7b29e925)
2025-03-20 21:16.53 ---> saved as "189447b5abf4f84b3141564fa16299d77370e888a929cebc6495802de6bce562"
/: (comment fedora-41-5.3_opam-2.3)
/: (user (uid 1000) (gid 1000))
/: (env CLICOLOR_FORCE 1)
/: (env OPAMCOLOR always)
/: (workdir /src)
/src: (run (network host) (shell "sudo dnf install -y findutils"))
Updating and loading repositories:
Fedora 41 - x86_64 - Updates 100% | 174.3 KiB/s | 23.4 KiB | 00m00s
Fedora 41 - x86_64 100% | 203.8 KiB/s | 26.5 KiB | 00m00s
Fedora 41 - x86_64 - Updates 100% | 5.5 MiB/s | 4.5 MiB | 00m01s
Repositories loaded.
Package "findutils-1:4.10.0-4.fc41.x86_64" is already installed.
Nothing to do.
2025-03-20 21:17.00 ---> saved as "947881be467bcf2fd69517dfbab49f9c08bd1fde805595ee55ebf2224778ca57"
/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-03-20 21:17.00 ---> saved as "92a7f2480fc833b8b6e6f7c055e96edecd7a23b7a1425abe59b46f58f5769a58"
/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.
This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.
Continue?
[y/n] yFormat upgrade done.<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>[ERROR] Could not update repository "opam-repository-archive": "/usr/bin/git fetch -q" exited with code 128 "fatal: unable to access 'https://github.com/ocaml/opam-repository-archive/': Could not resolve host: github.com"[default] synchronised from file:///home/opam/opam-repository2025-03-20 21:18.20 ---> saved as "22323ebe734803b688970a8a4b44abf93aa58863ccedfb0ae4ead813beb5698c"/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))Linux 5.15.0-134-genericThe OCaml toplevel, version 5.3.02.3.02025-03-20 21:18.20 ---> saved as "2024869fea33faf19369ee49d7e2bfcec68103aae198063a36b6d1df2607faff"/src: (workdir /src)/src: (run (shell "sudo chown opam /src"))2025-03-20 21:18.20 ---> saved as "b766911cfcef799c6c2bb8aec82e30e2bd662c95ed64a05054ff7a173457d537"/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))From https://github.com/ocaml/opam-repository* branch master -> FETCH_HEAD862a7640b1..6cf83229dd master -> origin/master4e25d0cf5f Merge pull request #27651 from lukstafi/opam-publish-ppx_minidebug.2.1.0<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>[opam-repository-archive] synchronised from git+https://github.com/ocaml/opam-repository-archive[default] synchronised from file:///home/opam/opam-repositoryEverything as up-to-date as possible (run with --verbose to show unavailable upgrades).However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.Nothing to do.# To update the current shell environment, run: eval $(opam env)2025-03-20 21:19.18 ---> saved as "ece3a959e2ef17442b29b6ddfc99e8140b85a2f969c4a3b7d543a174bcd5db3d"/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))2025-03-20 21:19.18 ---> saved as "a7995f649c73d9e2babfcd97b0e8e8ffa838d8e1a37a5b41cef0fa20c5171493"/src: (run (network host)(shell "opam pin add -yn neural_nets_lib.dev './' && \\nopam pin add -yn arrayjit.dev './'"))[neural_nets_lib.dev] synchronised (file:///src)neural_nets_lib is now pinned to file:///src (version dev)[arrayjit.dev] synchronised (file:///src)arrayjit is now pinned to file:///src (version dev)2025-03-20 21:19.21 ---> saved as "0a48fa20bdf4e6bdf92de50ad1b179566e59a5d19580d481942ca723221ad166"/src: (run (network host)(shell "echo '(lang dune 3.0)' > './dune-project'"))2025-03-20 21:19.21 ---> saved as "e5fa21efb6ec9bcee7c096aa55a7baad8cdd660b0019042e49dcb36e3acc6d45"/src: (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 
ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")/src: (env CI true)/src: (env OCAMLCI true)/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))+ /usr/bin/sudo "yum" "makecache"- Updating and loading repositories:- Repositories loaded.- Metadata cache created.<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>[arrayjit.dev] synchronised (file:///src)[neural_nets_lib.dev] synchronised (file:///src)[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).[NOTE] Package ocaml-config is already installed (current version is 3).[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml is already installed (current version is 5.3.0).[NOTE] Package base-unix is already installed (current version is base).[NOTE] Package base-threads is already installed (current version is base).[NOTE] Package base-nnp is already installed (current version is base).[NOTE] Package base-effects is already installed (current version is base).[NOTE] Package base-domains is already installed (current version is base).[NOTE] Package base-bigarray is already installed (current version is base).The following system packages will first need to be installed:libffi-devel<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>+ /usr/bin/sudo "yum" "install" "-y" "libffi-devel"- Updating and loading repositories:- Repositories loaded.- Package Arch Version Repository Size- Installing:- libffi-devel x86_64 3.4.6-3.fc41 fedora 33.1 KiB-- Transaction Summary:- Installing: 1 package-- Total size of inbound packages is 29 KiB. 
Need to download 29 KiB.- After this operation, 33 KiB extra will be used (install 33 KiB, remove 0 B).- [1/1] libffi-devel-0:3.4.6-3.fc41.x86_6 100% | 346.6 KiB/s | 28.8 KiB | 00m00s- --------------------------------------------------------------------------------- [1/1] Total 100% | 130.7 KiB/s | 28.8 KiB | 00m00s- Running transaction- [1/3] Verify package files 100% | 0.0 B/s | 1.0 B | 00m00s- [2/3] Prepare transaction 100% | 40.0 B/s | 1.0 B | 00m00s- [3/3] Installing libffi-devel-0:3.4.6-3 100% | 589.7 KiB/s | 34.8 KiB | 00m00s- Complete!+ /usr/bin/rpm "-q" "--whatprovides" "libffi-devel"- libffi-devel-3.4.6-3.fc41.x86_642025-03-20 21:19.41 ---> saved as "ec2cd6f2dd31abf670c32d6062964da56a43e5a89a2f82d108cb5a2ddba5eba7"/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "opam install $DEPS"))[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).[NOTE] Package ocaml-config is already installed (current version is 3).[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml is already installed (current version is 5.3.0).[NOTE] Package base-unix is already installed (current version is base).[NOTE] Package base-threads is already installed (current version is base).[NOTE] Package base-nnp is already installed (current version is base).[NOTE] Package base-effects is already installed (current version is base).[NOTE] Package base-domains is already installed (current version is base).[NOTE] Package base-bigarray is already installed (current version is base).The following actions will be performed:=== install 65 packages- install angstrom 0.16.1- install backoff 0.1.1- install base v0.17.1- install bigarray-compat 1.1.0- install bigstringaf 0.10.0- install conf-libffi 2.0.0- install conf-pkg-config 4- install cppo 1.8.0- install csexp 1.5.2- install ctypes 0.23.0- install ctypes-foreign 0.23.0- install dune 3.17.2- install dune-configurator 3.17.2- install fieldslib v0.17.0- install integers 0.7.0- install jane-street-headers v0.17.0- install jst-config v0.17.0- install mtime 2.1.0- install multicore-magic 2.3.1- install num 1.5-1- install ocaml-compiler-libs v0.17.0- install ocaml-syntax-shims 1.0.0- install ocaml_intrinsics_kernel v0.17.1- install ocamlbuild 0.16.1- install ocamlfind 1.9.8- install parsexp v0.17.0- install ppx_assert v0.17.0- install ppx_base v0.17.0- install ppx_cold v0.17.0- install ppx_compare v0.17.0- install ppx_derivers 1.2.1- install ppx_deriving 6.0.3- install ppx_enumerate v0.17.0- install ppx_expect v0.17.2- install ppx_fields_conv v0.17.0- install ppx_globalize v0.17.0- install ppx_hash v0.17.0- install ppx_here v0.17.0- install ppx_inline_test v0.17.0- install ppx_minidebug 2.1.0- install ppx_optcomp v0.17.0- install ppx_sexp_conv v0.17.0- install ppx_string v0.17.0- install ppx_variants_conv v0.17.0- install ppxlib 0.35.0- install ppxlib_jane v0.17.2- install printbox 0.12- install printbox-ext-plot 0.12- install printbox-html 0.12- install printbox-md 0.12- install printbox-text 0.12- install ptime 1.2.0- install re 1.12.0- install saturn_lockfree 0.5.0- install seq base- install sexplib v0.17.0- install sexplib0 v0.17.0- install stdio v0.17.0- install stdlib-shims 0.3.0- install time_now v0.17.0- install topkg 1.0.8- install tyxml 4.6.0- install uucp 16.0.0- install uutf 1.0.4- install variantslib v0.17.0<><> Processing actions 
<><><><><><><><><><><><><><><><><><><><><><><><><><><><>-> retrieved backoff.0.1.1 (cached)-> retrieved bigarray-compat.1.1.0 (cached)-> retrieved angstrom.0.16.1 (cached)-> retrieved base.v0.17.1 (cached)-> retrieved bigstringaf.0.10.0 (cached)-> retrieved cppo.1.8.0 (cached)-> installed conf-pkg-config.4-> retrieved csexp.1.5.2 (cached)-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached)-> installed conf-libffi.2.0.0-> retrieved fieldslib.v0.17.0 (cached)-> retrieved integers.0.7.0 (cached)-> retrieved jane-street-headers.v0.17.0 (cached)-> retrieved jst-config.v0.17.0 (cached)-> retrieved mtime.2.1.0 (cached)-> retrieved multicore-magic.2.3.1 (cached)-> retrieved num.1.5-1 (cached)-> retrieved ocaml-compiler-libs.v0.17.0 (cached)-> retrieved ocaml-syntax-shims.1.0.0 (cached)-> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached)-> retrieved ocamlbuild.0.16.1 (cached)-> retrieved ocamlfind.1.9.8 (cached)-> retrieved parsexp.v0.17.0 (cached)-> retrieved ppx_assert.v0.17.0 (cached)-> retrieved ppx_base.v0.17.0 (cached)-> retrieved ppx_cold.v0.17.0 (cached)-> retrieved ppx_compare.v0.17.0 (cached)-> retrieved ppx_derivers.1.2.1 (cached)-> retrieved ppx_enumerate.v0.17.0 (cached)-> retrieved ppx_deriving.6.0.3 (cached)-> retrieved ppx_expect.v0.17.2 (cached)-> retrieved ppx_fields_conv.v0.17.0 (cached)-> retrieved ppx_globalize.v0.17.0 (cached)-> retrieved dune.3.17.2, dune-configurator.3.17.2 (cached)-> installed num.1.5-1-> retrieved ppx_hash.v0.17.0 (cached)-> retrieved ppx_here.v0.17.0 (cached)-> retrieved ppx_inline_test.v0.17.0 (cached)-> retrieved ppx_optcomp.v0.17.0 (cached)-> retrieved ppx_sexp_conv.v0.17.0 (cached)-> retrieved ppx_string.v0.17.0 (cached)-> retrieved ppx_variants_conv.v0.17.0 (cached)-> retrieved ppxlib_jane.v0.17.2 (cached)-> retrieved ppx_minidebug.2.1.0 (cached)-> retrieved ptime.1.2.0 (cached)-> retrieved re.1.12.0 (cached)-> retrieved saturn_lockfree.0.5.0 (cached)-> retrieved seq.base (cached)-> installed seq.base-> retrieved sexplib.v0.17.0 (cached)-> retrieved sexplib0.v0.17.0 (cached)-> retrieved stdio.v0.17.0 (cached)-> retrieved stdlib-shims.0.3.0 (cached)-> retrieved time_now.v0.17.0 (cached)-> retrieved ppxlib.0.35.0 (cached)-> retrieved topkg.1.0.8 (cached)-> retrieved tyxml.4.6.0 (cached)-> retrieved uutf.1.0.4 (cached)-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached)-> retrieved variantslib.v0.17.0 (cached)-> retrieved uucp.16.0.0 (cached)-> installed ocamlbuild.0.16.1-> installed ocamlfind.1.9.8-> installed topkg.1.0.8-> installed uutf.1.0.4-> installed mtime.2.1.0-> installed ptime.1.2.0-> installed dune.3.17.2-> installed jane-street-headers.v0.17.0-> installed ppx_derivers.1.2.1-> installed csexp.1.5.2-> installed backoff.0.1.1-> installed bigarray-compat.1.1.0-> installed multicore-magic.2.3.1-> installed ocaml-syntax-shims.1.0.0-> installed ocaml_intrinsics_kernel.v0.17.1-> installed printbox.0.12-> installed sexplib0.v0.17.0-> installed stdlib-shims.0.3.0-> installed re.1.12.0-> installed cppo.1.8.0-> installed ocaml-compiler-libs.v0.17.0-> installed saturn_lockfree.0.5.0-> installed integers.0.7.0-> installed parsexp.v0.17.0-> installed dune-configurator.3.17.2-> installed bigstringaf.0.10.0-> installed sexplib.v0.17.0-> installed angstrom.0.16.1-> installed tyxml.4.6.0-> installed printbox-html.0.12-> installed ctypes.0.23.0-> installed base.v0.17.1-> installed uucp.16.0.0-> installed variantslib.v0.17.0-> installed fieldslib.v0.17.0-> installed stdio.v0.17.0-> installed 
printbox-text.0.12-> installed ctypes-foreign.0.23.0-> installed printbox-md.0.12-> installed printbox-ext-plot.0.12-> installed ppxlib.0.35.0-> installed ppxlib_jane.v0.17.2-> installed ppx_optcomp.v0.17.0-> installed ppx_here.v0.17.0-> installed ppx_cold.v0.17.0-> installed ppx_variants_conv.v0.17.0-> installed ppx_fields_conv.v0.17.0-> installed ppx_globalize.v0.17.0-> installed ppx_deriving.6.0.3-> installed ppx_enumerate.v0.17.0-> installed ppx_compare.v0.17.0-> installed ppx_sexp_conv.v0.17.0-> installed ppx_hash.v0.17.0-> installed ppx_assert.v0.17.0-> installed ppx_base.v0.17.0-> installed ppx_minidebug.2.1.0-> installed jst-config.v0.17.0-> installed ppx_string.v0.17.0-> installed time_now.v0.17.0-> installed ppx_inline_test.v0.17.0-> installed ppx_expect.v0.17.2Done.# To update the current shell environment, run: eval $(opam env)2025-03-20 21:23.14 ---> saved as "7921041fe58f6d2d694d077196babd22dfea63ab7b6a5facc67bb1556370f14b"/src: (copy (src .) (dst /src))2025-03-20 21:23.16 ---> saved as "a42db06464056c8d085fdd3c65874fbb40ca491672ec12e2dc77958a419cd48f"/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))(cd _build/default/test_ppx && ./test_ppx_op.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/0c2544f328c3ee6d493ac59029ff37ba/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! 
Reading configuration defaults from /src/_build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d91c5a789a8c08b387c17d563c7cb7c5/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/default/test && ./moons_demo_parallel_run.exe)Welcome to OCANNL! 
Reading configuration defaults from /src/_build/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file("Set log_level to" 1)└─{orphaned from #2}Retrieving commandline, environment, or config file variable ocannl_backendFound cc, in the config fileRetrieving commandline, environment, or config file variable ocannl_ll_ident_styleNot found, using default heuristicRetrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_levelNot found, using default 3Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_commandNot found, using default gccRetrieving commandline, environment, or config file variable ocannl_never_capture_stdoutNot found, using default falseBatch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407Batch=959, step=960, lr=0.196250, batch loss=0.245136, epoch loss=44.035543Batch=1019, step=1020, lr=0.195750, batch loss=0.466506, epoch loss=44.502049Batch=1079, step=1080, lr=0.195750, batch loss=0.248523, epoch loss=44.750571Batch=1139, step=1140, lr=0.195250, batch loss=0.317671, epoch loss=45.068243Batch=1199, step=1200, lr=0.195250, batch loss=0.263585, epoch loss=45.331828Epoch=0, step=1200, lr=0.195250, epoch loss=45.331828Batch=59, step=1260, lr=0.195000, batch loss=0.262058, epoch loss=0.262058Batch=119, step=1320, lr=0.194750, batch loss=0.205376, epoch loss=0.467435Batch=179, step=1380, lr=0.194500, batch loss=0.243658, epoch loss=0.711092Batch=239, step=1440, lr=0.194250, batch loss=0.347786, epoch loss=1.058878Batch=299, step=1500, lr=0.194000, batch loss=0.247564, epoch loss=1.306442Batch=359, step=1560, lr=0.193750, batch loss=0.316406, epoch loss=1.622848Batch=419, step=1620, lr=0.193500, batch loss=0.308832, epoch loss=1.931680Batch=479, step=1680, lr=0.193250, batch loss=0.275066, epoch loss=2.206746Batch=539, step=1740, lr=0.193000, batch loss=0.210851, epoch loss=2.417597Batch=599, step=1800, lr=0.192750, batch loss=0.261902, epoch loss=2.679499Batch=659, step=1860, lr=0.192500, batch loss=0.375671, epoch loss=3.055170Batch=719, step=1920, lr=0.192250, batch loss=0.357987, epoch loss=3.413156Batch=779, step=1980, lr=0.192000, batch loss=0.386950, epoch loss=3.800107Batch=839, step=2040, lr=0.191750, batch loss=0.346431, epoch loss=4.146538Batch=899, step=2100, lr=0.191500, batch loss=0.319047, epoch loss=4.465584Batch=959, step=2160, lr=0.191000, batch loss=0.246412, epoch 
loss=4.711997Batch=1019, step=2220, lr=0.191000, batch loss=0.377297, epoch loss=5.089293Batch=1079, step=2280, lr=0.190750, batch loss=0.216869, epoch loss=5.306163Batch=1139, step=2340, lr=0.190500, batch loss=0.265708, epoch loss=5.571871Batch=1199, step=2400, lr=0.190250, batch loss=0.212226, epoch loss=5.784097Epoch=1, step=2400, lr=0.190250, epoch loss=5.784097Batch=59, step=2460, lr=0.190000, batch loss=0.233942, epoch loss=0.233942Batch=119, step=2520, lr=0.189750, batch loss=0.195925, epoch loss=0.429866Batch=179, step=2580, lr=0.189500, batch loss=0.221271, epoch loss=0.651137Batch=239, step=2640, lr=0.189250, batch loss=0.329016, epoch loss=0.980153Batch=299, step=2700, lr=0.189000, batch loss=0.206828, epoch loss=1.186981Batch=359, step=2760, lr=0.188750, batch loss=0.292886, epoch loss=1.479866Batch=419, step=2820, lr=0.188500, batch loss=0.281143, epoch loss=1.761010Batch=479, step=2880, lr=0.188250, batch loss=0.255383, epoch loss=2.016392Batch=539, step=2940, lr=0.188000, batch loss=0.195933, epoch loss=2.212325Batch=599, step=3000, lr=0.187750, batch loss=0.234684, epoch loss=2.447009Batch=659, step=3060, lr=0.187500, batch loss=0.340160, epoch loss=2.787169Batch=719, step=3120, lr=0.187250, batch loss=0.335206, epoch loss=3.122376Batch=779, step=3180, lr=0.187000, batch loss=0.360069, epoch loss=3.482445Batch=839, step=3240, lr=0.186750, batch loss=0.326870, epoch loss=3.809315Batch=899, step=3300, lr=0.186500, batch loss=0.293718, epoch loss=4.103033Batch=959, step=3360, lr=0.186250, batch loss=0.226743, epoch loss=4.329776Batch=1019, step=3420, lr=0.186000, batch loss=0.336947, epoch loss=4.666723Batch=1079, step=3480, lr=0.185750, batch loss=0.190037, epoch loss=4.856761Batch=1139, step=3540, lr=0.185500, batch loss=0.231876, epoch loss=5.088636Batch=1199, step=3600, lr=0.185250, batch loss=0.198904, epoch loss=5.287540Epoch=2, step=3600, lr=0.185250, epoch loss=5.287540Batch=59, step=3660, lr=0.185000, batch loss=0.232538, epoch loss=0.232538Batch=119, step=3720, lr=0.184750, batch loss=0.195730, epoch loss=0.428268Batch=179, step=3780, lr=0.184500, batch loss=0.212802, epoch loss=0.641070Batch=239, step=3840, lr=0.184250, batch loss=0.318254, epoch loss=0.959324Batch=299, step=3900, lr=0.184000, batch loss=0.208867, epoch loss=1.168191Batch=359, step=3960, lr=0.183750, batch loss=0.284958, epoch loss=1.453150Batch=419, step=4020, lr=0.183500, batch loss=0.272128, epoch loss=1.725278Batch=479, step=4080, lr=0.183250, batch loss=0.249106, epoch loss=1.974384Batch=539, step=4140, lr=0.183000, batch loss=0.194729, epoch loss=2.169113Batch=599, step=4200, lr=0.182750, batch loss=0.232681, epoch loss=2.401794Batch=659, step=4260, lr=0.182500, batch loss=0.334922, epoch loss=2.736716Batch=719, step=4320, lr=0.182250, batch loss=0.342500, epoch loss=3.079215Batch=779, step=4380, lr=0.182000, batch loss=0.350542, epoch loss=3.429758Batch=839, step=4440, lr=0.181750, batch loss=0.316256, epoch loss=3.746014Batch=899, step=4500, lr=0.181500, batch loss=0.286454, epoch loss=4.032468Batch=959, step=4560, lr=0.181250, batch loss=0.238091, epoch loss=4.270560Batch=1019, step=4620, lr=0.181000, batch loss=0.333241, epoch loss=4.603801Batch=1079, step=4680, lr=0.180750, batch loss=0.195135, epoch loss=4.798936Batch=1139, step=4740, lr=0.180500, batch loss=0.229125, epoch loss=5.028061Batch=1199, step=4800, lr=0.180250, batch loss=0.194347, epoch loss=5.222408Epoch=3, step=4800, lr=0.180250, epoch loss=5.222408Batch=59, step=4860, lr=0.180000, batch loss=0.241929, epoch 
loss=0.241929Batch=119, step=4920, lr=0.179750, batch loss=0.198695, epoch loss=0.440625Batch=179, step=4980, lr=0.179500, batch loss=0.206146, epoch loss=0.646770Batch=239, step=5040, lr=0.179250, batch loss=0.309477, epoch loss=0.956247Batch=299, step=5100, lr=0.179000, batch loss=0.205409, epoch loss=1.161656Batch=359, step=5160, lr=0.178750, batch loss=0.276741, epoch loss=1.438398Batch=419, step=5220, lr=0.178500, batch loss=0.270592, epoch loss=1.708990Batch=479, step=5280, lr=0.178250, batch loss=0.251811, epoch loss=1.960801Batch=539, step=5340, lr=0.178000, batch loss=0.194373, epoch loss=2.155174Batch=599, step=5400, lr=0.177750, batch loss=0.236071, epoch loss=2.391245Batch=659, step=5460, lr=0.177500, batch loss=0.321593, epoch loss=2.712837Batch=719, step=5520, lr=0.177250, batch loss=0.323740, epoch loss=3.036578Batch=779, step=5580, lr=0.177000, batch loss=0.340438, epoch loss=3.377015Batch=839, step=5640, lr=0.176750, batch loss=0.312098, epoch loss=3.689114Batch=899, step=5700, lr=0.176500, batch loss=0.280222, epoch loss=3.969335Batch=959, step=5760, lr=0.176250, batch loss=0.227072, epoch loss=4.196408Batch=1019, step=5820, lr=0.176000, batch loss=0.324489, epoch loss=4.520897Batch=1079, step=5880, lr=0.175750, batch loss=0.189719, epoch loss=4.710616Batch=1139, step=5940, lr=0.175500, batch loss=0.222201, epoch loss=4.932817Batch=1199, step=6000, lr=0.175250, batch loss=0.186174, epoch loss=5.118991Epoch=4, step=6000, lr=0.175250, epoch loss=5.118991Batch=59, step=6060, lr=0.175000, batch loss=0.232244, epoch loss=0.232244Batch=119, step=6120, lr=0.174750, batch loss=0.194034, epoch loss=0.426279Batch=179, step=6180, lr=0.174500, batch loss=0.200793, epoch loss=0.627071Batch=239, step=6240, lr=0.174250, batch loss=0.303417, epoch loss=0.930488Batch=299, step=6300, lr=0.174000, batch loss=0.210577, epoch loss=1.141065Batch=359, step=6360, lr=0.173750, batch loss=0.274056, epoch loss=1.415121Batch=419, step=6420, lr=0.173500, batch loss=0.266421, epoch loss=1.681543Batch=479, step=6480, lr=0.173250, batch loss=0.243472, epoch loss=1.925014Batch=539, step=6540, lr=0.173000, batch loss=0.194957, epoch loss=2.119971Batch=599, step=6600, lr=0.172750, batch loss=0.234204, epoch loss=2.354176Batch=659, step=6660, lr=0.172500, batch loss=0.312504, epoch loss=2.666679Batch=719, step=6720, lr=0.172250, batch loss=0.318417, epoch loss=2.985096Batch=779, step=6780, lr=0.172000, batch loss=0.334332, epoch loss=3.319428Batch=839, step=6840, lr=0.171750, batch loss=0.303579, epoch loss=3.623007Batch=899, step=6900, lr=0.171500, batch loss=0.267336, epoch loss=3.890343Batch=959, step=6960, lr=0.171250, batch loss=0.211110, epoch loss=4.101453Batch=1019, step=7020, lr=0.171000, batch loss=0.327581, epoch loss=4.429033Batch=1079, step=7080, lr=0.170750, batch loss=0.182867, epoch loss=4.611901Batch=1139, step=7140, lr=0.170500, batch loss=0.216231, epoch loss=4.828132Batch=1199, step=7200, lr=0.170250, batch loss=0.182285, epoch loss=5.010417Epoch=5, step=7200, lr=0.170250, epoch loss=5.010417Batch=59, step=7260, lr=0.170000, batch loss=0.241590, epoch loss=0.241590Batch=119, step=7320, lr=0.169750, batch loss=0.178989, epoch loss=0.420579Batch=179, step=7380, lr=0.169500, batch loss=0.195030, epoch loss=0.615609Batch=239, step=7440, lr=0.169250, batch loss=0.291224, epoch loss=0.906833Batch=299, step=7500, lr=0.169000, batch loss=0.204973, epoch loss=1.111806Batch=359, step=7560, lr=0.168750, batch loss=0.263104, epoch loss=1.374910Batch=419, step=7620, lr=0.168500, batch loss=0.253351, 
epoch loss=1.628261Batch=479, step=7680, lr=0.168250, batch loss=0.233701, epoch loss=1.861962Batch=539, step=7740, lr=0.168000, batch loss=0.191217, epoch loss=2.053180Batch=599, step=7800, lr=0.167750, batch loss=0.227273, epoch loss=2.280453Batch=659, step=7860, lr=0.167500, batch loss=0.305604, epoch loss=2.586056Batch=719, step=7920, lr=0.167250, batch loss=0.309013, epoch loss=2.895069Batch=779, step=7980, lr=0.167000, batch loss=0.329651, epoch loss=3.224720Batch=839, step=8040, lr=0.166750, batch loss=0.292215, epoch loss=3.516935Batch=899, step=8100, lr=0.166500, batch loss=0.263549, epoch loss=3.780484Batch=959, step=8160, lr=0.166250, batch loss=0.207173, epoch loss=3.987657Batch=1019, step=8220, lr=0.166000, batch loss=0.315022, epoch loss=4.302679Batch=1079, step=8280, lr=0.165750, batch loss=0.169767, epoch loss=4.472446Batch=1139, step=8340, lr=0.165500, batch loss=0.207105, epoch loss=4.679551Batch=1199, step=8400, lr=0.165250, batch loss=0.179247, epoch loss=4.858798Epoch=6, step=8400, lr=0.165250, epoch loss=4.858798Batch=59, step=8460, lr=0.165000, batch loss=0.227343, epoch loss=0.227343Batch=119, step=8520, lr=0.164750, batch loss=0.175633, epoch loss=0.402976Batch=179, step=8580, lr=0.164500, batch loss=0.187661, epoch loss=0.590637Batch=239, step=8640, lr=0.164250, batch loss=0.277509, epoch loss=0.868146Batch=299, step=8700, lr=0.164000, batch loss=0.189838, epoch loss=1.057985Batch=359, step=8760, lr=0.163750, batch loss=0.249860, epoch loss=1.307845Batch=419, step=8820, lr=0.163500, batch loss=0.244767, epoch loss=1.552612Batch=479, step=8880, lr=0.163250, batch loss=0.227350, epoch loss=1.779961Batch=539, step=8940, lr=0.163000, batch loss=0.180017, epoch loss=1.959978Batch=599, step=9000, lr=0.162750, batch loss=0.216216, epoch loss=2.176195Batch=659, step=9060, lr=0.162500, batch loss=0.293952, epoch loss=2.470146Batch=719, step=9120, lr=0.162250, batch loss=0.295094, epoch loss=2.765240Batch=779, step=9180, lr=0.162000, batch loss=0.314752, epoch loss=3.079991Batch=839, step=9240, lr=0.161750, batch loss=0.286924, epoch loss=3.366915Batch=899, step=9300, lr=0.161500, batch loss=0.250865, epoch loss=3.617780Batch=959, step=9360, lr=0.161250, batch loss=0.189539, epoch loss=3.807319Batch=1019, step=9420, lr=0.161000, batch loss=0.313518, epoch loss=4.120837Batch=1079, step=9480, lr=0.160750, batch loss=0.188718, epoch loss=4.309554Batch=1139, step=9540, lr=0.160500, batch loss=0.204744, epoch loss=4.514299Batch=1199, step=9600, lr=0.160250, batch loss=0.165675, epoch loss=4.679974Epoch=7, step=9600, lr=0.160250, epoch loss=4.679974Batch=59, step=9660, lr=0.160000, batch loss=0.195514, epoch loss=0.195514Batch=119, step=9720, lr=0.159750, batch loss=0.166278, epoch loss=0.361791Batch=179, step=9780, lr=0.159500, batch loss=0.179237, epoch loss=0.541028Batch=239, step=9840, lr=0.159250, batch loss=0.263010, epoch loss=0.804039Batch=299, step=9900, lr=0.159000, batch loss=0.183183, epoch loss=0.987221Batch=359, step=9960, lr=0.158750, batch loss=0.239791, epoch loss=1.227012Batch=419, step=10020, lr=0.158500, batch loss=0.232761, epoch loss=1.459773Batch=479, step=10080, lr=0.158250, batch loss=0.212581, epoch loss=1.672355Batch=539, step=10140, lr=0.158000, batch loss=0.170985, epoch loss=1.843340Batch=599, step=10200, lr=0.157750, batch loss=0.199994, epoch loss=2.043334Batch=659, step=10260, lr=0.157500, batch loss=0.282693, epoch loss=2.326027Batch=719, step=10320, lr=0.157250, batch loss=0.287593, epoch loss=2.613620Batch=779, step=10380, lr=0.157000, batch 
loss=0.296175, epoch loss=2.909795Batch=839, step=10440, lr=0.156750, batch loss=0.267032, epoch loss=3.176827Batch=899, step=10500, lr=0.156500, batch loss=0.243064, epoch loss=3.419892Batch=959, step=10560, lr=0.156250, batch loss=0.197105, epoch loss=3.616997Batch=1019, step=10620, lr=0.156000, batch loss=0.282302, epoch loss=3.899299Batch=1079, step=10680, lr=0.155750, batch loss=0.165824, epoch loss=4.065122Batch=1139, step=10740, lr=0.155500, batch loss=0.197721, epoch loss=4.262843Batch=1199, step=10800, lr=0.155250, batch loss=0.155149, epoch loss=4.417992Epoch=8, step=10800, lr=0.155250, epoch loss=4.417992Batch=59, step=10860, lr=0.155000, batch loss=0.194755, epoch loss=0.194755Batch=119, step=10920, lr=0.154750, batch loss=0.159895, epoch loss=0.354650Batch=179, step=10980, lr=0.154500, batch loss=0.168495, epoch loss=0.523145Batch=239, step=11040, lr=0.154250, batch loss=0.244831, epoch loss=0.767976Batch=299, step=11100, lr=0.154000, batch loss=0.166254, epoch loss=0.934229Batch=359, step=11160, lr=0.153750, batch loss=0.221428, epoch loss=1.155658Batch=419, step=11220, lr=0.153500, batch loss=0.227175, epoch loss=1.382833Batch=479, step=11280, lr=0.153250, batch loss=0.203599, epoch loss=1.586432Batch=539, step=11340, lr=0.153000, batch loss=0.157077, epoch loss=1.743508Batch=599, step=11400, lr=0.152750, batch loss=0.173766, epoch loss=1.917275Batch=659, step=11460, lr=0.152500, batch loss=0.262750, epoch loss=2.180025Batch=719, step=11520, lr=0.152250, batch loss=0.260949, epoch loss=2.440974Batch=779, step=11580, lr=0.152000, batch loss=0.269421, epoch loss=2.710395Batch=839, step=11640, lr=0.151750, batch loss=0.257126, epoch loss=2.967521Batch=899, step=11700, lr=0.151500, batch loss=0.216800, epoch loss=3.184321Batch=959, step=11760, lr=0.151250, batch loss=0.172312, epoch loss=3.356633Batch=1019, step=11820, lr=0.151000, batch loss=0.258888, epoch loss=3.615521Batch=1079, step=11880, lr=0.150750, batch loss=0.135733, epoch loss=3.751254Batch=1139, step=11940, lr=0.150500, batch loss=0.161973, epoch loss=3.913227Batch=1199, step=12000, lr=0.150250, batch loss=0.138320, epoch loss=4.051547Epoch=9, step=12000, lr=0.150250, epoch loss=4.051547Batch=59, step=12060, lr=0.150000, batch loss=0.162318, epoch loss=0.162318Batch=119, step=12120, lr=0.149750, batch loss=0.142871, epoch loss=0.305189Batch=179, step=12180, lr=0.149500, batch loss=0.149511, epoch loss=0.454699Batch=239, step=12240, lr=0.149250, batch loss=0.216509, epoch loss=0.671208Batch=299, step=12300, lr=0.149000, batch loss=0.145233, epoch loss=0.816441Batch=359, step=12360, lr=0.148750, batch loss=0.194903, epoch loss=1.011345Batch=419, step=12420, lr=0.148500, batch loss=0.207141, epoch loss=1.218486Batch=479, step=12480, lr=0.148250, batch loss=0.177595, epoch loss=1.396081Batch=539, step=12540, lr=0.148000, batch loss=0.143250, epoch loss=1.539331Batch=599, step=12600, lr=0.147750, batch loss=0.149168, epoch loss=1.688499Batch=659, step=12660, lr=0.147500, batch loss=0.225424, epoch loss=1.913924Batch=719, step=12720, lr=0.147250, batch loss=0.230087, epoch loss=2.144011Batch=779, step=12780, lr=0.147000, batch loss=0.256829, epoch loss=2.400840Batch=839, step=12840, lr=0.146750, batch loss=0.234762, epoch loss=2.635602Batch=899, step=12900, lr=0.146500, batch loss=0.232063, epoch loss=2.867665Batch=959, step=12960, lr=0.146250, batch loss=0.138121, epoch loss=3.005786Batch=1019, step=13020, lr=0.146000, batch loss=0.211115, epoch loss=3.216901Batch=1079, step=13080, lr=0.145750, batch loss=0.115882, epoch 
loss=3.332783Batch=1139, step=13140, lr=0.145500, batch loss=0.158323, epoch loss=3.491105Batch=1199, step=13200, lr=0.145250, batch loss=0.116751, epoch loss=3.607857Epoch=10, step=13200, lr=0.145250, epoch loss=3.607857Batch=59, step=13260, lr=0.145000, batch loss=0.137760, epoch loss=0.137760Batch=119, step=13320, lr=0.144750, batch loss=0.116719, epoch loss=0.254479Batch=179, step=13380, lr=0.144500, batch loss=0.127141, epoch loss=0.381620Batch=239, step=13440, lr=0.144250, batch loss=0.184142, epoch loss=0.565762Batch=299, step=13500, lr=0.144000, batch loss=0.118312, epoch loss=0.684075Batch=359, step=13560, lr=0.143750, batch loss=0.162695, epoch loss=0.846769Batch=419, step=13620, lr=0.143500, batch loss=0.160410, epoch loss=1.007179Batch=479, step=13680, lr=0.143250, batch loss=0.147062, epoch loss=1.154241Batch=539, step=13740, lr=0.143000, batch loss=0.116914, epoch loss=1.271154Batch=599, step=13800, lr=0.142750, batch loss=0.119689, epoch loss=1.390844Batch=659, step=13860, lr=0.142500, batch loss=0.176962, epoch loss=1.567805Batch=719, step=13920, lr=0.142250, batch loss=0.172434, epoch loss=1.740239Batch=779, step=13980, lr=0.142000, batch loss=0.179509, epoch loss=1.919748Batch=839, step=14040, lr=0.141750, batch loss=0.203309, epoch loss=2.123057Batch=899, step=14100, lr=0.141500, batch loss=0.219820, epoch loss=2.342877Batch=959, step=14160, lr=0.141250, batch loss=0.101245, epoch loss=2.444122Batch=1019, step=14220, lr=0.141000, batch loss=0.197157, epoch loss=2.641279Batch=1079, step=14280, lr=0.140750, batch loss=0.081281, epoch loss=2.722560Batch=1139, step=14340, lr=0.140500, batch loss=0.127035, epoch loss=2.849595Batch=1199, step=14400, lr=0.140250, batch loss=0.085763, epoch loss=2.935358Epoch=11, step=14400, lr=0.140250, epoch loss=2.935358Batch=59, step=14460, lr=0.140000, batch loss=0.108552, epoch loss=0.108552Batch=119, step=14520, lr=0.139750, batch loss=0.102743, epoch loss=0.211295Batch=179, step=14580, lr=0.139500, batch loss=0.099294, epoch loss=0.310588Batch=239, step=14640, lr=0.139250, batch loss=0.141565, epoch loss=0.452154Batch=299, step=14700, lr=0.139000, batch loss=0.074930, epoch loss=0.527084Batch=359, step=14760, lr=0.138750, batch loss=0.121854, epoch loss=0.648938Batch=419, step=14820, lr=0.138500, batch loss=0.136889, epoch loss=0.785827Batch=479, step=14880, lr=0.138250, batch loss=0.092822, epoch loss=0.878649Batch=539, step=14940, lr=0.138000, batch loss=0.085482, epoch loss=0.964131Batch=599, step=15000, lr=0.137750, batch loss=0.086179, epoch loss=1.050310Batch=659, step=15060, lr=0.137500, batch loss=0.141435, epoch loss=1.191745Batch=719, step=15120, lr=0.137250, batch loss=0.168932, epoch loss=1.360677Batch=779, step=15180, lr=0.137000, batch loss=0.337616, epoch loss=1.698293Batch=839, step=15240, lr=0.136750, batch loss=0.134731, epoch loss=1.833024Batch=899, step=15300, lr=0.136500, batch loss=0.111274, epoch loss=1.944298Batch=959, step=15360, lr=0.136250, batch loss=0.098062, epoch loss=2.042360Batch=1019, step=15420, lr=0.136000, batch loss=0.168811, epoch loss=2.211171Batch=1079, step=15480, lr=0.135750, batch loss=0.040449, epoch loss=2.251620Batch=1139, step=15540, lr=0.135500, batch loss=0.093481, epoch loss=2.345100Batch=1199, step=15600, lr=0.135250, batch loss=0.058738, epoch loss=2.403839Epoch=12, step=15600, lr=0.135250, epoch loss=2.403839Batch=59, step=15660, lr=0.135000, batch loss=0.074345, epoch loss=0.074345Batch=119, step=15720, lr=0.134750, batch loss=0.122739, epoch loss=0.197084Batch=179, step=15780, 
lr=0.134500, batch loss=0.092051, epoch loss=0.289134Batch=239, step=15840, lr=0.134250, batch loss=0.094314, epoch loss=0.383449Batch=299, step=15900, lr=0.134000, batch loss=0.038141, epoch loss=0.421589Batch=359, step=15960, lr=0.133750, batch loss=0.081912, epoch loss=0.503501Batch=419, step=16020, lr=0.133500, batch loss=0.080279, epoch loss=0.583780Batch=479, step=16080, lr=0.133250, batch loss=0.069131, epoch loss=0.652912Batch=539, step=16140, lr=0.133000, batch loss=0.053219, epoch loss=0.706131Batch=599, step=16200, lr=0.132750, batch loss=0.098679, epoch loss=0.804810Batch=659, step=16260, lr=0.132500, batch loss=0.076336, epoch loss=0.881146Batch=719, step=16320, lr=0.132250, batch loss=0.085634, epoch loss=0.966780Batch=779, step=16380, lr=0.132000, batch loss=0.111744, epoch loss=1.078525Batch=839, step=16440, lr=0.131750, batch loss=0.122053, epoch loss=1.200577Batch=899, step=16500, lr=0.131500, batch loss=0.155099, epoch loss=1.355676Batch=959, step=16560, lr=0.131250, batch loss=0.044209, epoch loss=1.399885Batch=1019, step=16620, lr=0.131000, batch loss=0.101740, epoch loss=1.501625Batch=1079, step=16680, lr=0.130750, batch loss=0.025892, epoch loss=1.527517Batch=1139, step=16740, lr=0.130500, batch loss=0.053691, epoch loss=1.581207Batch=1199, step=16800, lr=0.130250, batch loss=0.029929, epoch loss=1.611136Epoch=13, step=16800, lr=0.130250, epoch loss=1.611136Batch=59, step=16860, lr=0.130000, batch loss=0.036071, epoch loss=0.036071Batch=119, step=16920, lr=0.129750, batch loss=0.048231, epoch loss=0.084301Batch=179, step=16980, lr=0.129500, batch loss=0.048320, epoch loss=0.132622Batch=239, step=17040, lr=0.129250, batch loss=0.069277, epoch loss=0.201899Batch=299, step=17100, lr=0.129000, batch loss=0.042899, epoch loss=0.244798Batch=359, step=17160, lr=0.128750, batch loss=0.051271, epoch loss=0.296070Batch=419, step=17220, lr=0.128500, batch loss=0.084853, epoch loss=0.380923Batch=479, step=17280, lr=0.128250, batch loss=0.026057, epoch loss=0.406980Batch=539, step=17340, lr=0.128000, batch loss=0.027975, epoch loss=0.434955Batch=599, step=17400, lr=0.127750, batch loss=0.034554, epoch loss=0.469509Batch=659, step=17460, lr=0.127500, batch loss=0.046274, epoch loss=0.515783Batch=719, step=17520, lr=0.127250, batch loss=0.052959, epoch loss=0.568742Batch=779, step=17580, lr=0.127000, batch loss=0.129055, epoch loss=0.697797Batch=839, step=17640, lr=0.126750, batch loss=0.094454, epoch loss=0.792251Batch=899, step=17700, lr=0.126500, batch loss=0.121769, epoch loss=0.914020Batch=959, step=17760, lr=0.126250, batch loss=0.021236, epoch loss=0.935257Batch=1019, step=17820, lr=0.126000, batch loss=0.033974, epoch loss=0.969230Batch=1079, step=17880, lr=0.125750, batch loss=0.011704, epoch loss=0.980934Batch=1139, step=17940, lr=0.125500, batch loss=0.031616, epoch loss=1.012550Batch=1199, step=18000, lr=0.125250, batch loss=0.014908, epoch loss=1.027458Epoch=14, step=18000, lr=0.125250, epoch loss=1.027458Batch=59, step=18060, lr=0.125000, batch loss=0.012103, epoch loss=0.012103Batch=119, step=18120, lr=0.124750, batch loss=0.035049, epoch loss=0.047153Batch=179, step=18180, lr=0.124500, batch loss=0.095140, epoch loss=0.142292Batch=239, step=18240, lr=0.124250, batch loss=0.046552, epoch loss=0.188844Batch=299, step=18300, lr=0.124000, batch loss=0.009618, epoch loss=0.198463Batch=359, step=18360, lr=0.123750, batch loss=0.026827, epoch loss=0.225289Batch=419, step=18420, lr=0.123500, batch loss=0.027961, epoch loss=0.253250Batch=479, step=18480, lr=0.123250, batch 
loss=0.012041, epoch loss=0.265291Batch=539, step=18540, lr=0.123000, batch loss=0.021214, epoch loss=0.286506Batch=599, step=18600, lr=0.122750, batch loss=0.024919, epoch loss=0.311425Batch=659, step=18660, lr=0.122500, batch loss=0.030826, epoch loss=0.342251Batch=719, step=18720, lr=0.122250, batch loss=0.045292, epoch loss=0.387543Batch=779, step=18780, lr=0.122000, batch loss=0.115289, epoch loss=0.502831Batch=839, step=18840, lr=0.121750, batch loss=0.055623, epoch loss=0.558454Batch=899, step=18900, lr=0.121500, batch loss=0.052428, epoch loss=0.610883Batch=959, step=18960, lr=0.121250, batch loss=0.013602, epoch loss=0.624484Batch=1019, step=19020, lr=0.121000, batch loss=0.017757, epoch loss=0.642241Batch=1079, step=19080, lr=0.120750, batch loss=0.005949, epoch loss=0.648190Batch=1139, step=19140, lr=0.120500, batch loss=0.025050, epoch loss=0.673240Batch=1199, step=19200, lr=0.120250, batch loss=0.010395, epoch loss=0.683635Epoch=15, step=19200, lr=0.120250, epoch loss=0.683635Batch=59, step=19260, lr=0.120000, batch loss=0.005950, epoch loss=0.005950Batch=119, step=19320, lr=0.119750, batch loss=0.018048, epoch loss=0.023998Batch=179, step=19380, lr=0.119500, batch loss=0.055286, epoch loss=0.079284Batch=239, step=19440, lr=0.119250, batch loss=0.023266, epoch loss=0.102550Batch=299, step=19500, lr=0.119000, batch loss=0.019022, epoch loss=0.121572Batch=359, step=19560, lr=0.118750, batch loss=0.033441, epoch loss=0.155013Batch=419, step=19620, lr=0.118500, batch loss=0.020290, epoch loss=0.175304Batch=479, step=19680, lr=0.118250, batch loss=0.008998, epoch loss=0.184302Batch=539, step=19740, lr=0.118000, batch loss=0.017646, epoch loss=0.201948Batch=599, step=19800, lr=0.117500, batch loss=0.023315, epoch loss=0.225262Batch=659, step=19860, lr=0.117250, batch loss=0.020409, epoch loss=0.245672Batch=719, step=19920, lr=0.117250, batch loss=0.052804, epoch loss=0.298476Batch=779, step=19980, lr=0.116750, batch loss=0.085355, epoch loss=0.383831Batch=839, step=20040, lr=0.116750, batch loss=0.030969, epoch loss=0.414800Batch=899, step=20100, lr=0.116500, batch loss=0.032913, epoch loss=0.447713Batch=959, step=20160, lr=0.116000, batch loss=0.011360, epoch loss=0.459073Batch=1019, step=20220, lr=0.115750, batch loss=0.015046, epoch loss=0.474119Batch=1079, step=20280, lr=0.115750, batch loss=0.001857, epoch loss=0.475976Batch=1139, step=20340, lr=0.115500, batch loss=0.014883, epoch loss=0.490859Batch=1199, step=20400, lr=0.115000, batch loss=0.005328, epoch loss=0.496186Epoch=16, step=20400, lr=0.115000, epoch loss=0.496186Batch=59, step=20460, lr=0.114750, batch loss=0.002896, epoch loss=0.002896Batch=119, step=20520, lr=0.114750, batch loss=0.008323, epoch loss=0.011219Batch=179, step=20580, lr=0.114500, batch loss=0.015927, epoch loss=0.027146Batch=239, step=20640, lr=0.114250, batch loss=0.026179, epoch loss=0.053325Batch=299, step=20700, lr=0.114000, batch loss=0.016495, epoch loss=0.069820Batch=359, step=20760, lr=0.113750, batch loss=0.017195, epoch loss=0.087015Batch=419, step=20820, lr=0.113500, batch loss=0.014907, epoch loss=0.101922Batch=479, step=20880, lr=0.113250, batch loss=0.003723, epoch loss=0.105645Batch=539, step=20940, lr=0.113000, batch loss=0.017849, epoch loss=0.123495Batch=599, step=21000, lr=0.112750, batch loss=0.020227, epoch loss=0.143722Batch=659, step=21060, lr=0.112500, batch loss=0.017278, epoch loss=0.161000Batch=719, step=21120, lr=0.112250, batch loss=0.052291, epoch loss=0.213291Batch=779, step=21180, lr=0.112000, batch loss=0.065431, epoch 
loss=0.278721Batch=839, step=21240, lr=0.111750, batch loss=0.024667, epoch loss=0.303389Batch=899, step=21300, lr=0.111500, batch loss=0.026974, epoch loss=0.330363Batch=959, step=21360, lr=0.111250, batch loss=0.010565, epoch loss=0.340928Batch=1019, step=21420, lr=0.111000, batch loss=0.010449, epoch loss=0.351376Batch=1079, step=21480, lr=0.110750, batch loss=0.001900, epoch loss=0.353276Batch=1139, step=21540, lr=0.110500, batch loss=0.012027, epoch loss=0.365303Batch=1199, step=21600, lr=0.110250, batch loss=0.004420, epoch loss=0.369723Epoch=17, step=21600, lr=0.110250, epoch loss=0.369723Batch=59, step=21660, lr=0.110000, batch loss=0.002159, epoch loss=0.002159Batch=119, step=21720, lr=0.109750, batch loss=0.007251, epoch loss=0.009410Batch=179, step=21780, lr=0.109500, batch loss=0.012134, epoch loss=0.021544Batch=239, step=21840, lr=0.109250, batch loss=0.009505, epoch loss=0.031048Batch=299, step=21900, lr=0.109000, batch loss=0.005454, epoch loss=0.036502Batch=359, step=21960, lr=0.108750, batch loss=0.011653, epoch loss=0.048155Batch=419, step=22020, lr=0.108500, batch loss=0.012086, epoch loss=0.060242Batch=479, step=22080, lr=0.108250, batch loss=0.003931, epoch loss=0.064172Batch=539, step=22140, lr=0.108000, batch loss=0.015172, epoch loss=0.079345Batch=599, step=22200, lr=0.107750, batch loss=0.015505, epoch loss=0.094850Batch=659, step=22260, lr=0.107500, batch loss=0.013033, epoch loss=0.107883Batch=719, step=22320, lr=0.107250, batch loss=0.021831, epoch loss=0.129713Batch=779, step=22380, lr=0.107000, batch loss=0.029894, epoch loss=0.159607Batch=839, step=22440, lr=0.106750, batch loss=0.027498, epoch loss=0.187105Batch=899, step=22500, lr=0.106500, batch loss=0.024855, epoch loss=0.211960Batch=959, step=22560, lr=0.106250, batch loss=0.009492, epoch loss=0.221453Batch=1019, step=22620, lr=0.106000, batch loss=0.009338, epoch loss=0.230791Batch=1079, step=22680, lr=0.105750, batch loss=0.001983, epoch loss=0.232775Batch=1139, step=22740, lr=0.105500, batch loss=0.011475, epoch loss=0.244249Batch=1199, step=22800, lr=0.105250, batch loss=0.005176, epoch loss=0.249426Epoch=18, step=22800, lr=0.105250, epoch loss=0.249426Batch=59, step=22860, lr=0.105000, batch loss=0.002290, epoch loss=0.002290Batch=119, step=22920, lr=0.104750, batch loss=0.005039, epoch loss=0.007329Batch=179, step=22980, lr=0.104500, batch loss=0.012142, epoch loss=0.019471Batch=239, step=23040, lr=0.104250, batch loss=0.008075, epoch loss=0.027547Batch=299, step=23100, lr=0.104000, batch loss=0.004160, epoch loss=0.031706Batch=359, step=23160, lr=0.103750, batch loss=0.011925, epoch loss=0.043631Batch=419, step=23220, lr=0.103500, batch loss=0.010390, epoch loss=0.054021Batch=479, step=23280, lr=0.103250, batch loss=0.003889, epoch loss=0.057910Batch=539, step=23340, lr=0.103000, batch loss=0.014492, epoch loss=0.072402Batch=599, step=23400, lr=0.102750, batch loss=0.014038, epoch loss=0.086441Batch=659, step=23460, lr=0.102500, batch loss=0.013065, epoch loss=0.099505Batch=719, step=23520, lr=0.102250, batch loss=0.015245, epoch loss=0.114751Batch=779, step=23580, lr=0.102000, batch loss=0.022051, epoch loss=0.136802Batch=839, step=23640, lr=0.101750, batch loss=0.026075, epoch loss=0.162877Batch=899, step=23700, lr=0.101500, batch loss=0.021333, epoch loss=0.184210Batch=959, step=23760, lr=0.101250, batch loss=0.008009, epoch loss=0.192218Batch=1019, step=23820, lr=0.101000, batch loss=0.008178, epoch loss=0.200397Batch=1079, step=23880, lr=0.100750, batch loss=0.001126, epoch 
Batch=1139, step=23940, lr=0.100500, batch loss=0.009085, epoch loss=0.210608
Batch=1199, step=24000, lr=0.100250, batch loss=0.004789, epoch loss=0.215397
Epoch=19, step=24000, lr=0.100250, epoch loss=0.215397
Half-moons scatterplot and decision boundary:
[character-grid plot: the two half-moon point classes, drawn with '#' and '%', overlaid on the two learned decision regions, drawn with '*' and '.']
2025-03-20 21:24.06 ---> saved as "4055ed94995557625b962950ac076f796fc745ecccd3f4c8c80bc1ed301b60c6"
Job succeeded
2025-03-20 21:24.07: Job succeeded
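Note on the progress lines above: each "Batch=" entry reports the loss of the most recent minibatch together with "epoch loss", which is the running sum of batch losses since the start of the current epoch (it resets at each "Epoch=" marker), while "lr" shrinks by small fixed decrements as the global step grows and a line is emitted every 60 batches. The standalone OCaml sketch below reproduces only that bookkeeping; it is an illustration of the arithmetic visible in the log, not OCannl's actual training loop, and the names run_epoch and batch_loss, the 0.00025 decrement, and the dummy loss values are assumptions made for the example.

  (* Illustrative only: mimics the reporting arithmetic seen in the log above. *)
  let batches_per_epoch = 1200    (* the batch counter reaches 1199 before each Epoch= line *)
  let report_every = 60           (* a progress line appears every 60 batches *)
  let lr_decrement = 0.00025      (* lr drops by roughly this much per reporting interval *)

  (* Hypothetical stand-in for the real per-batch loss computation. *)
  let batch_loss ~step = 0.02 +. 0.01 *. sin (float_of_int step)

  let run_epoch ~epoch ~start_step ~start_lr =
    let epoch_loss = ref 0.0 and lr = ref start_lr in
    for batch = 0 to batches_per_epoch - 1 do
      let step = start_step + batch + 1 in
      let loss = batch_loss ~step in
      epoch_loss := !epoch_loss +. loss;          (* running sum, reset each epoch *)
      if (batch + 1) mod report_every = 0 then begin
        lr := !lr -. lr_decrement;                (* stepwise, roughly linear decay *)
        Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
          batch step !lr loss !epoch_loss
      end
    done;
    Printf.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n"
      epoch (start_step + batches_per_epoch) !lr !epoch_loss;
    (start_step + batches_per_epoch, !lr)

  let () = ignore (run_epoch ~epoch:15 ~start_step:18000 ~start_lr:0.1205)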
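The final plot renders the classic two-class "half-moons" toy dataset together with the decision regions learned by the trained network. As a reminder of how that dataset is usually constructed (two interleaved, noisy semicircular arcs), here is a small self-contained OCaml sketch; the radius, offsets, and noise level are illustrative guesses, not necessarily the parameters used by the OCannl test.

  (* Hypothetical half-moons generator: two interleaved noisy semicircles.
     Parameters are illustrative, not those of the OCannl test. *)
  let half_moons ~n ~noise =
    let pi = 4.0 *. atan 1.0 in
    List.init n (fun i ->
        let t = pi *. float_of_int i /. float_of_int (n - 1) in
        let jitter () = noise *. (Random.float 2.0 -. 1.0) in
        if i mod 2 = 0 then
          (* upper moon: semicircle centred at the origin *)
          ((cos t +. jitter (), sin t +. jitter ()), 0)
        else
          (* lower moon: shifted and flipped semicircle *)
          ((1.0 -. cos t +. jitter (), 0.5 -. sin t +. jitter ()), 1))

  let () =
    Random.self_init ();
    half_moons ~n:8 ~noise:0.1
    |> List.iter (fun ((x, y), label) ->
           Printf.printf "x=% .3f  y=% .3f  class=%d\n" x y label)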