2025-03-20 22:11.56: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (4ee46a20839684c520fd8d1cc91b4a5416d1e783) (linux-x86_64:alpine-3.21-5.3_opam-2.3)
Base: ocaml/opam:alpine-3.21-ocaml-5.3@sha256:4ae4f6e361088eb06bab0beb78c7db535e8b7a689cca4c4dd76add4242c4dab0

Opam project build

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 4ee46a20
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:alpine-3.21-ocaml-5.3@sha256:4ae4f6e361088eb06bab0beb78c7db535e8b7a689cca4c4dd76add4242c4dab0
# alpine-3.21-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
    opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build

END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
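For quicker local iteration than a bare "docker build .", the image can be tagged and the test step re-run inside a container. This is a sketch rather than part of the CI job; the tag name "ocannl-repro" is arbitrary:

    docker build -t ocannl-repro .
    docker run --rm -it ocannl-repro opam exec -- dune runtest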
2025-03-20 22:11.56: Using cache hint "ahrefs/ocannl-ocaml/opam:alpine-3.21-ocaml-5.3@sha256:4ae4f6e361088eb06bab0beb78c7db535e8b7a689cca4c4dd76add4242c4dab0-alpine-3.21-5.3_opam-2.3-3fcdf15be1e8f7dcae915b4cdb940fd5"

2025-03-20 22:11.56: Using OBuilder spec:
((from ocaml/opam:alpine-3.21-ocaml-5.3@sha256:4ae4f6e361088eb06bab0beb78c7db535e8b7a689cca4c4dd76add4242c4dab0)
 (comment alpine-3.21-5.3_opam-2.3)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
      (shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
 (run (network host)
      (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'"))
 (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'"))
 (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
      (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
      (shell "opam install $DEPS"))
 (copy (src .) (dst /src))
 (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)
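The spec mirrors the Dockerfile above: dependency solving is done against only the two .opam files so that later source edits do not invalidate the cached install stages. Roughly the same sequence can be run outside Docker/OBuilder; this is a sketch assuming an existing opam 2.x switch on OCaml 5.3.0, whereas the CI instead installs the exact versions pinned in $DEPS:

    cd ocannl
    opam pin add -yn neural_nets_lib.dev ./
    opam pin add -yn arrayjit.dev ./
    opam install --deps-only --with-test ./neural_nets_lib.opam ./arrayjit.opam
    opam exec -- dune build @install @check @runtest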
2025-03-20 22:11.56: Waiting for resource in pool OCluster
2025-03-20 22:11.56: Waiting for worker…
2025-03-20 22:15.27: Got resource from pool OCluster
Building on asteria.caelum.ci.dev
All commits already cached
HEAD is now at 4ee46a20 Update GitHub actions

(from ocaml/opam:alpine-3.21-ocaml-5.3@sha256:4ae4f6e361088eb06bab0beb78c7db535e8b7a689cca4c4dd76add4242c4dab0)
2025-03-20 22:15.28 ---> using "dacbb842f0a2d29eaad3e832c76ea718382fa9cd05776a3fdb8016e465aa5e7b" from cache

/: (comment alpine-3.21-5.3_opam-2.3)
/: (user (uid 1000) (gid 1000))
/: (env CLICOLOR_FORCE 1)
/: (env OPAMCOLOR always)
/: (workdir /src)

/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-03-20 22:15.28 ---> using "4d575331d18a103bb0cc6f4a0dd61826f91da9419ba321b0b1320c4ec539b0db" from cache

/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
  - you won't be able to use mercurial repositories unless you install the hg command on your system.
  - you won't be able to use darcs repositories unless you install the darcs command on your system.

This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.

Continue? [y/n] y
Format upgrade done.

<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[ERROR] Could not update repository "opam-repository-archive": "/usr/bin/git fetch -q" exited with code 128 "fatal: unable to access 'https://github.com/ocaml/opam-repository-archive/': Could not resolve host: github.com"
[default] synchronised from file:///home/opam/opam-repository
2025-03-20 22:15.28 ---> using "6937929ef5611e557c4991cd8189eebf46058c4ee81139f68d15319429555db6" from cache

/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-03-20 22:15.28 ---> using "bb90a277fb48150bfb58577b4b61ca2e911fa7d8ff44939da041b865bac9afe8" from cache

/src: (workdir /src)
/src: (run (shell "sudo chown opam /src"))
2025-03-20 22:15.28 ---> using "dc9101c084e576ab6dcbeea601d2b330166d0d274d8fdcaf9cff106e658a7bf6" from cache

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
 * branch                  master     -> FETCH_HEAD
   862a7640b1..6cf83229dd  master     -> origin/master
4e25d0cf5f Merge pull request #27651 from lukstafi/opam-publish-ppx_minidebug.2.1.0

<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[opam-repository-archive] synchronised from git+https://github.com/ocaml/opam-repository-archive
[default] synchronised from file:///home/opam/opam-repository
Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run: eval $(opam env)
2025-03-20 22:15.28 ---> using "746fbb62527431fddf1e025894b82c1bcf0d23e4b907494eb6fa8ff4f5667dbb" from cache
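The dependency solution installed below is a function of the opam-repository commit this job pinned (4e25d0cf5f). To approximate the same solver input locally, the default repository can be pointed at that commit; a sketch, not something this job runs:

    opam repository set-url default git+https://github.com/ocaml/opam-repository#4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785
    opam update default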
/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
2025-03-20 22:15.28 ---> using "94daf93885bb20da30b7bd894ab4b5194f7de20111454b11600a212859a18f84" from cache

/src: (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'"))
[neural_nets_lib.dev] synchronised (file:///src)
neural_nets_lib is now pinned to file:///src (version dev)
[arrayjit.dev] synchronised (file:///src)
arrayjit is now pinned to file:///src (version dev)
2025-03-20 22:15.28 ---> using "0263072f472d78ecf3b17a0817836e08e773c27c25d1f17b527af2a488b50fa9" from cache

/src: (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'"))
2025-03-20 22:15.28 ---> using "1abe0d07a81ba3fbf11431fd854c9418fb1ec56eaa9e2362a1603bddc4bb93b2" from cache

/src: (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
/src: (env CI true)
/src: (env OCAMLCI true)

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
+ /usr/bin/sudo "apk" "update"
- fetch https://dl-cdn.alpinelinux.org/alpine/v3.21/main/x86_64/APKINDEX.tar.gz
- fetch https://dl-cdn.alpinelinux.org/alpine/v3.21/community/x86_64/APKINDEX.tar.gz
- fetch https://dl-cdn.alpinelinux.org/alpine/edge/main/x86_64/APKINDEX.tar.gz
- fetch https://dl-cdn.alpinelinux.org/alpine/edge/community/x86_64/APKINDEX.tar.gz
- fetch https://dl-cdn.alpinelinux.org/alpine/edge/testing/x86_64/APKINDEX.tar.gz
- v3.21.3-193-g2da8f1a642c [https://dl-cdn.alpinelinux.org/alpine/v3.21/main]
- v3.21.3-194-g25fc289152f [https://dl-cdn.alpinelinux.org/alpine/v3.21/community]
- v20250108-4318-g3d9c1aad998 [https://dl-cdn.alpinelinux.org/alpine/edge/main]
- v20250108-4328-gbd32d9239d0 [https://dl-cdn.alpinelinux.org/alpine/edge/community]
- v20250108-3887-g4037a56c69f [https://dl-cdn.alpinelinux.org/alpine/edge/testing]
- OK: 57226 distinct packages available

<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[arrayjit.dev] synchronised (file:///src)
[neural_nets_lib.dev] synchronised (file:///src)
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following system packages will first need to be installed:
    libffi-dev

<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>

+ /usr/bin/sudo "apk" "add" "libffi-dev"
- (1/2) Installing linux-headers (6.6-r1)
- (2/2) Installing libffi-dev (3.4.7-r0)
- OK: 312 MiB in 104 packages
2025-03-20 22:15.28 ---> using "fdd5e0321f573549db44114708b95cad3810d8bd019f60bb2801b8917fbcf989" from cache

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS"))
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following actions will be performed:
=== install 65 packages
  - install angstrom 0.16.1
  - install backoff 0.1.1
  - install base v0.17.1
  - install bigarray-compat 1.1.0
  - install bigstringaf 0.10.0
  - install conf-libffi 2.0.0
  - install conf-pkg-config 4
  - install cppo 1.8.0
  - install csexp 1.5.2
  - install ctypes 0.23.0
  - install ctypes-foreign 0.23.0
  - install dune 3.17.2
  - install dune-configurator 3.17.2
  - install fieldslib v0.17.0
  - install integers 0.7.0
  - install jane-street-headers v0.17.0
  - install jst-config v0.17.0
  - install mtime 2.1.0
  - install multicore-magic 2.3.1
  - install num 1.5-1
  - install ocaml-compiler-libs v0.17.0
  - install ocaml-syntax-shims 1.0.0
  - install ocaml_intrinsics_kernel v0.17.1
  - install ocamlbuild 0.16.1
  - install ocamlfind 1.9.8
  - install parsexp v0.17.0
  - install ppx_assert v0.17.0
  - install ppx_base v0.17.0
  - install ppx_cold v0.17.0
  - install ppx_compare v0.17.0
  - install ppx_derivers 1.2.1
  - install ppx_deriving 6.0.3
  - install ppx_enumerate v0.17.0
  - install ppx_expect v0.17.2
  - install ppx_fields_conv v0.17.0
  - install ppx_globalize v0.17.0
  - install ppx_hash v0.17.0
  - install ppx_here v0.17.0
  - install ppx_inline_test v0.17.0
  - install ppx_minidebug 2.1.0
  - install ppx_optcomp v0.17.0
  - install ppx_sexp_conv v0.17.0
  - install ppx_string v0.17.0
  - install ppx_variants_conv v0.17.0
  - install ppxlib 0.35.0
  - install ppxlib_jane v0.17.2
  - install printbox 0.12
  - install printbox-ext-plot 0.12
  - install printbox-html 0.12
  - install printbox-md 0.12
  - install printbox-text 0.12
  - install ptime 1.2.0
  - install re 1.12.0
  - install saturn_lockfree 0.5.0
  - install seq base
  - install sexplib v0.17.0
  - install sexplib0 v0.17.0
  - install stdio v0.17.0
  - install stdlib-shims 0.3.0
  - install time_now v0.17.0
  - install topkg 1.0.8
  - install tyxml 4.6.0
  - install uucp 16.0.0
  - install uutf 1.0.4
  - install variantslib v0.17.0

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved backoff.0.1.1 (cached)
-> retrieved bigarray-compat.1.1.0 (cached)
-> retrieved bigstringaf.0.10.0 (cached)
-> retrieved angstrom.0.16.1 (cached)
-> retrieved base.v0.17.1 (cached)
-> retrieved cppo.1.8.0 (cached)
-> installed conf-pkg-config.4
-> retrieved csexp.1.5.2 (cached)
-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached)
-> installed conf-libffi.2.0.0
-> retrieved fieldslib.v0.17.0 (cached)
-> retrieved integers.0.7.0 (cached)
-> retrieved jane-street-headers.v0.17.0 (cached)
-> retrieved jst-config.v0.17.0 (cached)
-> retrieved mtime.2.1.0 (cached)
-> retrieved multicore-magic.2.3.1 (cached)
-> retrieved num.1.5-1 (cached)
-> retrieved ocaml-compiler-libs.v0.17.0 (cached)
-> retrieved ocaml-syntax-shims.1.0.0 (cached)
-> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached)
-> retrieved ocamlbuild.0.16.1 (cached)
-> retrieved ocamlfind.1.9.8 (cached)
-> retrieved dune.3.17.2, dune-configurator.3.17.2 (cached)
-> installed num.1.5-1
-> retrieved parsexp.v0.17.0 (cached)
-> retrieved ppx_assert.v0.17.0 (cached)
-> retrieved ppx_base.v0.17.0 (cached)
-> retrieved ppx_cold.v0.17.0 (cached)
-> retrieved ppx_compare.v0.17.0 (cached)
-> retrieved ppx_derivers.1.2.1 (cached)
-> retrieved ppx_enumerate.v0.17.0 (cached)
-> retrieved ppx_deriving.6.0.3 (cached)
-> retrieved ppx_expect.v0.17.2 (cached)
-> retrieved ppx_fields_conv.v0.17.0 (cached)
-> retrieved ppx_globalize.v0.17.0 (cached)
-> retrieved ppx_hash.v0.17.0 (cached)
-> retrieved ppx_here.v0.17.0 (cached)
-> retrieved ppx_inline_test.v0.17.0 (cached)
-> retrieved ppx_optcomp.v0.17.0 (cached)
-> retrieved ppx_sexp_conv.v0.17.0 (cached)
-> retrieved ppx_minidebug.2.1.0 (cached)
-> retrieved ppx_string.v0.17.0 (cached)
-> retrieved ppx_variants_conv.v0.17.0 (cached)
-> retrieved ppxlib_jane.v0.17.2 (cached)
-> retrieved ptime.1.2.0 (cached)
-> retrieved re.1.12.0 (cached)
-> retrieved saturn_lockfree.0.5.0 (cached)
-> retrieved seq.base (cached)
-> installed seq.base
-> retrieved sexplib.v0.17.0 (cached)
-> retrieved ppxlib.0.35.0 (cached)
-> retrieved sexplib0.v0.17.0 (cached)
-> retrieved stdio.v0.17.0 (cached)
-> retrieved stdlib-shims.0.3.0 (cached)
-> retrieved time_now.v0.17.0 (cached)
-> retrieved tyxml.4.6.0 (cached)
-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached)
-> retrieved topkg.1.0.8 (cached)
-> retrieved uucp.16.0.0 (cached)
-> retrieved uutf.1.0.4 (cached)
-> retrieved variantslib.v0.17.0 (cached)
-> installed ocamlfind.1.9.8
-> installed ocamlbuild.0.16.1
-> installed topkg.1.0.8
-> installed uutf.1.0.4
-> installed mtime.2.1.0
-> installed ptime.1.2.0
-> installed dune.3.17.2
-> installed jane-street-headers.v0.17.0
-> installed ppx_derivers.1.2.1
-> installed csexp.1.5.2
-> installed backoff.0.1.1
-> installed bigarray-compat.1.1.0
-> installed cppo.1.8.0
-> installed multicore-magic.2.3.1
-> installed ocaml-compiler-libs.v0.17.0
-> installed ocaml-syntax-shims.1.0.0
-> installed ocaml_intrinsics_kernel.v0.17.1
-> installed printbox.0.12
-> installed re.1.12.0
-> installed sexplib0.v0.17.0
-> installed stdlib-shims.0.3.0
-> installed saturn_lockfree.0.5.0
-> installed integers.0.7.0
-> installed parsexp.v0.17.0
-> installed dune-configurator.3.17.2
-> installed bigstringaf.0.10.0
-> installed sexplib.v0.17.0
-> installed angstrom.0.16.1
-> installed tyxml.4.6.0
-> installed printbox-html.0.12
-> installed ctypes.0.23.0
-> installed base.v0.17.1
-> installed fieldslib.v0.17.0
-> installed variantslib.v0.17.0
-> installed stdio.v0.17.0
-> installed ctypes-foreign.0.23.0
-> installed uucp.16.0.0
-> installed printbox-text.0.12
-> installed printbox-md.0.12
-> installed printbox-ext-plot.0.12
-> installed ppxlib.0.35.0
-> installed ppx_optcomp.v0.17.0
-> installed ppxlib_jane.v0.17.2
-> installed ppx_cold.v0.17.0
-> installed ppx_here.v0.17.0
-> installed ppx_variants_conv.v0.17.0
-> installed ppx_fields_conv.v0.17.0
-> installed ppx_enumerate.v0.17.0
-> installed ppx_globalize.v0.17.0
-> installed ppx_deriving.6.0.3
-> installed ppx_compare.v0.17.0
-> installed ppx_sexp_conv.v0.17.0
-> installed ppx_hash.v0.17.0
-> installed ppx_assert.v0.17.0
-> installed ppx_minidebug.2.1.0
-> installed ppx_base.v0.17.0
-> installed jst-config.v0.17.0
-> installed ppx_string.v0.17.0
-> installed time_now.v0.17.0
-> installed ppx_inline_test.v0.17.0
-> installed ppx_expect.v0.17.2
Done.
# To update the current shell environment, run: eval $(opam env)
2025-03-20 22:15.28 ---> using "e61318ce5b248e2f259dd70a4a4aeed073c57dfb774958b5357cd3bb0527f48e" from cache

/src: (copy (src .) (dst /src))
2025-03-20 22:15.28 ---> saved as "f9ba3cc08b6015ab0dc0649f37af946093277f162ba6ca865742e5421e9f2eab"

/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test_ppx && ./test_ppx_op.exe)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
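To re-run just these test suites locally (assuming the dependencies above are installed and the checkout is the working directory), dune can target the two test directories that appear in this log; a sketch:

    opam exec -- dune runtest test_ppx   # the test_ppx_op executables above
    opam exec -- dune runtest test       # the tutorials inline tests and demo runs below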
(cd _build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/6eb92fad3341f8cc6a4f0392883b71f9/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/a009cd59939885a0ddf1a43e95c433cd/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test && ./moons_demo_parallel_run.exe)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
("Set log_level to" 1)
└─{orphaned from #2}
Retrieving commandline, environment, or config file variable ocannl_backend
Found cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout
Not found, using default false
Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453
Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087
Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382
Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039
Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216
Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512
Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081
Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141
Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385
Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263
Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603
Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902
Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024
Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685
Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407
Batch=959, step=960,
lr=0.196250, batch loss=0.245136, epoch loss=44.035543 Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049 Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829 Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269 Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952 Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952 Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138 Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381 Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025 Batch=239, step=1440, lr=0.194000, batch loss=0.347897, epoch loss=1.058921 Batch=299, step=1500, lr=0.194000, batch loss=0.247327, epoch loss=1.306248 Batch=359, step=1560, lr=0.193750, batch loss=0.316583, epoch loss=1.622832 Batch=419, step=1620, lr=0.193500, batch loss=0.312717, epoch loss=1.935549 Batch=479, step=1680, lr=0.193250, batch loss=0.276284, epoch loss=2.211832 Batch=539, step=1740, lr=0.193000, batch loss=0.209812, epoch loss=2.421644 Batch=599, step=1800, lr=0.192750, batch loss=0.250416, epoch loss=2.672061 Batch=659, step=1860, lr=0.192500, batch loss=0.367178, epoch loss=3.039239 Batch=719, step=1920, lr=0.192250, batch loss=0.354947, epoch loss=3.394186 Batch=779, step=1980, lr=0.192000, batch loss=0.381363, epoch loss=3.775549 Batch=839, step=2040, lr=0.191750, batch loss=0.339642, epoch loss=4.115191 Batch=899, step=2100, lr=0.191500, batch loss=0.295230, epoch loss=4.410421 Batch=959, step=2160, lr=0.191250, batch loss=0.214055, epoch loss=4.624476 Batch=1019, step=2220, lr=0.191000, batch loss=0.330926, epoch loss=4.955402 Batch=1079, step=2280, lr=0.190750, batch loss=0.208293, epoch loss=5.163695 Batch=1139, step=2340, lr=0.190500, batch loss=0.278322, epoch loss=5.442017 Batch=1199, step=2400, lr=0.190250, batch loss=0.220811, epoch loss=5.662828 Epoch=1, step=2400, lr=0.190250, epoch loss=5.662828 Batch=59, step=2460, lr=0.190000, batch loss=0.230385, epoch loss=0.230385 Batch=119, step=2520, lr=0.189750, batch loss=0.195939, epoch loss=0.426324 Batch=179, step=2580, lr=0.189500, batch loss=0.221166, epoch loss=0.647490 Batch=239, step=2640, lr=0.189250, batch loss=0.328099, epoch loss=0.975589 Batch=299, step=2700, lr=0.189000, batch loss=0.202927, epoch loss=1.178516 Batch=359, step=2760, lr=0.188750, batch loss=0.288270, epoch loss=1.466786 Batch=419, step=2820, lr=0.188500, batch loss=0.280167, epoch loss=1.746953 Batch=479, step=2880, lr=0.188250, batch loss=0.251464, epoch loss=1.998416 Batch=539, step=2940, lr=0.188000, batch loss=0.191273, epoch loss=2.189690 Batch=599, step=3000, lr=0.187750, batch loss=0.224961, epoch loss=2.414651 Batch=659, step=3060, lr=0.187500, batch loss=0.335774, epoch loss=2.750424 Batch=719, step=3120, lr=0.187250, batch loss=0.331410, epoch loss=3.081835 Batch=779, step=3180, lr=0.187000, batch loss=0.357934, epoch loss=3.439769 Batch=839, step=3240, lr=0.186750, batch loss=0.325602, epoch loss=3.765371 Batch=899, step=3300, lr=0.186500, batch loss=0.292384, epoch loss=4.057755 Batch=959, step=3360, lr=0.186250, batch loss=0.244937, epoch loss=4.302692 Batch=1019, step=3420, lr=0.186000, batch loss=0.361949, epoch loss=4.664641 Batch=1079, step=3480, lr=0.185750, batch loss=0.222003, epoch loss=4.886643 Batch=1139, step=3540, lr=0.185500, batch loss=0.264078, epoch loss=5.150721 Batch=1199, step=3600, lr=0.185250, batch loss=0.199931, epoch loss=5.350652 Epoch=2, step=3600, lr=0.185250, 
epoch loss=5.350652 Batch=59, step=3660, lr=0.185000, batch loss=0.221204, epoch loss=0.221204 Batch=119, step=3720, lr=0.184750, batch loss=0.184972, epoch loss=0.406176 Batch=179, step=3780, lr=0.184500, batch loss=0.211396, epoch loss=0.617572 Batch=239, step=3840, lr=0.184250, batch loss=0.316953, epoch loss=0.934525 Batch=299, step=3900, lr=0.184000, batch loss=0.209230, epoch loss=1.143755 Batch=359, step=3960, lr=0.183750, batch loss=0.285616, epoch loss=1.429371 Batch=419, step=4020, lr=0.183500, batch loss=0.278256, epoch loss=1.707627 Batch=479, step=4080, lr=0.183250, batch loss=0.254956, epoch loss=1.962583 Batch=539, step=4140, lr=0.183000, batch loss=0.199965, epoch loss=2.162547 Batch=599, step=4200, lr=0.182750, batch loss=0.242635, epoch loss=2.405182 Batch=659, step=4260, lr=0.182500, batch loss=0.327756, epoch loss=2.732938 Batch=719, step=4320, lr=0.182250, batch loss=0.330863, epoch loss=3.063801 Batch=779, step=4380, lr=0.182000, batch loss=0.349746, epoch loss=3.413547 Batch=839, step=4440, lr=0.181750, batch loss=0.317834, epoch loss=3.731381 Batch=899, step=4500, lr=0.181500, batch loss=0.285780, epoch loss=4.017162 Batch=959, step=4560, lr=0.181250, batch loss=0.239671, epoch loss=4.256833 Batch=1019, step=4620, lr=0.181000, batch loss=0.332985, epoch loss=4.589818 Batch=1079, step=4680, lr=0.180750, batch loss=0.195748, epoch loss=4.785566 Batch=1139, step=4740, lr=0.180500, batch loss=0.229059, epoch loss=5.014625 Batch=1199, step=4800, lr=0.180250, batch loss=0.193311, epoch loss=5.207936 Epoch=3, step=4800, lr=0.180250, epoch loss=5.207936 Batch=59, step=4860, lr=0.180000, batch loss=0.234163, epoch loss=0.234163 Batch=119, step=4920, lr=0.179750, batch loss=0.192078, epoch loss=0.426241 Batch=179, step=4980, lr=0.179500, batch loss=0.205725, epoch loss=0.631967 Batch=239, step=5040, lr=0.179250, batch loss=0.307543, epoch loss=0.939510 Batch=299, step=5100, lr=0.179000, batch loss=0.205976, epoch loss=1.145486 Batch=359, step=5160, lr=0.178750, batch loss=0.270789, epoch loss=1.416275 Batch=419, step=5220, lr=0.178500, batch loss=0.265478, epoch loss=1.681753 Batch=479, step=5280, lr=0.178250, batch loss=0.241990, epoch loss=1.923743 Batch=539, step=5340, lr=0.178000, batch loss=0.195165, epoch loss=2.118908 Batch=599, step=5400, lr=0.177750, batch loss=0.243529, epoch loss=2.362437 Batch=659, step=5460, lr=0.177500, batch loss=0.317072, epoch loss=2.679508 Batch=719, step=5520, lr=0.177250, batch loss=0.317767, epoch loss=2.997275 Batch=779, step=5580, lr=0.177000, batch loss=0.343138, epoch loss=3.340413 Batch=839, step=5640, lr=0.176750, batch loss=0.308247, epoch loss=3.648660 Batch=899, step=5700, lr=0.176500, batch loss=0.273826, epoch loss=3.922486 Batch=959, step=5760, lr=0.176250, batch loss=0.210780, epoch loss=4.133266 Batch=1019, step=5820, lr=0.176000, batch loss=0.338777, epoch loss=4.472043 Batch=1079, step=5880, lr=0.175750, batch loss=0.201316, epoch loss=4.673359 Batch=1139, step=5940, lr=0.175500, batch loss=0.243028, epoch loss=4.916387 Batch=1199, step=6000, lr=0.175250, batch loss=0.185380, epoch loss=5.101767 Epoch=4, step=6000, lr=0.175250, epoch loss=5.101767 Batch=59, step=6060, lr=0.175000, batch loss=0.226388, epoch loss=0.226388 Batch=119, step=6120, lr=0.174750, batch loss=0.184059, epoch loss=0.410447 Batch=179, step=6180, lr=0.174500, batch loss=0.201405, epoch loss=0.611852 Batch=239, step=6240, lr=0.174250, batch loss=0.301269, epoch loss=0.913121 Batch=299, step=6300, lr=0.174000, batch loss=0.205701, epoch loss=1.118822 
Batch=359, step=6360, lr=0.173750, batch loss=0.264940, epoch loss=1.383763 Batch=419, step=6420, lr=0.173500, batch loss=0.261317, epoch loss=1.645080 Batch=479, step=6480, lr=0.173250, batch loss=0.241191, epoch loss=1.886271 Batch=539, step=6540, lr=0.173000, batch loss=0.196270, epoch loss=2.082541 Batch=599, step=6600, lr=0.172750, batch loss=0.232369, epoch loss=2.314910 Batch=659, step=6660, lr=0.172500, batch loss=0.313607, epoch loss=2.628517 Batch=719, step=6720, lr=0.172250, batch loss=0.316688, epoch loss=2.945204 Batch=779, step=6780, lr=0.172000, batch loss=0.330910, epoch loss=3.276114 Batch=839, step=6840, lr=0.171750, batch loss=0.306303, epoch loss=3.582417 Batch=899, step=6900, lr=0.171500, batch loss=0.268589, epoch loss=3.851007 Batch=959, step=6960, lr=0.171250, batch loss=0.207614, epoch loss=4.058621 Batch=1019, step=7020, lr=0.171000, batch loss=0.331502, epoch loss=4.390122 Batch=1079, step=7080, lr=0.170750, batch loss=0.180595, epoch loss=4.570717 Batch=1139, step=7140, lr=0.170500, batch loss=0.216861, epoch loss=4.787578 Batch=1199, step=7200, lr=0.170250, batch loss=0.182272, epoch loss=4.969850 Epoch=5, step=7200, lr=0.170250, epoch loss=4.969850 Batch=59, step=7260, lr=0.170000, batch loss=0.240113, epoch loss=0.240113 Batch=119, step=7320, lr=0.169750, batch loss=0.179771, epoch loss=0.419884 Batch=179, step=7380, lr=0.169500, batch loss=0.195251, epoch loss=0.615135 Batch=239, step=7440, lr=0.169250, batch loss=0.292254, epoch loss=0.907389 Batch=299, step=7500, lr=0.169000, batch loss=0.207890, epoch loss=1.115278 Batch=359, step=7560, lr=0.168750, batch loss=0.261241, epoch loss=1.376519 Batch=419, step=7620, lr=0.168500, batch loss=0.253869, epoch loss=1.630387 Batch=479, step=7680, lr=0.168250, batch loss=0.232158, epoch loss=1.862545 Batch=539, step=7740, lr=0.168000, batch loss=0.185258, epoch loss=2.047803 Batch=599, step=7800, lr=0.167750, batch loss=0.227454, epoch loss=2.275258 Batch=659, step=7860, lr=0.167500, batch loss=0.304122, epoch loss=2.579380 Batch=719, step=7920, lr=0.167250, batch loss=0.308442, epoch loss=2.887822 Batch=779, step=7980, lr=0.167000, batch loss=0.325031, epoch loss=3.212852 Batch=839, step=8040, lr=0.166750, batch loss=0.293988, epoch loss=3.506840 Batch=899, step=8100, lr=0.166500, batch loss=0.261077, epoch loss=3.767917 Batch=959, step=8160, lr=0.166250, batch loss=0.202788, epoch loss=3.970704 Batch=1019, step=8220, lr=0.166000, batch loss=0.321480, epoch loss=4.292184 Batch=1079, step=8280, lr=0.165750, batch loss=0.184306, epoch loss=4.476491 Batch=1139, step=8340, lr=0.165500, batch loss=0.213483, epoch loss=4.689973 Batch=1199, step=8400, lr=0.165250, batch loss=0.172101, epoch loss=4.862074 Epoch=6, step=8400, lr=0.165250, epoch loss=4.862074 Batch=59, step=8460, lr=0.165000, batch loss=0.210620, epoch loss=0.210620 Batch=119, step=8520, lr=0.164750, batch loss=0.172781, epoch loss=0.383401 Batch=179, step=8580, lr=0.164500, batch loss=0.187800, epoch loss=0.571202 Batch=239, step=8640, lr=0.164250, batch loss=0.276871, epoch loss=0.848072 Batch=299, step=8700, lr=0.164000, batch loss=0.192501, epoch loss=1.040573 Batch=359, step=8760, lr=0.163750, batch loss=0.249419, epoch loss=1.289992 Batch=419, step=8820, lr=0.163500, batch loss=0.243895, epoch loss=1.533887 Batch=479, step=8880, lr=0.163250, batch loss=0.229487, epoch loss=1.763373 Batch=539, step=8940, lr=0.163000, batch loss=0.176893, epoch loss=1.940266 Batch=599, step=9000, lr=0.162750, batch loss=0.218888, epoch loss=2.159154 Batch=659, step=9060, 
lr=0.162500, batch loss=0.293252, epoch loss=2.452406 Batch=719, step=9120, lr=0.162250, batch loss=0.297374, epoch loss=2.749780 Batch=779, step=9180, lr=0.162000, batch loss=0.314214, epoch loss=3.063994 Batch=839, step=9240, lr=0.161750, batch loss=0.281817, epoch loss=3.345811 Batch=899, step=9300, lr=0.161500, batch loss=0.252414, epoch loss=3.598225 Batch=959, step=9360, lr=0.161250, batch loss=0.187572, epoch loss=3.785797 Batch=1019, step=9420, lr=0.161000, batch loss=0.318605, epoch loss=4.104402 Batch=1079, step=9480, lr=0.160750, batch loss=0.184714, epoch loss=4.289116 Batch=1139, step=9540, lr=0.160500, batch loss=0.207341, epoch loss=4.496457 Batch=1199, step=9600, lr=0.160250, batch loss=0.166114, epoch loss=4.662571 Epoch=7, step=9600, lr=0.160250, epoch loss=4.662571 Batch=59, step=9660, lr=0.160000, batch loss=0.200696, epoch loss=0.200696 Batch=119, step=9720, lr=0.159750, batch loss=0.162818, epoch loss=0.363514 Batch=179, step=9780, lr=0.159500, batch loss=0.178707, epoch loss=0.542222 Batch=239, step=9840, lr=0.159250, batch loss=0.260221, epoch loss=0.802442 Batch=299, step=9900, lr=0.159000, batch loss=0.178692, epoch loss=0.981134 Batch=359, step=9960, lr=0.158750, batch loss=0.233787, epoch loss=1.214921 Batch=419, step=10020, lr=0.158500, batch loss=0.230765, epoch loss=1.445686 Batch=479, step=10080, lr=0.158250, batch loss=0.212472, epoch loss=1.658158 Batch=539, step=10140, lr=0.158000, batch loss=0.170922, epoch loss=1.829080 Batch=599, step=10200, lr=0.157750, batch loss=0.200761, epoch loss=2.029841 Batch=659, step=10260, lr=0.157500, batch loss=0.280915, epoch loss=2.310757 Batch=719, step=10320, lr=0.157250, batch loss=0.278944, epoch loss=2.589700 Batch=779, step=10380, lr=0.157000, batch loss=0.299548, epoch loss=2.889248 Batch=839, step=10440, lr=0.156750, batch loss=0.270366, epoch loss=3.159614 Batch=899, step=10500, lr=0.156500, batch loss=0.239742, epoch loss=3.399356 Batch=959, step=10560, lr=0.156250, batch loss=0.197500, epoch loss=3.596856 Batch=1019, step=10620, lr=0.156000, batch loss=0.277349, epoch loss=3.874205 Batch=1079, step=10680, lr=0.155750, batch loss=0.150770, epoch loss=4.024975 Batch=1139, step=10740, lr=0.155500, batch loss=0.180005, epoch loss=4.204980 Batch=1199, step=10800, lr=0.155250, batch loss=0.163587, epoch loss=4.368568 Epoch=8, step=10800, lr=0.155250, epoch loss=4.368568 Batch=59, step=10860, lr=0.155000, batch loss=0.192391, epoch loss=0.192391 Batch=119, step=10920, lr=0.154750, batch loss=0.162997, epoch loss=0.355388 Batch=179, step=10980, lr=0.154500, batch loss=0.165627, epoch loss=0.521016 Batch=239, step=11040, lr=0.154250, batch loss=0.242774, epoch loss=0.763790 Batch=299, step=11100, lr=0.154000, batch loss=0.165807, epoch loss=0.929597 Batch=359, step=11160, lr=0.153750, batch loss=0.222597, epoch loss=1.152195 Batch=419, step=11220, lr=0.153500, batch loss=0.229435, epoch loss=1.381630 Batch=479, step=11280, lr=0.153250, batch loss=0.202240, epoch loss=1.583870 Batch=539, step=11340, lr=0.153000, batch loss=0.160828, epoch loss=1.744698 Batch=599, step=11400, lr=0.152750, batch loss=0.184130, epoch loss=1.928828 Batch=659, step=11460, lr=0.152500, batch loss=0.260908, epoch loss=2.189737 Batch=719, step=11520, lr=0.152250, batch loss=0.251674, epoch loss=2.441410 Batch=779, step=11580, lr=0.152000, batch loss=0.268837, epoch loss=2.710247 Batch=839, step=11640, lr=0.151750, batch loss=0.255901, epoch loss=2.966148 Batch=899, step=11700, lr=0.151500, batch loss=0.231095, epoch loss=3.197243 Batch=959, 
step=11760, lr=0.151250, batch loss=0.167374, epoch loss=3.364617 Batch=1019, step=11820, lr=0.151000, batch loss=0.263265, epoch loss=3.627882 Batch=1079, step=11880, lr=0.150750, batch loss=0.136034, epoch loss=3.763916 Batch=1139, step=11940, lr=0.150500, batch loss=0.161891, epoch loss=3.925807 Batch=1199, step=12000, lr=0.150250, batch loss=0.138302, epoch loss=4.064109 Epoch=9, step=12000, lr=0.150250, epoch loss=4.064109 Batch=59, step=12060, lr=0.150000, batch loss=0.162030, epoch loss=0.162030 Batch=119, step=12120, lr=0.149750, batch loss=0.142691, epoch loss=0.304721 Batch=179, step=12180, lr=0.149500, batch loss=0.148584, epoch loss=0.453305 Batch=239, step=12240, lr=0.149250, batch loss=0.214989, epoch loss=0.668294 Batch=299, step=12300, lr=0.149000, batch loss=0.145719, epoch loss=0.814013 Batch=359, step=12360, lr=0.148750, batch loss=0.193573, epoch loss=1.007585 Batch=419, step=12420, lr=0.148500, batch loss=0.207449, epoch loss=1.215034 Batch=479, step=12480, lr=0.148250, batch loss=0.176883, epoch loss=1.391917 Batch=539, step=12540, lr=0.148000, batch loss=0.143284, epoch loss=1.535201 Batch=599, step=12600, lr=0.147750, batch loss=0.148370, epoch loss=1.683572 Batch=659, step=12660, lr=0.147500, batch loss=0.224460, epoch loss=1.908032 Batch=719, step=12720, lr=0.147250, batch loss=0.227188, epoch loss=2.135220 Batch=779, step=12780, lr=0.147000, batch loss=0.257870, epoch loss=2.393090 Batch=839, step=12840, lr=0.146750, batch loss=0.231993, epoch loss=2.625083 Batch=899, step=12900, lr=0.146500, batch loss=0.233415, epoch loss=2.858498 Batch=959, step=12960, lr=0.146250, batch loss=0.135014, epoch loss=2.993512 Batch=1019, step=13020, lr=0.146000, batch loss=0.212169, epoch loss=3.205681 Batch=1079, step=13080, lr=0.145750, batch loss=0.115667, epoch loss=3.321348 Batch=1139, step=13140, lr=0.145500, batch loss=0.152754, epoch loss=3.474102 Batch=1199, step=13200, lr=0.145250, batch loss=0.115770, epoch loss=3.589873 Epoch=10, step=13200, lr=0.145250, epoch loss=3.589873 Batch=59, step=13260, lr=0.145000, batch loss=0.136741, epoch loss=0.136741 Batch=119, step=13320, lr=0.144750, batch loss=0.115417, epoch loss=0.252159 Batch=179, step=13380, lr=0.144500, batch loss=0.125960, epoch loss=0.378119 Batch=239, step=13440, lr=0.144250, batch loss=0.182406, epoch loss=0.560525 Batch=299, step=13500, lr=0.144000, batch loss=0.117310, epoch loss=0.677836 Batch=359, step=13560, lr=0.143750, batch loss=0.161516, epoch loss=0.839352 Batch=419, step=13620, lr=0.143500, batch loss=0.159290, epoch loss=0.998642 Batch=479, step=13680, lr=0.143250, batch loss=0.145902, epoch loss=1.144544 Batch=539, step=13740, lr=0.143000, batch loss=0.115847, epoch loss=1.260391 Batch=599, step=13800, lr=0.142750, batch loss=0.118866, epoch loss=1.379257 Batch=659, step=13860, lr=0.142500, batch loss=0.178482, epoch loss=1.557740 Batch=719, step=13920, lr=0.142250, batch loss=0.191643, epoch loss=1.749382 Batch=779, step=13980, lr=0.142000, batch loss=0.223673, epoch loss=1.973055 Batch=839, step=14040, lr=0.141750, batch loss=0.202719, epoch loss=2.175773 Batch=899, step=14100, lr=0.141500, batch loss=0.224630, epoch loss=2.400403 Batch=959, step=14160, lr=0.141250, batch loss=0.094159, epoch loss=2.494562 Batch=1019, step=14220, lr=0.141000, batch loss=0.187882, epoch loss=2.682443 Batch=1079, step=14280, lr=0.140750, batch loss=0.078452, epoch loss=2.760895 Batch=1139, step=14340, lr=0.140500, batch loss=0.131087, epoch loss=2.891982 Batch=1199, step=14400, lr=0.140250, batch loss=0.085788, 
epoch loss=2.977770 Epoch=11, step=14400, lr=0.140250, epoch loss=2.977770 Batch=59, step=14460, lr=0.140000, batch loss=0.102864, epoch loss=0.102864 Batch=119, step=14520, lr=0.139750, batch loss=0.097361, epoch loss=0.200225 Batch=179, step=14580, lr=0.139500, batch loss=0.099976, epoch loss=0.300201 Batch=239, step=14640, lr=0.139250, batch loss=0.138400, epoch loss=0.438601 Batch=299, step=14700, lr=0.139000, batch loss=0.075494, epoch loss=0.514095 Batch=359, step=14760, lr=0.138750, batch loss=0.118204, epoch loss=0.632299 Batch=419, step=14820, lr=0.138500, batch loss=0.130240, epoch loss=0.762539 Batch=479, step=14880, lr=0.138250, batch loss=0.097563, epoch loss=0.860102 Batch=539, step=14940, lr=0.138000, batch loss=0.100311, epoch loss=0.960413 Batch=599, step=15000, lr=0.137750, batch loss=0.082866, epoch loss=1.043279 Batch=659, step=15060, lr=0.137500, batch loss=0.133878, epoch loss=1.177157 Batch=719, step=15120, lr=0.137250, batch loss=0.157690, epoch loss=1.334847 Batch=779, step=15180, lr=0.137000, batch loss=0.275066, epoch loss=1.609913 Batch=839, step=15240, lr=0.136750, batch loss=0.140866, epoch loss=1.750779 Batch=899, step=15300, lr=0.136500, batch loss=0.145761, epoch loss=1.896540 Batch=959, step=15360, lr=0.136250, batch loss=0.073367, epoch loss=1.969907 Batch=1019, step=15420, lr=0.136000, batch loss=0.166393, epoch loss=2.136300 Batch=1079, step=15480, lr=0.135750, batch loss=0.037383, epoch loss=2.173683 Batch=1139, step=15540, lr=0.135500, batch loss=0.091040, epoch loss=2.264724 Batch=1199, step=15600, lr=0.135250, batch loss=0.057068, epoch loss=2.321792 Epoch=12, step=15600, lr=0.135250, epoch loss=2.321792 Batch=59, step=15660, lr=0.135000, batch loss=0.076656, epoch loss=0.076656 Batch=119, step=15720, lr=0.134750, batch loss=0.127773, epoch loss=0.204429 Batch=179, step=15780, lr=0.134500, batch loss=0.092849, epoch loss=0.297278 Batch=239, step=15840, lr=0.134250, batch loss=0.093344, epoch loss=0.390622 Batch=299, step=15900, lr=0.134000, batch loss=0.037247, epoch loss=0.427868 Batch=359, step=15960, lr=0.133750, batch loss=0.080686, epoch loss=0.508554 Batch=419, step=16020, lr=0.133500, batch loss=0.080468, epoch loss=0.589022 Batch=479, step=16080, lr=0.133250, batch loss=0.061965, epoch loss=0.650987 Batch=539, step=16140, lr=0.133000, batch loss=0.060265, epoch loss=0.711252 Batch=599, step=16200, lr=0.132750, batch loss=0.122700, epoch loss=0.833951 Batch=659, step=16260, lr=0.132500, batch loss=0.082503, epoch loss=0.916454 Batch=719, step=16320, lr=0.132250, batch loss=0.117870, epoch loss=1.034324 Batch=779, step=16380, lr=0.132000, batch loss=0.292120, epoch loss=1.326444 Batch=839, step=16440, lr=0.131750, batch loss=0.090346, epoch loss=1.416790 Batch=899, step=16500, lr=0.131500, batch loss=0.075428, epoch loss=1.492218 Batch=959, step=16560, lr=0.131250, batch loss=0.049710, epoch loss=1.541927 Batch=1019, step=16620, lr=0.131000, batch loss=0.128548, epoch loss=1.670475 Batch=1079, step=16680, lr=0.130750, batch loss=0.021088, epoch loss=1.691564 Batch=1139, step=16740, lr=0.130500, batch loss=0.044602, epoch loss=1.736165 Batch=1199, step=16800, lr=0.130250, batch loss=0.025293, epoch loss=1.761458 Epoch=13, step=16800, lr=0.130250, epoch loss=1.761458 Batch=59, step=16860, lr=0.130000, batch loss=0.041983, epoch loss=0.041983 Batch=119, step=16920, lr=0.129750, batch loss=0.085701, epoch loss=0.127684 Batch=179, step=16980, lr=0.129500, batch loss=0.064759, epoch loss=0.192444 Batch=239, step=17040, lr=0.129250, batch 
loss=0.061048, epoch loss=0.253491 Batch=299, step=17100, lr=0.129000, batch loss=0.019979, epoch loss=0.273471 Batch=359, step=17160, lr=0.128750, batch loss=0.047960, epoch loss=0.321431 Batch=419, step=17220, lr=0.128500, batch loss=0.072184, epoch loss=0.393615 Batch=479, step=17280, lr=0.128250, batch loss=0.022090, epoch loss=0.415704 Batch=539, step=17340, lr=0.128000, batch loss=0.029907, epoch loss=0.445611 Batch=599, step=17400, lr=0.127750, batch loss=0.034314, epoch loss=0.479925 Batch=659, step=17460, lr=0.127500, batch loss=0.045147, epoch loss=0.525072 Batch=719, step=17520, lr=0.127250, batch loss=0.041111, epoch loss=0.566183 Batch=779, step=17580, lr=0.127000, batch loss=0.059048, epoch loss=0.625231 Batch=839, step=17640, lr=0.126750, batch loss=0.079908, epoch loss=0.705138 Batch=899, step=17700, lr=0.126500, batch loss=0.118087, epoch loss=0.823226 Batch=959, step=17760, lr=0.126250, batch loss=0.035137, epoch loss=0.858363 Batch=1019, step=17820, lr=0.126000, batch loss=0.074845, epoch loss=0.933208 Batch=1079, step=17880, lr=0.125750, batch loss=0.016441, epoch loss=0.949648 Batch=1139, step=17940, lr=0.125500, batch loss=0.030869, epoch loss=0.980517 Batch=1199, step=18000, lr=0.125250, batch loss=0.016185, epoch loss=0.996702 Epoch=14, step=18000, lr=0.125250, epoch loss=0.996702 Batch=59, step=18060, lr=0.125000, batch loss=0.013489, epoch loss=0.013489 Batch=119, step=18120, lr=0.124750, batch loss=0.030668, epoch loss=0.044158 Batch=179, step=18180, lr=0.124500, batch loss=0.079804, epoch loss=0.123961 Batch=239, step=18240, lr=0.124250, batch loss=0.041291, epoch loss=0.165252 Batch=299, step=18300, lr=0.124000, batch loss=0.011176, epoch loss=0.176428 Batch=359, step=18360, lr=0.123750, batch loss=0.027185, epoch loss=0.203613 Batch=419, step=18420, lr=0.123500, batch loss=0.028675, epoch loss=0.232289 Batch=479, step=18480, lr=0.123250, batch loss=0.014524, epoch loss=0.246813 Batch=539, step=18540, lr=0.123000, batch loss=0.020210, epoch loss=0.267023 Batch=599, step=18600, lr=0.122750, batch loss=0.027008, epoch loss=0.294030 Batch=659, step=18660, lr=0.122500, batch loss=0.025891, epoch loss=0.319922 Batch=719, step=18720, lr=0.122250, batch loss=0.055689, epoch loss=0.375611 Batch=779, step=18780, lr=0.122000, batch loss=0.102590, epoch loss=0.478201 Batch=839, step=18840, lr=0.121750, batch loss=0.066476, epoch loss=0.544677 Batch=899, step=18900, lr=0.121500, batch loss=0.085341, epoch loss=0.630018 Batch=959, step=18960, lr=0.121250, batch loss=0.014430, epoch loss=0.644447 Batch=1019, step=19020, lr=0.121000, batch loss=0.022749, epoch loss=0.667196 Batch=1079, step=19080, lr=0.120750, batch loss=0.007160, epoch loss=0.674356 Batch=1139, step=19140, lr=0.120500, batch loss=0.024537, epoch loss=0.698893 Batch=1199, step=19200, lr=0.120250, batch loss=0.010615, epoch loss=0.709508 Epoch=15, step=19200, lr=0.120250, epoch loss=0.709508 Batch=59, step=19260, lr=0.120000, batch loss=0.004696, epoch loss=0.004696 Batch=119, step=19320, lr=0.119750, batch loss=0.021626, epoch loss=0.026321 Batch=179, step=19380, lr=0.119500, batch loss=0.061828, epoch loss=0.088150 Batch=239, step=19440, lr=0.119250, batch loss=0.027405, epoch loss=0.115555 Batch=299, step=19500, lr=0.119000, batch loss=0.010775, epoch loss=0.126329 Batch=359, step=19560, lr=0.118750, batch loss=0.024223, epoch loss=0.150552 Batch=419, step=19620, lr=0.118500, batch loss=0.021016, epoch loss=0.171568 Batch=479, step=19680, lr=0.118250, batch loss=0.006958, epoch loss=0.178526 Batch=539, 
step=19740, lr=0.118000, batch loss=0.017896, epoch loss=0.196421 Batch=599, step=19800, lr=0.117750, batch loss=0.024751, epoch loss=0.221173 Batch=659, step=19860, lr=0.117500, batch loss=0.021819, epoch loss=0.242992 Batch=719, step=19920, lr=0.117250, batch loss=0.042484, epoch loss=0.285476 Batch=779, step=19980, lr=0.117000, batch loss=0.082011, epoch loss=0.367487 Batch=839, step=20040, lr=0.116750, batch loss=0.031274, epoch loss=0.398762 Batch=899, step=20100, lr=0.116500, batch loss=0.034617, epoch loss=0.433379 Batch=959, step=20160, lr=0.116250, batch loss=0.011656, epoch loss=0.445034 Batch=1019, step=20220, lr=0.116000, batch loss=0.015325, epoch loss=0.460359 Batch=1079, step=20280, lr=0.115750, batch loss=0.001043, epoch loss=0.461402 Batch=1139, step=20340, lr=0.115500, batch loss=0.015517, epoch loss=0.476919 Batch=1199, step=20400, lr=0.115250, batch loss=0.005628, epoch loss=0.482547 Epoch=16, step=20400, lr=0.115250, epoch loss=0.482547 Batch=59, step=20460, lr=0.115000, batch loss=0.003197, epoch loss=0.003197 Batch=119, step=20520, lr=0.114750, batch loss=0.010903, epoch loss=0.014101 Batch=179, step=20580, lr=0.114500, batch loss=0.025266, epoch loss=0.039367 Batch=239, step=20640, lr=0.114250, batch loss=0.015634, epoch loss=0.055001 Batch=299, step=20700, lr=0.114000, batch loss=0.003893, epoch loss=0.058894 Batch=359, step=20760, lr=0.113750, batch loss=0.014251, epoch loss=0.073145 Batch=419, step=20820, lr=0.113500, batch loss=0.014565, epoch loss=0.087711 Batch=479, step=20880, lr=0.113250, batch loss=0.004009, epoch loss=0.091719 Batch=539, step=20940, lr=0.113000, batch loss=0.016054, epoch loss=0.107774 Batch=599, step=21000, lr=0.112750, batch loss=0.017283, epoch loss=0.125057 Batch=659, step=21060, lr=0.112500, batch loss=0.013543, epoch loss=0.138600 Batch=719, step=21120, lr=0.112250, batch loss=0.030979, epoch loss=0.169580 Batch=779, step=21180, lr=0.112000, batch loss=0.048501, epoch loss=0.218081 Batch=839, step=21240, lr=0.111750, batch loss=0.027345, epoch loss=0.245426 Batch=899, step=21300, lr=0.111500, batch loss=0.026945, epoch loss=0.272371 Batch=959, step=21360, lr=0.111250, batch loss=0.012718, epoch loss=0.285089 Batch=1019, step=21420, lr=0.111000, batch loss=0.011703, epoch loss=0.296792 Batch=1079, step=21480, lr=0.110750, batch loss=0.002863, epoch loss=0.299655 Batch=1139, step=21540, lr=0.110500, batch loss=0.012603, epoch loss=0.312257 Batch=1199, step=21600, lr=0.110250, batch loss=0.005127, epoch loss=0.317384 Epoch=17, step=21600, lr=0.110250, epoch loss=0.317384 Batch=59, step=21660, lr=0.110000, batch loss=0.002454, epoch loss=0.002454 Batch=119, step=21720, lr=0.109750, batch loss=0.006430, epoch loss=0.008884 Batch=179, step=21780, lr=0.109500, batch loss=0.012298, epoch loss=0.021181 Batch=239, step=21840, lr=0.109250, batch loss=0.009010, epoch loss=0.030192 Batch=299, step=21900, lr=0.109000, batch loss=0.003848, epoch loss=0.034040 Batch=359, step=21960, lr=0.108750, batch loss=0.013518, epoch loss=0.047559 Batch=419, step=22020, lr=0.108500, batch loss=0.012432, epoch loss=0.059991 Batch=479, step=22080, lr=0.108250, batch loss=0.003213, epoch loss=0.063205 Batch=539, step=22140, lr=0.108000, batch loss=0.017114, epoch loss=0.080319 Batch=599, step=22200, lr=0.107750, batch loss=0.016678, epoch loss=0.096996 Batch=659, step=22260, lr=0.107500, batch loss=0.016212, epoch loss=0.113209 Batch=719, step=22320, lr=0.107250, batch loss=0.021194, epoch loss=0.134402 Batch=779, step=22380, lr=0.107000, batch loss=0.030268, 
Batch=779, step=22380, lr=0.107000, batch loss=0.030268, epoch loss=0.164670
Batch=839, step=22440, lr=0.106750, batch loss=0.029754, epoch loss=0.194424
Batch=899, step=22500, lr=0.106500, batch loss=0.023686, epoch loss=0.218109
Batch=959, step=22560, lr=0.106250, batch loss=0.011127, epoch loss=0.229236
Batch=1019, step=22620, lr=0.106000, batch loss=0.009279, epoch loss=0.238515
Batch=1079, step=22680, lr=0.105750, batch loss=0.002289, epoch loss=0.240805
Batch=1139, step=22740, lr=0.105500, batch loss=0.011337, epoch loss=0.252141
Batch=1199, step=22800, lr=0.105250, batch loss=0.005423, epoch loss=0.257564
Epoch=18, step=22800, lr=0.105250, epoch loss=0.257564
Batch=59, step=22860, lr=0.105000, batch loss=0.002464, epoch loss=0.002464
Batch=119, step=22920, lr=0.104750, batch loss=0.004876, epoch loss=0.007340
Batch=179, step=22980, lr=0.104500, batch loss=0.012894, epoch loss=0.020234
Batch=239, step=23040, lr=0.104250, batch loss=0.009124, epoch loss=0.029357
Batch=299, step=23100, lr=0.104000, batch loss=0.001478, epoch loss=0.030835
Batch=359, step=23160, lr=0.103750, batch loss=0.011569, epoch loss=0.042405
Batch=419, step=23220, lr=0.103500, batch loss=0.009974, epoch loss=0.052378
Batch=479, step=23280, lr=0.103250, batch loss=0.002539, epoch loss=0.054918
Batch=539, step=23340, lr=0.103000, batch loss=0.016802, epoch loss=0.071720
Batch=599, step=23400, lr=0.102750, batch loss=0.013390, epoch loss=0.085110
Batch=659, step=23460, lr=0.102500, batch loss=0.011216, epoch loss=0.096326
Batch=719, step=23520, lr=0.102250, batch loss=0.014633, epoch loss=0.110958
Batch=779, step=23580, lr=0.102000, batch loss=0.022542, epoch loss=0.133500
Batch=839, step=23640, lr=0.101750, batch loss=0.024172, epoch loss=0.157673
Batch=899, step=23700, lr=0.101500, batch loss=0.023129, epoch loss=0.180802
Batch=959, step=23760, lr=0.101250, batch loss=0.007983, epoch loss=0.188785
Batch=1019, step=23820, lr=0.101000, batch loss=0.007574, epoch loss=0.196359
Batch=1079, step=23880, lr=0.100750, batch loss=0.001032, epoch loss=0.197391
Batch=1139, step=23940, lr=0.100500, batch loss=0.008453, epoch loss=0.205844
Batch=1199, step=24000, lr=0.100250, batch loss=0.004358, epoch loss=0.210202
Epoch=19, step=24000, lr=0.100250, epoch loss=0.210202
Half-moons scatterplot and decision boundary:
[Text plot, roughly 100x40 characters: the two half-moon point clouds, drawn with '#' and '%', each fall almost entirely inside a distinct predicted region, drawn with '*' and '.' respectively, so the learned decision boundary separates the two classes.]
2025-03-20 22:15.52 ---> saved as "2d8fcd7b96a9ad7e28ecbb7cfdc67c254f20fad6c1c64e0a4c085be81ad4adce"
Job succeeded
2025-03-20 22:15.53: Job succeeded
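Note on the numbers above: the reported learning rate drops by exactly 0.000250 at every 60-step report (about 4.17e-6 per step), and each epoch spans 1200 steps (Batch=1199 at every epoch boundary). The following minimal stand-alone OCaml sketch reproduces just that schedule and reporting cadence; the constants are inferred from this log, it is not code from the ocannl repository, and the loss columns obviously cannot be recomputed here.

let () =
  (* Constants inferred from the log above (not from the ocannl sources):
     a report every 60 steps, 1200 steps per epoch, and a learning rate that
     falls by 0.000250 per report, passing through 0.129000 at step 17100. *)
  let log_every = 60 and steps_per_epoch = 1200 in
  let lr_at step = 0.129 -. (0.00025 /. 60.) *. float_of_int (step - 17100) in
  (* 116 reports cover steps 17100 through 24000 inclusive. *)
  for report = 0 to 115 do
    let step = 17100 + (log_every * report) in
    (* 0-based batch index within the epoch: 299 at step 17100, 1199 at 18000. *)
    let batch = (step - 1) mod steps_per_epoch in
    (* The batch/epoch loss columns come from actual training and are omitted. *)
    Printf.printf "Batch=%d, step=%d, lr=%f, ...\n" batch step (lr_at step)
  done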