2025-03-20 22:29.24: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (1a4d0ebb2cb2be9bf8422ae739357d8626151cd1) (linux-x86_64:alpine-3.21-5.3_opam-2.3)
Base: ocaml/opam:alpine-3.21-ocaml-5.3@sha256:4ae4f6e361088eb06bab0beb78c7db535e8b7a689cca4c4dd76add4242c4dab0
Opam project build
To reproduce locally:
git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 1a4d0ebb
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:alpine-3.21-ocaml-5.3@sha256:4ae4f6e361088eb06bab0beb78c7db535e8b7a689cca4c4dd76add4242c4dab0
# alpine-3.21-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
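For a quicker check outside Docker, the same steps can be run directly against an existing OCaml 5.3 switch with opam 2.x. This is a condensed sketch of the commands the job runs inside the container; the --deps-only line is an approximation that lets the solver pick dependency versions instead of pinning the exact $DEPS list above, so it is not the exact CI invocation:
git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl"
git reset --hard 1a4d0ebb
opam pin add -yn neural_nets_lib.dev './'
opam pin add -yn arrayjit.dev './'
opam install --deps-only --with-test ./neural_nets_lib.opam ./arrayjit.opam
opam exec -- dune build @install @check @runtest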
2025-03-20 22:29.24: Using cache hint "ahrefs/ocannl-ocaml/opam:alpine-3.21-ocaml-5.3@sha256:4ae4f6e361088eb06bab0beb78c7db535e8b7a689cca4c4dd76add4242c4dab0-alpine-3.21-5.3_opam-2.3-3fcdf15be1e8f7dcae915b4cdb940fd5"
2025-03-20 22:29.24: Using OBuilder spec:
((from ocaml/opam:alpine-3.21-ocaml-5.3@sha256:4ae4f6e361088eb06bab0beb78c7db535e8b7a689cca4c4dd76add4242c4dab0)
 (comment alpine-3.21-5.3_opam-2.3)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
 (run (network host)
      (shell "opam pin add -yn neural_nets_lib.dev './' && \\nopam pin add -yn arrayjit.dev './'"))
 (run (network host)
      (shell "echo '(lang dune 3.0)' > './dune-project'"))
 (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam install $DEPS"))
 (copy (src .) (dst /src))
 (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)
2025-03-20 22:29.24: Waiting for resource in pool OCluster
2025-03-20 22:29.24: Waiting for worker…
2025-03-20 22:31.44: Got resource from pool OCluster
Building on asteria.caelum.ci.dev
All commits already cached
HEAD is now at 1a4d0ebb fPIC for cc: but only openSUSE complained https://ocaml.ci.dev/github/ahrefs/ocannl/commit/ccaf459c55f1e1dab014a65af54e1ba2ec3b9ad0/variant/opensuse-15.6-5.3_opam-2.3
(from ocaml/opam:alpine-3.21-ocaml-5.3@sha256:4ae4f6e361088eb06bab0beb78c7db535e8b7a689cca4c4dd76add4242c4dab0)
2025-03-20 22:31.45 ---> using "dacbb842f0a2d29eaad3e832c76ea718382fa9cd05776a3fdb8016e465aa5e7b" from cache
/: (comment alpine-3.21-5.3_opam-2.3)
/: (user (uid 1000) (gid 1000))
/: (env CLICOLOR_FORCE 1)
/: (env OPAMCOLOR always)
/: (workdir /src)
/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-03-20 22:31.45 ---> using "4d575331d18a103bb0cc6f4a0dd61826f91da9419ba321b0b1320c4ec539b0db" from cache
/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
- you won't be able to use mercurial repositories unless you install the hg command on your system.
- you won't be able to use darcs repositories unless you install the darcs command on your system.
This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.
Continue? [y/n] y
Format upgrade done.
<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[ERROR] Could not update repository "opam-repository-archive": "/usr/bin/git fetch -q" exited with code 128 "fatal: unable to access 'https://github.com/ocaml/opam-repository-archive/': Could not resolve host: github.com"
[default] synchronised from file:///home/opam/opam-repository
2025-03-20 22:31.45 ---> using "6937929ef5611e557c4991cd8189eebf46058c4ee81139f68d15319429555db6" from cache
/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-03-20 22:31.45 ---> using "bb90a277fb48150bfb58577b4b61ca2e911fa7d8ff44939da041b865bac9afe8" from cache
/src: (workdir /src)
/src: (run (shell "sudo chown opam /src"))
2025-03-20 22:31.45 ---> using "dc9101c084e576ab6dcbeea601d2b330166d0d274d8fdcaf9cff106e658a7bf6" from cache
/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
 * branch master -> FETCH_HEAD
   862a7640b1..6cf83229dd master -> origin/master
4e25d0cf5f Merge pull request #27651 from lukstafi/opam-publish-ppx_minidebug.2.1.0
<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[opam-repository-archive] synchronised from git+https://github.com/ocaml/opam-repository-archive
[default] synchronised from file:///home/opam/opam-repository
Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run:
eval $(opam env)2025-03-20 22:31.45 ---> using "746fbb62527431fddf1e025894b82c1bcf0d23e4b907494eb6fa8ff4f5667dbb" from cache/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))2025-03-20 22:31.45 ---> saved as "0fda56c50f94acc8076c1938dcc63d2cf437304e0ebc7bc9ffa0862b14ba29cb"/src: (run (network host)(shell "opam pin add -yn neural_nets_lib.dev './' && \\nopam pin add -yn arrayjit.dev './'"))[neural_nets_lib.dev] synchronised (file:///src)neural_nets_lib is now pinned to file:///src (version dev)[arrayjit.dev] synchronised (file:///src)arrayjit is now pinned to file:///src (version dev)2025-03-20 22:31.50 ---> saved as "189ebad2a0b57bb896db3f938290f0b71b7afa6755777d620d1deb6f23a61aaa"/src: (run (network host)(shell "echo '(lang dune 3.0)' > './dune-project'"))2025-03-20 22:31.50 ---> saved as "41e1aa8d4c8718f6fa9eca8319e39ceafea6113098eef042d7f1a053e76b7e91"/src: (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")/src: (env CI true)/src: (env OCAMLCI true)/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))+ /usr/bin/sudo "apk" "update"- fetch https://dl-cdn.alpinelinux.org/alpine/v3.21/main/x86_64/APKINDEX.tar.gz- fetch https://dl-cdn.alpinelinux.org/alpine/v3.21/community/x86_64/APKINDEX.tar.gz- fetch https://dl-cdn.alpinelinux.org/alpine/edge/main/x86_64/APKINDEX.tar.gz- fetch https://dl-cdn.alpinelinux.org/alpine/edge/community/x86_64/APKINDEX.tar.gz- fetch https://dl-cdn.alpinelinux.org/alpine/edge/testing/x86_64/APKINDEX.tar.gz- v3.21.3-193-g2da8f1a642c [https://dl-cdn.alpinelinux.org/alpine/v3.21/main]- v3.21.3-194-g25fc289152f [https://dl-cdn.alpinelinux.org/alpine/v3.21/community]- v20250108-4318-g3d9c1aad998 [https://dl-cdn.alpinelinux.org/alpine/edge/main]- v20250108-4328-gbd32d9239d0 [https://dl-cdn.alpinelinux.org/alpine/edge/community]- v20250108-3887-g4037a56c69f [https://dl-cdn.alpinelinux.org/alpine/edge/testing]- OK: 57226 distinct packages available<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>[arrayjit.dev] synchronised (file:///src)[neural_nets_lib.dev] synchronised 
(file:///src)[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).[NOTE] Package ocaml-config is already installed (current version is 3).[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml is already installed (current version is 5.3.0).[NOTE] Package base-unix is already installed (current version is base).[NOTE] Package base-threads is already installed (current version is base).[NOTE] Package base-nnp is already installed (current version is base).[NOTE] Package base-effects is already installed (current version is base).[NOTE] Package base-domains is already installed (current version is base).[NOTE] Package base-bigarray is already installed (current version is base).The following system packages will first need to be installed:libffi-dev<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>+ /usr/bin/sudo "apk" "add" "libffi-dev"- (1/2) Installing linux-headers (6.6-r1)- (2/2) Installing libffi-dev (3.4.7-r0)- OK: 312 MiB in 104 packages2025-03-20 22:32.07 ---> saved as "ce6980b534df263abf656a5a140a260b33cee55483b143478efd6c02a3694176"/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "opam install $DEPS"))[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).[NOTE] Package ocaml-config is already installed (current version is 3).[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml is already installed (current version is 5.3.0).[NOTE] Package base-unix is already installed (current version is base).[NOTE] Package base-threads is already installed (current version is base).[NOTE] Package base-nnp is already installed (current version is base).[NOTE] Package base-effects is already installed (current version is base).[NOTE] Package base-domains is already installed (current version is base).[NOTE] Package base-bigarray is already installed (current version is base).The following actions will be performed:=== install 65 packages- install angstrom 0.16.1- install backoff 0.1.1- install base v0.17.1- install bigarray-compat 1.1.0- install bigstringaf 0.10.0- install conf-libffi 2.0.0- install conf-pkg-config 4- install cppo 1.8.0- install csexp 1.5.2- install ctypes 0.23.0- install ctypes-foreign 0.23.0- install dune 3.17.2- install dune-configurator 3.17.2- install fieldslib v0.17.0- install integers 0.7.0- install jane-street-headers v0.17.0- install jst-config v0.17.0- install mtime 2.1.0- install multicore-magic 2.3.1- install num 1.5-1- install ocaml-compiler-libs v0.17.0- install ocaml-syntax-shims 1.0.0- install ocaml_intrinsics_kernel v0.17.1- install ocamlbuild 0.16.1- install ocamlfind 1.9.8- install parsexp v0.17.0- install ppx_assert v0.17.0- install ppx_base v0.17.0- install ppx_cold v0.17.0- install ppx_compare v0.17.0- install ppx_derivers 1.2.1- install ppx_deriving 6.0.3- install ppx_enumerate v0.17.0- install ppx_expect v0.17.2- install ppx_fields_conv v0.17.0- install ppx_globalize v0.17.0- install ppx_hash v0.17.0- install ppx_here v0.17.0- install ppx_inline_test v0.17.0- install ppx_minidebug 2.1.0- install ppx_optcomp v0.17.0- install ppx_sexp_conv v0.17.0- install ppx_string v0.17.0- install ppx_variants_conv v0.17.0- install ppxlib 0.35.0- install ppxlib_jane v0.17.2- install printbox 
0.12- install printbox-ext-plot 0.12- install printbox-html 0.12- install printbox-md 0.12- install printbox-text 0.12- install ptime 1.2.0- install re 1.12.0- install saturn_lockfree 0.5.0- install seq base- install sexplib v0.17.0- install sexplib0 v0.17.0- install stdio v0.17.0- install stdlib-shims 0.3.0- install time_now v0.17.0- install topkg 1.0.8- install tyxml 4.6.0- install uucp 16.0.0- install uutf 1.0.4- install variantslib v0.17.0<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>-> retrieved backoff.0.1.1 (cached)-> retrieved bigarray-compat.1.1.0 (cached)-> retrieved angstrom.0.16.1 (cached)-> retrieved base.v0.17.1 (cached)-> retrieved bigstringaf.0.10.0 (cached)-> retrieved cppo.1.8.0 (cached)-> installed conf-pkg-config.4-> retrieved csexp.1.5.2 (cached)-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached)-> installed conf-libffi.2.0.0-> retrieved fieldslib.v0.17.0 (cached)-> retrieved integers.0.7.0 (cached)-> retrieved jane-street-headers.v0.17.0 (cached)-> retrieved jst-config.v0.17.0 (cached)-> retrieved mtime.2.1.0 (cached)-> retrieved multicore-magic.2.3.1 (cached)-> retrieved num.1.5-1 (cached)-> retrieved ocaml-compiler-libs.v0.17.0 (cached)-> retrieved ocaml-syntax-shims.1.0.0 (cached)-> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached)-> retrieved ocamlbuild.0.16.1 (cached)-> retrieved ocamlfind.1.9.8 (cached)-> retrieved dune.3.17.2, dune-configurator.3.17.2 (cached)-> installed num.1.5-1-> retrieved parsexp.v0.17.0 (cached)-> retrieved ppx_assert.v0.17.0 (cached)-> retrieved ppx_base.v0.17.0 (cached)-> retrieved ppx_cold.v0.17.0 (cached)-> retrieved ppx_compare.v0.17.0 (cached)-> retrieved ppx_derivers.1.2.1 (cached)-> retrieved ppx_enumerate.v0.17.0 (cached)-> retrieved ppx_deriving.6.0.3 (cached)-> retrieved ppx_expect.v0.17.2 (cached)-> retrieved ppx_fields_conv.v0.17.0 (cached)-> retrieved ppx_globalize.v0.17.0 (cached)-> retrieved ppx_hash.v0.17.0 (cached)-> retrieved ppx_here.v0.17.0 (cached)-> retrieved ppx_inline_test.v0.17.0 (cached)-> retrieved ppx_optcomp.v0.17.0 (cached)-> retrieved ppx_minidebug.2.1.0 (cached)-> retrieved ppx_sexp_conv.v0.17.0 (cached)-> retrieved ppx_string.v0.17.0 (cached)-> retrieved ppx_variants_conv.v0.17.0 (cached)-> retrieved ppxlib_jane.v0.17.2 (cached)-> retrieved ptime.1.2.0 (cached)-> retrieved re.1.12.0 (cached)-> retrieved saturn_lockfree.0.5.0 (cached)-> retrieved seq.base (cached)-> installed seq.base-> retrieved ppxlib.0.35.0 (cached)-> retrieved sexplib.v0.17.0 (cached)-> retrieved sexplib0.v0.17.0 (cached)-> retrieved stdio.v0.17.0 (cached)-> retrieved stdlib-shims.0.3.0 (cached)-> retrieved time_now.v0.17.0 (cached)-> retrieved topkg.1.0.8 (cached)-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached)-> retrieved tyxml.4.6.0 (cached)-> retrieved uutf.1.0.4 (cached)-> retrieved variantslib.v0.17.0 (cached)-> retrieved uucp.16.0.0 (cached)-> installed ocamlfind.1.9.8-> installed ocamlbuild.0.16.1-> installed topkg.1.0.8-> installed uutf.1.0.4-> installed mtime.2.1.0-> installed ptime.1.2.0-> installed dune.3.17.2-> installed jane-street-headers.v0.17.0-> installed ppx_derivers.1.2.1-> installed csexp.1.5.2-> installed backoff.0.1.1-> installed bigarray-compat.1.1.0-> installed cppo.1.8.0-> installed multicore-magic.2.3.1-> installed ocaml-compiler-libs.v0.17.0-> installed ocaml-syntax-shims.1.0.0-> installed ocaml_intrinsics_kernel.v0.17.1-> installed printbox.0.12-> installed re.1.12.0-> installed sexplib0.v0.17.0-> 
installed stdlib-shims.0.3.0-> installed saturn_lockfree.0.5.0-> installed integers.0.7.0-> installed parsexp.v0.17.0-> installed dune-configurator.3.17.2-> installed bigstringaf.0.10.0-> installed sexplib.v0.17.0-> installed angstrom.0.16.1-> installed tyxml.4.6.0-> installed printbox-html.0.12-> installed ctypes.0.23.0-> installed base.v0.17.1-> installed variantslib.v0.17.0-> installed fieldslib.v0.17.0-> installed stdio.v0.17.0-> installed ctypes-foreign.0.23.0-> installed uucp.16.0.0-> installed printbox-text.0.12-> installed printbox-md.0.12-> installed printbox-ext-plot.0.12-> installed ppxlib.0.35.0-> installed ppxlib_jane.v0.17.2-> installed ppx_optcomp.v0.17.0-> installed ppx_cold.v0.17.0-> installed ppx_here.v0.17.0-> installed ppx_variants_conv.v0.17.0-> installed ppx_fields_conv.v0.17.0-> installed ppx_enumerate.v0.17.0-> installed ppx_globalize.v0.17.0-> installed ppx_deriving.6.0.3-> installed ppx_compare.v0.17.0-> installed ppx_sexp_conv.v0.17.0-> installed ppx_hash.v0.17.0-> installed ppx_assert.v0.17.0-> installed ppx_minidebug.2.1.0-> installed ppx_base.v0.17.0-> installed jst-config.v0.17.0-> installed ppx_string.v0.17.0-> installed time_now.v0.17.0-> installed ppx_inline_test.v0.17.0-> installed ppx_expect.v0.17.2Done.# To update the current shell environment, run: eval $(opam env)2025-03-20 22:33.31 ---> saved as "3f87e6347972f62058b086385de718660334101fcb30eaa774491c4f4678c917"/src: (copy (src .) (dst /src))2025-03-20 22:33.31 ---> saved as "88372a935653d8fb99d7223e89cbf05bf7fb3e50c67c6fb84723ae4f66869635"/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/default/test_ppx && ./test_ppx_op.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/ee66895401ceddd04420d09796d0b681/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! 
Reading configuration defaults from /src/_build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)Welcome to OCANNL! 
Reading configuration defaults from /src/_build/.sandbox/2dd2ce99b18759f611fa6d9a59e473e0/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/default/test && ./moons_demo_parallel_run.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file("Set log_level to" 1)└─{orphaned from #2}Retrieving commandline, environment, or config file variable ocannl_backendFound cc, in the config fileRetrieving commandline, environment, or config file variable ocannl_ll_ident_styleNot found, using default heuristicRetrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_levelNot found, using default 3Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_commandNot found, using default gccRetrieving commandline, environment, or config file variable ocannl_never_capture_stdoutNot found, using default falseBatch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407Batch=959, step=960, lr=0.196250, batch loss=0.245136, epoch loss=44.035543Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025Batch=239, step=1440, lr=0.194250, batch loss=0.347897, epoch loss=1.058921Batch=299, step=1500, lr=0.194000, batch loss=0.247348, epoch loss=1.306269Batch=359, step=1560, lr=0.193750, batch loss=0.316559, epoch loss=1.622828Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.935563Batch=479, step=1680, lr=0.193250, batch loss=0.276268, epoch loss=2.211831Batch=539, step=1740, lr=0.193000, batch loss=0.209826, epoch loss=2.421657Batch=599, step=1800, lr=0.192750, batch loss=0.250384, epoch loss=2.672042Batch=659, step=1860, lr=0.192500, batch loss=0.367201, epoch loss=3.039243Batch=719, step=1920, lr=0.192250, batch loss=0.354917, epoch 
loss=3.394160Batch=779, step=1980, lr=0.192000, batch loss=0.381382, epoch loss=3.775542Batch=839, step=2040, lr=0.191750, batch loss=0.339637, epoch loss=4.115179Batch=899, step=2100, lr=0.191500, batch loss=0.295234, epoch loss=4.410413Batch=959, step=2160, lr=0.191250, batch loss=0.214033, epoch loss=4.624446Batch=1019, step=2220, lr=0.191000, batch loss=0.330972, epoch loss=4.955419Batch=1079, step=2280, lr=0.190750, batch loss=0.208236, epoch loss=5.163654Batch=1139, step=2340, lr=0.190500, batch loss=0.278374, epoch loss=5.442028Batch=1199, step=2400, lr=0.190250, batch loss=0.220793, epoch loss=5.662821Epoch=1, step=2400, lr=0.190250, epoch loss=5.662821Batch=59, step=2460, lr=0.190000, batch loss=0.230363, epoch loss=0.230363Batch=119, step=2520, lr=0.189750, batch loss=0.195962, epoch loss=0.426325Batch=179, step=2580, lr=0.189500, batch loss=0.221156, epoch loss=0.647481Batch=239, step=2640, lr=0.189250, batch loss=0.328098, epoch loss=0.975578Batch=299, step=2700, lr=0.189000, batch loss=0.202947, epoch loss=1.178525Batch=359, step=2760, lr=0.188750, batch loss=0.289890, epoch loss=1.468415Batch=419, step=2820, lr=0.188500, batch loss=0.281744, epoch loss=1.750160Batch=479, step=2880, lr=0.188250, batch loss=0.264844, epoch loss=2.015004Batch=539, step=2940, lr=0.188000, batch loss=0.203798, epoch loss=2.218802Batch=599, step=3000, lr=0.187750, batch loss=0.248079, epoch loss=2.466881Batch=659, step=3060, lr=0.187500, batch loss=0.345056, epoch loss=2.811937Batch=719, step=3120, lr=0.187250, batch loss=0.343542, epoch loss=3.155479Batch=779, step=3180, lr=0.187000, batch loss=0.366989, epoch loss=3.522468Batch=839, step=3240, lr=0.186750, batch loss=0.321779, epoch loss=3.844248Batch=899, step=3300, lr=0.186500, batch loss=0.283865, epoch loss=4.128112Batch=959, step=3360, lr=0.186250, batch loss=0.214411, epoch loss=4.342523Batch=1019, step=3420, lr=0.186000, batch loss=0.306400, epoch loss=4.648923Batch=1079, step=3480, lr=0.185750, batch loss=0.177313, epoch loss=4.826236Batch=1139, step=3540, lr=0.185500, batch loss=0.235576, epoch loss=5.061812Batch=1199, step=3600, lr=0.185250, batch loss=0.197911, epoch loss=5.259723Epoch=2, step=3600, lr=0.185250, epoch loss=5.259723Batch=59, step=3660, lr=0.185000, batch loss=0.226539, epoch loss=0.226539Batch=119, step=3720, lr=0.184750, batch loss=0.191802, epoch loss=0.418341Batch=179, step=3780, lr=0.184500, batch loss=0.210712, epoch loss=0.629053Batch=239, step=3840, lr=0.184250, batch loss=0.314104, epoch loss=0.943157Batch=299, step=3900, lr=0.184000, batch loss=0.206011, epoch loss=1.149168Batch=359, step=3960, lr=0.183750, batch loss=0.291253, epoch loss=1.440420Batch=419, step=4020, lr=0.183500, batch loss=0.297434, epoch loss=1.737854Batch=479, step=4080, lr=0.183250, batch loss=0.260511, epoch loss=1.998365Batch=539, step=4140, lr=0.183000, batch loss=0.195229, epoch loss=2.193593Batch=599, step=4200, lr=0.182750, batch loss=0.230395, epoch loss=2.423989Batch=659, step=4260, lr=0.182500, batch loss=0.337132, epoch loss=2.761121Batch=719, step=4320, lr=0.182250, batch loss=0.347122, epoch loss=3.108243Batch=779, step=4380, lr=0.182000, batch loss=0.346320, epoch loss=3.454563Batch=839, step=4440, lr=0.181750, batch loss=0.317770, epoch loss=3.772333Batch=899, step=4500, lr=0.181500, batch loss=0.283906, epoch loss=4.056239Batch=959, step=4560, lr=0.181250, batch loss=0.238371, epoch loss=4.294610Batch=1019, step=4620, lr=0.181000, batch loss=0.336891, epoch loss=4.631501Batch=1079, step=4680, lr=0.180750, batch 
loss=0.208592, epoch loss=4.840094Batch=1139, step=4740, lr=0.180500, batch loss=0.249212, epoch loss=5.089306Batch=1199, step=4800, lr=0.180250, batch loss=0.191768, epoch loss=5.281074Epoch=3, step=4800, lr=0.180250, epoch loss=5.281074Batch=59, step=4860, lr=0.180000, batch loss=0.228079, epoch loss=0.228079Batch=119, step=4920, lr=0.179750, batch loss=0.190219, epoch loss=0.418298Batch=179, step=4980, lr=0.179500, batch loss=0.205905, epoch loss=0.624203Batch=239, step=5040, lr=0.179250, batch loss=0.309067, epoch loss=0.933269Batch=299, step=5100, lr=0.179000, batch loss=0.204593, epoch loss=1.137862Batch=359, step=5160, lr=0.178750, batch loss=0.271139, epoch loss=1.409001Batch=419, step=5220, lr=0.178500, batch loss=0.264055, epoch loss=1.673056Batch=479, step=5280, lr=0.178250, batch loss=0.239692, epoch loss=1.912748Batch=539, step=5340, lr=0.178000, batch loss=0.189445, epoch loss=2.102194Batch=599, step=5400, lr=0.177750, batch loss=0.231019, epoch loss=2.333213Batch=659, step=5460, lr=0.177500, batch loss=0.323592, epoch loss=2.656805Batch=719, step=5520, lr=0.177250, batch loss=0.325867, epoch loss=2.982672Batch=779, step=5580, lr=0.177000, batch loss=0.343179, epoch loss=3.325851Batch=839, step=5640, lr=0.176750, batch loss=0.309345, epoch loss=3.635195Batch=899, step=5700, lr=0.176500, batch loss=0.273171, epoch loss=3.908366Batch=959, step=5760, lr=0.176250, batch loss=0.214921, epoch loss=4.123287Batch=1019, step=5820, lr=0.176000, batch loss=0.339150, epoch loss=4.462436Batch=1079, step=5880, lr=0.175750, batch loss=0.207828, epoch loss=4.670264Batch=1139, step=5940, lr=0.175500, batch loss=0.240055, epoch loss=4.910319Batch=1199, step=6000, lr=0.175250, batch loss=0.186862, epoch loss=5.097180Epoch=4, step=6000, lr=0.175250, epoch loss=5.097180Batch=59, step=6060, lr=0.175000, batch loss=0.230529, epoch loss=0.230529Batch=119, step=6120, lr=0.174750, batch loss=0.194291, epoch loss=0.424820Batch=179, step=6180, lr=0.174500, batch loss=0.201541, epoch loss=0.626361Batch=239, step=6240, lr=0.174250, batch loss=0.302380, epoch loss=0.928741Batch=299, step=6300, lr=0.174000, batch loss=0.203930, epoch loss=1.132671Batch=359, step=6360, lr=0.173750, batch loss=0.266115, epoch loss=1.398786Batch=419, step=6420, lr=0.173500, batch loss=0.265282, epoch loss=1.664067Batch=479, step=6480, lr=0.173250, batch loss=0.243319, epoch loss=1.907387Batch=539, step=6540, lr=0.173000, batch loss=0.192969, epoch loss=2.100355Batch=599, step=6600, lr=0.172750, batch loss=0.234500, epoch loss=2.334855Batch=659, step=6660, lr=0.172500, batch loss=0.312107, epoch loss=2.646963Batch=719, step=6720, lr=0.172250, batch loss=0.314221, epoch loss=2.961184Batch=779, step=6780, lr=0.172000, batch loss=0.333223, epoch loss=3.294407Batch=839, step=6840, lr=0.171750, batch loss=0.303757, epoch loss=3.598164Batch=899, step=6900, lr=0.171500, batch loss=0.268468, epoch loss=3.866633Batch=959, step=6960, lr=0.171250, batch loss=0.211039, epoch loss=4.077671Batch=1019, step=7020, lr=0.171000, batch loss=0.330462, epoch loss=4.408133Batch=1079, step=7080, lr=0.170750, batch loss=0.180866, epoch loss=4.588999Batch=1139, step=7140, lr=0.170500, batch loss=0.216313, epoch loss=4.805312Batch=1199, step=7200, lr=0.170250, batch loss=0.181911, epoch loss=4.987223Epoch=5, step=7200, lr=0.170250, epoch loss=4.987223Batch=59, step=7260, lr=0.170000, batch loss=0.232877, epoch loss=0.232877Batch=119, step=7320, lr=0.169750, batch loss=0.184424, epoch loss=0.417301Batch=179, step=7380, lr=0.169500, batch loss=0.196292, 
epoch loss=0.613593Batch=239, step=7440, lr=0.169250, batch loss=0.290823, epoch loss=0.904416Batch=299, step=7500, lr=0.169000, batch loss=0.200837, epoch loss=1.105253Batch=359, step=7560, lr=0.168750, batch loss=0.258435, epoch loss=1.363689Batch=419, step=7620, lr=0.168500, batch loss=0.256808, epoch loss=1.620497Batch=479, step=7680, lr=0.168250, batch loss=0.235998, epoch loss=1.856495Batch=539, step=7740, lr=0.168000, batch loss=0.187895, epoch loss=2.044390Batch=599, step=7800, lr=0.167750, batch loss=0.223924, epoch loss=2.268314Batch=659, step=7860, lr=0.167500, batch loss=0.305915, epoch loss=2.574229Batch=719, step=7920, lr=0.167250, batch loss=0.309289, epoch loss=2.883518Batch=779, step=7980, lr=0.167000, batch loss=0.329942, epoch loss=3.213460Batch=839, step=8040, lr=0.166750, batch loss=0.292425, epoch loss=3.505885Batch=899, step=8100, lr=0.166500, batch loss=0.261775, epoch loss=3.767660Batch=959, step=8160, lr=0.166250, batch loss=0.193295, epoch loss=3.960955Batch=1019, step=8220, lr=0.166000, batch loss=0.314033, epoch loss=4.274988Batch=1079, step=8280, lr=0.165750, batch loss=0.172099, epoch loss=4.447087Batch=1139, step=8340, lr=0.165500, batch loss=0.209742, epoch loss=4.656829Batch=1199, step=8400, lr=0.165250, batch loss=0.178275, epoch loss=4.835103Epoch=6, step=8400, lr=0.165250, epoch loss=4.835103Batch=59, step=8460, lr=0.165000, batch loss=0.229725, epoch loss=0.229725Batch=119, step=8520, lr=0.164750, batch loss=0.175017, epoch loss=0.404742Batch=179, step=8580, lr=0.164500, batch loss=0.187817, epoch loss=0.592559Batch=239, step=8640, lr=0.164250, batch loss=0.278203, epoch loss=0.870762Batch=299, step=8700, lr=0.164000, batch loss=0.191994, epoch loss=1.062755Batch=359, step=8760, lr=0.163750, batch loss=0.248632, epoch loss=1.311388Batch=419, step=8820, lr=0.163500, batch loss=0.245601, epoch loss=1.556988Batch=479, step=8880, lr=0.163250, batch loss=0.228591, epoch loss=1.785580Batch=539, step=8940, lr=0.163000, batch loss=0.178132, epoch loss=1.963712Batch=599, step=9000, lr=0.162750, batch loss=0.217388, epoch loss=2.181101Batch=659, step=9060, lr=0.162500, batch loss=0.294814, epoch loss=2.475915Batch=719, step=9120, lr=0.162250, batch loss=0.296433, epoch loss=2.772348Batch=779, step=9180, lr=0.162000, batch loss=0.316728, epoch loss=3.089075Batch=839, step=9240, lr=0.161750, batch loss=0.287243, epoch loss=3.376318Batch=899, step=9300, lr=0.161500, batch loss=0.251060, epoch loss=3.627378Batch=959, step=9360, lr=0.161250, batch loss=0.190532, epoch loss=3.817911Batch=1019, step=9420, lr=0.161000, batch loss=0.311728, epoch loss=4.129639Batch=1079, step=9480, lr=0.160750, batch loss=0.191595, epoch loss=4.321234Batch=1139, step=9540, lr=0.160500, batch loss=0.215772, epoch loss=4.537006Batch=1199, step=9600, lr=0.160250, batch loss=0.165620, epoch loss=4.702626Epoch=7, step=9600, lr=0.160250, epoch loss=4.702626Batch=59, step=9660, lr=0.160000, batch loss=0.197217, epoch loss=0.197217Batch=119, step=9720, lr=0.159750, batch loss=0.165467, epoch loss=0.362684Batch=179, step=9780, lr=0.159500, batch loss=0.179286, epoch loss=0.541970Batch=239, step=9840, lr=0.159250, batch loss=0.263837, epoch loss=0.805807Batch=299, step=9900, lr=0.159000, batch loss=0.182187, epoch loss=0.987994Batch=359, step=9960, lr=0.158750, batch loss=0.240842, epoch loss=1.228836Batch=419, step=10020, lr=0.158500, batch loss=0.232979, epoch loss=1.461815Batch=479, step=10080, lr=0.158250, batch loss=0.213194, epoch loss=1.675009Batch=539, step=10140, lr=0.158000, batch 
loss=0.170694, epoch loss=1.845703Batch=599, step=10200, lr=0.157750, batch loss=0.200247, epoch loss=2.045950Batch=659, step=10260, lr=0.157500, batch loss=0.283032, epoch loss=2.328982Batch=719, step=10320, lr=0.157250, batch loss=0.288754, epoch loss=2.617735Batch=779, step=10380, lr=0.157000, batch loss=0.296880, epoch loss=2.914615Batch=839, step=10440, lr=0.156750, batch loss=0.267657, epoch loss=3.182272Batch=899, step=10500, lr=0.156500, batch loss=0.242699, epoch loss=3.424972Batch=959, step=10560, lr=0.156250, batch loss=0.198668, epoch loss=3.623639Batch=1019, step=10620, lr=0.156000, batch loss=0.295119, epoch loss=3.918758Batch=1079, step=10680, lr=0.155750, batch loss=0.178662, epoch loss=4.097421Batch=1139, step=10740, lr=0.155500, batch loss=0.205425, epoch loss=4.302846Batch=1199, step=10800, lr=0.155250, batch loss=0.156138, epoch loss=4.458984Epoch=8, step=10800, lr=0.155250, epoch loss=4.458984Batch=59, step=10860, lr=0.155000, batch loss=0.177430, epoch loss=0.177430Batch=119, step=10920, lr=0.154750, batch loss=0.152366, epoch loss=0.329795Batch=179, step=10980, lr=0.154500, batch loss=0.167114, epoch loss=0.496909Batch=239, step=11040, lr=0.154250, batch loss=0.242622, epoch loss=0.739531Batch=299, step=11100, lr=0.154000, batch loss=0.169984, epoch loss=0.909515Batch=359, step=11160, lr=0.153750, batch loss=0.222140, epoch loss=1.131654Batch=419, step=11220, lr=0.153500, batch loss=0.229250, epoch loss=1.360905Batch=479, step=11280, lr=0.153250, batch loss=0.202871, epoch loss=1.563775Batch=539, step=11340, lr=0.152750, batch loss=0.159118, epoch loss=1.722894Batch=599, step=11400, lr=0.152750, batch loss=0.178434, epoch loss=1.901328Batch=659, step=11460, lr=0.152500, batch loss=0.264780, epoch loss=2.166108Batch=719, step=11520, lr=0.152250, batch loss=0.256891, epoch loss=2.422999Batch=779, step=11580, lr=0.152000, batch loss=0.273364, epoch loss=2.696363Batch=839, step=11640, lr=0.151750, batch loss=0.255731, epoch loss=2.952093Batch=899, step=11700, lr=0.151500, batch loss=0.212993, epoch loss=3.165086Batch=959, step=11760, lr=0.151250, batch loss=0.168090, epoch loss=3.333176Batch=1019, step=11820, lr=0.151000, batch loss=0.266169, epoch loss=3.599346Batch=1079, step=11880, lr=0.150750, batch loss=0.149645, epoch loss=3.748991Batch=1139, step=11940, lr=0.150500, batch loss=0.185616, epoch loss=3.934607Batch=1199, step=12000, lr=0.150250, batch loss=0.139472, epoch loss=4.074079Epoch=9, step=12000, lr=0.150250, epoch loss=4.074079Batch=59, step=12060, lr=0.150000, batch loss=0.157985, epoch loss=0.157985Batch=119, step=12120, lr=0.149750, batch loss=0.128867, epoch loss=0.286851Batch=179, step=12180, lr=0.149500, batch loss=0.150413, epoch loss=0.437264Batch=239, step=12240, lr=0.149250, batch loss=0.223237, epoch loss=0.660502Batch=299, step=12300, lr=0.149000, batch loss=0.142675, epoch loss=0.803177Batch=359, step=12360, lr=0.148750, batch loss=0.195384, epoch loss=0.998561Batch=419, step=12420, lr=0.148500, batch loss=0.206274, epoch loss=1.204835Batch=479, step=12480, lr=0.148250, batch loss=0.178709, epoch loss=1.383544Batch=539, step=12540, lr=0.148000, batch loss=0.142629, epoch loss=1.526172Batch=599, step=12600, lr=0.147750, batch loss=0.150414, epoch loss=1.676587Batch=659, step=12660, lr=0.147500, batch loss=0.224906, epoch loss=1.901493Batch=719, step=12720, lr=0.147250, batch loss=0.235284, epoch loss=2.136777Batch=779, step=12780, lr=0.147000, batch loss=0.252705, epoch loss=2.389482Batch=839, step=12840, lr=0.146750, batch loss=0.224951, epoch 
loss=2.614433Batch=899, step=12900, lr=0.146500, batch loss=0.185332, epoch loss=2.799766Batch=959, step=12960, lr=0.146250, batch loss=0.148921, epoch loss=2.948686Batch=1019, step=13020, lr=0.146000, batch loss=0.268067, epoch loss=3.216753Batch=1079, step=13080, lr=0.145750, batch loss=0.115315, epoch loss=3.332068Batch=1139, step=13140, lr=0.145500, batch loss=0.155416, epoch loss=3.487484Batch=1199, step=13200, lr=0.145250, batch loss=0.118515, epoch loss=3.605999Epoch=10, step=13200, lr=0.145250, epoch loss=3.605999Batch=59, step=13260, lr=0.145000, batch loss=0.143356, epoch loss=0.143356Batch=119, step=13320, lr=0.144750, batch loss=0.119712, epoch loss=0.263068Batch=179, step=13380, lr=0.144500, batch loss=0.127195, epoch loss=0.390264Batch=239, step=13440, lr=0.144250, batch loss=0.186784, epoch loss=0.577048Batch=299, step=13500, lr=0.144000, batch loss=0.112761, epoch loss=0.689809Batch=359, step=13560, lr=0.143750, batch loss=0.161588, epoch loss=0.851397Batch=419, step=13620, lr=0.143500, batch loss=0.160594, epoch loss=1.011991Batch=479, step=13680, lr=0.143250, batch loss=0.147331, epoch loss=1.159322Batch=539, step=13740, lr=0.143000, batch loss=0.118072, epoch loss=1.277393Batch=599, step=13800, lr=0.142750, batch loss=0.119995, epoch loss=1.397388Batch=659, step=13860, lr=0.142500, batch loss=0.175910, epoch loss=1.573299Batch=719, step=13920, lr=0.142250, batch loss=0.172514, epoch loss=1.745813Batch=779, step=13980, lr=0.142000, batch loss=0.178521, epoch loss=1.924335Batch=839, step=14040, lr=0.141750, batch loss=0.186058, epoch loss=2.110393Batch=899, step=14100, lr=0.141500, batch loss=0.178294, epoch loss=2.288687Batch=959, step=14160, lr=0.141250, batch loss=0.146030, epoch loss=2.434717Batch=1019, step=14220, lr=0.141000, batch loss=0.330291, epoch loss=2.765008Batch=1079, step=14280, lr=0.140750, batch loss=0.082629, epoch loss=2.847638Batch=1139, step=14340, lr=0.140500, batch loss=0.122355, epoch loss=2.969993Batch=1199, step=14400, lr=0.140250, batch loss=0.091755, epoch loss=3.061748Epoch=11, step=14400, lr=0.140250, epoch loss=3.061748Batch=59, step=14460, lr=0.140000, batch loss=0.112758, epoch loss=0.112758Batch=119, step=14520, lr=0.139750, batch loss=0.102800, epoch loss=0.215558Batch=179, step=14580, lr=0.139500, batch loss=0.105851, epoch loss=0.321409Batch=239, step=14640, lr=0.139250, batch loss=0.140948, epoch loss=0.462356Batch=299, step=14700, lr=0.139000, batch loss=0.081108, epoch loss=0.543464Batch=359, step=14760, lr=0.138750, batch loss=0.125331, epoch loss=0.668796Batch=419, step=14820, lr=0.138500, batch loss=0.129252, epoch loss=0.798047Batch=479, step=14880, lr=0.138250, batch loss=0.101433, epoch loss=0.899480Batch=539, step=14940, lr=0.138000, batch loss=0.090716, epoch loss=0.990196Batch=599, step=15000, lr=0.137750, batch loss=0.084920, epoch loss=1.075116Batch=659, step=15060, lr=0.137500, batch loss=0.129185, epoch loss=1.204302Batch=719, step=15120, lr=0.137250, batch loss=0.121481, epoch loss=1.325782Batch=779, step=15180, lr=0.137000, batch loss=0.134478, epoch loss=1.460261Batch=839, step=15240, lr=0.136750, batch loss=0.174636, epoch loss=1.634896Batch=899, step=15300, lr=0.136500, batch loss=0.294503, epoch loss=1.929399Batch=959, step=15360, lr=0.136250, batch loss=0.059356, epoch loss=1.988755Batch=1019, step=15420, lr=0.136000, batch loss=0.136213, epoch loss=2.124968Batch=1079, step=15480, lr=0.135750, batch loss=0.049591, epoch loss=2.174559Batch=1139, step=15540, lr=0.135500, batch loss=0.106118, epoch 
loss=2.280677Batch=1199, step=15600, lr=0.135250, batch loss=0.062271, epoch loss=2.342948Epoch=12, step=15600, lr=0.135250, epoch loss=2.342948Batch=59, step=15660, lr=0.135000, batch loss=0.077293, epoch loss=0.077293Batch=119, step=15720, lr=0.134750, batch loss=0.100691, epoch loss=0.177984Batch=179, step=15780, lr=0.134500, batch loss=0.088245, epoch loss=0.266229Batch=239, step=15840, lr=0.134250, batch loss=0.090851, epoch loss=0.357080Batch=299, step=15900, lr=0.134000, batch loss=0.041395, epoch loss=0.398475Batch=359, step=15960, lr=0.133750, batch loss=0.077571, epoch loss=0.476046Batch=419, step=16020, lr=0.133500, batch loss=0.083844, epoch loss=0.559890Batch=479, step=16080, lr=0.133250, batch loss=0.076910, epoch loss=0.636800Batch=539, step=16140, lr=0.133000, batch loss=0.052670, epoch loss=0.689470Batch=599, step=16200, lr=0.132750, batch loss=0.097622, epoch loss=0.787092Batch=659, step=16260, lr=0.132500, batch loss=0.075901, epoch loss=0.862992Batch=719, step=16320, lr=0.132250, batch loss=0.086960, epoch loss=0.949953Batch=779, step=16380, lr=0.132000, batch loss=0.127476, epoch loss=1.077429Batch=839, step=16440, lr=0.131750, batch loss=0.121526, epoch loss=1.198955Batch=899, step=16500, lr=0.131500, batch loss=0.132528, epoch loss=1.331483Batch=959, step=16560, lr=0.131250, batch loss=0.042555, epoch loss=1.374038Batch=1019, step=16620, lr=0.131000, batch loss=0.101892, epoch loss=1.475930Batch=1079, step=16680, lr=0.130750, batch loss=0.028053, epoch loss=1.503983Batch=1139, step=16740, lr=0.130500, batch loss=0.056088, epoch loss=1.560071Batch=1199, step=16800, lr=0.130250, batch loss=0.029344, epoch loss=1.589415Epoch=13, step=16800, lr=0.130250, epoch loss=1.589415Batch=59, step=16860, lr=0.130000, batch loss=0.039120, epoch loss=0.039120Batch=119, step=16920, lr=0.129750, batch loss=0.065266, epoch loss=0.104386Batch=179, step=16980, lr=0.129500, batch loss=0.053305, epoch loss=0.157691Batch=239, step=17040, lr=0.129250, batch loss=0.063709, epoch loss=0.221400Batch=299, step=17100, lr=0.129000, batch loss=0.025475, epoch loss=0.246875Batch=359, step=17160, lr=0.128750, batch loss=0.046183, epoch loss=0.293058Batch=419, step=17220, lr=0.128500, batch loss=0.047737, epoch loss=0.340795Batch=479, step=17280, lr=0.128250, batch loss=0.027295, epoch loss=0.368090Batch=539, step=17340, lr=0.128000, batch loss=0.054191, epoch loss=0.422282Batch=599, step=17400, lr=0.127750, batch loss=0.034578, epoch loss=0.456859Batch=659, step=17460, lr=0.127500, batch loss=0.045901, epoch loss=0.502760Batch=719, step=17520, lr=0.127250, batch loss=0.040226, epoch loss=0.542986Batch=779, step=17580, lr=0.127000, batch loss=0.072440, epoch loss=0.615426Batch=839, step=17640, lr=0.126750, batch loss=0.087074, epoch loss=0.702500Batch=899, step=17700, lr=0.126500, batch loss=0.116737, epoch loss=0.819237Batch=959, step=17760, lr=0.126250, batch loss=0.028245, epoch loss=0.847481Batch=1019, step=17820, lr=0.126000, batch loss=0.047805, epoch loss=0.895286Batch=1079, step=17880, lr=0.125750, batch loss=0.012729, epoch loss=0.908015Batch=1139, step=17940, lr=0.125500, batch loss=0.030629, epoch loss=0.938643Batch=1199, step=18000, lr=0.125250, batch loss=0.014416, epoch loss=0.953060Epoch=14, step=18000, lr=0.125250, epoch loss=0.953060Batch=59, step=18060, lr=0.125000, batch loss=0.010737, epoch loss=0.010737Batch=119, step=18120, lr=0.124750, batch loss=0.034188, epoch loss=0.044924Batch=179, step=18180, lr=0.124500, batch loss=0.095734, epoch loss=0.140658Batch=239, step=18240, 
lr=0.124250, batch loss=0.046376, epoch loss=0.187034Batch=299, step=18300, lr=0.124000, batch loss=0.009601, epoch loss=0.196635Batch=359, step=18360, lr=0.123750, batch loss=0.026782, epoch loss=0.223416Batch=419, step=18420, lr=0.123500, batch loss=0.028252, epoch loss=0.251668Batch=479, step=18480, lr=0.123250, batch loss=0.015448, epoch loss=0.267116Batch=539, step=18540, lr=0.123000, batch loss=0.020734, epoch loss=0.287850Batch=599, step=18600, lr=0.122750, batch loss=0.025938, epoch loss=0.313788Batch=659, step=18660, lr=0.122500, batch loss=0.028531, epoch loss=0.342319Batch=719, step=18720, lr=0.122250, batch loss=0.050271, epoch loss=0.392590Batch=779, step=18780, lr=0.122000, batch loss=0.105846, epoch loss=0.498436Batch=839, step=18840, lr=0.121750, batch loss=0.063655, epoch loss=0.562091Batch=899, step=18900, lr=0.121500, batch loss=0.078085, epoch loss=0.640176Batch=959, step=18960, lr=0.121250, batch loss=0.014186, epoch loss=0.654362Batch=1019, step=19020, lr=0.121000, batch loss=0.020439, epoch loss=0.674801Batch=1079, step=19080, lr=0.120750, batch loss=0.006414, epoch loss=0.681216Batch=1139, step=19140, lr=0.120500, batch loss=0.024543, epoch loss=0.705759Batch=1199, step=19200, lr=0.120250, batch loss=0.010214, epoch loss=0.715973Epoch=15, step=19200, lr=0.120250, epoch loss=0.715973Batch=59, step=19260, lr=0.120000, batch loss=0.004946, epoch loss=0.004946Batch=119, step=19320, lr=0.119750, batch loss=0.016378, epoch loss=0.021324Batch=179, step=19380, lr=0.119500, batch loss=0.038477, epoch loss=0.059801Batch=239, step=19440, lr=0.119250, batch loss=0.020656, epoch loss=0.080457Batch=299, step=19500, lr=0.119000, batch loss=0.005080, epoch loss=0.085537Batch=359, step=19560, lr=0.118750, batch loss=0.016968, epoch loss=0.102505Batch=419, step=19620, lr=0.118500, batch loss=0.018992, epoch loss=0.121497Batch=479, step=19680, lr=0.118250, batch loss=0.010312, epoch loss=0.131809Batch=539, step=19740, lr=0.118000, batch loss=0.019923, epoch loss=0.151732Batch=599, step=19800, lr=0.117750, batch loss=0.020099, epoch loss=0.171831Batch=659, step=19860, lr=0.117500, batch loss=0.017466, epoch loss=0.189297Batch=719, step=19920, lr=0.117250, batch loss=0.015924, epoch loss=0.205222Batch=779, step=19980, lr=0.117000, batch loss=0.018399, epoch loss=0.223621Batch=839, step=20040, lr=0.116750, batch loss=0.030699, epoch loss=0.254320Batch=899, step=20100, lr=0.116500, batch loss=0.026383, epoch loss=0.280703Batch=959, step=20160, lr=0.116250, batch loss=0.012529, epoch loss=0.293232Batch=1019, step=20220, lr=0.116000, batch loss=0.015310, epoch loss=0.308542Batch=1079, step=20280, lr=0.115750, batch loss=0.003682, epoch loss=0.312224Batch=1139, step=20340, lr=0.115500, batch loss=0.016538, epoch loss=0.328762Batch=1199, step=20400, lr=0.115250, batch loss=0.007895, epoch loss=0.336657Epoch=16, step=20400, lr=0.115250, epoch loss=0.336657Batch=59, step=20460, lr=0.115000, batch loss=0.003750, epoch loss=0.003750Batch=119, step=20520, lr=0.114750, batch loss=0.008817, epoch loss=0.012568Batch=179, step=20580, lr=0.114500, batch loss=0.017341, epoch loss=0.029909Batch=239, step=20640, lr=0.114250, batch loss=0.013439, epoch loss=0.043348Batch=299, step=20700, lr=0.114000, batch loss=0.003641, epoch loss=0.046989Batch=359, step=20760, lr=0.113750, batch loss=0.015449, epoch loss=0.062438Batch=419, step=20820, lr=0.113500, batch loss=0.015211, epoch loss=0.077649Batch=479, step=20880, lr=0.113250, batch loss=0.003892, epoch loss=0.081540Batch=539, step=20940, lr=0.113000, batch 
loss=0.025697, epoch loss=0.107237Batch=599, step=21000, lr=0.112750, batch loss=0.022662, epoch loss=0.129899Batch=659, step=21060, lr=0.112500, batch loss=0.016684, epoch loss=0.146583Batch=719, step=21120, lr=0.112250, batch loss=0.050679, epoch loss=0.197262Batch=779, step=21180, lr=0.112000, batch loss=0.070332, epoch loss=0.267594Batch=839, step=21240, lr=0.111750, batch loss=0.025489, epoch loss=0.293083Batch=899, step=21300, lr=0.111500, batch loss=0.030093, epoch loss=0.323176Batch=959, step=21360, lr=0.111250, batch loss=0.010458, epoch loss=0.333634Batch=1019, step=21420, lr=0.111000, batch loss=0.012499, epoch loss=0.346133Batch=1079, step=21480, lr=0.110750, batch loss=0.002019, epoch loss=0.348152Batch=1139, step=21540, lr=0.110500, batch loss=0.012901, epoch loss=0.361053Batch=1199, step=21600, lr=0.110250, batch loss=0.005215, epoch loss=0.366267Epoch=17, step=21600, lr=0.110250, epoch loss=0.366267Batch=59, step=21660, lr=0.110000, batch loss=0.002438, epoch loss=0.002438Batch=119, step=21720, lr=0.109750, batch loss=0.006782, epoch loss=0.009220Batch=179, step=21780, lr=0.109500, batch loss=0.012986, epoch loss=0.022206Batch=239, step=21840, lr=0.109250, batch loss=0.010205, epoch loss=0.032411Batch=299, step=21900, lr=0.109000, batch loss=0.010059, epoch loss=0.042470Batch=359, step=21960, lr=0.108750, batch loss=0.015106, epoch loss=0.057577Batch=419, step=22020, lr=0.108500, batch loss=0.012275, epoch loss=0.069851Batch=479, step=22080, lr=0.108250, batch loss=0.002763, epoch loss=0.072615Batch=539, step=22140, lr=0.108000, batch loss=0.019114, epoch loss=0.091729Batch=599, step=22200, lr=0.107750, batch loss=0.017199, epoch loss=0.108928Batch=659, step=22260, lr=0.107500, batch loss=0.014507, epoch loss=0.123435Batch=719, step=22320, lr=0.107250, batch loss=0.028151, epoch loss=0.151586Batch=779, step=22380, lr=0.107000, batch loss=0.043840, epoch loss=0.195426Batch=839, step=22440, lr=0.106750, batch loss=0.021890, epoch loss=0.217316Batch=899, step=22500, lr=0.106500, batch loss=0.022476, epoch loss=0.239792Batch=959, step=22560, lr=0.106250, batch loss=0.011403, epoch loss=0.251195Batch=1019, step=22620, lr=0.106000, batch loss=0.009198, epoch loss=0.260393Batch=1079, step=22680, lr=0.105750, batch loss=0.000140, epoch loss=0.260532Batch=1139, step=22740, lr=0.105500, batch loss=0.010038, epoch loss=0.270570Batch=1199, step=22800, lr=0.105250, batch loss=0.004360, epoch loss=0.274930Epoch=18, step=22800, lr=0.105250, epoch loss=0.274930Batch=59, step=22860, lr=0.105000, batch loss=0.001448, epoch loss=0.001448Batch=119, step=22920, lr=0.104750, batch loss=0.005869, epoch loss=0.007317Batch=179, step=22980, lr=0.104500, batch loss=0.011184, epoch loss=0.018501Batch=239, step=23040, lr=0.104250, batch loss=0.011490, epoch loss=0.029991Batch=299, step=23100, lr=0.104000, batch loss=0.008692, epoch loss=0.038683Batch=359, step=23160, lr=0.103750, batch loss=0.011062, epoch loss=0.049746Batch=419, step=23220, lr=0.103500, batch loss=0.010710, epoch loss=0.060456Batch=479, step=23280, lr=0.103250, batch loss=0.002556, epoch loss=0.063011Batch=539, step=23340, lr=0.103000, batch loss=0.017508, epoch loss=0.080519Batch=599, step=23400, lr=0.102750, batch loss=0.013803, epoch loss=0.094322Batch=659, step=23460, lr=0.102500, batch loss=0.010230, epoch loss=0.104552Batch=719, step=23520, lr=0.102250, batch loss=0.015812, epoch loss=0.120364Batch=779, step=23580, lr=0.102000, batch loss=0.021885, epoch loss=0.142249Batch=839, step=23640, lr=0.101750, batch loss=0.027557, epoch 
loss=0.169807Batch=899, step=23700, lr=0.101500, batch loss=0.021839, epoch loss=0.191646Batch=959, step=23760, lr=0.101250, batch loss=0.010633, epoch loss=0.202278Batch=1019, step=23820, lr=0.101000, batch loss=0.008117, epoch loss=0.210396Batch=1079, step=23880, lr=0.100750, batch loss=0.000764, epoch loss=0.211159Batch=1139, step=23940, lr=0.100500, batch loss=0.009429, epoch loss=0.220589Batch=1199, step=24000, lr=0.100250, batch loss=0.004585, epoch loss=0.225174Epoch=19, step=24000, lr=0.100250, epoch loss=0.225174Half-moons scatterplot and decision boundary:┌────────────────────────────────────────────────────────────────────────────────────────────────────┐│********************************#*******************************************************************││**********************#*#*#######*###*#####*********************************************************││**********************#########################*****************************************************││*****************#**########*######*###########*###*************************************************││***************#################*###################************************************************││************######*#################*#################**********************************************││**********#*#####*########*#**************##*#########*#********************************************││***********########*##*#******************#*****##########******************************************││***********###########*************************############**************************************...││********######*####*********************************###*###*#*********************************......││*******######**##**********************************#*######*#*******************************........││*******##*##**##***********..........***************########*##***************************..........││*****#######************.......%...%%...***************#########*************************.........%.││******######***********.........%........***************##*#####************************......%.%.%.││***#########**********.........%%%.%%......*************#*#######*********************.......%.%%%%.││****#######***********.........%%%%.........************#########********************........%%.%%.%││**#######************..........%%%%%%%.......**************###*###******************.........%%%%%%.││*##*####*************..........%%%%%%%.........***********########*****************..........%%%%%%.││*#######************...........%%%%%%%..........************#######**************............%%%%%%.││*##*####***********............%%.%%%%%..........************####***************............%%%%%%%.││*#####*#***********.............%%%%%%%............**********##*###************..............%%%%%..││#######***********.............%.%%%%%%.............*********#######**********.............%%%%.%%..││#####*#***********..............%%%%%%%...............*******#######********...............%%%%%%%%.││###*#*#**********...............%%%%%%%%%..............*******######*******................%%%%%%...││#######**********................%%%%%%%%...............*****###*###******................%%%%%%....││######**********.................%%%%%%%%%................***#*###*******...............%%%%%%%%%...││*#*##*#*********..................%%%%%%%%%%...............***######****.................%%%%%%.....││#****##********....................%%%%%%%%%................***###*#**................%.%%%%%%%.....││**************..........
...........%.%%%%%%...................*******..................%.%%.%%......││**************.......................%..%%%%%%%................*****..............%.%%%%%%%%%.......││*************.........................%.%%%.%%%%.................**...............%%%%%%%.%.%.......││*************...........................%..%%%%..%................................%%%%%%%%..........││************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........││************.............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........││***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............││***********..................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............││**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................││*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................││*********............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................││********................................................%...%%%%.%%.%%%%..%.........................│└────────────────────────────────────────────────────────────────────────────────────────────────────┘2025-03-20 22:33.53 ---> saved as "51a4c9a309d4fff4b1eae410300c4e45e141f1b20756f4791c5e62d4f0f9c28a"Job succeeded2025-03-20 22:33.54: Job succeeded
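A sanity check on the training log above: the printed learning rate is consistent with a linear decay of 0.00025 every 60 steps, from 0.200000 at step 60 down to 0.100250 at step 24000 (20 epochs of 1,200 steps each), and the printed epoch loss is the running sum of the batch losses within the epoch (e.g. 23.609453 + 8.539634 = 32.149087). The OCaml snippet below is inferred from the logged values only, not taken from the OCANNL sources; it reproduces a few of the logged learning rates:

(* Inferred from the log, not from the OCANNL sources: the schedule looks like
   a linear decay of 0.00025 per 60 steps, i.e. 0.20025 at step 0. *)
let lr_of_step step = 0.20025 -. float_of_int step *. (0.00025 /. 60.)

let () =
  (* Spot-check against learning rates printed in the log above. *)
  [ (60, 0.200000); (1200, 0.195250); (12000, 0.150250); (24000, 0.100250) ]
  |> List.iter (fun (step, logged) ->
         Printf.printf "step=%5d  inferred lr=%.6f  logged lr=%.6f\n" step
           (lr_of_step step) logged)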