2025-03-21 15:22.29: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (f6ea3750d181d26e3fd23df51a415f362cff8525) (linux-x86_64:debian-12-5.3_opam-2.3)
Base: ocaml/opam:debian-12-ocaml-5.3@sha256:bc8aaa1230ecb57c5762546b9227e538394dff3b0a270fc783baf8bc6bd0bf51
Opam project build

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard f6ea3750
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-5.3@sha256:bc8aaa1230ecb57c5762546b9227e538394dff3b0a270fc783baf8bc6bd0bf51
# debian-12-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
    opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE

docker build .
END-REPRO-BLOCK
2025-03-21 15:22.29: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-5.3@sha256:bc8aaa1230ecb57c5762546b9227e538394dff3b0a270fc783baf8bc6bd0bf51-debian-12-5.3_opam-2.3-3fcdf15be1e8f7dcae915b4cdb940fd5"
2025-03-21 15:22.29: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-5.3@sha256:bc8aaa1230ecb57c5762546b9227e538394dff3b0a270fc783baf8bc6bd0bf51)
 (comment debian-12-5.3_opam-2.3)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
      (shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
 (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'"))
 (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'"))
 (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
      (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS"))
 (copy (src .) (dst /src))
 (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)
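Aside: the OBuilder spec above is one large S-expression mirroring the Dockerfile, and sexplib (which appears in the DEPS list) can round-trip it. A minimal OCaml sketch, not part of the CI tooling; the file name spec.sexp is an assumption:

  (* Load an OBuilder spec saved as "spec.sexp" (hypothetical file name)
     and pretty-print it with indentation. *)
  let () =
    let sexp = Sexplib.Sexp.load_sexp "spec.sexp" in
    print_endline (Sexplib.Sexp.to_string_hum ~indent:2 sexp)

Compile with, e.g., ocamlfind ocamlopt -package sexplib -linkpkg pretty_spec.ml.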
2025-03-21 15:22.29: Waiting for resource in pool OCluster
2025-03-21 15:22.29: Waiting for worker…
2025-03-21 15:25.15: Got resource from pool OCluster
Building on toxis.caelum.ci.dev
All commits already cached
HEAD is now at f6ea3750 Untested: revert the Cmpne primitive op: can be used to test for NaN (x <> x ==> x = NaN)

(from ocaml/opam:debian-12-ocaml-5.3@sha256:bc8aaa1230ecb57c5762546b9227e538394dff3b0a270fc783baf8bc6bd0bf51)
2025-03-21 15:26.01 ---> saved as "4c4507dc2c915bc2fe880d11485a066845ce2ccaf87ae0c7a8ec26d34c0670c5"
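Aside: the HEAD commit message above refers to using the Cmpne primitive as a NaN test, relying on NaN being the only IEEE 754 value that is not equal to itself. A standalone OCaml illustration of that property (not OCANNL code):

  (* NaN is the only float for which x <> x holds, so self-inequality is a NaN test;
     the standard library's Float.is_nan agrees. *)
  let is_nan (x : float) = x <> x

  let () =
    assert (is_nan Float.nan);
    assert (not (is_nan 1.0));
    assert (Bool.equal (is_nan Float.nan) (Float.is_nan Float.nan))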
/: (comment debian-12-5.3_opam-2.3)
/: (user (uid 1000) (gid 1000))
/: (env CLICOLOR_FORCE 1)
/: (env OPAMCOLOR always)
/: (workdir /src)
/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-03-21 15:26.02 ---> saved as "172407e1e661134309aea2eb4045783b25179cce1174e2b787c71a0e229d6f99"
/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
  - you won't be able to use mercurial repositories unless you install the hg command on your system.
  - you won't be able to use darcs repositories unless you install the darcs command on your system.
This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.
Continue? [y/n] y
[NOTE] The 'jobs' option was reset, its value was 39 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using:
    opam option jobs=39 --global
Format upgrade done.
<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[ERROR] Could not update repository "opam-repository-archive": "/usr/bin/git fetch -q" exited with code 128
        "fatal: unable to access 'https://github.com/ocaml/opam-repository-archive/': Could not resolve host: github.com"
[default] synchronised from file:///home/opam/opam-repository
2025-03-21 15:27.50 ---> saved as "09dad00ef8e83b2a63e7fe35b5cddf78fe4785748596ca052e2c9a3bfcd80b21"
/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-03-21 15:27.51 ---> saved as "4c6e6131f0e3a298407e281b1e9dfd9b93e12d2f56fdd382c559c07f8223d814"
/src: (workdir /src)
/src: (run (shell "sudo chown opam /src"))
2025-03-21 15:27.51 ---> saved as "a6d2d725d9ab586b26e2389e81730115d2ead44f3f6db0f5c35ba85c11f7e8a2"
/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
 * branch                  master     -> FETCH_HEAD
   862a7640b1..acfb0e6e94  master     -> origin/master
4e25d0cf5f Merge pull request #27651 from lukstafi/opam-publish-ppx_minidebug.2.1.0
<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[opam-repository-archive] synchronised from git+https://github.com/ocaml/opam-repository-archive
[default] synchronised from file:///home/opam/opam-repository
Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# To update the current shell environment, run: eval $(opam env)
2025-03-21 15:29.22 ---> saved as "16e82825fcbf79b3bd0cf9f43e22f07b70fcacf3627db24e2aace55a03ddf26a"
/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
2025-03-21 15:29.23 ---> saved as "58a53e2a3dd4d132e7b94535201b52d935bf6e768ec6163b1f46501d34443546"
/src: (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'"))
[neural_nets_lib.dev] synchronised (file:///src)
neural_nets_lib is now pinned to file:///src (version dev)
[arrayjit.dev] synchronised (file:///src)
arrayjit is now pinned to file:///src (version dev)
2025-03-21 15:29.28 ---> saved as "cb89076554ba8957a03621180f60934d6cf4212d458e883b14a9a41a2017d5ae"
/src: (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'"))
2025-03-21 15:29.29 ---> saved as "18096c8c7e2d63a4fac760017a7ca2f078f9ec7b6b838a592e612464b9b5d3b4"
/src: (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
/src: (env CI true)
/src: (env OCAMLCI true)
/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
+ /usr/bin/sudo "apt-get" "update"
- Get:1 http://deb.debian.org/debian bookworm InRelease [151 kB]
- Get:2 http://deb.debian.org/debian bookworm-updates InRelease [55.4 kB]
- Get:3 http://deb.debian.org/debian-security bookworm-security InRelease [48.0 kB]
- Get:4 http://deb.debian.org/debian bookworm/main amd64 Packages [8792 kB]
- Get:5 http://deb.debian.org/debian-security bookworm-security/main amd64 Packages [249 kB]
- Fetched 9296 kB in 2s (4162 kB/s)
- Reading package lists...
<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>
[arrayjit.dev] synchronised (file:///src)
[neural_nets_lib.dev] synchronised (file:///src)
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following system packages will first need to be installed:
    libffi-dev pkg-config
<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>
+ /usr/bin/sudo "apt-get" "install" "-qq" "-yy" "libffi-dev" "pkg-config"
- debconf: delaying package configuration, since apt-utils is not installed
- Selecting previously unselected package libffi-dev:amd64.
- (Reading database ... 18776 files and directories currently installed.)
- Preparing to unpack .../libffi-dev_3.4.4-1_amd64.deb ...
- Unpacking libffi-dev:amd64 (3.4.4-1) ...
- Selecting previously unselected package libpkgconf3:amd64.
- Preparing to unpack .../libpkgconf3_1.8.1-1_amd64.deb ...
- Unpacking libpkgconf3:amd64 (1.8.1-1) ...
- Selecting previously unselected package pkgconf-bin.
- Preparing to unpack .../pkgconf-bin_1.8.1-1_amd64.deb ...
- Unpacking pkgconf-bin (1.8.1-1) ...
- Selecting previously unselected package pkgconf:amd64.
- Preparing to unpack .../pkgconf_1.8.1-1_amd64.deb ...
- Unpacking pkgconf:amd64 (1.8.1-1) ...
- Selecting previously unselected package pkg-config:amd64.
- Preparing to unpack .../pkg-config_1.8.1-1_amd64.deb ...
- Unpacking pkg-config:amd64 (1.8.1-1) ...
- Setting up libffi-dev:amd64 (3.4.4-1) ...
- Setting up libpkgconf3:amd64 (1.8.1-1) ...
- Setting up pkgconf-bin (1.8.1-1) ...
- Setting up pkgconf:amd64 (1.8.1-1) ...
- Setting up pkg-config:amd64 (1.8.1-1) ...
- Processing triggers for libc-bin (2.36-9+deb12u9) ...
2025-03-21 15:30.06 ---> saved as "cbad75e01466966f4e0a25626ca7a554e66e02ce762ea53b462d2c65de7b6f32"
/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS"))
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).
[NOTE] Package ocaml is already installed (current version is 5.3.0).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-effects is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following actions will be performed:
=== install 65 packages
  - install angstrom                 0.16.1
  - install backoff                  0.1.1
  - install base                     v0.17.1
  - install bigarray-compat         1.1.0
  - install bigstringaf              0.10.0
  - install conf-libffi              2.0.0
  - install conf-pkg-config          4
  - install cppo                     1.8.0
  - install csexp                    1.5.2
  - install ctypes                   0.23.0
  - install ctypes-foreign           0.23.0
  - install dune                     3.17.2
  - install dune-configurator        3.17.2
  - install fieldslib                v0.17.0
  - install integers                 0.7.0
  - install jane-street-headers      v0.17.0
  - install jst-config               v0.17.0
  - install mtime                    2.1.0
  - install multicore-magic          2.3.1
  - install num                      1.5-1
  - install ocaml-compiler-libs      v0.17.0
  - install ocaml-syntax-shims       1.0.0
  - install ocaml_intrinsics_kernel  v0.17.1
  - install ocamlbuild               0.16.1
  - install ocamlfind                1.9.8
  - install parsexp                  v0.17.0
  - install ppx_assert               v0.17.0
  - install ppx_base                 v0.17.0
  - install ppx_cold                 v0.17.0
  - install ppx_compare              v0.17.0
  - install ppx_derivers             1.2.1
  - install ppx_deriving             6.0.3
  - install ppx_enumerate            v0.17.0
  - install ppx_expect               v0.17.2
  - install ppx_fields_conv          v0.17.0
  - install ppx_globalize            v0.17.0
  - install ppx_hash                 v0.17.0
  - install ppx_here                 v0.17.0
  - install ppx_inline_test          v0.17.0
  - install ppx_minidebug            2.1.0
  - install ppx_optcomp              v0.17.0
  - install ppx_sexp_conv            v0.17.0
  - install ppx_string               v0.17.0
  - install ppx_variants_conv        v0.17.0
  - install ppxlib                   0.35.0
  - install ppxlib_jane              v0.17.2
  - install printbox                 0.12
  - install printbox-ext-plot        0.12
  - install printbox-html            0.12
  - install printbox-md              0.12
  - install printbox-text            0.12
  - install ptime                    1.2.0
  - install re                       1.12.0
  - install saturn_lockfree          0.5.0
  - install seq                      base
  - install sexplib                  v0.17.0
  - install sexplib0                 v0.17.0
  - install stdio                    v0.17.0
  - install stdlib-shims             0.3.0
  - install time_now                 v0.17.0
  - install topkg                    1.0.8
  - install tyxml                    4.6.0
  - install uucp                     16.0.0
  - install uutf                     1.0.4
  - install variantslib              v0.17.0
<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> retrieved backoff.0.1.1 (cached)
-> retrieved bigarray-compat.1.1.0 (cached)
-> retrieved angstrom.0.16.1 (cached)
-> retrieved base.v0.17.1 (cached)
-> retrieved bigstringaf.0.10.0 (cached)
-> retrieved cppo.1.8.0 (cached)
-> installed conf-pkg-config.4
-> retrieved csexp.1.5.2 (cached)
-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached)
-> installed conf-libffi.2.0.0
-> retrieved fieldslib.v0.17.0 (cached)
-> retrieved integers.0.7.0 (cached)
-> retrieved jane-street-headers.v0.17.0 (cached)
-> retrieved jst-config.v0.17.0 (cached)
-> retrieved mtime.2.1.0 (cached)
-> retrieved multicore-magic.2.3.1 (cached)
-> retrieved num.1.5-1 (cached)
-> retrieved ocaml-compiler-libs.v0.17.0 (cached)
-> retrieved ocaml-syntax-shims.1.0.0 (cached)
-> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached)
-> retrieved ocamlbuild.0.16.1 (cached)
-> retrieved ocamlfind.1.9.8 (cached)
-> retrieved parsexp.v0.17.0 (cached)
-> retrieved ppx_assert.v0.17.0 (cached)
-> retrieved ppx_base.v0.17.0 (cached)
-> retrieved ppx_cold.v0.17.0 (cached)
-> retrieved ppx_compare.v0.17.0 (cached)
-> retrieved ppx_derivers.1.2.1 (cached)
-> retrieved ppx_enumerate.v0.17.0 (cached)
-> retrieved ppx_deriving.6.0.3 (cached)
-> retrieved ppx_expect.v0.17.2 (cached)
-> retrieved ppx_fields_conv.v0.17.0 (cached)
-> retrieved ppx_globalize.v0.17.0 (cached)
-> retrieved ppx_hash.v0.17.0 (cached)
-> retrieved ppx_here.v0.17.0 (cached)
-> retrieved ppx_inline_test.v0.17.0 (cached)
-> retrieved ppx_optcomp.v0.17.0 (cached)
-> retrieved ppx_sexp_conv.v0.17.0 (cached)
-> retrieved ppx_string.v0.17.0 (cached)
-> retrieved dune.3.17.2, dune-configurator.3.17.2 (cached)
-> installed num.1.5-1
-> retrieved ppx_minidebug.2.1.0 (cached)
-> retrieved ppx_variants_conv.v0.17.0 (cached)
-> retrieved ppxlib.0.35.0 (cached)
-> retrieved ppxlib_jane.v0.17.2 (cached)
-> retrieved ptime.1.2.0 (cached)
-> retrieved re.1.12.0 (cached)
-> retrieved seq.base (cached)
-> installed seq.base
-> retrieved saturn_lockfree.0.5.0 (cached)
-> retrieved sexplib.v0.17.0 (cached)
-> retrieved sexplib0.v0.17.0 (cached)
-> retrieved stdio.v0.17.0 (cached)
-> retrieved stdlib-shims.0.3.0 (cached)
-> retrieved time_now.v0.17.0 (cached)
-> retrieved topkg.1.0.8 (cached)
-> retrieved tyxml.4.6.0 (cached)
-> retrieved uutf.1.0.4 (cached)
-> retrieved variantslib.v0.17.0 (cached)
-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached)
-> retrieved uucp.16.0.0 (cached)
-> installed ocamlbuild.0.16.1
-> installed ocamlfind.1.9.8
-> installed topkg.1.0.8
-> installed uutf.1.0.4
-> installed mtime.2.1.0
-> installed ptime.1.2.0
-> installed dune.3.17.2
-> installed jane-street-headers.v0.17.0
-> installed csexp.1.5.2
-> installed backoff.0.1.1
-> installed bigarray-compat.1.1.0
-> installed multicore-magic.2.3.1
-> installed ocaml_intrinsics_kernel.v0.17.1
-> installed ppx_derivers.1.2.1
-> installed printbox.0.12
-> installed sexplib0.v0.17.0
-> installed stdlib-shims.0.3.0
-> installed ocaml-syntax-shims.1.0.0
-> installed re.1.12.0
-> installed ocaml-compiler-libs.v0.17.0
-> installed cppo.1.8.0
-> installed integers.0.7.0
-> installed saturn_lockfree.0.5.0
-> installed dune-configurator.3.17.2
-> installed parsexp.v0.17.0
-> installed bigstringaf.0.10.0
-> installed angstrom.0.16.1
-> installed sexplib.v0.17.0
-> installed tyxml.4.6.0
-> installed printbox-html.0.12
-> installed ctypes.0.23.0
-> installed base.v0.17.1
-> installed variantslib.v0.17.0
-> installed fieldslib.v0.17.0
-> installed stdio.v0.17.0
-> installed ctypes-foreign.0.23.0
-> installed uucp.16.0.0
-> installed printbox-text.0.12
-> installed printbox-md.0.12
-> installed printbox-ext-plot.0.12
-> installed ppxlib.0.35.0
-> installed ppxlib_jane.v0.17.2
-> installed ppx_optcomp.v0.17.0
-> installed ppx_cold.v0.17.0
-> installed ppx_here.v0.17.0
-> installed ppx_variants_conv.v0.17.0
-> installed ppx_fields_conv.v0.17.0
-> installed ppx_enumerate.v0.17.0
-> installed ppx_globalize.v0.17.0
-> installed ppx_deriving.6.0.3
-> installed ppx_compare.v0.17.0
-> installed ppx_sexp_conv.v0.17.0
-> installed ppx_hash.v0.17.0
-> installed ppx_assert.v0.17.0
-> installed ppx_base.v0.17.0
-> installed ppx_minidebug.2.1.0
-> installed jst-config.v0.17.0
-> installed ppx_string.v0.17.0
-> installed time_now.v0.17.0
-> installed ppx_inline_test.v0.17.0
-> installed ppx_expect.v0.17.2
Done.
# To update the current shell environment, run: eval $(opam env)
2025-03-21 15:32.35 ---> saved as "fb82ddc3efb677dbe27b91a8d7e4ccbba191089e6f2b4759d2c9fec9271a24db"
/src: (copy (src .) (dst /src))
2025-03-21 15:32.36 ---> saved as "c2c2c83b964ac1846c98489a2f719b6f6acdc7560d748f4efdcfe6afc8628d71"
/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
(cd _build/default/test_ppx && ./test_ppx_op.exe)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/8dd261d56fbbecff0b34c186f01438b7/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/d8642ca847ada47eaa056ec2c3d8648a/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test && ./moons_demo_parallel_run.exe)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file ("Set log_level to" 1) └─{orphaned from #2} Retrieving commandline, environment, or config file variable ocannl_backend Found cc, in the config file Retrieving commandline, environment, or config file variable ocannl_ll_ident_style Not found, using default heuristic Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level Not found, using default 3 Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command Not found, using default gcc Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout Not found, using default false Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453 Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087 Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382 Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039 Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216 Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512 Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081 Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141 Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385 Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263 Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603 Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902 Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024 Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685 Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407 Batch=959, step=960, lr=0.196250, batch loss=0.245136, epoch loss=44.035543 Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049 Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829 Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269 Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952 Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952 Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138 Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381 Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025 Batch=239, step=1440, lr=0.194250, batch loss=0.347897, epoch loss=1.058921 Batch=299, step=1500, lr=0.194000, batch loss=0.247348, epoch loss=1.306269 Batch=359, step=1560, lr=0.193750, batch loss=0.316559, epoch loss=1.622828 Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.935563 Batch=479, step=1680, lr=0.193250, batch loss=0.276268, epoch loss=2.211831 Batch=539, step=1740, lr=0.193000, batch loss=0.209826, epoch loss=2.421657 Batch=599, step=1800, lr=0.192750, batch loss=0.250384, epoch loss=2.672042 Batch=659, step=1860, lr=0.192500, batch loss=0.367201, epoch loss=3.039243 Batch=719, step=1920, lr=0.192250, batch loss=0.354917, epoch loss=3.394160 Batch=779, step=1980, lr=0.192000, batch loss=0.381382, epoch loss=3.775542 Batch=839, step=2040, lr=0.191750, batch loss=0.339637, epoch loss=4.115179 Batch=899, step=2100, lr=0.191500, batch loss=0.295234, epoch loss=4.410413 Batch=959, step=2160, lr=0.191250, batch loss=0.214033, epoch loss=4.624446 Batch=1019, 
step=2220, lr=0.191000, batch loss=0.330972, epoch loss=4.955419 Batch=1079, step=2280, lr=0.190750, batch loss=0.208236, epoch loss=5.163654 Batch=1139, step=2340, lr=0.190500, batch loss=0.278374, epoch loss=5.442028 Batch=1199, step=2400, lr=0.190250, batch loss=0.220793, epoch loss=5.662821 Epoch=1, step=2400, lr=0.190250, epoch loss=5.662821 Batch=59, step=2460, lr=0.190000, batch loss=0.230363, epoch loss=0.230363 Batch=119, step=2520, lr=0.189750, batch loss=0.195962, epoch loss=0.426325 Batch=179, step=2580, lr=0.189500, batch loss=0.221156, epoch loss=0.647481 Batch=239, step=2640, lr=0.189250, batch loss=0.328098, epoch loss=0.975578 Batch=299, step=2700, lr=0.189000, batch loss=0.202947, epoch loss=1.178525 Batch=359, step=2760, lr=0.188750, batch loss=0.289890, epoch loss=1.468415 Batch=419, step=2820, lr=0.188500, batch loss=0.281744, epoch loss=1.750160 Batch=479, step=2880, lr=0.188250, batch loss=0.264844, epoch loss=2.015004 Batch=539, step=2940, lr=0.188000, batch loss=0.203798, epoch loss=2.218802 Batch=599, step=3000, lr=0.187750, batch loss=0.248079, epoch loss=2.466881 Batch=659, step=3060, lr=0.187500, batch loss=0.345056, epoch loss=2.811937 Batch=719, step=3120, lr=0.187250, batch loss=0.343542, epoch loss=3.155479 Batch=779, step=3180, lr=0.187000, batch loss=0.366989, epoch loss=3.522468 Batch=839, step=3240, lr=0.186750, batch loss=0.321779, epoch loss=3.844248 Batch=899, step=3300, lr=0.186500, batch loss=0.283865, epoch loss=4.128112 Batch=959, step=3360, lr=0.186250, batch loss=0.214411, epoch loss=4.342523 Batch=1019, step=3420, lr=0.186000, batch loss=0.306400, epoch loss=4.648923 Batch=1079, step=3480, lr=0.185750, batch loss=0.177313, epoch loss=4.826236 Batch=1139, step=3540, lr=0.185500, batch loss=0.235576, epoch loss=5.061812 Batch=1199, step=3600, lr=0.185250, batch loss=0.197911, epoch loss=5.259723 Epoch=2, step=3600, lr=0.185250, epoch loss=5.259723 Batch=59, step=3660, lr=0.185000, batch loss=0.226539, epoch loss=0.226539 Batch=119, step=3720, lr=0.184750, batch loss=0.191802, epoch loss=0.418341 Batch=179, step=3780, lr=0.184500, batch loss=0.210712, epoch loss=0.629053 Batch=239, step=3840, lr=0.184250, batch loss=0.314104, epoch loss=0.943157 Batch=299, step=3900, lr=0.184000, batch loss=0.206011, epoch loss=1.149168 Batch=359, step=3960, lr=0.183750, batch loss=0.291253, epoch loss=1.440420 Batch=419, step=4020, lr=0.183500, batch loss=0.297434, epoch loss=1.737854 Batch=479, step=4080, lr=0.183250, batch loss=0.260511, epoch loss=1.998365 Batch=539, step=4140, lr=0.183000, batch loss=0.195229, epoch loss=2.193593 Batch=599, step=4200, lr=0.182750, batch loss=0.230395, epoch loss=2.423989 Batch=659, step=4260, lr=0.182500, batch loss=0.337132, epoch loss=2.761121 Batch=719, step=4320, lr=0.182250, batch loss=0.347122, epoch loss=3.108243 Batch=779, step=4380, lr=0.182000, batch loss=0.346320, epoch loss=3.454563 Batch=839, step=4440, lr=0.181750, batch loss=0.317770, epoch loss=3.772333 Batch=899, step=4500, lr=0.181500, batch loss=0.283906, epoch loss=4.056239 Batch=959, step=4560, lr=0.181250, batch loss=0.238371, epoch loss=4.294610 Batch=1019, step=4620, lr=0.181000, batch loss=0.336891, epoch loss=4.631501 Batch=1079, step=4680, lr=0.180750, batch loss=0.208592, epoch loss=4.840094 Batch=1139, step=4740, lr=0.180500, batch loss=0.249212, epoch loss=5.089306 Batch=1199, step=4800, lr=0.180250, batch loss=0.191768, epoch loss=5.281074 Epoch=3, step=4800, lr=0.180250, epoch loss=5.281074 Batch=59, step=4860, lr=0.180000, batch 
loss=0.228079, epoch loss=0.228079 Batch=119, step=4920, lr=0.179750, batch loss=0.190219, epoch loss=0.418298 Batch=179, step=4980, lr=0.179500, batch loss=0.205905, epoch loss=0.624203 Batch=239, step=5040, lr=0.179250, batch loss=0.309067, epoch loss=0.933269 Batch=299, step=5100, lr=0.179000, batch loss=0.204593, epoch loss=1.137862 Batch=359, step=5160, lr=0.178750, batch loss=0.271139, epoch loss=1.409001 Batch=419, step=5220, lr=0.178500, batch loss=0.264055, epoch loss=1.673056 Batch=479, step=5280, lr=0.178250, batch loss=0.239692, epoch loss=1.912748 Batch=539, step=5340, lr=0.178000, batch loss=0.189445, epoch loss=2.102194 Batch=599, step=5400, lr=0.177750, batch loss=0.231019, epoch loss=2.333213 Batch=659, step=5460, lr=0.177500, batch loss=0.323592, epoch loss=2.656805 Batch=719, step=5520, lr=0.177250, batch loss=0.325867, epoch loss=2.982672 Batch=779, step=5580, lr=0.177000, batch loss=0.343179, epoch loss=3.325851 Batch=839, step=5640, lr=0.176750, batch loss=0.309345, epoch loss=3.635195 Batch=899, step=5700, lr=0.176500, batch loss=0.273171, epoch loss=3.908366 Batch=959, step=5760, lr=0.176250, batch loss=0.214921, epoch loss=4.123287 Batch=1019, step=5820, lr=0.176000, batch loss=0.339150, epoch loss=4.462436 Batch=1079, step=5880, lr=0.175750, batch loss=0.207828, epoch loss=4.670264 Batch=1139, step=5940, lr=0.175500, batch loss=0.240055, epoch loss=4.910319 Batch=1199, step=6000, lr=0.175250, batch loss=0.186862, epoch loss=5.097180 Epoch=4, step=6000, lr=0.175250, epoch loss=5.097180 Batch=59, step=6060, lr=0.175000, batch loss=0.230529, epoch loss=0.230529 Batch=119, step=6120, lr=0.174750, batch loss=0.194291, epoch loss=0.424820 Batch=179, step=6180, lr=0.174500, batch loss=0.201541, epoch loss=0.626361 Batch=239, step=6240, lr=0.174250, batch loss=0.302380, epoch loss=0.928741 Batch=299, step=6300, lr=0.174000, batch loss=0.203930, epoch loss=1.132671 Batch=359, step=6360, lr=0.173750, batch loss=0.266115, epoch loss=1.398786 Batch=419, step=6420, lr=0.173500, batch loss=0.265282, epoch loss=1.664067 Batch=479, step=6480, lr=0.173250, batch loss=0.243319, epoch loss=1.907387 Batch=539, step=6540, lr=0.173000, batch loss=0.192969, epoch loss=2.100355 Batch=599, step=6600, lr=0.172750, batch loss=0.234500, epoch loss=2.334855 Batch=659, step=6660, lr=0.172500, batch loss=0.312107, epoch loss=2.646963 Batch=719, step=6720, lr=0.172250, batch loss=0.314221, epoch loss=2.961184 Batch=779, step=6780, lr=0.172000, batch loss=0.333223, epoch loss=3.294407 Batch=839, step=6840, lr=0.171750, batch loss=0.303757, epoch loss=3.598164 Batch=899, step=6900, lr=0.171500, batch loss=0.268468, epoch loss=3.866633 Batch=959, step=6960, lr=0.171250, batch loss=0.211039, epoch loss=4.077671 Batch=1019, step=7020, lr=0.171000, batch loss=0.330462, epoch loss=4.408133 Batch=1079, step=7080, lr=0.170750, batch loss=0.180866, epoch loss=4.588999 Batch=1139, step=7140, lr=0.170500, batch loss=0.216313, epoch loss=4.805312 Batch=1199, step=7200, lr=0.170250, batch loss=0.181911, epoch loss=4.987223 Epoch=5, step=7200, lr=0.170250, epoch loss=4.987223 Batch=59, step=7260, lr=0.170000, batch loss=0.232877, epoch loss=0.232877 Batch=119, step=7320, lr=0.169750, batch loss=0.184424, epoch loss=0.417301 Batch=179, step=7380, lr=0.169500, batch loss=0.196292, epoch loss=0.613593 Batch=239, step=7440, lr=0.169250, batch loss=0.290823, epoch loss=0.904416 Batch=299, step=7500, lr=0.169000, batch loss=0.200837, epoch loss=1.105253 Batch=359, step=7560, lr=0.168750, batch loss=0.258435, epoch 
loss=1.363689 Batch=419, step=7620, lr=0.168500, batch loss=0.256808, epoch loss=1.620497 Batch=479, step=7680, lr=0.168250, batch loss=0.235998, epoch loss=1.856495 Batch=539, step=7740, lr=0.168000, batch loss=0.187895, epoch loss=2.044390 Batch=599, step=7800, lr=0.167750, batch loss=0.223924, epoch loss=2.268314 Batch=659, step=7860, lr=0.167500, batch loss=0.305915, epoch loss=2.574229 Batch=719, step=7920, lr=0.167250, batch loss=0.309289, epoch loss=2.883518 Batch=779, step=7980, lr=0.167000, batch loss=0.329942, epoch loss=3.213460 Batch=839, step=8040, lr=0.166750, batch loss=0.292425, epoch loss=3.505885 Batch=899, step=8100, lr=0.166500, batch loss=0.261775, epoch loss=3.767660 Batch=959, step=8160, lr=0.166250, batch loss=0.193295, epoch loss=3.960955 Batch=1019, step=8220, lr=0.166000, batch loss=0.314033, epoch loss=4.274988 Batch=1079, step=8280, lr=0.165750, batch loss=0.172099, epoch loss=4.447087 Batch=1139, step=8340, lr=0.165500, batch loss=0.209742, epoch loss=4.656829 Batch=1199, step=8400, lr=0.165250, batch loss=0.178275, epoch loss=4.835103 Epoch=6, step=8400, lr=0.165250, epoch loss=4.835103 Batch=59, step=8460, lr=0.165000, batch loss=0.229725, epoch loss=0.229725 Batch=119, step=8520, lr=0.164750, batch loss=0.175017, epoch loss=0.404742 Batch=179, step=8580, lr=0.164500, batch loss=0.187817, epoch loss=0.592559 Batch=239, step=8640, lr=0.164250, batch loss=0.278203, epoch loss=0.870762 Batch=299, step=8700, lr=0.164000, batch loss=0.191994, epoch loss=1.062755 Batch=359, step=8760, lr=0.163750, batch loss=0.248632, epoch loss=1.311388 Batch=419, step=8820, lr=0.163500, batch loss=0.245601, epoch loss=1.556988 Batch=479, step=8880, lr=0.163250, batch loss=0.228591, epoch loss=1.785580 Batch=539, step=8940, lr=0.163000, batch loss=0.178132, epoch loss=1.963712 Batch=599, step=9000, lr=0.162750, batch loss=0.217388, epoch loss=2.181101 Batch=659, step=9060, lr=0.162500, batch loss=0.294814, epoch loss=2.475915 Batch=719, step=9120, lr=0.162250, batch loss=0.296433, epoch loss=2.772348 Batch=779, step=9180, lr=0.162000, batch loss=0.316728, epoch loss=3.089075 Batch=839, step=9240, lr=0.161750, batch loss=0.287243, epoch loss=3.376318 Batch=899, step=9300, lr=0.161500, batch loss=0.251060, epoch loss=3.627378 Batch=959, step=9360, lr=0.161250, batch loss=0.190532, epoch loss=3.817911 Batch=1019, step=9420, lr=0.161000, batch loss=0.311728, epoch loss=4.129639 Batch=1079, step=9480, lr=0.160750, batch loss=0.191595, epoch loss=4.321234 Batch=1139, step=9540, lr=0.160500, batch loss=0.215772, epoch loss=4.537006 Batch=1199, step=9600, lr=0.160250, batch loss=0.165620, epoch loss=4.702626 Epoch=7, step=9600, lr=0.160250, epoch loss=4.702626 Batch=59, step=9660, lr=0.160000, batch loss=0.197217, epoch loss=0.197217 Batch=119, step=9720, lr=0.159750, batch loss=0.165467, epoch loss=0.362684 Batch=179, step=9780, lr=0.159500, batch loss=0.179286, epoch loss=0.541970 Batch=239, step=9840, lr=0.159250, batch loss=0.263837, epoch loss=0.805807 Batch=299, step=9900, lr=0.159000, batch loss=0.182187, epoch loss=0.987994 Batch=359, step=9960, lr=0.158750, batch loss=0.240842, epoch loss=1.228836 Batch=419, step=10020, lr=0.158500, batch loss=0.232979, epoch loss=1.461815 Batch=479, step=10080, lr=0.158250, batch loss=0.213194, epoch loss=1.675009 Batch=539, step=10140, lr=0.158000, batch loss=0.170694, epoch loss=1.845703 Batch=599, step=10200, lr=0.157750, batch loss=0.200247, epoch loss=2.045950 Batch=659, step=10260, lr=0.157500, batch loss=0.283032, epoch loss=2.328982 
Batch=719, step=10320, lr=0.157250, batch loss=0.288754, epoch loss=2.617735 Batch=779, step=10380, lr=0.157000, batch loss=0.296880, epoch loss=2.914615 Batch=839, step=10440, lr=0.156750, batch loss=0.267657, epoch loss=3.182272 Batch=899, step=10500, lr=0.156500, batch loss=0.242699, epoch loss=3.424972 Batch=959, step=10560, lr=0.156250, batch loss=0.198668, epoch loss=3.623639 Batch=1019, step=10620, lr=0.156000, batch loss=0.295119, epoch loss=3.918758 Batch=1079, step=10680, lr=0.155750, batch loss=0.178662, epoch loss=4.097421 Batch=1139, step=10740, lr=0.155500, batch loss=0.205425, epoch loss=4.302846 Batch=1199, step=10800, lr=0.155250, batch loss=0.156138, epoch loss=4.458984 Epoch=8, step=10800, lr=0.155250, epoch loss=4.458984 Batch=59, step=10860, lr=0.155000, batch loss=0.177430, epoch loss=0.177430 Batch=119, step=10920, lr=0.154750, batch loss=0.152366, epoch loss=0.329795 Batch=179, step=10980, lr=0.154500, batch loss=0.167114, epoch loss=0.496909 Batch=239, step=11040, lr=0.154250, batch loss=0.242622, epoch loss=0.739531 Batch=299, step=11100, lr=0.154000, batch loss=0.169984, epoch loss=0.909515 Batch=359, step=11160, lr=0.153750, batch loss=0.222140, epoch loss=1.131654 Batch=419, step=11220, lr=0.153500, batch loss=0.229250, epoch loss=1.360905 Batch=479, step=11280, lr=0.153250, batch loss=0.202871, epoch loss=1.563775 Batch=539, step=11340, lr=0.153000, batch loss=0.159118, epoch loss=1.722894 Batch=599, step=11400, lr=0.152750, batch loss=0.178498, epoch loss=1.901392 Batch=659, step=11460, lr=0.152500, batch loss=0.264724, epoch loss=2.166116 Batch=719, step=11520, lr=0.152250, batch loss=0.256959, epoch loss=2.423075 Batch=779, step=11580, lr=0.152000, batch loss=0.273281, epoch loss=2.696355 Batch=839, step=11640, lr=0.151500, batch loss=0.255783, epoch loss=2.952138 Batch=899, step=11700, lr=0.151500, batch loss=0.212898, epoch loss=3.165036 Batch=959, step=11760, lr=0.151250, batch loss=0.168296, epoch loss=3.333332 Batch=1019, step=11820, lr=0.151000, batch loss=0.265906, epoch loss=3.599238 Batch=1079, step=11880, lr=0.150750, batch loss=0.149896, epoch loss=3.749134 Batch=1139, step=11940, lr=0.150500, batch loss=0.185432, epoch loss=3.934566 Batch=1199, step=12000, lr=0.150250, batch loss=0.139495, epoch loss=4.074060 Epoch=9, step=12000, lr=0.150250, epoch loss=4.074060 Batch=59, step=12060, lr=0.150000, batch loss=0.158072, epoch loss=0.158072 Batch=119, step=12120, lr=0.149750, batch loss=0.128806, epoch loss=0.286877 Batch=179, step=12180, lr=0.149500, batch loss=0.150373, epoch loss=0.437250 Batch=239, step=12240, lr=0.149250, batch loss=0.223336, epoch loss=0.660586 Batch=299, step=12300, lr=0.149000, batch loss=0.142521, epoch loss=0.803107 Batch=359, step=12360, lr=0.148750, batch loss=0.195564, epoch loss=0.998671 Batch=419, step=12420, lr=0.148500, batch loss=0.206113, epoch loss=1.204784 Batch=479, step=12480, lr=0.148250, batch loss=0.178855, epoch loss=1.383639 Batch=539, step=12540, lr=0.148000, batch loss=0.142531, epoch loss=1.526170 Batch=599, step=12600, lr=0.147750, batch loss=0.150485, epoch loss=1.676655 Batch=659, step=12660, lr=0.147500, batch loss=0.224800, epoch loss=1.901455 Batch=719, step=12720, lr=0.147250, batch loss=0.235482, epoch loss=2.136938 Batch=779, step=12780, lr=0.147000, batch loss=0.252525, epoch loss=2.389463 Batch=839, step=12840, lr=0.146750, batch loss=0.225061, epoch loss=2.614524 Batch=899, step=12900, lr=0.146500, batch loss=0.185228, epoch loss=2.799753 Batch=959, step=12960, lr=0.146250, batch 
loss=0.149193, epoch loss=2.948945 Batch=1019, step=13020, lr=0.146000, batch loss=0.267779, epoch loss=3.216725 Batch=1079, step=13080, lr=0.145750, batch loss=0.115531, epoch loss=3.332255 Batch=1139, step=13140, lr=0.145500, batch loss=0.155275, epoch loss=3.487530 Batch=1199, step=13200, lr=0.145250, batch loss=0.118480, epoch loss=3.606010 Epoch=10, step=13200, lr=0.145250, epoch loss=3.606010 Batch=59, step=13260, lr=0.145000, batch loss=0.143518, epoch loss=0.143518 Batch=119, step=13320, lr=0.144750, batch loss=0.119590, epoch loss=0.263108 Batch=179, step=13380, lr=0.144500, batch loss=0.127181, epoch loss=0.390289 Batch=239, step=13440, lr=0.144250, batch loss=0.186876, epoch loss=0.577165 Batch=299, step=13500, lr=0.144000, batch loss=0.112622, epoch loss=0.689786 Batch=359, step=13560, lr=0.143750, batch loss=0.161722, epoch loss=0.851508 Batch=419, step=13620, lr=0.143500, batch loss=0.160605, epoch loss=1.012113 Batch=479, step=13680, lr=0.143250, batch loss=0.147549, epoch loss=1.159662 Batch=539, step=13740, lr=0.143000, batch loss=0.118025, epoch loss=1.277687 Batch=599, step=13800, lr=0.142750, batch loss=0.120077, epoch loss=1.397763 Batch=659, step=13860, lr=0.142500, batch loss=0.175818, epoch loss=1.573582 Batch=719, step=13920, lr=0.142250, batch loss=0.172405, epoch loss=1.745987 Batch=779, step=13980, lr=0.142000, batch loss=0.188113, epoch loss=1.934100 Batch=839, step=14040, lr=0.141500, batch loss=0.194219, epoch loss=2.128319 Batch=899, step=14100, lr=0.141250, batch loss=0.207921, epoch loss=2.336240 Batch=959, step=14160, lr=0.141250, batch loss=0.110440, epoch loss=2.446680 Batch=1019, step=14220, lr=0.140750, batch loss=0.197945, epoch loss=2.644625 Batch=1079, step=14280, lr=0.140750, batch loss=0.080044, epoch loss=2.724669 Batch=1139, step=14340, lr=0.140500, batch loss=0.127607, epoch loss=2.852276 Batch=1199, step=14400, lr=0.140250, batch loss=0.087109, epoch loss=2.939385 Epoch=11, step=14400, lr=0.140250, epoch loss=2.939385 Batch=59, step=14460, lr=0.140000, batch loss=0.102013, epoch loss=0.102013 Batch=119, step=14520, lr=0.139750, batch loss=0.100622, epoch loss=0.202635 Batch=179, step=14580, lr=0.139500, batch loss=0.098461, epoch loss=0.301096 Batch=239, step=14640, lr=0.139250, batch loss=0.142473, epoch loss=0.443569 Batch=299, step=14700, lr=0.139000, batch loss=0.088131, epoch loss=0.531700 Batch=359, step=14760, lr=0.138500, batch loss=0.146920, epoch loss=0.678620 Batch=419, step=14820, lr=0.138500, batch loss=0.209454, epoch loss=0.888074 Batch=479, step=14880, lr=0.138250, batch loss=0.090651, epoch loss=0.978725 Batch=539, step=14940, lr=0.138000, batch loss=0.073511, epoch loss=1.052235 Batch=599, step=15000, lr=0.137750, batch loss=0.099841, epoch loss=1.152076 Batch=659, step=15060, lr=0.137500, batch loss=0.129223, epoch loss=1.281298 Batch=719, step=15120, lr=0.137250, batch loss=0.129821, epoch loss=1.411119 Batch=779, step=15180, lr=0.137000, batch loss=0.152452, epoch loss=1.563571 Batch=839, step=15240, lr=0.136750, batch loss=0.146158, epoch loss=1.709729 Batch=899, step=15300, lr=0.136500, batch loss=0.140964, epoch loss=1.850692 Batch=959, step=15360, lr=0.136250, batch loss=0.099386, epoch loss=1.950079 Batch=1019, step=15420, lr=0.136000, batch loss=0.194514, epoch loss=2.144593 Batch=1079, step=15480, lr=0.135750, batch loss=0.036057, epoch loss=2.180650 Batch=1139, step=15540, lr=0.135500, batch loss=0.089015, epoch loss=2.269665 Batch=1199, step=15600, lr=0.135250, batch loss=0.055120, epoch loss=2.324785 Epoch=12, 
step=15600, lr=0.135250, epoch loss=2.324785 Batch=59, step=15660, lr=0.135000, batch loss=0.086096, epoch loss=0.086096 Batch=119, step=15720, lr=0.134750, batch loss=0.149526, epoch loss=0.235622 Batch=179, step=15780, lr=0.134500, batch loss=0.100474, epoch loss=0.336095 Batch=239, step=15840, lr=0.134250, batch loss=0.097846, epoch loss=0.433941 Batch=299, step=15900, lr=0.133750, batch loss=0.043108, epoch loss=0.477049 Batch=359, step=15960, lr=0.133750, batch loss=0.080462, epoch loss=0.557511 Batch=419, step=16020, lr=0.133500, batch loss=0.079959, epoch loss=0.637470 Batch=479, step=16080, lr=0.133250, batch loss=0.067334, epoch loss=0.704804 Batch=539, step=16140, lr=0.133000, batch loss=0.054339, epoch loss=0.759143 Batch=599, step=16200, lr=0.132750, batch loss=0.102979, epoch loss=0.862121 Batch=659, step=16260, lr=0.132500, batch loss=0.076799, epoch loss=0.938920 Batch=719, step=16320, lr=0.132250, batch loss=0.085281, epoch loss=1.024201 Batch=779, step=16380, lr=0.132000, batch loss=0.098347, epoch loss=1.122548 Batch=839, step=16440, lr=0.131750, batch loss=0.119081, epoch loss=1.241629 Batch=899, step=16500, lr=0.131500, batch loss=0.161970, epoch loss=1.403599 Batch=959, step=16560, lr=0.131250, batch loss=0.040519, epoch loss=1.444118 Batch=1019, step=16620, lr=0.130750, batch loss=0.106021, epoch loss=1.550139 Batch=1079, step=16680, lr=0.130500, batch loss=0.027542, epoch loss=1.577681 Batch=1139, step=16740, lr=0.130500, batch loss=0.057332, epoch loss=1.635013 Batch=1199, step=16800, lr=0.130250, batch loss=0.029983, epoch loss=1.664997 Epoch=13, step=16800, lr=0.130250, epoch loss=1.664997 Batch=59, step=16860, lr=0.130000, batch loss=0.038211, epoch loss=0.038211 Batch=119, step=16920, lr=0.129750, batch loss=0.053178, epoch loss=0.091390 Batch=179, step=16980, lr=0.129500, batch loss=0.049900, epoch loss=0.141290 Batch=239, step=17040, lr=0.129250, batch loss=0.067537, epoch loss=0.208827 Batch=299, step=17100, lr=0.129000, batch loss=0.031280, epoch loss=0.240107 Batch=359, step=17160, lr=0.128750, batch loss=0.048397, epoch loss=0.288504 Batch=419, step=17220, lr=0.128500, batch loss=0.074866, epoch loss=0.363370 Batch=479, step=17280, lr=0.128250, batch loss=0.021440, epoch loss=0.384811 Batch=539, step=17340, lr=0.128000, batch loss=0.027933, epoch loss=0.412744 Batch=599, step=17400, lr=0.127750, batch loss=0.033817, epoch loss=0.446561 Batch=659, step=17460, lr=0.127500, batch loss=0.044137, epoch loss=0.490697 Batch=719, step=17520, lr=0.127250, batch loss=0.037981, epoch loss=0.528678 Batch=779, step=17580, lr=0.127000, batch loss=0.037808, epoch loss=0.566486 Batch=839, step=17640, lr=0.126750, batch loss=0.054054, epoch loss=0.620540 Batch=899, step=17700, lr=0.126500, batch loss=0.053227, epoch loss=0.673767 Batch=959, step=17760, lr=0.126250, batch loss=0.043214, epoch loss=0.716982 Batch=1019, step=17820, lr=0.126000, batch loss=0.111711, epoch loss=0.828693 Batch=1079, step=17880, lr=0.125750, batch loss=0.029883, epoch loss=0.858576 Batch=1139, step=17940, lr=0.125500, batch loss=0.053718, epoch loss=0.912294 Batch=1199, step=18000, lr=0.125250, batch loss=0.021495, epoch loss=0.933789 Epoch=14, step=18000, lr=0.125250, epoch loss=0.933789 Batch=59, step=18060, lr=0.125000, batch loss=0.012112, epoch loss=0.012112 Batch=119, step=18120, lr=0.124750, batch loss=0.019410, epoch loss=0.031522 Batch=179, step=18180, lr=0.124500, batch loss=0.029212, epoch loss=0.060734 Batch=239, step=18240, lr=0.124250, batch loss=0.032647, epoch loss=0.093381 
Batch=299, step=18300, lr=0.124000, batch loss=0.016571, epoch loss=0.109953
Batch=359, step=18360, lr=0.123750, batch loss=0.043895, epoch loss=0.153848
Batch=419, step=18420, lr=0.123500, batch loss=0.030777, epoch loss=0.184625
Batch=479, step=18480, lr=0.123250, batch loss=0.012146, epoch loss=0.196772
Batch=539, step=18540, lr=0.123000, batch loss=0.024633, epoch loss=0.221404
Batch=599, step=18600, lr=0.122750, batch loss=0.037198, epoch loss=0.258603
Batch=659, step=18660, lr=0.122500, batch loss=0.025524, epoch loss=0.284127
Batch=719, step=18720, lr=0.122250, batch loss=0.033693, epoch loss=0.317820
Batch=779, step=18780, lr=0.122000, batch loss=0.082940, epoch loss=0.400760
Batch=839, step=18840, lr=0.121750, batch loss=0.045484, epoch loss=0.446244
Batch=899, step=18900, lr=0.121500, batch loss=0.046388, epoch loss=0.492632
Batch=959, step=18960, lr=0.121250, batch loss=0.013951, epoch loss=0.506583
Batch=1019, step=19020, lr=0.121000, batch loss=0.023546, epoch loss=0.530129
Batch=1079, step=19080, lr=0.120750, batch loss=0.009102, epoch loss=0.539231
Batch=1139, step=19140, lr=0.120500, batch loss=0.023017, epoch loss=0.562247
Batch=1199, step=19200, lr=0.120250, batch loss=0.008728, epoch loss=0.570975
Epoch=15, step=19200, lr=0.120250, epoch loss=0.570975
Batch=59, step=19260, lr=0.120000, batch loss=0.004336, epoch loss=0.004336
Batch=119, step=19320, lr=0.119750, batch loss=0.013046, epoch loss=0.017382
Batch=179, step=19380, lr=0.119500, batch loss=0.029254, epoch loss=0.046636
Batch=239, step=19440, lr=0.119250, batch loss=0.024384, epoch loss=0.071020
Batch=299, step=19500, lr=0.119000, batch loss=0.011587, epoch loss=0.082607
Batch=359, step=19560, lr=0.118750, batch loss=0.017001, epoch loss=0.099608
Batch=419, step=19620, lr=0.118500, batch loss=0.020342, epoch loss=0.119950
Batch=479, step=19680, lr=0.118250, batch loss=0.007793, epoch loss=0.127743
Batch=539, step=19740, lr=0.118000, batch loss=0.017112, epoch loss=0.144855
Batch=599, step=19800, lr=0.117750, batch loss=0.023144, epoch loss=0.167999
Batch=659, step=19860, lr=0.117500, batch loss=0.017978, epoch loss=0.185977
Batch=719, step=19920, lr=0.117250, batch loss=0.050936, epoch loss=0.236914
Batch=779, step=19980, lr=0.117000, batch loss=0.075962, epoch loss=0.312876
Batch=839, step=20040, lr=0.116750, batch loss=0.030792, epoch loss=0.343669
Batch=899, step=20100, lr=0.116500, batch loss=0.029322, epoch loss=0.372991
Batch=959, step=20160, lr=0.116250, batch loss=0.015159, epoch loss=0.388150
Batch=1019, step=20220, lr=0.116000, batch loss=0.019579, epoch loss=0.407729
Batch=1079, step=20280, lr=0.115750, batch loss=0.004853, epoch loss=0.412582
Batch=1139, step=20340, lr=0.115500, batch loss=0.018815, epoch loss=0.431397
Batch=1199, step=20400, lr=0.115250, batch loss=0.006756, epoch loss=0.438153
Epoch=16, step=20400, lr=0.115250, epoch loss=0.438153
Batch=59, step=20460, lr=0.115000, batch loss=0.002962, epoch loss=0.002962
Batch=119, step=20520, lr=0.114750, batch loss=0.010739, epoch loss=0.013701
Batch=179, step=20580, lr=0.114500, batch loss=0.025435, epoch loss=0.039136
Batch=239, step=20640, lr=0.114250, batch loss=0.015566, epoch loss=0.054702
Batch=299, step=20700, lr=0.114000, batch loss=0.005112, epoch loss=0.059813
Batch=359, step=20760, lr=0.113750, batch loss=0.014064, epoch loss=0.073877
Batch=419, step=20820, lr=0.113500, batch loss=0.015334, epoch loss=0.089211
Batch=479, step=20880, lr=0.113250, batch loss=0.005968, epoch loss=0.095179
Batch=539, step=20940, lr=0.113000, batch loss=0.016180, epoch loss=0.111359
Batch=599, step=21000, lr=0.112750, batch loss=0.018524, epoch loss=0.129883
Batch=659, step=21060, lr=0.112500, batch loss=0.014169, epoch loss=0.144052
Batch=719, step=21120, lr=0.112250, batch loss=0.043727, epoch loss=0.187779
Batch=779, step=21180, lr=0.112000, batch loss=0.065154, epoch loss=0.252933
Batch=839, step=21240, lr=0.111750, batch loss=0.024392, epoch loss=0.277326
Batch=899, step=21300, lr=0.111500, batch loss=0.033164, epoch loss=0.310489
Batch=959, step=21360, lr=0.111250, batch loss=0.011649, epoch loss=0.322138
Batch=1019, step=21420, lr=0.111000, batch loss=0.014850, epoch loss=0.336988
Batch=1079, step=21480, lr=0.110750, batch loss=0.000136, epoch loss=0.337124
Batch=1139, step=21540, lr=0.110500, batch loss=0.012335, epoch loss=0.349459
Batch=1199, step=21600, lr=0.110250, batch loss=0.005257, epoch loss=0.354716
Epoch=17, step=21600, lr=0.110250, epoch loss=0.354716
Batch=59, step=21660, lr=0.110000, batch loss=0.001976, epoch loss=0.001976
Batch=119, step=21720, lr=0.109750, batch loss=0.007142, epoch loss=0.009117
Batch=179, step=21780, lr=0.109500, batch loss=0.012249, epoch loss=0.021366
Batch=239, step=21840, lr=0.109250, batch loss=0.010098, epoch loss=0.031464
Batch=299, step=21900, lr=0.109000, batch loss=0.011527, epoch loss=0.042990
Batch=359, step=21960, lr=0.108750, batch loss=0.013597, epoch loss=0.056587
Batch=419, step=22020, lr=0.108500, batch loss=0.013056, epoch loss=0.069643
Batch=479, step=22080, lr=0.108250, batch loss=0.003948, epoch loss=0.073591
Batch=539, step=22140, lr=0.108000, batch loss=0.014918, epoch loss=0.088510
Batch=599, step=22200, lr=0.107750, batch loss=0.016172, epoch loss=0.104682
Batch=659, step=22260, lr=0.107500, batch loss=0.012573, epoch loss=0.117255
Batch=719, step=22320, lr=0.107250, batch loss=0.023085, epoch loss=0.140340
Batch=779, step=22380, lr=0.107000, batch loss=0.048093, epoch loss=0.188433
Batch=839, step=22440, lr=0.106750, batch loss=0.021749, epoch loss=0.210182
Batch=899, step=22500, lr=0.106500, batch loss=0.023013, epoch loss=0.233196
Batch=959, step=22560, lr=0.106250, batch loss=0.010266, epoch loss=0.243461
Batch=1019, step=22620, lr=0.106000, batch loss=0.009999, epoch loss=0.253461
Batch=1079, step=22680, lr=0.105750, batch loss=0.000403, epoch loss=0.253864
Batch=1139, step=22740, lr=0.105500, batch loss=0.010325, epoch loss=0.264189
Batch=1199, step=22800, lr=0.105250, batch loss=0.004686, epoch loss=0.268875
Epoch=18, step=22800, lr=0.105250, epoch loss=0.268875
Batch=59, step=22860, lr=0.105000, batch loss=0.001499, epoch loss=0.001499
Batch=119, step=22920, lr=0.104750, batch loss=0.006038, epoch loss=0.007537
Batch=179, step=22980, lr=0.104500, batch loss=0.010091, epoch loss=0.017628
Batch=239, step=23040, lr=0.104250, batch loss=0.009821, epoch loss=0.027449
Batch=299, step=23100, lr=0.104000, batch loss=0.001930, epoch loss=0.029379
Batch=359, step=23160, lr=0.103750, batch loss=0.010587, epoch loss=0.039966
Batch=419, step=23220, lr=0.103500, batch loss=0.010690, epoch loss=0.050657
Batch=479, step=23280, lr=0.103250, batch loss=0.002148, epoch loss=0.052805
Batch=539, step=23340, lr=0.103000, batch loss=0.016501, epoch loss=0.069306
Batch=599, step=23400, lr=0.102750, batch loss=0.013340, epoch loss=0.082646
Batch=659, step=23460, lr=0.102500, batch loss=0.010983, epoch loss=0.093629
Batch=719, step=23520, lr=0.102250, batch loss=0.013374, epoch loss=0.107003
Batch=779, step=23580, lr=0.102000, batch loss=0.022499, epoch loss=0.129502
Batch=839, step=23640, lr=0.101750, batch loss=0.025538, epoch loss=0.155040
Batch=899, step=23700, lr=0.101500, batch loss=0.023243, epoch loss=0.178283
Batch=959, step=23760, lr=0.101250, batch loss=0.008099, epoch loss=0.186381
Batch=1019, step=23820, lr=0.101000, batch loss=0.008447, epoch loss=0.194829
Batch=1079, step=23880, lr=0.100750, batch loss=0.001394, epoch loss=0.196223
Batch=1139, step=23940, lr=0.100500, batch loss=0.009368, epoch loss=0.205591
Batch=1199, step=24000, lr=0.100250, batch loss=0.004945, epoch loss=0.210536
Epoch=19, step=24000, lr=0.100250, epoch loss=0.210536
Half-moons scatterplot and decision boundary:
┌────────────────────────────────────────────────────────────────────────────────────────────────────┐
│********************************#*******************************************************************│
│**********************#*#*#######*###*#####*********************************************************│
│**********************#########################*****************************************************│
│*****************#**########*######*###########*###*************************************************│
│***************#################*###################************************************************│
│************######*#################*#################**********************************************│
│**********#*#####*########*#**************##*#########*#********************************************│
│***********########*##*#******************#*****##########*****************************************.│
│***********###########*************************############*************************************....│
│********######*####*********************************###*###*#*********************************......│
│*******######**##**********************************#*######*#******************************.........│
│*******##*##**##***********..........***************########*##**************************...........│
│*****#######************.......%...%%...***************#########************************..........%.│
│******######***********.........%........***************##*#####***********************.......%.%.%.│
│***#########**********.........%%%.%%.....**************#*#######*********************.......%.%%%%.│
│****#######**********..........%%%%.........************#########*******************.........%%.%%.%│
│**#######************..........%%%%%%%.......**************###*###*****************..........%%%%%%.│
│*##*####************...........%%%%%%%.........***********########****************...........%%%%%%.│
│*#######************...........%%%%%%%..........************#######**************............%%%%%%.│
│*##*####***********............%%.%%%%%..........************####**************.............%%%%%%%.│
│*#####*#***********.............%%%%%%%............**********##*###***********...............%%%%%..│
│#######***********.............%.%%%%%%.............*********#######*********..............%%%%.%%..│
│#####*#***********..............%%%%%%%...............*******#######********...............%%%%%%%%.│
│###*#*#**********...............%%%%%%%%%..............*******######*******................%%%%%%...│
│#######**********................%%%%%%%%...............*****###*###*****.................%%%%%%....│
│######**********.................%%%%%%%%%................***#*###******................%%%%%%%%%...│
│*#*##*#********...................%%%%%%%%%%...............***######***..................%%%%%%.....│
│#****##********....................%%%%%%%%%................***###*#**................%.%%%%%%%.....│
│**************.....................%.%%%%%%...................*******..................%.%%.%%......│
│**************.......................%..%%%%%%%................****...............%.%%%%%%%%%.......│
│*************.........................%.%%%.%%%%.................*................%%%%%%%.%.%.......│
│*************...........................%..%%%%..%................................%%%%%%%%..........│
│************.............................%%%%%%%%%%%........................%%..%%%%%%%%.%..........│
│************.............................%%.%%%%%%%%..%....................%..%%%.%%%%%%%...........│
│***********.................................%%%%.%%%%%%%%...............%.%%%%%%%%%%%%.%............│
│**********...................................%%%%%%%%%%%%%%%%%%%%%%.%%%%.%%%%%%%%%%%%%..............│
│**********....................................%%.%%%%%%%%%%%%%%%%%%%%%%.%%%%%%%%%%%.................│
│*********.........................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...................│
│*********............................................%%%.%%%%%%%%%%%%%%%%%%%%%......................│
│********................................................%...%%%%.%%.%%%%..%.........................│
└────────────────────────────────────────────────────────────────────────────────────────────────────┘
2025-03-21 15:33.12 ---> saved as "fc209f888770960864705337f349163810809bfa90866f3183ba38c32e5ef6b8"
Job succeeded
2025-03-21 15:33.13: Job succeeded
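
For reference, a minimal self-contained OCaml sketch of the shape of the reporting loop behind the metrics above, not the ocannl code that produced them. It assumes a roughly linear learning-rate decay (about 0.00025 every 60 steps, fitted approximately from the logged values; the log shows small rounding irregularities) and reproduces the observable relation that the printed "epoch loss" is the running sum of the printed "batch loss" values within the epoch. lr_at, fake_batch_loss and the constants are hypothetical placeholders.

let () = Random.self_init ()

(* Approximate linear schedule fitted to the logged values above:
   the learning rate drops by about 0.00025 every 60 steps. *)
let lr_at ~step = 0.20025 -. (float_of_int step /. 240_000.)

(* Hypothetical stand-in for the loss reported over one 60-batch window. *)
let fake_batch_loss () = Random.float 0.1

let run ~epochs ~reports_per_epoch ~batches_per_report =
  let step = ref 0 in
  for epoch = 0 to epochs - 1 do
    let epoch_loss = ref 0.0 in
    for report = 1 to reports_per_epoch do
      step := !step + batches_per_report;
      let batch = (report * batches_per_report) - 1 in
      let batch_loss = fake_batch_loss () in
      (* "epoch loss" above is the running sum of the reported batch losses. *)
      epoch_loss := !epoch_loss +. batch_loss;
      Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
        batch !step (lr_at ~step:!step) batch_loss !epoch_loss
    done;
    Printf.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n"
      epoch !step (lr_at ~step:!step) !epoch_loss
  done

let () = run ~epochs:20 ~reports_per_epoch:20 ~batches_per_report:60

With 1200 batches per epoch reported every 60 batches, this prints lines in the same Batch=59 ... Batch=1199 pattern seen in the log.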
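
A sketch of how such a character plot can be rasterised: generate the two half-moon point clouds, fill the background with the classifier's predicted region ('*' / '.'), and overlay the data points ('#' / '%'), mirroring the legend of the plot above. This is illustrative only; it does not use printbox-ext-plot or the ocannl API, and predict is a hypothetical linear stand-in for the trained network.

let pi = 4.0 *. atan 1.0

(* One noisy point on the upper (class 0) or lower (class 1) half-moon. *)
let moon_point ~noise cls =
  let t = Random.float pi in
  let x, y =
    if cls = 0 then (cos t, sin t)            (* upper half-moon *)
    else (1.0 -. cos t, 0.5 -. sin t)         (* lower half-moon, offset *)
  in
  (x +. Random.float noise -. (noise /. 2.), y +. Random.float noise -. (noise /. 2.))

(* Hypothetical stand-in for the trained classifier's decision rule. *)
let predict (x, y) = if y > 0.25 -. (0.5 *. (x -. 0.5)) then 0 else 1

let render ~width ~height ~points =
  let x_min, x_max, y_min, y_max = (-1.5, 2.5, -1.5, 1.5) in
  let grid = Array.make_matrix height width ' ' in
  (* Background: predicted class at the centre of each character cell. *)
  for r = 0 to height - 1 do
    for c = 0 to width - 1 do
      let x = x_min +. ((float_of_int c +. 0.5) /. float_of_int width) *. (x_max -. x_min) in
      let y = y_max -. ((float_of_int r +. 0.5) /. float_of_int height) *. (y_max -. y_min) in
      grid.(r).(c) <- (if predict (x, y) = 0 then '*' else '.')
    done
  done;
  (* Overlay the data points: '#' for class 0, '%' for class 1. *)
  List.iter
    (fun (cls, (x, y)) ->
      let c = int_of_float ((x -. x_min) /. (x_max -. x_min) *. float_of_int width) in
      let r = int_of_float ((y_max -. y) /. (y_max -. y_min) *. float_of_int height) in
      if r >= 0 && r < height && c >= 0 && c < width then
        grid.(r).(c) <- (if cls = 0 then '#' else '%'))
    points;
  Array.iter (fun row -> print_endline (String.init width (Array.get row))) grid

let () =
  Random.self_init ();
  let points =
    List.init 400 (fun i -> let cls = i mod 2 in (cls, moon_point ~noise:0.2 cls))
  in
  render ~width:100 ~height:40 ~points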