2025-03-20 22:11.56: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (4ee46a20839684c520fd8d1cc91b4a5416d1e783) (linux-x86_64:debian-12-5.3_opam-2.3)
Base: ocaml/opam:debian-12-ocaml-5.3@sha256:bc8aaa1230ecb57c5762546b9227e538394dff3b0a270fc783baf8bc6bd0bf51
Opam project build
To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 4ee46a20
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-5.3@sha256:bc8aaa1230ecb57c5762546b9227e538394dff3b0a270fc783baf8bc6bd0bf51
# debian-12-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
    opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
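To reproduce the same steps in a local opam switch instead of Docker, a sketch based on the RUN lines above (it assumes an existing OCaml 5.3.0 switch and, unlike the CI run, lets the opam solver pick dependency versions rather than installing the exact $DEPS set):

# Sketch only: local approximation of the CI steps, not the exact CI environment.
git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 4ee46a20
# Pin the two packages without installing them yet, as the Dockerfile does.
opam pin add -yn neural_nets_lib.dev './'
opam pin add -yn arrayjit.dev './'
# System dependencies (on Debian these are libffi-dev and pkg-config, per the log below),
# then the OCaml dependencies.
opam install --depext-only -y neural_nets_lib arrayjit
opam install -y --deps-only neural_nets_lib arrayjit
# Build, check and run the test suite, as in the final RUN line above.
opam exec -- dune build @install @check @runtest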
END-REPRO-BLOCK
2025-03-20 22:11.56: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-5.3@sha256:bc8aaa1230ecb57c5762546b9227e538394dff3b0a270fc783baf8bc6bd0bf51-debian-12-5.3_opam-2.3-3fcdf15be1e8f7dcae915b4cdb940fd5"
2025-03-20 22:11.56: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-5.3@sha256:bc8aaa1230ecb57c5762546b9227e538394dff3b0a270fc783baf8bc6bd0bf51)
 (comment debian-12-5.3_opam-2.3)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
 (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'"))
 (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'"))
 (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS"))
 (copy (src .) (dst /src))
 (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)
2025-03-20 22:11.56: Waiting for resource in pool OCluster
2025-03-20 22:11.56: Waiting for worker…
2025-03-20 22:17.03: Got resource from pool OCluster
Building on toxis.caelum.ci.dev
All commits already cached
HEAD is now at 4ee46a20 Update GitHub actions
(from ocaml/opam:debian-12-ocaml-5.3@sha256:bc8aaa1230ecb57c5762546b9227e538394dff3b0a270fc783baf8bc6bd0bf51)
2025-03-20 22:17.05 ---> using "4c4507dc2c915bc2fe880d11485a066845ce2ccaf87ae0c7a8ec26d34c0670c5" from cache
/: (comment debian-12-5.3_opam-2.3)
/: (user (uid 1000) (gid 1000))
/: (env CLICOLOR_FORCE 1)
/: (env OPAMCOLOR always)
/: (workdir /src)
/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-03-20 22:17.05 ---> using "172407e1e661134309aea2eb4045783b25179cce1174e2b787c71a0e229d6f99" from cache
/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
  - you won't be able to use mercurial repositories unless you install the hg command on your system.
  - you won't be able to use darcs repositories unless you install the darcs command on your system.
This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.
Continue? [y/n] y
Format upgrade done.
[NOTE] The 'jobs' option was reset, its value was 39 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using:
    opam option jobs=39 --global
<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[ERROR] Could not update repository "opam-repository-archive": "/usr/bin/git fetch -q" exited with code 128 "fatal: unable to access 'https://github.com/ocaml/opam-repository-archive/': Could not resolve host: github.com"
[default] synchronised from file:///home/opam/opam-repository
2025-03-20 22:17.05 ---> using "09dad00ef8e83b2a63e7fe35b5cddf78fe4785748596ca052e2c9a3bfcd80b21" from cache
/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.3.0
2.3.0
2025-03-20 22:17.05 ---> using "4c6e6131f0e3a298407e281b1e9dfd9b93e12d2f56fdd382c559c07f8223d814" from cache
/src: (workdir /src)
/src: (run (shell "sudo chown opam /src"))
2025-03-20 22:17.05 ---> using "a6d2d725d9ab586b26e2389e81730115d2ead44f3f6db0f5c35ba85c11f7e8a2" from cache
/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
 * branch                  master     -> FETCH_HEAD
   862a7640b1..6cf83229dd  master     -> origin/master
4e25d0cf5f Merge pull request #27651 from lukstafi/opam-publish-ppx_minidebug.2.1.0
<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[opam-repository-archive] synchronised from git+https://github.com/ocaml/opam-repository-archive
[default] synchronised from file:///home/opam/opam-repository
Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages. Nothing to do. # To update the current shell environment, run: eval $(opam env) 2025-03-20 22:17.05 ---> using "16e82825fcbf79b3bd0cf9f43e22f07b70fcacf3627db24e2aace55a03ddf26a" from cache /src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./)) 2025-03-20 22:17.05 ---> using "55b8a186f7a7e77febcc0889f481a9c4b8be0c7191d53281121d78ef733b6ee6" from cache /src: (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'")) [neural_nets_lib.dev] synchronised (file:///src) neural_nets_lib is now pinned to file:///src (version dev) [arrayjit.dev] synchronised (file:///src) arrayjit is now pinned to file:///src (version dev) 2025-03-20 22:17.05 ---> using "9507a0af8ff6f3ef3a34da31b485a5849b0212c1002a29988a42f08493b902bc" from cache /src: (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'")) 2025-03-20 22:17.05 ---> using "bd2c6f13d320852a12f09e2cc69fb0fcaae24227a75acc4a5da2016becbcbd77" from cache /src: (env DEPS "angstrom.0.16.1 backoff.0.1.1 base.v0.17.1 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.17.2 dune-configurator.3.17.2 fieldslib.v0.17.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0") /src: (env CI true) /src: (env OCAMLCI true) /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS")) + /usr/bin/sudo "apt-get" "update" - Get:1 http://deb.debian.org/debian bookworm InRelease [151 kB] - Get:2 http://deb.debian.org/debian bookworm-updates InRelease [55.4 kB] - Get:3 http://deb.debian.org/debian-security bookworm-security InRelease [48.0 kB] - Get:4 http://deb.debian.org/debian bookworm/main amd64 Packages [8792 kB] - Get:5 http://deb.debian.org/debian-security bookworm-security/main amd64 Packages [249 kB] - Fetched 9296 kB in 2s (5648 kB/s) - Reading package lists... - <><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><> [arrayjit.dev] synchronised (file:///src) [neural_nets_lib.dev] synchronised (file:///src) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). 
[NOTE] Package ocaml-config is already installed (current version is 3). [NOTE] Package ocaml-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml is already installed (current version is 5.3.0). [NOTE] Package base-unix is already installed (current version is base). [NOTE] Package base-threads is already installed (current version is base). [NOTE] Package base-nnp is already installed (current version is base). [NOTE] Package base-effects is already installed (current version is base). [NOTE] Package base-domains is already installed (current version is base). [NOTE] Package base-bigarray is already installed (current version is base). The following system packages will first need to be installed: libffi-dev pkg-config <><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><> + /usr/bin/sudo "apt-get" "install" "-qq" "-yy" "libffi-dev" "pkg-config" - debconf: delaying package configuration, since apt-utils is not installed - Selecting previously unselected package libffi-dev:amd64. - (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 18776 files and directories currently installed.) - Preparing to unpack .../libffi-dev_3.4.4-1_amd64.deb ... - Unpacking libffi-dev:amd64 (3.4.4-1) ... - Selecting previously unselected package libpkgconf3:amd64. - Preparing to unpack .../libpkgconf3_1.8.1-1_amd64.deb ... - Unpacking libpkgconf3:amd64 (1.8.1-1) ... - Selecting previously unselected package pkgconf-bin. - Preparing to unpack .../pkgconf-bin_1.8.1-1_amd64.deb ... - Unpacking pkgconf-bin (1.8.1-1) ... - Selecting previously unselected package pkgconf:amd64. - Preparing to unpack .../pkgconf_1.8.1-1_amd64.deb ... - Unpacking pkgconf:amd64 (1.8.1-1) ... - Selecting previously unselected package pkg-config:amd64. - Preparing to unpack .../pkg-config_1.8.1-1_amd64.deb ... - Unpacking pkg-config:amd64 (1.8.1-1) ... - Setting up libffi-dev:amd64 (3.4.4-1) ... - Setting up libpkgconf3:amd64 (1.8.1-1) ... - Setting up pkgconf-bin (1.8.1-1) ... - Setting up pkgconf:amd64 (1.8.1-1) ... - Setting up pkg-config:amd64 (1.8.1-1) ... - Processing triggers for libc-bin (2.36-9+deb12u9) ... 2025-03-20 22:17.05 ---> using "ade82b1c1d358a121a2f7feec800627f7953ed24cd9173c7b7f57e46661825be" from cache /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS")) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). [NOTE] Package ocaml-config is already installed (current version is 3). [NOTE] Package ocaml-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml is already installed (current version is 5.3.0). [NOTE] Package base-unix is already installed (current version is base). [NOTE] Package base-threads is already installed (current version is base). 
[NOTE] Package base-nnp is already installed (current version is base). [NOTE] Package base-effects is already installed (current version is base). [NOTE] Package base-domains is already installed (current version is base). [NOTE] Package base-bigarray is already installed (current version is base). The following actions will be performed: === install 65 packages - install angstrom 0.16.1 - install backoff 0.1.1 - install base v0.17.1 - install bigarray-compat 1.1.0 - install bigstringaf 0.10.0 - install conf-libffi 2.0.0 - install conf-pkg-config 4 - install cppo 1.8.0 - install csexp 1.5.2 - install ctypes 0.23.0 - install ctypes-foreign 0.23.0 - install dune 3.17.2 - install dune-configurator 3.17.2 - install fieldslib v0.17.0 - install integers 0.7.0 - install jane-street-headers v0.17.0 - install jst-config v0.17.0 - install mtime 2.1.0 - install multicore-magic 2.3.1 - install num 1.5-1 - install ocaml-compiler-libs v0.17.0 - install ocaml-syntax-shims 1.0.0 - install ocaml_intrinsics_kernel v0.17.1 - install ocamlbuild 0.16.1 - install ocamlfind 1.9.8 - install parsexp v0.17.0 - install ppx_assert v0.17.0 - install ppx_base v0.17.0 - install ppx_cold v0.17.0 - install ppx_compare v0.17.0 - install ppx_derivers 1.2.1 - install ppx_deriving 6.0.3 - install ppx_enumerate v0.17.0 - install ppx_expect v0.17.2 - install ppx_fields_conv v0.17.0 - install ppx_globalize v0.17.0 - install ppx_hash v0.17.0 - install ppx_here v0.17.0 - install ppx_inline_test v0.17.0 - install ppx_minidebug 2.1.0 - install ppx_optcomp v0.17.0 - install ppx_sexp_conv v0.17.0 - install ppx_string v0.17.0 - install ppx_variants_conv v0.17.0 - install ppxlib 0.35.0 - install ppxlib_jane v0.17.2 - install printbox 0.12 - install printbox-ext-plot 0.12 - install printbox-html 0.12 - install printbox-md 0.12 - install printbox-text 0.12 - install ptime 1.2.0 - install re 1.12.0 - install saturn_lockfree 0.5.0 - install seq base - install sexplib v0.17.0 - install sexplib0 v0.17.0 - install stdio v0.17.0 - install stdlib-shims 0.3.0 - install time_now v0.17.0 - install topkg 1.0.8 - install tyxml 4.6.0 - install uucp 16.0.0 - install uutf 1.0.4 - install variantslib v0.17.0 <><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><> -> retrieved backoff.0.1.1 (cached) -> retrieved bigarray-compat.1.1.0 (cached) -> retrieved angstrom.0.16.1 (cached) -> retrieved base.v0.17.1 (cached) -> retrieved bigstringaf.0.10.0 (cached) -> retrieved cppo.1.8.0 (cached) -> installed conf-pkg-config.4 -> retrieved csexp.1.5.2 (cached) -> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached) -> installed conf-libffi.2.0.0 -> retrieved fieldslib.v0.17.0 (cached) -> retrieved integers.0.7.0 (cached) -> retrieved jane-street-headers.v0.17.0 (cached) -> retrieved jst-config.v0.17.0 (cached) -> retrieved mtime.2.1.0 (cached) -> retrieved multicore-magic.2.3.1 (cached) -> retrieved num.1.5-1 (cached) -> retrieved ocaml-compiler-libs.v0.17.0 (cached) -> retrieved ocaml-syntax-shims.1.0.0 (cached) -> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached) -> retrieved ocamlbuild.0.16.1 (cached) -> retrieved ocamlfind.1.9.8 (cached) -> retrieved parsexp.v0.17.0 (cached) -> retrieved ppx_assert.v0.17.0 (cached) -> retrieved ppx_base.v0.17.0 (cached) -> retrieved ppx_cold.v0.17.0 (cached) -> retrieved ppx_compare.v0.17.0 (cached) -> retrieved ppx_derivers.1.2.1 (cached) -> retrieved ppx_enumerate.v0.17.0 (cached) -> retrieved ppx_deriving.6.0.3 (cached) -> retrieved ppx_expect.v0.17.2 (cached) -> retrieved 
ppx_fields_conv.v0.17.0 (cached) -> retrieved ppx_globalize.v0.17.0 (cached) -> retrieved ppx_hash.v0.17.0 (cached) -> retrieved ppx_here.v0.17.0 (cached) -> retrieved ppx_inline_test.v0.17.0 (cached) -> retrieved ppx_optcomp.v0.17.0 (cached) -> retrieved ppx_sexp_conv.v0.17.0 (cached) -> retrieved dune.3.17.2, dune-configurator.3.17.2 (cached) -> installed num.1.5-1 -> retrieved ppx_minidebug.2.1.0 (cached) -> retrieved ppx_string.v0.17.0 (cached) -> retrieved ppx_variants_conv.v0.17.0 (cached) -> retrieved ppxlib_jane.v0.17.2 (cached) -> retrieved ptime.1.2.0 (cached) -> retrieved re.1.12.0 (cached) -> retrieved saturn_lockfree.0.5.0 (cached) -> retrieved seq.base (cached) -> installed seq.base -> retrieved sexplib.v0.17.0 (cached) -> retrieved sexplib0.v0.17.0 (cached) -> retrieved stdio.v0.17.0 (cached) -> retrieved stdlib-shims.0.3.0 (cached) -> retrieved time_now.v0.17.0 (cached) -> retrieved ppxlib.0.35.0 (cached) -> retrieved topkg.1.0.8 (cached) -> retrieved tyxml.4.6.0 (cached) -> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached) -> retrieved uutf.1.0.4 (cached) -> retrieved variantslib.v0.17.0 (cached) -> retrieved uucp.16.0.0 (cached) -> installed ocamlfind.1.9.8 -> installed ocamlbuild.0.16.1 -> installed topkg.1.0.8 -> installed uutf.1.0.4 -> installed mtime.2.1.0 -> installed ptime.1.2.0 -> installed dune.3.17.2 -> installed ppx_derivers.1.2.1 -> installed jane-street-headers.v0.17.0 -> installed printbox.0.12 -> installed csexp.1.5.2 -> installed backoff.0.1.1 -> installed bigarray-compat.1.1.0 -> installed multicore-magic.2.3.1 -> installed ocaml_intrinsics_kernel.v0.17.1 -> installed sexplib0.v0.17.0 -> installed stdlib-shims.0.3.0 -> installed ocaml-compiler-libs.v0.17.0 -> installed cppo.1.8.0 -> installed ocaml-syntax-shims.1.0.0 -> installed re.1.12.0 -> installed integers.0.7.0 -> installed saturn_lockfree.0.5.0 -> installed dune-configurator.3.17.2 -> installed parsexp.v0.17.0 -> installed bigstringaf.0.10.0 -> installed angstrom.0.16.1 -> installed sexplib.v0.17.0 -> installed tyxml.4.6.0 -> installed printbox-html.0.12 -> installed ctypes.0.23.0 -> installed base.v0.17.1 -> installed ctypes-foreign.0.23.0 -> installed variantslib.v0.17.0 -> installed fieldslib.v0.17.0 -> installed stdio.v0.17.0 -> installed uucp.16.0.0 -> installed printbox-text.0.12 -> installed printbox-md.0.12 -> installed printbox-ext-plot.0.12 -> installed ppxlib.0.35.0 -> installed ppxlib_jane.v0.17.2 -> installed ppx_optcomp.v0.17.0 -> installed ppx_cold.v0.17.0 -> installed ppx_here.v0.17.0 -> installed ppx_variants_conv.v0.17.0 -> installed ppx_fields_conv.v0.17.0 -> installed ppx_enumerate.v0.17.0 -> installed ppx_globalize.v0.17.0 -> installed ppx_deriving.6.0.3 -> installed ppx_compare.v0.17.0 -> installed ppx_sexp_conv.v0.17.0 -> installed ppx_hash.v0.17.0 -> installed ppx_assert.v0.17.0 -> installed ppx_base.v0.17.0 -> installed ppx_minidebug.2.1.0 -> installed jst-config.v0.17.0 -> installed ppx_string.v0.17.0 -> installed time_now.v0.17.0 -> installed ppx_inline_test.v0.17.0 -> installed ppx_expect.v0.17.2 Done. # To update the current shell environment, run: eval $(opam env) 2025-03-20 22:17.05 ---> using "640cfa0ea8fc3d21e3ffc8de32c46e4abd4fe6b886f8f1fd53c088465e1a3fa1" from cache /src: (copy (src .) 
(dst /src)) 2025-03-20 22:17.06 ---> saved as "0b64535d3f7010d8325de08ab1d9c8c9502f40638708688605f6116e71356069" /src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")) (cd _build/default/test_ppx && ./test_ppx_op.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/default/test_ppx && ./test_ppx_op_expected.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/ad42805ae5047f3244b63b573745312a/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! 
Reading configuration defaults from /src/_build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/af1bc50dcbd65ce7182b51973a3676dd/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/default/test && ./moons_demo_parallel_run.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config. 
Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file ("Set log_level to" 1) └─{orphaned from #2} Retrieving commandline, environment, or config file variable ocannl_backend Found cc, in the config file Retrieving commandline, environment, or config file variable ocannl_ll_ident_style Not found, using default heuristic Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level Not found, using default 3 Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command Not found, using default gcc Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout Not found, using default false Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453 Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087 Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382 Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039 Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216 Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512 Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081 Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141 Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385 Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263 Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603 Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902 Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024 Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685 Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407 Batch=959, step=960, lr=0.196250, batch loss=0.245136, epoch loss=44.035543 Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049 Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829 Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269 Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952 Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952 Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138 Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381 Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025 Batch=239, step=1440, lr=0.194250, batch loss=0.347897, epoch loss=1.058921 Batch=299, step=1500, lr=0.194000, batch loss=0.247348, epoch loss=1.306269 Batch=359, step=1560, lr=0.193750, batch loss=0.316559, epoch loss=1.622828 Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.935563 Batch=479, step=1680, lr=0.193000, batch loss=0.276268, epoch loss=2.211831 Batch=539, step=1740, lr=0.193000, batch loss=0.209810, epoch loss=2.421642 Batch=599, step=1800, lr=0.192750, batch loss=0.250421, epoch loss=2.672062 Batch=659, step=1860, lr=0.192500, batch loss=0.367173, epoch loss=3.039235 Batch=719, step=1920, lr=0.192250, batch loss=0.354950, epoch loss=3.394185 Batch=779, step=1980, lr=0.192000, batch loss=0.381359, epoch loss=3.775544 Batch=839, step=2040, lr=0.191500, batch loss=0.339643, epoch loss=4.115187 Batch=899, step=2100, lr=0.191250, batch loss=0.295232, epoch loss=4.410418 Batch=959, step=2160, lr=0.191000, batch loss=0.214065, epoch loss=4.624483 Batch=1019, 
step=2220, lr=0.190750, batch loss=0.330831, epoch loss=4.955314 Batch=1079, step=2280, lr=0.190500, batch loss=0.208175, epoch loss=5.163489 Batch=1139, step=2340, lr=0.190250, batch loss=0.278198, epoch loss=5.441687 Batch=1199, step=2400, lr=0.190000, batch loss=0.220784, epoch loss=5.662471 Epoch=1, step=2400, lr=0.190000, epoch loss=5.662471 Batch=59, step=2460, lr=0.189750, batch loss=0.230393, epoch loss=0.230393 Batch=119, step=2520, lr=0.189500, batch loss=0.195906, epoch loss=0.426299 Batch=179, step=2580, lr=0.189250, batch loss=0.221159, epoch loss=0.647458 Batch=239, step=2640, lr=0.189000, batch loss=0.328100, epoch loss=0.975558 Batch=299, step=2700, lr=0.188750, batch loss=0.202934, epoch loss=1.178492 Batch=359, step=2760, lr=0.188500, batch loss=0.288268, epoch loss=1.466760 Batch=419, step=2820, lr=0.188500, batch loss=0.280173, epoch loss=1.746932 Batch=479, step=2880, lr=0.188000, batch loss=0.251477, epoch loss=1.998409 Batch=539, step=2940, lr=0.187750, batch loss=0.191279, epoch loss=2.189689 Batch=599, step=3000, lr=0.187500, batch loss=0.224960, epoch loss=2.414649 Batch=659, step=3060, lr=0.187250, batch loss=0.335776, epoch loss=2.750424 Batch=719, step=3120, lr=0.187000, batch loss=0.331419, epoch loss=3.081843 Batch=779, step=3180, lr=0.187000, batch loss=0.357933, epoch loss=3.439776 Batch=839, step=3240, lr=0.186500, batch loss=0.325644, epoch loss=3.765419 Batch=899, step=3300, lr=0.186250, batch loss=0.292312, epoch loss=4.057732 Batch=959, step=3360, lr=0.186000, batch loss=0.244925, epoch loss=4.302656 Batch=1019, step=3420, lr=0.185750, batch loss=0.361687, epoch loss=4.664343 Batch=1079, step=3480, lr=0.185500, batch loss=0.221918, epoch loss=4.886262 Batch=1139, step=3540, lr=0.185250, batch loss=0.263934, epoch loss=5.150196 Batch=1199, step=3600, lr=0.185250, batch loss=0.199942, epoch loss=5.350138 Epoch=2, step=3600, lr=0.185250, epoch loss=5.350138 Batch=59, step=3660, lr=0.184750, batch loss=0.221226, epoch loss=0.221226 Batch=119, step=3720, lr=0.184750, batch loss=0.184950, epoch loss=0.406175 Batch=179, step=3780, lr=0.184250, batch loss=0.211390, epoch loss=0.617565 Batch=239, step=3840, lr=0.184000, batch loss=0.317001, epoch loss=0.934566 Batch=299, step=3900, lr=0.184000, batch loss=0.209161, epoch loss=1.143727 Batch=359, step=3960, lr=0.183500, batch loss=0.285710, epoch loss=1.429438 Batch=419, step=4020, lr=0.183500, batch loss=0.278151, epoch loss=1.707589 Batch=479, step=4080, lr=0.183250, batch loss=0.255081, epoch loss=1.962670 Batch=539, step=4140, lr=0.183000, batch loss=0.199807, epoch loss=2.162478 Batch=599, step=4200, lr=0.182750, batch loss=0.242799, epoch loss=2.405277 Batch=659, step=4260, lr=0.182500, batch loss=0.327693, epoch loss=2.732969 Batch=719, step=4320, lr=0.182250, batch loss=0.330986, epoch loss=3.063955 Batch=779, step=4380, lr=0.182000, batch loss=0.349663, epoch loss=3.413619 Batch=839, step=4440, lr=0.181750, batch loss=0.317960, epoch loss=3.731579 Batch=899, step=4500, lr=0.181500, batch loss=0.285655, epoch loss=4.017234 Batch=959, step=4560, lr=0.181250, batch loss=0.239917, epoch loss=4.257151 Batch=1019, step=4620, lr=0.181000, batch loss=0.332760, epoch loss=4.589911 Batch=1079, step=4680, lr=0.180750, batch loss=0.195964, epoch loss=4.785875 Batch=1139, step=4740, lr=0.180500, batch loss=0.228998, epoch loss=5.014873 Batch=1199, step=4800, lr=0.180250, batch loss=0.193268, epoch loss=5.208141 Epoch=3, step=4800, lr=0.180250, epoch loss=5.208141 Batch=59, step=4860, lr=0.180000, batch 
loss=0.234213, epoch loss=0.234213 Batch=119, step=4920, lr=0.179750, batch loss=0.192017, epoch loss=0.426230 Batch=179, step=4980, lr=0.179250, batch loss=0.205748, epoch loss=0.631978 Batch=239, step=5040, lr=0.179250, batch loss=0.307576, epoch loss=0.939553 Batch=299, step=5100, lr=0.179000, batch loss=0.205940, epoch loss=1.145494 Batch=359, step=5160, lr=0.178750, batch loss=0.270847, epoch loss=1.416341 Batch=419, step=5220, lr=0.178500, batch loss=0.265457, epoch loss=1.681797 Batch=479, step=5280, lr=0.178250, batch loss=0.242005, epoch loss=1.923802 Batch=539, step=5340, lr=0.178000, batch loss=0.195102, epoch loss=2.118905 Batch=599, step=5400, lr=0.177750, batch loss=0.243572, epoch loss=2.362476 Batch=659, step=5460, lr=0.177500, batch loss=0.317055, epoch loss=2.679532 Batch=719, step=5520, lr=0.177000, batch loss=0.317858, epoch loss=2.997389 Batch=779, step=5580, lr=0.177000, batch loss=0.343075, epoch loss=3.340464 Batch=839, step=5640, lr=0.176750, batch loss=0.308296, epoch loss=3.648760 Batch=899, step=5700, lr=0.176500, batch loss=0.273780, epoch loss=3.922541 Batch=959, step=5760, lr=0.176250, batch loss=0.210895, epoch loss=4.133435 Batch=1019, step=5820, lr=0.176000, batch loss=0.338590, epoch loss=4.472025 Batch=1079, step=5880, lr=0.175750, batch loss=0.201448, epoch loss=4.673474 Batch=1139, step=5940, lr=0.175250, batch loss=0.242937, epoch loss=4.916411 Batch=1199, step=6000, lr=0.175250, batch loss=0.185384, epoch loss=5.101795 Epoch=4, step=6000, lr=0.175250, epoch loss=5.101795 Batch=59, step=6060, lr=0.175000, batch loss=0.226384, epoch loss=0.226384 Batch=119, step=6120, lr=0.174500, batch loss=0.184067, epoch loss=0.410452 Batch=179, step=6180, lr=0.174500, batch loss=0.201419, epoch loss=0.611871 Batch=239, step=6240, lr=0.174250, batch loss=0.301233, epoch loss=0.913104 Batch=299, step=6300, lr=0.174000, batch loss=0.205854, epoch loss=1.118958 Batch=359, step=6360, lr=0.173750, batch loss=0.264843, epoch loss=1.383801 Batch=419, step=6420, lr=0.173500, batch loss=0.261375, epoch loss=1.645176 Batch=479, step=6480, lr=0.173250, batch loss=0.241105, epoch loss=1.886281 Batch=539, step=6540, lr=0.173000, batch loss=0.196389, epoch loss=2.082671 Batch=599, step=6600, lr=0.172750, batch loss=0.232272, epoch loss=2.314943 Batch=659, step=6660, lr=0.172500, batch loss=0.313635, epoch loss=2.628578 Batch=719, step=6720, lr=0.172250, batch loss=0.316645, epoch loss=2.945223 Batch=779, step=6780, lr=0.171750, batch loss=0.330985, epoch loss=3.276209 Batch=839, step=6840, lr=0.171750, batch loss=0.306191, epoch loss=3.582400 Batch=899, step=6900, lr=0.171500, batch loss=0.263879, epoch loss=3.846279 Batch=959, step=6960, lr=0.171250, batch loss=0.214559, epoch loss=4.060838 Batch=1019, step=7020, lr=0.171000, batch loss=0.321653, epoch loss=4.382491 Batch=1079, step=7080, lr=0.170750, batch loss=0.180103, epoch loss=4.562593 Batch=1139, step=7140, lr=0.170500, batch loss=0.217851, epoch loss=4.780444 Batch=1199, step=7200, lr=0.170250, batch loss=0.182263, epoch loss=4.962707 Epoch=5, step=7200, lr=0.170250, epoch loss=4.962707 Batch=59, step=7260, lr=0.170000, batch loss=0.241630, epoch loss=0.241630 Batch=119, step=7320, lr=0.169750, batch loss=0.180956, epoch loss=0.422585 Batch=179, step=7380, lr=0.169500, batch loss=0.195232, epoch loss=0.617817 Batch=239, step=7440, lr=0.169250, batch loss=0.290913, epoch loss=0.908729 Batch=299, step=7500, lr=0.169000, batch loss=0.208404, epoch loss=1.117133 Batch=359, step=7560, lr=0.168750, batch loss=0.260984, epoch 
loss=1.378117 Batch=419, step=7620, lr=0.168500, batch loss=0.255059, epoch loss=1.633176 Batch=479, step=7680, lr=0.168250, batch loss=0.238440, epoch loss=1.871617 Batch=539, step=7740, lr=0.168000, batch loss=0.186019, epoch loss=2.057635 Batch=599, step=7800, lr=0.167750, batch loss=0.227460, epoch loss=2.285096 Batch=659, step=7860, lr=0.167500, batch loss=0.304202, epoch loss=2.589298 Batch=719, step=7920, lr=0.167250, batch loss=0.308416, epoch loss=2.897714 Batch=779, step=7980, lr=0.167000, batch loss=0.324741, epoch loss=3.222455 Batch=839, step=8040, lr=0.166750, batch loss=0.292721, epoch loss=3.515176 Batch=899, step=8100, lr=0.166500, batch loss=0.261126, epoch loss=3.776302 Batch=959, step=8160, lr=0.166250, batch loss=0.203184, epoch loss=3.979485 Batch=1019, step=8220, lr=0.166000, batch loss=0.321375, epoch loss=4.300861 Batch=1079, step=8280, lr=0.165750, batch loss=0.199501, epoch loss=4.500361 Batch=1139, step=8340, lr=0.165500, batch loss=0.219894, epoch loss=4.720255 Batch=1199, step=8400, lr=0.165250, batch loss=0.174105, epoch loss=4.894360 Epoch=6, step=8400, lr=0.165250, epoch loss=4.894360 Batch=59, step=8460, lr=0.165000, batch loss=0.209302, epoch loss=0.209302 Batch=119, step=8520, lr=0.164750, batch loss=0.173466, epoch loss=0.382768 Batch=179, step=8580, lr=0.164500, batch loss=0.187619, epoch loss=0.570387 Batch=239, step=8640, lr=0.164250, batch loss=0.277371, epoch loss=0.847758 Batch=299, step=8700, lr=0.164000, batch loss=0.192321, epoch loss=1.040079 Batch=359, step=8760, lr=0.163750, batch loss=0.249727, epoch loss=1.289806 Batch=419, step=8820, lr=0.163500, batch loss=0.243547, epoch loss=1.533354 Batch=479, step=8880, lr=0.163250, batch loss=0.229931, epoch loss=1.763285 Batch=539, step=8940, lr=0.163000, batch loss=0.176401, epoch loss=1.939686 Batch=599, step=9000, lr=0.162750, batch loss=0.219604, epoch loss=2.159290 Batch=659, step=9060, lr=0.162500, batch loss=0.292899, epoch loss=2.452189 Batch=719, step=9120, lr=0.162250, batch loss=0.297338, epoch loss=2.749527 Batch=779, step=9180, lr=0.162000, batch loss=0.313872, epoch loss=3.063399 Batch=839, step=9240, lr=0.161750, batch loss=0.281856, epoch loss=3.345254 Batch=899, step=9300, lr=0.161500, batch loss=0.252496, epoch loss=3.597750 Batch=959, step=9360, lr=0.161250, batch loss=0.187851, epoch loss=3.785601 Batch=1019, step=9420, lr=0.161000, batch loss=0.316916, epoch loss=4.102516 Batch=1079, step=9480, lr=0.160750, batch loss=0.185443, epoch loss=4.287959 Batch=1139, step=9540, lr=0.160500, batch loss=0.212472, epoch loss=4.500430 Batch=1199, step=9600, lr=0.160250, batch loss=0.167301, epoch loss=4.667732 Epoch=7, step=9600, lr=0.160250, epoch loss=4.667732 Batch=59, step=9660, lr=0.160000, batch loss=0.201424, epoch loss=0.201424 Batch=119, step=9720, lr=0.159750, batch loss=0.163795, epoch loss=0.365219 Batch=179, step=9780, lr=0.159500, batch loss=0.178637, epoch loss=0.543856 Batch=239, step=9840, lr=0.159250, batch loss=0.262823, epoch loss=0.806679 Batch=299, step=9900, lr=0.159000, batch loss=0.181869, epoch loss=0.988547 Batch=359, step=9960, lr=0.158750, batch loss=0.242789, epoch loss=1.231337 Batch=419, step=10020, lr=0.158250, batch loss=0.238010, epoch loss=1.469347 Batch=479, step=10080, lr=0.158250, batch loss=0.226765, epoch loss=1.696112 Batch=539, step=10140, lr=0.158000, batch loss=0.171151, epoch loss=1.867263 Batch=599, step=10200, lr=0.157750, batch loss=0.202538, epoch loss=2.069800 Batch=659, step=10260, lr=0.157500, batch loss=0.281297, epoch loss=2.351098 
Batch=719, step=10320, lr=0.157250, batch loss=0.285837, epoch loss=2.636934 Batch=779, step=10380, lr=0.157000, batch loss=0.294570, epoch loss=2.931505 Batch=839, step=10440, lr=0.156750, batch loss=0.273759, epoch loss=3.205263 Batch=899, step=10500, lr=0.156500, batch loss=0.236347, epoch loss=3.441610 Batch=959, step=10560, lr=0.156250, batch loss=0.180960, epoch loss=3.622570 Batch=1019, step=10620, lr=0.156000, batch loss=0.295354, epoch loss=3.917924 Batch=1079, step=10680, lr=0.155750, batch loss=0.178583, epoch loss=4.096508 Batch=1139, step=10740, lr=0.155500, batch loss=0.198317, epoch loss=4.294825 Batch=1199, step=10800, lr=0.155250, batch loss=0.155447, epoch loss=4.450272 Epoch=8, step=10800, lr=0.155250, epoch loss=4.450272 Batch=59, step=10860, lr=0.155000, batch loss=0.179216, epoch loss=0.179216 Batch=119, step=10920, lr=0.154750, batch loss=0.151774, epoch loss=0.330990 Batch=179, step=10980, lr=0.154500, batch loss=0.166411, epoch loss=0.497401 Batch=239, step=11040, lr=0.154250, batch loss=0.247192, epoch loss=0.744593 Batch=299, step=11100, lr=0.154000, batch loss=0.168552, epoch loss=0.913145 Batch=359, step=11160, lr=0.153750, batch loss=0.220159, epoch loss=1.133304 Batch=419, step=11220, lr=0.153500, batch loss=0.216703, epoch loss=1.350007 Batch=479, step=11280, lr=0.153250, batch loss=0.212101, epoch loss=1.562108 Batch=539, step=11340, lr=0.153000, batch loss=0.163564, epoch loss=1.725671 Batch=599, step=11400, lr=0.152750, batch loss=0.178415, epoch loss=1.904086 Batch=659, step=11460, lr=0.152500, batch loss=0.267170, epoch loss=2.171256 Batch=719, step=11520, lr=0.152250, batch loss=0.259518, epoch loss=2.430775 Batch=779, step=11580, lr=0.152000, batch loss=0.271833, epoch loss=2.702608 Batch=839, step=11640, lr=0.151750, batch loss=0.257557, epoch loss=2.960165 Batch=899, step=11700, lr=0.151500, batch loss=0.212752, epoch loss=3.172918 Batch=959, step=11760, lr=0.151250, batch loss=0.166225, epoch loss=3.339143 Batch=1019, step=11820, lr=0.151000, batch loss=0.267278, epoch loss=3.606421 Batch=1079, step=11880, lr=0.150750, batch loss=0.146337, epoch loss=3.752758 Batch=1139, step=11940, lr=0.150500, batch loss=0.186337, epoch loss=3.939095 Batch=1199, step=12000, lr=0.150250, batch loss=0.139237, epoch loss=4.078332 Epoch=9, step=12000, lr=0.150250, epoch loss=4.078332 Batch=59, step=12060, lr=0.150000, batch loss=0.163476, epoch loss=0.163476 Batch=119, step=12120, lr=0.149750, batch loss=0.135056, epoch loss=0.298532 Batch=179, step=12180, lr=0.149500, batch loss=0.151626, epoch loss=0.450158 Batch=239, step=12240, lr=0.149250, batch loss=0.219967, epoch loss=0.670125 Batch=299, step=12300, lr=0.149000, batch loss=0.142373, epoch loss=0.812498 Batch=359, step=12360, lr=0.148750, batch loss=0.196418, epoch loss=1.008916 Batch=419, step=12420, lr=0.148500, batch loss=0.207193, epoch loss=1.216109 Batch=479, step=12480, lr=0.148250, batch loss=0.178019, epoch loss=1.394128 Batch=539, step=12540, lr=0.148000, batch loss=0.142912, epoch loss=1.537040 Batch=599, step=12600, lr=0.147750, batch loss=0.149260, epoch loss=1.686301 Batch=659, step=12660, lr=0.147500, batch loss=0.221646, epoch loss=1.907947 Batch=719, step=12720, lr=0.147250, batch loss=0.230988, epoch loss=2.138935 Batch=779, step=12780, lr=0.147000, batch loss=0.243428, epoch loss=2.382363 Batch=839, step=12840, lr=0.146750, batch loss=0.231209, epoch loss=2.613572 Batch=899, step=12900, lr=0.146500, batch loss=0.193817, epoch loss=2.807388 Batch=959, step=12960, lr=0.146250, batch 
loss=0.162614, epoch loss=2.970003 Batch=1019, step=13020, lr=0.146000, batch loss=0.259713, epoch loss=3.229716 Batch=1079, step=13080, lr=0.145750, batch loss=0.116743, epoch loss=3.346459 Batch=1139, step=13140, lr=0.145500, batch loss=0.153960, epoch loss=3.500419 Batch=1199, step=13200, lr=0.145250, batch loss=0.117777, epoch loss=3.618197 Epoch=10, step=13200, lr=0.145250, epoch loss=3.618197 Batch=59, step=13260, lr=0.145000, batch loss=0.138412, epoch loss=0.138412 Batch=119, step=13320, lr=0.144750, batch loss=0.117108, epoch loss=0.255520 Batch=179, step=13380, lr=0.144500, batch loss=0.127513, epoch loss=0.383033 Batch=239, step=13440, lr=0.144250, batch loss=0.184628, epoch loss=0.567661 Batch=299, step=13500, lr=0.144000, batch loss=0.117793, epoch loss=0.685453 Batch=359, step=13560, lr=0.143750, batch loss=0.162851, epoch loss=0.848304 Batch=419, step=13620, lr=0.143500, batch loss=0.161099, epoch loss=1.009404 Batch=479, step=13680, lr=0.143250, batch loss=0.147749, epoch loss=1.157153 Batch=539, step=13740, lr=0.143000, batch loss=0.117121, epoch loss=1.274274 Batch=599, step=13800, lr=0.142750, batch loss=0.120800, epoch loss=1.395074 Batch=659, step=13860, lr=0.142500, batch loss=0.177409, epoch loss=1.572483 Batch=719, step=13920, lr=0.142250, batch loss=0.172883, epoch loss=1.745366 Batch=779, step=13980, lr=0.142000, batch loss=0.180093, epoch loss=1.925458 Batch=839, step=14040, lr=0.141750, batch loss=0.187140, epoch loss=2.112598 Batch=899, step=14100, lr=0.141500, batch loss=0.157602, epoch loss=2.270200 Batch=959, step=14160, lr=0.141250, batch loss=0.143253, epoch loss=2.413452 Batch=1019, step=14220, lr=0.141000, batch loss=0.320498, epoch loss=2.733950 Batch=1079, step=14280, lr=0.140750, batch loss=0.077446, epoch loss=2.811397 Batch=1139, step=14340, lr=0.140500, batch loss=0.123601, epoch loss=2.934998 Batch=1199, step=14400, lr=0.140250, batch loss=0.091620, epoch loss=3.026618 Epoch=11, step=14400, lr=0.140250, epoch loss=3.026618 Batch=59, step=14460, lr=0.140000, batch loss=0.110084, epoch loss=0.110084 Batch=119, step=14520, lr=0.139750, batch loss=0.103140, epoch loss=0.213224 Batch=179, step=14580, lr=0.139500, batch loss=0.105626, epoch loss=0.318850 Batch=239, step=14640, lr=0.139250, batch loss=0.141099, epoch loss=0.459949 Batch=299, step=14700, lr=0.139000, batch loss=0.080637, epoch loss=0.540585 Batch=359, step=14760, lr=0.138750, batch loss=0.118074, epoch loss=0.658659 Batch=419, step=14820, lr=0.138500, batch loss=0.127180, epoch loss=0.785839 Batch=479, step=14880, lr=0.138250, batch loss=0.100978, epoch loss=0.886817 Batch=539, step=14940, lr=0.138000, batch loss=0.090674, epoch loss=0.977492 Batch=599, step=15000, lr=0.137750, batch loss=0.084576, epoch loss=1.062068 Batch=659, step=15060, lr=0.137500, batch loss=0.128936, epoch loss=1.191004 Batch=719, step=15120, lr=0.137250, batch loss=0.133957, epoch loss=1.324961 Batch=779, step=15180, lr=0.137000, batch loss=0.179214, epoch loss=1.504175 Batch=839, step=15240, lr=0.136750, batch loss=0.162318, epoch loss=1.666493 Batch=899, step=15300, lr=0.136500, batch loss=0.288059, epoch loss=1.954552 Batch=959, step=15360, lr=0.136250, batch loss=0.054404, epoch loss=2.008956 Batch=1019, step=15420, lr=0.136000, batch loss=0.115691, epoch loss=2.124647 Batch=1079, step=15480, lr=0.135750, batch loss=0.066626, epoch loss=2.191273 Batch=1139, step=15540, lr=0.135500, batch loss=0.124237, epoch loss=2.315509 Batch=1199, step=15600, lr=0.135250, batch loss=0.067954, epoch loss=2.383464 Epoch=12, 
step=15600, lr=0.135250, epoch loss=2.383464
Batch=59, step=15660, lr=0.135000, batch loss=0.076032, epoch loss=0.076032
Batch=119, step=15720, lr=0.134750, batch loss=0.085894, epoch loss=0.161926
Batch=179, step=15780, lr=0.134500, batch loss=0.083209, epoch loss=0.245136
Batch=239, step=15840, lr=0.134250, batch loss=0.093141, epoch loss=0.338276
Batch=299, step=15900, lr=0.134000, batch loss=0.036873, epoch loss=0.375150
Batch=359, step=15960, lr=0.133750, batch loss=0.077785, epoch loss=0.452934
Batch=419, step=16020, lr=0.133500, batch loss=0.090700, epoch loss=0.543634
Batch=479, step=16080, lr=0.133250, batch loss=0.053459, epoch loss=0.597093
Batch=539, step=16140, lr=0.133000, batch loss=0.065198, epoch loss=0.662291
Batch=599, step=16200, lr=0.132750, batch loss=0.053403, epoch loss=0.715695
Batch=659, step=16260, lr=0.132500, batch loss=0.082670, epoch loss=0.798365
Batch=719, step=16320, lr=0.132250, batch loss=0.097624, epoch loss=0.895989
Batch=779, step=16380, lr=0.132000, batch loss=0.090240, epoch loss=0.986229
Batch=839, step=16440, lr=0.131750, batch loss=0.119821, epoch loss=1.106050
Batch=899, step=16500, lr=0.131500, batch loss=0.080261, epoch loss=1.186311
Batch=959, step=16560, lr=0.131250, batch loss=0.039568, epoch loss=1.225880
Batch=1019, step=16620, lr=0.131000, batch loss=0.047728, epoch loss=1.273608
Batch=1079, step=16680, lr=0.130750, batch loss=0.050511, epoch loss=1.324119
Batch=1139, step=16740, lr=0.130500, batch loss=0.095786, epoch loss=1.419904
Batch=1199, step=16800, lr=0.130250, batch loss=0.034561, epoch loss=1.454465
Epoch=13, step=16800, lr=0.130250, epoch loss=1.454465
Batch=59, step=16860, lr=0.130000, batch loss=0.035050, epoch loss=0.035050
Batch=119, step=16920, lr=0.129750, batch loss=0.034492, epoch loss=0.069543
Batch=179, step=16980, lr=0.129500, batch loss=0.042496, epoch loss=0.112038
Batch=239, step=17040, lr=0.129250, batch loss=0.057102, epoch loss=0.169140
Batch=299, step=17100, lr=0.129000, batch loss=0.020761, epoch loss=0.189901
Batch=359, step=17160, lr=0.128750, batch loss=0.040525, epoch loss=0.230426
Batch=419, step=17220, lr=0.128500, batch loss=0.043207, epoch loss=0.273633
Batch=479, step=17280, lr=0.128250, batch loss=0.022470, epoch loss=0.296103
Batch=539, step=17340, lr=0.128000, batch loss=0.027741, epoch loss=0.323844
Batch=599, step=17400, lr=0.127750, batch loss=0.039107, epoch loss=0.362951
Batch=659, step=17460, lr=0.127500, batch loss=0.054058, epoch loss=0.417009
Batch=719, step=17520, lr=0.127250, batch loss=0.069740, epoch loss=0.486748
Batch=779, step=17580, lr=0.126750, batch loss=0.070138, epoch loss=0.556886
Batch=839, step=17640, lr=0.126500, batch loss=0.112328, epoch loss=0.669214
Batch=899, step=17700, lr=0.126500, batch loss=0.048865, epoch loss=0.718079
Batch=959, step=17760, lr=0.126250, batch loss=0.021269, epoch loss=0.739348
Batch=1019, step=17820, lr=0.126000, batch loss=0.049359, epoch loss=0.788707
Batch=1079, step=17880, lr=0.125750, batch loss=0.044868, epoch loss=0.833575
Batch=1139, step=17940, lr=0.125500, batch loss=0.073052, epoch loss=0.906627
Batch=1199, step=18000, lr=0.125000, batch loss=0.022348, epoch loss=0.928976
Epoch=14, step=18000, lr=0.125000, epoch loss=0.928976
Batch=59, step=18060, lr=0.125000, batch loss=0.017344, epoch loss=0.017344
Batch=119, step=18120, lr=0.124750, batch loss=0.019115, epoch loss=0.036459
Batch=179, step=18180, lr=0.124250, batch loss=0.029311, epoch loss=0.065770
Batch=239, step=18240, lr=0.124250, batch loss=0.040931, epoch loss=0.106701
Batch=299, step=18300, lr=0.124000, batch loss=0.019208, epoch loss=0.125909
Batch=359, step=18360, lr=0.123750, batch loss=0.024085, epoch loss=0.149994
Batch=419, step=18420, lr=0.123500, batch loss=0.031515, epoch loss=0.181509
Batch=479, step=18480, lr=0.123250, batch loss=0.032373, epoch loss=0.213883
Batch=539, step=18540, lr=0.123000, batch loss=0.054800, epoch loss=0.268683
Batch=599, step=18600, lr=0.122750, batch loss=0.027201, epoch loss=0.295884
Batch=659, step=18660, lr=0.122500, batch loss=0.031767, epoch loss=0.327651
Batch=719, step=18720, lr=0.122250, batch loss=0.050840, epoch loss=0.378490
Batch=779, step=18780, lr=0.122000, batch loss=0.110981, epoch loss=0.489471
Batch=839, step=18840, lr=0.121750, batch loss=0.053122, epoch loss=0.542593
Batch=899, step=18900, lr=0.121500, batch loss=0.049046, epoch loss=0.591640
Batch=959, step=18960, lr=0.121250, batch loss=0.013440, epoch loss=0.605079
Batch=1019, step=19020, lr=0.121000, batch loss=0.023738, epoch loss=0.628818
Batch=1079, step=19080, lr=0.120500, batch loss=0.009005, epoch loss=0.637823
Batch=1139, step=19140, lr=0.120500, batch loss=0.023485, epoch loss=0.661308
Batch=1199, step=19200, lr=0.120250, batch loss=0.009264, epoch loss=0.670572
Epoch=15, step=19200, lr=0.120250, epoch loss=0.670572
Batch=59, step=19260, lr=0.120000, batch loss=0.004632, epoch loss=0.004632
Batch=119, step=19320, lr=0.119750, batch loss=0.021561, epoch loss=0.026193
Batch=179, step=19380, lr=0.119500, batch loss=0.066670, epoch loss=0.092862
Batch=239, step=19440, lr=0.119000, batch loss=0.026565, epoch loss=0.119427
Batch=299, step=19500, lr=0.119000, batch loss=0.012673, epoch loss=0.132100
Batch=359, step=19560, lr=0.118750, batch loss=0.023689, epoch loss=0.155788
Batch=419, step=19620, lr=0.118250, batch loss=0.020398, epoch loss=0.176186
Batch=479, step=19680, lr=0.118000, batch loss=0.008526, epoch loss=0.184712
Batch=539, step=19740, lr=0.117750, batch loss=0.017057, epoch loss=0.201769
Batch=599, step=19800, lr=0.117750, batch loss=0.021382, epoch loss=0.223152
Batch=659, step=19860, lr=0.117250, batch loss=0.019578, epoch loss=0.242730
Batch=719, step=19920, lr=0.117000, batch loss=0.045707, epoch loss=0.288436
Batch=779, step=19980, lr=0.117000, batch loss=0.078574, epoch loss=0.367011
Batch=839, step=20040, lr=0.116750, batch loss=0.031712, epoch loss=0.398723
Batch=899, step=20100, lr=0.116500, batch loss=0.029049, epoch loss=0.427772
Batch=959, step=20160, lr=0.116250, batch loss=0.019694, epoch loss=0.447466
Batch=1019, step=20220, lr=0.116000, batch loss=0.023146, epoch loss=0.470611
Batch=1079, step=20280, lr=0.115500, batch loss=0.002684, epoch loss=0.473296
Batch=1139, step=20340, lr=0.115500, batch loss=0.014724, epoch loss=0.488020
Batch=1199, step=20400, lr=0.115250, batch loss=0.005782, epoch loss=0.493802
Epoch=16, step=20400, lr=0.115250, epoch loss=0.493802
Batch=59, step=20460, lr=0.114750, batch loss=0.002656, epoch loss=0.002656
Batch=119, step=20520, lr=0.114750, batch loss=0.011419, epoch loss=0.014074
Batch=179, step=20580, lr=0.114500, batch loss=0.024915, epoch loss=0.038990
Batch=239, step=20640, lr=0.114250, batch loss=0.014172, epoch loss=0.053161
Batch=299, step=20700, lr=0.114000, batch loss=0.004570, epoch loss=0.057731
Batch=359, step=20760, lr=0.113500, batch loss=0.015453, epoch loss=0.073184
Batch=419, step=20820, lr=0.113500, batch loss=0.015060, epoch loss=0.088244
Batch=479, step=20880, lr=0.113250, batch loss=0.005114, epoch loss=0.093358
Batch=539, step=20940, lr=0.113000, batch loss=0.015916, epoch loss=0.109274
Batch=599, step=21000, lr=0.112750, batch loss=0.018950, epoch loss=0.128224
Batch=659, step=21060, lr=0.112500, batch loss=0.015124, epoch loss=0.143348
Batch=719, step=21120, lr=0.112250, batch loss=0.040936, epoch loss=0.184285
Batch=779, step=21180, lr=0.112000, batch loss=0.075863, epoch loss=0.260147
Batch=839, step=21240, lr=0.111750, batch loss=0.026623, epoch loss=0.286771
Batch=899, step=21300, lr=0.111500, batch loss=0.034485, epoch loss=0.321256
Batch=959, step=21360, lr=0.111250, batch loss=0.010232, epoch loss=0.331487
Batch=1019, step=21420, lr=0.111000, batch loss=0.011447, epoch loss=0.342935
Batch=1079, step=21480, lr=0.110750, batch loss=0.000644, epoch loss=0.343579
Batch=1139, step=21540, lr=0.110500, batch loss=0.013166, epoch loss=0.356744
Batch=1199, step=21600, lr=0.110250, batch loss=0.005332, epoch loss=0.362077
Epoch=17, step=21600, lr=0.110250, epoch loss=0.362077
Batch=59, step=21660, lr=0.110000, batch loss=0.002289, epoch loss=0.002289
Batch=119, step=21720, lr=0.109750, batch loss=0.006370, epoch loss=0.008659
Batch=179, step=21780, lr=0.109500, batch loss=0.012835, epoch loss=0.021494
Batch=239, step=21840, lr=0.109250, batch loss=0.009922, epoch loss=0.031416
Batch=299, step=21900, lr=0.109000, batch loss=0.014903, epoch loss=0.046319
Batch=359, step=21960, lr=0.108750, batch loss=0.015575, epoch loss=0.061894
Batch=419, step=22020, lr=0.108500, batch loss=0.013632, epoch loss=0.075526
Batch=479, step=22080, lr=0.108250, batch loss=0.003569, epoch loss=0.079095
Batch=539, step=22140, lr=0.108000, batch loss=0.015700, epoch loss=0.094795
Batch=599, step=22200, lr=0.107750, batch loss=0.015585, epoch loss=0.110381
Batch=659, step=22260, lr=0.107500, batch loss=0.013668, epoch loss=0.124049
Batch=719, step=22320, lr=0.107250, batch loss=0.021484, epoch loss=0.145533
Batch=779, step=22380, lr=0.107000, batch loss=0.032087, epoch loss=0.177620
Batch=839, step=22440, lr=0.106750, batch loss=0.027335, epoch loss=0.204955
Batch=899, step=22500, lr=0.106500, batch loss=0.025092, epoch loss=0.230047
Batch=959, step=22560, lr=0.106250, batch loss=0.008702, epoch loss=0.238749
Batch=1019, step=22620, lr=0.106000, batch loss=0.008891, epoch loss=0.247640
Batch=1079, step=22680, lr=0.105750, batch loss=0.000610, epoch loss=0.248249
Batch=1139, step=22740, lr=0.105500, batch loss=0.010698, epoch loss=0.258947
Batch=1199, step=22800, lr=0.105250, batch loss=0.004789, epoch loss=0.263736
Epoch=18, step=22800, lr=0.105250, epoch loss=0.263736
Batch=59, step=22860, lr=0.105000, batch loss=0.001808, epoch loss=0.001808
Batch=119, step=22920, lr=0.104750, batch loss=0.005447, epoch loss=0.007256
Batch=179, step=22980, lr=0.104500, batch loss=0.011112, epoch loss=0.018368
Batch=239, step=23040, lr=0.104250, batch loss=0.009222, epoch loss=0.027590
Batch=299, step=23100, lr=0.104000, batch loss=0.009644, epoch loss=0.037233
Batch=359, step=23160, lr=0.103750, batch loss=0.012497, epoch loss=0.049730
Batch=419, step=23220, lr=0.103250, batch loss=0.010703, epoch loss=0.060433
Batch=479, step=23280, lr=0.103250, batch loss=0.002684, epoch loss=0.063117
Batch=539, step=23340, lr=0.103000, batch loss=0.016807, epoch loss=0.079924
Batch=599, step=23400, lr=0.102750, batch loss=0.013733, epoch loss=0.093656
Batch=659, step=23460, lr=0.102500, batch loss=0.010518, epoch loss=0.104175
Batch=719, step=23520, lr=0.102250, batch loss=0.015708, epoch loss=0.119883
Batch=779, step=23580, lr=0.102000, batch loss=0.022193, epoch loss=0.142076
Batch=839, step=23640, lr=0.101750, batch loss=0.028100, epoch loss=0.170176
Batch=899, step=23700, lr=0.101500, batch loss=0.021523, epoch loss=0.191699
Batch=959, step=23760, lr=0.101000, batch loss=0.010344, epoch loss=0.202043
Batch=1019, step=23820, lr=0.101000, batch loss=0.007312, epoch loss=0.209355
Batch=1079, step=23880, lr=0.100750, batch loss=0.001000, epoch loss=0.210355
Batch=1139, step=23940, lr=0.100500, batch loss=0.008707, epoch loss=0.219062
Batch=1199, step=24000, lr=0.100250, batch loss=0.004662, epoch loss=0.223724
Epoch=19, step=24000, lr=0.100250, epoch loss=0.223724
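The logging pattern above is regular: the counters advance by 60 between reports (one optimizer step per batch, 1200 batches per epoch), the learning rate decays roughly linearly by about 0.00025 per 60 steps, and "epoch loss" is a running sum of the reported batch losses that resets at each epoch boundary. The OCaml sketch below reproduces only this reporting cadence; the constants and the train_batch stand-in are illustrative assumptions, not ocannl's actual training loop or API.

```ocaml
(* Hypothetical sketch of the reporting cadence seen in the log above:
   one step per batch, a report every 60 batches, a roughly linear
   learning-rate decay, and an "epoch loss" accumulated from the
   reported batch losses and reset at each epoch. *)
let () =
  let batches_per_epoch = 1200 and report_every = 60 in
  let base_lr = 0.2 and decay_per_step = 0.00025 /. 60. in
  (* Stand-in for the real forward/backward/update on one minibatch:
     returns a fake, slowly decreasing loss. *)
  let train_batch ~lr:_ ~step = 1.0 /. float_of_int (step + 1) in
  let step = ref 0 in
  for epoch = 0 to 19 do
    let epoch_loss = ref 0.0 in
    for batch = 0 to batches_per_epoch - 1 do
      incr step;
      let lr = base_lr -. (decay_per_step *. float_of_int !step) in
      let batch_loss = train_batch ~lr ~step:!step in
      if (batch + 1) mod report_every = 0 then begin
        (* Accumulate only the reported batch losses, matching the
           arithmetic visible in the log. *)
        epoch_loss := !epoch_loss +. batch_loss;
        Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
          batch !step lr batch_loss !epoch_loss
      end
    done;
    Printf.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n"
      epoch !step
      (base_lr -. (decay_per_step *. float_of_int !step))
      !epoch_loss
  done
```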
Half-moons scatterplot and decision boundary:
[100-column ASCII plot: two interleaved crescent-shaped point clouds, drawn as '#' (upper moon) and '%' (lower moon), with the surrounding cells filled by '*' and '.' marking the two regions of the learned decision boundary.]
2025-03-20 22:17.53 ---> saved as "3c9ae9cb4cec8efc328d3db0d53dd068c65369dc83574e6d9af29b8f344af6d1"
Job succeeded
2025-03-20 22:17.54: Job succeeded
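For reference, the two interleaved half-moon classes visualized above are commonly generated with a construction similar to scikit-learn's make_moons: points sampled along two half-circles, one shifted and flipped so the crescents interleave, plus noise. The OCaml sketch below is a hypothetical illustration of that construction, not the data generator used by ocannl's test.

```ocaml
(* Hypothetical half-moons generator: returns (x, y, class) triples. *)
let make_moons ?(noise = 0.1) n =
  let pi = 4.0 *. atan 1.0 in
  let jitter () = (Random.float 2.0 -. 1.0) *. noise in
  List.init n (fun i ->
      let t = pi *. float_of_int (i / 2) /. float_of_int (n / 2) in
      if i mod 2 = 0 then
        (* upper crescent (label 0) *)
        (cos t +. jitter (), sin t +. jitter (), 0)
      else
        (* lower crescent, shifted so the two moons interleave (label 1) *)
        (1.0 -. cos t +. jitter (), 0.5 -. sin t +. jitter (), 1))

let () =
  Random.self_init ();
  let samples = make_moons ~noise:0.1 200 in
  (* Print a few samples; a real test would train a classifier on these. *)
  List.iteri
    (fun i (x, y, label) ->
      if i < 5 then Printf.printf "x=%+.3f  y=%+.3f  class=%d\n" x y label)
    samples
```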