2025-03-21 14:48.49: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (8b6a6facd7a53c86ebd5bf1ba838d58fbe9e8480) (linux-x86_64:(lower-bound))
Base: ocaml/opam:debian-12-ocaml-5.2@sha256:89ec63b50b2a5b473e2ee6ac01bf5fc8513ec4fc85f485bbee7ea4ce1c250e71

Opam project build

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 8b6a6fac
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-5.2@sha256:89ec63b50b2a5b473e2ee6ac01bf5fc8513ec4fc85f485bbee7ea4ce1c250e71
# debian-12-5.2_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.15.0 backoff.0.1.0 base.v0.17.0 base-bigarray.base base-domains.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.5.0 conf-libffi.2.0.0 conf-pkg-config.1.0 cppo.1.6.7 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.14.0 dune-configurator.2.7.0 fieldslib.v0.17.0 integers.0.6.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.0.0 multicore-magic.2.3.0 num.1.0 ocaml.5.2.1 ocaml-base-compiler.5.2.1 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.0 ocamlbuild.0.14.3 ocamlfind.1.9.6 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.5.2 ppx_enumerate.v0.17.0 ppx_expect.v0.17.0 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.32.1 ppxlib_jane.v0.17.0 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.0.8.6 re.1.9.0 result.1.5 saturn_lockfree.0.5.0 seq.0.3 sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.1.0 time_now.v0.17.0 topkg.1.0.6 tyxml.4.4.0 uucp.13.0.0 uutf.1.0.3 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK

2025-03-21 14:48.49: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-5.2@sha256:89ec63b50b2a5b473e2ee6ac01bf5fc8513ec4fc85f485bbee7ea4ce1c250e71-debian-12-5.2_opam-2.3-b52341ba23877e94ae09b5dde4872bfa"

2025-03-21 14:48.49: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-5.2@sha256:89ec63b50b2a5b473e2ee6ac01bf5fc8513ec4fc85f485bbee7ea4ce1c250e71)
 (comment debian-12-5.2_opam-2.3)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
 (run (network host)
      (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'"))
 (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'"))
 (env DEPS "angstrom.0.15.0 backoff.0.1.0 base.v0.17.0 base-bigarray.base base-domains.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.5.0 conf-libffi.2.0.0 conf-pkg-config.1.0 cppo.1.6.7 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.14.0 dune-configurator.2.7.0 fieldslib.v0.17.0 integers.0.6.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.0.0 multicore-magic.2.3.0 num.1.0 ocaml.5.2.1 ocaml-base-compiler.5.2.1 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.0 ocamlbuild.0.14.3 ocamlfind.1.9.6 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.5.2 ppx_enumerate.v0.17.0 ppx_expect.v0.17.0 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.32.1 ppxlib_jane.v0.17.0 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.0.8.6 re.1.9.0 result.1.5 saturn_lockfree.0.5.0 seq.0.3 sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.1.0 time_now.v0.17.0 topkg.1.0.6 tyxml.4.4.0 uucp.13.0.0 uutf.1.0.3 variantslib.v0.17.0")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam install $DEPS"))
 (copy (src .) (dst /src))
 (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
)

2025-03-21 14:48.49: Waiting for resource in pool OCluster
2025-03-21 14:48.50: Waiting for worker…
2025-03-21 14:49.15: Got resource from pool OCluster
Building on toxis.caelum.ci.dev
All commits already cached
HEAD is now at 8b6a6fac Fix bug in grad formula for recip, update tests

(from ocaml/opam:debian-12-ocaml-5.2@sha256:89ec63b50b2a5b473e2ee6ac01bf5fc8513ec4fc85f485bbee7ea4ce1c250e71)
2025-03-21 14:49.35 ---> saved as "38896d45c132e80618e0ea619c3f1eaa7c74f33d36affcfd5cb62a4306714da8"

/: (comment debian-12-5.2_opam-2.3)
/: (user (uid 1000) (gid 1000))
/: (env CLICOLOR_FORCE 1)
/: (env OPAMCOLOR always)
/: (workdir /src)

/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
2025-03-21 14:49.35 ---> saved as "3d0e7e63412167c499341f1181b172ea6ec9ca5bf81b7c396cb3e336abf0340f"

/src: (run (shell "opam init --reinit -ni"))
Configuring from /home/opam/.opamrc and then from built-in defaults.
Checking for available remotes: rsync and local, git.
  - you won't be able to use mercurial repositories unless you install the hg command on your system.
  - you won't be able to use darcs repositories unless you install the darcs command on your system.
Continue? [y/n] y

This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.
You may want to back it up before going further.

[NOTE] The 'jobs' option was reset, its value was 39 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using:
        opam option jobs=39 --global
Format upgrade done.

<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>
[ERROR] Could not update repository "opam-repository-archive": "/usr/bin/git fetch -q" exited with code 128 "fatal: unable to access 'https://github.com/ocaml/opam-repository-archive/': Could not resolve host: github.com"
[default] synchronised from file:///home/opam/opam-repository
2025-03-21 14:50.56 ---> saved as "cc601c78ac214415c83b6ec0bc7840029b6ce55de18a34aea04b80f75512f2a5"

/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
Linux 5.15.0-134-generic
The OCaml toplevel, version 5.2.1
2.3.0
2025-03-21 14:50.57 ---> saved as "18f62663459dbff01fb684078274d620563485d9637901154dd6fddd17ab5ffc"

/src: (workdir /src)

/src: (run (shell "sudo chown opam /src"))
2025-03-21 14:50.57 ---> saved as "1f4800f05b74f7563c3437c421c17e1a0e4a5d2253bb31d8c70d9fec1010be7b"

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master) && git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 && git log --no-decorate -n1 --oneline && opam update -u"))
From https://github.com/ocaml/opam-repository
 * branch                  master     -> FETCH_HEAD
   a8ed476033..acfb0e6e94  master     -> origin/master
4e25d0cf5f Merge pull request #27651 from lukstafi/opam-publish-ppx_minidebug.2.1.0

<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[opam-repository-archive] synchronised from git+https://github.com/ocaml/opam-repository-archive
[default] synchronised from file:///home/opam/opam-repository
Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
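For reference, the repository pin exercised by the step above can be replayed outside the CI with roughly the following commands; this is a sketch that assumes a local clone of ocaml/opam-repository at ~/opam-repository registered as the default opam remote, as in the base image:

cd ~/opam-repository
git cat-file -e 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785 || git fetch origin master   # fetch only if the pinned commit is not already present
git reset -q --hard 4e25d0cf5f38cf58d1904bbb48f62ccd9c48f785                          # pin the repository snapshot used by this job
opam update -u                                                                        # resynchronise the switch against that snapshot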
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages. Nothing to do. # To update the current shell environment, run: eval $(opam env) 2025-03-21 14:52.09 ---> saved as "fb0ede5a6901175d2597f7c70aa2212a1aad82bfce33c2dec7de7c3ba52d706b" /src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./)) 2025-03-21 14:52.09 ---> saved as "f2f5ebcb90fe4d7778d0156232e743d8dbe87e1e3ca2a12d09b4c5932b870185" /src: (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'")) [neural_nets_lib.dev] synchronised (file:///src) neural_nets_lib is now pinned to file:///src (version dev) [arrayjit.dev] synchronised (file:///src) arrayjit is now pinned to file:///src (version dev) 2025-03-21 14:52.15 ---> saved as "1536b2bf4ebd8d93704b8bcc35a2edf06e6e8f3e640f27ad399435c604c46fa9" /src: (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'")) 2025-03-21 14:52.15 ---> saved as "c5aa8a4dae4992cad2d16d181872cb61df57e38fdd7ac420b38540c51ab6f201" /src: (env DEPS "angstrom.0.15.0 backoff.0.1.0 base.v0.17.0 base-bigarray.base base-domains.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.5.0 conf-libffi.2.0.0 conf-pkg-config.1.0 cppo.1.6.7 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.14.0 dune-configurator.2.7.0 fieldslib.v0.17.0 integers.0.6.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 mtime.2.0.0 multicore-magic.2.3.0 num.1.0 ocaml.5.2.1 ocaml-base-compiler.5.2.1 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml_intrinsics_kernel.v0.17.0 ocamlbuild.0.14.3 ocamlfind.1.9.6 parsexp.v0.17.0 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.5.2 ppx_enumerate.v0.17.0 ppx_expect.v0.17.0 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.1.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.32.1 ppxlib_jane.v0.17.0 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.0.8.6 re.1.9.0 result.1.5 saturn_lockfree.0.5.0 seq.0.3 sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.1.0 time_now.v0.17.0 topkg.1.0.6 tyxml.4.4.0 uucp.13.0.0 uutf.1.0.3 variantslib.v0.17.0") /src: (env CI true) /src: (env OCAMLCI true) /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS")) + /usr/bin/sudo "apt-get" "update" - Hit:1 http://deb.debian.org/debian bookworm InRelease - Get:2 http://deb.debian.org/debian bookworm-updates InRelease [55.4 kB] - Get:3 http://deb.debian.org/debian-security bookworm-security InRelease [48.0 kB] - Get:4 http://deb.debian.org/debian-security bookworm-security/main amd64 Packages [249 kB] - Fetched 353 kB in 0s (829 kB/s) - Reading package lists... - <><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><> [arrayjit.dev] synchronised (file:///src) [neural_nets_lib.dev] synchronised (file:///src) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). [NOTE] Package ocaml-config is already installed (current version is 3). [NOTE] Package ocaml-base-compiler is already installed (current version is 5.2.1). 
[NOTE] Package ocaml is already installed (current version is 5.2.1).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
The following system packages will first need to be installed: libffi-dev pkg-config

<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>

+ /usr/bin/sudo "apt-get" "install" "-qq" "-yy" "libffi-dev" "pkg-config"
- debconf: delaying package configuration, since apt-utils is not installed
- Selecting previously unselected package libffi-dev:amd64.
- (Reading database ... 18776 files and directories currently installed.)
- Preparing to unpack .../libffi-dev_3.4.4-1_amd64.deb ...
- Unpacking libffi-dev:amd64 (3.4.4-1) ...
- Selecting previously unselected package libpkgconf3:amd64.
- Preparing to unpack .../libpkgconf3_1.8.1-1_amd64.deb ...
- Unpacking libpkgconf3:amd64 (1.8.1-1) ...
- Selecting previously unselected package pkgconf-bin.
- Preparing to unpack .../pkgconf-bin_1.8.1-1_amd64.deb ...
- Unpacking pkgconf-bin (1.8.1-1) ...
- Selecting previously unselected package pkgconf:amd64.
- Preparing to unpack .../pkgconf_1.8.1-1_amd64.deb ...
- Unpacking pkgconf:amd64 (1.8.1-1) ...
- Selecting previously unselected package pkg-config:amd64.
- Preparing to unpack .../pkg-config_1.8.1-1_amd64.deb ...
- Unpacking pkg-config:amd64 (1.8.1-1) ...
- Setting up libffi-dev:amd64 (3.4.4-1) ...
- Setting up libpkgconf3:amd64 (1.8.1-1) ...
- Setting up pkgconf-bin (1.8.1-1) ...
- Setting up pkgconf:amd64 (1.8.1-1) ...
- Setting up pkg-config:amd64 (1.8.1-1) ...
- Processing triggers for libc-bin (2.36-9+deb12u10) ...
2025-03-21 14:52.39 ---> saved as "5413d90b88bfad68372a87ba6af93a5c804fd152971fa170c50329eac12afcf9"

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS"))
[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).
[NOTE] Package ocaml-config is already installed (current version is 3).
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.2.1).
[NOTE] Package ocaml is already installed (current version is 5.2.1).
[NOTE] Package base-unix is already installed (current version is base).
[NOTE] Package base-threads is already installed (current version is base).
[NOTE] Package base-nnp is already installed (current version is base).
[NOTE] Package base-domains is already installed (current version is base).
[NOTE] Package base-bigarray is already installed (current version is base).
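The external-dependency phase above and the package build phase whose plan follows can be replayed on their own, roughly as below; a sketch assuming DEPS is exported with the same frozen package list as in the Dockerfile and that both .opam files are already pinned:

opam update --depexts                                                                  # refresh system package metadata (apt on Debian 12)
opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS         # install only system packages: libffi-dev and pkg-config here
opam install $DEPS                                                                     # then build the 65 pinned opam packages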
The following actions will be performed: === install 65 packages - install angstrom 0.15.0 - install backoff 0.1.0 - install base v0.17.0 - install bigarray-compat 1.1.0 - install bigstringaf 0.5.0 - install conf-libffi 2.0.0 - install conf-pkg-config 1.0 - install cppo 1.6.7 - install ctypes 0.23.0 - install ctypes-foreign 0.23.0 - install dune 3.14.0 - install dune-configurator 2.7.0 - install fieldslib v0.17.0 - install integers 0.6.0 - install jane-street-headers v0.17.0 - install jst-config v0.17.0 - install mtime 2.0.0 - install multicore-magic 2.3.0 - install num 1.0 - install ocaml-compiler-libs v0.17.0 - install ocaml-syntax-shims 1.0.0 - install ocaml_intrinsics_kernel v0.17.0 - install ocamlbuild 0.14.3 - install ocamlfind 1.9.6 - install parsexp v0.17.0 - install ppx_assert v0.17.0 - install ppx_base v0.17.0 - install ppx_cold v0.17.0 - install ppx_compare v0.17.0 - install ppx_derivers 1.2.1 - install ppx_deriving 5.2 - install ppx_enumerate v0.17.0 - install ppx_expect v0.17.0 - install ppx_fields_conv v0.17.0 - install ppx_globalize v0.17.0 - install ppx_hash v0.17.0 - install ppx_here v0.17.0 - install ppx_inline_test v0.17.0 - install ppx_minidebug 2.1.0 - install ppx_optcomp v0.17.0 - install ppx_sexp_conv v0.17.0 - install ppx_string v0.17.0 - install ppx_variants_conv v0.17.0 - install ppxlib 0.32.1 - install ppxlib_jane v0.17.0 - install printbox 0.12 - install printbox-ext-plot 0.12 - install printbox-html 0.12 - install printbox-md 0.12 - install printbox-text 0.12 - install ptime 0.8.6 - install re 1.9.0 - install result 1.5 - install saturn_lockfree 0.5.0 - install seq 0.3 - install sexplib v0.17.0 - install sexplib0 v0.17.0 - install stdio v0.17.0 - install stdlib-shims 0.1.0 - install time_now v0.17.0 - install topkg 1.0.6 - install tyxml 4.4.0 - install uucp 13.0.0 - install uutf 1.0.3 - install variantslib v0.17.0 <><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><> -> retrieved backoff.0.1.0 (cached) -> retrieved bigarray-compat.1.1.0 (cached) -> retrieved angstrom.0.15.0 (cached) -> retrieved base.v0.17.0 (cached) -> retrieved bigstringaf.0.5.0 (cached) -> retrieved cppo.1.6.7 (cached) -> installed conf-pkg-config.1.0 -> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached) -> retrieved dune-configurator.2.7.0 (cached) -> retrieved dune.3.14.0 (cached) -> installed conf-libffi.2.0.0 -> retrieved fieldslib.v0.17.0 (cached) -> retrieved integers.0.6.0 (cached) -> retrieved jane-street-headers.v0.17.0 (cached) -> retrieved jst-config.v0.17.0 (cached) -> retrieved mtime.2.0.0 (cached) -> retrieved multicore-magic.2.3.0 (cached) -> retrieved num.1.0 (cached) -> retrieved ocaml-compiler-libs.v0.17.0 (cached) -> retrieved ocaml-syntax-shims.1.0.0 (cached) -> retrieved ocaml_intrinsics_kernel.v0.17.0 (cached) -> retrieved ocamlbuild.0.14.3 (cached) -> retrieved ocamlfind.1.9.6 (cached) -> retrieved parsexp.v0.17.0 (cached) -> retrieved ppx_assert.v0.17.0 (cached) -> retrieved ppx_base.v0.17.0 (cached) -> retrieved ppx_cold.v0.17.0 (cached) -> retrieved ppx_compare.v0.17.0 (cached) -> retrieved ppx_derivers.1.2.1 (cached) -> retrieved ppx_enumerate.v0.17.0 (cached) -> retrieved ppx_deriving.5.2 (cached) -> retrieved ppx_expect.v0.17.0 (cached) -> retrieved ppx_fields_conv.v0.17.0 (cached) -> retrieved ppx_globalize.v0.17.0 (cached) -> retrieved ppx_hash.v0.17.0 (cached) -> retrieved ppx_here.v0.17.0 (cached) -> retrieved ppx_inline_test.v0.17.0 (cached) -> retrieved ppx_optcomp.v0.17.0 (cached) -> retrieved ppx_sexp_conv.v0.17.0 (cached) 
-> retrieved ppx_minidebug.2.1.0 (cached) -> retrieved ppx_string.v0.17.0 (cached) -> retrieved ppx_variants_conv.v0.17.0 (cached) -> retrieved ppxlib_jane.v0.17.0 (cached) -> retrieved ptime.0.8.6 (cached) -> retrieved re.1.9.0 (cached) -> retrieved result.1.5 (cached) -> retrieved ppxlib.0.32.1 (cached) -> retrieved saturn_lockfree.0.5.0 (cached) -> retrieved seq.0.3 (cached) -> retrieved sexplib.v0.17.0 (cached) -> retrieved sexplib0.v0.17.0 (cached) -> retrieved stdio.v0.17.0 (cached) -> retrieved stdlib-shims.0.1.0 (cached) -> retrieved time_now.v0.17.0 (cached) -> retrieved tyxml.4.4.0 (cached) -> retrieved topkg.1.0.6 (cached) -> retrieved uutf.1.0.3 (cached) -> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached) -> retrieved variantslib.v0.17.0 (cached) -> retrieved uucp.13.0.0 (cached) -> installed ocamlfind.1.9.6 -> installed num.1.0 -> installed ocamlbuild.0.14.3 -> installed topkg.1.0.6 -> installed uutf.1.0.3 -> installed mtime.2.0.0 -> installed ptime.0.8.6 -> installed dune.3.14.0 -> installed backoff.0.1.0 -> installed bigarray-compat.1.1.0 -> installed bigstringaf.0.5.0 -> installed jane-street-headers.v0.17.0 -> installed ppx_derivers.1.2.1 -> installed result.1.5 -> installed printbox.0.12 -> installed seq.0.3 -> installed stdlib-shims.0.1.0 -> installed multicore-magic.2.3.0 -> installed ocaml_intrinsics_kernel.v0.17.0 -> installed sexplib0.v0.17.0 -> installed cppo.1.6.7 -> installed integers.0.6.0 -> installed ocaml-syntax-shims.1.0.0 -> installed dune-configurator.2.7.0 -> installed ocaml-compiler-libs.v0.17.0 -> installed re.1.9.0 -> installed saturn_lockfree.0.5.0 -> installed angstrom.0.15.0 -> installed parsexp.v0.17.0 -> installed sexplib.v0.17.0 -> installed tyxml.4.4.0 -> installed printbox-html.0.12 -> installed ctypes.0.23.0 -> installed base.v0.17.0 -> installed ctypes-foreign.0.23.0 -> installed fieldslib.v0.17.0 -> installed variantslib.v0.17.0 -> installed stdio.v0.17.0 -> installed uucp.13.0.0 -> installed printbox-text.0.12 -> installed printbox-md.0.12 -> installed printbox-ext-plot.0.12 -> installed ppxlib.0.32.1 -> installed ppxlib_jane.v0.17.0 -> installed ppx_optcomp.v0.17.0 -> installed ppx_cold.v0.17.0 -> installed ppx_here.v0.17.0 -> installed ppx_variants_conv.v0.17.0 -> installed ppx_fields_conv.v0.17.0 -> installed ppx_enumerate.v0.17.0 -> installed ppx_globalize.v0.17.0 -> installed ppx_compare.v0.17.0 -> installed ppx_deriving.5.2 -> installed ppx_sexp_conv.v0.17.0 -> installed ppx_hash.v0.17.0 -> installed ppx_assert.v0.17.0 -> installed ppx_base.v0.17.0 -> installed ppx_minidebug.2.1.0 -> installed jst-config.v0.17.0 -> installed ppx_string.v0.17.0 -> installed time_now.v0.17.0 -> installed ppx_inline_test.v0.17.0 -> installed ppx_expect.v0.17.0 Done. # To update the current shell environment, run: eval $(opam env) 2025-03-21 14:54.45 ---> saved as "a706c3e8811936c940cce32ac493171bae38f86f52811ed8e079c228989c062b" /src: (copy (src .) (dst /src)) 2025-03-21 14:54.46 ---> saved as "b26fad00f5223891cce285af985873939119b00509f82dbdf9e69fefab14d091" /src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")) (cd _build/default/test_ppx && ./test_ppx_op.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/default/test_ppx && ./test_ppx_op_expected.exe) Welcome to OCANNL! 
Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/80e197330f0a1ec97342338d65bfb875/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test/ocannl_config. 
Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test && .tutorials.inline-tests/inline_test_runner_tutorials.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/937f14374927cbf1712cb8b9a6e6355c/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/default/test && ./moons_demo_parallel_run.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file ("Set log_level to" 1) └─{orphaned from #2} Retrieving commandline, environment, or config file variable ocannl_backend Found cc, in the config file Retrieving commandline, environment, or config file variable ocannl_ll_ident_style Not found, using default heuristic Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level Not found, using default 3 Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command Not found, using default gcc Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout Not found, using default false Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453 Batch=119, step=120, lr=0.199500, batch loss=8.539634, epoch loss=32.149087 Batch=179, step=180, lr=0.199250, batch loss=2.621123, epoch loss=34.770210 Batch=239, step=240, lr=0.199250, batch loss=0.846671, epoch loss=35.616880 Batch=299, step=300, lr=0.199000, batch loss=1.456496, epoch loss=37.073377 Batch=359, step=360, lr=0.198750, batch loss=1.327391, epoch loss=38.400768 Batch=419, step=420, lr=0.198500, batch loss=0.621544, epoch loss=39.022312 Batch=479, step=480, lr=0.198250, batch loss=0.817677, epoch loss=39.839989 Batch=539, step=540, lr=0.198000, batch loss=0.698250, epoch loss=40.538239 Batch=599, step=600, lr=0.197750, batch loss=1.072971, epoch loss=41.611210 Batch=659, step=660, lr=0.197500, batch loss=0.482882, epoch loss=42.094092 Batch=719, step=720, lr=0.197250, batch loss=0.411533, epoch loss=42.505625 Batch=779, step=780, lr=0.197000, batch loss=0.469816, epoch loss=42.975441 Batch=839, step=840, lr=0.196750, batch loss=0.444454, epoch loss=43.419895 Batch=899, step=900, lr=0.196500, batch loss=0.383228, epoch loss=43.803122 Batch=959, step=960, 
lr=0.196250, batch loss=0.241112, epoch loss=44.044235 Batch=1019, step=1020, lr=0.196000, batch loss=0.437622, epoch loss=44.481857 Batch=1079, step=1080, lr=0.195750, batch loss=0.227437, epoch loss=44.709293 Batch=1139, step=1140, lr=0.195250, batch loss=0.313630, epoch loss=45.022923 Batch=1199, step=1200, lr=0.195000, batch loss=0.262745, epoch loss=45.285668 Epoch=0, step=1200, lr=0.195000, epoch loss=45.285668 Batch=59, step=1260, lr=0.194750, batch loss=0.262468, epoch loss=0.262468 Batch=119, step=1320, lr=0.194500, batch loss=0.210044, epoch loss=0.472511 Batch=179, step=1380, lr=0.194250, batch loss=0.244490, epoch loss=0.717001 Batch=239, step=1440, lr=0.194250, batch loss=0.351676, epoch loss=1.068678 Batch=299, step=1500, lr=0.194000, batch loss=0.238587, epoch loss=1.307265 Batch=359, step=1560, lr=0.193750, batch loss=0.311803, epoch loss=1.619068 Batch=419, step=1620, lr=0.193500, batch loss=0.308030, epoch loss=1.927098 Batch=479, step=1680, lr=0.193000, batch loss=0.275416, epoch loss=2.202513 Batch=539, step=1740, lr=0.192750, batch loss=0.210289, epoch loss=2.412802 Batch=599, step=1800, lr=0.192750, batch loss=0.255038, epoch loss=2.667839 Batch=659, step=1860, lr=0.192250, batch loss=0.371409, epoch loss=3.039248 Batch=719, step=1920, lr=0.192250, batch loss=0.361133, epoch loss=3.400382 Batch=779, step=1980, lr=0.192000, batch loss=0.382905, epoch loss=3.783287 Batch=839, step=2040, lr=0.191750, batch loss=0.350146, epoch loss=4.133433 Batch=899, step=2100, lr=0.191500, batch loss=0.321958, epoch loss=4.455392 Batch=959, step=2160, lr=0.191250, batch loss=0.258995, epoch loss=4.714387 Batch=1019, step=2220, lr=0.191000, batch loss=0.388793, epoch loss=5.103180 Batch=1079, step=2280, lr=0.190500, batch loss=0.225327, epoch loss=5.328507 Batch=1139, step=2340, lr=0.190250, batch loss=0.269567, epoch loss=5.598074 Batch=1199, step=2400, lr=0.190250, batch loss=0.214531, epoch loss=5.812605 Epoch=1, step=2400, lr=0.190250, epoch loss=5.812605 Batch=59, step=2460, lr=0.189750, batch loss=0.228678, epoch loss=0.228678 Batch=119, step=2520, lr=0.189500, batch loss=0.186770, epoch loss=0.415448 Batch=179, step=2580, lr=0.189250, batch loss=0.220898, epoch loss=0.636346 Batch=239, step=2640, lr=0.189000, batch loss=0.339855, epoch loss=0.976201 Batch=299, step=2700, lr=0.188750, batch loss=0.217035, epoch loss=1.193237 Batch=359, step=2760, lr=0.188500, batch loss=0.297696, epoch loss=1.490933 Batch=419, step=2820, lr=0.188250, batch loss=0.287193, epoch loss=1.778126 Batch=479, step=2880, lr=0.188250, batch loss=0.260689, epoch loss=2.038815 Batch=539, step=2940, lr=0.187750, batch loss=0.195980, epoch loss=2.234795 Batch=599, step=3000, lr=0.187750, batch loss=0.234326, epoch loss=2.469121 Batch=659, step=3060, lr=0.187500, batch loss=0.339226, epoch loss=2.808347 Batch=719, step=3120, lr=0.187000, batch loss=0.336251, epoch loss=3.144598 Batch=779, step=3180, lr=0.186750, batch loss=0.357376, epoch loss=3.501974 Batch=839, step=3240, lr=0.186750, batch loss=0.330103, epoch loss=3.832078 Batch=899, step=3300, lr=0.186500, batch loss=0.300421, epoch loss=4.132499 Batch=959, step=3360, lr=0.186000, batch loss=0.233441, epoch loss=4.365939 Batch=1019, step=3420, lr=0.185750, batch loss=0.341547, epoch loss=4.707486 Batch=1079, step=3480, lr=0.185750, batch loss=0.189294, epoch loss=4.896781 Batch=1139, step=3540, lr=0.185500, batch loss=0.230660, epoch loss=5.127441 Batch=1199, step=3600, lr=0.185250, batch loss=0.200578, epoch loss=5.328019 Epoch=2, step=3600, lr=0.185250, 
epoch loss=5.328019 Batch=59, step=3660, lr=0.184750, batch loss=0.235964, epoch loss=0.235964 Batch=119, step=3720, lr=0.184500, batch loss=0.196062, epoch loss=0.432026 Batch=179, step=3780, lr=0.184250, batch loss=0.212394, epoch loss=0.644421 Batch=239, step=3840, lr=0.184000, batch loss=0.317725, epoch loss=0.962146 Batch=299, step=3900, lr=0.184000, batch loss=0.210050, epoch loss=1.172195 Batch=359, step=3960, lr=0.183500, batch loss=0.286499, epoch loss=1.458695 Batch=419, step=4020, lr=0.183250, batch loss=0.279238, epoch loss=1.737932 Batch=479, step=4080, lr=0.183000, batch loss=0.254909, epoch loss=1.992841 Batch=539, step=4140, lr=0.182750, batch loss=0.201595, epoch loss=2.194437 Batch=599, step=4200, lr=0.182500, batch loss=0.244921, epoch loss=2.439358 Batch=659, step=4260, lr=0.182250, batch loss=0.327467, epoch loss=2.766825 Batch=719, step=4320, lr=0.182000, batch loss=0.325390, epoch loss=3.092215 Batch=779, step=4380, lr=0.181750, batch loss=0.346678, epoch loss=3.438893 Batch=839, step=4440, lr=0.181500, batch loss=0.318746, epoch loss=3.757639 Batch=899, step=4500, lr=0.181250, batch loss=0.293147, epoch loss=4.050785 Batch=959, step=4560, lr=0.181250, batch loss=0.242819, epoch loss=4.293605 Batch=1019, step=4620, lr=0.180750, batch loss=0.339933, epoch loss=4.633537 Batch=1079, step=4680, lr=0.180750, batch loss=0.199506, epoch loss=4.833043 Batch=1139, step=4740, lr=0.180250, batch loss=0.231763, epoch loss=5.064806 Batch=1199, step=4800, lr=0.180000, batch loss=0.191070, epoch loss=5.255876 Epoch=3, step=4800, lr=0.180000, epoch loss=5.255876 Batch=59, step=4860, lr=0.179750, batch loss=0.223980, epoch loss=0.223980 Batch=119, step=4920, lr=0.179500, batch loss=0.187315, epoch loss=0.411295 Batch=179, step=4980, lr=0.179500, batch loss=0.205423, epoch loss=0.616718 Batch=239, step=5040, lr=0.179000, batch loss=0.308397, epoch loss=0.925115 Batch=299, step=5100, lr=0.178750, batch loss=0.206693, epoch loss=1.131808 Batch=359, step=5160, lr=0.178500, batch loss=0.270458, epoch loss=1.402266 Batch=419, step=5220, lr=0.178250, batch loss=0.264910, epoch loss=1.667177 Batch=479, step=5280, lr=0.178250, batch loss=0.243174, epoch loss=1.910350 Batch=539, step=5340, lr=0.177750, batch loss=0.192235, epoch loss=2.102585 Batch=599, step=5400, lr=0.177500, batch loss=0.227641, epoch loss=2.330227 Batch=659, step=5460, lr=0.177250, batch loss=0.324175, epoch loss=2.654402 Batch=719, step=5520, lr=0.177000, batch loss=0.332987, epoch loss=2.987389 Batch=779, step=5580, lr=0.176750, batch loss=0.342995, epoch loss=3.330384 Batch=839, step=5640, lr=0.176750, batch loss=0.309447, epoch loss=3.639831 Batch=899, step=5700, lr=0.176250, batch loss=0.271525, epoch loss=3.911357 Batch=959, step=5760, lr=0.176000, batch loss=0.218161, epoch loss=4.129518 Batch=1019, step=5820, lr=0.175750, batch loss=0.335871, epoch loss=4.465389 Batch=1079, step=5880, lr=0.175750, batch loss=0.193997, epoch loss=4.659386 Batch=1139, step=5940, lr=0.175500, batch loss=0.223659, epoch loss=4.883045 Batch=1199, step=6000, lr=0.175000, batch loss=0.188540, epoch loss=5.071585 Epoch=4, step=6000, lr=0.175000, epoch loss=5.071585 Batch=59, step=6060, lr=0.174750, batch loss=0.236530, epoch loss=0.236530 Batch=119, step=6120, lr=0.174500, batch loss=0.189096, epoch loss=0.425626 Batch=179, step=6180, lr=0.174250, batch loss=0.200941, epoch loss=0.626567 Batch=239, step=6240, lr=0.174000, batch loss=0.299858, epoch loss=0.926426 Batch=299, step=6300, lr=0.174000, batch loss=0.207228, epoch loss=1.133654 
Batch=359, step=6360, lr=0.173500, batch loss=0.267120, epoch loss=1.400773 Batch=419, step=6420, lr=0.173250, batch loss=0.266752, epoch loss=1.667525 Batch=479, step=6480, lr=0.173000, batch loss=0.242105, epoch loss=1.909630 Batch=539, step=6540, lr=0.173000, batch loss=0.195067, epoch loss=2.104697 Batch=599, step=6600, lr=0.172500, batch loss=0.233500, epoch loss=2.338197 Batch=659, step=6660, lr=0.172250, batch loss=0.313232, epoch loss=2.651429 Batch=719, step=6720, lr=0.172000, batch loss=0.318771, epoch loss=2.970200 Batch=779, step=6780, lr=0.171750, batch loss=0.335123, epoch loss=3.305323 Batch=839, step=6840, lr=0.171500, batch loss=0.303606, epoch loss=3.608929 Batch=899, step=6900, lr=0.171500, batch loss=0.268206, epoch loss=3.877135 Batch=959, step=6960, lr=0.171000, batch loss=0.210873, epoch loss=4.088008 Batch=1019, step=7020, lr=0.170750, batch loss=0.327734, epoch loss=4.415742 Batch=1079, step=7080, lr=0.170500, batch loss=0.181695, epoch loss=4.597437 Batch=1139, step=7140, lr=0.170250, batch loss=0.216123, epoch loss=4.813560 Batch=1199, step=7200, lr=0.170000, batch loss=0.182240, epoch loss=4.995800 Epoch=5, step=7200, lr=0.170000, epoch loss=4.995800 Batch=59, step=7260, lr=0.169750, batch loss=0.233884, epoch loss=0.233884 Batch=119, step=7320, lr=0.169500, batch loss=0.182933, epoch loss=0.416817 Batch=179, step=7380, lr=0.169250, batch loss=0.195862, epoch loss=0.612679 Batch=239, step=7440, lr=0.169000, batch loss=0.291629, epoch loss=0.904308 Batch=299, step=7500, lr=0.168750, batch loss=0.199714, epoch loss=1.104022 Batch=359, step=7560, lr=0.168500, batch loss=0.260260, epoch loss=1.364282 Batch=419, step=7620, lr=0.168250, batch loss=0.256353, epoch loss=1.620635 Batch=479, step=7680, lr=0.168000, batch loss=0.238322, epoch loss=1.858957 Batch=539, step=7740, lr=0.167750, batch loss=0.187159, epoch loss=2.046117 Batch=599, step=7800, lr=0.167500, batch loss=0.224207, epoch loss=2.270323 Batch=659, step=7860, lr=0.167500, batch loss=0.306305, epoch loss=2.576628 Batch=719, step=7920, lr=0.167000, batch loss=0.308847, epoch loss=2.885475 Batch=779, step=7980, lr=0.166750, batch loss=0.321607, epoch loss=3.207082 Batch=839, step=8040, lr=0.166750, batch loss=0.297873, epoch loss=3.504955 Batch=899, step=8100, lr=0.166250, batch loss=0.259846, epoch loss=3.764801 Batch=959, step=8160, lr=0.166000, batch loss=0.207940, epoch loss=3.972741 Batch=1019, step=8220, lr=0.165750, batch loss=0.315268, epoch loss=4.288009 Batch=1079, step=8280, lr=0.165500, batch loss=0.170715, epoch loss=4.458724 Batch=1139, step=8340, lr=0.165250, batch loss=0.207501, epoch loss=4.666225 Batch=1199, step=8400, lr=0.165000, batch loss=0.179408, epoch loss=4.845633 Epoch=6, step=8400, lr=0.165000, epoch loss=4.845633 Batch=59, step=8460, lr=0.165000, batch loss=0.229280, epoch loss=0.229280 Batch=119, step=8520, lr=0.164500, batch loss=0.175235, epoch loss=0.404516 Batch=179, step=8580, lr=0.164250, batch loss=0.188860, epoch loss=0.593376 Batch=239, step=8640, lr=0.164000, batch loss=0.279597, epoch loss=0.872973 Batch=299, step=8700, lr=0.163750, batch loss=0.190142, epoch loss=1.063115 Batch=359, step=8760, lr=0.163750, batch loss=0.245250, epoch loss=1.308364 Batch=419, step=8820, lr=0.163500, batch loss=0.240181, epoch loss=1.548545 Batch=479, step=8880, lr=0.163250, batch loss=0.215178, epoch loss=1.763722 Batch=539, step=8940, lr=0.162750, batch loss=0.178824, epoch loss=1.942547 Batch=599, step=9000, lr=0.162500, batch loss=0.220014, epoch loss=2.162560 Batch=659, step=9060, 
lr=0.162250, batch loss=0.294158, epoch loss=2.456719 Batch=719, step=9120, lr=0.162000, batch loss=0.295533, epoch loss=2.752252 Batch=779, step=9180, lr=0.161750, batch loss=0.315538, epoch loss=3.067790 Batch=839, step=9240, lr=0.161500, batch loss=0.287761, epoch loss=3.355552 Batch=899, step=9300, lr=0.161250, batch loss=0.251729, epoch loss=3.607281 Batch=959, step=9360, lr=0.161000, batch loss=0.189697, epoch loss=3.796978 Batch=1019, step=9420, lr=0.160750, batch loss=0.316864, epoch loss=4.113842 Batch=1079, step=9480, lr=0.160750, batch loss=0.202092, epoch loss=4.315934 Batch=1139, step=9540, lr=0.160250, batch loss=0.212090, epoch loss=4.528023 Batch=1199, step=9600, lr=0.160000, batch loss=0.168885, epoch loss=4.696908 Epoch=7, step=9600, lr=0.160000, epoch loss=4.696908 Batch=59, step=9660, lr=0.159750, batch loss=0.194133, epoch loss=0.194133 Batch=119, step=9720, lr=0.159500, batch loss=0.163482, epoch loss=0.357615 Batch=179, step=9780, lr=0.159250, batch loss=0.178850, epoch loss=0.536465 Batch=239, step=9840, lr=0.159000, batch loss=0.263664, epoch loss=0.800128 Batch=299, step=9900, lr=0.158750, batch loss=0.182203, epoch loss=0.982331 Batch=359, step=9960, lr=0.158500, batch loss=0.235058, epoch loss=1.217389 Batch=419, step=10020, lr=0.158250, batch loss=0.232682, epoch loss=1.450071 Batch=479, step=10080, lr=0.158000, batch loss=0.214510, epoch loss=1.664581 Batch=539, step=10140, lr=0.157750, batch loss=0.172208, epoch loss=1.836788 Batch=599, step=10200, lr=0.157500, batch loss=0.202671, epoch loss=2.039459 Batch=659, step=10260, lr=0.157250, batch loss=0.282383, epoch loss=2.321842 Batch=719, step=10320, lr=0.157000, batch loss=0.278905, epoch loss=2.600747 Batch=779, step=10380, lr=0.156750, batch loss=0.300522, epoch loss=2.901269 Batch=839, step=10440, lr=0.156500, batch loss=0.272175, epoch loss=3.173443 Batch=899, step=10500, lr=0.156250, batch loss=0.240708, epoch loss=3.414152 Batch=959, step=10560, lr=0.156000, batch loss=0.198536, epoch loss=3.612688 Batch=1019, step=10620, lr=0.155750, batch loss=0.283231, epoch loss=3.895919 Batch=1079, step=10680, lr=0.155500, batch loss=0.183609, epoch loss=4.079529 Batch=1139, step=10740, lr=0.155250, batch loss=0.198123, epoch loss=4.277652 Batch=1199, step=10800, lr=0.155000, batch loss=0.157063, epoch loss=4.434715 Epoch=8, step=10800, lr=0.155000, epoch loss=4.434715 Batch=59, step=10860, lr=0.154750, batch loss=0.186775, epoch loss=0.186775 Batch=119, step=10920, lr=0.154500, batch loss=0.153678, epoch loss=0.340454 Batch=179, step=10980, lr=0.154500, batch loss=0.167507, epoch loss=0.507960 Batch=239, step=11040, lr=0.154250, batch loss=0.244555, epoch loss=0.752515 Batch=299, step=11100, lr=0.153750, batch loss=0.170120, epoch loss=0.922635 Batch=359, step=11160, lr=0.153500, batch loss=0.222842, epoch loss=1.145477 Batch=419, step=11220, lr=0.153250, batch loss=0.218442, epoch loss=1.363920 Batch=479, step=11280, lr=0.153000, batch loss=0.212944, epoch loss=1.576863 Batch=539, step=11340, lr=0.152750, batch loss=0.170860, epoch loss=1.747723 Batch=599, step=11400, lr=0.152500, batch loss=0.176247, epoch loss=1.923970 Batch=659, step=11460, lr=0.152250, batch loss=0.263241, epoch loss=2.187210 Batch=719, step=11520, lr=0.152000, batch loss=0.266851, epoch loss=2.454061 Batch=779, step=11580, lr=0.151750, batch loss=0.273609, epoch loss=2.727670 Batch=839, step=11640, lr=0.151500, batch loss=0.253375, epoch loss=2.981045 Batch=899, step=11700, lr=0.151250, batch loss=0.220777, epoch loss=3.201821 Batch=959, 
step=11760, lr=0.151000, batch loss=0.185921, epoch loss=3.387743 Batch=1019, step=11820, lr=0.150750, batch loss=0.277284, epoch loss=3.665026 Batch=1079, step=11880, lr=0.150500, batch loss=0.150606, epoch loss=3.815632 Batch=1139, step=11940, lr=0.150250, batch loss=0.180693, epoch loss=3.996325 Batch=1199, step=12000, lr=0.150000, batch loss=0.142314, epoch loss=4.138639 Epoch=9, step=12000, lr=0.150000, epoch loss=4.138639 Batch=59, step=12060, lr=0.149750, batch loss=0.164234, epoch loss=0.164234 Batch=119, step=12120, lr=0.149500, batch loss=0.137563, epoch loss=0.301796 Batch=179, step=12180, lr=0.149250, batch loss=0.152931, epoch loss=0.454727 Batch=239, step=12240, lr=0.149250, batch loss=0.222375, epoch loss=0.677102 Batch=299, step=12300, lr=0.149000, batch loss=0.145245, epoch loss=0.822347 Batch=359, step=12360, lr=0.148500, batch loss=0.199530, epoch loss=1.021878 Batch=419, step=12420, lr=0.148250, batch loss=0.208368, epoch loss=1.230246 Batch=479, step=12480, lr=0.148000, batch loss=0.182142, epoch loss=1.412388 Batch=539, step=12540, lr=0.147750, batch loss=0.144448, epoch loss=1.556836 Batch=599, step=12600, lr=0.147500, batch loss=0.152216, epoch loss=1.709051 Batch=659, step=12660, lr=0.147250, batch loss=0.229232, epoch loss=1.938283 Batch=719, step=12720, lr=0.147000, batch loss=0.242386, epoch loss=2.180670 Batch=779, step=12780, lr=0.147000, batch loss=0.265151, epoch loss=2.445821 Batch=839, step=12840, lr=0.146500, batch loss=0.238969, epoch loss=2.684790 Batch=899, step=12900, lr=0.146250, batch loss=0.216101, epoch loss=2.900890 Batch=959, step=12960, lr=0.146000, batch loss=0.152885, epoch loss=3.053775 Batch=1019, step=13020, lr=0.146000, batch loss=0.269327, epoch loss=3.323102 Batch=1079, step=13080, lr=0.145500, batch loss=0.111985, epoch loss=3.435088 Batch=1139, step=13140, lr=0.145250, batch loss=0.153253, epoch loss=3.588341 Batch=1199, step=13200, lr=0.145000, batch loss=0.120892, epoch loss=3.709233 Epoch=10, step=13200, lr=0.145000, epoch loss=3.709233 Batch=59, step=13260, lr=0.144750, batch loss=0.142018, epoch loss=0.142018 Batch=119, step=13320, lr=0.144500, batch loss=0.118825, epoch loss=0.260843 Batch=179, step=13380, lr=0.144250, batch loss=0.131243, epoch loss=0.392087 Batch=239, step=13440, lr=0.144000, batch loss=0.188956, epoch loss=0.581042 Batch=299, step=13500, lr=0.143750, batch loss=0.123778, epoch loss=0.704821 Batch=359, step=13560, lr=0.143500, batch loss=0.165761, epoch loss=0.870582 Batch=419, step=13620, lr=0.143250, batch loss=0.164685, epoch loss=1.035267 Batch=479, step=13680, lr=0.143000, batch loss=0.150197, epoch loss=1.185463 Batch=539, step=13740, lr=0.142750, batch loss=0.122302, epoch loss=1.307765 Batch=599, step=13800, lr=0.142750, batch loss=0.124427, epoch loss=1.432191 Batch=659, step=13860, lr=0.142500, batch loss=0.181932, epoch loss=1.614124 Batch=719, step=13920, lr=0.142250, batch loss=0.183522, epoch loss=1.797645 Batch=779, step=13980, lr=0.141750, batch loss=0.202303, epoch loss=1.999948 Batch=839, step=14040, lr=0.141500, batch loss=0.189492, epoch loss=2.189440 Batch=899, step=14100, lr=0.141500, batch loss=0.159331, epoch loss=2.348771 Batch=959, step=14160, lr=0.141000, batch loss=0.144201, epoch loss=2.492972 Batch=1019, step=14220, lr=0.140750, batch loss=0.301477, epoch loss=2.794449 Batch=1079, step=14280, lr=0.140500, batch loss=0.085744, epoch loss=2.880194 Batch=1139, step=14340, lr=0.140250, batch loss=0.122491, epoch loss=3.002685 Batch=1199, step=14400, lr=0.140000, batch loss=0.093353, 
epoch loss=3.096038 Epoch=11, step=14400, lr=0.140000, epoch loss=3.096038 Batch=59, step=14460, lr=0.139750, batch loss=0.109722, epoch loss=0.109722 Batch=119, step=14520, lr=0.139500, batch loss=0.101393, epoch loss=0.211115 Batch=179, step=14580, lr=0.139250, batch loss=0.105947, epoch loss=0.317062 Batch=239, step=14640, lr=0.139250, batch loss=0.148386, epoch loss=0.465448 Batch=299, step=14700, lr=0.138750, batch loss=0.083826, epoch loss=0.549274 Batch=359, step=14760, lr=0.138500, batch loss=0.123954, epoch loss=0.673228 Batch=419, step=14820, lr=0.138250, batch loss=0.127463, epoch loss=0.800691 Batch=479, step=14880, lr=0.138000, batch loss=0.113164, epoch loss=0.913855 Batch=539, step=14940, lr=0.137750, batch loss=0.107246, epoch loss=1.021101 Batch=599, step=15000, lr=0.137750, batch loss=0.089435, epoch loss=1.110536 Batch=659, step=15060, lr=0.137500, batch loss=0.132703, epoch loss=1.243240 Batch=719, step=15120, lr=0.137000, batch loss=0.135205, epoch loss=1.378444 Batch=779, step=15180, lr=0.136750, batch loss=0.149511, epoch loss=1.527956 Batch=839, step=15240, lr=0.136500, batch loss=0.162995, epoch loss=1.690951 Batch=899, step=15300, lr=0.136250, batch loss=0.243076, epoch loss=1.934026 Batch=959, step=15360, lr=0.136000, batch loss=0.063948, epoch loss=1.997975 Batch=1019, step=15420, lr=0.135750, batch loss=0.165823, epoch loss=2.163798 Batch=1079, step=15480, lr=0.135750, batch loss=0.051547, epoch loss=2.215344 Batch=1139, step=15540, lr=0.135250, batch loss=0.112053, epoch loss=2.327398 Batch=1199, step=15600, lr=0.135250, batch loss=0.063395, epoch loss=2.390793 Epoch=12, step=15600, lr=0.135250, epoch loss=2.390793 Batch=59, step=15660, lr=0.134750, batch loss=0.089594, epoch loss=0.089594 Batch=119, step=15720, lr=0.134500, batch loss=0.144308, epoch loss=0.233902 Batch=179, step=15780, lr=0.134250, batch loss=0.104054, epoch loss=0.337957 Batch=239, step=15840, lr=0.134000, batch loss=0.105169, epoch loss=0.443126 Batch=299, step=15900, lr=0.133750, batch loss=0.046274, epoch loss=0.489401 Batch=359, step=15960, lr=0.133500, batch loss=0.083343, epoch loss=0.572743 Batch=419, step=16020, lr=0.133250, batch loss=0.085637, epoch loss=0.658380 Batch=479, step=16080, lr=0.133250, batch loss=0.065771, epoch loss=0.724151 Batch=539, step=16140, lr=0.132750, batch loss=0.062460, epoch loss=0.786611 Batch=599, step=16200, lr=0.132500, batch loss=0.134273, epoch loss=0.920884 Batch=659, step=16260, lr=0.132250, batch loss=0.087885, epoch loss=1.008769 Batch=719, step=16320, lr=0.132000, batch loss=0.103755, epoch loss=1.112523 Batch=779, step=16380, lr=0.131750, batch loss=0.185083, epoch loss=1.297606 Batch=839, step=16440, lr=0.131500, batch loss=0.098131, epoch loss=1.395737 Batch=899, step=16500, lr=0.131500, batch loss=0.100996, epoch loss=1.496733 Batch=959, step=16560, lr=0.131250, batch loss=0.057568, epoch loss=1.554301 Batch=1019, step=16620, lr=0.131000, batch loss=0.146722, epoch loss=1.701022 Batch=1079, step=16680, lr=0.130750, batch loss=0.023113, epoch loss=1.724136 Batch=1139, step=16740, lr=0.130500, batch loss=0.050941, epoch loss=1.775077 Batch=1199, step=16800, lr=0.130250, batch loss=0.029536, epoch loss=1.804613 Epoch=13, step=16800, lr=0.130250, epoch loss=1.804613 Batch=59, step=16860, lr=0.130000, batch loss=0.046137, epoch loss=0.046137 Batch=119, step=16920, lr=0.129750, batch loss=0.084242, epoch loss=0.130379 Batch=179, step=16980, lr=0.129500, batch loss=0.060850, epoch loss=0.191229 Batch=239, step=17040, lr=0.129250, batch 
loss=0.063251, epoch loss=0.254480 Batch=299, step=17100, lr=0.129000, batch loss=0.019799, epoch loss=0.274279 Batch=359, step=17160, lr=0.128750, batch loss=0.050118, epoch loss=0.324397 Batch=419, step=17220, lr=0.128500, batch loss=0.050702, epoch loss=0.375099 Batch=479, step=17280, lr=0.128000, batch loss=0.031603, epoch loss=0.406702 Batch=539, step=17340, lr=0.127750, batch loss=0.053631, epoch loss=0.460332 Batch=599, step=17400, lr=0.127750, batch loss=0.035761, epoch loss=0.496093 Batch=659, step=17460, lr=0.127250, batch loss=0.050483, epoch loss=0.546577 Batch=719, step=17520, lr=0.127250, batch loss=0.040311, epoch loss=0.586888 Batch=779, step=17580, lr=0.127000, batch loss=0.040942, epoch loss=0.627829 Batch=839, step=17640, lr=0.126750, batch loss=0.053997, epoch loss=0.681826 Batch=899, step=17700, lr=0.126500, batch loss=0.049456, epoch loss=0.731282 Batch=959, step=17760, lr=0.126250, batch loss=0.040728, epoch loss=0.772010 Batch=1019, step=17820, lr=0.125750, batch loss=0.119458, epoch loss=0.891468 Batch=1079, step=17880, lr=0.125750, batch loss=0.031589, epoch loss=0.923057 Batch=1139, step=17940, lr=0.125500, batch loss=0.058167, epoch loss=0.981224 Batch=1199, step=18000, lr=0.125000, batch loss=0.020611, epoch loss=1.001834 Epoch=14, step=18000, lr=0.125000, epoch loss=1.001834 Batch=59, step=18060, lr=0.124750, batch loss=0.013881, epoch loss=0.013881 Batch=119, step=18120, lr=0.124500, batch loss=0.021686, epoch loss=0.035567 Batch=179, step=18180, lr=0.124250, batch loss=0.032137, epoch loss=0.067704 Batch=239, step=18240, lr=0.124250, batch loss=0.042238, epoch loss=0.109942 Batch=299, step=18300, lr=0.124000, batch loss=0.021698, epoch loss=0.131639 Batch=359, step=18360, lr=0.123500, batch loss=0.025473, epoch loss=0.157113 Batch=419, step=18420, lr=0.123250, batch loss=0.029352, epoch loss=0.186464 Batch=479, step=18480, lr=0.123250, batch loss=0.016162, epoch loss=0.202626 Batch=539, step=18540, lr=0.122750, batch loss=0.021225, epoch loss=0.223851 Batch=599, step=18600, lr=0.122750, batch loss=0.028565, epoch loss=0.252416 Batch=659, step=18660, lr=0.122250, batch loss=0.030099, epoch loss=0.282516 Batch=719, step=18720, lr=0.122250, batch loss=0.058029, epoch loss=0.340545 Batch=779, step=18780, lr=0.122000, batch loss=0.046080, epoch loss=0.386625 Batch=839, step=18840, lr=0.121750, batch loss=0.070297, epoch loss=0.456922 Batch=899, step=18900, lr=0.121250, batch loss=0.037813, epoch loss=0.494735 Batch=959, step=18960, lr=0.121250, batch loss=0.012733, epoch loss=0.507468 Batch=1019, step=19020, lr=0.120750, batch loss=0.020500, epoch loss=0.527968 Batch=1079, step=19080, lr=0.120750, batch loss=0.018781, epoch loss=0.546749 Batch=1139, step=19140, lr=0.120250, batch loss=0.036476, epoch loss=0.583225 Batch=1199, step=19200, lr=0.120250, batch loss=0.014979, epoch loss=0.598203 Epoch=15, step=19200, lr=0.120250, epoch loss=0.598203 Batch=59, step=19260, lr=0.119750, batch loss=0.005199, epoch loss=0.005199 Batch=119, step=19320, lr=0.119500, batch loss=0.020660, epoch loss=0.025859 Batch=179, step=19380, lr=0.119250, batch loss=0.064466, epoch loss=0.090325 Batch=239, step=19440, lr=0.119250, batch loss=0.025786, epoch loss=0.116111 Batch=299, step=19500, lr=0.119000, batch loss=0.018584, epoch loss=0.134695 Batch=359, step=19560, lr=0.118750, batch loss=0.034461, epoch loss=0.169157 Batch=419, step=19620, lr=0.118500, batch loss=0.021662, epoch loss=0.190819 Batch=479, step=19680, lr=0.118000, batch loss=0.008629, epoch loss=0.199448 Batch=539, 
step=19740, lr=0.117750, batch loss=0.018480, epoch loss=0.217929 Batch=599, step=19800, lr=0.117500, batch loss=0.024225, epoch loss=0.242154 Batch=659, step=19860, lr=0.117250, batch loss=0.020990, epoch loss=0.263144 Batch=719, step=19920, lr=0.117250, batch loss=0.049913, epoch loss=0.313057 Batch=779, step=19980, lr=0.116750, batch loss=0.079525, epoch loss=0.392583 Batch=839, step=20040, lr=0.116750, batch loss=0.032081, epoch loss=0.424664 Batch=899, step=20100, lr=0.116250, batch loss=0.030718, epoch loss=0.455382 Batch=959, step=20160, lr=0.116000, batch loss=0.015542, epoch loss=0.470924 Batch=1019, step=20220, lr=0.115750, batch loss=0.020687, epoch loss=0.491611 Batch=1079, step=20280, lr=0.115750, batch loss=0.003646, epoch loss=0.495256 Batch=1139, step=20340, lr=0.115500, batch loss=0.016883, epoch loss=0.512139 Batch=1199, step=20400, lr=0.115000, batch loss=0.006613, epoch loss=0.518752 Epoch=16, step=20400, lr=0.115000, epoch loss=0.518752 Batch=59, step=20460, lr=0.115000, batch loss=0.003294, epoch loss=0.003294 Batch=119, step=20520, lr=0.114500, batch loss=0.010459, epoch loss=0.013753 Batch=179, step=20580, lr=0.114500, batch loss=0.020682, epoch loss=0.034435 Batch=239, step=20640, lr=0.114250, batch loss=0.020824, epoch loss=0.055259 Batch=299, step=20700, lr=0.113750, batch loss=0.009501, epoch loss=0.064759 Batch=359, step=20760, lr=0.113750, batch loss=0.013289, epoch loss=0.078049 Batch=419, step=20820, lr=0.113250, batch loss=0.015952, epoch loss=0.094001 Batch=479, step=20880, lr=0.113000, batch loss=0.003427, epoch loss=0.097428 Batch=539, step=20940, lr=0.113000, batch loss=0.016805, epoch loss=0.114233 Batch=599, step=21000, lr=0.112500, batch loss=0.019952, epoch loss=0.134185 Batch=659, step=21060, lr=0.112250, batch loss=0.014935, epoch loss=0.149120 Batch=719, step=21120, lr=0.112250, batch loss=0.042707, epoch loss=0.191827 Batch=779, step=21180, lr=0.112000, batch loss=0.074879, epoch loss=0.266706 Batch=839, step=21240, lr=0.111500, batch loss=0.026861, epoch loss=0.293567 Batch=899, step=21300, lr=0.111500, batch loss=0.034855, epoch loss=0.328422 Batch=959, step=21360, lr=0.111250, batch loss=0.010065, epoch loss=0.338487 Batch=1019, step=21420, lr=0.111000, batch loss=0.010734, epoch loss=0.349221 Batch=1079, step=21480, lr=0.110750, batch loss=0.001517, epoch loss=0.350738 Batch=1139, step=21540, lr=0.110500, batch loss=0.012996, epoch loss=0.363734 Batch=1199, step=21600, lr=0.110000, batch loss=0.005230, epoch loss=0.368963 Epoch=17, step=21600, lr=0.110000, epoch loss=0.368963 Batch=59, step=21660, lr=0.110000, batch loss=0.002088, epoch loss=0.002088 Batch=119, step=21720, lr=0.109750, batch loss=0.006867, epoch loss=0.008955 Batch=179, step=21780, lr=0.109500, batch loss=0.013117, epoch loss=0.022073 Batch=239, step=21840, lr=0.109250, batch loss=0.012739, epoch loss=0.034812 Batch=299, step=21900, lr=0.108750, batch loss=0.002594, epoch loss=0.037406 Batch=359, step=21960, lr=0.108750, batch loss=0.012557, epoch loss=0.049964 Batch=419, step=22020, lr=0.108250, batch loss=0.011861, epoch loss=0.061825 Batch=479, step=22080, lr=0.108250, batch loss=0.002688, epoch loss=0.064513 Batch=539, step=22140, lr=0.108000, batch loss=0.017626, epoch loss=0.082139 Batch=599, step=22200, lr=0.107750, batch loss=0.017462, epoch loss=0.099601 Batch=659, step=22260, lr=0.107500, batch loss=0.015979, epoch loss=0.115580 Batch=719, step=22320, lr=0.107000, batch loss=0.024892, epoch loss=0.140472 Batch=779, step=22380, lr=0.106750, batch loss=0.036045, 
Batch=839, step=22440, lr=0.106500, batch loss=0.025773, epoch loss=0.202291
Batch=899, step=22500, lr=0.106500, batch loss=0.028660, epoch loss=0.230951
Batch=959, step=22560, lr=0.106250, batch loss=0.009104, epoch loss=0.240055
Batch=1019, step=22620, lr=0.106000, batch loss=0.010177, epoch loss=0.250231
Batch=1079, step=22680, lr=0.105750, batch loss=0.000798, epoch loss=0.251029
Batch=1139, step=22740, lr=0.105250, batch loss=0.011671, epoch loss=0.262700
Batch=1199, step=22800, lr=0.105250, batch loss=0.004968, epoch loss=0.267668
Epoch=18, step=22800, lr=0.105250, epoch loss=0.267668
Batch=59, step=22860, lr=0.105000, batch loss=0.001944, epoch loss=0.001944
Batch=119, step=22920, lr=0.104750, batch loss=0.005685, epoch loss=0.007629
Batch=179, step=22980, lr=0.104500, batch loss=0.011091, epoch loss=0.018720
Batch=239, step=23040, lr=0.104250, batch loss=0.009560, epoch loss=0.028280
Batch=299, step=23100, lr=0.103750, batch loss=0.008083, epoch loss=0.036363
Batch=359, step=23160, lr=0.103750, batch loss=0.011271, epoch loss=0.047634
Batch=419, step=23220, lr=0.103500, batch loss=0.010726, epoch loss=0.058360
Batch=479, step=23280, lr=0.103250, batch loss=0.002556, epoch loss=0.060916
Batch=539, step=23340, lr=0.103000, batch loss=0.016807, epoch loss=0.077723
Batch=599, step=23400, lr=0.102500, batch loss=0.014148, epoch loss=0.091871
Batch=659, step=23460, lr=0.102250, batch loss=0.012300, epoch loss=0.104171
Batch=719, step=23520, lr=0.102000, batch loss=0.015976, epoch loss=0.120147
Batch=779, step=23580, lr=0.101750, batch loss=0.019621, epoch loss=0.139767
Batch=839, step=23640, lr=0.101750, batch loss=0.027221, epoch loss=0.166988
Batch=899, step=23700, lr=0.101250, batch loss=0.021733, epoch loss=0.188721
Batch=959, step=23760, lr=0.101000, batch loss=0.009442, epoch loss=0.198162
Batch=1019, step=23820, lr=0.100750, batch loss=0.007592, epoch loss=0.205755
Batch=1079, step=23880, lr=0.100500, batch loss=0.001638, epoch loss=0.207393
Batch=1139, step=23940, lr=0.100500, batch loss=0.008477, epoch loss=0.215870
Batch=1199, step=24000, lr=0.100250, batch loss=0.004840, epoch loss=0.220710
Epoch=19, step=24000, lr=0.100250, epoch loss=0.220710
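The run above reports a learning rate that decays roughly linearly, by about 0.005 per 1200-step epoch (0.125000 at step 18000 down to 0.100250 at step 24000), always a multiple of 0.00025, while the epoch loss resets at each epoch boundary and accumulates the reported batch losses. Below is a minimal OCaml sketch of that bookkeeping, assuming a base rate of 0.2 and a 48000-step decay horizon; these constants are inferred from the printed values, not taken from the ocannl sources.

(* Sketch (not ocannl code) of the logging arithmetic visible above: a
   quantized, roughly linear learning-rate decay plus a per-epoch running
   sum of the reported batch losses. *)

let base_lr = 0.2        (* assumed initial learning rate *)
let total_steps = 48_000 (* assumed decay horizon *)
let quantum = 0.00025    (* lr values in the log are multiples of this *)

let lr_at step =
  let raw = base_lr *. (1.0 -. float_of_int step /. float_of_int total_steps) in
  (* round to the nearest multiple of the quantum, as the log suggests *)
  Float.round (raw /. quantum) *. quantum

let () =
  (* the epoch loss resets per epoch and sums the reported batch losses *)
  let epoch_loss = ref 0.0 in
  let report ~batch ~step ~batch_loss =
    epoch_loss := !epoch_loss +. batch_loss;
    Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
      batch step (lr_at step) batch_loss !epoch_loss
  in
  report ~batch:59 ~step:18060 ~batch_loss:0.013881;
  report ~batch:119 ~step:18120 ~batch_loss:0.021686

With these assumed constants, the two sample calls reproduce the learning-rate and epoch-loss figures printed at steps 18060 and 18120 above.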
Half-moons scatterplot and decision boundary:
[ASCII plot: '#' and '%' mark the training points of the two half-moon classes; '*' and '.' mark the regions the trained model assigns to each class, outlining the learned decision boundary.]
2025-03-21 14:56.16 ---> saved as "785aa1a636a11a2e25637511f23b09c5c15d92239d48c77e7b579adbe001cba4"
Job succeeded
2025-03-21 14:56.17: Job succeeded
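The scatterplot rendered above shows the classic "half-moons" toy dataset: two interleaved crescent-shaped point clouds, one per class, with the trained network's predicted regions shaded around them. For readers unfamiliar with the dataset, the following standalone OCaml sketch gives the standard two-moons construction; the point count, noise level, and the Box-Muller noise generator are illustrative choices, not values or code taken from the test.

(* Sketch of the textbook half-moons construction: two interleaved
   crescents with Gaussian noise. *)

let pi = 4.0 *. atan 1.0

(* Box-Muller transform: one standard-normal sample from two uniforms. *)
let gaussian () =
  let u1 = max (Random.float 1.0) 1e-12 and u2 = Random.float 1.0 in
  sqrt (-2.0 *. log u1) *. cos (2.0 *. pi *. u2)

(* Returns 2*n points as (x, y, label): label 0 is the upper crescent,
   label 1 the lower one, shifted so the two interleave. *)
let half_moons ?(noise = 0.1) n =
  List.init (2 * n) (fun i ->
      let label = i mod 2 in
      let t = pi *. Random.float 1.0 in
      let x, y =
        if label = 0 then (cos t, sin t) else (1.0 -. cos t, 0.5 -. sin t)
      in
      (x +. noise *. gaussian (), y +. noise *. gaussian (), label))

let () =
  Random.self_init ();
  half_moons 5
  |> List.iter (fun (x, y, l) -> Printf.printf "%+.3f %+.3f  class=%d\n" x y l)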