2025-05-16 15:19.32: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (f1ca041a84d08f2a843f9fb80ab2c50a29a157f7) (linux-x86_64:fedora-42-5.3_opam-2.3)
Base: ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373

Opam project build

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard f1ca041a
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373
# fedora-42-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo dnf install -y findutils
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 || git fetch origin master) && git reset -q --hard 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK

2025-05-16 15:19.32: Using cache hint "ahrefs/ocannl-ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373-fedora-42-5.3_opam-2.3-cdc9572ad54e4d4bf194acfcdfaa690c"

2025-05-16 15:19.32: Using OBuilder spec:
((from ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373)
 (comment fedora-42-5.3_opam-2.3)
 (user (uid 1000) (gid 1000))
 (env CLICOLOR_FORCE 1)
 (env OPAMCOLOR always)
 (workdir /src)
 (run (network host) (shell "sudo dnf install -y findutils"))
 (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))
 (run (shell "opam init --reinit -ni"))
 (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))
 (workdir /src)
 (run (shell "sudo chown opam /src"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 || git fetch origin master) && git reset -q --hard 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 && git log --no-decorate -n1 --oneline && opam update -u"))
 (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))
 (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \\nopam pin add -yn arrayjit.dev './'"))
 (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'"))
 (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")
 (env CI true)
 (env OCAMLCI true)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS"))
 (copy (src .)
(dst /src))(run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")))2025-05-16 15:19.32: Waiting for resource in pool OCluster2025-05-16 15:19.33: Waiting for worker…2025-05-16 15:19.34: Got resource from pool OClusterBuilding on laodoke.caelum.ci.devHEAD is now at 41a9d1e5 Unified logging for C_syntax, refactored by Gemini CUDA and Metal in-progress / broken: unfinished and potentially buggy.HEAD is now at f1ca041a Logging support for the Metal backend, by Gemini(from ocaml/opam:fedora-42-ocaml-5.3@sha256:340ef8413fe195bbf54fd669127e946ef9bf60ec9b789cfee1f165e261f69373)2025-05-16 15:20.28 ---> saved as "58e80f2943667cc892930b8f00145b341640b9631e46b0e990690977929d47d4"/: (comment fedora-42-5.3_opam-2.3)/: (user (uid 1000) (gid 1000))/: (env CLICOLOR_FORCE 1)/: (env OPAMCOLOR always)/: (workdir /src)/src: (run (network host)(shell "sudo dnf install -y findutils"))Updating and loading repositories:Fedora 42 - x86_64 - Updates 100% | 66.3 KiB/s | 23.8 KiB | 00m00sFedora 42 - x86_64 - Updates 100% | 2.3 MiB/s | 2.3 MiB | 00m01sRepositories loaded.Package "findutils-1:4.10.0-5.fc42.x86_64" is already installed.Nothing to do.2025-05-16 15:20.38 ---> saved as "5e75f4f90bede632f1905a7c2c6dd18b38094c39f21cf56cfd80e66f2a5ea1fb"/src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam"))2025-05-16 15:20.39 ---> saved as "99c48dd0c90841dd0557a3605ead22147b57247e14e3720a707113a3fd5cb79d"/src: (run (shell "opam init --reinit -ni"))Configuring from /home/opam/.opamrc and then from built-in defaults.Checking for available remotes: rsync and local, git.- you won't be able to use mercurial repositories unless you install the hg command on your system.- you won't be able to use darcs repositories unless you install the darcs command on your system.This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted.You may want to back it up before going further.Continue? 
[y/n] yFormat upgrade done.<><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><>[default] Initialised2025-05-16 15:21.48 ---> saved as "f466099dbad0be430d5d88b3056e1b5546981294ac309650866ef1379cd6d212"/src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version"))Linux 5.15.0-139-genericThe OCaml toplevel, version 5.3.02.3.02025-05-16 15:21.49 ---> saved as "9495bb6675cefd0fde9c3d3172875d2f69d43b4b48aa2c1fc3cbbdc7685c23a4"/src: (workdir /src)/src: (run (shell "sudo chown opam /src"))2025-05-16 15:21.49 ---> saved as "32bd88dfd4a72ae99f6d3d856e0c727304b5b119984d04b7c88a6f9d56b49fb7"/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "cd ~/opam-repository && (git cat-file -e 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 || git fetch origin master) && git reset -q --hard 997e4758ac95ae5ee2ee30125e6ba0dba68cebf0 && git log --no-decorate -n1 --oneline && opam update -u"))997e4758ac Merge pull request #27839 from public-release/opam-publish-base.v0.17.2<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>[default] synchronised from git+file:///home/opam/opam-repositoryEverything as up-to-date as possible (run with --verbose to show unavailable upgrades).However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.Nothing to do.# To update the current shell environment, run: eval $(opam env)2025-05-16 15:22.47 ---> saved as "258d56210f3e71e9897edd6a05b53422b4d8b3fc50c99e4e525a14464782f7cb"/src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./))2025-05-16 15:22.48 ---> saved as "488e452321e667b2c669dfc12a8ada5a6310bbee470acf388851e055b66a2173"/src: (run (network host)(shell "opam pin add -yn neural_nets_lib.dev './' && \\nopam pin add -yn arrayjit.dev './'"))[neural_nets_lib.dev] synchronised (file:///src)neural_nets_lib is now pinned to file:///src (version dev)[arrayjit.dev] synchronised (file:///src)arrayjit is now pinned to file:///src (version dev)2025-05-16 15:22.54 ---> saved as "8fa86840d1e6aa4b43a4e44d7dd54fd178009a5a8b2207e2f35552de65a7b30f"/src: (run (network host)(shell "echo '(lang dune 3.0)' > './dune-project'"))2025-05-16 15:22.55 ---> saved as "5a4511450e9e99a38c72c663d411bec8074130faebe05d7fce0da8e8c5c58f5f"/src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.2 
printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0")/src: (env CI true)/src: (env OCAMLCI true)/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS"))+ /usr/sbin/sudo "yum" "makecache"- Updating and loading repositories:- Repositories loaded.- Metadata cache created.<><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><>[arrayjit.dev] synchronised (file:///src)[neural_nets_lib.dev] synchronised (file:///src)[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).[NOTE] Package ocaml-config is already installed (current version is 3).[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml is already installed (current version is 5.3.0).[NOTE] Package base-unix is already installed (current version is base).[NOTE] Package base-threads is already installed (current version is base).[NOTE] Package base-nnp is already installed (current version is base).[NOTE] Package base-effects is already installed (current version is base).[NOTE] Package base-domains is already installed (current version is base).[NOTE] Package base-bigarray is already installed (current version is base).The following system packages will first need to be installed:libffi-devel<><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><>+ /usr/sbin/sudo "yum" "install" "-y" "libffi-devel"- Updating and loading repositories:- Repositories loaded.- Package Arch Version Repository Size- Installing:- libffi-devel x86_64 3.4.6-5.fc42 fedora 33.1 KiB-- Transaction Summary:- Installing: 1 package-- Total size of inbound packages is 29 KiB. 
Need to download 29 KiB.- After this operation, 33 KiB extra will be used (install 33 KiB, remove 0 B).- [1/1] libffi-devel-0:3.4.6-5.fc42.x86_6 100% | 239.8 KiB/s | 28.8 KiB | 00m00s- --------------------------------------------------------------------------------- [1/1] Total 100% | 94.1 KiB/s | 28.8 KiB | 00m00s- Running transaction- [1/3] Verify package files 100% | 1.0 KiB/s | 1.0 B | 00m00s- [2/3] Prepare transaction 100% | 20.0 B/s | 1.0 B | 00m00s- [3/3] Installing libffi-devel-0:3.4.6-5 100% | 271.8 KiB/s | 34.8 KiB | 00m00s- Complete!+ /usr/sbin/rpm "-q" "--whatprovides" "libffi-devel"- libffi-devel-3.4.6-5.fc42.x86_642025-05-16 15:23.40 ---> saved as "1386309c1462d1dcd62997026eeace65f0e381655874263b8a00df6fbb6f08f5"/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))(network host)(shell "opam install $DEPS"))[NOTE] Package ocaml-options-vanilla is already installed (current version is 1).[NOTE] Package ocaml-config is already installed (current version is 3).[NOTE] Package ocaml-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0).[NOTE] Package ocaml is already installed (current version is 5.3.0).[NOTE] Package base-unix is already installed (current version is base).[NOTE] Package base-threads is already installed (current version is base).[NOTE] Package base-nnp is already installed (current version is base).[NOTE] Package base-effects is already installed (current version is base).[NOTE] Package base-domains is already installed (current version is base).[NOTE] Package base-bigarray is already installed (current version is base).The following actions will be performed:=== install 75 packages- install angstrom 0.16.1- install astring 0.8.5- install backoff 0.1.1- install base v0.17.2- install bigarray-compat 1.1.0- install bigstringaf 0.10.0- install camlp-streams 5.0.1- install cmdliner 1.3.0- install conf-libffi 2.0.0- install conf-pkg-config 4- install cppo 1.8.0- install csexp 1.5.2- install ctypes 0.23.0- install ctypes-foreign 0.23.0- install dune 3.18.2- install dune-configurator 3.18.2- install fieldslib v0.17.0- install fmt 0.10.0- install integers 0.7.0- install jane-street-headers v0.17.0- install jst-config v0.17.0- install logs 0.8.0- install mdx 2.5.0- install mtime 2.1.0- install multicore-magic 2.3.1- install num 1.5-1- install ocaml-compiler-libs v0.17.0- install ocaml-syntax-shims 1.0.0- install ocaml-version 4.0.0- install ocaml_intrinsics_kernel v0.17.1- install ocamlbuild 0.16.1- install ocamlfind 1.9.8- install parsexp v0.17.0- install pprint 20230830- install ppx_assert v0.17.0- install ppx_base v0.17.0- install ppx_cold v0.17.0- install ppx_compare v0.17.0- install ppx_derivers 1.2.1- install ppx_deriving 6.0.3- install ppx_enumerate v0.17.0- install ppx_expect v0.17.2- install ppx_fields_conv v0.17.0- install ppx_globalize v0.17.0- install ppx_hash v0.17.0- install ppx_here v0.17.0- install ppx_inline_test v0.17.0- install ppx_minidebug 2.2.0- install ppx_optcomp v0.17.0- install ppx_sexp_conv v0.17.0- install ppx_string v0.17.0- install ppx_variants_conv v0.17.0- install ppxlib 0.35.0- install ppxlib_jane v0.17.2- install printbox 0.12- install printbox-ext-plot 0.12- install printbox-html 0.12- install printbox-md 0.12- install printbox-text 0.12- install ptime 1.2.0- install re 1.12.0- install result 1.5- install saturn_lockfree 0.5.0- install seq base- install sexplib v0.17.0- install sexplib0 v0.17.0- install stdio v0.17.0- install 
stdlib-shims 0.3.0- install thread-local-storage 0.2- install time_now v0.17.0- install topkg 1.0.8- install tyxml 4.6.0- install uucp 16.0.0- install uutf 1.0.4- install variantslib v0.17.0<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>-> retrieved backoff.0.1.1 (cached)-> retrieved angstrom.0.16.1 (cached)-> retrieved astring.0.8.5 (cached)-> retrieved bigarray-compat.1.1.0 (cached)-> retrieved bigstringaf.0.10.0 (cached)-> retrieved base.v0.17.2 (cached)-> retrieved camlp-streams.5.0.1 (cached)-> retrieved cmdliner.1.3.0 (cached)-> retrieved cppo.1.8.0 (cached)-> installed conf-pkg-config.4-> retrieved csexp.1.5.2 (cached)-> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached)-> installed conf-libffi.2.0.0-> retrieved fieldslib.v0.17.0 (cached)-> retrieved integers.0.7.0 (cached)-> retrieved fmt.0.10.0 (cached)-> retrieved jane-street-headers.v0.17.0 (cached)-> retrieved jst-config.v0.17.0 (cached)-> retrieved logs.0.8.0 (cached)-> retrieved mtime.2.1.0 (cached)-> retrieved mdx.2.5.0 (cached)-> retrieved multicore-magic.2.3.1 (cached)-> retrieved num.1.5-1 (cached)-> retrieved ocaml-compiler-libs.v0.17.0 (cached)-> retrieved ocaml-syntax-shims.1.0.0 (cached)-> retrieved ocaml-version.4.0.0 (cached)-> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached)-> retrieved ocamlbuild.0.16.1 (cached)-> retrieved ocamlfind.1.9.8 (cached)-> retrieved parsexp.v0.17.0 (cached)-> retrieved pprint.20230830 (cached)-> retrieved ppx_assert.v0.17.0 (cached)-> retrieved ppx_base.v0.17.0 (cached)-> retrieved ppx_cold.v0.17.0 (cached)-> retrieved ppx_compare.v0.17.0 (cached)-> retrieved ppx_derivers.1.2.1 (cached)-> retrieved ppx_deriving.6.0.3 (cached)-> retrieved ppx_enumerate.v0.17.0 (cached)-> retrieved ppx_expect.v0.17.2 (cached)-> retrieved ppx_fields_conv.v0.17.0 (cached)-> retrieved ppx_hash.v0.17.0 (cached)-> retrieved ppx_globalize.v0.17.0 (cached)-> retrieved ppx_here.v0.17.0 (cached)-> retrieved ppx_inline_test.v0.17.0 (cached)-> retrieved ppx_optcomp.v0.17.0 (cached)-> retrieved ppx_sexp_conv.v0.17.0 (cached)-> retrieved ppx_string.v0.17.0 (cached)-> retrieved ppx_minidebug.2.2.0 (cached)-> retrieved dune.3.18.2, dune-configurator.3.18.2 (cached)-> retrieved ppx_variants_conv.v0.17.0 (cached)-> retrieved ppxlib.0.35.0 (cached)-> retrieved ppxlib_jane.v0.17.2 (cached)-> retrieved ptime.1.2.0 (cached)-> retrieved result.1.5 (cached)-> retrieved re.1.12.0 (cached)-> retrieved seq.base (cached)-> installed cmdliner.1.3.0-> installed num.1.5-1-> installed seq.base-> retrieved saturn_lockfree.0.5.0 (cached)-> retrieved sexplib.v0.17.0 (cached)-> retrieved sexplib0.v0.17.0 (cached)-> retrieved stdio.v0.17.0 (cached)-> retrieved stdlib-shims.0.3.0 (cached)-> retrieved time_now.v0.17.0 (cached)-> retrieved thread-local-storage.0.2 (cached)-> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached)-> retrieved topkg.1.0.8 (cached)-> retrieved tyxml.4.6.0 (cached)-> retrieved uutf.1.0.4 (cached)-> retrieved variantslib.v0.17.0 (cached)-> retrieved uucp.16.0.0 (cached)-> installed ocamlbuild.0.16.1-> installed ocamlfind.1.9.8-> installed topkg.1.0.8-> installed mtime.2.1.0-> installed uutf.1.0.4-> installed ptime.1.2.0-> installed fmt.0.10.0-> installed astring.0.8.5-> installed logs.0.8.0-> installed dune.3.18.2-> installed ppx_derivers.1.2.1-> installed jane-street-headers.v0.17.0-> installed csexp.1.5.2-> installed backoff.0.1.1-> installed bigarray-compat.1.1.0-> installed camlp-streams.5.0.1-> installed 
multicore-magic.2.3.1-> installed ocaml-version.4.0.0-> installed ocaml_intrinsics_kernel.v0.17.1-> installed pprint.20230830-> installed printbox.0.12-> installed result.1.5-> installed sexplib0.v0.17.0-> installed ocaml-syntax-shims.1.0.0-> installed stdlib-shims.0.3.0-> installed thread-local-storage.0.2-> installed ocaml-compiler-libs.v0.17.0-> installed cppo.1.8.0-> installed re.1.12.0-> installed integers.0.7.0-> installed saturn_lockfree.0.5.0-> installed parsexp.v0.17.0-> installed dune-configurator.3.18.2-> installed bigstringaf.0.10.0-> installed sexplib.v0.17.0-> installed angstrom.0.16.1-> installed mdx.2.5.0-> installed tyxml.4.6.0-> installed printbox-html.0.12-> installed uucp.16.0.0-> installed printbox-text.0.12-> installed printbox-md.0.12-> installed ctypes.0.23.0-> installed printbox-ext-plot.0.12-> installed base.v0.17.2-> installed variantslib.v0.17.0-> installed stdio.v0.17.0-> installed fieldslib.v0.17.0-> installed ctypes-foreign.0.23.0-> installed ppxlib.0.35.0-> installed ppxlib_jane.v0.17.2-> installed ppx_optcomp.v0.17.0-> installed ppx_cold.v0.17.0-> installed ppx_here.v0.17.0-> installed ppx_variants_conv.v0.17.0-> installed ppx_fields_conv.v0.17.0-> installed ppx_globalize.v0.17.0-> installed ppx_enumerate.v0.17.0-> installed ppx_deriving.6.0.3-> installed ppx_compare.v0.17.0-> installed ppx_sexp_conv.v0.17.0-> installed ppx_hash.v0.17.0-> installed ppx_assert.v0.17.0-> installed ppx_base.v0.17.0-> installed ppx_minidebug.2.2.0-> installed jst-config.v0.17.0-> installed ppx_string.v0.17.0-> installed time_now.v0.17.0-> installed ppx_inline_test.v0.17.0-> installed ppx_expect.v0.17.2Done.# To update the current shell environment, run: eval $(opam env)2025-05-16 15:26.50 ---> saved as "09ac8a187b2b0d600987c09f26ba2808673f4138e88e25a28ffa60ec5c83604f"/src: (copy (src .) (dst /src))2025-05-16 15:26.51 ---> saved as "28d5402205a85dc178b605a2c8f42f57c83f9a0436d610bea4b800d00111d822"/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))(cd _build/default/test_ppx && ./test_ppx_op.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file(cd _build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/d23771979b1c08c363df5fa35b07e378/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)Welcome to OCANNL! 
Reading configuration defaults from /src/_build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/dune", lines 19-41, characters 0-727:
19 | (rule
20 | (target
21 | (dir log_files))
....
39 | "\\1[0]{=MAYBE UNINITIALIZED} = "
40 | "log_files/micrograd_demo_logging-g_gradient_update.log"
41 | "log_files/micrograd_demo_logging-g_gradient_update.log"))))
(cd _build/default/test && ./micrograd_demo_logging.exe --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_routine_files=overwrite)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Retrieving commandline, environment, or config file variable ocannl_backend
Found cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_cd_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_routine_files
Found overwrite, commandline --ocannl_debug_log_to_routine_files=overwrite
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Fatal error: exception File "src/printbox-text/PrintBox_text.ml", line 212, characters 6-12: Assertion failed
Raised at PrintBox_text.Output.Make_out.to_buf_aux_ in file "src/printbox-text/PrintBox_text.ml", line 212, characters 6-50
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 19-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from PrintBox_text.Output.Make_out.render in file "src/printbox-text/PrintBox_text.ml", line 242, characters 14-64
Called from PrintBox_text.output in file "src/printbox-text/PrintBox_text.ml", line 851, characters 2-31
Called from Minidebug_runtime.PrintBox.output_box in file "minidebug_runtime.ml", line 1527, characters 19-59
Called from Minidebug_runtime.PrintBox.close_log_impl.close_tree in file "minidebug_runtime.ml", line 1572, characters 6-38
Called from Backends.Add_buffer_retrieval_and_syncing.sync_routine in file "arrayjit/lib/backends.ml", lines 144-172, characters 31-82
Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 454-455, characters 4-92
Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 441-455, characters 23-92
Called from Dune__exe__Micrograd_demo_logging in file "test/micrograd_demo_logging.ml", line 34, characters 13-77
(cd _build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/.sandbox/97a7ba9112995e770d50bb9550ca52f8/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test && ./moons_demo_parallel_run.exe)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/default/test/ocannl_config.Retrieving commandline, environment, or config file variable ocannl_log_levelFound 0, in the config file("Set log_level to" 1)└─{orphaned from #2}Retrieving commandline, environment, or config file variable ocannl_backendFound cc, in the config fileProperties of devices:(multicore_devices(device ((device_name CPU) (device_ordinal 0) (num_domains 72))))@!Retrieving commandline, environment, or config file variable ocannl_debug_log_to_routine_filesNot found, using default noRetrieving commandline, environment, or config file variable ocannl_ll_ident_styleNot found, using default heuristicRetrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_levelNot found, using default 3Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_commandNot found, using default gccRetrieving commandline, environment, or config file variable ocannl_never_capture_stdoutNot found, using default falseBatch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087Batch=179, step=180, lr=0.199250, batch loss=2.626295, epoch loss=34.775382Batch=239, step=240, lr=0.199250, batch loss=0.851101, epoch loss=35.626483Batch=299, step=300, lr=0.199000, batch loss=1.445322, epoch loss=37.071804Batch=359, step=360, lr=0.198750, batch loss=1.330969, epoch loss=38.402774Batch=419, step=420, lr=0.198500, batch loss=0.617836, epoch loss=39.020610Batch=479, step=480, lr=0.198250, batch loss=0.823501, epoch loss=39.844111Batch=539, step=540, lr=0.198000, batch loss=0.688277, epoch loss=40.532388Batch=599, step=600, lr=0.197750, batch loss=1.066031, epoch loss=41.598418Batch=659, step=660, lr=0.197500, batch loss=0.483025, epoch loss=42.081444Batch=719, step=720, lr=0.197000, batch loss=0.411271, epoch loss=42.492715Batch=779, step=780, lr=0.197000, batch loss=0.468781, epoch loss=42.961496Batch=839, step=840, lr=0.196500, batch loss=0.443207, epoch loss=43.404703Batch=899, step=900, lr=0.196500, batch loss=0.383603, epoch loss=43.788307Batch=959, step=960, lr=0.196250, batch loss=0.237061, epoch loss=44.025367Batch=1019, step=1020, lr=0.196000, batch loss=0.440592, epoch loss=44.465959Batch=1079, step=1080, lr=0.195750, batch loss=0.228985, epoch loss=44.694944Batch=1139, step=1140, lr=0.195500, batch loss=0.315419, epoch loss=45.010363Batch=1199, step=1200, lr=0.195250, batch loss=0.263061, epoch loss=45.273423Epoch=0, step=1200, lr=0.195250, epoch loss=45.273423Batch=59, step=1260, lr=0.195000, batch loss=0.262234, epoch loss=0.262234Batch=119, step=1320, lr=0.194750, batch loss=0.205565, epoch loss=0.467799Batch=179, step=1380, lr=0.194500, batch loss=0.243008, epoch loss=0.710807Batch=239, step=1440, lr=0.194250, batch loss=0.345585, epoch loss=1.056391Batch=299, step=1500, lr=0.194000, batch loss=0.238675, epoch loss=1.295067Batch=359, step=1560, lr=0.193750, batch loss=0.311284, epoch loss=1.606350Batch=419, step=1620, lr=0.193500, batch loss=0.308348, epoch loss=1.914699Batch=479, step=1680, lr=0.193250, batch loss=0.273383, epoch loss=2.188082Batch=539, step=1740, lr=0.193000, batch loss=0.211523, epoch loss=2.399605Batch=599, step=1800, lr=0.192750, batch loss=0.251712, epoch loss=2.651317Batch=659, step=1860, lr=0.192500, batch loss=0.368692, epoch loss=3.020008Batch=719, step=1920, lr=0.192250, batch loss=0.362978, epoch loss=3.382986Batch=779, step=1980, lr=0.192000, batch loss=0.380347, epoch 
loss=3.763333Batch=839, step=2040, lr=0.191750, batch loss=0.343984, epoch loss=4.107317Batch=899, step=2100, lr=0.191500, batch loss=0.322988, epoch loss=4.430305Batch=959, step=2160, lr=0.191250, batch loss=0.258638, epoch loss=4.688943Batch=1019, step=2220, lr=0.191000, batch loss=0.398730, epoch loss=5.087673Batch=1079, step=2280, lr=0.190750, batch loss=0.242491, epoch loss=5.330163Batch=1139, step=2340, lr=0.190500, batch loss=0.286467, epoch loss=5.616630Batch=1199, step=2400, lr=0.190250, batch loss=0.218525, epoch loss=5.835156Epoch=1, step=2400, lr=0.190250, epoch loss=5.835156Batch=59, step=2460, lr=0.190000, batch loss=0.230931, epoch loss=0.230931Batch=119, step=2520, lr=0.189500, batch loss=0.186551, epoch loss=0.417482Batch=179, step=2580, lr=0.189250, batch loss=0.223051, epoch loss=0.640532Batch=239, step=2640, lr=0.189250, batch loss=0.339892, epoch loss=0.980424Batch=299, step=2700, lr=0.189000, batch loss=0.216750, epoch loss=1.197174Batch=359, step=2760, lr=0.188750, batch loss=0.297711, epoch loss=1.494885Batch=419, step=2820, lr=0.188500, batch loss=0.289892, epoch loss=1.784777Batch=479, step=2880, lr=0.188250, batch loss=0.259546, epoch loss=2.044323Batch=539, step=2940, lr=0.188000, batch loss=0.198431, epoch loss=2.242754Batch=599, step=3000, lr=0.187750, batch loss=0.238439, epoch loss=2.481193Batch=659, step=3060, lr=0.187500, batch loss=0.342811, epoch loss=2.824003Batch=719, step=3120, lr=0.187250, batch loss=0.335059, epoch loss=3.159062Batch=779, step=3180, lr=0.187000, batch loss=0.352809, epoch loss=3.511871Batch=839, step=3240, lr=0.186750, batch loss=0.322921, epoch loss=3.834792Batch=899, step=3300, lr=0.186500, batch loss=0.279979, epoch loss=4.114771Batch=959, step=3360, lr=0.186250, batch loss=0.205042, epoch loss=4.319813Batch=1019, step=3420, lr=0.186000, batch loss=0.316895, epoch loss=4.636709Batch=1079, step=3480, lr=0.185750, batch loss=0.190944, epoch loss=4.827652Batch=1139, step=3540, lr=0.185500, batch loss=0.238803, epoch loss=5.066455Batch=1199, step=3600, lr=0.185250, batch loss=0.200425, epoch loss=5.266881Epoch=2, step=3600, lr=0.185250, epoch loss=5.266881Batch=59, step=3660, lr=0.185000, batch loss=0.221793, epoch loss=0.221793Batch=119, step=3720, lr=0.184750, batch loss=0.185028, epoch loss=0.406820Batch=179, step=3780, lr=0.184500, batch loss=0.211694, epoch loss=0.618514Batch=239, step=3840, lr=0.184250, batch loss=0.317597, epoch loss=0.936111Batch=299, step=3900, lr=0.184000, batch loss=0.209294, epoch loss=1.145404Batch=359, step=3960, lr=0.183750, batch loss=0.286669, epoch loss=1.432074Batch=419, step=4020, lr=0.183500, batch loss=0.279786, epoch loss=1.711859Batch=479, step=4080, lr=0.183250, batch loss=0.254612, epoch loss=1.966472Batch=539, step=4140, lr=0.183000, batch loss=0.201884, epoch loss=2.168355Batch=599, step=4200, lr=0.182750, batch loss=0.242099, epoch loss=2.410454Batch=659, step=4260, lr=0.182500, batch loss=0.329406, epoch loss=2.739860Batch=719, step=4320, lr=0.182250, batch loss=0.328528, epoch loss=3.068387Batch=779, step=4380, lr=0.182000, batch loss=0.351073, epoch loss=3.419460Batch=839, step=4440, lr=0.181750, batch loss=0.319044, epoch loss=3.738505Batch=899, step=4500, lr=0.181500, batch loss=0.291807, epoch loss=4.030312Batch=959, step=4560, lr=0.181250, batch loss=0.241963, epoch loss=4.272275Batch=1019, step=4620, lr=0.181000, batch loss=0.344850, epoch loss=4.617125Batch=1079, step=4680, lr=0.180750, batch loss=0.214059, epoch loss=4.831183Batch=1139, step=4740, lr=0.180500, batch 
loss=0.249822, epoch loss=5.081005Batch=1199, step=4800, lr=0.180000, batch loss=0.191567, epoch loss=5.272573Epoch=3, step=4800, lr=0.180000, epoch loss=5.272573Batch=59, step=4860, lr=0.179750, batch loss=0.226587, epoch loss=0.226587Batch=119, step=4920, lr=0.179500, batch loss=0.190371, epoch loss=0.416959Batch=179, step=4980, lr=0.179250, batch loss=0.207378, epoch loss=0.624336Batch=239, step=5040, lr=0.179000, batch loss=0.309229, epoch loss=0.933565Batch=299, step=5100, lr=0.178750, batch loss=0.205750, epoch loss=1.139315Batch=359, step=5160, lr=0.178750, batch loss=0.275328, epoch loss=1.414643Batch=419, step=5220, lr=0.178250, batch loss=0.265285, epoch loss=1.679928Batch=479, step=5280, lr=0.178000, batch loss=0.238811, epoch loss=1.918739Batch=539, step=5340, lr=0.178000, batch loss=0.191143, epoch loss=2.109883Batch=599, step=5400, lr=0.177750, batch loss=0.227797, epoch loss=2.337679Batch=659, step=5460, lr=0.177250, batch loss=0.324802, epoch loss=2.662481Batch=719, step=5520, lr=0.177250, batch loss=0.322068, epoch loss=2.984549Batch=779, step=5580, lr=0.176750, batch loss=0.342880, epoch loss=3.327429Batch=839, step=5640, lr=0.176500, batch loss=0.311223, epoch loss=3.638652Batch=899, step=5700, lr=0.176500, batch loss=0.272925, epoch loss=3.911577Batch=959, step=5760, lr=0.176000, batch loss=0.217097, epoch loss=4.128674Batch=1019, step=5820, lr=0.175750, batch loss=0.332412, epoch loss=4.461086Batch=1079, step=5880, lr=0.175500, batch loss=0.181017, epoch loss=4.642103Batch=1139, step=5940, lr=0.175500, batch loss=0.217957, epoch loss=4.860060Batch=1199, step=6000, lr=0.175250, batch loss=0.201955, epoch loss=5.062015Epoch=4, step=6000, lr=0.175250, epoch loss=5.062015Batch=59, step=6060, lr=0.175000, batch loss=0.241017, epoch loss=0.241017Batch=119, step=6120, lr=0.174750, batch loss=0.186828, epoch loss=0.427845Batch=179, step=6180, lr=0.174250, batch loss=0.200968, epoch loss=0.628814Batch=239, step=6240, lr=0.174000, batch loss=0.301196, epoch loss=0.930010Batch=299, step=6300, lr=0.173750, batch loss=0.212526, epoch loss=1.142535Batch=359, step=6360, lr=0.173750, batch loss=0.270327, epoch loss=1.412862Batch=419, step=6420, lr=0.173500, batch loss=0.264602, epoch loss=1.677464Batch=479, step=6480, lr=0.173250, batch loss=0.244674, epoch loss=1.922139Batch=539, step=6540, lr=0.173000, batch loss=0.192050, epoch loss=2.114189Batch=599, step=6600, lr=0.172750, batch loss=0.229992, epoch loss=2.344180Batch=659, step=6660, lr=0.172500, batch loss=0.315708, epoch loss=2.659888Batch=719, step=6720, lr=0.172250, batch loss=0.315491, epoch loss=2.975379Batch=779, step=6780, lr=0.172000, batch loss=0.332990, epoch loss=3.308369Batch=839, step=6840, lr=0.171750, batch loss=0.305331, epoch loss=3.613700Batch=899, step=6900, lr=0.171500, batch loss=0.265919, epoch loss=3.879619Batch=959, step=6960, lr=0.171250, batch loss=0.211031, epoch loss=4.090650Batch=1019, step=7020, lr=0.171000, batch loss=0.323953, epoch loss=4.414603Batch=1079, step=7080, lr=0.170750, batch loss=0.176394, epoch loss=4.590997Batch=1139, step=7140, lr=0.170500, batch loss=0.214906, epoch loss=4.805903Batch=1199, step=7200, lr=0.170250, batch loss=0.185314, epoch loss=4.991218Epoch=5, step=7200, lr=0.170250, epoch loss=4.991218Batch=59, step=7260, lr=0.170000, batch loss=0.236259, epoch loss=0.236259Batch=119, step=7320, lr=0.169750, batch loss=0.181510, epoch loss=0.417769Batch=179, step=7380, lr=0.169500, batch loss=0.195598, epoch loss=0.613367Batch=239, step=7440, lr=0.169250, batch loss=0.290682, 
epoch loss=0.904049Batch=299, step=7500, lr=0.169000, batch loss=0.197849, epoch loss=1.101898Batch=359, step=7560, lr=0.168750, batch loss=0.255410, epoch loss=1.357307Batch=419, step=7620, lr=0.168500, batch loss=0.250241, epoch loss=1.607548Batch=479, step=7680, lr=0.168250, batch loss=0.227505, epoch loss=1.835053Batch=539, step=7740, lr=0.168000, batch loss=0.187155, epoch loss=2.022208Batch=599, step=7800, lr=0.167750, batch loss=0.225836, epoch loss=2.248044Batch=659, step=7860, lr=0.167500, batch loss=0.305879, epoch loss=2.553923Batch=719, step=7920, lr=0.167250, batch loss=0.305375, epoch loss=2.859298Batch=779, step=7980, lr=0.167000, batch loss=0.322101, epoch loss=3.181399Batch=839, step=8040, lr=0.166750, batch loss=0.295281, epoch loss=3.476680Batch=899, step=8100, lr=0.166500, batch loss=0.260478, epoch loss=3.737157Batch=959, step=8160, lr=0.166250, batch loss=0.204925, epoch loss=3.942082Batch=1019, step=8220, lr=0.166000, batch loss=0.319454, epoch loss=4.261536Batch=1079, step=8280, lr=0.165750, batch loss=0.172717, epoch loss=4.434253Batch=1139, step=8340, lr=0.165500, batch loss=0.211010, epoch loss=4.645263Batch=1199, step=8400, lr=0.165250, batch loss=0.179991, epoch loss=4.825255Epoch=6, step=8400, lr=0.165250, epoch loss=4.825255Batch=59, step=8460, lr=0.165000, batch loss=0.226747, epoch loss=0.226747Batch=119, step=8520, lr=0.164750, batch loss=0.176115, epoch loss=0.402862Batch=179, step=8580, lr=0.164500, batch loss=0.188220, epoch loss=0.591083Batch=239, step=8640, lr=0.164250, batch loss=0.277602, epoch loss=0.868685Batch=299, step=8700, lr=0.164000, batch loss=0.193640, epoch loss=1.062324Batch=359, step=8760, lr=0.163750, batch loss=0.248001, epoch loss=1.310326Batch=419, step=8820, lr=0.163500, batch loss=0.246770, epoch loss=1.557096Batch=479, step=8880, lr=0.163250, batch loss=0.227665, epoch loss=1.784761Batch=539, step=8940, lr=0.163000, batch loss=0.180333, epoch loss=1.965094Batch=599, step=9000, lr=0.162750, batch loss=0.216601, epoch loss=2.181695Batch=659, step=9060, lr=0.162500, batch loss=0.295403, epoch loss=2.477098Batch=719, step=9120, lr=0.162250, batch loss=0.294479, epoch loss=2.771578Batch=779, step=9180, lr=0.162000, batch loss=0.317584, epoch loss=3.089162Batch=839, step=9240, lr=0.161750, batch loss=0.286339, epoch loss=3.375501Batch=899, step=9300, lr=0.161500, batch loss=0.252702, epoch loss=3.628203Batch=959, step=9360, lr=0.161250, batch loss=0.187806, epoch loss=3.816009Batch=1019, step=9420, lr=0.161000, batch loss=0.321649, epoch loss=4.137659Batch=1079, step=9480, lr=0.160750, batch loss=0.197372, epoch loss=4.335031Batch=1139, step=9540, lr=0.160500, batch loss=0.213277, epoch loss=4.548308Batch=1199, step=9600, lr=0.160250, batch loss=0.168202, epoch loss=4.716511Epoch=7, step=9600, lr=0.160250, epoch loss=4.716511Batch=59, step=9660, lr=0.160000, batch loss=0.198842, epoch loss=0.198842Batch=119, step=9720, lr=0.159750, batch loss=0.164821, epoch loss=0.363663Batch=179, step=9780, lr=0.159250, batch loss=0.178767, epoch loss=0.542430Batch=239, step=9840, lr=0.159250, batch loss=0.262341, epoch loss=0.804772Batch=299, step=9900, lr=0.159000, batch loss=0.183853, epoch loss=0.988624Batch=359, step=9960, lr=0.158750, batch loss=0.243286, epoch loss=1.231910Batch=419, step=10020, lr=0.158500, batch loss=0.233162, epoch loss=1.465072Batch=479, step=10080, lr=0.158250, batch loss=0.213881, epoch loss=1.678953Batch=539, step=10140, lr=0.158000, batch loss=0.171442, epoch loss=1.850395Batch=599, step=10200, lr=0.157750, batch 
loss=0.202057, epoch loss=2.052452Batch=659, step=10260, lr=0.157500, batch loss=0.282076, epoch loss=2.334529Batch=719, step=10320, lr=0.157250, batch loss=0.287874, epoch loss=2.622403Batch=779, step=10380, lr=0.157000, batch loss=0.296135, epoch loss=2.918538Batch=839, step=10440, lr=0.156750, batch loss=0.267732, epoch loss=3.186270Batch=899, step=10500, lr=0.156250, batch loss=0.245194, epoch loss=3.431464Batch=959, step=10560, lr=0.156250, batch loss=0.195699, epoch loss=3.627163Batch=1019, step=10620, lr=0.156000, batch loss=0.282813, epoch loss=3.909977Batch=1079, step=10680, lr=0.155750, batch loss=0.166515, epoch loss=4.076491Batch=1139, step=10740, lr=0.155500, batch loss=0.194834, epoch loss=4.271325Batch=1199, step=10800, lr=0.155250, batch loss=0.154854, epoch loss=4.426179Epoch=8, step=10800, lr=0.155250, epoch loss=4.426179Batch=59, step=10860, lr=0.155000, batch loss=0.181453, epoch loss=0.181453Batch=119, step=10920, lr=0.154750, batch loss=0.157520, epoch loss=0.338973Batch=179, step=10980, lr=0.154500, batch loss=0.167212, epoch loss=0.506184Batch=239, step=11040, lr=0.154250, batch loss=0.245739, epoch loss=0.751923Batch=299, step=11100, lr=0.154000, batch loss=0.166387, epoch loss=0.918310Batch=359, step=11160, lr=0.153750, batch loss=0.227743, epoch loss=1.146053Batch=419, step=11220, lr=0.153500, batch loss=0.234316, epoch loss=1.380368Batch=479, step=11280, lr=0.153000, batch loss=0.199518, epoch loss=1.579886Batch=539, step=11340, lr=0.153000, batch loss=0.157754, epoch loss=1.737641Batch=599, step=11400, lr=0.152750, batch loss=0.181406, epoch loss=1.919046Batch=659, step=11460, lr=0.152500, batch loss=0.263888, epoch loss=2.182934Batch=719, step=11520, lr=0.152250, batch loss=0.261992, epoch loss=2.444925Batch=779, step=11580, lr=0.152000, batch loss=0.270519, epoch loss=2.715444Batch=839, step=11640, lr=0.151750, batch loss=0.251632, epoch loss=2.967076Batch=899, step=11700, lr=0.151500, batch loss=0.217712, epoch loss=3.184788Batch=959, step=11760, lr=0.151250, batch loss=0.175188, epoch loss=3.359976Batch=1019, step=11820, lr=0.151000, batch loss=0.260865, epoch loss=3.620841Batch=1079, step=11880, lr=0.150750, batch loss=0.145006, epoch loss=3.765847Batch=1139, step=11940, lr=0.150500, batch loss=0.180615, epoch loss=3.946462Batch=1199, step=12000, lr=0.150250, batch loss=0.140390, epoch loss=4.086852Epoch=9, step=12000, lr=0.150250, epoch loss=4.086852Batch=59, step=12060, lr=0.150000, batch loss=0.162659, epoch loss=0.162659Batch=119, step=12120, lr=0.149750, batch loss=0.137047, epoch loss=0.299706Batch=179, step=12180, lr=0.149500, batch loss=0.152038, epoch loss=0.451743Batch=239, step=12240, lr=0.149250, batch loss=0.220914, epoch loss=0.672657Batch=299, step=12300, lr=0.149000, batch loss=0.143773, epoch loss=0.816430Batch=359, step=12360, lr=0.148750, batch loss=0.198654, epoch loss=1.015083Batch=419, step=12420, lr=0.148500, batch loss=0.207364, epoch loss=1.222447Batch=479, step=12480, lr=0.148250, batch loss=0.180099, epoch loss=1.402546Batch=539, step=12540, lr=0.148000, batch loss=0.143424, epoch loss=1.545970Batch=599, step=12600, lr=0.147750, batch loss=0.150268, epoch loss=1.696239Batch=659, step=12660, lr=0.147500, batch loss=0.228022, epoch loss=1.924261Batch=719, step=12720, lr=0.147250, batch loss=0.238189, epoch loss=2.162450Batch=779, step=12780, lr=0.147000, batch loss=0.265394, epoch loss=2.427843Batch=839, step=12840, lr=0.146500, batch loss=0.235770, epoch loss=2.663614Batch=899, step=12900, lr=0.146500, batch loss=0.213525, epoch 
loss=2.877139Batch=959, step=12960, lr=0.146250, batch loss=0.153124, epoch loss=3.030263Batch=1019, step=13020, lr=0.146000, batch loss=0.267551, epoch loss=3.297814Batch=1079, step=13080, lr=0.145750, batch loss=0.113473, epoch loss=3.411287Batch=1139, step=13140, lr=0.145500, batch loss=0.154293, epoch loss=3.565580Batch=1199, step=13200, lr=0.145000, batch loss=0.118809, epoch loss=3.684389Epoch=10, step=13200, lr=0.145000, epoch loss=3.684389Batch=59, step=13260, lr=0.145000, batch loss=0.137312, epoch loss=0.137312Batch=119, step=13320, lr=0.144750, batch loss=0.120855, epoch loss=0.258166Batch=179, step=13380, lr=0.144500, batch loss=0.128389, epoch loss=0.386555Batch=239, step=13440, lr=0.144250, batch loss=0.184808, epoch loss=0.571363Batch=299, step=13500, lr=0.144000, batch loss=0.117279, epoch loss=0.688641Batch=359, step=13560, lr=0.143750, batch loss=0.167326, epoch loss=0.855968Batch=419, step=13620, lr=0.143500, batch loss=0.183464, epoch loss=1.039432Batch=479, step=13680, lr=0.143250, batch loss=0.138467, epoch loss=1.177899Batch=539, step=13740, lr=0.143000, batch loss=0.120954, epoch loss=1.298853Batch=599, step=13800, lr=0.142750, batch loss=0.121428, epoch loss=1.420281Batch=659, step=13860, lr=0.142500, batch loss=0.179236, epoch loss=1.599517Batch=719, step=13920, lr=0.142250, batch loss=0.175487, epoch loss=1.775005Batch=779, step=13980, lr=0.142000, batch loss=0.190725, epoch loss=1.965730Batch=839, step=14040, lr=0.141750, batch loss=0.192749, epoch loss=2.158479Batch=899, step=14100, lr=0.141500, batch loss=0.186988, epoch loss=2.345468Batch=959, step=14160, lr=0.141250, batch loss=0.130780, epoch loss=2.476248Batch=1019, step=14220, lr=0.141000, batch loss=0.237888, epoch loss=2.714135Batch=1079, step=14280, lr=0.140750, batch loss=0.080621, epoch loss=2.794756Batch=1139, step=14340, lr=0.140500, batch loss=0.127480, epoch loss=2.922236Batch=1199, step=14400, lr=0.140250, batch loss=0.091374, epoch loss=3.013610Epoch=11, step=14400, lr=0.140250, epoch loss=3.013610Batch=59, step=14460, lr=0.140000, batch loss=0.113334, epoch loss=0.113334Batch=119, step=14520, lr=0.139750, batch loss=0.110039, epoch loss=0.223373Batch=179, step=14580, lr=0.139500, batch loss=0.105545, epoch loss=0.328917Batch=239, step=14640, lr=0.139250, batch loss=0.141441, epoch loss=0.470358Batch=299, step=14700, lr=0.139000, batch loss=0.082539, epoch loss=0.552897Batch=359, step=14760, lr=0.138750, batch loss=0.123299, epoch loss=0.676196Batch=419, step=14820, lr=0.138500, batch loss=0.133412, epoch loss=0.809608Batch=479, step=14880, lr=0.138250, batch loss=0.099428, epoch loss=0.909037Batch=539, step=14940, lr=0.138000, batch loss=0.090118, epoch loss=0.999154Batch=599, step=15000, lr=0.137750, batch loss=0.086482, epoch loss=1.085637Batch=659, step=15060, lr=0.137500, batch loss=0.139110, epoch loss=1.224746Batch=719, step=15120, lr=0.137250, batch loss=0.176443, epoch loss=1.401189Batch=779, step=15180, lr=0.137000, batch loss=0.332944, epoch loss=1.734133Batch=839, step=15240, lr=0.136750, batch loss=0.136746, epoch loss=1.870879Batch=899, step=15300, lr=0.136500, batch loss=0.112257, epoch loss=1.983136Batch=959, step=15360, lr=0.136250, batch loss=0.102311, epoch loss=2.085448Batch=1019, step=15420, lr=0.136000, batch loss=0.165474, epoch loss=2.250922Batch=1079, step=15480, lr=0.135750, batch loss=0.045850, epoch loss=2.296772Batch=1139, step=15540, lr=0.135500, batch loss=0.100060, epoch loss=2.396831Batch=1199, step=15600, lr=0.135250, batch loss=0.063180, epoch 
loss=2.460012
Epoch=12, step=15600, lr=0.135250, epoch loss=2.460012
Batch=59, step=15660, lr=0.135000, batch loss=0.082016, epoch loss=0.082016
Batch=119, step=15720, lr=0.134750, batch loss=0.119778, epoch loss=0.201795
Batch=179, step=15780, lr=0.134500, batch loss=0.092562, epoch loss=0.294357
Batch=239, step=15840, lr=0.134250, batch loss=0.097553, epoch loss=0.391910
Batch=299, step=15900, lr=0.134000, batch loss=0.041472, epoch loss=0.433382
Batch=359, step=15960, lr=0.133750, batch loss=0.084970, epoch loss=0.518352
Batch=419, step=16020, lr=0.133500, batch loss=0.083985, epoch loss=0.602337
Batch=479, step=16080, lr=0.133250, batch loss=0.069944, epoch loss=0.672281
Batch=539, step=16140, lr=0.133000, batch loss=0.055227, epoch loss=0.727508
Batch=599, step=16200, lr=0.132750, batch loss=0.100388, epoch loss=0.827896
Batch=659, step=16260, lr=0.132500, batch loss=0.080120, epoch loss=0.908016
Batch=719, step=16320, lr=0.132250, batch loss=0.087808, epoch loss=0.995824
Batch=779, step=16380, lr=0.132000, batch loss=0.117613, epoch loss=1.113437
Batch=839, step=16440, lr=0.131750, batch loss=0.121613, epoch loss=1.235050
Batch=899, step=16500, lr=0.131500, batch loss=0.137988, epoch loss=1.373038
Batch=959, step=16560, lr=0.131250, batch loss=0.045157, epoch loss=1.418195
Batch=1019, step=16620, lr=0.131000, batch loss=0.118874, epoch loss=1.537070
Batch=1079, step=16680, lr=0.130750, batch loss=0.021967, epoch loss=1.559037
Batch=1139, step=16740, lr=0.130500, batch loss=0.051023, epoch loss=1.610060
Batch=1199, step=16800, lr=0.130250, batch loss=0.029044, epoch loss=1.639104
Epoch=13, step=16800, lr=0.130250, epoch loss=1.639104
Batch=59, step=16860, lr=0.130000, batch loss=0.043995, epoch loss=0.043995
Batch=119, step=16920, lr=0.129750, batch loss=0.079345, epoch loss=0.123340
Batch=179, step=16980, lr=0.129500, batch loss=0.056479, epoch loss=0.179819
Batch=239, step=17040, lr=0.129000, batch loss=0.063792, epoch loss=0.243611
Batch=299, step=17100, lr=0.129000, batch loss=0.027079, epoch loss=0.270690
Batch=359, step=17160, lr=0.128750, batch loss=0.049710, epoch loss=0.320400
Batch=419, step=17220, lr=0.128500, batch loss=0.067956, epoch loss=0.388356
Batch=479, step=17280, lr=0.128250, batch loss=0.023689, epoch loss=0.412045
Batch=539, step=17340, lr=0.128000, batch loss=0.030024, epoch loss=0.442070
Batch=599, step=17400, lr=0.127750, batch loss=0.034590, epoch loss=0.476660
Batch=659, step=17460, lr=0.127500, batch loss=0.047339, epoch loss=0.523999
Batch=719, step=17520, lr=0.127250, batch loss=0.046804, epoch loss=0.570802
Batch=779, step=17580, lr=0.127000, batch loss=0.078861, epoch loss=0.649663
Batch=839, step=17640, lr=0.126750, batch loss=0.171462, epoch loss=0.821125
Batch=899, step=17700, lr=0.126500, batch loss=0.064913, epoch loss=0.886038
Batch=959, step=17760, lr=0.126250, batch loss=0.023999, epoch loss=0.910037
Batch=1019, step=17820, lr=0.126000, batch loss=0.037523, epoch loss=0.947560
Batch=1079, step=17880, lr=0.125750, batch loss=0.021393, epoch loss=0.968952
Batch=1139, step=17940, lr=0.125500, batch loss=0.048179, epoch loss=1.017131
Batch=1199, step=18000, lr=0.125250, batch loss=0.019939, epoch loss=1.037070
Epoch=14, step=18000, lr=0.125250, epoch loss=1.037070
Batch=59, step=18060, lr=0.124750, batch loss=0.012530, epoch loss=0.012530
Batch=119, step=18120, lr=0.124750, batch loss=0.019021, epoch loss=0.031551
Batch=179, step=18180, lr=0.124500, batch loss=0.028712, epoch loss=0.060263
Batch=239, step=18240, lr=0.124250, batch loss=0.034184, epoch loss=0.094448
Batch=299, step=18300, lr=0.124000, batch loss=0.010004, epoch loss=0.104451
Batch=359, step=18360, lr=0.123750, batch loss=0.023100, epoch loss=0.127551
Batch=419, step=18420, lr=0.123500, batch loss=0.029167, epoch loss=0.156718
Batch=479, step=18480, lr=0.123250, batch loss=0.022961, epoch loss=0.179679
Batch=539, step=18540, lr=0.123000, batch loss=0.041019, epoch loss=0.220698
Batch=599, step=18600, lr=0.122750, batch loss=0.026023, epoch loss=0.246721
Batch=659, step=18660, lr=0.122500, batch loss=0.034320, epoch loss=0.281041
Batch=719, step=18720, lr=0.122250, batch loss=0.034294, epoch loss=0.315335
Batch=779, step=18780, lr=0.122000, batch loss=0.079969, epoch loss=0.395304
Batch=839, step=18840, lr=0.121750, batch loss=0.055901, epoch loss=0.451205
Batch=899, step=18900, lr=0.121500, batch loss=0.054137, epoch loss=0.505342
Batch=959, step=18960, lr=0.121250, batch loss=0.015554, epoch loss=0.520896
Batch=1019, step=19020, lr=0.121000, batch loss=0.028930, epoch loss=0.549826
Batch=1079, step=19080, lr=0.120500, batch loss=0.011832, epoch loss=0.561658
Batch=1139, step=19140, lr=0.120250, batch loss=0.023714, epoch loss=0.585372
Batch=1199, step=19200, lr=0.120250, batch loss=0.009776, epoch loss=0.595148
Epoch=15, step=19200, lr=0.120250, epoch loss=0.595148
Batch=59, step=19260, lr=0.119750, batch loss=0.005250, epoch loss=0.005250
Batch=119, step=19320, lr=0.119750, batch loss=0.021733, epoch loss=0.026983
Batch=179, step=19380, lr=0.119250, batch loss=0.050993, epoch loss=0.077976
Batch=239, step=19440, lr=0.119000, batch loss=0.022850, epoch loss=0.100826
Batch=299, step=19500, lr=0.119000, batch loss=0.018263, epoch loss=0.119089
Batch=359, step=19560, lr=0.118500, batch loss=0.032891, epoch loss=0.151980
Batch=419, step=19620, lr=0.118500, batch loss=0.020663, epoch loss=0.172643
Batch=479, step=19680, lr=0.118000, batch loss=0.007445, epoch loss=0.180087
Batch=539, step=19740, lr=0.118000, batch loss=0.024665, epoch loss=0.204752
Batch=599, step=19800, lr=0.117500, batch loss=0.025898, epoch loss=0.230650
Batch=659, step=19860, lr=0.117250, batch loss=0.016647, epoch loss=0.247298
Batch=719, step=19920, lr=0.117000, batch loss=0.051700, epoch loss=0.298997
Batch=779, step=19980, lr=0.116750, batch loss=0.071247, epoch loss=0.370244
Batch=839, step=20040, lr=0.116500, batch loss=0.031232, epoch loss=0.401476
Batch=899, step=20100, lr=0.116500, batch loss=0.033904, epoch loss=0.435380
Batch=959, step=20160, lr=0.116000, batch loss=0.011818, epoch loss=0.447197
Batch=1019, step=20220, lr=0.115750, batch loss=0.014964, epoch loss=0.462162
Batch=1079, step=20280, lr=0.115750, batch loss=0.001826, epoch loss=0.463988
Batch=1139, step=20340, lr=0.115250, batch loss=0.015194, epoch loss=0.479183
Batch=1199, step=20400, lr=0.115250, batch loss=0.008486, epoch loss=0.487669
Epoch=16, step=20400, lr=0.115250, epoch loss=0.487669
Batch=59, step=20460, lr=0.115000, batch loss=0.004707, epoch loss=0.004707
Batch=119, step=20520, lr=0.114750, batch loss=0.011782, epoch loss=0.016489
Batch=179, step=20580, lr=0.114500, batch loss=0.024971, epoch loss=0.041460
Batch=239, step=20640, lr=0.114250, batch loss=0.015022, epoch loss=0.056482
Batch=299, step=20700, lr=0.114000, batch loss=0.002818, epoch loss=0.059300
Batch=359, step=20760, lr=0.113750, batch loss=0.013067, epoch loss=0.072367
Batch=419, step=20820, lr=0.113500, batch loss=0.015494, epoch loss=0.087861
Batch=479, step=20880, lr=0.113250, batch loss=0.003409, epoch loss=0.091270
Batch=539, step=20940, lr=0.113000, batch loss=0.018733, epoch loss=0.110002
Batch=599, step=21000, lr=0.112750, batch loss=0.021071, epoch loss=0.131074
Batch=659, step=21060, lr=0.112500, batch loss=0.017844, epoch loss=0.148918
Batch=719, step=21120, lr=0.112000, batch loss=0.053149, epoch loss=0.202067
Batch=779, step=21180, lr=0.111750, batch loss=0.063475, epoch loss=0.265543
Batch=839, step=21240, lr=0.111750, batch loss=0.024615, epoch loss=0.290158
Batch=899, step=21300, lr=0.111500, batch loss=0.031375, epoch loss=0.321532
Batch=959, step=21360, lr=0.111000, batch loss=0.010682, epoch loss=0.332214
Batch=1019, step=21420, lr=0.111000, batch loss=0.011919, epoch loss=0.344134
Batch=1079, step=21480, lr=0.110750, batch loss=0.002075, epoch loss=0.346209
Batch=1139, step=21540, lr=0.110500, batch loss=0.011911, epoch loss=0.358119
Batch=1199, step=21600, lr=0.110250, batch loss=0.004570, epoch loss=0.362689
Epoch=17, step=21600, lr=0.110250, epoch loss=0.362689
Batch=59, step=21660, lr=0.110000, batch loss=0.001638, epoch loss=0.001638
Batch=119, step=21720, lr=0.109750, batch loss=0.007632, epoch loss=0.009269
Batch=179, step=21780, lr=0.109500, batch loss=0.015622, epoch loss=0.024891
Batch=239, step=21840, lr=0.109250, batch loss=0.013809, epoch loss=0.038700
Batch=299, step=21900, lr=0.109000, batch loss=0.002406, epoch loss=0.041106
Batch=359, step=21960, lr=0.108750, batch loss=0.011578, epoch loss=0.052684
Batch=419, step=22020, lr=0.108500, batch loss=0.012029, epoch loss=0.064713
Batch=479, step=22080, lr=0.108250, batch loss=0.003140, epoch loss=0.067852
Batch=539, step=22140, lr=0.108000, batch loss=0.017697, epoch loss=0.085549
Batch=599, step=22200, lr=0.107750, batch loss=0.016871, epoch loss=0.102420
Batch=659, step=22260, lr=0.107500, batch loss=0.015311, epoch loss=0.117730
Batch=719, step=22320, lr=0.107250, batch loss=0.025340, epoch loss=0.143070
Batch=779, step=22380, lr=0.107000, batch loss=0.037247, epoch loss=0.180317
Batch=839, step=22440, lr=0.106750, batch loss=0.022810, epoch loss=0.203127
Batch=899, step=22500, lr=0.106500, batch loss=0.031011, epoch loss=0.234139
Batch=959, step=22560, lr=0.106250, batch loss=0.009155, epoch loss=0.243294
Batch=1019, step=22620, lr=0.106000, batch loss=0.009991, epoch loss=0.253285
Batch=1079, step=22680, lr=0.105750, batch loss=0.000290, epoch loss=0.253575
Batch=1139, step=22740, lr=0.105500, batch loss=0.011516, epoch loss=0.265092
Batch=1199, step=22800, lr=0.105250, batch loss=0.004529, epoch loss=0.269621
Epoch=18, step=22800, lr=0.105250, epoch loss=0.269621
Batch=59, step=22860, lr=0.105000, batch loss=0.001404, epoch loss=0.001404
Batch=119, step=22920, lr=0.104750, batch loss=0.005288, epoch loss=0.006692
Batch=179, step=22980, lr=0.104500, batch loss=0.010044, epoch loss=0.016736
Batch=239, step=23040, lr=0.104250, batch loss=0.010043, epoch loss=0.026779
Batch=299, step=23100, lr=0.104000, batch loss=0.010210, epoch loss=0.036990
Batch=359, step=23160, lr=0.103750, batch loss=0.011833, epoch loss=0.048823
Batch=419, step=23220, lr=0.103500, batch loss=0.010831, epoch loss=0.059654
Batch=479, step=23280, lr=0.103000, batch loss=0.002353, epoch loss=0.062007
Batch=539, step=23340, lr=0.103000, batch loss=0.018291, epoch loss=0.080299
Batch=599, step=23400, lr=0.102750, batch loss=0.015115, epoch loss=0.095414
Batch=659, step=23460, lr=0.102500, batch loss=0.015060, epoch loss=0.110473
Batch=719, step=23520, lr=0.102250, batch loss=0.014836, epoch loss=0.125310
Batch=779, step=23580, lr=0.102000, batch loss=0.020916, epoch loss=0.146225
Batch=839, step=23640, lr=0.101750, batch loss=0.027552, epoch loss=0.173777
Batch=899, step=23700, lr=0.101500, batch loss=0.022173, epoch loss=0.195949
Batch=959, step=23760, lr=0.101250, batch loss=0.009998, epoch loss=0.205948
Batch=1019, step=23820, lr=0.101000, batch loss=0.007109, epoch loss=0.213056
Batch=1079, step=23880, lr=0.100750, batch loss=0.001234, epoch loss=0.214290
Batch=1139, step=23940, lr=0.100500, batch loss=0.008641, epoch loss=0.222931
Batch=1199, step=24000, lr=0.100250, batch loss=0.004693, epoch loss=0.227625
Epoch=19, step=24000, lr=0.100250, epoch loss=0.227625
Half-moons scatterplot and decision boundary:
[printbox text plot: the two half-moon point classes are drawn with # and %, and the learned decision regions are shaded with * and .]
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @install @check @runtest && rm -rf _build" failed with exit status 1
2025-05-16 15:28.39: Job failed: Failed: Build failed