2025-05-22 12:20.03: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (9afb61d245b2724d2132450805c8b080ac7e0c9a) (linux-x86_64:debian-12-5.2_opam-2.3)
Base: ocaml/opam:debian-12-ocaml-5.2@sha256:a17317e9abe385dc16b4390c64a374046d6dd562e80aea838d91c6c1335da357
Opam project build

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 9afb61d2
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-5.2@sha256:a17317e9abe385dc16b4390c64a374046d6dd562e80aea838d91c6c1335da357
# debian-12-5.2_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 || git fetch origin master) && git reset -q --hard c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
    opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.2.1 ocaml-base-compiler.5.2.1 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.0 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK 2025-05-22 12:20.03: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-5.2@sha256:a17317e9abe385dc16b4390c64a374046d6dd562e80aea838d91c6c1335da357-debian-12-5.2_opam-2.3-2f9e463b934c3596793f881f51cf211b" 2025-05-22 12:20.03: Using OBuilder spec: ((from ocaml/opam:debian-12-ocaml-5.2@sha256:a17317e9abe385dc16b4390c64a374046d6dd562e80aea838d91c6c1335da357) (comment debian-12-5.2_opam-2.3) (user (uid 1000) (gid 1000)) (env CLICOLOR_FORCE 1) (env OPAMCOLOR always) (workdir /src) (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam")) (run (shell "opam init --reinit -ni")) (run (shell "uname -rs && opam exec -- ocaml -version && opam --version")) (workdir /src) (run (shell "sudo chown opam /src")) (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 || git fetch origin master) && git reset -q --hard c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 && git log --no-decorate -n1 --oneline && opam update -u")) (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./)) (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'")) (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'")) (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.2.1 ocaml-base-compiler.5.2.1 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.0 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0") (env CI true) (env OCAMLCI true) (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS")) (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS")) (copy (src .) 
(dst /src)) (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")) ) 2025-05-22 12:20.03: Waiting for resource in pool OCluster 2025-05-22 12:20.03: Waiting for worker… 2025-05-22 12:20.04: Got resource from pool OCluster Building on laodoke.caelum.ci.dev All commits already cached HEAD is now at 9afb61d2 In progress / broken: Format -> PPrint migration first pass by Claude (from ocaml/opam:debian-12-ocaml-5.2@sha256:a17317e9abe385dc16b4390c64a374046d6dd562e80aea838d91c6c1335da357) 2025-05-22 12:20.05 ---> using "54ec013a6f149facc0d7142b09647559c040b9be9f10f3b28aedc92eb0aa3eda" from cache /: (comment debian-12-5.2_opam-2.3) /: (user (uid 1000) (gid 1000)) /: (env CLICOLOR_FORCE 1) /: (env OPAMCOLOR always) /: (workdir /src) /src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam")) 2025-05-22 12:20.05 ---> using "c2927949a690e79730c631d7a8829146d4d2b9a42c6543f46b7f5eb020a6256d" from cache /src: (run (shell "opam init --reinit -ni")) Configuring from /home/opam/.opamrc and then from built-in defaults. Checking for available remotes: rsync and local, git. - you won't be able to use mercurial repositories unless you install the hg command on your system. - you won't be able to use darcs repositories unless you install the darcs command on your system. Continue? [y/n] y This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted. You may want to back it up before going further. [NOTE] The 'jobs' option was reset, its value was 39 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using: opam option jobs=39 --global Format upgrade done. <><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><> [default] Initialised 2025-05-22 12:20.05 ---> using "a7fd5cacf3665fbb975eef60f5b8ef985d00ea4b339eac782682eead64b75f01" from cache /src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version")) Linux 5.15.0-139-generic The OCaml toplevel, version 5.2.1 2.3.0 2025-05-22 12:20.05 ---> using "7adba0ccf89541c88b795935f2449fd37f815bccab093d57590be9ba50021241" from cache /src: (workdir /src) /src: (run (shell "sudo chown opam /src")) 2025-05-22 12:20.05 ---> using "b491f791e6cf8593356bc2c9bd1da33ed7889986fb1cee97a62247171886e89a" from cache /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 || git fetch origin master) && git reset -q --hard c7d6d1d2aa1bf00c8e6ec9dff2030cd39d493e47 && git log --no-decorate -n1 --oneline && opam update -u")) From https://github.com/ocaml/opam-repository * branch master -> FETCH_HEAD 0d013e603b..27f5ac67c2 master -> origin/master c7d6d1d2aa Merge pull request #27880 from MisterDA/os-family-fedora <><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><> [default] synchronised from git+file:///home/opam/opam-repository Everything as up-to-date as possible (run with --verbose to show unavailable upgrades). However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages. Nothing to do. 
# To update the current shell environment, run: eval $(opam env) 2025-05-22 12:20.05 ---> using "f24dd5436b12d3c594ccbdb79e37874843eaefce9734f868872db865957b7d07" from cache /src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./)) 2025-05-22 12:20.05 ---> using "96c276ff845fad8279397cc29989664029c340409f58c8927b2b710da51559fa" from cache /src: (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'")) [neural_nets_lib.dev] synchronised (file:///src) neural_nets_lib is now pinned to file:///src (version dev) [arrayjit.dev] synchronised (file:///src) arrayjit is now pinned to file:///src (version dev) 2025-05-22 12:20.05 ---> using "f212c0d5e0cd6c9b988fa677d98ad3015733f20ba427c037942180d1d026d4ec" from cache /src: (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'")) 2025-05-22 12:20.05 ---> using "4d8a2e50e4e2558b3a40096f9cf12c7f5fcf968e8efe19a2c8ff4a9e4d5668d8" from cache /src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.2 base-bigarray.base base-domains.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 cmdliner.1.3.0 conf-libffi.2.0.0 conf-pkg-config.4 cppo.1.8.0 csexp.1.5.2 ctypes.0.23.0 ctypes-foreign.0.23.0 dune.3.18.2 dune-configurator.3.18.2 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.5-1 ocaml.5.2.1 ocaml-base-compiler.5.2.1 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.0 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.0.3 ppx_enumerate.v0.17.0 ppx_expect.v0.17.2 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.0 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.0 ppx_minidebug.2.2.0 ppx_optcomp.v0.17.0 ppx_sexp_conv.v0.17.0 ppx_string.v0.17.0 ppx_variants_conv.v0.17.0 ppxlib.0.35.0 ppxlib_jane.v0.17.0 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.12.0 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0") /src: (env CI true) /src: (env OCAMLCI true) /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS")) + /usr/bin/sudo "apt-get" "update" - Get:1 http://deb.debian.org/debian bookworm InRelease [151 kB] - Get:2 http://deb.debian.org/debian bookworm-updates InRelease [55.4 kB] - Get:3 http://deb.debian.org/debian-security bookworm-security InRelease [48.0 kB] - Get:4 http://deb.debian.org/debian bookworm/main amd64 Packages [8793 kB] - Get:5 http://deb.debian.org/debian-security bookworm-security/main amd64 Packages [259 kB] - Fetched 9306 kB in 2s (5818 kB/s) - Reading package lists... - <><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><> [arrayjit.dev] synchronised (file:///src) [neural_nets_lib.dev] synchronised (file:///src) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). [NOTE] Package ocaml-config is already installed (current version is 3). 
[NOTE] Package ocaml-base-compiler is already installed (current version is 5.2.1). [NOTE] Package ocaml is already installed (current version is 5.2.1). [NOTE] Package base-unix is already installed (current version is base). [NOTE] Package base-threads is already installed (current version is base). [NOTE] Package base-nnp is already installed (current version is base). [NOTE] Package base-domains is already installed (current version is base). [NOTE] Package base-bigarray is already installed (current version is base). The following system packages will first need to be installed: libffi-dev pkg-config <><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><> + /usr/bin/sudo "apt-get" "install" "-qq" "-yy" "libffi-dev" "pkg-config" - debconf: delaying package configuration, since apt-utils is not installed - Selecting previously unselected package libffi-dev:amd64. - (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 18778 files and directories currently installed.) - Preparing to unpack .../libffi-dev_3.4.4-1_amd64.deb ... - Unpacking libffi-dev:amd64 (3.4.4-1) ... - Selecting previously unselected package libpkgconf3:amd64. - Preparing to unpack .../libpkgconf3_1.8.1-1_amd64.deb ... - Unpacking libpkgconf3:amd64 (1.8.1-1) ... - Selecting previously unselected package pkgconf-bin. - Preparing to unpack .../pkgconf-bin_1.8.1-1_amd64.deb ... - Unpacking pkgconf-bin (1.8.1-1) ... - Selecting previously unselected package pkgconf:amd64. - Preparing to unpack .../pkgconf_1.8.1-1_amd64.deb ... - Unpacking pkgconf:amd64 (1.8.1-1) ... - Selecting previously unselected package pkg-config:amd64. - Preparing to unpack .../pkg-config_1.8.1-1_amd64.deb ... - Unpacking pkg-config:amd64 (1.8.1-1) ... - Setting up libffi-dev:amd64 (3.4.4-1) ... - Setting up libpkgconf3:amd64 (1.8.1-1) ... - Setting up pkgconf-bin (1.8.1-1) ... - Setting up pkgconf:amd64 (1.8.1-1) ... - Setting up pkg-config:amd64 (1.8.1-1) ... - Processing triggers for libc-bin (2.36-9+deb12u10) ... 2025-05-22 12:20.05 ---> using "f5b7876c5fb0e07ef35935f2ca2c8d8c322bb9e9a6838fd200622292fabf821a" from cache /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS")) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). [NOTE] Package ocaml-config is already installed (current version is 3). [NOTE] Package ocaml-base-compiler is already installed (current version is 5.2.1). [NOTE] Package ocaml is already installed (current version is 5.2.1). [NOTE] Package base-unix is already installed (current version is base). [NOTE] Package base-threads is already installed (current version is base). [NOTE] Package base-nnp is already installed (current version is base). [NOTE] Package base-domains is already installed (current version is base). [NOTE] Package base-bigarray is already installed (current version is base). 
The following actions will be performed: === install 75 packages - install angstrom 0.16.1 - install astring 0.8.5 - install backoff 0.1.1 - install base v0.17.2 - install bigarray-compat 1.1.0 - install bigstringaf 0.10.0 - install camlp-streams 5.0.1 - install cmdliner 1.3.0 - install conf-libffi 2.0.0 - install conf-pkg-config 4 - install cppo 1.8.0 - install csexp 1.5.2 - install ctypes 0.23.0 - install ctypes-foreign 0.23.0 - install dune 3.18.2 - install dune-configurator 3.18.2 - install fieldslib v0.17.0 - install fmt 0.10.0 - install integers 0.7.0 - install jane-street-headers v0.17.0 - install jst-config v0.17.0 - install logs 0.8.0 - install mdx 2.5.0 - install mtime 2.1.0 - install multicore-magic 2.3.1 - install num 1.5-1 - install ocaml-compiler-libs v0.17.0 - install ocaml-syntax-shims 1.0.0 - install ocaml-version 4.0.0 - install ocaml_intrinsics_kernel v0.17.1 - install ocamlbuild 0.16.1 - install ocamlfind 1.9.8 - install parsexp v0.17.0 - install pprint 20230830 - install ppx_assert v0.17.0 - install ppx_base v0.17.0 - install ppx_cold v0.17.0 - install ppx_compare v0.17.0 - install ppx_derivers 1.2.1 - install ppx_deriving 6.0.3 - install ppx_enumerate v0.17.0 - install ppx_expect v0.17.2 - install ppx_fields_conv v0.17.0 - install ppx_globalize v0.17.0 - install ppx_hash v0.17.0 - install ppx_here v0.17.0 - install ppx_inline_test v0.17.0 - install ppx_minidebug 2.2.0 - install ppx_optcomp v0.17.0 - install ppx_sexp_conv v0.17.0 - install ppx_string v0.17.0 - install ppx_variants_conv v0.17.0 - install ppxlib 0.35.0 - install ppxlib_jane v0.17.0 - install printbox 0.12 - install printbox-ext-plot 0.12 - install printbox-html 0.12 - install printbox-md 0.12 - install printbox-text 0.12 - install ptime 1.2.0 - install re 1.12.0 - install result 1.5 - install saturn_lockfree 0.5.0 - install seq base - install sexplib v0.17.0 - install sexplib0 v0.17.0 - install stdio v0.17.0 - install stdlib-shims 0.3.0 - install thread-local-storage 0.2 - install time_now v0.17.0 - install topkg 1.0.8 - install tyxml 4.6.0 - install uucp 16.0.0 - install uutf 1.0.4 - install variantslib v0.17.0 <><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><> -> retrieved backoff.0.1.1 (cached) -> retrieved astring.0.8.5 (cached) -> retrieved angstrom.0.16.1 (cached) -> retrieved base.v0.17.2 (cached) -> retrieved bigarray-compat.1.1.0 (cached) -> retrieved bigstringaf.0.10.0 (cached) -> retrieved camlp-streams.5.0.1 (cached) -> installed conf-pkg-config.4 -> retrieved cppo.1.8.0 (cached) -> retrieved cmdliner.1.3.0 (cached) -> retrieved csexp.1.5.2 (cached) -> installed conf-libffi.2.0.0 -> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached) -> retrieved fieldslib.v0.17.0 (cached) -> retrieved fmt.0.10.0 (cached) -> retrieved integers.0.7.0 (cached) -> retrieved jane-street-headers.v0.17.0 (cached) -> retrieved jst-config.v0.17.0 (cached) -> retrieved logs.0.8.0 (cached) -> retrieved mtime.2.1.0 (cached) -> retrieved multicore-magic.2.3.1 (cached) -> retrieved mdx.2.5.0 (cached) -> retrieved num.1.5-1 (cached) -> retrieved ocaml-compiler-libs.v0.17.0 (cached) -> retrieved ocaml-syntax-shims.1.0.0 (cached) -> retrieved ocaml-version.4.0.0 (cached) -> retrieved ocaml_intrinsics_kernel.v0.17.1 (cached) -> retrieved ocamlbuild.0.16.1 (cached) -> retrieved dune.3.18.2, dune-configurator.3.18.2 (cached) -> retrieved ocamlfind.1.9.8 (cached) -> retrieved parsexp.v0.17.0 (cached) -> retrieved pprint.20230830 (cached) -> retrieved ppx_assert.v0.17.0 (cached) -> retrieved 
ppx_base.v0.17.0 (cached) -> retrieved ppx_cold.v0.17.0 (cached) -> retrieved ppx_compare.v0.17.0 (cached) -> retrieved ppx_derivers.1.2.1 (cached) -> retrieved ppx_enumerate.v0.17.0 (cached) -> installed cmdliner.1.3.0 -> installed num.1.5-1 -> retrieved ppx_deriving.6.0.3 (cached) -> retrieved ppx_expect.v0.17.2 (cached) -> retrieved ppx_fields_conv.v0.17.0 (cached) -> retrieved ppx_globalize.v0.17.0 (cached) -> retrieved ppx_hash.v0.17.0 (cached) -> retrieved ppx_here.v0.17.0 (cached) -> retrieved ppx_inline_test.v0.17.0 (cached) -> retrieved ppx_optcomp.v0.17.0 (cached) -> retrieved ppx_sexp_conv.v0.17.0 (cached) -> retrieved ppx_string.v0.17.0 (cached) -> retrieved ppx_variants_conv.v0.17.0 (cached) -> retrieved ppxlib_jane.v0.17.0 (cached) -> retrieved ppx_minidebug.2.2.0 (cached) -> retrieved ptime.1.2.0 (cached) -> retrieved re.1.12.0 (cached) -> retrieved result.1.5 (cached) -> retrieved saturn_lockfree.0.5.0 (cached) -> retrieved seq.base (cached) -> installed seq.base -> retrieved sexplib.v0.17.0 (cached) -> retrieved ppxlib.0.35.0 (cached) -> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached) -> retrieved sexplib0.v0.17.0 (cached) -> retrieved stdio.v0.17.0 (cached) -> retrieved stdlib-shims.0.3.0 (cached) -> retrieved thread-local-storage.0.2 (cached) -> retrieved time_now.v0.17.0 (cached) -> retrieved topkg.1.0.8 (cached) -> retrieved tyxml.4.6.0 (cached) -> retrieved uutf.1.0.4 (cached) -> retrieved variantslib.v0.17.0 (cached) -> retrieved uucp.16.0.0 (cached) -> installed ocamlfind.1.9.8 -> installed ocamlbuild.0.16.1 -> installed topkg.1.0.8 -> installed uutf.1.0.4 -> installed mtime.2.1.0 -> installed fmt.0.10.0 -> installed ptime.1.2.0 -> installed astring.0.8.5 -> installed logs.0.8.0 -> installed dune.3.18.2 -> installed jane-street-headers.v0.17.0 -> installed ppx_derivers.1.2.1 -> installed backoff.0.1.1 -> installed csexp.1.5.2 -> installed bigarray-compat.1.1.0 -> installed camlp-streams.5.0.1 -> installed multicore-magic.2.3.1 -> installed cppo.1.8.0 -> installed ocaml-compiler-libs.v0.17.0 -> installed ocaml-syntax-shims.1.0.0 -> installed ocaml-version.4.0.0 -> installed ocaml_intrinsics_kernel.v0.17.1 -> installed pprint.20230830 -> installed printbox.0.12 -> installed re.1.12.0 -> installed result.1.5 -> installed sexplib0.v0.17.0 -> installed stdlib-shims.0.3.0 -> installed thread-local-storage.0.2 -> installed saturn_lockfree.0.5.0 -> installed integers.0.7.0 -> installed parsexp.v0.17.0 -> installed dune-configurator.3.18.2 -> installed bigstringaf.0.10.0 -> installed mdx.2.5.0 -> installed sexplib.v0.17.0 -> installed angstrom.0.16.1 -> installed tyxml.4.6.0 -> installed printbox-html.0.12 -> installed ctypes.0.23.0 -> installed base.v0.17.2 -> installed variantslib.v0.17.0 -> installed fieldslib.v0.17.0 -> installed stdio.v0.17.0 -> installed ctypes-foreign.0.23.0 -> installed ppxlib.0.35.0 -> installed ppxlib_jane.v0.17.0 -> installed ppx_optcomp.v0.17.0 -> installed ppx_cold.v0.17.0 -> installed ppx_here.v0.17.0 -> installed ppx_variants_conv.v0.17.0 -> installed uucp.16.0.0 -> installed ppx_fields_conv.v0.17.0 -> installed printbox-text.0.12 -> installed printbox-md.0.12 -> installed ppx_enumerate.v0.17.0 -> installed ppx_globalize.v0.17.0 -> installed ppx_deriving.6.0.3 -> installed printbox-ext-plot.0.12 -> installed ppx_compare.v0.17.0 -> installed ppx_sexp_conv.v0.17.0 -> installed ppx_hash.v0.17.0 -> installed ppx_assert.v0.17.0 -> installed ppx_base.v0.17.0 -> installed 
ppx_minidebug.2.2.0 -> installed jst-config.v0.17.0 -> installed ppx_string.v0.17.0 -> installed time_now.v0.17.0 -> installed ppx_inline_test.v0.17.0 -> installed ppx_expect.v0.17.2 Done. # To update the current shell environment, run: eval $(opam env) 2025-05-22 12:20.05 ---> using "12051074805c5c484f3eeafdbf39e2350d43789d9bf9c014781b8f9473c04977" from cache /src: (copy (src .) (dst /src)) 2025-05-22 12:20.06 ---> saved as "9b8e7bffcad9651cff16cb7fb20d774f5501f8eb23d82a8baf1553caeebb199b" /src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")) (cd _build/default/test/config && ../../arrayjit/bin/read_config.exe --read=backend) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/config/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file Wrote value of 'backend' to ocannl_backend.txt (cd _build/default/test_ppx && ./test_ppx_op.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/default/test_ppx && ./test_ppx_op_expected.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/6581522e58f4a81d82648c256bc7fed8/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/ocannl_config. 
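The repeated "Retrieving commandline, environment, or config file variable ..." lines in this run come from OCANNL's configuration loader; as the messages themselves indicate, each variable is looked up on the command line first, then in the environment, then in the ocannl_config file, and otherwise falls back to a built-in default. A minimal sketch of that lookup order, using hypothetical names (get_config, cli_args, config_file_vars) rather than OCANNL's real interface:

(* Illustrative sketch only: a lookup with the precedence the log reports
   (commandline, then environment, then the ocannl_config file, then a
   default). The names get_config, cli_args and config_file_vars are
   hypothetical; this is not OCANNL's actual API. *)
let get_config ~cli_args ~config_file_vars ~default key =
  let prefix = "--" ^ key ^ "=" in
  let from_cli =
    List.find_map
      (fun arg ->
        if String.starts_with ~prefix arg then
          Some
            (String.sub arg (String.length prefix)
               (String.length arg - String.length prefix))
        else None)
      cli_args
  in
  match from_cli with
  | Some v ->
      Printf.printf "Found %s, commandline %s%s\n" v prefix v;
      v
  | None -> (
      match Sys.getenv_opt key with
      | Some v ->
          Printf.printf "Found %s, environment\n" v;
          v
      | None -> (
          match List.assoc_opt key config_file_vars with
          | Some v ->
              Printf.printf "Found %s, in the config file\n" v;
              v
          | None ->
              Printf.printf "Not found, using default %s\n" default;
              default))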
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/dune", lines 30-40, characters 0-281:
30 | (rule
31 |  (alias runtest)
32 |  (target
33 |   (dir log_files))
34 |  (action
35 |   (run
36 |    %{dep:micrograd_demo_logging.exe}
37 |    "--ocannl_debug_backend=text"
38 |    "--ocannl_log_file_stem=micrograd_demo_logging"
39 |    "--ocannl_log_main_domain_to_stdout=false"
40 |    "--ocannl_debug_log_to_stream_files=true")))
(cd _build/default/test && ./micrograd_demo_logging.exe --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_stream_files=true)
Welcome to OCANNL!
Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_cd_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Found true, commandline --ocannl_debug_log_to_stream_files=true
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Fatal error: exception File "src/printbox-text/PrintBox_text.ml", line 212, characters 6-12: Assertion failed
Raised at PrintBox_text.Output.Make_out.to_buf_aux_ in file "src/printbox-text/PrintBox_text.ml", line 212, characters 6-50
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 19-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from PrintBox_text.Output.Make_out.render in file "src/printbox-text/PrintBox_text.ml", line 242, characters 14-64
Called from PrintBox_text.output in file "src/printbox-text/PrintBox_text.ml", line 851, characters 2-31
Called from Minidebug_runtime.PrintBox.output_box in file "minidebug_runtime.ml", line 1527, characters 19-59
Called from Minidebug_runtime.PrintBox.close_log_impl.close_tree in file "minidebug_runtime.ml", line 1572, characters 6-38
Called from Backends.Add_buffer_retrieval_and_syncing.sync_routine in file "arrayjit/lib/backends.ml", lines 144-172, characters 31-82
Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 454-455, characters 4-92
Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 441-455, characters 23-92
Called from Dune__exe__Micrograd_demo_logging in file "test/micrograd_demo_logging.ml", line 34, characters 13-77
(cd _build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL!
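The fatal error above is an assertion inside printbox-text's renderer, hit while ppx_minidebug flushes a debug-log tree (the backtrace runs through PrintBox_text.output and Minidebug_runtime.PrintBox.output_box). For orientation only, a sketch of the same public rendering entry point, assuming the documented printbox and printbox-text combinators; it does not reproduce the assertion, which needs the particular box that the minidebug runtime builds in this test:

(* Sketch of the rendering path implicated in the backtrace: build a box with
   the printbox combinators and render it to text with printbox-text.
   Compile against the printbox and printbox-text libraries, e.g.
   ocamlfind ocamlopt -package printbox,printbox-text -linkpkg sketch.ml *)
let () =
  let box =
    PrintBox.(
      frame
        (vlist
           [ text "[0]: a shape 0:1 grad_a";
             hlist [ text "axis 0"; text "1.38e+2" ] ]))
  in
  print_endline (PrintBox_text.to_string box)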
Reading configuration defaults from /src/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file (cd _build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/ocannl_config. 
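The failures reported next are ppx_expect mismatches: each let%expect_test compares what its body prints against the literal recorded in its [%expect {| ... |}] block, and on a mismatch the runner writes a *.corrected file and dune shows a git diff between the two, as in the output below. The changed renderings presumably come from the in-progress Format -> PPrint migration named in the commit message; if they were the intended output, running "dune promote" (or "dune runtest --auto-promote") would copy the corrected files over the sources. A minimal expect test of the same shape, for reference:

(* Minimal ppx_expect test, analogous to the failing ones: the captured stdout
   is diffed against the [%expect] literal, and a mismatch produces a
   .corrected file like those diffed below. Assumes a dune stanza with
   (inline_tests) and (preprocess (pps ppx_expect)), as this project uses. *)
let%expect_test "render a constant" =
  print_string "[1.00, 2.00, 3.00]";
  [%expect {| [1.00, 2.00, 3.00] |}]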
Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file File "test/micrograd_demo.ml", line 1, characters 0-0: /usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/micrograd_demo.ml _build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/micrograd_demo.ml.corrected diff --git a/_build/default/test/micrograd_demo.ml b/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/micrograd_demo.ml.corrected index 77e46c6..3cb470c 100644 --- a/_build/default/test/micrograd_demo.ml +++ b/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/micrograd_demo.ml.corrected @@ -52,15 +52,14 @@ let%expect_test "Micrograd README basic example" = │├┼───────┤ │ │││ -4.00 │ │ │└┴───────┘ │ - └─────────────────┘ - ┌────────────────────────┐ - │[0]: a shape 0:1 grad_a│ - │┌┬─────────┐ │ - │││axis 0 │ │ - │├┼─────────┤ │ - │││ 1.38e+2 │ │ - │└┴─────────┘ │ - └────────────────────────┘ + └─────────────────┘┌────────────────────────┐ + │[0]: a shape 0:1 grad_a│ + │┌┬─────────┐ │ + │││axis 0 │ │ + │├┼─────────┤ │ + │││ 1.38e+2 │ │ + │└┴─────────┘ │ + └────────────────────────┘ |}]; Tensor.print ~with_code:false ~with_grad:true `Default b; [%expect @@ -72,15 +71,14 @@ let%expect_test "Micrograd README basic example" = │├┼──────┤ │ │││ 2.00 │ │ │└┴──────┘ │ - └─────────────────┘ - ┌────────────────────────┐ - │[2]: b shape 0:1 grad_b│ - │┌┬─────────┐ │ - │││axis 0 │ │ - │├┼─────────┤ │ - │││ 6.45e+2 │ │ - │└┴─────────┘ │ - └────────────────────────┘ + └─────────────────┘┌────────────────────────┐ + │[2]: b shape 0:1 grad_b│ + │┌┬─────────┐ │ + │││axis 0 │ │ + │├┼─────────┤ │ + │││ 6.45e+2 │ │ + │└┴─────────┘ │ + └────────────────────────┘ |}] let%expect_test "Micrograd half-moons example" = File "test/hello_world_op.ml", line 1, characters 0-0: /usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/hello_world_op.ml _build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/hello_world_op.ml.corrected diff --git a/_build/default/test/hello_world_op.ml b/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/hello_world_op.ml.corrected index ba9d7ef..6b90c44 100644 --- a/_build/default/test/hello_world_op.ml +++ b/_build/.sandbox/45bd23cf4d65ee7463b943b460e50f97/default/test/hello_world_op.ml.corrected @@ -102,36 +102,39 @@ let%expect_test "Print constant tensor" = let%op hey = [ (1, 2, 3); (4, 5, 6) ] in Train.forward_and_forget backend ctx hey; Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey; - [%expect {| [1.00, 2.00, 3.00; 4.00, 5.00, 6.00] |}]; + [%expect {| [ 1.00 , 2.00 , 3.00 ; 4.00 , 5.00 , 6.00 ][0]: c2x3_hey shape 1:3->0:2 |}]; Tensor.print ~with_code:false ~with_grad:false `Default @@ hey; [%expect {| - ┌─────────────────────────────────────────────────────────────┐ - │[0]: [1.00, 2.00, 3.00; 4.00, 5.00, 6.00]_hey shape 1:3->0:2 │ - │┌──────┬──────────────────┐ │ - ││ │axis 1 │ │ - │├──────┼──────────────────┤ │ - ││axis 0│ 1.00 2.00 3.00 │ │ - ││ │ 4.00 5.00 6.00 │ │ - │└──────┴──────────────────┘ │ - └─────────────────────────────────────────────────────────────┘ + ┌─────────────────────────────┐ + │[0]: c2x3_hey shape 1:3->0:2 │ + │┌──────┬──────────────────┐ │ + ││ │axis 1 │ │ + │├──────┼──────────────────┤ │ + ││axis 0│ 1.00 2.00 3.00 │ │ + ││ │ 4.00 5.00 6.00 │ │ + │└──────┴──────────────────┘ │ + └─────────────────────────────┘ |}]; let%op hoo = [| [ 1; 2; 3 ]; [ 4; 5; 6 ] |] in Train.forward_and_forget backend ctx hoo; Tensor.print ~with_code:false 
~with_grad:false `Inline @@ hoo; - [%expect {| [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|] |}]; + [%expect {| + [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |][1]: c2x3_hoo shape + 0:2|1:3 + |}]; Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo; [%expect {| - ┌──────────────────────────────────────────────────────────────────┐ - │[1]: [|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|]_hoo shape 0:2|1:3 │ - │┌──────┬──────────────────┐ │ - ││ │axis 1 │ │ - │├──────┼──────────────────┤ │ - ││axis 0│ 1.00 2.00 3.00 │ │ - ││ │ 4.00 5.00 6.00 │ │ - │└──────┴──────────────────┘ │ - └──────────────────────────────────────────────────────────────────┘ + ┌────────────────────────────┐ + │[1]: c2x3_hoo shape 0:2|1:3 │ + │┌──────┬──────────────────┐ │ + ││ │axis 1 │ │ + │├──────┼──────────────────┤ │ + ││axis 0│ 1.00 2.00 3.00 │ │ + ││ │ 4.00 5.00 6.00 │ │ + │└──────┴──────────────────┘ │ + └────────────────────────────┘ |}]; let%op hey2 = [ @@ -145,10 +148,12 @@ let%expect_test "Print constant tensor" = Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey2; [%expect {| - [(1.00, 2.00, 3.00), (4.00, 5.00, 6.00); - (7.00, 8.00, 9.00), (10.00, 11.00, 12.00); - (13.00, 14.00, 15.00), (16.00, 17.00, 18.00); - (19.00, 20.00, 21.00), (22.00, 23.00, 24.00)] + [ + ( 1.00 , 2.00 , 3.00 ) , ( 4.00 , 5.00 , 6.00 ) + ; ( 7.00 , 8.00 , 9.00 ) , ( 10.00 , 11.00 , 12.00 ) + ; ( 13.00 , 14.00 , 15.00 ) , ( 16.00 , 17.00 , 18.00 ) + ; ( 19.00 , 20.00 , 21.00 ) , ( 22.00 , 23.00 , 24.00 ) + ][2]: c4x2x3_hey2 shape 1:2,2:3->0:4 |}]; Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2; [%expect @@ -178,10 +183,12 @@ let%expect_test "Print constant tensor" = Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo2; [%expect {| - [|[[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]]; - [[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]]; - [[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]]; - [[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]]|] + [| + [ [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] ] + ; [ [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] ] + ; [ [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] ] + ; [ [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] ] + |][3]: c4x2x3_hoo2 shape 0:4|1:2,2:3 |}]; Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo2; [%expect @@ -209,10 +216,12 @@ let%expect_test "Print constant tensor" = Tensor.print ~with_code:false ~with_grad:false `Inline @@ heyhoo; [%expect {| - [|[|[1.00; 2.00; 3.00]; [4.00; 5.00; 6.00]|]; - [|[7.00; 8.00; 9.00]; [10.00; 11.00; 12.00]|]; - [|[13.00; 14.00; 15.00]; [16.00; 17.00; 18.00]|]; - [|[19.00; 20.00; 21.00]; [22.00; 23.00; 24.00]|]|] + [| + [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |] + ; [| [ 7.00 ; 8.00 ; 9.00 ] ; [ 10.00 ; 11.00 ; 12.00 ] |] + ; [| [ 13.00 ; 14.00 ; 15.00 ] ; [ 16.00 ; 17.00 ; 18.00 ] |] + ; [| [ 19.00 ; 20.00 ; 21.00 ] ; [ 22.00 ; 23.00 ; 24.00 ] |] + |][4]: c4x2x3_heyhoo shape 0:4,1:2|2:3 |}]; Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo; [%expect @@ -241,14 +250,23 @@ let%expect_test "Print constant tensor" = [%expect {| [| - [|[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]]; - [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]|]; - [|[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]]; - [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]|]; - [|[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]]; - [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]|]; - [|[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]]; - [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]|]|] + [| + [ [ 1.00 ; 
31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ] + ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ] + |] + ; [| + [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ] + ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ] + |] + ; [| + [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ] + ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ] + |] + ; [| + [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ] + ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ] + |] + |][5]: c4x2x3x2_heyhoo2 shape 0:4,1:2|2:3,3:2 |}]; Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo2; [%expect @@ -295,15 +313,26 @@ let%expect_test "Print constant tensor" = {| [| [| - [[[1.00; 31.00]; [2.00; 32.00]; [3.00; 33.00]]; - [[4.00; 34.00]; [5.00; 35.00]; [6.00; 36.00]]]; - [[[7.00; 37.00]; [8.00; 38.00]; [9.00; 39.00]]; - [[10.00; 40.00]; [11.00; 41.00]; [12.00; 42.00]]]|]; - [| - [[[13.00; 43.00]; [14.00; 44.00]; [15.00; 45.00]]; - [[16.00; 46.00]; [17.00; 47.00]; [18.00; 48.00]]]; - [[[19.00; 49.00]; [20.00; 50.00]; [21.00; 51.00]]; - [[22.00; 52.00]; [23.00; 53.00]; [24.00; 54.00]]]|]|] + [ + [ [ 1.00 ; 31.00 ] ; [ 2.00 ; 32.00 ] ; [ 3.00 ; 33.00 ] ] + ; [ [ 4.00 ; 34.00 ] ; [ 5.00 ; 35.00 ] ; [ 6.00 ; 36.00 ] ] + ] + ; [ + [ [ 7.00 ; 37.00 ] ; [ 8.00 ; 38.00 ] ; [ 9.00 ; 39.00 ] ] + ; [ [ 10.00 ; 40.00 ] ; [ 11.00 ; 41.00 ] ; [ 12.00 ; 42.00 ] ] + ] + |] + ; [| + [ + [ [ 13.00 ; 43.00 ] ; [ 14.00 ; 44.00 ] ; [ 15.00 ; 45.00 ] ] + ; [ [ 16.00 ; 46.00 ] ; [ 17.00 ; 47.00 ] ; [ 18.00 ; 48.00 ] ] + ] + ; [ + [ [ 19.00 ; 49.00 ] ; [ 20.00 ; 50.00 ] ; [ 21.00 ; 51.00 ] ] + ; [ [ 22.00 ; 52.00 ] ; [ 23.00 ; 53.00 ] ; [ 24.00 ; 54.00 ] ] + ] + |] + |][6]: c2x2x2x3x2_heyhoo3 shape 0:2,1:2|2:2,3:3,4:2 |}]; Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo3; [%expect @@ -355,15 +384,26 @@ let%expect_test "Print constant tensor" = {| [| [ - [[1.00, 31.00; 2.00, 32.00; 3.00, 33.00]; - [4.00, 34.00; 5.00, 35.00; 6.00, 36.00]]; - [[7.00, 37.00; 8.00, 38.00; 9.00, 39.00]; - [10.00, 40.00; 11.00, 41.00; 12.00, 42.00]]]; - [ - [[13.00, 43.00; 14.00, 44.00; 15.00, 45.00]; - [16.00, 46.00; 17.00, 47.00; 18.00, 48.00]]; - [[19.00, 49.00; 20.00, 50.00; 21.00, 51.00]; - [22.00, 52.00; 23.00, 53.00; 24.00, 54.00]]]|] + [ + [ 1.00 , 31.00 ; 2.00 , 32.00 ; 3.00 , 33.00 ] + ; [ 4.00 , 34.00 ; 5.00 , 35.00 ; 6.00 , 36.00 ] + ] + ; [ + [ 7.00 , 37.00 ; 8.00 , 38.00 ; 9.00 , 39.00 ] + ; [ 10.00 , 40.00 ; 11.00 , 41.00 ; 12.00 , 42.00 ] + ] + ] + ; [ + [ + [ 13.00 , 43.00 ; 14.00 , 44.00 ; 15.00 , 45.00 ] + ; [ 16.00 , 46.00 ; 17.00 , 47.00 ; 18.00 , 48.00 ] + ] + ; [ + [ 19.00 , 49.00 ; 20.00 , 50.00 ; 21.00 , 51.00 ] + ; [ 22.00 , 52.00 ; 23.00 , 53.00 ; 24.00 , 54.00 ] + ] + ] + |][7]: c2x2x2x3x2_heyhoo4 shape 0:2|4:2->1:2,2:2,3:3 |}]; Tensor.print ~with_code:false ~with_grad:false `Default @@ heyhoo4; [%expect @@ -462,8 +502,29 @@ let%expect_test "Big matrix" = Tensor.print ~with_code:false ~with_grad:false `Inline zero_to_twenty; [%expect {| - [0.00; 1.00; 2.00; 3.00; 4.00; 5.00; 6.00; 7.00; 8.00; 9.00; 10.00; 11.00; - 12.00; 13.00; 14.00; 15.00; 16.00; 17.00; 18.00; 19.00; 20.00] + [ + 0.00 + ; 1.00 + ; 2.00 + ; 3.00 + ; 4.00 + ; 5.00 + ; 6.00 + ; 7.00 + ; 8.00 + ; 9.00 + ; 10.00 + ; 11.00 + ; 12.00 + ; 13.00 + ; 14.00 + ; 15.00 + ; 16.00 + ; 17.00 + ; 18.00 + ; 19.00 + ; 20.00 + ][2]: 0...20 shape 0:21 |}]; Tensor.print ~with_code:false ~with_grad:false `Default zero_to_twenty; [%expect (cd _build/default/test && 
./moons_demo_parallel_run.exe) Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config. Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file ("Set log_level to" 1) └─{orphaned from #2} Retrieving commandline, environment, or config file variable ocannl_backend Found multicore_cc, in the config file Properties of devices: (multicore_devices (device ((device_name CPU) (device_ordinal 0) (num_domains 72)))) @!Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity Found true, in the config file Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files Not found, using default false Retrieving commandline, environment, or config file variable ocannl_ll_ident_style Not found, using default heuristic Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level Not found, using default 3 Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command Not found, using default gcc Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout Not found, using default false Batch=59, step=60, lr=0.200000, batch loss=23.609453, epoch loss=23.609453 Batch=119, step=120, lr=0.199750, batch loss=8.539634, epoch loss=32.149087 Batch=179, step=180, lr=0.199500, batch loss=2.626295, epoch loss=34.775382 Batch=239, step=240, lr=0.199250, batch loss=0.849657, epoch loss=35.625039 Batch=299, step=300, lr=0.199000, batch loss=1.447177, epoch loss=37.072216 Batch=359, step=360, lr=0.198750, batch loss=1.329296, epoch loss=38.401512 Batch=419, step=420, lr=0.198500, batch loss=0.618569, epoch loss=39.020081 Batch=479, step=480, lr=0.198250, batch loss=0.822060, epoch loss=39.842141 Batch=539, step=540, lr=0.198000, batch loss=0.690244, epoch loss=40.532385 Batch=599, step=600, lr=0.197750, batch loss=1.063878, epoch loss=41.596263 Batch=659, step=660, lr=0.197500, batch loss=0.483340, epoch loss=42.079603 Batch=719, step=720, lr=0.197250, batch loss=0.411299, epoch loss=42.490902 Batch=779, step=780, lr=0.197000, batch loss=0.470123, epoch loss=42.961024 Batch=839, step=840, lr=0.196750, batch loss=0.446661, epoch loss=43.407685 Batch=899, step=900, lr=0.196500, batch loss=0.382721, epoch loss=43.790407 Batch=959, step=960, lr=0.196250, batch loss=0.245136, epoch loss=44.035543 Batch=1019, step=1020, lr=0.196000, batch loss=0.466506, epoch loss=44.502049 Batch=1079, step=1080, lr=0.195750, batch loss=0.248781, epoch loss=44.750829 Batch=1139, step=1140, lr=0.195500, batch loss=0.317440, epoch loss=45.068269 Batch=1199, step=1200, lr=0.195250, batch loss=0.263683, epoch loss=45.331952 Epoch=0, step=1200, lr=0.195250, epoch loss=45.331952 Batch=59, step=1260, lr=0.195000, batch loss=0.262138, epoch loss=0.262138 Batch=119, step=1320, lr=0.194750, batch loss=0.205243, epoch loss=0.467381 Batch=179, step=1380, lr=0.194500, batch loss=0.243644, epoch loss=0.711025 Batch=239, step=1440, lr=0.194250, batch loss=0.347897, epoch loss=1.058921 Batch=299, step=1500, lr=0.194000, batch loss=0.247348, epoch loss=1.306269 Batch=359, step=1560, lr=0.193750, batch loss=0.316559, epoch loss=1.622828 Batch=419, step=1620, lr=0.193500, batch loss=0.312735, epoch loss=1.935563 Batch=479, step=1680, lr=0.193250, batch loss=0.276268, epoch loss=2.211831 Batch=539, step=1740, lr=0.193000, batch loss=0.209826, epoch loss=2.421657 Batch=599, step=1800, lr=0.192750, batch 
loss=0.250384, epoch loss=2.672042 Batch=659, step=1860, lr=0.192500, batch loss=0.367201, epoch loss=3.039243 Batch=719, step=1920, lr=0.192250, batch loss=0.354917, epoch loss=3.394160 Batch=779, step=1980, lr=0.192000, batch loss=0.381382, epoch loss=3.775542 Batch=839, step=2040, lr=0.191750, batch loss=0.339637, epoch loss=4.115179 Batch=899, step=2100, lr=0.191500, batch loss=0.295234, epoch loss=4.410413 Batch=959, step=2160, lr=0.191250, batch loss=0.214033, epoch loss=4.624446 Batch=1019, step=2220, lr=0.191000, batch loss=0.330972, epoch loss=4.955419 Batch=1079, step=2280, lr=0.190750, batch loss=0.208236, epoch loss=5.163654 Batch=1139, step=2340, lr=0.190500, batch loss=0.278374, epoch loss=5.442028 Batch=1199, step=2400, lr=0.190250, batch loss=0.220793, epoch loss=5.662821 Epoch=1, step=2400, lr=0.190250, epoch loss=5.662821 Batch=59, step=2460, lr=0.190000, batch loss=0.230363, epoch loss=0.230363 Batch=119, step=2520, lr=0.189750, batch loss=0.195962, epoch loss=0.426325 Batch=179, step=2580, lr=0.189500, batch loss=0.221156, epoch loss=0.647481 Batch=239, step=2640, lr=0.189250, batch loss=0.328098, epoch loss=0.975578 Batch=299, step=2700, lr=0.189000, batch loss=0.202947, epoch loss=1.178525 Batch=359, step=2760, lr=0.188750, batch loss=0.289890, epoch loss=1.468415 Batch=419, step=2820, lr=0.188500, batch loss=0.281744, epoch loss=1.750160 Batch=479, step=2880, lr=0.188250, batch loss=0.264844, epoch loss=2.015004 Batch=539, step=2940, lr=0.188000, batch loss=0.203798, epoch loss=2.218802 Batch=599, step=3000, lr=0.187750, batch loss=0.248079, epoch loss=2.466881 Batch=659, step=3060, lr=0.187500, batch loss=0.345056, epoch loss=2.811937 Batch=719, step=3120, lr=0.187250, batch loss=0.343542, epoch loss=3.155479 Batch=779, step=3180, lr=0.187000, batch loss=0.366989, epoch loss=3.522468 Batch=839, step=3240, lr=0.186750, batch loss=0.321779, epoch loss=3.844248 Batch=899, step=3300, lr=0.186500, batch loss=0.283865, epoch loss=4.128112 Batch=959, step=3360, lr=0.186250, batch loss=0.214411, epoch loss=4.342523 Batch=1019, step=3420, lr=0.186000, batch loss=0.306400, epoch loss=4.648923 Batch=1079, step=3480, lr=0.185750, batch loss=0.177313, epoch loss=4.826236 Batch=1139, step=3540, lr=0.185500, batch loss=0.235576, epoch loss=5.061812 Batch=1199, step=3600, lr=0.185250, batch loss=0.197911, epoch loss=5.259723 Epoch=2, step=3600, lr=0.185250, epoch loss=5.259723 Batch=59, step=3660, lr=0.185000, batch loss=0.226539, epoch loss=0.226539 Batch=119, step=3720, lr=0.184750, batch loss=0.191802, epoch loss=0.418341 Batch=179, step=3780, lr=0.184500, batch loss=0.210712, epoch loss=0.629053 Batch=239, step=3840, lr=0.184250, batch loss=0.314104, epoch loss=0.943157 Batch=299, step=3900, lr=0.184000, batch loss=0.206011, epoch loss=1.149168 Batch=359, step=3960, lr=0.183750, batch loss=0.291253, epoch loss=1.440420 Batch=419, step=4020, lr=0.183500, batch loss=0.297434, epoch loss=1.737854 Batch=479, step=4080, lr=0.183250, batch loss=0.260511, epoch loss=1.998365 Batch=539, step=4140, lr=0.183000, batch loss=0.195229, epoch loss=2.193593 Batch=599, step=4200, lr=0.182750, batch loss=0.230395, epoch loss=2.423989 Batch=659, step=4260, lr=0.182500, batch loss=0.337132, epoch loss=2.761121 Batch=719, step=4320, lr=0.182250, batch loss=0.347122, epoch loss=3.108243 Batch=779, step=4380, lr=0.182000, batch loss=0.346320, epoch loss=3.454563 Batch=839, step=4440, lr=0.181750, batch loss=0.317770, epoch loss=3.772333 Batch=899, step=4500, lr=0.181500, batch loss=0.283906, epoch 
loss=4.056239 Batch=959, step=4560, lr=0.181250, batch loss=0.238371, epoch loss=4.294610 Batch=1019, step=4620, lr=0.181000, batch loss=0.336891, epoch loss=4.631501 Batch=1079, step=4680, lr=0.180750, batch loss=0.208592, epoch loss=4.840094 Batch=1139, step=4740, lr=0.180500, batch loss=0.249212, epoch loss=5.089306 Batch=1199, step=4800, lr=0.180250, batch loss=0.191768, epoch loss=5.281074 Epoch=3, step=4800, lr=0.180250, epoch loss=5.281074 Batch=59, step=4860, lr=0.180000, batch loss=0.228079, epoch loss=0.228079 Batch=119, step=4920, lr=0.179750, batch loss=0.190219, epoch loss=0.418298 Batch=179, step=4980, lr=0.179500, batch loss=0.205905, epoch loss=0.624203 Batch=239, step=5040, lr=0.179250, batch loss=0.309067, epoch loss=0.933269 Batch=299, step=5100, lr=0.179000, batch loss=0.204593, epoch loss=1.137862 Batch=359, step=5160, lr=0.178750, batch loss=0.271139, epoch loss=1.409001 Batch=419, step=5220, lr=0.178500, batch loss=0.264055, epoch loss=1.673056 Batch=479, step=5280, lr=0.178250, batch loss=0.239692, epoch loss=1.912748 Batch=539, step=5340, lr=0.178000, batch loss=0.189445, epoch loss=2.102194 Batch=599, step=5400, lr=0.177750, batch loss=0.231019, epoch loss=2.333213 Batch=659, step=5460, lr=0.177500, batch loss=0.323592, epoch loss=2.656805 Batch=719, step=5520, lr=0.177250, batch loss=0.325867, epoch loss=2.982672 Batch=779, step=5580, lr=0.177000, batch loss=0.343179, epoch loss=3.325851 Batch=839, step=5640, lr=0.176750, batch loss=0.309345, epoch loss=3.635195 Batch=899, step=5700, lr=0.176500, batch loss=0.273171, epoch loss=3.908366 Batch=959, step=5760, lr=0.176250, batch loss=0.214921, epoch loss=4.123287 Batch=1019, step=5820, lr=0.176000, batch loss=0.339150, epoch loss=4.462436 Batch=1079, step=5880, lr=0.175750, batch loss=0.207828, epoch loss=4.670264 Batch=1139, step=5940, lr=0.175500, batch loss=0.240055, epoch loss=4.910319 Batch=1199, step=6000, lr=0.175250, batch loss=0.186862, epoch loss=5.097180 Epoch=4, step=6000, lr=0.175250, epoch loss=5.097180 Batch=59, step=6060, lr=0.175000, batch loss=0.230529, epoch loss=0.230529 Batch=119, step=6120, lr=0.174750, batch loss=0.194291, epoch loss=0.424820 Batch=179, step=6180, lr=0.174500, batch loss=0.201541, epoch loss=0.626361 Batch=239, step=6240, lr=0.174250, batch loss=0.302380, epoch loss=0.928741 Batch=299, step=6300, lr=0.174000, batch loss=0.203930, epoch loss=1.132671 Batch=359, step=6360, lr=0.173750, batch loss=0.266115, epoch loss=1.398786 Batch=419, step=6420, lr=0.173500, batch loss=0.265282, epoch loss=1.664067 Batch=479, step=6480, lr=0.173250, batch loss=0.243319, epoch loss=1.907387 Batch=539, step=6540, lr=0.173000, batch loss=0.192969, epoch loss=2.100355 Batch=599, step=6600, lr=0.172750, batch loss=0.234500, epoch loss=2.334855 Batch=659, step=6660, lr=0.172500, batch loss=0.312107, epoch loss=2.646963 Batch=719, step=6720, lr=0.172250, batch loss=0.314221, epoch loss=2.961184 Batch=779, step=6780, lr=0.172000, batch loss=0.333223, epoch loss=3.294407 Batch=839, step=6840, lr=0.171500, batch loss=0.303757, epoch loss=3.598164 Batch=899, step=6900, lr=0.171500, batch loss=0.268431, epoch loss=3.866595 Batch=959, step=6960, lr=0.171250, batch loss=0.211110, epoch loss=4.077705 Batch=1019, step=7020, lr=0.171000, batch loss=0.330372, epoch loss=4.408078 Batch=1079, step=7080, lr=0.170750, batch loss=0.180928, epoch loss=4.589006 Batch=1139, step=7140, lr=0.170500, batch loss=0.216288, epoch loss=4.805294 Batch=1199, step=7200, lr=0.170250, batch loss=0.181887, epoch loss=4.987181 
Epoch=5, step=7200, lr=0.170250, epoch loss=4.987181 Batch=59, step=7260, lr=0.170000, batch loss=0.232941, epoch loss=0.232941 Batch=119, step=7320, lr=0.169750, batch loss=0.184385, epoch loss=0.417327 Batch=179, step=7380, lr=0.169500, batch loss=0.196288, epoch loss=0.613614 Batch=239, step=7440, lr=0.169250, batch loss=0.290853, epoch loss=0.904467 Batch=299, step=7500, lr=0.169000, batch loss=0.200779, epoch loss=1.105246 Batch=359, step=7560, lr=0.168750, batch loss=0.258485, epoch loss=1.363731 Batch=419, step=7620, lr=0.168500, batch loss=0.256773, epoch loss=1.620504 Batch=479, step=7680, lr=0.168250, batch loss=0.236037, epoch loss=1.856541 Batch=539, step=7740, lr=0.168000, batch loss=0.187845, epoch loss=2.044386 Batch=599, step=7800, lr=0.167750, batch loss=0.223972, epoch loss=2.268358 Batch=659, step=7860, lr=0.167500, batch loss=0.305891, epoch loss=2.574249 Batch=719, step=7920, lr=0.167250, batch loss=0.309341, epoch loss=2.883590 Batch=779, step=7980, lr=0.167000, batch loss=0.329903, epoch loss=3.213493 Batch=839, step=8040, lr=0.166750, batch loss=0.292457, epoch loss=3.505950 Batch=899, step=8100, lr=0.166500, batch loss=0.261744, epoch loss=3.767693 Batch=959, step=8160, lr=0.166250, batch loss=0.193358, epoch loss=3.961051 Batch=1019, step=8220, lr=0.166000, batch loss=0.313948, epoch loss=4.274999 Batch=1079, step=8280, lr=0.165750, batch loss=0.172165, epoch loss=4.447163 Batch=1139, step=8340, lr=0.165500, batch loss=0.209717, epoch loss=4.656881 Batch=1199, step=8400, lr=0.165250, batch loss=0.178243, epoch loss=4.835124 Epoch=6, step=8400, lr=0.165250, epoch loss=4.835124 Batch=59, step=8460, lr=0.165000, batch loss=0.229793, epoch loss=0.229793 Batch=119, step=8520, lr=0.164750, batch loss=0.174981, epoch loss=0.404774 Batch=179, step=8580, lr=0.164500, batch loss=0.187812, epoch loss=0.592586 Batch=239, step=8640, lr=0.164250, batch loss=0.278233, epoch loss=0.870819 Batch=299, step=8700, lr=0.164000, batch loss=0.191934, epoch loss=1.062753 Batch=359, step=8760, lr=0.163750, batch loss=0.248685, epoch loss=1.311438 Batch=419, step=8820, lr=0.163500, batch loss=0.245561, epoch loss=1.556999 Batch=479, step=8880, lr=0.163250, batch loss=0.228635, epoch loss=1.785634 Batch=539, step=8940, lr=0.163000, batch loss=0.178079, epoch loss=1.963713 Batch=599, step=9000, lr=0.162750, batch loss=0.217444, epoch loss=2.181158 Batch=659, step=9060, lr=0.162500, batch loss=0.294787, epoch loss=2.475945 Batch=719, step=9120, lr=0.162250, batch loss=0.296490, epoch loss=2.772435 Batch=779, step=9180, lr=0.162000, batch loss=0.316675, epoch loss=3.089110 Batch=839, step=9240, lr=0.161750, batch loss=0.287287, epoch loss=3.376397 Batch=899, step=9300, lr=0.161500, batch loss=0.251023, epoch loss=3.627420 Batch=959, step=9360, lr=0.161250, batch loss=0.190615, epoch loss=3.818035 Batch=1019, step=9420, lr=0.161000, batch loss=0.311613, epoch loss=4.129648 Batch=1079, step=9480, lr=0.160750, batch loss=0.191706, epoch loss=4.321354 Batch=1139, step=9540, lr=0.160500, batch loss=0.215711, epoch loss=4.537065 Batch=1199, step=9600, lr=0.160250, batch loss=0.165616, epoch loss=4.702681 Epoch=7, step=9600, lr=0.160250, epoch loss=4.702681 Batch=59, step=9660, lr=0.160000, batch loss=0.197278, epoch loss=0.197278 Batch=119, step=9720, lr=0.159750, batch loss=0.165427, epoch loss=0.362705 Batch=179, step=9780, lr=0.159500, batch loss=0.179285, epoch loss=0.541989 Batch=239, step=9840, lr=0.159250, batch loss=0.263150, epoch loss=0.805139 Batch=299, step=9900, lr=0.159000, batch 
loss=0.183125, epoch loss=0.988264 Batch=359, step=9960, lr=0.158750, batch loss=0.239948, epoch loss=1.228211 Batch=419, step=10020, lr=0.158500, batch loss=0.232869, epoch loss=1.461081 Batch=479, step=10080, lr=0.158250, batch loss=0.213260, epoch loss=1.674341 Batch=539, step=10140, lr=0.158000, batch loss=0.170419, epoch loss=1.844759 Batch=599, step=10200, lr=0.157750, batch loss=0.200325, epoch loss=2.045084 Batch=659, step=10260, lr=0.157500, batch loss=0.283012, epoch loss=2.328096 Batch=719, step=10320, lr=0.157250, batch loss=0.279879, epoch loss=2.607975 Batch=779, step=10380, lr=0.157000, batch loss=0.300655, epoch loss=2.908630 Batch=839, step=10440, lr=0.156750, batch loss=0.271122, epoch loss=3.179752 Batch=899, step=10500, lr=0.156500, batch loss=0.239657, epoch loss=3.419409 Batch=959, step=10560, lr=0.156250, batch loss=0.198336, epoch loss=3.617745 Batch=1019, step=10620, lr=0.156000, batch loss=0.293454, epoch loss=3.911199 Batch=1079, step=10680, lr=0.155750, batch loss=0.180077, epoch loss=4.091276 Batch=1139, step=10740, lr=0.155500, batch loss=0.203591, epoch loss=4.294867 Batch=1199, step=10800, lr=0.155250, batch loss=0.154575, epoch loss=4.449442 Epoch=8, step=10800, lr=0.155250, epoch loss=4.449442 Batch=59, step=10860, lr=0.155000, batch loss=0.176634, epoch loss=0.176634 Batch=119, step=10920, lr=0.154750, batch loss=0.146993, epoch loss=0.323627 Batch=179, step=10980, lr=0.154500, batch loss=0.166377, epoch loss=0.490004 Batch=239, step=11040, lr=0.154250, batch loss=0.242150, epoch loss=0.732153 Batch=299, step=11100, lr=0.154000, batch loss=0.168808, epoch loss=0.900961 Batch=359, step=11160, lr=0.153750, batch loss=0.223415, epoch loss=1.124376 Batch=419, step=11220, lr=0.153500, batch loss=0.230530, epoch loss=1.354906 Batch=479, step=11280, lr=0.153250, batch loss=0.201426, epoch loss=1.556332 Batch=539, step=11340, lr=0.153000, batch loss=0.161164, epoch loss=1.717496 Batch=599, step=11400, lr=0.152750, batch loss=0.182333, epoch loss=1.899829 Batch=659, step=11460, lr=0.152500, batch loss=0.262990, epoch loss=2.162819 Batch=719, step=11520, lr=0.152250, batch loss=0.257052, epoch loss=2.419871 Batch=779, step=11580, lr=0.152000, batch loss=0.271766, epoch loss=2.691638 Batch=839, step=11640, lr=0.151750, batch loss=0.256005, epoch loss=2.947643 Batch=899, step=11700, lr=0.151500, batch loss=0.212099, epoch loss=3.159742 Batch=959, step=11760, lr=0.151250, batch loss=0.170691, epoch loss=3.330433 Batch=1019, step=11820, lr=0.151000, batch loss=0.263481, epoch loss=3.593914 Batch=1079, step=11880, lr=0.150750, batch loss=0.153161, epoch loss=3.747075 Batch=1139, step=11940, lr=0.150500, batch loss=0.183687, epoch loss=3.930762 Batch=1199, step=12000, lr=0.150250, batch loss=0.139680, epoch loss=4.070442 Epoch=9, step=12000, lr=0.150250, epoch loss=4.070442 Batch=59, step=12060, lr=0.150000, batch loss=0.158856, epoch loss=0.158856 Batch=119, step=12120, lr=0.149750, batch loss=0.128075, epoch loss=0.286930 Batch=179, step=12180, lr=0.149500, batch loss=0.152083, epoch loss=0.439014 Batch=239, step=12240, lr=0.149250, batch loss=0.221623, epoch loss=0.660636 Batch=299, step=12300, lr=0.149000, batch loss=0.143878, epoch loss=0.804515 Batch=359, step=12360, lr=0.148750, batch loss=0.196264, epoch loss=1.000779 Batch=419, step=12420, lr=0.148500, batch loss=0.205563, epoch loss=1.206342 Batch=479, step=12480, lr=0.148250, batch loss=0.179358, epoch loss=1.385700 Batch=539, step=12540, lr=0.147750, batch loss=0.142001, epoch loss=1.527701 Batch=599, 
step=12600, lr=0.147750, batch loss=0.150739, epoch loss=1.678440 Batch=659, step=12660, lr=0.147500, batch loss=0.223708, epoch loss=1.902148 Batch=719, step=12720, lr=0.147250, batch loss=0.235127, epoch loss=2.137275 Batch=779, step=12780, lr=0.147000, batch loss=0.251565, epoch loss=2.388840 Batch=839, step=12840, lr=0.146750, batch loss=0.224358, epoch loss=2.613199 Batch=899, step=12900, lr=0.146500, batch loss=0.193474, epoch loss=2.806673 Batch=959, step=12960, lr=0.146250, batch loss=0.159952, epoch loss=2.966625 Batch=1019, step=13020, lr=0.146000, batch loss=0.260500, epoch loss=3.227125 Batch=1079, step=13080, lr=0.145750, batch loss=0.117187, epoch loss=3.344312 Batch=1139, step=13140, lr=0.145500, batch loss=0.160385, epoch loss=3.504697 Batch=1199, step=13200, lr=0.145250, batch loss=0.119238, epoch loss=3.623935 Epoch=10, step=13200, lr=0.145250, epoch loss=3.623935 Batch=59, step=13260, lr=0.145000, batch loss=0.140068, epoch loss=0.140068 Batch=119, step=13320, lr=0.144750, batch loss=0.122142, epoch loss=0.262210 Batch=179, step=13380, lr=0.144500, batch loss=0.128088, epoch loss=0.390298 Batch=239, step=13440, lr=0.144250, batch loss=0.191929, epoch loss=0.582227 Batch=299, step=13500, lr=0.144000, batch loss=0.116682, epoch loss=0.698910 Batch=359, step=13560, lr=0.143750, batch loss=0.163493, epoch loss=0.862402 Batch=419, step=13620, lr=0.143500, batch loss=0.166364, epoch loss=1.028766 Batch=479, step=13680, lr=0.143250, batch loss=0.149393, epoch loss=1.178159 Batch=539, step=13740, lr=0.143000, batch loss=0.116097, epoch loss=1.294256 Batch=599, step=13800, lr=0.142750, batch loss=0.119991, epoch loss=1.414247 Batch=659, step=13860, lr=0.142500, batch loss=0.177697, epoch loss=1.591944 Batch=719, step=13920, lr=0.142250, batch loss=0.172740, epoch loss=1.764684 Batch=779, step=13980, lr=0.142000, batch loss=0.179594, epoch loss=1.944278 Batch=839, step=14040, lr=0.141750, batch loss=0.198560, epoch loss=2.142838 Batch=899, step=14100, lr=0.141500, batch loss=0.216549, epoch loss=2.359387 Batch=959, step=14160, lr=0.141250, batch loss=0.105132, epoch loss=2.464519 Batch=1019, step=14220, lr=0.141000, batch loss=0.193031, epoch loss=2.657550 Batch=1079, step=14280, lr=0.140750, batch loss=0.083980, epoch loss=2.741530 Batch=1139, step=14340, lr=0.140500, batch loss=0.138069, epoch loss=2.879598 Batch=1199, step=14400, lr=0.140250, batch loss=0.087928, epoch loss=2.967526 Epoch=11, step=14400, lr=0.140250, epoch loss=2.967526 Batch=59, step=14460, lr=0.140000, batch loss=0.102506, epoch loss=0.102506 Batch=119, step=14520, lr=0.139750, batch loss=0.099055, epoch loss=0.201561 Batch=179, step=14580, lr=0.139500, batch loss=0.099726, epoch loss=0.301287 Batch=239, step=14640, lr=0.139250, batch loss=0.142618, epoch loss=0.443904 Batch=299, step=14700, lr=0.139000, batch loss=0.073521, epoch loss=0.517425 Batch=359, step=14760, lr=0.138750, batch loss=0.122870, epoch loss=0.640295 Batch=419, step=14820, lr=0.138500, batch loss=0.131926, epoch loss=0.772221 Batch=479, step=14880, lr=0.138250, batch loss=0.098309, epoch loss=0.870530 Batch=539, step=14940, lr=0.138000, batch loss=0.101288, epoch loss=0.971818 Batch=599, step=15000, lr=0.137750, batch loss=0.083356, epoch loss=1.055174 Batch=659, step=15060, lr=0.137500, batch loss=0.127509, epoch loss=1.182683 Batch=719, step=15120, lr=0.137250, batch loss=0.132599, epoch loss=1.315283 Batch=779, step=15180, lr=0.137000, batch loss=0.180407, epoch loss=1.495690 Batch=839, step=15240, lr=0.136750, batch loss=0.186252, 
epoch loss=1.681942 Batch=899, step=15300, lr=0.136500, batch loss=0.303889, epoch loss=1.985831 Batch=959, step=15360, lr=0.136250, batch loss=0.055069, epoch loss=2.040900 Batch=1019, step=15420, lr=0.136000, batch loss=0.120267, epoch loss=2.161167 Batch=1079, step=15480, lr=0.135750, batch loss=0.059309, epoch loss=2.220476 Batch=1139, step=15540, lr=0.135500, batch loss=0.100413, epoch loss=2.320889 Batch=1199, step=15600, lr=0.135250, batch loss=0.063931, epoch loss=2.384820 Epoch=12, step=15600, lr=0.135250, epoch loss=2.384820 Batch=59, step=15660, lr=0.135000, batch loss=0.078106, epoch loss=0.078106 Batch=119, step=15720, lr=0.134750, batch loss=0.099923, epoch loss=0.178028 Batch=179, step=15780, lr=0.134500, batch loss=0.088998, epoch loss=0.267026 Batch=239, step=15840, lr=0.134250, batch loss=0.093290, epoch loss=0.360317 Batch=299, step=15900, lr=0.134000, batch loss=0.040004, epoch loss=0.400321 Batch=359, step=15960, lr=0.133750, batch loss=0.078207, epoch loss=0.478528 Batch=419, step=16020, lr=0.133500, batch loss=0.079171, epoch loss=0.557699 Batch=479, step=16080, lr=0.133250, batch loss=0.068926, epoch loss=0.626625 Batch=539, step=16140, lr=0.133000, batch loss=0.052026, epoch loss=0.678652 Batch=599, step=16200, lr=0.132750, batch loss=0.103638, epoch loss=0.782290 Batch=659, step=16260, lr=0.132500, batch loss=0.078231, epoch loss=0.860521 Batch=719, step=16320, lr=0.132250, batch loss=0.087875, epoch loss=0.948396 Batch=779, step=16380, lr=0.132000, batch loss=0.112043, epoch loss=1.060439 Batch=839, step=16440, lr=0.131750, batch loss=0.112653, epoch loss=1.173092 Batch=899, step=16500, lr=0.131500, batch loss=0.134471, epoch loss=1.307563 Batch=959, step=16560, lr=0.131250, batch loss=0.051532, epoch loss=1.359094 Batch=1019, step=16620, lr=0.131000, batch loss=0.132230, epoch loss=1.491324 Batch=1079, step=16680, lr=0.130750, batch loss=0.020538, epoch loss=1.511862 Batch=1139, step=16740, lr=0.130500, batch loss=0.045230, epoch loss=1.557092 Batch=1199, step=16800, lr=0.130250, batch loss=0.028037, epoch loss=1.585129 Epoch=13, step=16800, lr=0.130250, epoch loss=1.585129 Batch=59, step=16860, lr=0.130000, batch loss=0.043817, epoch loss=0.043817 Batch=119, step=16920, lr=0.129750, batch loss=0.082815, epoch loss=0.126632 Batch=179, step=16980, lr=0.129500, batch loss=0.059548, epoch loss=0.186180 Batch=239, step=17040, lr=0.129250, batch loss=0.060734, epoch loss=0.246914 Batch=299, step=17100, lr=0.129000, batch loss=0.018213, epoch loss=0.265127 Batch=359, step=17160, lr=0.128750, batch loss=0.045828, epoch loss=0.310955 Batch=419, step=17220, lr=0.128500, batch loss=0.046463, epoch loss=0.357418 Batch=479, step=17280, lr=0.128250, batch loss=0.029384, epoch loss=0.386802 Batch=539, step=17340, lr=0.128000, batch loss=0.056317, epoch loss=0.443119 Batch=599, step=17400, lr=0.127750, batch loss=0.033770, epoch loss=0.476888 Batch=659, step=17460, lr=0.127500, batch loss=0.046328, epoch loss=0.523217 Batch=719, step=17520, lr=0.127250, batch loss=0.049151, epoch loss=0.572367 Batch=779, step=17580, lr=0.127000, batch loss=0.108220, epoch loss=0.680588 Batch=839, step=17640, lr=0.126750, batch loss=0.104717, epoch loss=0.785305 Batch=899, step=17700, lr=0.126500, batch loss=0.147171, epoch loss=0.932476 Batch=959, step=17760, lr=0.126250, batch loss=0.019439, epoch loss=0.951915 Batch=1019, step=17820, lr=0.126000, batch loss=0.035627, epoch loss=0.987542 Batch=1079, step=17880, lr=0.125750, batch loss=0.011075, epoch loss=0.998617 Batch=1139, step=17940, 
lr=0.125500, batch loss=0.028871, epoch loss=1.027488 Batch=1199, step=18000, lr=0.125250, batch loss=0.013809, epoch loss=1.041297 Epoch=14, step=18000, lr=0.125250, epoch loss=1.041297 Batch=59, step=18060, lr=0.125000, batch loss=0.011248, epoch loss=0.011248 Batch=119, step=18120, lr=0.124750, batch loss=0.017901, epoch loss=0.029149 Batch=179, step=18180, lr=0.124500, batch loss=0.029309, epoch loss=0.058458 Batch=239, step=18240, lr=0.124250, batch loss=0.036316, epoch loss=0.094774 Batch=299, step=18300, lr=0.124000, batch loss=0.019049, epoch loss=0.113823 Batch=359, step=18360, lr=0.123750, batch loss=0.022951, epoch loss=0.136774 Batch=419, step=18420, lr=0.123500, batch loss=0.030611, epoch loss=0.167385 Batch=479, step=18480, lr=0.123250, batch loss=0.023515, epoch loss=0.190900 Batch=539, step=18540, lr=0.123000, batch loss=0.045874, epoch loss=0.236774 Batch=599, step=18600, lr=0.122750, batch loss=0.025500, epoch loss=0.262274 Batch=659, step=18660, lr=0.122500, batch loss=0.034415, epoch loss=0.296689 Batch=719, step=18720, lr=0.122250, batch loss=0.034067, epoch loss=0.330756 Batch=779, step=18780, lr=0.122000, batch loss=0.081176, epoch loss=0.411932 Batch=839, step=18840, lr=0.121750, batch loss=0.052601, epoch loss=0.464533 Batch=899, step=18900, lr=0.121500, batch loss=0.049320, epoch loss=0.513853 Batch=959, step=18960, lr=0.121250, batch loss=0.013589, epoch loss=0.527441 Batch=1019, step=19020, lr=0.121000, batch loss=0.023920, epoch loss=0.551361 Batch=1079, step=19080, lr=0.120750, batch loss=0.012678, epoch loss=0.564039 Batch=1139, step=19140, lr=0.120500, batch loss=0.022619, epoch loss=0.586658 Batch=1199, step=19200, lr=0.120250, batch loss=0.008866, epoch loss=0.595524 Epoch=15, step=19200, lr=0.120250, epoch loss=0.595524 Batch=59, step=19260, lr=0.120000, batch loss=0.004616, epoch loss=0.004616 Batch=119, step=19320, lr=0.119750, batch loss=0.023211, epoch loss=0.027827 Batch=179, step=19380, lr=0.119500, batch loss=0.070159, epoch loss=0.097985 Batch=239, step=19440, lr=0.119250, batch loss=0.027583, epoch loss=0.125568 Batch=299, step=19500, lr=0.119000, batch loss=0.011972, epoch loss=0.137540 Batch=359, step=19560, lr=0.118750, batch loss=0.022547, epoch loss=0.160087 Batch=419, step=19620, lr=0.118500, batch loss=0.019780, epoch loss=0.179868 Batch=479, step=19680, lr=0.118250, batch loss=0.007234, epoch loss=0.187102 Batch=539, step=19740, lr=0.118000, batch loss=0.018141, epoch loss=0.205243 Batch=599, step=19800, lr=0.117750, batch loss=0.021899, epoch loss=0.227142 Batch=659, step=19860, lr=0.117500, batch loss=0.016839, epoch loss=0.243981 Batch=719, step=19920, lr=0.117250, batch loss=0.031820, epoch loss=0.275801 Batch=779, step=19980, lr=0.117000, batch loss=0.064754, epoch loss=0.340554 Batch=839, step=20040, lr=0.116750, batch loss=0.031541, epoch loss=0.372095 Batch=899, step=20100, lr=0.116500, batch loss=0.030381, epoch loss=0.402476 Batch=959, step=20160, lr=0.116250, batch loss=0.016787, epoch loss=0.419263 Batch=1019, step=20220, lr=0.116000, batch loss=0.024871, epoch loss=0.444134 Batch=1079, step=20280, lr=0.115750, batch loss=0.003299, epoch loss=0.447433 Batch=1139, step=20340, lr=0.115500, batch loss=0.014918, epoch loss=0.462351 Batch=1199, step=20400, lr=0.115250, batch loss=0.005866, epoch loss=0.468217 Epoch=16, step=20400, lr=0.115250, epoch loss=0.468217 Batch=59, step=20460, lr=0.115000, batch loss=0.002674, epoch loss=0.002674 Batch=119, step=20520, lr=0.114750, batch loss=0.011751, epoch loss=0.014424 Batch=179, 
step=20580, lr=0.114500, batch loss=0.024141, epoch loss=0.038565 Batch=239, step=20640, lr=0.114250, batch loss=0.016670, epoch loss=0.055235 Batch=299, step=20700, lr=0.114000, batch loss=0.005073, epoch loss=0.060309 Batch=359, step=20760, lr=0.113750, batch loss=0.013627, epoch loss=0.073936 Batch=419, step=20820, lr=0.113500, batch loss=0.014768, epoch loss=0.088704 Batch=479, step=20880, lr=0.113250, batch loss=0.006165, epoch loss=0.094869 Batch=539, step=20940, lr=0.113000, batch loss=0.016455, epoch loss=0.111324 Batch=599, step=21000, lr=0.112750, batch loss=0.018296, epoch loss=0.129620 Batch=659, step=21060, lr=0.112500, batch loss=0.014467, epoch loss=0.144087 Batch=719, step=21120, lr=0.112250, batch loss=0.042439, epoch loss=0.186526 Batch=779, step=21180, lr=0.112000, batch loss=0.067903, epoch loss=0.254429 Batch=839, step=21240, lr=0.111750, batch loss=0.024778, epoch loss=0.279207 Batch=899, step=21300, lr=0.111500, batch loss=0.031317, epoch loss=0.310524 Batch=959, step=21360, lr=0.111250, batch loss=0.011189, epoch loss=0.321713 Batch=1019, step=21420, lr=0.111000, batch loss=0.013359, epoch loss=0.335072 Batch=1079, step=21480, lr=0.110750, batch loss=0.001399, epoch loss=0.336471 Batch=1139, step=21540, lr=0.110500, batch loss=0.012590, epoch loss=0.349061 Batch=1199, step=21600, lr=0.110250, batch loss=0.004799, epoch loss=0.353860 Epoch=17, step=21600, lr=0.110250, epoch loss=0.353860 Batch=59, step=21660, lr=0.110000, batch loss=0.001601, epoch loss=0.001601 Batch=119, step=21720, lr=0.109750, batch loss=0.007591, epoch loss=0.009192 Batch=179, step=21780, lr=0.109500, batch loss=0.015581, epoch loss=0.024773 Batch=239, step=21840, lr=0.109250, batch loss=0.014073, epoch loss=0.038846 Batch=299, step=21900, lr=0.109000, batch loss=0.004147, epoch loss=0.042992 Batch=359, step=21960, lr=0.108750, batch loss=0.011051, epoch loss=0.054043 Batch=419, step=22020, lr=0.108500, batch loss=0.012477, epoch loss=0.066520 Batch=479, step=22080, lr=0.108250, batch loss=0.003205, epoch loss=0.069725 Batch=539, step=22140, lr=0.108000, batch loss=0.017013, epoch loss=0.086738 Batch=599, step=22200, lr=0.107750, batch loss=0.016629, epoch loss=0.103367 Batch=659, step=22260, lr=0.107500, batch loss=0.014938, epoch loss=0.118305 Batch=719, step=22320, lr=0.107250, batch loss=0.024065, epoch loss=0.142370 Batch=779, step=22380, lr=0.107000, batch loss=0.042348, epoch loss=0.184718 Batch=839, step=22440, lr=0.106750, batch loss=0.020700, epoch loss=0.205418 Batch=899, step=22500, lr=0.106500, batch loss=0.022566, epoch loss=0.227984 Batch=959, step=22560, lr=0.106250, batch loss=0.010668, epoch loss=0.238652 Batch=1019, step=22620, lr=0.106000, batch loss=0.009354, epoch loss=0.248006 Batch=1079, step=22680, lr=0.105750, batch loss=0.000000, epoch loss=0.248006 Batch=1139, step=22740, lr=0.105500, batch loss=0.009796, epoch loss=0.257802 Batch=1199, step=22800, lr=0.105250, batch loss=0.004733, epoch loss=0.262536 Epoch=18, step=22800, lr=0.105250, epoch loss=0.262536 Batch=59, step=22860, lr=0.105000, batch loss=0.002015, epoch loss=0.002015 Batch=119, step=22920, lr=0.104750, batch loss=0.005770, epoch loss=0.007785 Batch=179, step=22980, lr=0.104500, batch loss=0.010335, epoch loss=0.018120 Batch=239, step=23040, lr=0.104250, batch loss=0.008669, epoch loss=0.026789 Batch=299, step=23100, lr=0.104000, batch loss=0.006901, epoch loss=0.033690 Batch=359, step=23160, lr=0.103750, batch loss=0.011537, epoch loss=0.045227 Batch=419, step=23220, lr=0.103500, batch loss=0.010662, 
epoch loss=0.055889 Batch=479, step=23280, lr=0.103250, batch loss=0.002485, epoch loss=0.058374 Batch=539, step=23340, lr=0.103000, batch loss=0.017502, epoch loss=0.075876 Batch=599, step=23400, lr=0.102750, batch loss=0.013762, epoch loss=0.089638 Batch=659, step=23460, lr=0.102500, batch loss=0.010341, epoch loss=0.099979 Batch=719, step=23520, lr=0.102250, batch loss=0.013785, epoch loss=0.113765 Batch=779, step=23580, lr=0.102000, batch loss=0.021279, epoch loss=0.135044 Batch=839, step=23640, lr=0.101750, batch loss=0.024569, epoch loss=0.159613 Batch=899, step=23700, lr=0.101500, batch loss=0.023467, epoch loss=0.183080 Batch=959, step=23760, lr=0.101250, batch loss=0.008478, epoch loss=0.191558 Batch=1019, step=23820, lr=0.101000, batch loss=0.007784, epoch loss=0.199342 Batch=1079, step=23880, lr=0.100750, batch loss=0.000887, epoch loss=0.200229 Batch=1139, step=23940, lr=0.100500, batch loss=0.009416, epoch loss=0.209645 Batch=1199, step=24000, lr=0.100250, batch loss=0.004988, epoch loss=0.214634 Epoch=19, step=24000, lr=0.100250, epoch loss=0.214634
Half-moons scatterplot and decision boundary:
[ASCII plot: points of the two half-moon classes (# and %) over the learned decision regions (* and .)]
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @install @check @runtest && rm -rf _build" failed with exit status 1
2025-05-22 12:20.33: Job failed: Failed: Build failed
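
Note on the training trace above: the printed quantities follow a simple pattern. The epoch loss is a running sum of the per-batch losses, reset at each Epoch= summary line, and the printed lr values are consistent with a linear decay of 0.00025 every 60 steps from a base of roughly 0.2 (one or two lines in the trace repeat an lr value). The OCaml sketch below reproduces only that bookkeeping; the names lr_of_step and log_batch and the fitted constants (base_lr = 0.200250, a slope of 0.00025 per 60 steps) are illustrative assumptions inferred from the log, not OCANNL's API.

    (* Sketch of the bookkeeping inferred from the trace above; the constants
       are fitted to the printed values and the names are illustrative only. *)
    let base_lr = 0.200250                (* matches lr=0.100250 at step 24000 *)
    let decay_per_step = 0.00025 /. 60.   (* lr drops by 0.00025 every 60 steps *)

    let lr_of_step step = base_lr -. (float_of_int step *. decay_per_step)

    (* epoch_loss is the running sum of batch losses within the current epoch;
       the caller resets it to 0. at each new epoch. *)
    let log_batch ~batch ~step ~batch_loss ~epoch_loss =
      let epoch_loss = epoch_loss +. batch_loss in
      Printf.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n"
        batch step (lr_of_step step) batch_loss epoch_loss;
      epoch_loss

For example, lr_of_step 24000 evaluates to 0.100250 and lr_of_step 4800 to 0.180250, matching the epoch summaries for epochs 19 and 3 in the trace.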