2025-07-05 17:55.53: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (04aebe83f689f400053156a388e1fd5985b16d96) (linux-x86_64:alpine-3.21-5.3_opam-2.3)
Base: ocaml/opam:alpine-3.21-ocaml-5.3@sha256:3343dcc8eef8cc135210194424c75ebe26927358b1a48d0c3c0860cfe43df613
Opam project build

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 04aebe83
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:alpine-3.21-ocaml-5.3@sha256:3343dcc8eef8cc135210194424c75ebe26927358b1a48d0c3c0860cfe43df613
# alpine-3.21-5.3_opam-2.3
USER 1000:1000
ENV CLICOLOR_FORCE="1"
ENV OPAMCOLOR="always"
WORKDIR /src
RUN sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam
RUN opam init --reinit -ni
RUN uname -rs && opam exec -- ocaml -version && opam --version
WORKDIR /src
RUN sudo chown opam /src
RUN cd ~/opam-repository && (git cat-file -e 382e02b4c17b5b59d503254784ac3a5d78916d5a || git fetch origin master) && git reset -q --hard 382e02b4c17b5b59d503254784ac3a5d78916d5a && git log --no-decorate -n1 --oneline && opam update -u
COPY --chown=1000:1000 neural_nets_lib.opam arrayjit.opam ./
RUN opam pin add -yn neural_nets_lib.dev './' && \
    opam pin add -yn arrayjit.dev './'
RUN echo '(lang dune 3.0)' > './dune-project'
ENV DEPS="angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.3 base-bigarray.base base-bytes.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 camlzip.1.13 cmdliner.1.3.0 conf-libcurl.2 conf-libffi.2.0.0 conf-pkg-config.4 conf-zlib.1 cppo.1.8.0 csexp.1.5.2 csv.2.4 ctypes.0.23.0 ctypes-foreign.0.23.0 curl.0.10.0 dune.3.19.1 dune-configurator.3.19.1 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.6 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.1 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.1.0 ppx_enumerate.v0.17.0 ppx_expect.v0.17.3 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.2 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.1 ppx_minidebug.2.3.0 ppx_optcomp.v0.17.1 ppx_sexp_conv.v0.17.1 ppx_string.v0.17.0 ppx_variants_conv.v0.17.1 ppxlib.0.36.0 ppxlib_jane.v0.17.4 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.13.2 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0"
ENV CI="true"
ENV OCAMLCI="true"
RUN opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS
RUN opam install $DEPS
COPY --chown=1000:1000 . /src
RUN opam exec -- dune build @install @check @runtest && rm -rf _build
END-OF-DOCKERFILE
docker build .
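# Not part of the CI job: a minimal way to capture the reproduction output locally for
# comparison with this log (the log file name below is only an example).
#   docker build . 2>&1 | tee ocannl-ci-repro.log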
END-REPRO-BLOCK 2025-07-05 17:55.53: Using cache hint "ahrefs/ocannl-ocaml/opam:alpine-3.21-ocaml-5.3@sha256:3343dcc8eef8cc135210194424c75ebe26927358b1a48d0c3c0860cfe43df613-alpine-3.21-5.3_opam-2.3-e7b66700cdf32b3cc2b84269e6d26c23" 2025-07-05 17:55.53: Using OBuilder spec: ((from ocaml/opam:alpine-3.21-ocaml-5.3@sha256:3343dcc8eef8cc135210194424c75ebe26927358b1a48d0c3c0860cfe43df613) (comment alpine-3.21-5.3_opam-2.3) (user (uid 1000) (gid 1000)) (env CLICOLOR_FORCE 1) (env OPAMCOLOR always) (workdir /src) (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam")) (run (shell "opam init --reinit -ni")) (run (shell "uname -rs && opam exec -- ocaml -version && opam --version")) (workdir /src) (run (shell "sudo chown opam /src")) (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 382e02b4c17b5b59d503254784ac3a5d78916d5a || git fetch origin master) && git reset -q --hard 382e02b4c17b5b59d503254784ac3a5d78916d5a && git log --no-decorate -n1 --oneline && opam update -u")) (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./)) (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'")) (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'")) (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.3 base-bigarray.base base-bytes.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 camlzip.1.13 cmdliner.1.3.0 conf-libcurl.2 conf-libffi.2.0.0 conf-pkg-config.4 conf-zlib.1 cppo.1.8.0 csexp.1.5.2 csv.2.4 ctypes.0.23.0 ctypes-foreign.0.23.0 curl.0.10.0 dune.3.19.1 dune-configurator.3.19.1 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.6 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.1 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.1.0 ppx_enumerate.v0.17.0 ppx_expect.v0.17.3 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.2 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.1 ppx_minidebug.2.3.0 ppx_optcomp.v0.17.1 ppx_sexp_conv.v0.17.1 ppx_string.v0.17.0 ppx_variants_conv.v0.17.1 ppxlib.0.36.0 ppxlib_jane.v0.17.4 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.13.2 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0") (env CI true) (env OCAMLCI true) (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS")) (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS")) (copy (src .) 
(dst /src)) (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build")) ) 2025-07-05 17:55.53: Waiting for resource in pool OCluster 2025-07-05 17:55.53: Waiting for worker… 2025-07-05 17:55.54: Got resource from pool OCluster Building on phoebe All commits already cached HEAD is now at 04aebe83 In progress: preparations for threefry, get rid of File_mapped (from ocaml/opam:alpine-3.21-ocaml-5.3@sha256:3343dcc8eef8cc135210194424c75ebe26927358b1a48d0c3c0860cfe43df613) 2025-07-05 17:55.55 ---> using "3141b9cf5ba038771635d2742247938bc41e269100016f85aeafaf7cfcddc947" from cache /: (comment alpine-3.21-5.3_opam-2.3) /: (user (uid 1000) (gid 1000)) /: (env CLICOLOR_FORCE 1) /: (env OPAMCOLOR always) /: (workdir /src) /src: (run (shell "sudo ln -f /usr/bin/opam-2.3 /usr/bin/opam")) 2025-07-05 17:55.55 ---> using "80b751ed47c43451e88cd0293ed9a635ccea08011eeda4114e7ec036c06c48a3" from cache /src: (run (shell "opam init --reinit -ni")) Configuring from /home/opam/.opamrc and then from built-in defaults. Checking for available remotes: rsync and local, git. - you won't be able to use mercurial repositories unless you install the hg command on your system. - you won't be able to use darcs repositories unless you install the darcs command on your system. This development version of opam requires an update to the layout of /home/opam/.opam from version 2.0 to version 2.2, which can't be reverted. You may want to back it up before going further. Continue? [y/n] y [NOTE] The 'jobs' option was reset, its value was 255 and its new value will vary according to the current number of cores on your machine. You can restore the fixed value using: opam option jobs=255 --global Format upgrade done. <><> Updating repositories ><><><><><><><><><><><><><><><><><><><><><><><><><><> [default] Initialised 2025-07-05 17:55.55 ---> using "6c30ca4e379a499434293ac7ae4ccb6757139c93d8bc5916b7557572123e5ba0" from cache /src: (run (shell "uname -rs && opam exec -- ocaml -version && opam --version")) Linux 6.8.0-63-generic The OCaml toplevel, version 5.3.0 2.3.0 2025-07-05 17:55.55 ---> using "a3df42546ff5706a2753d5e221aadf5e7ea87ec715342ceb488042e5402600c3" from cache /src: (workdir /src) /src: (run (shell "sudo chown opam /src")) 2025-07-05 17:55.55 ---> using "17ab95ba4820eaae0552c498f14e3328c5320ed844b460f0519c8783adab646a" from cache /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e 382e02b4c17b5b59d503254784ac3a5d78916d5a || git fetch origin master) && git reset -q --hard 382e02b4c17b5b59d503254784ac3a5d78916d5a && git log --no-decorate -n1 --oneline && opam update -u")) From https://github.com/ocaml/opam-repository * branch master -> FETCH_HEAD bc726805e3..6dea2f9390 master -> origin/master 382e02b4c1 Merge pull request #28124 from kit-ty-kate/opam-publish-re.1.13.2 <><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><> [default] synchronised from git+file:///home/opam/opam-repository Everything as up-to-date as possible (run with --verbose to show unavailable upgrades). However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages. Nothing to do. 
# To update the current shell environment, run: eval $(opam env) 2025-07-05 17:55.55 ---> using "7d1392ff9d45ee1bdc610d742587b6fcda45831941ca51685b96330d07eb5be0" from cache /src: (copy (src neural_nets_lib.opam arrayjit.opam) (dst ./)) 2025-07-05 17:55.55 ---> using "d77d67c5a318db8fff466e4217b2512e8e6255fa4225cc65a6bc0ec02fc14709" from cache /src: (run (network host) (shell "opam pin add -yn neural_nets_lib.dev './' && \ \nopam pin add -yn arrayjit.dev './'")) [neural_nets_lib.dev] synchronised (file:///src) neural_nets_lib is now pinned to file:///src (version dev) [arrayjit.dev] synchronised (file:///src) arrayjit is now pinned to file:///src (version dev) 2025-07-05 17:55.55 ---> using "67d0c6168053c979981b141e0bc0b7f0cff21d5e59b0cc7a3c0deb0326c4e9dc" from cache /src: (run (network host) (shell "echo '(lang dune 3.0)' > './dune-project'")) 2025-07-05 17:55.55 ---> using "6c053ff75909946e6ba6e2422e409a6119a5c59e7731beda470f2b8b807cfcbe" from cache /src: (env DEPS "angstrom.0.16.1 astring.0.8.5 backoff.0.1.1 base.v0.17.3 base-bigarray.base base-bytes.base base-domains.base base-effects.base base-nnp.base base-threads.base base-unix.base bigarray-compat.1.1.0 bigstringaf.0.10.0 camlp-streams.5.0.1 camlzip.1.13 cmdliner.1.3.0 conf-libcurl.2 conf-libffi.2.0.0 conf-pkg-config.4 conf-zlib.1 cppo.1.8.0 csexp.1.5.2 csv.2.4 ctypes.0.23.0 ctypes-foreign.0.23.0 curl.0.10.0 dune.3.19.1 dune-configurator.3.19.1 fieldslib.v0.17.0 fmt.0.10.0 integers.0.7.0 jane-street-headers.v0.17.0 jst-config.v0.17.0 logs.0.8.0 mdx.2.5.0 mtime.2.1.0 multicore-magic.2.3.1 num.1.6 ocaml.5.3.0 ocaml-base-compiler.5.3.0 ocaml-compiler.5.3.0 ocaml-compiler-libs.v0.17.0 ocaml-config.3 ocaml-options-vanilla.1 ocaml-syntax-shims.1.0.0 ocaml-version.4.0.1 ocaml_intrinsics_kernel.v0.17.1 ocamlbuild.0.16.1 ocamlfind.1.9.8 parsexp.v0.17.0 pprint.20230830 ppx_assert.v0.17.0 ppx_base.v0.17.0 ppx_cold.v0.17.0 ppx_compare.v0.17.0 ppx_derivers.1.2.1 ppx_deriving.6.1.0 ppx_enumerate.v0.17.0 ppx_expect.v0.17.3 ppx_fields_conv.v0.17.0 ppx_globalize.v0.17.2 ppx_hash.v0.17.0 ppx_here.v0.17.0 ppx_inline_test.v0.17.1 ppx_minidebug.2.3.0 ppx_optcomp.v0.17.1 ppx_sexp_conv.v0.17.1 ppx_string.v0.17.0 ppx_variants_conv.v0.17.1 ppxlib.0.36.0 ppxlib_jane.v0.17.4 printbox.0.12 printbox-ext-plot.0.12 printbox-html.0.12 printbox-md.0.12 printbox-text.0.12 ptime.1.2.0 re.1.13.2 result.1.5 saturn_lockfree.0.5.0 seq.base sexplib.v0.17.0 sexplib0.v0.17.0 stdio.v0.17.0 stdlib-shims.0.3.0 thread-local-storage.0.2 time_now.v0.17.0 topkg.1.0.8 tyxml.4.6.0 uucp.16.0.0 uutf.1.0.4 variantslib.v0.17.0") /src: (env CI true) /src: (env OCAMLCI true) /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam update --depexts && opam install --cli=2.3 --depext-only -y neural_nets_lib.dev arrayjit.dev $DEPS")) + /usr/bin/sudo "apk" "update" - fetch https://dl-cdn.alpinelinux.org/alpine/v3.21/main/x86_64/APKINDEX.tar.gz - fetch https://dl-cdn.alpinelinux.org/alpine/v3.21/community/x86_64/APKINDEX.tar.gz - fetch https://dl-cdn.alpinelinux.org/alpine/edge/main/x86_64/APKINDEX.tar.gz - fetch https://dl-cdn.alpinelinux.org/alpine/edge/community/x86_64/APKINDEX.tar.gz - fetch https://dl-cdn.alpinelinux.org/alpine/edge/testing/x86_64/APKINDEX.tar.gz - v3.21.3-626-g91d0df3d1c9 [https://dl-cdn.alpinelinux.org/alpine/v3.21/main] - v3.21.3-626-g91d0df3d1c9 [https://dl-cdn.alpinelinux.org/alpine/v3.21/community] - v3.23.0_alpha20250612-1570-g52a657b5ed6 [https://dl-cdn.alpinelinux.org/alpine/edge/main] - 
v3.23.0_alpha20250612-1579-ged4ae29d6ae [https://dl-cdn.alpinelinux.org/alpine/edge/community] - v3.23.0_alpha20250612-1557-g033f88509d9 [https://dl-cdn.alpinelinux.org/alpine/edge/testing] - OK: 58982 distinct packages available <><> Synchronising pinned packages ><><><><><><><><><><><><><><><><><><><><><><> [arrayjit.dev] synchronised (file:///src) [neural_nets_lib.dev] synchronised (file:///src) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). [NOTE] Package ocaml-config is already installed (current version is 3). [NOTE] Package ocaml-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml is already installed (current version is 5.3.0). [NOTE] Package base-unix is already installed (current version is base). [NOTE] Package base-threads is already installed (current version is base). [NOTE] Package base-nnp is already installed (current version is base). [NOTE] Package base-effects is already installed (current version is base). [NOTE] Package base-domains is already installed (current version is base). [NOTE] Package base-bigarray is already installed (current version is base). The following system packages will first need to be installed: curl-dev libffi-dev zlib-dev <><> Handling external dependencies <><><><><><><><><><><><><><><><><><><><><><> + /usr/bin/sudo "apk" "add" "curl-dev" "libffi-dev" "zlib-dev" - (1/13) Installing brotli (1.1.0-r2) - (2/13) Installing brotli-dev (1.1.0-r2) - (3/13) Installing c-ares-dev (1.34.5-r0) - (4/13) Installing libidn2-dev (2.3.7-r0) - (5/13) Installing libpsl-utils (0.21.5-r3) - (6/13) Installing libpsl-dev (0.21.5-r3) - (7/13) Installing nghttp2-dev (1.64.0-r0) - (8/13) Installing openssl-dev (3.3.3-r0) - (9/13) Installing zlib-dev (1.3.1-r2) - (10/13) Installing zstd-dev (1.5.6-r2) - (11/13) Installing curl-dev (8.12.1-r1) - (12/13) Installing linux-headers (6.6-r1) - (13/13) Installing libffi-dev (3.4.7-r0) - Executing busybox-1.37.0-r12.trigger - OK: 317 MiB in 115 packages 2025-07-05 17:55.55 ---> using "311f9410713c14be261f928b14f3d5c264ab1acb129f7490fd6209bbadd7e31c" from cache /src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam install $DEPS")) [NOTE] Package ocaml-options-vanilla is already installed (current version is 1). [NOTE] Package ocaml-config is already installed (current version is 3). [NOTE] Package ocaml-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml-base-compiler is already installed (current version is 5.3.0). [NOTE] Package ocaml is already installed (current version is 5.3.0). [NOTE] Package base-unix is already installed (current version is base). [NOTE] Package base-threads is already installed (current version is base). [NOTE] Package base-nnp is already installed (current version is base). [NOTE] Package base-effects is already installed (current version is base). [NOTE] Package base-domains is already installed (current version is base). [NOTE] Package base-bigarray is already installed (current version is base). 
The following actions will be performed: === install 81 packages - install angstrom 0.16.1 - install astring 0.8.5 - install backoff 0.1.1 - install base v0.17.3 - install base-bytes base - install bigarray-compat 1.1.0 - install bigstringaf 0.10.0 - install camlp-streams 5.0.1 - install camlzip 1.13 - install cmdliner 1.3.0 - install conf-libcurl 2 - install conf-libffi 2.0.0 - install conf-pkg-config 4 - install conf-zlib 1 - install cppo 1.8.0 - install csexp 1.5.2 - install csv 2.4 - install ctypes 0.23.0 - install ctypes-foreign 0.23.0 - install curl 0.10.0 - install dune 3.19.1 - install dune-configurator 3.19.1 - install fieldslib v0.17.0 - install fmt 0.10.0 - install integers 0.7.0 - install jane-street-headers v0.17.0 - install jst-config v0.17.0 - install logs 0.8.0 - install mdx 2.5.0 - install mtime 2.1.0 - install multicore-magic 2.3.1 - install num 1.6 - install ocaml-compiler-libs v0.17.0 - install ocaml-syntax-shims 1.0.0 - install ocaml-version 4.0.1 - install ocaml_intrinsics_kernel v0.17.1 - install ocamlbuild 0.16.1 - install ocamlfind 1.9.8 - install parsexp v0.17.0 - install pprint 20230830 - install ppx_assert v0.17.0 - install ppx_base v0.17.0 - install ppx_cold v0.17.0 - install ppx_compare v0.17.0 - install ppx_derivers 1.2.1 - install ppx_deriving 6.1.0 - install ppx_enumerate v0.17.0 - install ppx_expect v0.17.3 - install ppx_fields_conv v0.17.0 - install ppx_globalize v0.17.2 - install ppx_hash v0.17.0 - install ppx_here v0.17.0 - install ppx_inline_test v0.17.1 - install ppx_minidebug 2.3.0 - install ppx_optcomp v0.17.1 - install ppx_sexp_conv v0.17.1 - install ppx_string v0.17.0 - install ppx_variants_conv v0.17.1 - install ppxlib 0.36.0 - install ppxlib_jane v0.17.4 - install printbox 0.12 - install printbox-ext-plot 0.12 - install printbox-html 0.12 - install printbox-md 0.12 - install printbox-text 0.12 - install ptime 1.2.0 - install re 1.13.2 - install result 1.5 - install saturn_lockfree 0.5.0 - install seq base - install sexplib v0.17.0 - install sexplib0 v0.17.0 - install stdio v0.17.0 - install stdlib-shims 0.3.0 - install thread-local-storage 0.2 - install time_now v0.17.0 - install topkg 1.0.8 - install tyxml 4.6.0 - install uucp 16.0.0 - install uutf 1.0.4 - install variantslib v0.17.0 <><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><> -> retrieved backoff.0.1.1 (cached) -> retrieved astring.0.8.5 (cached) -> retrieved angstrom.0.16.1 (cached) -> retrieved base.v0.17.3 (cached) -> retrieved bigarray-compat.1.1.0 (cached) -> retrieved bigstringaf.0.10.0 (cached) -> retrieved camlp-streams.5.0.1 (cached) -> retrieved camlzip.1.13 (cached) -> installed conf-libcurl.2 -> retrieved cmdliner.1.3.0 (cached) -> retrieved cppo.1.8.0 (cached) -> installed conf-pkg-config.4 -> retrieved csexp.1.5.2 (cached) -> retrieved csv.2.4 (cached) -> retrieved ctypes.0.23.0, ctypes-foreign.0.23.0 (cached) -> installed conf-libffi.2.0.0 -> installed conf-zlib.1 -> retrieved curl.0.10.0 (cached) -> retrieved fieldslib.v0.17.0 (cached) -> retrieved integers.0.7.0 (cached) -> retrieved fmt.0.10.0 (cached) -> retrieved jane-street-headers.v0.17.0 (cached) -> retrieved jst-config.v0.17.0 (cached) -> retrieved logs.0.8.0 (cached) -> retrieved mtime.2.1.0 (cached) -> retrieved multicore-magic.2.3.1 (cached) -> retrieved mdx.2.5.0 (cached) -> retrieved num.1.6 (cached) -> retrieved ocaml-compiler-libs.v0.17.0 (cached) -> retrieved ocaml-syntax-shims.1.0.0 (cached) -> retrieved ocaml-version.4.0.1 (cached) -> retrieved 
ocaml_intrinsics_kernel.v0.17.1 (cached) -> retrieved ocamlfind.1.9.8 (cached) -> retrieved ocamlbuild.0.16.1 (cached) -> retrieved parsexp.v0.17.0 (cached) -> retrieved pprint.20230830 (cached) -> retrieved ppx_assert.v0.17.0 (cached) -> retrieved ppx_base.v0.17.0 (cached) -> retrieved ppx_cold.v0.17.0 (cached) -> retrieved ppx_compare.v0.17.0 (cached) -> retrieved ppx_derivers.1.2.1 (cached) -> retrieved ppx_enumerate.v0.17.0 (cached) -> retrieved ppx_deriving.6.1.0 (cached) -> retrieved ppx_expect.v0.17.3 (cached) -> retrieved dune.3.19.1, dune-configurator.3.19.1 (cached) -> retrieved ppx_fields_conv.v0.17.0 (cached) -> retrieved ppx_globalize.v0.17.2 (cached) -> retrieved ppx_hash.v0.17.0 (cached) -> retrieved ppx_here.v0.17.0 (cached) -> retrieved ppx_inline_test.v0.17.1 (cached) -> retrieved ppx_optcomp.v0.17.1 (cached) -> retrieved ppx_sexp_conv.v0.17.1 (cached) -> installed cmdliner.1.3.0 -> installed num.1.6 -> retrieved ppx_minidebug.2.3.0 (cached) -> retrieved ppx_string.v0.17.0 (cached) -> retrieved ppx_variants_conv.v0.17.1 (cached) -> retrieved ppxlib_jane.v0.17.4 (cached) -> retrieved ptime.1.2.0 (cached) -> retrieved re.1.13.2 (cached) -> retrieved result.1.5 (cached) -> retrieved saturn_lockfree.0.5.0 (cached) -> retrieved seq.base (cached) -> installed seq.base -> retrieved sexplib.v0.17.0 (cached) -> retrieved ppxlib.0.36.0 (cached) -> retrieved sexplib0.v0.17.0 (cached) -> retrieved stdio.v0.17.0 (cached) -> retrieved stdlib-shims.0.3.0 (cached) -> retrieved thread-local-storage.0.2 (cached) -> retrieved time_now.v0.17.0 (cached) -> retrieved topkg.1.0.8 (cached) -> retrieved printbox.0.12, printbox-ext-plot.0.12, printbox-html.0.12, printbox-md.0.12, printbox-text.0.12 (cached) -> retrieved tyxml.4.6.0 (cached) -> retrieved uutf.1.0.4 (cached) -> retrieved variantslib.v0.17.0 (cached) -> retrieved uucp.16.0.0 (cached) -> installed ocamlfind.1.9.8 -> installed base-bytes.base -> installed camlzip.1.13 -> installed ocamlbuild.0.16.1 -> installed topkg.1.0.8 -> installed uutf.1.0.4 -> installed mtime.2.1.0 -> installed fmt.0.10.0 -> installed ptime.1.2.0 -> installed astring.0.8.5 -> installed logs.0.8.0 -> installed dune.3.19.1 -> installed jane-street-headers.v0.17.0 -> installed ppx_derivers.1.2.1 -> installed csexp.1.5.2 -> installed backoff.0.1.1 -> installed bigarray-compat.1.1.0 -> installed camlp-streams.5.0.1 -> installed csv.2.4 -> installed cppo.1.8.0 -> installed multicore-magic.2.3.1 -> installed ocaml-syntax-shims.1.0.0 -> installed ocaml-version.4.0.1 -> installed ocaml_intrinsics_kernel.v0.17.1 -> installed pprint.20230830 -> installed printbox.0.12 -> installed re.1.13.2 -> installed ocaml-compiler-libs.v0.17.0 -> installed result.1.5 -> installed sexplib0.v0.17.0 -> installed stdlib-shims.0.3.0 -> installed thread-local-storage.0.2 -> installed saturn_lockfree.0.5.0 -> installed integers.0.7.0 -> installed parsexp.v0.17.0 -> installed dune-configurator.3.19.1 -> installed bigstringaf.0.10.0 -> installed mdx.2.5.0 -> installed sexplib.v0.17.0 -> installed angstrom.0.16.1 -> installed tyxml.4.6.0 -> installed printbox-html.0.12 -> installed curl.0.10.0 -> installed ctypes.0.23.0 -> installed base.v0.17.3 -> installed fieldslib.v0.17.0 -> installed variantslib.v0.17.0 -> installed stdio.v0.17.0 -> installed ctypes-foreign.0.23.0 -> installed uucp.16.0.0 -> installed printbox-text.0.12 -> installed printbox-md.0.12 -> installed printbox-ext-plot.0.12 -> installed ppxlib.0.36.0 -> installed ppx_optcomp.v0.17.1 -> installed ppxlib_jane.v0.17.4 -> installed 
ppx_cold.v0.17.0 -> installed ppx_here.v0.17.0 -> installed ppx_variants_conv.v0.17.1 -> installed ppx_fields_conv.v0.17.0 -> installed ppx_enumerate.v0.17.0 -> installed ppx_globalize.v0.17.2 -> installed ppx_compare.v0.17.0 -> installed ppx_deriving.6.1.0 -> installed ppx_sexp_conv.v0.17.1 -> installed ppx_hash.v0.17.0 -> installed ppx_assert.v0.17.0 -> installed ppx_minidebug.2.3.0 -> installed ppx_base.v0.17.0 -> installed jst-config.v0.17.0 -> installed ppx_string.v0.17.0 -> installed time_now.v0.17.0 -> installed ppx_inline_test.v0.17.1 -> installed ppx_expect.v0.17.3
Done.
# To update the current shell environment, run: eval $(opam env)
2025-07-05 17:55.55 ---> using "116a15f6f013e0d10b3c7559df8b885734a0da484b0d73b49fb45e75d52736fd" from cache
/src: (copy (src .) (dst /src))
2025-07-05 17:55.55 ---> saved as "660f44d7dcfc167261c6a511a36daea17c7653add6bae37c6e5c296f2e82903d"
/src: (run (shell "opam exec -- dune build @install @check @runtest && rm -rf _build"))
File "arrayjit/test/dune", line 6, characters 7-15:
6 |  (pps ppx_jane)))
        ^^^^^^^^
Error: Library "ppx_jane" not found.
-> required by _build/default/arrayjit/test/.merlin-conf/exe-test_numerical_types
-> required by alias arrayjit/test/check
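Note that ppx_jane does not appear in the $DEPS list above, so the preprocessor required by arrayjit/test is simply not installed in this switch. A minimal local check, assuming the missing opam package is the only problem (the durable fix would be declaring ppx_jane as a test dependency in the project's opam files so the CI solver includes it in $DEPS):

opam install ppx_jane
opam exec -- dune build @arrayjit/test/check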
(cd _build/default/test/config && ../../arrayjit/bin/read_config.exe --read=backend)
Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/config/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Wrote value of 'backend' to ocannl_backend.txt
File "test_ppx/test_ppx_op_expected.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test_ppx/test_ppx_op_expected.ml _build/default/test_ppx/test_ppx_op_actual.ml
diff --git a/_build/default/test_ppx/test_ppx_op_expected.ml b/_build/default/test_ppx/test_ppx_op_actual.ml
index a6aa4ae..c971ef0 100644
--- a/_build/default/test_ppx/test_ppx_op_expected.ml
+++ b/_build/default/test_ppx/test_ppx_op_actual.ml
@@ -2,20 +2,20 @@
 open Base
 open Ocannl
 module TDSL = Operation.TDSL
 let y0 =
-  let hey1 = TDSL.param ?values:None "hey1" in
+  let hey1 = TDSL.param ?value:None "hey1" in
   let open! TDSL.O in
   ((+) ?label:(Some ["y0"])) ((( *. ) ?label:None) (TDSL.number (Float.of_int 2)) hey1) (TDSL.number (Float.of_int 3))
 let y1 =
-  let hey2 = TDSL.param ?values:None "hey2" in
+  let hey2 = TDSL.param ?value:None "hey2" in
   let open! TDSL.O in
   fun x -> ((+) ?label:(Some (List.concat [["y1"]; (x.Tensor.value).Ir.Tnode.label]))) ((( * ) ?label:None) hey2 (TDSL.number (Float.of_int 2))) x
 let y2 =
-  let hey3 = TDSL.param ?values:None "hey3" in
+  let hey3 = TDSL.param ?value:None "hey3" in
   let open! TDSL.O in
   fun x1 x2 -> ((+) ?label:(Some
@@ -36,14 +36,14 @@ let b =
   ~output_dims:[2] [|(Float.of_int 7);(Float.of_int 8);(Float.of_int 9);(Float.of_int 10)|]
 let y =
-  let hey4 = TDSL.param ?values:None "hey4" in
+  let hey4 = TDSL.param ?value:None "hey4" in
   let open! TDSL.O in
   ((+) ?label:(Some ["y"])) ((( * ) ?label:None) hey4 (TDSL.number ?label:None ~axis_label:"q" 2.0)) (TDSL.number ?label:None ~axis_label:"p" 1.0)
 let z =
-  let hey5 = TDSL.param ?values:None "hey5"
-  and hey6 = TDSL.param ?values:None "hey6" in
+  let hey5 = TDSL.param ?value:None "hey5"
+  and hey6 = TDSL.param ?value:None "hey6" in
   let open! TDSL.O in
   ((+) ?label:(Some ["z"])) ((( * ) ?label:None) (TDSL.number ?label:None ~axis_label:"q" 2.0) hey5)
@@ -51,8 +51,8 @@ let z =
 let stride = 2
 and dilation = 3
 let z2 =
-  let hey7 = TDSL.param ?values:None "hey7"
-  and hey8 = TDSL.param ?values:None "hey8" in
+  let hey7 = TDSL.param ?value:None "hey7"
+  and hey8 = TDSL.param ?value:None "hey8" in
   let open! TDSL.O in
   TDSL.einsum ?label:(Some ["z2"]) (String.concat ~sep:""
@@ -61,8 +61,8 @@ let z2 =
 let z3 =
   let s = 2
   and d = 3 in
-  let hey10 = TDSL.param ?values:None "hey10"
-  and hey9 = TDSL.param ?values:None "hey9" in
+  let hey10 = TDSL.param ?value:None "hey10"
+  and hey9 = TDSL.param ?value:None "hey9" in
   let open! TDSL.O in
   TDSL.einsum ?label:(Some []) (String.concat ~sep:""
@@ -78,7 +78,7 @@ let mlp_layer =
   let b = (TDSL.param ~more_label:(config.label)) ~output_dims:[config.hid_dim] "b"
-  and w = (TDSL.param ~more_label:(config.label)) ?values:None "w" in
+  and w = (TDSL.param ~more_label:(config.label)) ?value:None "w" in
   fun x -> (relu ?label:(Some
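The generated code now uses the labelled argument ?value: while the committed expectation still uses ?values:, so every TDSL.param call in the expected file is stale. If the rename is intentional, the baseline needs regenerating; since this rule shells out to git diff --no-index rather than dune's diff action, dune promote may not apply, and one blunt way is to copy the actual output over the expected file, using the paths from the diff above:

cp _build/default/test_ppx/test_ppx_op_actual.ml test_ppx/test_ppx_op_expected.ml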
File "test/test_print_style.expected", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/test_print_style.expected _build/default/test/test_print_style.exe.output
diff --git a/_build/default/test/test_print_style.expected b/_build/default/test/test_print_style.exe.output
index e51ab37..c55c8ad 100644
--- a/_build/default/test/test_print_style.expected
+++ b/_build/default/test/test_print_style.exe.output
@@ -1,5 +1,5 @@
-Welcome to OCANNL! Reading configuration defaults from /Users/lukstafi/ocannl/_build/default/test/ocannl_config.
+Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
 Retrieving commandline, environment, or config file variable ocannl_log_level
 Found 0, in the config file
 Testing print_style functionality:
File "test/einsum/test_conv_syntax.expected", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/einsum/test_conv_syntax.expected _build/default/test/einsum/test_conv_syntax.exe.output
diff --git a/_build/default/test/einsum/test_conv_syntax.expected b/_build/default/test/einsum/test_conv_syntax.exe.output
index 4bd5a1c..0580e1e 100644
--- a/_build/default/test/einsum/test_conv_syntax.expected
+++ b/_build/default/test/einsum/test_conv_syntax.exe.output
@@ -1,5 +1,5 @@
-Welcome to OCANNL! Reading configuration defaults from /Users/lukstafi/ocannl/_build/default/test/einsum/ocannl_config.
+Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/einsum/ocannl_config.
 Retrieving commandline, environment, or config file variable ocannl_log_level
 Found 0, in the config file
 Testing conv syntax parsing...
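Both of these diffs fail on the first line only: the expected files were recorded on a machine whose build root was /Users/lukstafi/ocannl, and the "Welcome to OCANNL!" banner embeds that absolute path, so it can never match the CI sandbox path /src. A quick way to see how many expected files carry the machine-specific prefix (the directories searched are a guess based on the failures above):

grep -rl '/Users/lukstafi/ocannl' test/ test_ppx/

Either the banner should stop printing an absolute path, or the expected outputs need the path normalized away before comparison.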
(cd _build/default/test_ppx && ./test_ppx_op.exe)
Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/default/test_ppx && ./test_ppx_op_expected.exe)
Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test_ppx/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Found 0, in the config file' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Retrieving commandline, environment, or config file variable ocannl_log_level' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition '' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition 'Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/712404dd180161c5b0700d34eb50b2fb/default/test/ocannl_config.' -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/dune", lines 30-40, characters 0-281:
30 | (rule
31 |  (alias runtest)
32 |  (target
33 |   (dir log_files))
34 |  (action
35 |   (run
36 |    %{dep:micrograd_demo_logging.exe}
37 |    "--ocannl_debug_backend=text"
38 |    "--ocannl_log_file_stem=micrograd_demo_logging"
39 |    "--ocannl_log_main_domain_to_stdout=false"
40 |    "--ocannl_debug_log_to_stream_files=true")))
(cd _build/default/test && ./micrograd_demo_logging.exe --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_stream_files=true)
Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Retrieving commandline, environment, or config file variable ocannl_limit_constant_fill_size
Not found, using default 256
Retrieving commandline, environment, or config file variable ocannl_cd_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Found true, commandline --ocannl_debug_log_to_stream_files=true
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Retrieving commandline, environment, or config file variable ocannl_cc_backend_verify_codesign
Not found, using default false
Fatal error: exception File "src/printbox-text/PrintBox_text.ml", line 212, characters 6-12: Assertion failed
Raised at PrintBox_text.Output.Make_out.to_buf_aux_ in file "src/printbox-text/PrintBox_text.ml", line 212, characters 6-50
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 19-42
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from Stdlib__Map.Make.fold in file "map.ml", line 329, characters 26-41
Called from PrintBox_text.Output.Make_out.render in file "src/printbox-text/PrintBox_text.ml", line 242, characters 14-64
Called from PrintBox_text.output in file "src/printbox-text/PrintBox_text.ml", line 851, characters 2-31
Called from Minidebug_runtime.PrintBox.output_box in file "minidebug_runtime.ml", line 1527, characters 19-59
Called from Minidebug_runtime.PrintBox.close_log_impl.close_tree in file "minidebug_runtime.ml", line 1572, characters 6-38
Called from Backends.Add_buffer_retrieval_and_syncing.sync_routine in file "arrayjit/lib/backends.ml", lines 207-235, characters 31-82
Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 539-540, characters 4-92
Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 526-540, characters 23-92
Called from Dune__exe__Micrograd_demo_logging in file "test/micrograd_demo_logging.ml", line 36, characters 13-76
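The assertion is raised from printbox-text while ppx_minidebug flushes a log tree inside Backends sync_routine, i.e. it only triggers with the debug-logging flags this rule passes. A minimal way to re-run just this reproducer outside the runtest alias, reusing the flags from the dune rule above (the dune exec spelling is an assumption about how the executable is exposed):

opam exec -- dune exec ./test/micrograd_demo_logging.exe -- --ocannl_debug_backend=text --ocannl_log_file_stem=micrograd_demo_logging --ocannl_log_main_domain_to_stdout=false --ocannl_debug_log_to_stream_files=true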
(cd _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition zero2hero_1of7.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition primitive_ops.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition hello_world_op.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
(cd _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition micrograd_demo.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/einsum/einsum_trivia_exec.expected", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/einsum/einsum_trivia_exec.expected _build/default/test/einsum/einsum_trivia_exec.exe.output
diff --git a/_build/default/test/einsum/einsum_trivia_exec.expected b/_build/default/test/einsum/einsum_trivia_exec.exe.output
index ab456b5..74ceb2e 100644
--- a/_build/default/test/einsum/einsum_trivia_exec.expected
+++ b/_build/default/test/einsum/einsum_trivia_exec.exe.output
@@ -1,5 +1,5 @@
-Welcome to OCANNL! Reading configuration defaults from /Users/lukstafi/ocannl/_build/default/test/einsum/ocannl_config.
+Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/einsum/ocannl_config.
 Retrieving commandline, environment, or config file variable ocannl_log_level
 Found 0, in the config file
 HERE: test/einsum/einsum_trivia_exec.ml:31:21
(cd _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition einsum_trivia.ml -source-tree-root .. -diff-cmd -)
Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
File "test/einsum/moons_demo_variant.expected", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/einsum/moons_demo_variant.expected _build/default/test/einsum/moons_demo_variant.exe.output
diff --git a/_build/default/test/einsum/moons_demo_variant.expected b/_build/default/test/einsum/moons_demo_variant.exe.output
index 68171df..9bf36d8 100644
--- a/_build/default/test/einsum/moons_demo_variant.expected
+++ b/_build/default/test/einsum/moons_demo_variant.exe.output
@@ -1,121 +1,121 @@
-Welcome to OCANNL! Reading configuration defaults from /Users/lukstafi/ocannl/_build/default/test/einsum/ocannl_config.
+Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/einsum/ocannl_config.
 Retrieving commandline, environment, or config file variable ocannl_log_level
 Found 0, in the config file
 Half-moons scatterplot and decision boundary:
[ASCII plot diff elided: the expected 40-row scatterplot (points '*', '%', '.') and the actual output disagree on essentially every row, i.e. the rendered half-moons data and decision boundary differ, not only the banner line.]
 Loss:
[ASCII plot diff elided: expected loss axis 0.00 .. 3.79e+1 vs actual 0.00 .. 2.18, over step 0.00 .. 7.90e+1.]
 Log-loss, for better visibility:
[ASCII plot diff elided: expected log-loss axis -1.00e+1 .. 3.63 vs actual -1.00e+1 .. 7.79e-1, over step 0.00 .. 7.90e+1.]
(cd _build/default/test && ./moons_demo_parallel_run.exe)
Welcome to OCANNL! Reading configuration defaults from /src/_build/default/test/ocannl_config.
Retrieving commandline, environment, or config file variable ocannl_log_level
Found 0, in the config file
("Set log_level to" 1) └─{orphaned from #27}
Retrieving commandline, environment, or config file variable ocannl_backend
Found multicore_cc, in the config file
Properties of devices:
(multicore_devices (device ((device_name CPU) (device_ordinal 0) (num_domains 72))))
@!Retrieving commandline, environment, or config file variable ocannl_prefer_backend_uniformity
Found true, in the config file
Retrieving commandline, environment, or config file variable ocannl_debug_log_to_stream_files
Not found, using default false
Retrieving commandline, environment, or config file variable ocannl_ll_ident_style
Not found, using default heuristic
Retrieving commandline, environment, or config file variable ocannl_cc_backend_optimization_level
Not found, using default 3
Retrieving commandline, environment, or config file variable ocannl_cc_backend_compiler_command
Not found, using default gcc
Retrieving commandline, environment, or config file variable ocannl_cc_backend_verify_codesign
Not found, using default false
Retrieving commandline, environment, or config file variable ocannl_never_capture_stdout
Not found, using default false
Batch=59, step=60, lr=0.200000, batch loss=1.966667, epoch loss=1.966667 Batch=119, step=120, lr=0.199750, batch loss=1.200000, epoch loss=3.166667 Batch=179, step=180, lr=0.199500, batch loss=0.401032, epoch loss=3.567699 Batch=239, step=240, lr=0.199250, batch loss=0.000000, epoch loss=3.567699 Batch=299, step=300, lr=0.199000, batch loss=0.000000, epoch loss=3.567699 Batch=359, step=360, lr=0.198750, batch loss=0.000000, epoch loss=3.567699 Batch=419, step=420, lr=0.198500, batch loss=0.000000, epoch loss=3.567699 Batch=479, step=480, lr=0.198250, batch loss=0.000000, epoch loss=3.567699 Batch=539, step=540, lr=0.198000, batch loss=0.000000, epoch loss=3.567699 Batch=599, step=600, lr=0.197750, batch loss=0.000000, epoch loss=3.567699 Batch=659, step=660, lr=0.197500, batch loss=4.396238, epoch loss=7.963936 Batch=719, step=720, lr=0.197250, batch loss=3.606143, epoch loss=11.570080 Batch=779, step=780, lr=0.197000, batch loss=2.817080, epoch loss=14.387159 Batch=839, step=840, lr=0.196750, batch loss=2.029048, epoch loss=16.416207 Batch=899, step=900, lr=0.196500, batch loss=1.242046, epoch loss=17.658253 Batch=959, step=960, lr=0.196250, batch loss=0.456076, epoch loss=18.114330 Batch=1019, step=1020, lr=0.196000, batch loss=0.000000, epoch loss=18.114330 Batch=1079, step=1080, lr=0.195750, batch loss=0.000000, epoch loss=18.114330 Batch=1139, step=1140, lr=0.195500, batch loss=0.000000, epoch loss=18.114330 Batch=1199, step=1200, lr=0.195250, batch
loss=0.000000, epoch loss=18.114330 Epoch=0, step=1200, lr=0.195250, epoch loss=18.114330 Batch=59, step=1260, lr=0.195000, batch loss=4.328499, epoch loss=4.328499 Batch=119, step=1320, lr=0.194750, batch loss=3.548408, epoch loss=7.876907 Batch=179, step=1380, lr=0.194500, batch loss=2.769348, epoch loss=10.646255 Batch=239, step=1440, lr=0.194250, batch loss=1.991318, epoch loss=12.637572 Batch=299, step=1500, lr=0.194000, batch loss=1.214318, epoch loss=13.851891 Batch=359, step=1560, lr=0.193750, batch loss=0.438349, epoch loss=14.290239 Batch=419, step=1620, lr=0.193500, batch loss=0.000000, epoch loss=14.290239 Batch=479, step=1680, lr=0.193250, batch loss=0.000000, epoch loss=14.290239 Batch=539, step=1740, lr=0.193000, batch loss=0.000000, epoch loss=14.290239 Batch=599, step=1800, lr=0.192750, batch loss=0.000000, epoch loss=14.290239 Batch=659, step=1860, lr=0.192500, batch loss=4.336230, epoch loss=18.626469 Batch=719, step=1920, lr=0.192250, batch loss=3.566140, epoch loss=22.192609 Batch=779, step=1980, lr=0.192000, batch loss=2.797080, epoch loss=24.989689 Batch=839, step=2040, lr=0.191750, batch loss=2.029049, epoch loss=27.018739 Batch=899, step=2100, lr=0.191500, batch loss=1.262048, epoch loss=28.280787 Batch=959, step=2160, lr=0.191250, batch loss=0.496077, epoch loss=28.776864 Batch=1019, step=2220, lr=0.191000, batch loss=0.000000, epoch loss=28.776864 Batch=1079, step=2280, lr=0.190750, batch loss=0.000000, epoch loss=28.776864 Batch=1139, step=2340, lr=0.190500, batch loss=0.000000, epoch loss=28.776864 Batch=1199, step=2400, lr=0.190250, batch loss=0.000000, epoch loss=28.776864 Epoch=1, step=2400, lr=0.190250, epoch loss=28.776864 Batch=59, step=2460, lr=0.190000, batch loss=4.268520, epoch loss=4.268520 Batch=119, step=2520, lr=0.189750, batch loss=3.508433, epoch loss=7.776953 Batch=179, step=2580, lr=0.189500, batch loss=2.749376, epoch loss=10.526329 Batch=239, step=2640, lr=0.189250, batch loss=1.991348, epoch loss=12.517677 Batch=299, step=2700, lr=0.189000, batch loss=1.234348, epoch loss=13.752025 Batch=359, step=2760, lr=0.188750, batch loss=0.478377, epoch loss=14.230402 Batch=419, step=2820, lr=0.188500, batch loss=0.000000, epoch loss=14.230402 Batch=479, step=2880, lr=0.188250, batch loss=0.000000, epoch loss=14.230402 Batch=539, step=2940, lr=0.188000, batch loss=0.000000, epoch loss=14.230402 Batch=599, step=3000, lr=0.187750, batch loss=0.000000, epoch loss=14.230402 Batch=659, step=3060, lr=0.187500, batch loss=4.276223, epoch loss=18.506625 Batch=719, step=3120, lr=0.187250, batch loss=3.526138, epoch loss=22.032763 Batch=779, step=3180, lr=0.187000, batch loss=2.777080, epoch loss=24.809843 Batch=839, step=3240, lr=0.186750, batch loss=2.029051, epoch loss=26.838894 Batch=899, step=3300, lr=0.186500, batch loss=1.282050, epoch loss=28.120945 Batch=959, step=3360, lr=0.186250, batch loss=0.536077, epoch loss=28.657022 Batch=1019, step=3420, lr=0.186000, batch loss=0.000000, epoch loss=28.657022 Batch=1079, step=3480, lr=0.185750, batch loss=0.000000, epoch loss=28.657022 Batch=1139, step=3540, lr=0.185500, batch loss=0.000000, epoch loss=28.657022 Batch=1199, step=3600, lr=0.185250, batch loss=0.000000, epoch loss=28.657022 Epoch=2, step=3600, lr=0.185250, epoch loss=28.657022 Batch=59, step=3660, lr=0.185000, batch loss=4.208540, epoch loss=4.208540 Batch=119, step=3720, lr=0.184750, batch loss=3.468459, epoch loss=7.676999 Batch=179, step=3780, lr=0.184500, batch loss=2.729405, epoch loss=10.406404 Batch=239, step=3840, lr=0.184250, batch 
loss=1.991378, epoch loss=12.397781 Batch=299, step=3900, lr=0.184000, batch loss=1.254378, epoch loss=13.652159 Batch=359, step=3960, lr=0.183750, batch loss=0.518405, epoch loss=14.170564 Batch=419, step=4020, lr=0.183500, batch loss=0.000000, epoch loss=14.170564 Batch=479, step=4080, lr=0.183250, batch loss=0.000000, epoch loss=14.170564 Batch=539, step=4140, lr=0.183000, batch loss=0.000000, epoch loss=14.170564 Batch=599, step=4200, lr=0.182750, batch loss=0.000000, epoch loss=14.170564 Batch=659, step=4260, lr=0.182500, batch loss=4.216216, epoch loss=18.386780 Batch=719, step=4320, lr=0.182250, batch loss=3.486135, epoch loss=21.872915 Batch=779, step=4380, lr=0.182000, batch loss=2.757080, epoch loss=24.629995 Batch=839, step=4440, lr=0.181750, batch loss=2.029053, epoch loss=26.659048 Batch=899, step=4500, lr=0.181500, batch loss=1.302052, epoch loss=27.961100 Batch=959, step=4560, lr=0.181250, batch loss=0.576077, epoch loss=28.537177 Batch=1019, step=4620, lr=0.181000, batch loss=0.000000, epoch loss=28.537177 Batch=1079, step=4680, lr=0.180750, batch loss=0.000000, epoch loss=28.537177 Batch=1139, step=4740, lr=0.180500, batch loss=0.000000, epoch loss=28.537177 Batch=1199, step=4800, lr=0.180250, batch loss=0.000000, epoch loss=28.537177 Epoch=3, step=4800, lr=0.180250, epoch loss=28.537177 Batch=59, step=4860, lr=0.180000, batch loss=4.148561, epoch loss=4.148561 Batch=119, step=4920, lr=0.179750, batch loss=3.428483, epoch loss=7.577044 Batch=179, step=4980, lr=0.179500, batch loss=2.709432, epoch loss=10.286476 Batch=239, step=5040, lr=0.179250, batch loss=1.991407, epoch loss=12.277883 Batch=299, step=5100, lr=0.179000, batch loss=1.274407, epoch loss=13.552290 Batch=359, step=5160, lr=0.178750, batch loss=0.558433, epoch loss=14.110723 Batch=419, step=5220, lr=0.178500, batch loss=0.000000, epoch loss=14.110723 Batch=479, step=5280, lr=0.178250, batch loss=0.000000, epoch loss=14.110723 Batch=539, step=5340, lr=0.178000, batch loss=0.000000, epoch loss=14.110723 Batch=599, step=5400, lr=0.177750, batch loss=0.000000, epoch loss=14.110723 Batch=659, step=5460, lr=0.177500, batch loss=4.156208, epoch loss=18.266931 Batch=719, step=5520, lr=0.177250, batch loss=3.446132, epoch loss=21.713063 Batch=779, step=5580, lr=0.177000, batch loss=2.737080, epoch loss=24.450143 Batch=839, step=5640, lr=0.176750, batch loss=2.029054, epoch loss=26.479198 Batch=899, step=5700, lr=0.176500, batch loss=1.322053, epoch loss=27.801251 Batch=959, step=5760, lr=0.176250, batch loss=0.616077, epoch loss=28.417328 Batch=1019, step=5820, lr=0.176000, batch loss=0.000000, epoch loss=28.417328 Batch=1079, step=5880, lr=0.175750, batch loss=0.000000, epoch loss=28.417328 Batch=1139, step=5940, lr=0.175500, batch loss=0.000000, epoch loss=28.417328 Batch=1199, step=6000, lr=0.175250, batch loss=0.000000, epoch loss=28.417328 Epoch=4, step=6000, lr=0.175250, epoch loss=28.417328 Batch=59, step=6060, lr=0.175000, batch loss=4.088580, epoch loss=4.088580 Batch=119, step=6120, lr=0.174750, batch loss=3.388507, epoch loss=7.477088 Batch=179, step=6180, lr=0.174500, batch loss=2.689459, epoch loss=10.166547 Batch=239, step=6240, lr=0.174250, batch loss=1.991435, epoch loss=12.157981 Batch=299, step=6300, lr=0.174000, batch loss=1.294435, epoch loss=13.452416 Batch=359, step=6360, lr=0.173750, batch loss=0.598460, epoch loss=14.050876 Batch=419, step=6420, lr=0.173500, batch loss=0.000000, epoch loss=14.050876 Batch=479, step=6480, lr=0.173250, batch loss=0.000000, epoch loss=14.050876 Batch=539, 
step=6540, lr=0.173000, batch loss=0.000000, epoch loss=14.050876 Batch=599, step=6600, lr=0.172750, batch loss=0.000000, epoch loss=14.050876 Batch=659, step=6660, lr=0.172500, batch loss=4.096201, epoch loss=18.147077 Batch=719, step=6720, lr=0.172250, batch loss=3.406129, epoch loss=21.553206 Batch=779, step=6780, lr=0.172000, batch loss=2.717080, epoch loss=24.270287 Batch=839, step=6840, lr=0.171750, batch loss=2.029056, epoch loss=26.299342 Batch=899, step=6900, lr=0.171500, batch loss=1.342055, epoch loss=27.641397 Batch=959, step=6960, lr=0.171250, batch loss=0.656077, epoch loss=28.297475 Batch=1019, step=7020, lr=0.171000, batch loss=0.000000, epoch loss=28.297475 Batch=1079, step=7080, lr=0.170750, batch loss=0.000000, epoch loss=28.297475 Batch=1139, step=7140, lr=0.170500, batch loss=0.000000, epoch loss=28.297475 Batch=1199, step=7200, lr=0.170250, batch loss=0.000000, epoch loss=28.297475 Epoch=5, step=7200, lr=0.170250, epoch loss=28.297475 Batch=59, step=7260, lr=0.170000, batch loss=4.028600, epoch loss=4.028600 Batch=119, step=7320, lr=0.169750, batch loss=3.348531, epoch loss=7.377130 Batch=179, step=7380, lr=0.169500, batch loss=2.669485, epoch loss=10.046615 Batch=239, step=7440, lr=0.169250, batch loss=1.991462, epoch loss=12.038077 Batch=299, step=7500, lr=0.169000, batch loss=1.314462, epoch loss=13.352539 Batch=359, step=7560, lr=0.168750, batch loss=0.638485, epoch loss=13.991024 Batch=419, step=7620, lr=0.168500, batch loss=0.000000, epoch loss=13.991024 Batch=479, step=7680, lr=0.168250, batch loss=0.000000, epoch loss=13.991024 Batch=539, step=7740, lr=0.168000, batch loss=0.000000, epoch loss=13.991024 Batch=599, step=7800, lr=0.167750, batch loss=0.000000, epoch loss=13.991024 Batch=659, step=7860, lr=0.167500, batch loss=4.036195, epoch loss=18.027219 Batch=719, step=7920, lr=0.167250, batch loss=3.366126, epoch loss=21.393345 Batch=779, step=7980, lr=0.167000, batch loss=2.697081, epoch loss=24.090426 Batch=839, step=8040, lr=0.166750, batch loss=2.029057, epoch loss=26.119483 Batch=899, step=8100, lr=0.166500, batch loss=1.362056, epoch loss=27.481539 Batch=959, step=8160, lr=0.166250, batch loss=0.696078, epoch loss=28.177617 Batch=1019, step=8220, lr=0.166000, batch loss=0.031121, epoch loss=28.208738 Batch=1079, step=8280, lr=0.165750, batch loss=0.000000, epoch loss=28.208738 Batch=1139, step=8340, lr=0.165500, batch loss=0.000000, epoch loss=28.208738 Batch=1199, step=8400, lr=0.165250, batch loss=0.000000, epoch loss=28.208738 Epoch=6, step=8400, lr=0.165250, epoch loss=28.208738 Batch=59, step=8460, lr=0.165000, batch loss=4.632552, epoch loss=4.632552 Batch=119, step=8520, lr=0.164750, batch loss=3.972465, epoch loss=8.605017 Batch=179, step=8580, lr=0.164500, batch loss=3.313400, epoch loss=11.918418 Batch=239, step=8640, lr=0.164250, batch loss=2.655357, epoch loss=14.573775 Batch=299, step=8700, lr=0.164000, batch loss=1.998336, epoch loss=16.572110 Batch=359, step=8760, lr=0.163750, batch loss=1.342336, epoch loss=17.914446 Batch=419, step=8820, lr=0.163500, batch loss=0.687357, epoch loss=18.601803 Batch=479, step=8880, lr=0.163250, batch loss=0.033400, epoch loss=18.635203 Batch=539, step=8940, lr=0.163000, batch loss=0.000000, epoch loss=18.635203 Batch=599, step=9000, lr=0.162750, batch loss=0.000000, epoch loss=18.635203 Batch=659, step=9060, lr=0.162500, batch loss=4.619365, epoch loss=23.254568 Batch=719, step=9120, lr=0.162250, batch loss=3.969280, epoch loss=27.223848 Batch=779, step=9180, lr=0.162000, batch loss=3.320216, epoch 
loss=30.544064 Batch=839, step=9240, lr=0.161750, batch loss=2.672173, epoch loss=33.216237 Batch=899, step=9300, lr=0.161500, batch loss=2.025151, epoch loss=35.241388 Batch=959, step=9360, lr=0.161250, batch loss=1.379151, epoch loss=36.620539 Batch=1019, step=9420, lr=0.161000, batch loss=0.734171, epoch loss=37.354709 Batch=1079, step=9480, lr=0.160750, batch loss=0.090211, epoch loss=37.444921 Batch=1139, step=9540, lr=0.160500, batch loss=0.000000, epoch loss=37.444921 Batch=1199, step=9600, lr=0.160250, batch loss=0.000000, epoch loss=37.444921 Epoch=7, step=9600, lr=0.160250, epoch loss=37.444921 Batch=59, step=9660, lr=0.160000, batch loss=4.552563, epoch loss=4.552563 Batch=119, step=9720, lr=0.159750, batch loss=3.912482, epoch loss=8.465045 Batch=179, step=9780, lr=0.159500, batch loss=3.273421, epoch loss=11.738466 Batch=239, step=9840, lr=0.159250, batch loss=2.635380, epoch loss=14.373846 Batch=299, step=9900, lr=0.159000, batch loss=1.998360, epoch loss=16.372206 Batch=359, step=9960, lr=0.158750, batch loss=1.362360, epoch loss=17.734566 Batch=419, step=10020, lr=0.158500, batch loss=0.727380, epoch loss=18.461946 Batch=479, step=10080, lr=0.158250, batch loss=0.093421, epoch loss=18.555367 Batch=539, step=10140, lr=0.158000, batch loss=0.000000, epoch loss=18.555367 Batch=599, step=10200, lr=0.157750, batch loss=0.000000, epoch loss=18.555367 Batch=659, step=10260, lr=0.157500, batch loss=4.539359, epoch loss=23.094725 Batch=719, step=10320, lr=0.157250, batch loss=3.909279, epoch loss=27.004004 Batch=779, step=10380, lr=0.157000, batch loss=3.280219, epoch loss=30.284223 Batch=839, step=10440, lr=0.156750, batch loss=2.652178, epoch loss=32.936401 Batch=899, step=10500, lr=0.156500, batch loss=2.025158, epoch loss=34.961559 Batch=959, step=10560, lr=0.156250, batch loss=1.399157, epoch loss=36.360716 Batch=1019, step=10620, lr=0.156000, batch loss=0.774176, epoch loss=37.134892 Batch=1079, step=10680, lr=0.155750, batch loss=0.150214, epoch loss=37.285106 Batch=1139, step=10740, lr=0.155500, batch loss=0.000000, epoch loss=37.285106 Batch=1199, step=10800, lr=0.155250, batch loss=0.000000, epoch loss=37.285106 Epoch=8, step=10800, lr=0.155250, epoch loss=37.285106 Batch=59, step=10860, lr=0.155000, batch loss=4.472574, epoch loss=4.472574 Batch=119, step=10920, lr=0.154750, batch loss=3.852498, epoch loss=8.325072 Batch=179, step=10980, lr=0.154500, batch loss=3.233440, epoch loss=11.558513 Batch=239, step=11040, lr=0.154250, batch loss=2.615402, epoch loss=14.173915 Batch=299, step=11100, lr=0.154000, batch loss=1.998383, epoch loss=16.172298 Batch=359, step=11160, lr=0.153750, batch loss=1.382383, epoch loss=17.554682 Batch=419, step=11220, lr=0.153500, batch loss=0.767402, epoch loss=18.322084 Batch=479, step=11280, lr=0.153250, batch loss=0.153440, epoch loss=18.475524 Batch=539, step=11340, lr=0.153000, batch loss=0.000000, epoch loss=18.475524 Batch=599, step=11400, lr=0.152750, batch loss=0.000000, epoch loss=18.475524 Batch=659, step=11460, lr=0.152500, batch loss=4.459353, epoch loss=22.934877 Batch=719, step=11520, lr=0.152250, batch loss=3.849278, epoch loss=26.784155 Batch=779, step=11580, lr=0.152000, batch loss=3.240221, epoch loss=30.024376 Batch=839, step=11640, lr=0.151750, batch loss=2.632184, epoch loss=32.656560 Batch=899, step=11700, lr=0.151500, batch loss=2.025165, epoch loss=34.681724 Batch=959, step=11760, lr=0.151250, batch loss=1.419164, epoch loss=36.100888 Batch=1019, step=11820, lr=0.151000, batch loss=0.814181, epoch loss=36.915070 
Batch=1079, step=11880, lr=0.150750, batch loss=0.210217, epoch loss=37.125287 Batch=1139, step=11940, lr=0.150500, batch loss=0.000000, epoch loss=37.125287 Batch=1199, step=12000, lr=0.150250, batch loss=0.000000, epoch loss=37.125287 Epoch=9, step=12000, lr=0.150250, epoch loss=37.125287 Batch=59, step=12060, lr=0.150000, batch loss=4.392585, epoch loss=4.392585 Batch=119, step=12120, lr=0.149750, batch loss=3.792513, epoch loss=8.185098 Batch=179, step=12180, lr=0.149500, batch loss=3.193459, epoch loss=11.378557 Batch=239, step=12240, lr=0.149250, batch loss=2.595424, epoch loss=13.973981 Batch=299, step=12300, lr=0.149000, batch loss=1.998406, epoch loss=15.972386 Batch=359, step=12360, lr=0.148750, batch loss=1.402406, epoch loss=17.374792 Batch=419, step=12420, lr=0.148500, batch loss=0.807424, epoch loss=18.182216 Batch=479, step=12480, lr=0.148250, batch loss=0.213459, epoch loss=18.395675 Batch=539, step=12540, lr=0.148000, batch loss=0.000000, epoch loss=18.395675 Batch=599, step=12600, lr=0.147750, batch loss=0.000000, epoch loss=18.395675 Batch=659, step=12660, lr=0.147500, batch loss=4.379347, epoch loss=22.775022 Batch=719, step=12720, lr=0.147250, batch loss=3.789277, epoch loss=26.564299 Batch=779, step=12780, lr=0.147000, batch loss=3.200224, epoch loss=29.764524 Batch=839, step=12840, lr=0.146750, batch loss=2.612189, epoch loss=32.376713 Batch=899, step=12900, lr=0.146500, batch loss=2.025171, epoch loss=34.401884 Batch=959, step=12960, lr=0.146250, batch loss=1.439170, epoch loss=35.841054 Batch=1019, step=13020, lr=0.146000, batch loss=0.854187, epoch loss=36.695241 Batch=1079, step=13080, lr=0.145750, batch loss=0.270220, epoch loss=36.965461 Batch=1139, step=13140, lr=0.145500, batch loss=0.000000, epoch loss=36.965461 Batch=1199, step=13200, lr=0.145250, batch loss=0.000000, epoch loss=36.965461 Epoch=10, step=13200, lr=0.145250, epoch loss=36.965461 Batch=59, step=13260, lr=0.145000, batch loss=4.312595, epoch loss=4.312595 Batch=119, step=13320, lr=0.144750, batch loss=3.732528, epoch loss=8.045123 Batch=179, step=13380, lr=0.144500, batch loss=3.153478, epoch loss=11.198600 Batch=239, step=13440, lr=0.144250, batch loss=2.575444, epoch loss=13.774045 Batch=299, step=13500, lr=0.144000, batch loss=1.998428, epoch loss=15.772472 Batch=359, step=13560, lr=0.143750, batch loss=1.422428, epoch loss=17.194900 Batch=419, step=13620, lr=0.143500, batch loss=0.847444, epoch loss=18.042344 Batch=479, step=13680, lr=0.143250, batch loss=0.273477, epoch loss=18.315822 Batch=539, step=13740, lr=0.143000, batch loss=0.000000, epoch loss=18.315822 Batch=599, step=13800, lr=0.142750, batch loss=0.000000, epoch loss=18.315822 Batch=659, step=13860, lr=0.142500, batch loss=4.299342, epoch loss=22.615164 Batch=719, step=13920, lr=0.142250, batch loss=3.729276, epoch loss=26.344440 Batch=779, step=13980, lr=0.142000, batch loss=3.160227, epoch loss=29.504667 Batch=839, step=14040, lr=0.141750, batch loss=2.592194, epoch loss=32.096862 Batch=899, step=14100, lr=0.141500, batch loss=2.025178, epoch loss=34.122039 Batch=959, step=14160, lr=0.141250, batch loss=1.459177, epoch loss=35.581216 Batch=1019, step=14220, lr=0.141000, batch loss=0.894192, epoch loss=36.475408 Batch=1079, step=14280, lr=0.140750, batch loss=0.330223, epoch loss=36.805632 Batch=1139, step=14340, lr=0.140500, batch loss=0.000000, epoch loss=36.805632 Batch=1199, step=14400, lr=0.140250, batch loss=0.000000, epoch loss=36.805632 Epoch=11, step=14400, lr=0.140250, epoch loss=36.805632 Batch=59, step=14460, 
lr=0.140000, batch loss=4.232604, epoch loss=4.232604 Batch=119, step=14520, lr=0.139750, batch loss=3.672542, epoch loss=7.905146 Batch=179, step=14580, lr=0.139500, batch loss=3.113495, epoch loss=11.018641 Batch=239, step=14640, lr=0.139250, batch loss=2.555464, epoch loss=13.574105 Batch=299, step=14700, lr=0.139000, batch loss=1.998449, epoch loss=15.572554 Batch=359, step=14760, lr=0.138750, batch loss=1.442449, epoch loss=17.015002 Batch=419, step=14820, lr=0.138500, batch loss=0.887464, epoch loss=17.902466 Batch=479, step=14880, lr=0.138250, batch loss=0.333495, epoch loss=18.235961 Batch=539, step=14940, lr=0.138000, batch loss=0.000000, epoch loss=18.235961 Batch=599, step=15000, lr=0.137750, batch loss=0.000000, epoch loss=18.235961 Batch=659, step=15060, lr=0.137500, batch loss=4.219337, epoch loss=22.455298 Batch=719, step=15120, lr=0.137250, batch loss=3.669276, epoch loss=26.124574 Batch=779, step=15180, lr=0.137000, batch loss=3.120230, epoch loss=29.244804 Batch=839, step=15240, lr=0.136750, batch loss=2.572199, epoch loss=31.817003 Batch=899, step=15300, lr=0.136500, batch loss=2.025184, epoch loss=33.842187 Batch=959, step=15360, lr=0.136250, batch loss=1.479183, epoch loss=35.321370 Batch=1019, step=15420, lr=0.136000, batch loss=0.934197, epoch loss=36.255567 Batch=1079, step=15480, lr=0.135750, batch loss=0.390226, epoch loss=36.645793 Batch=1139, step=15540, lr=0.135500, batch loss=0.000000, epoch loss=36.645793 Batch=1199, step=15600, lr=0.135250, batch loss=0.000000, epoch loss=36.645793 Epoch=12, step=15600, lr=0.135250, epoch loss=36.645793 Batch=59, step=15660, lr=0.135000, batch loss=4.152614, epoch loss=4.152614 Batch=119, step=15720, lr=0.134750, batch loss=3.612556, epoch loss=7.765169 Batch=179, step=15780, lr=0.134500, batch loss=3.073512, epoch loss=10.838681 Batch=239, step=15840, lr=0.134250, batch loss=2.535483, epoch loss=13.374164 Batch=299, step=15900, lr=0.134000, batch loss=1.998469, epoch loss=15.372633 Batch=359, step=15960, lr=0.133750, batch loss=1.462469, epoch loss=16.835102 Batch=419, step=16020, lr=0.133500, batch loss=0.927483, epoch loss=17.762585 Batch=479, step=16080, lr=0.133250, batch loss=0.393512, epoch loss=18.156097 Batch=539, step=16140, lr=0.133000, batch loss=0.000000, epoch loss=18.156097 Batch=599, step=16200, lr=0.132750, batch loss=0.000000, epoch loss=18.156097 Batch=659, step=16260, lr=0.132500, batch loss=4.139332, epoch loss=22.295429 Batch=719, step=16320, lr=0.132250, batch loss=3.609275, epoch loss=25.904704 Batch=779, step=16380, lr=0.132000, batch loss=3.080232, epoch loss=28.984936 Batch=839, step=16440, lr=0.131750, batch loss=2.552204, epoch loss=31.537140 Batch=899, step=16500, lr=0.131500, batch loss=2.025189, epoch loss=33.562329 Batch=959, step=16560, lr=0.131250, batch loss=1.499189, epoch loss=35.061518 Batch=1019, step=16620, lr=0.131000, batch loss=0.974202, epoch loss=36.035719 Batch=1079, step=16680, lr=0.130750, batch loss=0.450229, epoch loss=36.485948 Batch=1139, step=16740, lr=0.130500, batch loss=0.000000, epoch loss=36.485948 Batch=1199, step=16800, lr=0.130250, batch loss=0.000000, epoch loss=36.485948 Epoch=13, step=16800, lr=0.130250, epoch loss=36.485948 Batch=59, step=16860, lr=0.130000, batch loss=4.072623, epoch loss=4.072623 Batch=119, step=16920, lr=0.129750, batch loss=3.552569, epoch loss=7.625192 Batch=179, step=16980, lr=0.129500, batch loss=3.033529, epoch loss=10.658721 Batch=239, step=17040, lr=0.129250, batch loss=2.515502, epoch loss=13.174223 Batch=299, step=17100, 
lr=0.129000, batch loss=1.998489, epoch loss=15.172712 Batch=359, step=17160, lr=0.128750, batch loss=1.482489, epoch loss=16.655200 Batch=419, step=17220, lr=0.128500, batch loss=0.967502, epoch loss=17.622703 Batch=479, step=17280, lr=0.128250, batch loss=0.453529, epoch loss=18.076231 Batch=539, step=17340, lr=0.128000, batch loss=0.000000, epoch loss=18.076231 Batch=599, step=17400, lr=0.127750, batch loss=0.000000, epoch loss=18.076231 Batch=659, step=17460, lr=0.127500, batch loss=4.059326, epoch loss=22.135558 Batch=719, step=17520, lr=0.127250, batch loss=3.549274, epoch loss=25.684832 Batch=779, step=17580, lr=0.127000, batch loss=3.040235, epoch loss=28.725066 Batch=839, step=17640, lr=0.126750, batch loss=2.532208, epoch loss=31.257274 Batch=899, step=17700, lr=0.126500, batch loss=2.025195, epoch loss=33.282469 Batch=959, step=17760, lr=0.126250, batch loss=1.519194, epoch loss=34.801663 Batch=1019, step=17820, lr=0.126000, batch loss=1.014206, epoch loss=35.815869 Batch=1079, step=17880, lr=0.125750, batch loss=0.510231, epoch loss=36.326100 Batch=1139, step=17940, lr=0.125500, batch loss=0.007268, epoch loss=36.333369 Batch=1199, step=18000, lr=0.125250, batch loss=0.000000, epoch loss=36.333369 Epoch=14, step=18000, lr=0.125250, epoch loss=36.333369 Batch=59, step=18060, lr=0.125000, batch loss=4.494619, epoch loss=4.494619 Batch=119, step=18120, lr=0.124750, batch loss=3.994557, epoch loss=8.489176 Batch=179, step=18180, lr=0.124500, batch loss=3.495507, epoch loss=11.984683 Batch=239, step=18240, lr=0.124250, batch loss=2.997470, epoch loss=14.982153 Batch=299, step=18300, lr=0.124000, batch loss=2.500445, epoch loss=17.482598 Batch=359, step=18360, lr=0.123750, batch loss=2.004433, epoch loss=19.487030 Batch=419, step=18420, lr=0.123500, batch loss=1.509432, epoch loss=20.996463 Batch=479, step=18480, lr=0.123250, batch loss=1.015445, epoch loss=22.011907 Batch=539, step=18540, lr=0.123000, batch loss=0.522469, epoch loss=22.534376 Batch=599, step=18600, lr=0.122750, batch loss=0.030505, epoch loss=22.564881 Batch=659, step=18660, lr=0.122500, batch loss=4.460446, epoch loss=27.025328 Batch=719, step=18720, lr=0.122250, batch loss=3.970386, epoch loss=30.995714 Batch=779, step=18780, lr=0.122000, batch loss=3.481338, epoch loss=34.477052 Batch=839, step=18840, lr=0.121750, batch loss=2.993302, epoch loss=37.470353 Batch=899, step=18900, lr=0.121500, batch loss=2.506278, epoch loss=39.976631 Batch=959, step=18960, lr=0.121250, batch loss=2.020265, epoch loss=41.996896 Batch=1019, step=19020, lr=0.121000, batch loss=1.535265, epoch loss=43.532161 Batch=1079, step=19080, lr=0.120750, batch loss=1.051276, epoch loss=44.583437 Batch=1139, step=19140, lr=0.120500, batch loss=0.568299, epoch loss=45.151736 Batch=1199, step=19200, lr=0.120250, batch loss=0.086334, epoch loss=45.238070 Epoch=15, step=19200, lr=0.120250, epoch loss=45.238070 Batch=59, step=19260, lr=0.120000, batch loss=4.394620, epoch loss=4.394620 Batch=119, step=19320, lr=0.119750, batch loss=3.914563, epoch loss=8.309183 Batch=179, step=19380, lr=0.119500, batch loss=3.435517, epoch loss=11.744700 Batch=239, step=19440, lr=0.119250, batch loss=2.957483, epoch loss=14.702183 Batch=299, step=19500, lr=0.119000, batch loss=2.480460, epoch loss=17.182643 Batch=359, step=19560, lr=0.118750, batch loss=2.004448, epoch loss=19.187091 Batch=419, step=19620, lr=0.118500, batch loss=1.529448, epoch loss=20.716540 Batch=479, step=19680, lr=0.118250, batch loss=1.055459, epoch loss=21.771999 Batch=539, step=19740, 
lr=0.118000, batch loss=0.582482, epoch loss=22.354481 Batch=599, step=19800, lr=0.117750, batch loss=0.110515, epoch loss=22.464996 Batch=659, step=19860, lr=0.117500, batch loss=4.360440, epoch loss=26.825437 Batch=719, step=19920, lr=0.117250, batch loss=3.890385, epoch loss=30.715822 Batch=779, step=19980, lr=0.117000, batch loss=3.421341, epoch loss=34.137162 Batch=839, step=20040, lr=0.116750, batch loss=2.953307, epoch loss=37.090470 Batch=899, step=20100, lr=0.116500, batch loss=2.486285, epoch loss=39.576755 Batch=959, step=20160, lr=0.116250, batch loss=2.020274, epoch loss=41.597029 Batch=1019, step=20220, lr=0.116000, batch loss=1.555273, epoch loss=43.152302 Batch=1079, step=20280, lr=0.115750, batch loss=1.091284, epoch loss=44.243586 Batch=1139, step=20340, lr=0.115500, batch loss=0.628305, epoch loss=44.871891 Batch=1199, step=20400, lr=0.115250, batch loss=0.166336, epoch loss=45.038227 Epoch=16, step=20400, lr=0.115250, epoch loss=45.038227 Batch=59, step=20460, lr=0.115000, batch loss=4.294621, epoch loss=4.294621 Batch=119, step=20520, lr=0.114750, batch loss=3.834569, epoch loss=8.129190 Batch=179, step=20580, lr=0.114500, batch loss=3.375526, epoch loss=11.504716 Batch=239, step=20640, lr=0.114250, batch loss=2.917495, epoch loss=14.422211 Batch=299, step=20700, lr=0.114000, batch loss=2.460474, epoch loss=16.882685 Batch=359, step=20760, lr=0.113750, batch loss=2.004463, epoch loss=18.887148 Batch=419, step=20820, lr=0.113500, batch loss=1.549463, epoch loss=20.436612 Batch=479, step=20880, lr=0.113250, batch loss=1.095474, epoch loss=21.532085 Batch=539, step=20940, lr=0.113000, batch loss=0.642494, epoch loss=22.174579 Batch=599, step=21000, lr=0.112750, batch loss=0.190525, epoch loss=22.365104 Batch=659, step=21060, lr=0.112500, batch loss=4.260435, epoch loss=26.625538 Batch=719, step=21120, lr=0.112250, batch loss=3.810384, epoch loss=30.435922 Batch=779, step=21180, lr=0.112000, batch loss=3.361343, epoch loss=33.797265 Batch=839, step=21240, lr=0.111750, batch loss=2.913313, epoch loss=36.710578 Batch=899, step=21300, lr=0.111500, batch loss=2.466292, epoch loss=39.176870 Batch=959, step=21360, lr=0.111250, batch loss=2.020282, epoch loss=41.197152 Batch=1019, step=21420, lr=0.111000, batch loss=1.575281, epoch loss=42.772433 Batch=1079, step=21480, lr=0.110750, batch loss=1.131291, epoch loss=43.903724 Batch=1139, step=21540, lr=0.110500, batch loss=0.688310, epoch loss=44.592034 Batch=1199, step=21600, lr=0.110250, batch loss=0.246339, epoch loss=44.838373 Epoch=17, step=21600, lr=0.110250, epoch loss=44.838373 Batch=59, step=21660, lr=0.110000, batch loss=4.194623, epoch loss=4.194623 Batch=119, step=21720, lr=0.109750, batch loss=3.754574, epoch loss=7.949197 Batch=179, step=21780, lr=0.109500, batch loss=3.315536, epoch loss=11.264733 Batch=239, step=21840, lr=0.109250, batch loss=2.877507, epoch loss=14.142240 Batch=299, step=21900, lr=0.109000, batch loss=2.440488, epoch loss=16.582727 Batch=359, step=21960, lr=0.108750, batch loss=2.004478, epoch loss=18.587206 Batch=419, step=22020, lr=0.108500, batch loss=1.569478, epoch loss=20.156684 Batch=479, step=22080, lr=0.108250, batch loss=1.135487, epoch loss=21.292171 Batch=539, step=22140, lr=0.108000, batch loss=0.702506, epoch loss=21.994677 Batch=599, step=22200, lr=0.107750, batch loss=0.270534, epoch loss=22.265211 Batch=659, step=22260, lr=0.107500, batch loss=4.160429, epoch loss=26.425640 Batch=719, step=22320, lr=0.107250, batch loss=3.730382, epoch loss=30.156022 Batch=779, step=22380, 
lr=0.107000, batch loss=3.301345, epoch loss=33.457367 Batch=839, step=22440, lr=0.106750, batch loss=2.873317, epoch loss=36.330685 Batch=899, step=22500, lr=0.106500, batch loss=2.446299, epoch loss=38.776983 Batch=959, step=22560, lr=0.106250, batch loss=2.020289, epoch loss=40.797273 Batch=1019, step=22620, lr=0.106000, batch loss=1.595289, epoch loss=42.392561 Batch=1079, step=22680, lr=0.105750, batch loss=1.171297, epoch loss=43.563859 Batch=1139, step=22740, lr=0.105500, batch loss=0.748315, epoch loss=44.312174 Batch=1199, step=22800, lr=0.105250, batch loss=0.326341, epoch loss=44.638515 Epoch=18, step=22800, lr=0.105250, epoch loss=44.638515 Batch=59, step=22860, lr=0.105000, batch loss=4.094624, epoch loss=4.094624 Batch=119, step=22920, lr=0.104750, batch loss=3.674580, epoch loss=7.769203 Batch=179, step=22980, lr=0.104500, batch loss=3.255544, epoch loss=11.024747 Batch=239, step=23040, lr=0.104250, batch loss=2.837518, epoch loss=13.862266 Batch=299, step=23100, lr=0.104000, batch loss=2.420501, epoch loss=16.282766 Batch=359, step=23160, lr=0.103750, batch loss=2.004492, epoch loss=18.287258 Batch=419, step=23220, lr=0.103500, batch loss=1.589492, epoch loss=19.876750 Batch=479, step=23280, lr=0.103250, batch loss=1.175500, epoch loss=21.052251 Batch=539, step=23340, lr=0.103000, batch loss=0.762517, epoch loss=21.814768 Batch=599, step=23400, lr=0.102750, batch loss=0.350543, epoch loss=22.165311 Batch=659, step=23460, lr=0.102500, batch loss=4.060423, epoch loss=26.225734 Batch=719, step=23520, lr=0.102250, batch loss=3.650381, epoch loss=29.876115 Batch=779, step=23580, lr=0.102000, batch loss=3.241347, epoch loss=33.117462 Batch=839, step=23640, lr=0.101750, batch loss=2.833322, epoch loss=35.950784 Batch=899, step=23700, lr=0.101500, batch loss=2.426305, epoch loss=38.377089 Batch=959, step=23760, lr=0.101250, batch loss=2.020296, epoch loss=40.397385 Batch=1019, step=23820, lr=0.101000, batch loss=1.615296, epoch loss=42.012680 Batch=1079, step=23880, lr=0.100750, batch loss=1.211303, epoch loss=43.223984 Batch=1139, step=23940, lr=0.100500, batch loss=0.808319, epoch loss=44.032303 Batch=1199, step=24000, lr=0.100250, batch loss=0.406343, epoch loss=44.438646 Epoch=19, step=24000, lr=0.100250, epoch loss=44.438646 Half-moons scatterplot and decision boundary: ┌────────────────────────────────────────────────────────────────────────────────────────────────────┐ │...................................#................................................................│ │.......................#.#################.#........................................................│ │......................##########################....................................................│ │.................#################################..................................................│ │..............######################################................................................│ │.............#########################################..............................................│ │...........################.............#################...........................................│ │..........##############...................###############..........................................│ │........##############..........................###########.........................................│ │.......#############............................##.##########.......................................│ 
│......###########..................................############.....................................│ │.....###########....................................###########.....................................│ │....##########.....................%.................###########....................................│ │...#########.#...................%%%%.................#########..............................%%%.%%.│ │...##########..................%%%.%%...................########.#...........................%%.%%%.│ │..#######......................%%%%%%%..................##########...........................%%%%%%%│ │.#########....................%%%%%%%%....................########...........................%%%%%%.│ │.########......................%%%%%%%.....................########..........................%%%%%%.│ │#########.....................%%.%%%%%......................########........................%%%%%%%.│ │.#######.......................%%%%%%%%....................#########........................%%%%%%%.│ │########.......................%%%%%%%%.....................########.......................%%%%%%%%.│ │#######.........................%%%%%%%......................#######.......................%%%%%%%%.│ │#######.........................%%%%%%%%......................#######.....................%%%%%%%%..│ │#######.........................%%%%%%%%.....................#######.....................%%%%%%%%%..│ │######............................%%%%%%%%...................#######....................%%%%%%%%%...│ │######...........................%%%%%%%%....................########..................%%%%%%%%%%...│ │######............................%%%%%%%%%..................#######..................%.%%%%%%%%....│ │###.#.............................%.%%%%%%%%%.................#.#.##..................%%.%%%%%%%....│ │....................................%%%%%%%%%.......................................%%%%%%%%%%%.....│ │......................................%%%%%%%.%.....................................%%%%%%%%%%%.....│ │.....................................%%%%%%%%%%%.................................%%%%%%%%%%%%.......│ │......................................%%.%%%%%%%%...............................%%%%%%%%%%%.........│ │........................................%%%%%%%%%%%%........................%..%%%%%%%%%%%%.........│ │........................................%%%%%%%%%%%%%%.%....................%%%%%%%%%%%%%...........│ │..........................................%%%%%%%%%%%%%%%%%..............%%%%%%%%%%%%%%%............│ │............................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%...............│ │................................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%.%..%..............│ │................................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%.%..................│ │..................................................%%%%%%%%%%%%%%%%%%%%%%%%%%%%.%....................│ │.....................................................%%.%..%%%%%%%%%%%%%%%..........................│ └────────────────────────────────────────────────────────────────────────────────────────────────────┘ (cd _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test && .tutorials.inline-tests/inline-test-runner.exe inline-test-runner tutorials -partition moons_demo_parallel.ml -source-tree-root .. -diff-cmd -) Welcome to OCANNL! Reading configuration defaults from /src/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/ocannl_config. 
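The surrounding lines show how OCANNL resolves its settings: each ocannl_* variable is looked up on the command line, in the environment, or in the ocannl_config file, and falls back to a built-in default when it is not found. Purely as an illustration, a config file reproducing the values this run reports could look like the sketch below; the variable names and values are taken verbatim from the log above, but the key=value line syntax is an assumption, not something this log confirms.
    ocannl_log_level=0
    ocannl_backend=multicore_cc
    ocannl_prefer_backend_uniformity=true
Variables not listed in the file, such as ocannl_cc_backend_compiler_command or ocannl_cc_backend_optimization_level, keep the defaults reported above (gcc and 3, respectively).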
Retrieving commandline, environment, or config file variable ocannl_log_level Found 0, in the config file File "test/zero2hero_1of7.ml", line 1, characters 0-0: /usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/zero2hero_1of7.ml _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/zero2hero_1of7.ml.corrected diff --git a/_build/default/test/zero2hero_1of7.ml b/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/zero2hero_1of7.ml.corrected index 3b7cc00..394b619 100644 --- a/_build/default/test/zero2hero_1of7.ml +++ b/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/zero2hero_1of7.ml.corrected @@ -30,24 +30,24 @@ let%expect_test "Graph drawing recompile" = Tensor.print_tree ~with_grad:true ~depth:9 f_nd; [%expect {| - #15 +_f_nd - 6.00e+1 - #16 grad_+_f_nd Virt/30 - <void> - #13 - Virt/152 │#2 5. Virt/40 - <void> │<void> - #14 grad_- Virt/30 │ - <void> │ - #11 *. Virt/152 │ #4 *. Virt/152 │ - <void> │ <void> │ - #12 grad_*. Virt/30 │ #5 grad_*. Virt/30 │ - <void> │ <void> │ - #10 3. Virt/40│#7 **. Virt/152 │#3 4. Virt/40│#0 x │ - <void> │<void> │<void> │ 5.00 │ - │#8 grad_**. Virt/30│ │#1 grad_x Local/30│ - │<void> │ │<void> │ - │[0]│ #6 2. Virt/40 │ │ │ - │ │ <void> │ │ │ + #15 +_f_nd + 5.00 + #16 grad_+_f_nd Virt/30 + <void> + #13 - Virt/152 │#2 5. Virt/40 + <void> │<void> + #14 grad_- Virt/30 │ + <void> │ + #11 *. Virt/152 │#4 *. Virt/152 │ + <void> │<void> │ + #12 grad_*. Virt/30 │#5 grad_*. Virt/30│ + <void> │<void> │ + #10 3. Virt/40│#7 **. Virt/152 │#3 4. Virt/40│[0] │ + <void> │<void> │<void> │ │ + │#8 grad_**. Virt/30│ │ │ + │<void> │ │ │ + │[0]│ #6 2. Virt/40 │ │ │ + │ │ <void> │ │ │ |}]; let%op f = (3 *. ("x" [ 5 ] **. 2)) - (4 *. x) + 5 in Train.every_non_literal_on_host f; @@ -55,27 +55,7 @@ let%expect_test "Graph drawing recompile" = let f_bprop = Train.to_routine (module Backend) ctx IDX.empty f_upd in Train.run f_bprop; Tensor.print_tree ~with_grad:true ~depth:9 f; - [%expect - {| - #32 +_f - 6.00e+1 - #33 grad_+_f - 1.00 - #30 - │#19 5. Virt/40 - 5.50e+1 │<void> - #31 grad_- │ - 1.00 │ - #28 *. │ #21 *. │ - 7.50e+1 │ 2.00e+1 │ - #29 grad_*. │ #22 grad_*. │ - 1.00 │ -1.00 │ - #27 3. Virt/40│ #24 **. │#20 4. Virt/40│#17 x │ - <void> │ 2.50e+1 │<void> │ 5.00 │ - │ #25 grad_**. │ │#18 grad_x│ - │ 3.00 │ │ 2.60e+1 │ - │[17]│#23 2. Virt/40│ │ │ - │ │<void> │ │ │ - |}]; + [%expect.unreachable]; let xs = Array.init 10 ~f:Float.(fun i -> of_int i - 5.) 
in let ys = Array.map xs ~f:(fun v -> @@ -94,54 +74,21 @@ let%expect_test "Graph drawing recompile" = [ Scatterplot { points = Array.zip_exn xs ys; content = PrintBox.line "#" } ] in PrintBox_text.output Stdio.stdout plot_box; - [%expect - {| - ┌────────┬────────────────────────────────────────────────────────────────────────────────────────────────────┐ - │ 1.00e+2│# │ - │ │ │ - │ │ │ - │ │ │ - │ │ │ - │ │ │ - │ │ │ - │ │ │ - │ │ │ - │ │ │ - │ │ │ - │ │ │ - │ │ │ - │ │ # │ - │ │ │ - │ │ │ - │ │ │ - │ │ │ - │f │ │ - │( │ │ - │x │ │ - │) │ │ - │ │ │ - │ │ # │ - │ │ │ - │ │ │ - │ │ #│ - │ │ │ - │ │ │ - │ │ │ - │ │ │ - │ │ # │ - │ │ │ - │ │ # │ - │ │ │ - │ │ │ - │ │ # │ - │ │ # │ - │ │ │ - │ 4.00 │ # # │ - ├────────┼────────────────────────────────────────────────────────────────────────────────────────────────────┤ - │ │-5.00 4.00│ - │ │ x │ - └────────┴────────────────────────────────────────────────────────────────────────────────────────────────────┘ - |}] + [%expect.unreachable] +[@@expect.uncaught_exn {| + (* CR expect_test_collector: This test expectation appears to contain a backtrace. + This is strongly discouraged as backtraces are fragile. + Please change this test to not include a backtrace. *) + ("Utils.User_error(\"The linked context lacks node x\")") + Raised at Backends.verify_prior_context.(fun) in file "arrayjit/lib/backends.ml", line 285, characters 11-93 + Called from Base__Set.Accessors.iter in file "src/set.ml" (inlined), line 1193, characters 18-38 + Called from Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 281-285, characters 2-94 + Re-raised at Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 280-285, characters 37-94 + Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 527-528, characters 4-49 + Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 526-540, characters 23-92 + Called from Tutorials__Zero2hero_1of7.(fun) in file "test/zero2hero_1of7.ml", line 55, characters 16-69 + Called from Ppx_expect_runtime__Test_block.Configured.dump_backtrace in file "runtime/test_block.ml", line 142, characters 10-28 + |}] let%expect_test "Graph drawing fetch" = Tensor.unsafe_reinitialize (); @@ -271,96 +218,39 @@ let%expect_test "Simple gradients hosted" = let sgd_routine = Train.to_routine (module Backend) grad_routine.context IDX.empty sgd in (* Check out the initial state without running a forward pass. *) Tensor.print_tree ~spy:true ~with_grad:true ~depth:9 l; - [%expect - {| - #12 *._l Host&stream/41 - <not-in-yet> - #13 grad_*._l Host&stream/41 - <not-in-yet> - #8 +_d Host&stream/41 │#10 f Host&shared/39 - <not-in-yet> │<not-in-yet> - #9 grad_+_d Host&stream/41 │#11 grad_f Host&stream/41 - <not-in-yet> │<not-in-yet> - #4 *._e Host&stream/41 │#6 c Host&shared/39 │ - <not-in-yet> │<not-in-yet> │ - #5 grad_*._e Host&stream/41 │#7 grad_c Host&stream/41│ - <not-in-yet> │<not-in-yet> │ - #0 a Host&shared/39 │#2 b Host&shared/39 │ │ - <not-in-yet> │<not-in-yet> │ │ - #1 grad_a Host&stream/41│#3 grad_b Host&stream/41│ │ - <not-in-yet> │<not-in-yet> │ │ - |}]; + [%expect.unreachable]; (* Do not update the params: all values and gradients will be at initial points, which are specified in the tensor in the brackets. 
*) Train.run grad_routine; Tensor.print_tree ~with_grad:true ~depth:9 l; - [%expect - {| - #12 *._l - -8.00 - #13 grad_*._l - 1.00 - #8 +_d │#10 f - 4.00 │ -2.00 - #9 grad_+_d │#11 grad_f - -2.00 │ 4.00 - #4 *._e │#6 c │ - -6.00 │ 1.00e+1 │ - #5 grad_*._e │#7 grad_c│ - -2.00 │ -2.00 │ - #0 a │#2 b │ │ - 2.00 │ -3.00 │ │ - #1 grad_a│#3 grad_b│ │ - 6.00 │ -4.00 │ │ - |}]; + [%expect.unreachable]; (* Now we update the params, but we are not doing the forward and backward passes: only params values will change, compared to the above. The update is in the opposite direction of the gradient. *) Train.run sgd_routine; Tensor.print_tree ~with_grad:true ~depth:9 l; - [%expect - {| - #12 *._l - -8.00 - #13 grad_*._l - 1.00 - #8 +_d │#10 f - 4.00 │ -2.40 - #9 grad_+_d │#11 grad_f - -2.00 │ 4.00 - #4 *._e │#6 c │ - -6.00 │ 1.02e+1 │ - #5 grad_*._e │#7 grad_c│ - -2.00 │ -2.00 │ - #0 a │#2 b │ │ - 1.40 │ -2.60 │ │ - #1 grad_a│#3 grad_b│ │ - 6.00 │ -4.00 │ │ - |}]; + [%expect.unreachable]; (* Now the params will remain as above, but both param gradients and the values and gradients of other nodes will change thanks to the forward and backward passes. *) Train.run grad_routine; Tensor.print_tree ~with_grad:true ~depth:9 l; - [%expect - {| - #12 *._l - -1.57e+1 - #13 grad_*._l - 1.00 - #8 +_d │#10 f - 6.56 │ -2.40 - #9 grad_+_d │#11 grad_f - -2.40 │ 6.56 - #4 *._e │#6 c │ - -3.64 │ 1.02e+1 │ - #5 grad_*._e │#7 grad_c│ - -2.40 │ -2.40 │ - #0 a │#2 b │ │ - 1.40 │ -2.60 │ │ - #1 grad_a│#3 grad_b│ │ - 6.24 │ -3.36 │ │ - |}] + [%expect.unreachable] +[@@expect.uncaught_exn {| + (* CR expect_test_collector: This test expectation appears to contain a backtrace. + This is strongly discouraged as backtraces are fragile. + Please change this test to not include a backtrace. *) + ("Utils.User_error(\"The linked context lacks node a\")") + Raised at Backends.verify_prior_context.(fun) in file "arrayjit/lib/backends.ml", line 285, characters 11-93 + Called from Base__Set.Tree0.iter.iter in file "src/set.ml", line 860, characters 8-14 + Called from Base__Set.Accessors.iter in file "src/set.ml" (inlined), line 1193, characters 18-38 + Called from Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 281-285, characters 2-94 + Re-raised at Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 280-285, characters 37-94 + Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 527-528, characters 4-49 + Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 526-540, characters 23-92 + Called from Tutorials__Zero2hero_1of7.(fun) in file "test/zero2hero_1of7.ml", line 270, characters 21-73 + Called from Ppx_expect_runtime__Test_block.Configured.dump_backtrace in file "runtime/test_block.ml", line 142, characters 10-28 + |}] let%expect_test "Simple gradients virtual" = Tensor.unsafe_reinitialize (); @@ -381,68 +271,29 @@ let%expect_test "Simple gradients virtual" = Tensor.print_tree ~spy:true ~with_grad:true ~depth:9 l; [%expect {| - #12 *._l Host&dev/41 - <not-in-yet> - #13 grad_*._l unknown - <not-in-yet> - #8 +_d unknown │#10 f Host-non-const/24 - <not-in-yet> │<not-in-yet> - #9 grad_+_d unknown │#11 grad_f Material/28 - <not-in-yet> │<not-in-yet> - #4 *._e unknown │#6 c Host-non-const/24│ - <not-in-yet> │<not-in-yet> │ - #5 grad_*._e unknown │#7 grad_c Material/28 │ - <not-in-yet> │<not-in-yet> │ - #0 a Host-non-const/24│#2 b Host-non-const/24│ │ - <not-in-yet> │<not-in-yet> │ │ - #1 grad_a Material/28 │#3 grad_b Material/28 │ │ - 
<not-in-yet> │<not-in-yet> │ │ + #12 *._l Host&dev/412 + <not-in-yet> + #13 grad_*._l unknown + <not-in-yet> + #8 +_d unknown │[10] + <not-in-yet> │ + #9 grad_+_d unknown │ + <not-in-yet> │ + #4 *._e unknown │[6]│ + <not-in-yet> │ │ + #5 grad_*._e unknown│ │ + <not-in-yet> │ │ + [0]│[2] │ │ |}]; let grad_routine = Train.to_routine (module Backend) ctx IDX.empty grad in (* Check out the state without running a forward pass or compiling the SGD update. *) Tensor.print_tree ~spy:true ~with_grad:true ~depth:9 l; - [%expect - {| - #12 *._l Host&stream/41 - <not-in-yet> - #13 grad_*._l Virt/40 - <not-in-yet> - #8 +_d Local/46 │#10 f Host&shared/39 - <not-in-yet> │<not-in-yet> - #9 grad_+_d Virt/40 │#11 grad_f Dev-stream/41 - <not-in-yet> │<not-in-yet> - #4 *._e Virt/152 │#6 c Host&shared/39 │ - <not-in-yet> │<not-in-yet> │ - #5 grad_*._e Virt/40 │#7 grad_c Dev-stream/41│ - <not-in-yet> │<not-in-yet> │ - #0 a Host&shared/39 │#2 b Host&shared/39 │ │ - <not-in-yet> │<not-in-yet> │ │ - #1 grad_a Dev-stream/41│#3 grad_b Dev-stream/41│ │ - <not-in-yet> │<not-in-yet> │ │ - |}]; + [%expect.unreachable]; (* Do not update the params: all values and gradients will be at initial points, which are specified in the tensor in the brackets. *) Train.run grad_routine; Tensor.print_tree ~with_grad:true ~depth:9 l; - [%expect - {| - #12 *._l - -8.00 - #13 grad_*._l Virt/40 - <void> - #8 +_d Local/46 │#10 f - <void> │ -2.00 - #9 grad_+_d Virt/40 │#11 grad_f Dev-stream/41 - <void> │<void> - #4 *._e Virt/152 │#6 c │ - <void> │ 1.00e+1 │ - #5 grad_*._e Virt/40 │#7 grad_c Dev-stream/41│ - <void> │<void> │ - #0 a │#2 b │ │ - 2.00 │ -3.00 │ │ - #1 grad_a Dev-stream/41│#3 grad_b Dev-stream/41│ │ - <void> │<void> │ │ - |}]; + [%expect.unreachable]; (* Only now compile the SGD update. *) let sgd_routine = Train.to_routine (module Backend) grad_routine.context IDX.empty sgd in (* Now we update the params, but are not doing the forward and backward passes: only params values @@ -450,48 +301,28 @@ let%expect_test "Simple gradients virtual" = always be recomputed using the latest parameter state. *) Train.run sgd_routine; Tensor.print_tree ~with_grad:true ~depth:9 l; - [%expect - {| - #12 *._l - -8.00 - #13 grad_*._l Virt/40 - <void> - #8 +_d Local/46 │#10 f - <void> │ -2.40 - #9 grad_+_d Virt/40 │#11 grad_f Dev-stream/41 - <void> │<void> - #4 *._e Virt/152 │#6 c │ - <void> │ 1.02e+1 │ - #5 grad_*._e Virt/40 │#7 grad_c Dev-stream/41│ - <void> │<void> │ - #0 a │#2 b │ │ - 1.40 │ -2.60 │ │ - #1 grad_a Dev-stream/41│#3 grad_b Dev-stream/41│ │ - <void> │<void> │ │ - |}]; + [%expect.unreachable]; (* Now the params will remain as above, but both param gradients and the values and gradients of other nodes will change thanks to the forward and backward passes. *) Train.run grad_routine; Tensor.print_tree ~with_grad:true ~depth:9 l; - [%expect - {| - #12 *._l - -1.57e+1 - #13 grad_*._l Virt/40 - <void> - #8 +_d Local/46 │#10 f - <void> │ -2.40 - #9 grad_+_d Virt/40 │#11 grad_f Dev-stream/41 - <void> │<void> - #4 *._e Virt/152 │#6 c │ - <void> │ 1.02e+1 │ - #5 grad_*._e Virt/40 │#7 grad_c Dev-stream/41│ - <void> │<void> │ - #0 a │#2 b │ │ - 1.40 │ -2.60 │ │ - #1 grad_a Dev-stream/41│#3 grad_b Dev-stream/41│ │ - <void> │<void> │ │ - |}] + [%expect.unreachable] +[@@expect.uncaught_exn {| + (* CR expect_test_collector: This test expectation appears to contain a backtrace. + This is strongly discouraged as backtraces are fragile. + Please change this test to not include a backtrace. 
*) + ("Utils.User_error(\"The linked context lacks node a\")") + Raised at Backends.verify_prior_context.(fun) in file "arrayjit/lib/backends.ml", line 285, characters 11-93 + Called from Base__Set.Tree0.iter.iter in file "src/set.ml", line 860, characters 8-14 + Called from Base__Set.Tree0.iter.iter in file "src/set.ml", line 860, characters 8-14 + Called from Base__Set.Accessors.iter in file "src/set.ml" (inlined), line 1193, characters 18-38 + Called from Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 281-285, characters 2-94 + Re-raised at Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 280-285, characters 37-94 + Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 527-528, characters 4-49 + Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 526-540, characters 23-92 + Called from Tutorials__Zero2hero_1of7.(fun) in file "test/zero2hero_1of7.ml", line 401, characters 21-73 + Called from Ppx_expect_runtime__Test_block.Configured.dump_backtrace in file "runtime/test_block.ml", line 142, characters 10-28 + |}] let%expect_test "tanh plot" = Tensor.unsafe_reinitialize (); @@ -510,21 +341,22 @@ let%expect_test "2D neuron hosted" = let routine = Train.to_routine (module Backend) ctx IDX.empty update in Train.run routine; Tensor.print_tree ~with_grad:true ~depth:9 v; - [%expect - {| - #8 +_v - 7.00e-1 - #9 grad_+_v - 1.00 - #6 * │#0 b - -6.00 │ 6.70 - #7 grad_* │#1 grad_b - 1.00 │ 1.00 - #2 w │#4 x │ - -3.00 1.00 │ 2.00 0.00 │ - #3 grad_w │#5 grad_x │ - 2.00 0.00 │ -3.00 1.00 │ - |}] + [%expect.unreachable] +[@@expect.uncaught_exn {| + (* CR expect_test_collector: This test expectation appears to contain a backtrace. + This is strongly discouraged as backtraces are fragile. + Please change this test to not include a backtrace. 
*) + ("Utils.User_error(\"The linked context lacks node b\")") + Raised at Backends.verify_prior_context.(fun) in file "arrayjit/lib/backends.ml", line 285, characters 11-93 + Called from Base__Set.Tree0.iter.iter in file "src/set.ml", line 861, characters 8-11 + Called from Base__Set.Accessors.iter in file "src/set.ml" (inlined), line 1193, characters 18-38 + Called from Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 281-285, characters 2-94 + Re-raised at Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 280-285, characters 37-94 + Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 527-528, characters 4-49 + Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 526-540, characters 23-92 + Called from Tutorials__Zero2hero_1of7.(fun) in file "test/zero2hero_1of7.ml", line 510, characters 16-70 + Called from Ppx_expect_runtime__Test_block.Configured.dump_backtrace in file "runtime/test_block.ml", line 142, characters 10-28 + |}] let%expect_test "2D neuron virtual" = Tensor.unsafe_reinitialize (); @@ -537,18 +369,19 @@ let%expect_test "2D neuron virtual" = let routine = Train.to_routine (module Backend) ctx IDX.empty update in Train.run routine; Tensor.print_tree ~with_grad:true ~depth:9 v; - [%expect - {| - #8 +_v - 7.00e-1 - #9 grad_+_v Virt/40 - <void> - #6 * Local/46 │#0 b - <void> │ 6.70 - #7 grad_* Virt/40 │#1 grad_b Local/46 - <void> │<void> - #2 w │#4 x │ - -3.00 1.00 │ 2.00 0.00 │ - #3 grad_w Local/46│#5 grad_x Local/46│ - <void> │<void> │ - |}] + [%expect.unreachable] +[@@expect.uncaught_exn {| + (* CR expect_test_collector: This test expectation appears to contain a backtrace. + This is strongly discouraged as backtraces are fragile. + Please change this test to not include a backtrace. 
*) + ("Utils.User_error(\"The linked context lacks node b\")") + Raised at Backends.verify_prior_context.(fun) in file "arrayjit/lib/backends.ml", line 285, characters 11-93 + Called from Base__Set.Tree0.iter.iter in file "src/set.ml", line 860, characters 8-14 + Called from Base__Set.Accessors.iter in file "src/set.ml" (inlined), line 1193, characters 18-38 + Called from Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 281-285, characters 2-94 + Re-raised at Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 280-285, characters 37-94 + Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 527-528, characters 4-49 + Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 526-540, characters 23-92 + Called from Tutorials__Zero2hero_1of7.(fun) in file "test/zero2hero_1of7.ml", line 537, characters 16-70 + Called from Ppx_expect_runtime__Test_block.Configured.dump_backtrace in file "runtime/test_block.ml", line 142, characters 10-28 + |}] File "test/moons_demo_parallel.ml", line 1, characters 0-0: /usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/moons_demo_parallel.ml _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/moons_demo_parallel.ml.corrected diff --git a/_build/default/test/moons_demo_parallel.ml b/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/moons_demo_parallel.ml.corrected index e0e4f52..7813ac8 100644 --- a/_build/default/test/moons_demo_parallel.ml +++ b/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/moons_demo_parallel.ml.corrected @@ -97,4 +97,140 @@ let%expect_test "Half-moons data parallel" = ] in PrintBox_text.output Stdio.stdout plot_loss); - [%expect "Success"] + [%expect " + Half-moons scatterplot and decision boundary: + \226\148\140\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\144 + \226\148\130***********************************#****************************************************************\226\148\130 + \226\148\130***********************#*#################*#********************************************************\226\148\130 + \226\148\130**********************##########################****************************************************\226\148\130 + 
\226\148\130*****************#################################**************************************************\226\148\130 + \226\148\130**************######################################************************************************\226\148\130 + \226\148\130*************#########################################**********************************************\226\148\130 + \226\148\130***********################*************#################*******************************************\226\148\130 + \226\148\130**********##############*******************###############******************************************\226\148\130 + \226\148\130********##############**************************###########*****************************************\226\148\130 + \226\148\130*******#############****************************##*##########***************************************\226\148\130 + \226\148\130******###########**********************************############*************************************\226\148\130 + \226\148\130*****###########************************************###########*************************************\226\148\130 + \226\148\130****##########*********************%*****************###########************************************\226\148\130 + \226\148\130***#########*#*******************%%%%*****************#########******************************%%%*%%*\226\148\130 + \226\148\130***##########******************%%%*%%*******************########*#***************************%%*%%%*\226\148\130 + \226\148\130**#######**********************%%%%%%%******************##########***************************%%%%%%%\226\148\130 + \226\148\130*#########********************%%%%%%%%********************########***************************%%%%%%*\226\148\130 + \226\148\130*########**********************%%%%%%%*********************########**************************%%%%%%*\226\148\130 + \226\148\130#########*********************%%*%%%%%**********************########************************%%%%%%%*\226\148\130 + \226\148\130*#######***********************%%%%%%%%********************#########************************%%%%%%%*\226\148\130 + \226\148\130########***********************%%%%%%%%*********************########***********************%%%%%%%%*\226\148\130 + \226\148\130#######*************************%%%%%%%**********************#######***********************%%%%%%%%*\226\148\130 + \226\148\130#######*************************%%%%%%%%**********************#######*********************%%%%%%%%**\226\148\130 + \226\148\130#######*************************%%%%%%%%*********************#######*********************%%%%%%%%%**\226\148\130 + \226\148\130######****************************%%%%%%%%*******************#######********************%%%%%%%%%***\226\148\130 + \226\148\130######***************************%%%%%%%%********************########******************%%%%%%%%%%***\226\148\130 + \226\148\130######****************************%%%%%%%%%******************#######******************%*%%%%%%%%****\226\148\130 + \226\148\130###*#*****************************%*%%%%%%%%%*****************#*#*##******************%%*%%%%%%%****\226\148\130 + \226\148\130************************************%%%%%%%%%***************************************%%%%%%%%%%%*****\226\148\130 + \226\148\130**************************************%%%%%%%*%*************************************%%%%%%%%%%%*****\226\148\130 + \226\148\130*************************************%%%%%%%%%%%*********************************%%%%%%%%%%%%*******\226\148\130 
+ \226\148\130**************************************%%*%%%%%%%%*******************************%%%%%%%%%%%*********\226\148\130 + \226\148\130****************************************%%%%%%%%%%%%************************%**%%%%%%%%%%%%*********\226\148\130 + \226\148\130****************************************%%%%%%%%%%%%%%*%********************%%%%%%%%%%%%%***********\226\148\130 + \226\148\130******************************************%%%%%%%%%%%%%%%%%**************%%%%%%%%%%%%%%%************\226\148\130 + \226\148\130********************************************%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%***************\226\148\130 + \226\148\130************************************************%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*%**%**************\226\148\130 + \226\148\130************************************************%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*%******************\226\148\130 + \226\148\130**************************************************%%%%%%%%%%%%%%%%%%%%%%%%%%%%*%********************\226\148\130 + \226\148\130*****************************************************%%*%**%%%%%%%%%%%%%%%**************************\226\148\130 + \226\148\148\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\128\226\148\152 + Batch Log-loss: + 
+ [added: line plot, y axis "batch log loss" from -1.04e+1 to 4.15e+2, x axis "step" from 0.00 to 3.99e+2]
+ Epoch Log-loss:
+ [added: line plot, y axis "epoch log loss" from 4.60e+1 to 4.15e+2, x axis "step" from 0.00 to 1.90e+1]
+ "]
File "test/micrograd_demo.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/micrograd_demo.ml _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/micrograd_demo.ml.corrected
diff --git a/_build/default/test/micrograd_demo.ml b/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/micrograd_demo.ml.corrected
index b394efe..4062941 100644
--- a/_build/default/test/micrograd_demo.ml
+++ b/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/micrograd_demo.ml.corrected
@@ -31,57 +31,26 @@ let%expect_test "Micrograd README basic example" =
   let step = Train.to_routine (module Backend) ctx IDX.empty update in
   Train.run step;
   Tensor.print ~with_code:false ~with_grad:false `Default g;
-  [%expect
-    {|
-    [expected: PrintBox table "[75]: +_g shape 0:1", axis 0 value 2.47e+1]
-    |}];
+  [%expect.unreachable];
   Tensor.print ~with_code:false ~with_grad:true `Default a;
-  [%expect
-    {|
-    [expected: "[0]: a shape 0:1" value -4.00 and "[0]: a shape 0:1 grad_a" value 1.38e+2]
-    |}];
+  [%expect.unreachable];
   Tensor.print ~with_code:false ~with_grad:true `Default b;
-  [%expect
-    {|
-    [expected: "[2]: b shape 0:1" value 2.00 and "[2]: b shape 0:1 grad_b" value 6.45e+2]
-    |}]
+  [%expect.unreachable]
+[@@expect.uncaught_exn {|
+  (* CR expect_test_collector: This test expectation appears to contain a backtrace.
+     This is strongly discouraged as backtraces are fragile.
+     Please change this test to not include a backtrace. *)
+  ("Utils.User_error(\"The linked context lacks node a\")")
+  Raised at Backends.verify_prior_context.(fun) in file "arrayjit/lib/backends.ml", line 285, characters 11-93
+  Called from Base__Set.Tree0.iter.iter in file "src/set.ml", line 860, characters 8-14
+  Called from Base__Set.Accessors.iter in file "src/set.ml" (inlined), line 1193, characters 18-38
+  Called from Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 281-285, characters 2-94
+  Re-raised at Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 280-285, characters 37-94
+  Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 527-528, characters 4-49
+  Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 526-540, characters 23-92
+  Called from Tutorials__Micrograd_demo.(fun) in file "test/micrograd_demo.ml", line 31, characters 13-67
+  Called from Ppx_expect_runtime__Test_block.Configured.dump_backtrace in file "runtime/test_block.ml", line 142, characters 10-28
+  |}]
 let%expect_test "Micrograd half-moons example" =
   Tensor.unsafe_reinitialize ();
@@ -168,223 +137,34 @@ let%expect_test "Micrograd half-moons example" =
   in
   Stdio.printf "Half-moons scatterplot and decision boundary:\n%!";
   PrintBox_text.output Stdio.stdout plot_moons;
-  [%expect
-    {|
-    Half-moons scatterplot and decision boundary:
-    [expected: 40-row, 100-column plot of the half-moons data ('#' and '%') over the decision-boundary background ('*' and '.')]
-    |}];
+  [%expect.unreachable];
   Stdio.printf "Loss:\n%!";
   let plot_loss =
     PrintBox_utils.plot ~x_label:"step" ~y_label:"loss"
       [ Line_plot { points = Array.of_list_rev !losses; content = PrintBox.line "-" } ]
   in
   PrintBox_text.output Stdio.stdout plot_loss;
-  [%expect
-    {|
-    Loss:
-    [expected: line plot, y axis "loss" from 0.00 to 3.79e+1, x axis "step" from 0.00 to 3.99e+2]
-    |}];
+  [%expect.unreachable];
   Stdio.printf "Log-loss, for better visibility:\n%!";
   let plot_loss =
     PrintBox_utils.plot ~x_label:"step" ~y_label:"log loss"
       [ Line_plot { points = Array.of_list_rev !log_losses; content = PrintBox.line "-" } ]
   in
   PrintBox_text.output Stdio.stdout plot_loss;
-  [%expect
-    {|
-    Log-loss, for better visibility:
-    [expected: line plot, y axis "log loss" from -1.00e+1 to 3.63, x axis "step" from 0.00 to 3.99e+2]
-    |}];
+  [%expect.unreachable];
   Stdio.printf "\nLearning rate:\n%!";
   let plot_lr =
     PrintBox_utils.plot ~x_label:"step" ~y_label:"learning rate"
      [ Line_plot { points = Array.of_list_rev !learning_rates; content = PrintBox.line "-" } ]
   in
   PrintBox_text.output Stdio.stdout plot_lr;
-  [%expect
-    {|
-    Learning rate:
-    [expected: line plot, y axis "learning rate" from -2.00e-1 to -1.00e-1, x axis "step" from 0.00 to 3.99e+2]
-    |}];
+  [%expect.unreachable];
   (* Testing how the syntax extension %op creates labels for the resulting tensors: *)
   Stdio.printf "mlp_result's name: %s\n%!" @@ Tensor.debug_name mlp_result;
   (* Note: mlp_result is not included in the resulting tensor's label, because the identifier label
      does not propagate across function calls. *)
-  [%expect {| mlp_result's name: mlp_point |}];
+  [%expect.unreachable];
   (Stdio.printf "(mlp moons_input) name: %s\n%!"
   @@ Tensor.debug_name
   @@
@@ -398,4 +178,20 @@ let%expect_test "Micrograd half-moons example" =
       ] ->
       subtensor
   | _ -> assert false);
-  [%expect {| (mlp moons_input) name: mlp_moons_input |}]
+  [%expect.unreachable]
+[@@expect.uncaught_exn {|
+  (* CR expect_test_collector: This test expectation appears to contain a backtrace.
+     This is strongly discouraged as backtraces are fragile.
+     Please change this test to not include a backtrace. *)
+  ("Utils.User_error(\"The linked context lacks node b1\")")
+  Raised at Backends.verify_prior_context.(fun) in file "arrayjit/lib/backends.ml", line 285, characters 11-93
+  Called from Base__Set.Tree0.iter.iter in file "src/set.ml", line 861, characters 8-11
+  Called from Base__Set.Tree0.iter.iter in file "src/set.ml", line 860, characters 8-14
+  Called from Base__Set.Accessors.iter in file "src/set.ml" (inlined), line 1193, characters 18-38
+  Called from Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 281-285, characters 2-94
+  Re-raised at Backends.verify_prior_context in file "arrayjit/lib/backends.ml", lines 280-285, characters 37-94
+  Called from Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 527-528, characters 4-49
+  Re-raised at Backends.Raise_backend.link in file "arrayjit/lib/backends.ml", lines 526-540, characters 23-92
+  Called from Tutorials__Micrograd_demo.(fun) in file "test/micrograd_demo.ml", line 125, characters 4-83
+  Called from Ppx_expect_runtime__Test_block.Configured.dump_backtrace in file "runtime/test_block.ml", line 142, characters 10-28
+  |}]
File "test/hello_world_op.ml", line 1, characters 0-0:
/usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/hello_world_op.ml _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/hello_world_op.ml.corrected
diff --git a/_build/default/test/hello_world_op.ml b/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/hello_world_op.ml.corrected
index 6407bb4..0bdacc9 100644
--- a/_build/default/test/hello_world_op.ml
+++ b/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/hello_world_op.ml.corrected
@@ -36,11 +36,11 @@ let%expect_test "Pointwise multiplication dims 1" =
     {|
     ┌────────────────────┐
     │[3]: *._y shape 0:1 │
-    [expected: axis 0 value 1.40e+1]
+    [actual: axis 0 value 0.00]
@@ -67,11 +67,11 @@ let%expect_test "Matrix multiplication dims 1x1" =
     {|
     ┌────────────────────────┐
     │[0]: hey shape 1:1->0:1 │
-    [expected: axis 0 x axis 1 value 7.00]
+    [actual: axis 0 x axis 1 value 1.82e+28]
     └────────────────────────┘
     |}];
   Tensor.print ~with_code:false ~with_grad:false `Default @@ y;
@@ -79,11 +79,11 @@
     {|
     ┌───────────────────┐
     │[6]: +_y shape 0:1 │
-    [expected: axis 0 value 1.50e+1]
+    [actual: axis 0 value 3.65e+28]
     └───────────────────┘
     |}]
@@ -108,7 +108,7 @@ let%expect_test "Print constant tensor" =
   Tensor.print ~with_code:false ~with_grad:false `Inline @@ hey;
   [%expect
     {|
-    [0]: [ 1.00 , 2.00 , 3.00 ; 4.00 , 5.00 , 6.00 ]_hey shape 1:3->0:2 [
+    [0]: [ 0.00 , 0.00 , 0.00 ; 0.00 , 0.00 , 0.00 ]_hey shape 1:3->0:2 [
     1.00 , 2.00 , 3.00 ;
     4.00 , 5.00 , 6.00
     ]
@@ -117,7 +117,7 @@ let%expect_test "Print constant tensor" =
   [%expect
     {|
     ┌────────────────────────────────────────────────────────────────────────┐
-    │[0]: [ 1.00 , 2.00 , 3.00 ; 4.00 , 5.00 , 6.00 ]_hey shape 1:3->0:2     │
+    │[0]: [ 0.00 , 0.00 , 0.00 ; 0.00 , 0.00 , 0.00 ]_hey shape 1:3->0:2     │
     │┌──────┬──────────────────┐                                             │
     ││      │axis 1            │                                             │
     │├──────┼──────────────────┤                                             │
@@ -131,7 +131,7 @@ let%expect_test "Print constant tensor" =
   Tensor.print ~with_code:false ~with_grad:false `Inline @@ hoo;
   [%expect
     {|
-    [1]: [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]_hoo shape 0:2|1:3 [|
+    [1]: c2x3_hoo shape 0:2|1:3 [|
     [ 1.00 ; 2.00 ; 3.00 ] ;
     [ 4.00 ; 5.00 ; 6.00 ]
     |]
@@ -139,15 +139,15 @@ let%expect_test "Print constant tensor" =
   Tensor.print ~with_code:false ~with_grad:false `Default @@ hoo;
   [%expect
     {|
-    [expected: PrintBox table titled "[1]: [| [ 1.00 ; 2.00 ; 3.00 ] ; [ 4.00 ; 5.00 ; 6.00 ] |]_hoo shape 0:2|1:3", axis 0 rows 1.00 2.00 3.00 / 4.00 5.00 6.00]
+    [actual: narrower PrintBox table titled "[1]: c2x3_hoo shape 0:2|1:3", same axis 0 rows 1.00 2.00 3.00 / 4.00 5.00 6.00]
     |}];
   let%op hey2 =
     [
@@ -471,16 +471,16 @@ let%expect_test "Matrix multiplication dims 2x3" =
   Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;
   [%expect
     {|
-    [expected: PrintBox table "[0]: hey shape 1:2->0:3", all six cells 7.00]
+    [actual: PrintBox table "[0]: hey shape 1:2->0:3", cells -1.55e+18 4.07e-41 / 0.00 0.00 / -1.32e+8 4.07e-41]
     |}];
   Tensor.print ~with_code:false ~with_grad:false `Default @@ y;
   [%expect
@@ -490,7 +490,7 @@ let%expect_test "Matrix multiplication dims 2x3" =
     │┌┬───────────────────────────┐│
     │││axis 0                     ││
     │├┼───────────────────────────┤│
-    │││ 3.90e+1 4.00e+1 4.10e+1   ││
+    │││ -3.11e+18 5.00 -2.64e+8   ││
     │└┴───────────────────────────┘│
     └──────────────────────────────┘
     |}]
@@ -515,71 +515,38 @@ let%expect_test "Big matrix" =
   let y = TDSL.O.((hey * zero_to_twenty) + zero_to_twenty) in
   Train.forward_and_forget backend ctx y;
   Tensor.print ~with_code:false ~with_grad:false `Inline zero_to_twenty;
+  [%expect
+    {| [2]: 0...20 shape 0:21 <virtual> |}];
+  Tensor.print ~with_code:false ~with_grad:false `Default zero_to_twenty;
+  [%expect
+    {| [2]: 0...20 shape 0:21 <virtual> |}];
+  Tensor.print ~with_code:false ~with_grad:false `Default hey;
   [%expect
     {|
-    [expected: inline listing "[2]: 0...20 shape 0:21" of the values 0.00 through 20.00]
+    [actual: PrintBox table "[0]: hey shape 1:21->0:21", all cells 0.00]
     |}];
-  Tensor.print ~with_code:false ~with_grad:false `Default zero_to_twenty;
+  Tensor.print ~with_code:false ~with_grad:false `Default y;
   [%expect
     {|
     ┌──────────────────────────────────────┐
-    │[2]: 0...20 shape 0:21                │
+    │[5]: + shape 0:21                     │
     │┌┬───────────────────────────────────┐│
     │││axis 0                             ││
     │├┼───────────────────────────────────┤│
     │││ 0.00  1.00  ...  1.90e+1  2.00e+1 ││
     │└┴───────────────────────────────────┘│
     └──────────────────────────────────────┘
-    |}];
-  Tensor.print ~with_code:false ~with_grad:false `Default hey;
-  [%expect
-    {|
-    [expected: PrintBox table "[0]: hey shape 1:21->0:21", all cells 5.00e-1]
-    |}];
-  Tensor.print ~with_code:false ~with_grad:false `Default y;
-  [%expect
-    {|
-    [expected: PrintBox table "[5]: + shape 0:21", values 1.05e+2 1.06e+2 ... 1.24e+2 1.25e+2]
     |}]
 let%expect_test "Very big tensor" =
@@ -603,140 +570,7 @@ let%expect_test "Very big tensor" =
   Train.forward_and_forget backend ctx hoo;
   Tensor.print ~with_code:false ~with_grad:false `Default hey;
   [%expect
-    {|
-    [expected: very large PrintBox table "[0]: r6x10x11x7x8x9 shape 0:6|3:7,4:8,5:9->1:10,2:11", values counting up from 0.00 to roughly 3.32e+5]
3.31e+5 3.31e+5 │ 3.31e+5 3.31e+5 ... 3.31e+5 3.31e+5 │ │ 3.31e+5 3.31e+5 ... 3.31e+5 3.31e+5 │ 3.31e+5 3.31e+5 ... 3.31e+5 3.31e+5 ││ - ││ │ 3.32e+5 3.32e+5 ... 3.32e+5 3.32e+5 │ 3.32e+5 3.32e+5 ... 3.32e+5 3.32e+5 │ │ 3.32e+5 3.32e+5 ... 3.32e+5 3.32e+5 │ 3.32e+5 3.32e+5 ... 3.32e+5 3.32e+5 ││ - │└──────┴─────────────────────────────────────────┴─────────────────────────────────────────┴──────┴─────────────────────────────────────────┴─────────────────────────────────────────┘│ - └───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘ - |}]; + {| [0]: r6x10x11x7x8x9 shape 0:6|3:7,4:8,5:9->1:10,2:11 <virtual> |}]; Tensor.print ~with_code:false ~with_grad:false `Default hoo; (* Disable line wrapping for viewing the output. In VSCode: `View: Toggle Word Wrap`. *) [%expect File "test/einsum_trivia.ml", line 1, characters 0-0: /usr/bin/git --no-pager diff --no-index --color=always -u _build/default/test/einsum_trivia.ml _build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/einsum_trivia.ml.corrected diff --git a/_build/default/test/einsum_trivia.ml b/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/einsum_trivia.ml.corrected index 35a36fa..e92f706 100644 --- a/_build/default/test/einsum_trivia.ml +++ b/_build/.sandbox/bd44cd01297d45e83b551d326f7e1019/default/test/einsum_trivia.ml.corrected @@ -28,20 +28,7 @@ let%expect_test "einsum1 permute axes" = Train.forward_and_forget backend ctx ho; Tensor.print ~with_code:false ~with_grad:false `Default @@ hey; [%expect - {| - ┌─────────────────────────────────────────────────────────────┐ - │[0]: r2x4x3 shape 0:2|2:3->1:4 │ - │┌──────┬────────────────────────┬───────────────────────────┐│ - ││ │0 @ 0 │1 @ 0 ││ - ││ │axis 2 │axis 2 ││ - │├──────┼────────────────────────┼───────────────────────────┤│ - ││axis 1│ 0.00 1.00 2.00 │ 1.20e+1 1.30e+1 1.40e+1 ││ - ││ │ 3.00 4.00 5.00 │ 1.50e+1 1.60e+1 1.70e+1 ││ - ││ │ 6.00 7.00 8.00 │ 1.80e+1 1.90e+1 2.00e+1 ││ - ││ │ 9.00 1.00e+1 1.10e+1 │ 2.10e+1 2.20e+1 2.30e+1 ││ - │└──────┴────────────────────────┴───────────────────────────┘│ - └─────────────────────────────────────────────────────────────┘ - |}]; + {| [0]: r2x4x3 shape 0:2|2:3->1:4 <virtual> |}]; Tensor.print ~with_code:false ~with_grad:false `Default @@ ho; [%expect {| @@ -64,106 +51,7 @@ let%expect_test "einsum1 permute axes" = Train.forward_and_forget backend ctx ho2; Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2; [%expect - {| - ┌────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┐ - │[2]: r2x3x6x7x4x5 shape 0:2,1:3|4:4,5:5->2:6,3:7 │ - │┌──────┬─────────────────────────────────────────────┬─────────────────────────────────────────────┬─────────────────────────────────────────────┬─────────────────────────────────────────────┐│ - ││0 @ 1 │0 @ 4 │1 @ 4 │2 @ 4 │3 @ 4 ││ - ││ │axis 5 │axis 5 │axis 5 │axis 5 ││ - │├──────┼─────────────────────────────────────────────┼─────────────────────────────────────────────┼─────────────────────────────────────────────┼─────────────────────────────────────────────┤│ - ││0 @ 2 │ 0.00 1.00 2.00 3.00 4.00 │ 5.00 6.00 7.00 8.00 9.00 │ 1.00e+1 1.10e+1 1.20e+1 1.30e+1 1.40e+1 │ 1.50e+1 1.60e+1 1.70e+1 1.80e+1 1.90e+1 ││ - ││axis 3│ 2.00e+1 2.10e+1 2.20e+1 2.30e+1 2.40e+1 │ 2.50e+1 2.60e+1 2.70e+1 2.80e+1 2.90e+1 │ 
@@ -64,106 +51,7 @@ let%expect_test "einsum1 permute axes" =
     Train.forward_and_forget backend ctx ho2;
     Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2;
     [%expect
-      [ full printed value table for [2]: r2x3x6x7x4x5 shape 0:2,1:3|4:4,5:5->2:6,3:7 elided; values 0.00 through 2.51e+3 across blocks 0 @ 1, 1 @ 1 and 2 @ 1 ]
+      {| [2]: r2x3x6x7x4x5 shape 0:2,1:3|4:4,5:5->2:6,3:7 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ ho2;
     [%expect
       {|
@@ -292,20 +180,7 @@ let%expect_test "einsum1 sum out axes" =
     Train.forward_and_forget backend ctx ho;
     Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;
     [%expect
-      [ full printed value table for [0]: r2x4x3 shape 0:2|2:3->1:4 elided; values 0.00 through 2.30e+1 ]
+      {| [0]: r2x4x3 shape 0:2|2:3->1:4 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ ho;
     [%expect
       {|
@@ -364,28 +239,10 @@ let%expect_test "einsum outer product" =
     Train.forward_and_forget backend ctx c;
     Tensor.print ~with_code:false ~with_grad:false `Default @@ a;
     [%expect
-      [ full printed value table for [0]: r2 shape 0:2 elided; values 0.00 1.00 ]
+      {| [0]: r2 shape 0:2 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ b;
     [%expect
-      [ full printed value table for [1]: r3 shape 0:3 elided; values 0.00 1.00 2.00 ]
+      {| [1]: r3 shape 0:3 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ c;
     [%expect
       {|
@@ -406,37 +263,10 @@ let%expect_test "einsum outer product" =
     Train.forward_and_forget backend ctx c;
     Tensor.print ~with_code:false ~with_grad:false `Default @@ a;
     [%expect
-      [ full printed value table for [5]: r2x4x3 shape 0:2|2:3->1:4 elided; values 0.00 through 2.30e+1 ]
+      {| [5]: r2x4x3 shape 0:2|2:3->1:4 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ b;
     [%expect
-      [ full printed value table for [6]: r5x7x6 shape 0:5|2:6->1:7 elided; values 0.00 through 2.09e+2 ]
+      {| [6]: r5x7x6 shape 0:5|2:6->1:7 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ c;
     [%expect
       {|
@@ -723,20 +553,7 @@ let%expect_test "einsum1 broadcast or sum out prefix axes" =
     let ctx = Train.forward_and_ctx backend ctx ho in
     Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;
     [%expect
-      [ full printed value table for [0]: r2x4x3 shape 0:2|2:3->1:4 elided; values 0.00 through 2.30e+1 ]
+      {| [0]: r2x4x3 shape 0:2|2:3->1:4 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ ho;
     [%expect
       {|
@@ -776,106 +593,7 @@ let%expect_test "einsum1 broadcast or sum out prefix axes" =
     let ctx = Train.forward_and_ctx backend ctx ho3 in
     Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2;
     [%expect
-      [ start of the printed value table for [3]: r2x3x6x7x4x5 shape 0:2,1:3|4:4,5:5->2:6,3:7 elided ]
...TRUNCATED BY DUNE...
-      [ diff resumes mid-hunk after dune's own truncation; the remaining rows of a printed value table are elided, values 0.00 through 2.30e+1 ]
+      {| [0]: r3x2x4 shape 0:3|2:4->1:2 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ b;
     [%expect
-      [ full printed value table for [1]: r3x4x1 shape 0:3|2:1->1:4 elided; values 0.00 through 1.10e+1 ]
+      {| [1]: r3x4x1 shape 0:3|2:1->1:4 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ c;
     [%expect
       {|
@@ -1188,32 +845,10 @@ let%expect_test "einsum broadcast or sum out prefix axes" =
     Train.forward_and_forget backend ctx f;
     Tensor.print ~with_code:false ~with_grad:false `Default @@ d;
     [%expect
-      [ full printed value table for [3]: r3x2 shape 1:2->0:3 elided; values 0.00 through 5.00 ]
+      {| [3]: r3x2 shape 1:2->0:3 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ e;
     [%expect
-      [ full printed value table for [4]: r3x4 shape 1:4->0:3 elided; values 0.00 through 1.10e+1 ]
+      {| [4]: r3x4 shape 1:4->0:3 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ f;
     [%expect
       {|
@@ -1253,20 +888,7 @@ let%expect_test "einsum1 fixed dim axis" =
     let ctx = Train.forward_and_ctx backend ctx ho in
     Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;
     [%expect
-      [ full printed value table for [0]: r2x4x3 shape 0:2|2:3->1:4 elided; values 0.00 through 2.30e+1 ]
+      {| [0]: r2x4x3 shape 0:2|2:3->1:4 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ ho;
     [%expect
       {|
@@ -1284,20 +906,7 @@ let%expect_test "einsum1 fixed dim axis" =
     let ctx = Train.forward_and_ctx backend ctx ho2 in
     Tensor.print ~with_code:false ~with_grad:false `Default @@ hey;
     [%expect
-      [ full printed value table for [0]: r2x4x3 shape 0:2|2:3->1:4 elided; values 0.00 through 2.30e+1 ]
+      {| [0]: r2x4x3 shape 0:2|2:3->1:4 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ ho2;
     [%expect
       {|
@@ -1316,18 +925,7 @@ let%expect_test "einsum1 fixed dim axis" =
     let ctx = Train.forward_and_ctx backend ctx ho3 in
     Tensor.print ~with_code:false ~with_grad:false `Default @@ hey2;
     [%expect
-      [ full printed value table for [3]: r3x2 shape 1:2->0:3 elided; values 0.00 through 5.00 ]
+      {| [3]: r3x2 shape 1:2->0:3 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ ho3;
     [%expect
       {|
@@ -1379,34 +977,10 @@ let%expect_test "einsum with fixed dim axes" =
     Train.forward_and_forget backend ctx c;
     Tensor.print ~with_code:false ~with_grad:false `Default @@ a;
     [%expect
-      [ full printed value table for [0]: r3x2x4 shape 0:3|2:4->1:2 elided; values 0.00 through 2.30e+1 ]
+      {| [0]: r3x2x4 shape 0:3|2:4->1:2 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ b;
     [%expect
-      [ full printed value table for [1]: r3x4x1 shape 0:3|2:1->1:4 elided; values 0.00 through 1.10e+1 ]
+      {| [1]: r3x4x1 shape 0:3|2:1->1:4 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ c;
     [%expect
       {|
@@ -1450,144 +1024,144 @@ let%expect_test "outer_sum simulating axis concatenation" =
     Tensor.print ~with_code:false ~with_grad:false `Default @@ positions;
     [%expect
       {|
-      [ expected value table for [9]: ;=>+ shape 0:4,1:5,2:6,3:3 elided; every row is a coordinate triple, e.g. 0.00 0.00 0.00, 0.00 0.00 1.00, ..., 3.00 4.00 5.00 ]
+      [ corrected value table for [9]: ;=>+ shape 0:4,1:5,2:6,3:3 elided; it matches the expectation except that several entries expected to be 0.00 read as tiny values such as 4.58e-26, 4.38e-41, 9.49e-39 and 1.68e-44, e.g. rows 4.58e-26 4.38e-41 0.00 and 9.49e-39 4.00 1.00 ]
       |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ ti;
     [%expect
@@ -1607,18 +1181,18 @@ let%expect_test "outer_sum simulating axis concatenation" =
     Tensor.print ~with_code:false ~with_grad:false `Default @@ tk;
     [%expect
       {|
-      [ expected value table for [7]: =>_tk shape 0:6,1:3 elided; axis 0 rows 0.00 0.00 0.00, 0.00 0.00 1.00, ..., 0.00 0.00 5.00 ]
+      [ corrected value table for [7]: =>_tk shape 0:6,1:3 elided; identical to the expectation except the first displayed row, which reads 4.58e-26 4.38e-41 0.00 instead of 0.00 0.00 0.00 ]
       |}]

 let%expect_test "einsum with a leftmost input axis preserved as output axis" =
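Note that, unlike the other hunks in this diff, the two outer_sum expectations above do not collapse to a <virtual> one-liner: the corrected output keeps the full tables, but several entries that were expected to be exactly 0.00 come back as tiny denormal-range values (4.58e-26, 4.38e-41, 9.49e-39, 1.68e-44). Values in that range typically show up when a buffer is read before being initialized, so this part of the diff looks like a genuine behavioral difference rather than a stale snapshot.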
@@ -1644,54 +1218,10 @@ let%expect_test "einsum with a leftmost input axis preserved as output axis" =
     Train.forward_and_forget backend ctx c;
     Tensor.print ~with_code:false ~with_grad:false `Default @@ a;
     [%expect
-      [ full printed value table for [0]: r3x2x4_a shape 0:3|2:4->1:2 elided; values 0.00 through 2.30e+1 ]
+      {| [0]: r3x2x4_a shape 0:3|2:4->1:2 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ b;
     [%expect
-      [ full printed value table for [1]: r3x4x2x3_b shape 0:3|2:2,3:3->1:4 elided; values 0.00 through 7.10e+1 across blocks 0 @ 0, 1 @ 0 and 2 @ 0 ]
+      {| [1]: r3x4x2x3_b shape 0:3|2:2,3:3->1:4 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ c;
     [%expect
       {|
@@ -1728,34 +1258,10 @@ let%expect_test "einsum permuting two leftmost input axes as output axes" =
     Train.forward_and_forget backend ctx c;
     Tensor.print ~with_code:false ~with_grad:false `Default @@ a;
     [%expect
-      [ full printed value table for [0]: r2x2_a shape 1:2->0:2 elided; values 0.00 1.00 / 2.00 3.00 ]
+      {| [0]: r2x2_a shape 1:2->0:2 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ b;
     [%expect
-      [ full printed value table for [1]: r2x2x3x4_b shape 1:2,2:3,3:4->0:2 elided; values 0.00 through 4.70e+1 ]
+      {| [1]: r2x2x3x4_b shape 1:2,2:3,3:4->0:2 <virtual> |}];
     Tensor.print ~with_code:false ~with_grad:false `Default @@ c;
     [%expect
       {|
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @install @check @runtest && rm -rf _build" failed with exit status 1
2025-07-05 17:58.13: Job failed: Failed: Build failed
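If collapsing the printed tables to <virtual> one-liners is the intended new behaviour, the usual ppx_expect workflow would be to refresh the snapshots locally with dune runtest followed by dune promote (or dune runtest --auto-promote) and commit the updated test/einsum_trivia.ml. The outer_sum hunks, however, would still need an actual fix first, since promoting them as-is would only bake the stray non-zero values into the expectations.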