2025-09-09 10:10.43: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (87ac57e3c541a18d798362bbe6ddfdb05bbe6750) (linux-x86_64:(lint-fmt))
Base: ocaml/opam:debian-12-ocaml-4.08@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d
ocamlformat version: version 0.27.0 (from opam)

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 87ac57e3
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-4.08@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d
USER 1000:1000
RUN cd ~/opam-repository && (git cat-file -e b8021439f8c57ba6435bc2263f6596671f4f4466 || git fetch origin master) && git reset -q --hard b8021439f8c57ba6435bc2263f6596671f4f4466 && git log --no-decorate -n1 --oneline && opam update -u
RUN opam depext -i dune
WORKDIR /src
RUN opam depext -i ocamlformat=0.27.0
COPY --chown=1000:1000 . /src/
RUN opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK

2025-09-09 10:10.43: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-4.08@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d-debian-12-4.08_opam-2.4-ocamlformat-b8021439f8c57ba6435bc2263f6596671f4f4466"

2025-09-09 10:10.43: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-4.08@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d)
 (user (uid 1000) (gid 1000))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "cd ~/opam-repository && (git cat-file -e b8021439f8c57ba6435bc2263f6596671f4f4466 || git fetch origin master) && git reset -q --hard b8021439f8c57ba6435bc2263f6596671f4f4466 && git log --no-decorate -n1 --oneline && opam update -u"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam depext -i dune"))
 (workdir /src)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam depext -i ocamlformat=0.27.0"))
 (copy (src .) (dst /src/))
 (run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)")))
2025-09-09 10:10.43: Waiting for resource in pool OCluster
2025-09-09 10:10.43: Waiting for worker…
2025-09-09 10:10.55: Got resource from pool OCluster
Building on doris.caelum.ci.dev
All commits already cached
HEAD is now at 87ac57e3 Readme update, bug fix in nn_blocks.ml `layer_norm`

(from ocaml/opam:debian-12-ocaml-4.08@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d)
Unable to find image 'ocaml/opam:debian-12-ocaml-4.08@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d' locally
docker.io/ocaml/opam@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d: Pulling from ocaml/opam
bbd0d7f7db1c: Already exists
213854c7ccc6: Pulling fs layer
5a1fc3ff734f: Pulling fs layer
9cb1c8fc2a98: Pulling fs layer
50461da03769: Pulling fs layer
5a1fc3ff734f: Verifying Checksum
5a1fc3ff734f: Download complete
9cb1c8fc2a98: Verifying Checksum
9cb1c8fc2a98: Download complete
50461da03769: Verifying Checksum
50461da03769: Download complete
213854c7ccc6: Verifying Checksum
213854c7ccc6: Download complete
213854c7ccc6: Pull complete
5a1fc3ff734f: Pull complete
9cb1c8fc2a98: Pull complete
50461da03769: Pull complete
Digest: sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d
Status: Downloaded newer image for ocaml/opam@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d
2025-09-09 10:10.55 ---> using "b69d5f1106f8a1cfb35e904f37f2ccc38216aa61a22ee32dd7c324a91608cf44" from cache

/: (user (uid 1000) (gid 1000))
/: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
        (network host)
        (shell "cd ~/opam-repository && (git cat-file -e b8021439f8c57ba6435bc2263f6596671f4f4466 || git fetch origin master) && git reset -q --hard b8021439f8c57ba6435bc2263f6596671f4f4466 && git log --no-decorate -n1 --oneline && opam update -u"))
b8021439f8 Merge pull request #28261 from kit-ty-kate/deploy-fix-25819

<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
default (at git+file:///home/opam/opam-repository):
[INFO] opam 2.1 and 2.2 include many performance and security improvements over 2.0; please consider upgrading (https://opam.ocaml.org/doc/Install.html)

Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# Run eval $(opam env) to update the current shell environment
2025-09-09 10:10.55 ---> using "1be76adbf83434462493f6d60d3a403af1e722bb13c1f26df1ed667dd3ad008c" from cache

/: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
        (network host)
        (shell "opam depext -i dune"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages
The following actions will be performed:
- install dune 3.20.0

<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[dune.3.20.0] found in cache

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed dune.3.20.0
Done.
# Run eval $(opam env) to update the current shell environment
2025-09-09 10:10.55 ---> using "76384e7e1078506ef4c7e0c049f9a49f543e9fdc75f2cd2f57b1b769d15b1cc0" from cache

/: (workdir /src)
/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
           (network host)
           (shell "opam depext -i ocamlformat=0.27.0"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages
The following actions will be performed:
- install sexplib0 v0.14.0 [required by base]
- install menhirLib 20240715 [required by ocamlformat-lib]
- install menhirCST 20240715 [required by menhir]
- install ocamlbuild 0.16.1 [required by fpath, astring, uuseg]
- install cmdliner 1.3.0 [required by ocamlformat]
- install menhirSdk 20240715 [required by ocamlformat-lib]
- install either 1.0.0 [required by ocamlformat-lib]
- install dune-build-info 3.20.0 [required by ocamlformat-lib]
- install csexp 1.5.2 [required by ocamlformat]
- install camlp-streams 5.0.1 [required by ocamlformat-lib]
- install seq base [required by re]
- install fix 20250428 [required by ocamlformat-lib]
- install ocaml-version 4.0.1 [required by ocamlformat-lib]
- install ocamlfind 1.9.8 [required by ocp-indent, astring, fpath, uuseg]
- install menhir 20240715 [required by ocamlformat-lib]
- install dune-configurator 3.20.0 [required by base]
- install re 1.11.0 [required by ocamlformat]
- install topkg 1.1.0 [required by fpath, astring, uuseg]
- install base-bytes base [required by ocp-indent]
- install base v0.14.3 [required by ocamlformat-lib]
- install uutf 1.0.4 [required by ocamlformat-lib]
- install astring 0.8.5 [required by ocamlformat-lib]
- install ocp-indent 1.8.1 [required by ocamlformat-lib]
- install stdio v0.14.0 [required by ocamlformat-lib]
- install uucp 15.0.0 [required by uuseg]
- install fpath 0.7.3 [required by ocamlformat-lib]
- install uuseg 15.0.0 [required by ocamlformat-lib]
- install ocamlformat-lib 0.27.0 [required by ocamlformat]
- install ocamlformat 0.27.0
===== 29 to install =====

<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[astring.0.8.5] found in cache
[base.v0.14.3] found in cache
[camlp-streams.5.0.1] found in cache
[cmdliner.1.3.0] found in cache
[csexp.1.5.2] found in cache
[dune-build-info.3.20.0] found in cache
[dune-configurator.3.20.0] found in cache
[either.1.0.0] found in cache
[fix.20250428] found in cache
[fpath.0.7.3] found in cache
[menhir.20240715] found in cache
[menhirCST.20240715] found in cache
[menhirLib.20240715] found in cache
[menhirSdk.20240715] found in cache
[ocaml-version.4.0.1] found in cache
[ocamlbuild.0.16.1] found in cache
[ocamlfind.1.9.8] found in cache
[ocamlformat.0.27.0] found in cache
[ocamlformat-lib.0.27.0] found in cache
[ocp-indent.1.8.1] found in cache
[re.1.11.0] found in cache
[sexplib0.v0.14.0] found in cache
[stdio.v0.14.0] found in cache
[topkg.1.1.0] found in cache
[uucp.15.0.0] found in cache
[uuseg.15.0.0] found in cache
[uutf.1.0.4] found in cache

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed seq.base
-> installed camlp-streams.5.0.1
-> installed cmdliner.1.3.0
-> installed csexp.1.5.2
-> installed either.1.0.0
-> installed fix.20250428
-> installed menhirCST.20240715
-> installed menhirLib.20240715
-> installed menhirSdk.20240715
-> installed ocaml-version.4.0.1
-> installed sexplib0.v0.14.0
-> installed re.1.11.0
-> installed dune-build-info.3.20.0
-> installed ocamlfind.1.9.8
-> installed base-bytes.base
-> installed dune-configurator.3.20.0
-> installed ocamlbuild.0.16.1
-> installed ocp-indent.1.8.1
-> installed topkg.1.1.0
-> installed base.v0.14.3
-> installed astring.0.8.5
-> installed uutf.1.0.4
-> installed stdio.v0.14.0
-> installed menhir.20240715
-> installed fpath.0.7.3
-> installed uucp.15.0.0
-> installed uuseg.15.0.0
-> installed ocamlformat-lib.0.27.0
-> installed ocamlformat.0.27.0
Done.
<><> ocp-indent.1.8.1 installed successfully ><><><><><><><><><><><><><><><><><>
=> This package requires additional configuration for use in editors. Install package 'user-setup', or manually:
   * for Emacs, add these lines to ~/.emacs:
     (add-to-list 'load-path "/home/opam/.opam/4.08/share/emacs/site-lisp")
     (require 'ocp-indent)
   * for Vim, add this line to ~/.vimrc:
     set rtp^="/home/opam/.opam/4.08/share/ocp-indent/vim"
# Run eval $(opam env) to update the current shell environment
2025-09-09 10:10.55 ---> using "ee5f36fa10328bf844b6feeab861ac0336aa35be5e288d81331a0c9c26b8859a" from cache

/src: (copy (src .) (dst /src/))
2025-09-09 10:10.56 ---> saved as "ec85cb3b788cd36866a0f33febd5998b4da1ac48f567e82e105fa6d55ae1519c"

/src: (run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)"))
Warning: Invalid documentation comment:
File "lib/nn_blocks.ml", line 12, characters 22-25:
'{ }': bad markup.
Suggestion: did you mean '{! }' or '[ ]'?
File "bin/compilation_speed.ml", line 1, characters 0-0:
diff --git a/_build/default/bin/compilation_speed.ml b/_build/default/bin/.formatted/compilation_speed.ml
index e85e87a..6b15fdd 100644
--- a/_build/default/bin/compilation_speed.ml
+++ b/_build/default/bin/.formatted/compilation_speed.ml
@@ -26,10 +26,11 @@ let benchmark_overhead _backend_name () =
   (* Note: this compiles entirely fresh code for each step of the loop. *)
   let ys =
     Array.map xs ~f:(fun v ->
-        let%cd update_x = ~~("update_x"; x =: !.v )in
-        let assign_x =
-          Train.to_routine (Context.context f_routine) IDX.empty update_x
+        let%cd update_x =
+          ~~("update_x";
+             x =: !.v)
+        in
+        let assign_x = Train.to_routine (Context.context f_routine) IDX.empty update_x in
         Train.run ctx assign_x;
         Train.run ctx f_routine;
         f.@[0])
File "arrayjit/lib/context.mli", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/context.mli b/_build/default/arrayjit/lib/.formatted/context.mli
index da8a3a0..c17357b 100644
--- a/_build/default/arrayjit/lib/context.mli
+++ b/_build/default/arrayjit/lib/.formatted/context.mli
@@ -2,63 +2,62 @@
 module Backends_deprecated = Backends
 
-(** Execution context managing device, compilation, and buffers *)
 type t
+(** Execution context managing device, compilation, and buffers *)
 
-(** A compiled computational routine ready for execution *)
 type routine
+(** A compiled computational routine ready for execution *)
 
 val bindings : routine -> Ir.Indexing.lowered_bindings
-
 val context : routine -> t
 
 (** {2 Context creation} *)
 
-(** Create a CUDA context. *)
 val cuda : ?device_id:int -> unit -> t
+(** Create a CUDA context. *)
 
-(** Create a Metal context. *)
 val metal : ?device_id:int -> unit -> t
+(** Create a Metal context. *)
 
-(** Create a CPU context. *)
 val cpu : ?threads:int -> unit -> t
+(** Create a CPU context. *)
 
-(** Automatically select the best available backend. *)
 val auto : unit -> t
+(** Automatically select the best available backend. *)
 
 (** {2 Core operations} *)
 
-(** Compile assignments into an executable routine.
-    Returns updated context and the compiled routine. *)
 val compile : t -> Ir.Assignments.comp -> Ir.Indexing.unit_bindings -> t * routine
+(** Compile assignments into an executable routine. Returns updated context and the compiled
+    routine. *)
 
-(** Execute a compiled routine. Mutates buffers in-place.
-    Returns updated context with newly initialized nodes tracked. *)
 val run : t -> routine -> t
+(** Execute a compiled routine. Mutates buffers in-place. Returns updated context with newly
+    initialized nodes tracked. *)
 
 (** {2 Data operations} *)
 
-(** Note: These operations work with backend-specific buffer types hidden behind
-    the context abstraction. For v0.6.1, you may need to use the existing
-    backend API for actual buffer manipulation. *)
+(** Note: These operations work with backend-specific buffer types hidden behind the context
+    abstraction. For v0.6.1, you may need to use the existing backend API for actual buffer
+    manipulation. *)
 
-(** Copy a tensor from source context to destination context. *)
 val copy : src:t -> dst:t -> Ir.Tnode.t -> unit
+(** Copy a tensor from source context to destination context. *)
 
 (** {2 Node tracking operations} *)
 
-(** Initialize a node from host memory (Ndarray/bigarray data).
-    This is a temporary solution until the v0.7 refactoring removes hosted arrays.
-    After calling this, the node is marked as initialized in the returned context. *)
 val init_from_host_deprecated : t -> Ir.Tnode.t -> t
+(** Initialize a node from host memory (Ndarray/bigarray data). This is a temporary solution until
+    the v0.7 refactoring removes hosted arrays. After calling this, the node is marked as
+    initialized in the returned context. *)
 
-(** Check if a node is initialized. *)
 val is_initialized : t -> Ir.Tnode.t -> bool
+(** Check if a node is initialized. *)
 
 (** {2 Debug operations} *)
 
-(** Get the name of the backend. *)
 val backend_name : t -> string
+(** Get the name of the backend. *)
+
+val device_id : t -> int
 (** Get the device ID. *)
-val device_id : t -> int
\ No newline at end of file
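Both recurring changes above concern documentation comments. The "bad markup" warning is odoc rejecting '{ }' inside a (** ... *) comment: braces are reserved for markup such as cross-references and headings, and inline code belongs in square brackets. The context.mli hunks are ocamlformat's doc-comment placement normalization, which moves a comment written before a declaration to directly after it. A minimal sketch of both conventions follows; the signatures are hypothetical, since the log does not show the contents of lib/nn_blocks.ml line 12:

    type t
    (** A tensor value; the doc comment sits after the declaration, matching the
        context.mli hunks above. *)

    val layer_norm : ?epsilon:float -> t -> t
    (** Layer normalization over the last axis, e.g. [layer_norm x]. *)

    (* Inside doc comments, inline code must use [square brackets]; a brace form
       such as {layer_norm x} is what triggers "'{ }': bad markup". Valid brace
       markup is limited to forms like {!cross_references} and {2 headings}. *)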
File "test/einsum/moons_demo_variant.ml", line 1, characters 0-0:
diff --git a/_build/default/test/einsum/moons_demo_variant.ml b/_build/default/test/einsum/.formatted/moons_demo_variant.ml
index 51339c5..072d9b7 100644
--- a/_build/default/test/einsum/moons_demo_variant.ml
+++ b/_build/default/test/einsum/.formatted/moons_demo_variant.ml
@@ -43,9 +43,7 @@ let () =
   Train.set_hosted learning_rate.value;
   let sgd = Train.sgd_update ~learning_rate ~weight_decay scalar_loss in
   let ctx = Train.init_params ctx bindings scalar_loss in
-  let sgd_routine =
-    Train.to_routine ctx bindings (Asgns.sequence [ update; sgd ])
-  in
+  let sgd_routine = Train.to_routine ctx bindings (Asgns.sequence [ update; sgd ]) in
   (* Skipping over the training loop, not needed for the test. *)
   Train.run ctx sgd_routine;
File "lib/train.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/train.ml b/_build/default/lib/.formatted/train.ml
index 481ba72..e2805b8 100644
--- a/_build/default/lib/train.ml
+++ b/_build/default/lib/.formatted/train.ml
@@ -196,15 +196,15 @@
    the host as appropriate. If [reinit_all] is true, all parameters are reinitialized, otherwise
    only the parameters that are not in [ctx.ctx_arrays] are initialized. *)
 let init_params ?(reinit_all = false) ?(hosted = true) ctx bindings t =
-  let comp = 
+  let comp =
     if reinit_all then Tensor.init_params t
-    else 
+    else
       (* Check which params are already initialized *)
       let skip = Map.empty (module Tn) in
       Set.fold t.Tensor.params ~init:skip ~f:(fun skip p ->
-        if Context.is_initialized ctx p.Tensor.value then
-          Map.set skip ~key:p.Tensor.value ~data:()
-        else skip)
+          if Context.is_initialized ctx p.Tensor.value then
+            Map.set skip ~key:p.Tensor.value ~data:()
+          else skip)
       |> fun skip -> Tensor.init_params ~skip t
   in
   if hosted then Set.iter (snd @@ Asgns.collect_nodes_guess_output comp.Asgns.asgns) ~f:set_hosted;
@@ -213,9 +213,7 @@ let init_params ?(reinit_all = false) ?(hosted = true) ctx bindings t =
   let ctx = Context.run ctx routine in
   (* Mark embedded nodes as initialized via init_from_host *)
   Set.fold comp.Asgns.embedded_nodes ~init:ctx ~f:(fun ctx tn ->
-      if not (Context.is_initialized ctx tn) then
-        Context.init_from_host_deprecated ctx tn
-      else ctx)
+      if not (Context.is_initialized ctx tn) then Context.init_from_host_deprecated ctx tn else ctx)
 
 type example_train_result = {
   inputs : Tensor.t;
@@ -234,8 +232,8 @@ type example_train_result = {
    the context. If [skip_init] is true (false by default), no initialization is performmed. If
    [reinit_all] is true (false by default), all parameters are reinitialized, otherwise only the
    parameters that are not in [ctx.ctx_arrays] are initialized. *)
-let%track3_sexp run_once ?(hosted = true) ?(skip_init = false) ?reinit_all
-    ?(bindings = IDX.empty) ~f ctx t =
+let%track3_sexp run_once ?(hosted = true) ?(skip_init = false) ?reinit_all ?(bindings = IDX.empty)
+    ~f ctx t =
   if hosted then set_hosted t.Tensor.value;
   (* Compute the update early, to ensure the shape inference is done. *)
   let update = f t in
@@ -270,7 +268,6 @@ let sgd_step ~learning_rate ?momentum ?weight_decay ?nesterov ?(bindings = IDX.e
   let ctx, sgd_routine = Context.compile ctx sgd_comp bindings in
   Context.run ctx sgd_routine
-
 (** [printf] is a wrapper around {!Tensor.print} that assumes [~force:true], and by default sets
     [~with_code:false], [~with_grad:true], and [~style:`Default]. *)
 let printf ?here ?(with_grad = true) ?(with_code = false) ?(with_low_level = false)
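One thing worth noting about the hunks above: pairs such as "-  let comp =" / "+  let comp =", which print identically on both sides, almost always differ only in invisible trailing whitespace, which ocamlformat strips. The same most likely applies to the "| [%expr" pairs in the lib/ppx_cd.ml diff further below.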
File "test/einsum/surjectivity.ml", line 1, characters 0-0:
diff --git a/_build/default/test/einsum/surjectivity.ml b/_build/default/test/einsum/.formatted/surjectivity.ml
index 8829fa0..28c4394 100644
--- a/_build/default/test/einsum/surjectivity.ml
+++ b/_build/default/test/einsum/.formatted/surjectivity.ml
@@ -9,7 +9,6 @@ module type Backend = Ir.Backend_intf.Backend
 let%expect_test "diagonal_tensor_initialization" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   (* Create a diagonal tensor using einsum: i->ii *)
   let input = TDSL.range 5 in
@@ -41,7 +40,6 @@ let%expect_test "diagonal_tensor_initialization" =
 let%expect_test "sparse_assignment_with_fixed_indices" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   (* Create a sparse tensor using fixed indices: i->i0j *)
   let input = TDSL.range 4 in
@@ -77,7 +75,6 @@ let%expect_test "sparse_assignment_with_fixed_indices" =
 let%expect_test "multiple_sparse_axes" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   (* Test with multiple fixed indices: ij->i1j2 *)
   let input = TDSL.range_of_shape ~output_dims:[ 3; 4 ] () in
File "test/operations/transformer_test.ml", line 1, characters 0-0:
diff --git a/_build/default/test/operations/transformer_test.ml b/_build/default/test/operations/.formatted/transformer_test.ml
index bcff68b..fa3af0e 100644
--- a/_build/default/test/operations/transformer_test.ml
+++ b/_build/default/test/operations/.formatted/transformer_test.ml
@@ -51,7 +51,7 @@
   let output = transformer_model ~train_step:None ~src ~tgt ~mask in
   let _ctx = Ocannl.Train.forward_once ctx output in
-  
+
   (* Verify output shape *)
   Stdio.printf "Output shape:\n%s\n%!"
     (Sexp.to_string_hum ([%sexp_of: Shape.t] output.Tensor.shape))
File "test/training/moons_demo.ml", line 1, characters 0-0:
diff --git a/_build/default/test/training/moons_demo.ml b/_build/default/test/training/.formatted/moons_demo.ml
index b4f09e7..fd40ba3 100644
--- a/_build/default/test/training/moons_demo.ml
+++ b/_build/default/test/training/.formatted/moons_demo.ml
@@ -50,9 +50,7 @@ let main () =
   Train.set_hosted learning_rate.value;
   let sgd = Train.sgd_update ~learning_rate ~weight_decay scalar_loss in
   let ctx = Train.init_params ctx bindings scalar_loss in
-  let sgd_routine =
-    Train.to_routine ctx bindings (Asgns.sequence [ update; sgd ])
-  in
+  let sgd_routine = Train.to_routine ctx bindings (Asgns.sequence [ update; sgd ]) in
   let step_ref = IDX.find_exn (Context.bindings sgd_routine) step_n in
   step_ref := 0;
   for epoch = 1 to epochs do
File "test/einsum/einsum_trivia_exec.ml", line 1, characters 0-0:
diff --git a/_build/default/test/einsum/einsum_trivia_exec.ml b/_build/default/test/einsum/.formatted/einsum_trivia_exec.ml
index bd6de34..c50722c 100644
--- a/_build/default/test/einsum/einsum_trivia_exec.ml
+++ b/_build/default/test/einsum/.formatted/einsum_trivia_exec.ml
@@ -12,7 +12,6 @@ module type Backend = Ir.Backend_intf.Backend
 let () =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let hey = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 3 ] ~output_dims:[ 4 ] () in
   let%op ho = hey ++ "b|i->o => o|b->i" in
@@ -33,7 +32,6 @@ let () =
 let () =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let hey = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 3 ] ~output_dims:[ 4 ] () in
   let%op ho = hey ++ "b|i->o => b|i" in
@@ -51,11 +49,10 @@ let () =
 let () =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let a = TDSL.range_of_shape ~batch_dims:[] ~input_dims:[] ~output_dims:[ 2 ] () in
   let b = TDSL.range_of_shape ~batch_dims:[] ~input_dims:[] ~output_dims:[ 3 ] () in
-  let%op c = (a + 1) +* "i; j => i->j" b in
+  let%op c = a + 1 +* "i; j => i->j" b in
   ignore (Train.forward_once ctx c);
   Train.printf ~here:[%here] ~with_code:false ~with_grad:false c;
   let a = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 3 ] ~output_dims:[ 4 ] () in
@@ -67,7 +64,6 @@ let () =
 let () =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let a = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 3 ] ~output_dims:[ 4 ] () in
   let b = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 4 ] ~output_dims:[ 5 ] () in
@@ -90,7 +86,6 @@ let () =
 let () =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let hey = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 3 ] ~output_dims:[ 4 ] () in
   let%op ho = hey ++ "...|i->o => ...|o->i" in
@@ -125,7 +120,6 @@ let () =
 let () =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let a = TDSL.range_of_shape ~batch_dims:[ 3 ] ~input_dims:[ 4 ] ~output_dims:[ 2 ] () in
   let b = TDSL.range_of_shape ~batch_dims:[ 3 ] ~input_dims:[ 1 ] ~output_dims:[ 4 ] () in
@@ -142,7 +136,6 @@ let () =
 let () =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let hey = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 3 ] ~output_dims:[ 4 ] () in
   let%op ho = hey ++ "...|1->... => ...|..." in
@@ -163,7 +156,6 @@ let () =
 let () =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let a = TDSL.range_of_shape ~batch_dims:[ 3 ] ~input_dims:[ 4 ] ~output_dims:[ 2 ] () in
   let b = TDSL.range_of_shape ~batch_dims:[ 3 ] ~input_dims:[ 1 ] ~output_dims:[ 4 ] () in
@@ -174,7 +166,6 @@ let () =
 let () =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let ri = TDSL.range 4 in
   let%op ti = ri ++ "i=>i0" in
@@ -194,7 +185,6 @@ let () =
 let () =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let a =
     TDSL.range_of_shape ~label:[ "a" ] ~batch_dims:[ 3 ] ~input_dims:[ 4 ] ~output_dims:[ 2 ] ()
@@ -209,7 +199,6 @@ let () =
 let () =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let a = TDSL.range_of_shape ~label:[ "a" ] ~input_dims:[ 2 ] ~output_dims:[ 2 ] () in
   let b = TDSL.range_of_shape ~label:[ "b" ] ~input_dims:[ 2; 3; 4 ] ~output_dims:[ 2 ] () in
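The only change in these einsum diffs beyond blank-line removal, rewriting (a + 1) +* "i; j => i->j" b to drop the parentheses (it recurs in einsum_trivia.ml below), is still purely cosmetic: an OCaml custom operator takes its precedence and associativity from its leading character, so +* binds exactly like + and "a + 1 +* b" already parses as "(a + 1) +* b". A self-contained sketch with a made-up +*, standing in for OCannl's einsum operator:

    (* Hypothetical operator: the leading '+' gives ( +* ) the precedence and
       left associativity of ( + ), so 1 + 2 +* 3 parses as (1 + 2) +* 3. *)
    let ( +* ) = ( * )

    let () =
      (* Both expressions evaluate to 9: dropping the parentheses is safe. *)
      assert (1 + 2 +* 3 = (1 + 2) * 3)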
File "test/einsum/einsum_trivia.ml", line 1, characters 0-0:
diff --git a/_build/default/test/einsum/einsum_trivia.ml b/_build/default/test/einsum/.formatted/einsum_trivia.ml
index 8b06dce..cce8ef2 100644
--- a/_build/default/test/einsum/einsum_trivia.ml
+++ b/_build/default/test/einsum/.formatted/einsum_trivia.ml
@@ -13,7 +13,6 @@ module type Backend = Ir.Backend_intf.Backend
 let%expect_test "einsum1 permute axes" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let hey = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 3 ] ~output_dims:[ 4 ] () in
   let%op ho = hey ++ "b|i->o => o|b->i" in
@@ -152,7 +151,6 @@ let%expect_test "einsum1 permute axes" =
 let%expect_test "einsum1 sum out axes" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let hey = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 3 ] ~output_dims:[ 4 ] () in
   let%op ho = hey ++ "b|i->o => b|i" in
@@ -200,11 +198,10 @@ let%expect_test "einsum1 sum out axes" =
 let%expect_test "einsum outer product" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let a = TDSL.range_of_shape ~batch_dims:[] ~input_dims:[] ~output_dims:[ 2 ] () in
   let b = TDSL.range_of_shape ~batch_dims:[] ~input_dims:[] ~output_dims:[ 3 ] () in
-  let%op c = (a + 1) +* "i; j => i->j" b in
+  let%op c = a + 1 +* "i; j => i->j" b in
   ignore (Train.forward_once ctx c);
   Train.printf ~here:[%here] ~with_code:false ~with_grad:false c;
   [%expect
@@ -381,7 +378,6 @@ let%expect_test "einsum outer product" =
 let%expect_test "einsum matrix/inner+outer products" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let a = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 3 ] ~output_dims:[ 4 ] () in
   let b = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 4 ] ~output_dims:[ 5 ] () in
@@ -745,7 +741,6 @@ let%expect_test "einsum1 broadcast or sum out prefix axes" =
 let%expect_test "einsum broadcast or sum out prefix axes" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let a = TDSL.range_of_shape ~batch_dims:[ 3 ] ~input_dims:[ 4 ] ~output_dims:[ 2 ] () in
   let b = TDSL.range_of_shape ~batch_dims:[ 3 ] ~input_dims:[ 1 ] ~output_dims:[ 4 ] () in
@@ -795,7 +790,6 @@ let%expect_test "einsum broadcast or sum out prefix axes" =
 let%expect_test "einsum1 fixed dim axis" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let hey = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 3 ] ~output_dims:[ 4 ] () in
   let%op ho = hey ++ "...|1->... => ...|..." in
@@ -869,7 +863,6 @@ let%expect_test "einsum1 fixed dim axis" =
 let%expect_test "einsum with fixed dim axes" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let a = TDSL.range_of_shape ~batch_dims:[ 3 ] ~input_dims:[ 4 ] ~output_dims:[ 2 ] () in
   let b = TDSL.range_of_shape ~batch_dims:[ 3 ] ~input_dims:[ 1 ] ~output_dims:[ 4 ] () in
@@ -894,7 +887,6 @@ let%expect_test "einsum with fixed dim axes" =
 let%expect_test "outer_sum simulating axis concatenation" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let ri = TDSL.range 4 in
   let%op ti = ri ++ "i=>i0" in
@@ -1077,7 +1069,6 @@ let%expect_test "outer_sum simulating axis concatenation" =
 let%expect_test "einsum with a leftmost input axis preserved as output axis" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let a =
     TDSL.range_of_shape ~label:[ "a" ] ~batch_dims:[ 3 ] ~input_dims:[ 4 ] ~output_dims:[ 2 ] ()
@@ -1108,7 +1099,6 @@ let%expect_test "einsum with a leftmost input axis preserved as output axis" =
 let%expect_test "einsum permuting two leftmost input axes as output axes" =
   Tensor.unsafe_reinitialize ();
   let ctx = Context.auto () in
-
   let a = TDSL.range_of_shape ~label:[ "a" ] ~input_dims:[ 2 ] ~output_dims:[ 2 ] () in
   let b = TDSL.range_of_shape ~label:[ "b" ] ~input_dims:[ 2; 3; 4 ] ~output_dims:[ 2 ] () in
File "arrayjit/lib/ops.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/ops.ml b/_build/default/arrayjit/lib/.formatted/ops.ml
index dbd5214..47632e6 100644
--- a/_build/default/arrayjit/lib/ops.ml
+++ b/_build/default/arrayjit/lib/.formatted/ops.ml
@@ -206,7 +206,7 @@ let compare_prec p1 p2 =
   | Single_prec _, _ -> -1
   | _, Single_prec _ -> 1
 
-  let prec_in_bytes = function
+let prec_in_bytes = function
   | Void_prec -> 0
   | Byte_prec _ -> 1
   | Uint16_prec _ -> 2
@@ -220,6 +220,7 @@ let compare_prec p1 p2 =
   | Fp8_prec _ -> 1
   | Single_prec _ -> 4
   | Double_prec _ -> 8
+
 let is_float = function
   | Void_prec -> false
   | Byte_prec _ -> false
File "lib/ppx_cd.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/ppx_cd.ml b/_build/default/lib/.formatted/ppx_cd.ml
index 199180b..58d9361 100644
--- a/_build/default/lib/ppx_cd.ml
+++ b/_build/default/lib/.formatted/ppx_cd.ml
@@ -1060,7 +1060,7 @@ let translate ?ident_label (expr : expression) : result =
               ~capture_dims:[%e capture_dims_expr] [%e spec] [%e res1.expr] [%e res2.expr]];
         array_opt_of_code = None;
       }
-  | [%expr 
+  | [%expr
       [%e? { pexp_desc = Pexp_ident { txt = Lident op_ident; _ }; _ }]
         [%e? expr1]
         [%e? { pexp_desc = Pexp_ident _; _ } as spec]]
@@ -1072,7 +1072,7 @@ let translate ?ident_label (expr : expression) : result =
         expr =
           [%expr [%e Hashtbl.find_exn einsum_unary_ops op_ident loc] [%e spec] [%e res1.expr]];
       }
-  | [%expr 
+  | [%expr
       [%e? { pexp_desc = Pexp_ident { txt = Lident op_ident; _ }; _ }]
        [%e? expr1]
        [%e? { pexp_desc = Pexp_constant (Pconst_string (spec_str, _, _)); _ }]]
File "arrayjit/lib/low_level.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/low_level.ml b/_build/default/arrayjit/lib/.formatted/low_level.ml
index 1570367..4e2ca58 100644
--- a/_build/default/arrayjit/lib/low_level.ml
+++ b/_build/default/arrayjit/lib/.formatted/low_level.ml
@@ -1087,11 +1087,13 @@ let simplify_llc llc =
       when Ops.is_float prec ->
         loop_scalar (Binop (Mul, (Constant (c2 /. c1), Ops.promote_prec prec2 prec1), llsc), prec3)
     | Binop (Div, (Constant c1, prec1), (Binop (Mul, (Constant c2, prec2), llsc), prec3))
-    | Binop (Div, (Constant c1, prec1), (Binop (Mul, llsc, (Constant c2, prec2)), prec3)) when Ops.is_float prec ->
+    | Binop (Div, (Constant c1, prec1), (Binop (Mul, llsc, (Constant c2, prec2)), prec3))
+      when Ops.is_float prec ->
         (* TODO: this might worsen the conditioning in hand-designed formula cases. *)
         loop_scalar (Binop (Div, (Constant (c1 /. c2), Ops.promote_prec prec1 prec2), llsc), prec3)
     | Binop (Mul, llv1, (Binop (Div, llv2, llv3), prec23))
-    | Binop (Mul, (Binop (Div, llv2, llv3), prec23), llv1) when Ops.is_float prec ->
+    | Binop (Mul, (Binop (Div, llv2, llv3), prec23), llv1)
+      when Ops.is_float prec ->
         loop_scalar (Binop (Div, (Binop (Mul, llv1, llv2), prec), llv3), prec23)
     | Binop (Div, llv1, (Binop (Div, llv2, llv3), prec23)) when Ops.is_float prec ->
         loop_scalar (Binop (Div, (Binop (Mul, llv1, llv3), prec), llv2), prec23)
dune build @fmt failed
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)" failed with exit status 2
2025-09-09 10:10.57: Job failed: Failed: Build failed
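The low_level.ml hunks that close the diff are also layout-only: a "when" guard belongs to the whole or-pattern it follows, so breaking the line before the guard, as ocamlformat did above, changes nothing semantically. A sketch with hypothetical constructors:

    type binop = Mul of int * int | Div of int * int

    (* The guard covers both branches of the or-pattern, whether it is docked at
       the end of the pattern line or broken onto its own line. *)
    let has_zero_operand = function
      | Mul (a, b)
      | Div (a, b)
        when a = 0 || b = 0 ->
          true
      | _ -> false

As for clearing the failure: running "opam exec -- dune fmt" (shorthand for "dune build @fmt --auto-promote") at the repository root, with ocamlformat 0.27.0 installed as in the repro block, should apply every diff shown above. Matching the exact ocamlformat version the project requests (reported at the top of this log as "version 0.27.0 (from opam)") matters, since output differs between versions.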