Organisations / ahrefs / ocannl / 90bf5f (lint-fmt)

Logs

2025-07-12 13:39.16: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (90bf5fc6cd4eb365478ffa2c291ca2d69a834b7e) (linux-x86_64:(lint-fmt))
Base: ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e
ocamlformat version: 0.27.0 (from opam)

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 90bf5fc6
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e
USER 1000:1000
RUN cd ~/opam-repository && (git cat-file -e 0eea63ad71af2b1116c556023bedc6bf083e6125 || git fetch origin master) && git reset -q --hard 0eea63ad71af2b1116c556023bedc6bf083e6125 && git log --no-decorate -n1 --oneline && opam update -u
RUN opam depext -i dune
WORKDIR /src
RUN opam depext -i ocamlformat=0.27.0
COPY --chown=1000:1000 . /src/
RUN opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)

END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
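
To run the reproduction end to end (a suggested invocation, not part of the original job output; the image tag is an arbitrary choice):

docker build -t ocannl-lint-fmt .   # the final RUN step exits with status 2 if the formatting check fails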

2025-07-12 13:39.16: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e-debian-12-4.08_opam-2.3-ocamlformat-0eea63ad71af2b1116c556023bedc6bf083e6125"
2025-07-12 13:39.16: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e)
 (user (uid 1000) (gid 1000))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "cd ~/opam-repository && (git cat-file -e 0eea63ad71af2b1116c556023bedc6bf083e6125 || git fetch origin master) && git reset -q --hard 0eea63ad71af2b1116c556023bedc6bf083e6125 && git log --no-decorate -n1 --oneline && opam update -u"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam depext -i dune"))
 (workdir /src)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam depext -i ocamlformat=0.27.0"))
 (copy (src .) (dst /src/))
 (run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)"))
)

2025-07-12 13:39.16: Waiting for resource in pool OCluster
2025-07-12 13:39.16: Waiting for worker…
2025-07-12 13:39.16: Got resource from pool OCluster
Building on toxis.caelum.ci.dev
HEAD is now at 166b2f63 Better naming Ndarray.apply
HEAD is now at 90bf5fc6 Also rename Ops.map_prec -> apply_prec (see recent commit on renaming Ndarray.map)

(from ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e)
2025-07-12 13:39.18 ---> using "4ea5038d254cfd14663698deb665a2dc4ce1e1383d544c063adebb02ed15ce16" from cache

/: (user (uid 1000) (gid 1000))

/: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
        (network host)
        (shell "cd ~/opam-repository && (git cat-file -e 0eea63ad71af2b1116c556023bedc6bf083e6125 || git fetch origin master) && git reset -q --hard 0eea63ad71af2b1116c556023bedc6bf083e6125 && git log --no-decorate -n1 --oneline && opam update -u"))
0eea63ad71 Merge pull request #27946 from mtelvers/opam-publish-ocaml-version.4.0.1

<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
default (at git+file:///home/opam/opam-repository): 
    [INFO] opam 2.1 and 2.2 include many performance and security improvements over 2.0; please consider upgrading (https://opam.ocaml.org/doc/Install.html)


Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# Run eval $(opam env) to update the current shell environment
2025-07-12 13:39.18 ---> using "ed14c208b709d50e26e290f8f669e0b0b6a2456751ee5b87b29d71221f0a9fc1" from cache

/: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
        (network host)
        (shell "opam depext -i dune"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages
The following actions will be performed:
  - install dune 3.19.1

<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[dune.3.19.1] found in cache

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed dune.3.19.1
Done.
# Run eval $(opam env) to update the current shell environment
2025-07-12 13:39.18 ---> using "249f8284e2625ce869f4c794eb6ff1eab40d82e0af762bb312d97ad81bf8d4f1" from cache

/: (workdir /src)

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
           (network host)
           (shell "opam depext -i ocamlformat=0.27.0"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages
The following actions will be performed:
  - install sexplib0          v0.14.0  [required by base]
  - install cmdliner          1.3.0    [required by ocamlformat]
  - install menhirLib         20240715 [required by ocamlformat-lib]
  - install menhirCST         20240715 [required by menhir]
  - install ocamlbuild        0.16.1   [required by fpath, astring, uuseg]
  - install dune-build-info   3.19.1   [required by ocamlformat-lib]
  - install menhirSdk         20240715 [required by ocamlformat-lib]
  - install either            1.0.0    [required by ocamlformat-lib]
  - install ocaml-version     4.0.1    [required by ocamlformat-lib]
  - install camlp-streams     5.0.1    [required by ocamlformat-lib]
  - install csexp             1.5.2    [required by ocamlformat]
  - install seq               base     [required by re]
  - install fix               20250428 [required by ocamlformat-lib]
  - install ocamlfind         1.9.8    [required by ocp-indent, astring, fpath, uuseg]
  - install menhir            20240715 [required by ocamlformat-lib]
  - install dune-configurator 3.19.1   [required by base]
  - install re                1.11.0   [required by ocamlformat]
  - install topkg             1.0.8    [required by fpath, astring, uuseg]
  - install base-bytes        base     [required by ocp-indent]
  - install base              v0.14.3  [required by ocamlformat-lib]
  - install uutf              1.0.4    [required by ocamlformat-lib]
  - install astring           0.8.5    [required by ocamlformat-lib]
  - install ocp-indent        1.8.1    [required by ocamlformat-lib]
  - install stdio             v0.14.0  [required by ocamlformat-lib]
  - install uucp              15.0.0   [required by uuseg]
  - install fpath             0.7.3    [required by ocamlformat-lib]
  - install uuseg             15.0.0   [required by ocamlformat-lib]
  - install ocamlformat-lib   0.27.0   [required by ocamlformat]
  - install ocamlformat       0.27.0
===== 29 to install =====

<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[astring.0.8.5] found in cache
[base.v0.14.3] found in cache
[camlp-streams.5.0.1] found in cache
[cmdliner.1.3.0] found in cache
[csexp.1.5.2] found in cache
[dune-build-info.3.19.1] found in cache
[dune-configurator.3.19.1] found in cache
[either.1.0.0] found in cache
[fix.20250428] found in cache
[fpath.0.7.3] found in cache
[menhir.20240715] found in cache
[menhirCST.20240715] found in cache
[menhirLib.20240715] found in cache
[menhirSdk.20240715] found in cache
[ocaml-version.4.0.1] found in cache
[ocamlbuild.0.16.1] found in cache
[ocamlfind.1.9.8] found in cache
[ocamlformat.0.27.0] found in cache
[ocamlformat-lib.0.27.0] found in cache
[ocp-indent.1.8.1] found in cache
[re.1.11.0] found in cache
[sexplib0.v0.14.0] found in cache
[stdio.v0.14.0] found in cache
[topkg.1.0.8] found in cache
[uucp.15.0.0] found in cache
[uuseg.15.0.0] found in cache
[uutf.1.0.4] found in cache

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed seq.base
-> installed camlp-streams.5.0.1
-> installed csexp.1.5.2
-> installed either.1.0.0
-> installed fix.20250428
-> installed cmdliner.1.3.0
-> installed menhirCST.20240715
-> installed menhirLib.20240715
-> installed menhirSdk.20240715
-> installed ocaml-version.4.0.1
-> installed re.1.11.0
-> installed sexplib0.v0.14.0
-> installed dune-build-info.3.19.1
-> installed dune-configurator.3.19.1
-> installed ocamlfind.1.9.8
-> installed base-bytes.base
-> installed ocp-indent.1.8.1
-> installed ocamlbuild.0.16.1
-> installed base.v0.14.3
-> installed stdio.v0.14.0
-> installed topkg.1.0.8
-> installed uutf.1.0.4
-> installed astring.0.8.5
-> installed fpath.0.7.3
-> installed menhir.20240715
-> installed uucp.15.0.0
-> installed uuseg.15.0.0
-> installed ocamlformat-lib.0.27.0
-> installed ocamlformat.0.27.0
Done.

<><> ocp-indent.1.8.1 installed successfully ><><><><><><><><><><><><><><><><><>
=> This package requires additional configuration for use in editors. Install package 'user-setup', or manually:

   * for Emacs, add these lines to ~/.emacs:
     (add-to-list 'load-path "/home/opam/.opam/4.08/share/emacs/site-lisp")
     (require 'ocp-indent)

   * for Vim, add this line to ~/.vimrc:
     set rtp^="/home/opam/.opam/4.08/share/ocp-indent/vim"
# Run eval $(opam env) to update the current shell environment
2025-07-12 13:39.18 ---> using "7fd5d4ac1bb4f93065934b08bd6aad3989793fd62a884f257d9f7ac1e147ec1a" from cache

/src: (copy (src .) (dst /src/))
2025-07-12 13:39.19 ---> saved as "750ab6852de016f72827a02087e572b31d9f5d20f0f500e5fa64e5752e0b92f4"

/src: (run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)"))
File "arrayjit/bin/dune", line 6, characters 21-34:
6 |   (pps ppx_minidebug ppx_sexp_conv))
                         ^^^^^^^^^^^^^
Error: Library "ppx_sexp_conv" not found.
-> required by _build/default/arrayjit/bin/read_config.exe
-> required by %{dep:../../arrayjit/bin/read_config.exe} at test/dune:25
-> required by _build/default/test/config/ocannl_backend.txt
-> required by %{read:config/ocannl_backend.txt} at test/dune:44
-> required by Computing directory contents of _build/default/test
File "arrayjit/bin/dune", line 6, characters 7-20:
6 |   (pps ppx_minidebug ppx_sexp_conv))
           ^^^^^^^^^^^^^
Error: Library "ppx_minidebug" not found.
-> required by _build/default/arrayjit/bin/.merlin-conf/exe-read_config
-> required by _build/default/arrayjit/bin/read_config.exe
-> required by %{dep:../../arrayjit/bin/read_config.exe} at test/dune:25
-> required by _build/default/test/config/ocannl_backend.txt
-> required by %{read:config/ocannl_backend.txt} at test/dune:44
-> required by Computing directory contents of _build/default/test
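
The two "Library not found" errors above arise because evaluating the @fmt alias forces dune to build arrayjit/bin/read_config.exe (via the dependency chain shown), while the lint image only installs dune and ocamlformat. A minimal local workaround, assuming the missing preprocessors are the only gap (this step is not part of the CI recipe):

opam install ppx_minidebug ppx_sexp_conv   # install the ppx libraries required by the executable's dune stanza
opam exec -- dune build @fmt --ignore-promoted-rules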
File "datasets/half_moons.ml", line 1, characters 0-0:
diff --git a/_build/default/datasets/half_moons.ml b/_build/default/datasets/.formatted/half_moons.ml
index 5e62df2..d860921 100644
--- a/_build/default/datasets/half_moons.ml
+++ b/_build/default/datasets/.formatted/half_moons.ml
@@ -6,37 +6,32 @@ open Bigarray
 module Config = struct
   type t = {
     noise_range : float;  (** Range of noise to add to the coordinates *)
-    seed : int option;    (** Optional random seed for reproducibility *)
+    seed : int option;  (** Optional random seed for reproducibility *)
   }
 
-  let default = {
-    noise_range = 0.1;
-    seed = None;
-  }
+  let default = { noise_range = 0.1; seed = None }
 end
 
 (** Generate the half moons dataset with the specified parameters.
-    
+
     @param config Configuration for noise and randomization
     @param len Number of samples per moon (total samples = len * 2)
-    @return A tuple of (coordinates, labels) where:
-            - coordinates is a bigarray of shape [len*2; 2] (batch_axis, output_axis)
-            - labels is a bigarray of shape [len*2; 1] (batch_axis, output_axis)
-            - First moon has label 1.0, second moon has label -1.0
-*)
+    @return
+      A tuple of (coordinates, labels) where:
+      - coordinates is a bigarray of shape [len*2; 2] (batch_axis, output_axis)
+      - labels is a bigarray of shape [len*2; 1] (batch_axis, output_axis)
+      - First moon has label 1.0, second moon has label -1.0 *)
 let generate ?(config = Config.default) ~len () =
   (* Initialize random seed if specified *)
-  (match config.seed with
-  | Some seed -> Random.init seed
-  | None -> ());
+  (match config.seed with Some seed -> Random.init seed | None -> ());
 
   let noise () = Random.float (2.0 *. config.noise_range) -. config.noise_range in
   let total_samples = len * 2 in
-  
+
   (* Create bigarrays with batch axis first, then output axis *)
   let coordinates = Genarray.create float64 c_layout [| total_samples; 2 |] in
   let labels = Genarray.create float64 c_layout [| total_samples; 1 |] in
-  
+
   (* Generate first moon (label = 1.0) *)
   for i = 0 to len - 1 do
     let v = Float.of_int i *. Float.pi /. Float.of_int len in
@@ -45,9 +40,9 @@ let generate ?(config = Config.default) ~len () =
     let y = s +. noise () in
     Genarray.set coordinates [| i; 0 |] x;
     Genarray.set coordinates [| i; 1 |] y;
-    Genarray.set labels [| i; 0 |] 1.0;
+    Genarray.set labels [| i; 0 |] 1.0
   done;
-  
+
   (* Generate second moon (label = -1.0) *)
   for i = 0 to len - 1 do
     let v = Float.of_int i *. Float.pi /. Float.of_int len in
@@ -57,26 +52,27 @@ let generate ?(config = Config.default) ~len () =
     let idx = len + i in
     Genarray.set coordinates [| idx; 0 |] x;
     Genarray.set coordinates [| idx; 1 |] y;
-    Genarray.set labels [| idx; 0 |] (-1.0);
+    Genarray.set labels [| idx; 0 |] (-1.0)
   done;
-  
+
   (coordinates, labels)
 
-(** Generate half moons dataset using the old array-based approach for compatibility.
-    This function is deprecated and provided for backwards compatibility.
-    
+(** Generate half moons dataset using the old array-based approach for compatibility. This function
+    is deprecated and provided for backwards compatibility.
+
     @param len Number of samples per moon
-    @param noise_range Range of noise to add  
-    @return A tuple of (coordinates_array, labels_array) as flat arrays
-*)
+    @param noise_range Range of noise to add
+    @return A tuple of (coordinates_array, labels_array) as flat arrays *)
 let generate_arrays ?(noise_range = 0.1) ~len () =
   let noise () = Random.float (2.0 *. noise_range) -. noise_range in
-  let coordinates = 
-    Array.concat (Array.to_list (Array.init len (fun _ ->
-        let i = Random.int len in
-        let v = Float.of_int i *. Float.pi /. Float.of_int len in
-        let c = Float.cos v and s = Float.sin v in
-        [| c +. noise (); s +. noise (); 1.0 -. c +. noise (); 0.5 -. s +. noise () |])))
+  let coordinates =
+    Array.concat
+      (Array.to_list
+         (Array.init len (fun _ ->
+              let i = Random.int len in
+              let v = Float.of_int i *. Float.pi /. Float.of_int len in
+              let c = Float.cos v and s = Float.sin v in
+              [| c +. noise (); s +. noise (); 1.0 -. c +. noise (); 0.5 -. s +. noise () |])))
   in
   let labels = Array.init (len * 2) (fun i -> if i mod 2 = 0 then 1. else -1.) in
-  (coordinates, labels) 
\ No newline at end of file
+  (coordinates, labels)
File "lib/row.mli", line 1, characters 0-0:
diff --git a/_build/default/lib/row.mli b/_build/default/lib/.formatted/row.mli
index b57358a..7d504b5 100644
--- a/_build/default/lib/row.mli
+++ b/_build/default/lib/.formatted/row.mli
@@ -86,9 +86,9 @@ type total_elems =
 type row_constraint =
   | Unconstrained
   | Total_elems of { numerator : total_elems; divided_by : dim_var list }
-      (** The rows, inclusive of the further row spec, have this many elements.
-          The total is numerator / (product of divided_by variables).
-          divided_by has multiset semantics - the same variable can appear multiple times. *)
+      (** The rows, inclusive of the further row spec, have this many elements. The total is
+          numerator / (product of divided_by variables). divided_by has multiset semantics - the
+          same variable can appear multiple times. *)
   | Exact of dim list  (** The concatenated rows have these axes. *)
 [@@deriving equal, hash, compare, sexp_of]
 
File "bin/primitive_ops.ml", line 1, characters 0-0:
diff --git a/_build/default/bin/primitive_ops.ml b/_build/default/bin/.formatted/primitive_ops.ml
index dbe51e3..59f2f35 100644
--- a/_build/default/bin/primitive_ops.ml
+++ b/_build/default/bin/.formatted/primitive_ops.ml
@@ -26,9 +26,7 @@ let%debug_sexp graph_t () : unit =
   let size = 50 in
   let xs = Array.init size ~f:Float.(fun i -> (of_int i / 10.) + 0.1) in
   let x_flat =
-    Tensor.term ~grad_spec:Require_grad ~label:[ "x_flat" ]
-      ~fetch_op:(Constant_fill xs)
-      ()
+    Tensor.term ~grad_spec:Require_grad ~label:[ "x_flat" ] ~fetch_op:(Constant_fill xs) ()
   in
   let step_sym, bindings = IDX.get_static_symbol ~static_range:size IDX.empty in
   let%op xkcd = x_flat @| step_sym in
File "bin/moons_demo.ml", line 1, characters 0-0:
diff --git a/_build/default/bin/moons_demo.ml b/_build/default/bin/.formatted/moons_demo.ml
index 6c0a3c3..4191613 100644
--- a/_build/default/bin/moons_demo.ml
+++ b/_build/default/bin/.formatted/moons_demo.ml
@@ -32,8 +32,12 @@ let demo () =
 
   let config = Datasets.Half_moons.Config.{ noise_range = 0.1; seed = Some seed } in
   let moons_coordinates, moons_labels = Datasets.Half_moons.generate ~config ~len () in
-  let moons_flat = TDSL.rebatch ~l:"moons_flat" (Ir.Ndarray.as_array Ir.Ops.Double moons_coordinates) in
-  let moons_classes = TDSL.rebatch ~l:"moons_classes" (Ir.Ndarray.as_array Ir.Ops.Double moons_labels) in
+  let moons_flat =
+    TDSL.rebatch ~l:"moons_flat" (Ir.Ndarray.as_array Ir.Ops.Double moons_coordinates)
+  in
+  let moons_classes =
+    TDSL.rebatch ~l:"moons_classes" (Ir.Ndarray.as_array Ir.Ops.Double moons_labels)
+  in
 
   let batch_n, bindings = IDX.get_static_symbol ~static_range:n_batches IDX.empty in
   let step_n, bindings = IDX.get_static_symbol bindings in
File "bin/moons_benchmark.ml", line 1, characters 0-0:
diff --git a/_build/default/bin/moons_benchmark.ml b/_build/default/bin/.formatted/moons_benchmark.ml
index 31d245e..f0fb1af 100644
--- a/_build/default/bin/moons_benchmark.ml
+++ b/_build/default/bin/.formatted/moons_benchmark.ml
@@ -57,7 +57,9 @@ let classify_moons ~seed ~on_device ~inlining_cutoff ~num_streams ~batch_size ~b
   (* let init_lr = 0.1 in *)
   let init_lr = 0.01 in
   let moons_config = Datasets.Half_moons.Config.{ noise_range = 0.1; seed = Some seed } in
-  let moons_coordinates, moons_labels = Datasets.Half_moons.generate ~config:moons_config ~len:flat_len () in
+  let moons_coordinates, moons_labels =
+    Datasets.Half_moons.generate ~config:moons_config ~len:flat_len ()
+  in
   let moons_flat_ndarray = Ir.Ndarray.as_array Ir.Ops.Double moons_coordinates in
   let moons_classes_ndarray = Ir.Ndarray.as_array Ir.Ops.Double moons_labels in
   let moons_flat ~b:_ = TDSL.rebatch ~l:"moons_flat" moons_flat_ndarray in
@@ -83,14 +85,14 @@ let classify_moons ~seed ~on_device ~inlining_cutoff ~num_streams ~batch_size ~b
   @@ Backend.get_global_debug_info ();
   let per_batch_callback ~at_batch ~at_step ~learning_rate ~batch_loss ~epoch_loss =
     Stdio.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n%!" at_batch at_step
-       learning_rate batch_loss epoch_loss;
+      learning_rate batch_loss epoch_loss;
     if Option.is_none !start_time then start_time := Some (Time_now.nanoseconds_since_unix_epoch ())
   in
   (* Tn.print_accessible_headers (); *)
   let per_epoch_callback ~at_step ~at_epoch ~learning_rate ~epoch_loss =
     (* if at_epoch % 10 = 9 then *)
-      Stdio.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n%!" at_epoch at_step learning_rate
-        epoch_loss
+    Stdio.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n%!" at_epoch at_step learning_rate
+      epoch_loss
   in
 
   let {
File "bin/zero2hero_1of7.ml", line 1, characters 0-0:
diff --git a/_build/default/bin/zero2hero_1of7.ml b/_build/default/bin/.formatted/zero2hero_1of7.ml
index 4212f12..babcfc2 100644
--- a/_build/default/bin/zero2hero_1of7.ml
+++ b/_build/default/bin/.formatted/zero2hero_1of7.ml
@@ -57,8 +57,7 @@ let _suspended () =
   let x_flat =
     Tensor.term ~grad_spec:Tensor.Require_grad
       ~label:[ "x_flat" ] (* ~input_dims:[] ~output_dims:[ 1 ] *)
-      ~fetch_op:(Constant_fill values)
-      ()
+      ~fetch_op:(Constant_fill values) ()
   in
   let step_sym, bindings = IDX.get_static_symbol ~static_range:size IDX.empty in
   (* The [let x =] line is the same as this except [let%op x =] uses [~grad_spec:If_needed]. *)
@@ -110,9 +109,7 @@ let _suspended () =
   let xs = Array.init size ~f:Float.(fun i -> (of_int i / 10.) - 5.) in
   (* Yay, the whole shape gets inferred! *)
   let x_flat =
-    Tensor.term ~grad_spec:Require_grad ~label:[ "x_flat" ]
-      ~fetch_op:(Constant_fill xs)
-      ()
+    Tensor.term ~grad_spec:Require_grad ~label:[ "x_flat" ] ~fetch_op:(Constant_fill xs) ()
   in
   let step_sym, bindings = IDX.get_static_symbol ~static_range:size IDX.empty in
   let%op x = x_flat @| step_sym in
File "lib/ppx_op.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/ppx_op.ml b/_build/default/lib/.formatted/ppx_op.ml
index fe5d727..24aa423 100644
--- a/_build/default/lib/ppx_op.ml
+++ b/_build/default/lib/.formatted/ppx_op.ml
@@ -21,7 +21,7 @@ let make_p ~has_config ~loc =
 
 let make_vb ?value ~has_config ~loc ~str_loc ~ident string =
   let pat = Ast_helper.Pat.var ~loc { loc = str_loc; txt = ident } in
-  let value = match value with Some c -> [%expr Some [%e c] ] | None -> [%expr None] in
+  let value = match value with Some c -> [%expr Some [%e c]] | None -> [%expr None] in
   let v = [%expr [%e make_p ~has_config ~loc] ?value:[%e value] [%e string]] in
   let vb = Ast_helper.Vb.mk ~loc pat v in
   (pat, vb)
File "arrayjit/lib/ops.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/ops.ml b/_build/default/arrayjit/lib/.formatted/ops.ml
index 4b01ab8..44170a3 100644
--- a/_build/default/arrayjit/lib/ops.ml
+++ b/_build/default/arrayjit/lib/.formatted/ops.ml
@@ -381,7 +381,7 @@ let interpret_unop op v =
   | Neg -> ~-.v
   | Tanh_approx -> tanh v
   | Not -> if v = 0. then 1. else 0.
-  | Uint4x32_to_prec_uniform _ -> 
+  | Uint4x32_to_prec_uniform _ ->
       (* FIXME: NOT IMPLEMENTED YET *)
       failwith "NOT IMPLEMENTED YET: Uint4x32_to_prec_uniform"
 
@@ -531,8 +531,7 @@ let unop_cd_syntax = function
   | Neg -> "neg"
   | Tanh_approx -> "tanh"
   | Not -> "not"
-  | Uint4x32_to_prec_uniform target_prec -> 
-      "uint4x32_to_" ^ prec_string target_prec ^ "_uniform"
+  | Uint4x32_to_prec_uniform target_prec -> "uint4x32_to_" ^ prec_string target_prec ^ "_uniform"
 
 let unop_c_syntax prec op =
   let fmax () =
@@ -578,7 +577,7 @@ let unop_c_syntax prec op =
       invalid_arg "Ops.unop_c_syntax: Tanh_approx not supported for integer precisions"
   | Tanh_approx, _ -> ("tanhf(", ")")
   | Not, _ -> ("(", " == 0.0 ? 1.0 : 0.0)")
-  | Uint4x32_to_prec_uniform target_prec, _ -> 
+  | Uint4x32_to_prec_uniform target_prec, _ ->
       (* FIXME: NOT IMPLEMENTED YET *)
       ("uint4x32_to_" ^ prec_string target_prec ^ "_uniform(", ")")
 
File "lib/operation.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/operation.ml b/_build/default/lib/.formatted/operation.ml
index bb5851b..eb36e4c 100644
--- a/_build/default/lib/operation.ml
+++ b/_build/default/lib/.formatted/operation.ml
@@ -447,8 +447,8 @@ end
     omitted. Note: the data should have no padding and if padding is inferred, the data will be
     copied; otherwise, the resulting tensor value shares host memory with the ndarray. *)
 let reshape ~l ?b ?(i = []) ?o ndarray =
-  Tensor.term ~label:[ l ] ?batch_dims:b ~input_dims:i ?output_dims:o ~init_data:(Asgns.Reshape ndarray)
-    ()
+  Tensor.term ~label:[ l ] ?batch_dims:b ~input_dims:i ?output_dims:o
+    ~init_data:(Asgns.Reshape ndarray) ()
 
 (** The dimensions are taken from the provided ndarray, but the split into axis kinds still needs to
     be inferred (or provided). Assumes no padding. See also: {!reshape} and {!TDSL.wrap_param}. *)
File "arrayjit/lib/assignments.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/assignments.ml b/_build/default/arrayjit/lib/.formatted/assignments.ml
index b7df201..a1de0e3 100644
--- a/_build/default/arrayjit/lib/assignments.ml
+++ b/_build/default/arrayjit/lib/.formatted/assignments.ml
@@ -171,7 +171,7 @@ let%diagn2_sexp to_low_level code =
     assert (Array.length idcs = Array.length (Lazy.force tn.Tn.dims));
     match buffer with
     | Node tn -> Low_level.Get (tn, idcs)
-    | Merge_buffer tn -> 
+    | Merge_buffer tn ->
         (* FIXME: NOT IMPLEMENTED YET - need to handle merge buffer access differently now *)
         Low_level.Get (tn, idcs)
   in
@@ -267,7 +267,6 @@ let%diagn2_sexp to_low_level code =
     | Fetch { array; fetch_op = Embed_symbol s; dims } ->
         Low_level.loop_over_dims (Lazy.force dims) ~body:(fun idcs ->
             set array idcs @@ Embed_index (Iterator s.static_symbol))
-
     | Fetch { array; fetch_op = Range_over_offsets; dims = (lazy dims) } ->
         Low_level.loop_over_dims dims ~body:(fun idcs ->
             let offset = Indexing.reflect_projection ~dims ~projection:idcs in
File "arrayjit/lib/metal_backend.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/metal_backend.ml b/_build/default/arrayjit/lib/.formatted/metal_backend.ml
index 4b81f92..45aa4b1 100644
--- a/_build/default/arrayjit/lib/metal_backend.ml
+++ b/_build/default/arrayjit/lib/.formatted/metal_backend.ml
@@ -557,7 +557,11 @@ end) : Ir.Backend_impl.Lowered_backend = struct
       | Not, _ -> fun v -> string "!" ^^ v
       | Uint4x32_to_prec_uniform target_prec, _ ->
           (* FIXME: NOT IMPLEMENTED YET - placeholder for Uint4x32_to_prec_uniform conversion *)
-          fun _v -> string ("/* FIXME: uint4x32_to_" ^ Ops.prec_string target_prec ^ "_uniform */ (0.0" ^ metal_prec_suffix_float target_prec ^ ")")
+          fun _v ->
+            string
+              ("/* FIXME: uint4x32_to_" ^ Ops.prec_string target_prec ^ "_uniform */ (0.0"
+              ^ metal_prec_suffix_float target_prec
+              ^ ")")
     (* Logical not *)
 
     let convert_precision ~from ~to_ =
File "lib/tensor.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/tensor.ml b/_build/default/lib/.formatted/tensor.ml
index 6be5895..f3d5a1e 100644
--- a/_build/default/lib/tensor.ml
+++ b/_build/default/lib/.formatted/tensor.ml
@@ -205,8 +205,8 @@ let raw_unop ~initialize_neutral ~accum ~(t : t) ~(lhs_is_grad : bool) ~op ~(t1
 type grad_spec = Require_grad | Prohibit_grad | If_needed [@@deriving sexp, equal, variants]
 
 let op ~(label : string list) ?(ternary_op = Shape.Pointwise_tern)
-    ?(compose_op = Shape.Pointwise_bin) ?(transpose_op = Shape.Pointwise_un) ?terminal_op
-    ~op_asn ~grad_asn ?(grad_spec = If_needed) make_shape (orig_ts : t list) : t =
+    ?(compose_op = Shape.Pointwise_bin) ?(transpose_op = Shape.Pointwise_un) ?terminal_op ~op_asn
+    ~grad_asn ?(grad_spec = If_needed) make_shape (orig_ts : t list) : t =
   (* The code needs to be included in the order it was computed due to potential non-tree DAGs. *)
   let ordered_ts = List.dedup_and_sort orig_ts ~compare:(fun t1 t2 -> Int.ascending t1.id t2.id) in
   let id = session_state.next_id in
@@ -250,8 +250,7 @@ let op ~(label : string list) ?(ternary_op = Shape.Pointwise_tern)
     | Some (Shape.Data (Asgns.Padded { data; padding = padding_spec; padded_value })) ->
         let padding = Some (padding_spec, padded_value) in
         Tn.create_from_padded ~id ~label ~ndarray:data ~padding ()
-    | Some (Shape.Fetch _) | None ->
-        Tn.create ~default_prec ~id ~label ~dims ~padding ()
+    | Some (Shape.Fetch _) | None -> Tn.create ~default_prec ~id ~label ~dims ~padding ()
   in
   let embedded_nodes = ref @@ Set.singleton (module Tn) v in
   let children =
@@ -358,7 +357,7 @@ let unop ~label ?transpose_op ~op_asn ~grad_asn ?grad_spec t1 =
 let term ~label ~grad_spec ?batch_dims ?input_dims ?output_dims ?batch_axes ?input_axes ?output_axes
     ?deduced ?init_data ?fetch_op () =
   let terminal_op =
-    match init_data, fetch_op with
+    match (init_data, fetch_op) with
     | Some _, Some _ -> invalid_arg "Tensor.term: both init_data and fetch_op are provided"
     | Some init_data, None -> Some (Shape.Data init_data)
     | None, Some fetch_op -> Some (Shape.Fetch fetch_op)
@@ -369,7 +368,9 @@ let term ~label ~grad_spec ?batch_dims ?input_dims ?output_dims ?batch_axes ?inp
     let dims = lazy (Lazy.force projections).Idx.lhs_dims in
     match fetch_op with
     | None -> Asgns.empty_comp
-    | Some (( Constant _ | Slice _ | Embed_symbol _ | Range_over_offsets | Constant_fill _ ) as fetch_op) ->
+    | Some
+        ((Constant _ | Slice _ | Embed_symbol _ | Range_over_offsets | Constant_fill _) as fetch_op)
+      ->
         Asgns.to_comp @@ Fetch { array = v; fetch_op; dims }
   in
   let grad_asn ~t:_ ~g:_ ~projections:_ = Asgns.empty_comp in
@@ -377,8 +378,8 @@ let term ~label ~grad_spec ?batch_dims ?input_dims ?output_dims ?batch_axes ?inp
     Shape.make ?batch_dims ?input_dims ?output_dims ?batch_axes ?input_axes ?output_axes ?deduced ()
   in
   (* Note: terminal_op is used for both tensor creation and shape inference. *)
-  op ~label ?compose_op:None ?transpose_op:None ?terminal_op ~op_asn ~grad_asn ~grad_spec
-    make_shape []
+  op ~label ?compose_op:None ?transpose_op:None ?terminal_op ~op_asn ~grad_asn ~grad_spec make_shape
+    []
 
 let float_to_label v = Float.to_string v
 
File "arrayjit/lib/c_syntax.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/c_syntax.ml b/_build/default/arrayjit/lib/.formatted/c_syntax.ml
index 776da2e..722f427 100644
--- a/_build/default/arrayjit/lib/c_syntax.ml
+++ b/_build/default/arrayjit/lib/.formatted/c_syntax.ml
@@ -646,9 +646,9 @@ module C_syntax (B : C_syntax_config) = struct
         (defs, expr)
     | Unop (Ops.Uint4x32_to_prec_uniform target_prec, v) ->
         let defs, expr_v = pp_float prec v in
-        let expr = 
-          string ("uint4x32_to_" ^ Ops.prec_string target_prec ^ "_uniform(") ^^
-          expr_v ^^ string ")"
+        let expr =
+          string ("uint4x32_to_" ^ Ops.prec_string target_prec ^ "_uniform(")
+          ^^ expr_v ^^ string ")"
         in
         (defs, expr)
     | Unop (op, v) ->
@@ -720,9 +720,9 @@ module C_syntax (B : C_syntax_config) = struct
         (B.binop_syntax prec op v1_doc v2_doc, idcs1 @ idcs2)
     | Unop (Ops.Uint4x32_to_prec_uniform target_prec, v) ->
         let v_doc, idcs = debug_float prec v in
-        let expr_doc = 
-          string ("uint4x32_to_" ^ Ops.prec_string target_prec ^ "_uniform(") ^^
-          v_doc ^^ string "){=" ^^ string B.float_log_style ^^ string "}"
+        let expr_doc =
+          string ("uint4x32_to_" ^ Ops.prec_string target_prec ^ "_uniform(")
+          ^^ v_doc ^^ string "){=" ^^ string B.float_log_style ^^ string "}"
         in
         (expr_doc, idcs)
     | Unop (op, v) ->
File "arrayjit/lib/low_level.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/low_level.ml b/_build/default/arrayjit/lib/.formatted/low_level.ml
index e36eeb6..18d0be0 100644
--- a/_build/default/arrayjit/lib/low_level.ml
+++ b/_build/default/arrayjit/lib/.formatted/low_level.ml
@@ -158,7 +158,6 @@ let is_constexpr_comp traced_store llv =
     | Get_local { tn; _ } | Local_scope { id = { tn; _ }; _ } ->
         let traced = get_node traced_store tn in
         traced.is_scalar_constexpr
-
     | Get (tn, _) ->
         let traced = get_node traced_store tn in
         traced.is_scalar_constexpr
dune build @fmt failed
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)" failed with exit status 2
2025-07-12 13:39.31: Job failed: Failed: Build failed
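
The remaining failures are pure formatting diffs. A suggested local fix, assuming ocamlformat 0.27.0 is installed as in the reproduction steps (this command is not part of the CI log):

opam exec -- dune fmt   # shorthand for: dune build @fmt --auto-promote, which applies the diffs in place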