ahrefs/ocannl 153e04 (lint-fmt)


Logs

2025-08-14 18:25.23: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (153e04bc5b7143a41de4f4d187e33b18cb490898) (linux-x86_64:(lint-fmt))
Base: ocaml/opam:debian-12-ocaml-4.08@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a
ocamlformat version: version 0.27.0 (from opam)
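The ocamlformat pin above is typically mirrored by a version line in the repository's own formatter configuration; a minimal .ocamlformat sketch matching that pin (an assumption about this repository, its actual contents are not shown in this log):

version = 0.27.0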


To reproduce locally:


git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 153e04bc
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-4.08@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a
USER 1000:1000
RUN cd ~/opam-repository && (git cat-file -e b8021439f8c57ba6435bc2263f6596671f4f4466 || git fetch origin master) && git reset -q --hard b8021439f8c57ba6435bc2263f6596671f4f4466 && git log --no-decorate -n1 --oneline && opam update -u
RUN opam depext -i dune
WORKDIR /src
RUN opam depext -i ocamlformat=0.27.0
COPY --chown=1000:1000 . /src/
RUN opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)


END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
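For quicker iteration without Docker, the same check can be run in an existing opam switch; a minimal sketch assuming dune and ocamlformat 0.27.0 are installable there (this is an assumption, not part of the CI recipe above):

opam install dune ocamlformat.0.27.0
opam exec -- dune build @fmt --ignore-promoted-rules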


2025-08-14 18:25.23: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-4.08@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a-debian-12-4.08_opam-2.4-ocamlformat-b8021439f8c57ba6435bc2263f6596671f4f4466"
2025-08-14 18:25.23: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-4.08@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a)
(user (uid 1000) (gid 1000))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e b8021439f8c57ba6435bc2263f6596671f4f4466 || git fetch origin master) && git reset -q --hard b8021439f8c57ba6435bc2263f6596671f4f4466 && git log --no-decorate -n1 --oneline && opam update -u"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam depext -i dune"))
(workdir /src)
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam depext -i ocamlformat=0.27.0"))
(copy (src .) (dst /src/))
(run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)"))
)


2025-08-14 18:25.23: Waiting for resource in pool OCluster
2025-08-14 18:25.23: Waiting for worker…
2025-08-14 18:25.23: Got resource from pool OCluster
Building on eumache
HEAD is now at 45cd1e09 Introduce a new IR variant: `Constant_bits` for int64 constants, by Claude. Claude found out that the discrepancy between sync_cc and metal backends was due to metal casting a float-single to uint4x32, while pure C was casting a double. This should go away with using Constant_bits / Tensor.bits for Fetch Embed_self_id and Tensor.get_random_seed
HEAD is now at 153e04bc Fixed, correct surjectivity testing for initialization; problem spotted and fixed by Claude Opus with my guidance on the surjectivity algo/heuristic


(from ocaml/opam:debian-12-ocaml-4.08@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a)
2025-08-14 18:25.24 ---> using "1d0024db739bd078f91b2384c47919652a4b72a425e3e24ce24cfd1f6debdfbc" from cache


/: (user (uid 1000) (gid 1000))


/: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e b8021439f8c57ba6435bc2263f6596671f4f4466 || git fetch origin master) && git reset -q --hard b8021439f8c57ba6435bc2263f6596671f4f4466 && git log --no-decorate -n1 --oneline && opam update -u"))
b8021439f8 Merge pull request #28261 from kit-ty-kate/deploy-fix-25819


<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
default (at git+file:///home/opam/opam-repository):
[INFO] opam 2.1 and 2.2 include many performance and security improvements over 2.0; please consider upgrading (https://opam.ocaml.org/doc/Install.html)


Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# Run eval $(opam env) to update the current shell environment
2025-08-14 18:25.24 ---> using "76d9d96bb26da3c78200d383fd35f876d80571baf05962331a1fff5f47db0e2e" from cache


/: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam depext -i dune"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages
The following actions will be performed:
- install dune 3.19.1


<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[dune.3.19.1] found in cache


<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed dune.3.19.1
Done.
# Run eval $(opam env) to update the current shell environment
2025-08-14 18:25.24 ---> using "da0888a20a067de19f6183f0b497dcc2d1ea7f7036861cc50f633c662efdce4f" from cache


/: (workdir /src)


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam depext -i ocamlformat=0.27.0"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages
The following actions will be performed:
- install sexplib0          v0.14.0  [required by base]
- install menhirLib         20240715 [required by ocamlformat-lib]
- install menhirCST         20240715 [required by menhir]
- install ocamlbuild        0.16.1   [required by fpath, astring, uuseg]
- install cmdliner          1.3.0    [required by ocamlformat]
- install menhirSdk         20240715 [required by ocamlformat-lib]
- install either            1.0.0    [required by ocamlformat-lib]
- install csexp             1.5.2    [required by ocamlformat]
- install dune-build-info   3.19.1   [required by ocamlformat-lib]
- install camlp-streams     5.0.1    [required by ocamlformat-lib]
- install seq               base     [required by re]
- install fix               20250428 [required by ocamlformat-lib]
- install ocaml-version     4.0.1    [required by ocamlformat-lib]
- install ocamlfind         1.9.8    [required by ocp-indent, astring, fpath, uuseg]
- install menhir            20240715 [required by ocamlformat-lib]
- install dune-configurator 3.19.1   [required by base]
- install re                1.11.0   [required by ocamlformat]
- install topkg             1.1.0    [required by fpath, astring, uuseg]
- install base-bytes        base     [required by ocp-indent]
- install base              v0.14.3  [required by ocamlformat-lib]
- install uutf              1.0.4    [required by ocamlformat-lib]
- install astring           0.8.5    [required by ocamlformat-lib]
- install ocp-indent        1.8.1    [required by ocamlformat-lib]
- install stdio             v0.14.0  [required by ocamlformat-lib]
- install uucp              15.0.0   [required by uuseg]
- install fpath             0.7.3    [required by ocamlformat-lib]
- install uuseg             15.0.0   [required by ocamlformat-lib]
- install ocamlformat-lib   0.27.0   [required by ocamlformat]
- install ocamlformat       0.27.0
===== 29 to install =====


<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[astring.0.8.5] found in cache
[base.v0.14.3] found in cache
[camlp-streams.5.0.1] found in cache
[cmdliner.1.3.0] found in cache
[csexp.1.5.2] found in cache
[dune-build-info.3.19.1] found in cache
[dune-configurator.3.19.1] found in cache
[either.1.0.0] found in cache
[fix.20250428] found in cache
[fpath.0.7.3] found in cache
[menhir.20240715] found in cache
[menhirCST.20240715] found in cache
[menhirLib.20240715] found in cache
[menhirSdk.20240715] found in cache
[ocaml-version.4.0.1] found in cache
[ocamlbuild.0.16.1] found in cache
[ocamlfind.1.9.8] found in cache
[ocamlformat.0.27.0] found in cache
[ocamlformat-lib.0.27.0] found in cache
[ocp-indent.1.8.1] found in cache
[re.1.11.0] found in cache
[sexplib0.v0.14.0] found in cache
[stdio.v0.14.0] found in cache
[topkg.1.1.0] found in cache
[uucp.15.0.0] found in cache
[uuseg.15.0.0] found in cache
[uutf.1.0.4] found in cache


<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed seq.base
-> installed camlp-streams.5.0.1
-> installed csexp.1.5.2
-> installed either.1.0.0
-> installed fix.20250428
-> installed cmdliner.1.3.0
-> installed menhirCST.20240715
-> installed menhirLib.20240715
-> installed menhirSdk.20240715
-> installed ocaml-version.4.0.1
-> installed re.1.11.0
-> installed sexplib0.v0.14.0
-> installed dune-build-info.3.19.1
-> installed dune-configurator.3.19.1
-> installed ocamlfind.1.9.8
-> installed base-bytes.base
-> installed ocp-indent.1.8.1
-> installed ocamlbuild.0.16.1
-> installed base.v0.14.3
-> installed stdio.v0.14.0
-> installed topkg.1.1.0
-> installed uutf.1.0.4
-> installed astring.0.8.5
-> installed fpath.0.7.3
-> installed menhir.20240715
-> installed uucp.15.0.0
-> installed uuseg.15.0.0
-> installed ocamlformat-lib.0.27.0
-> installed ocamlformat.0.27.0
Done.


<><> ocp-indent.1.8.1 installed successfully ><><><><><><><><><><><><><><><><><>
=> This package requires additional configuration for use in editors. Install package 'user-setup', or manually:


* for Emacs, add these lines to ~/.emacs:
(add-to-list 'load-path "/home/opam/.opam/4.08/share/emacs/site-lisp")
(require 'ocp-indent)


* for Vim, add this line to ~/.vimrc:
set rtp^="/home/opam/.opam/4.08/share/ocp-indent/vim"
# Run eval $(opam env) to update the current shell environment
2025-08-14 18:25.24 ---> using "86ec8dcb8046a1e5dacfb1841e8c026d30cfead67649bcb6d6a8f9ddd6fb153d" from cache


/src: (copy (src .) (dst /src/))
2025-08-14 18:25.24 ---> saved as "0dd222cc2438ae2b13f44dd75a70f9ff2ab3a273c2984a8d6580a1cd90756a26"


/src: (run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)"))
File "arrayjit/bin/dune", line 6, characters 30-43:
6 |   (pps ppx_here ppx_minidebug ppx_sexp_conv))
^^^^^^^^^^^^^
Error: Library "ppx_sexp_conv" not found.
-> required by _build/default/arrayjit/bin/read_config.exe
-> required by %{dep:../../../arrayjit/bin/read_config.exe} at
test/operations/dune:17
-> required by _build/default/test/operations/config/ocannl_backend.txt
-> required by %{read:config/ocannl_backend.txt} at test/operations/dune:33
-> required by Computing directory contents of _build/default/test/operations
File "arrayjit/bin/dune", line 6, characters 7-15:
6 |   (pps ppx_here ppx_minidebug ppx_sexp_conv))
^^^^^^^^
Error: Library "ppx_here" not found.
-> required by _build/default/arrayjit/bin/.merlin-conf/exe-read_config
-> required by _build/default/arrayjit/bin/read_config.exe
-> required by %{dep:../../../arrayjit/bin/read_config.exe} at
test/operations/dune:17
-> required by _build/default/test/operations/config/ocannl_backend.txt
-> required by %{read:config/ocannl_backend.txt} at test/operations/dune:33
-> required by Computing directory contents of _build/default/test/operations
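Both "not found" errors stem from the lint environment, which (per the recipe above) installs only dune and ocamlformat, so the ppx preprocessors listed in arrayjit/bin/dune are simply absent there. In a full development switch they would normally be brought in via opam; a sketch of that step, as an assumption about the project's dev setup rather than a fix applied by this job:

opam install ppx_here ppx_minidebug ppx_sexp_conv
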
File "test/einsum/test_surjectivity.ml", line 1, characters 0-0:
diff --git a/_build/default/test/einsum/test_surjectivity.ml b/_build/default/test/einsum/.formatted/test_surjectivity.ml
index 0c225ed..1bb021e 100644
--- a/_build/default/test/einsum/test_surjectivity.ml
+++ b/_build/default/test/einsum/.formatted/test_surjectivity.ml
@@ -11,15 +11,14 @@ let test_diagonal_tensor () =
Stdio.printf "\nTesting diagonal tensor initialization:\n";
Tensor.unsafe_reinitialize ();
let module Backend = (val Backends.fresh_backend ()) in
-
(* Create a diagonal tensor using einsum: i->ii *)
let input = TDSL.range 5 in
let%op diagonal = input ++ "i=>ii" in
-
+
(* Ensure the diagonal tensor is hosted *)
Train.set_hosted diagonal.value;
ignore (Train.forward_once (module Backend) diagonal);
-
+
(* Print the diagonal tensor *)
Train.printf ~here:[%here] ~with_code:false ~with_grad:false diagonal;
Stdio.printf "\n"
@@ -28,14 +27,13 @@ let test_sparse_fixed_index () =
Stdio.printf "\nTesting sparse assignment with fixed index:\n";
Tensor.unsafe_reinitialize ();
let module Backend = (val Backends.fresh_backend ()) in
-
(* Create a sparse tensor using fixed indices: i->i0j *)
let input = TDSL.range 4 in
let%op sparse = input ++ "i=>i0" in
-
+
Train.set_hosted sparse.value;
ignore (Train.forward_once (module Backend) sparse);
-
+
Train.printf ~here:[%here] ~with_code:false ~with_grad:false sparse;
Stdio.printf "\n"


@@ -43,31 +41,29 @@ let test_multi_sparse () =
Stdio.printf "\nTesting multiple sparse axes:\n";
Tensor.unsafe_reinitialize ();
let module Backend = (val Backends.fresh_backend ()) in
-
(* Create tensor with multiple dims and test sparse assignment *)
-  let input = TDSL.range_of_shape ~output_dims:[3; 4] () in
+  let input = TDSL.range_of_shape ~output_dims:[ 3; 4 ] () in
let%op result = input ++ "ij=>i1j" in
-
+
Train.set_hosted result.value;
ignore (Train.forward_once (module Backend) result);
-
+
Train.printf ~here:[%here] ~with_code:false ~with_grad:false result;
Stdio.printf "\n"


-  let _test_stride_gap () =
-    Stdio.printf "\nTesting stride gap:\n";
-    Tensor.unsafe_reinitialize ();
-    let module Backend = (val Backends.fresh_backend ()) in
-
-    (* Create tensor with multiple dims and test sparse assignment *)
-    let input = TDSL.range_of_shape ~output_dims:[2; 5] () in
-    let%op result = input ++ "ij=>i+3*j" in
-
-    Train.set_hosted result.value;
-    ignore (Train.forward_once (module Backend) result);
-
-    Train.printf ~here:[%here] ~with_code:false ~with_grad:false result;
-    Stdio.printf "\n"
+let _test_stride_gap () =
+  Stdio.printf "\nTesting stride gap:\n";
+  Tensor.unsafe_reinitialize ();
+  let module Backend = (val Backends.fresh_backend ()) in
+  (* Create tensor with multiple dims and test sparse assignment *)
+  let input = TDSL.range_of_shape ~output_dims:[ 2; 5 ] () in
+  let%op result = input ++ "ij=>i+3*j" in
+
+  Train.set_hosted result.value;
+  ignore (Train.forward_once (module Backend) result);
+
+  Train.printf ~here:[%here] ~with_code:false ~with_grad:false result;
+  Stdio.printf "\n"


let () =
test_diagonal_tensor ();
@@ -75,4 +71,4 @@ let () =
test_multi_sparse ();
(* FIXME(#354): Projections inference for convolution-style expressions not implemented yet. *)
(* test_stride_gap (); *)
-  Stdio.printf "All surjectivity tests completed.\n"
\ No newline at end of file
+  Stdio.printf "All surjectivity tests completed.\n"
File "test/einsum/surjectivity.ml", line 1, characters 0-0:
diff --git a/_build/default/test/einsum/surjectivity.ml b/_build/default/test/einsum/.formatted/surjectivity.ml
index f413188..5a6b75a 100644
--- a/_build/default/test/einsum/surjectivity.ml
+++ b/_build/default/test/einsum/.formatted/surjectivity.ml
@@ -22,14 +22,15 @@ let%expect_test "diagonal_tensor_initialization" =
(* Create a diagonal tensor using einsum: i->ii *)
let input = TDSL.range 5 in
let%op diagonal = input ++ "i=>ii" in
-
+
(* Ensure the diagonal tensor is hosted *)
Train.set_hosted diagonal.value;
ignore (Train.forward_once backend diagonal);
-
+
(* Print the diagonal tensor *)
Train.printf ~here:[%here] ~with_code:false ~with_grad:false diagonal;
-  [%expect {|
+  [%expect
+    {|
HERE: test/einsum/surjectivity.ml:31:21
┌──────────────────────────────────────┐
│[1]: =>_diagonal shape 0:6,1:6        │
@@ -60,12 +61,13 @@ let%expect_test "sparse_assignment_with_fixed_indices" =
(* Create a sparse tensor using fixed indices: i->i0j *)
let input = TDSL.range 4 in
let%op sparse = input ++ "i=>i0j" in
-
+
Train.set_hosted sparse.value;
ignore (Train.forward_once backend sparse);
-
+
Train.printf ~here:[%here] ~with_code:false ~with_grad:false sparse;
-  [%expect {|
+  [%expect
+    {|
HERE: test/einsum/surjectivity.ml:64:21
┌─────────────────────────────────┐
│[1]: =>_sparse shape 0:5,1:1,2:1 │
@@ -103,14 +105,15 @@ let%expect_test "multiple_sparse_axes" =
in


(* Test with multiple fixed indices: ij->i1j2 *)
-  let input = TDSL.range_of_shape ~output_dims:[3; 4] () in
+  let input = TDSL.range_of_shape ~output_dims:[ 3; 4 ] () in
let%op sparse_multi = input ++ "ij=>i1j2" in
-
+
Train.set_hosted sparse_multi.value;
ignore (Train.forward_once backend sparse_multi);
-
+
Train.printf ~here:[%here] ~with_code:false ~with_grad:false sparse_multi;
-  [%expect {|
+  [%expect
+    {|
HERE: test/einsum/surjectivity.ml:113:21
┌───────────────────────────────────────────┐
│[1]: =>_sparse_multi shape 0:3,1:2,2:4,3:3 │
@@ -156,4 +159,4 @@ let%expect_test "multiple_sparse_axes" =
││      │ 0.00  0.00  1.10e+1 │             │
│└──────┴─────────────────────┘             │
└───────────────────────────────────────────┘
-    |}]
\ No newline at end of file
+    |}]
File "arrayjit/lib/indexing.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/indexing.ml b/_build/default/arrayjit/lib/.formatted/indexing.ml
index c1d571d..fb355e3 100644
--- a/_build/default/arrayjit/lib/indexing.ml
+++ b/_build/default/arrayjit/lib/.formatted/indexing.ml
@@ -150,50 +150,46 @@ let opt_symbol d = if iterated d then Some (get_symbol ()) else None
let opt_iterator = function None -> Fixed_idx 0 | Some sym -> Iterator sym


let is_surjective proj =
-  (* For surjectivity, we check if all target (LHS) positions will be written to.
-     This is used to determine if we need to zero-initialize before assignment. *)
-
+  (* For surjectivity, we check if all target (LHS) positions will be written to. This is used to
+     determine if we need to zero-initialize before assignment. *)
+
(* Check if there are any fixed indices (except Fixed_idx 0 when dim is 1) *)
let has_non_trivial_fixed =
Array.exists2_exn proj.project_lhs proj.lhs_dims ~f:(fun idx dim ->
match idx with
-        | Fixed_idx i -> not (i = 0 && dim <= 1)  (* Fixed_idx 0 is OK only when dim is 0 or 1 *)
+        | Fixed_idx i -> not (i = 0 && dim <= 1) (* Fixed_idx 0 is OK only when dim is 0 or 1 *)
| _ -> false)
in
if has_non_trivial_fixed then false
else
(* Collect symbols used in LHS *)
-    let lhs_symbols, has_affine, has_sub_axis =
-      Array.fold proj.project_lhs ~init:([], false, false)
-        ~f:(fun (syms, has_aff, has_sub) idx ->
+    let lhs_symbols, has_affine, has_sub_axis =
+      Array.fold proj.project_lhs ~init:([], false, false) ~f:(fun (syms, has_aff, has_sub) idx ->
match idx with
| Iterator s -> (s :: syms, has_aff, has_sub)
| Fixed_idx _ -> (syms, has_aff, has_sub)
| Affine { symbols; _ } ->
-              let coeff1_syms =
-                List.filter_map symbols ~f:(fun (coeff, s) ->
-                  if coeff = 1 then Some s else None)
+              let coeff1_syms =
+                List.filter_map symbols ~f:(fun (coeff, s) -> if coeff = 1 then Some s else None)
in
(coeff1_syms @ syms, true, has_sub)
| Sub_axis -> (syms, has_aff, true))
in
let lhs_symbol_set = Set.of_list (module Symbol) lhs_symbols in
let product_symbol_set = Set.of_array (module Symbol) proj.product_iterators in
-
+
(* All lhs symbols must be from product iterators (no bound symbols) *)
if not (Set.is_subset lhs_symbol_set ~of_:product_symbol_set) then false
-    else if has_sub_axis then
-      (* Conservative: Sub_axis case is complex, so assume non-surjective.
-         This is pessimistic but safe - Sub_axis would require comparing
-         lhs_dims and product_space dimensions carefully. *)
+    else if has_sub_axis then
+      (* Conservative: Sub_axis case is complex, so assume non-surjective. This is pessimistic but
+         safe - Sub_axis would require comparing lhs_dims and product_space dimensions carefully. *)
false
else if has_affine then
-      (* For Affine indices with strides: check coefficient compatibility.
-         A strided access pattern may skip elements. *)
-      let symbol_dims =
+      (* For Affine indices with strides: check coefficient compatibility. A strided access pattern
+         may skip elements. *)
+      let symbol_dims =
Array.filter_mapi proj.product_iterators ~f:(fun i sym ->
-          if Set.mem lhs_symbol_set sym then Some (sym, proj.product_space.(i))
-          else None)
+            if Set.mem lhs_symbol_set sym then Some (sym, proj.product_space.(i)) else None)
|> Array.to_list
|> Map.of_alist_exn (module Symbol)
in
@@ -201,15 +197,14 @@ let is_surjective proj =
Array.for_all proj.project_lhs ~f:(function
| Affine { symbols; _ } ->
(* Find max dimension of coeff=1 symbols *)
-              let max_coeff1_dim =
+              let max_coeff1_dim =
List.filter_map symbols ~f:(fun (coeff, s) ->
-                  if coeff = 1 then Map.find symbol_dims s else None)
+                    if coeff = 1 then Map.find symbol_dims s else None)
|> List.max_elt ~compare:Int.compare
|> Option.value ~default:Int.max_value
in
(* Check that coeff=1 dimension is not smaller than any stride *)
-              List.for_all symbols ~f:(fun (coeff, _) ->
-                coeff = 1 || max_coeff1_dim >= coeff)
+              List.for_all symbols ~f:(fun (coeff, _) -> coeff = 1 || max_coeff1_dim >= coeff)
| _ -> true)
in
if not check_affine_surjective then false
@@ -220,18 +215,18 @@ let is_surjective proj =
(* Simple case: only Iterator and Fixed_idx *)
(* Need enough unique symbols to cover all dimensions *)
Set.length lhs_symbol_set >= Array.length proj.project_lhs
-
-(* For backwards compatibility, keep is_bijective as an alias that checks
-   both surjectivity and injectivity (stricter than just surjectivity) *)
+
+(* For backwards compatibility, keep is_bijective as an alias that checks both surjectivity and
+   injectivity (stricter than just surjectivity) *)
let is_bijective proj =
-  is_surjective proj &&
-  let lhs_symbols =
+  is_surjective proj
+  &&
+  let lhs_symbols =
Array.concat_map proj.project_lhs ~f:(function
| Iterator s -> [| s |]
| Fixed_idx _ -> [||]
| Affine { symbols; _ } ->
-          List.filter_map symbols ~f:(fun (coeff, s) ->
-            if coeff = 1 then Some s else None)
+          List.filter_map symbols ~f:(fun (coeff, s) -> if coeff = 1 then Some s else None)
|> Array.of_list
| Sub_axis -> [||])
|> Set.of_array (module Symbol)
File "lib/ppx_op.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/ppx_op.ml b/_build/default/lib/.formatted/ppx_op.ml
index 59f0690..32726dc 100644
--- a/_build/default/lib/ppx_op.ml
+++ b/_build/default/lib/.formatted/ppx_op.ml
@@ -89,10 +89,7 @@ let rec translate ~num_configs ~is_toplevel ~has_config ?label expr =
let axis =
Ast_helper.Exp.constant ~loc:pexp_loc (Pconst_string (String.of_char ch, pexp_loc, None))
in
-      ( no_vbs,
-        [%expr
-          TDSL.bits ?label:[%e opt_expr ~loc label] ~axis_label:[%e axis] [%e i]]
-      )
+      (no_vbs, [%expr TDSL.bits ?label:[%e opt_expr ~loc label] ~axis_label:[%e axis] [%e i]])
| [%expr
[%e? { pexp_desc = Pexp_constant (Pconst_char ch); pexp_loc; _ }]
[%e? { pexp_desc = Pexp_constant (Pconst_integer _); _ } as i]] ->
File "arrayjit/lib/assignments.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/assignments.ml b/_build/default/arrayjit/lib/.formatted/assignments.ml
index 6ca5f91..0c90542 100644
--- a/_build/default/arrayjit/lib/assignments.ml
+++ b/_build/default/arrayjit/lib/.formatted/assignments.ml
@@ -328,7 +328,8 @@ let%track4_sexp to_low_level code =
| Fetch { array; fetch_op = Constant c; dims } ->
Low_level.loop_over_dims (Lazy.force dims) ~body:(fun idcs -> set array idcs @@ Constant c)
| Fetch { array; fetch_op = Constant_bits i; dims } ->
-        Low_level.loop_over_dims (Lazy.force dims) ~body:(fun idcs -> set array idcs @@ Constant_bits i)
+        Low_level.loop_over_dims (Lazy.force dims) ~body:(fun idcs ->
+            set array idcs @@ Constant_bits i)
| Fetch { array; fetch_op = Slice { batch_idx = { static_symbol = idx; _ }; sliced }; dims } ->
(* TODO: doublecheck this always gets optimized away. *)
Low_level.loop_over_dims (Lazy.force dims) ~body:(fun idcs ->
File "arrayjit/lib/cuda_backend.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/cuda_backend.ml b/_build/default/arrayjit/lib/.formatted/cuda_backend.ml
index 9d86b5f..198c3a8 100644
--- a/_build/default/arrayjit/lib/cuda_backend.ml
+++ b/_build/default/arrayjit/lib/.formatted/cuda_backend.ml
@@ -658,7 +658,7 @@ end) : Ir.Backend_impl.Lowered_backend = struct
let vec_unop_syntax prec op v =
let open PPrint in
match (op, prec) with
-      | Ops.Uint4x32_to_prec_uniform, _ ->
+      | Ops.Uint4x32_to_prec_uniform, _ ->
group (string ("uint4x32_to_" ^ Ops.prec_string prec ^ "_uniform_vec(") ^^ v ^^ rparen)


let ternop_syntax prec v =
@@ -689,7 +689,7 @@ end) : Ir.Backend_impl.Lowered_backend = struct
| Single_prec _, Half_prec _ -> ("__float2half(", ")")
| Byte_prec _, Half_prec _ -> ("__ushort2half_rn((unsigned short int)", ")")
| Double_prec _, Uint4x32_prec _ -> ("{(unsigned int)(", "), 0, 0, 0}")
-      | Single_prec _, Uint4x32_prec _ -> ("{(unsigned int)(", "), 0, 0, 0}")
+      | Single_prec _, Uint4x32_prec _ -> ("{(unsigned int)(", "), 0, 0, 0}")
| Int32_prec _, Uint4x32_prec _ -> ("{(unsigned int)(", "), 0, 0, 0}")
| Uint4x32_prec _, _ -> ("", ".v[0]")
| _, Uint4x32_prec _ -> ("{(unsigned int)(", "), 0, 0, 0}")
File "lib/tensor.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/tensor.ml b/_build/default/lib/.formatted/tensor.ml
index b4df5d4..79ee104 100644
--- a/_build/default/lib/tensor.ml
+++ b/_build/default/lib/.formatted/tensor.ml
@@ -477,8 +477,8 @@ let%track7_sexp term ?init_data ?fetch_op ?grad_spec ?(label = []) ?(top_down_pr
match fetch_op with
| None -> Asgns.empty_comp
| Some
-        (( Constant _ | Constant_bits _ | Slice _ | Embed_symbol _ | Embed_self_id | Range_over_offsets
-         | Constant_fill _ ) as fetch_op) ->
+        (( Constant _ | Constant_bits _ | Slice _ | Embed_symbol _ | Embed_self_id
+         | Range_over_offsets | Constant_fill _ ) as fetch_op) ->
Asgns.to_comp @@ Fetch { array = v; fetch_op; dims }
in
let grad_asn ~t:_ ~g:_ ~projections:_ = Asgns.empty_comp in
dune build @fmt failed
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)" failed with exit status 2
2025-08-14 18:25.25: Job failed: Failed: Build failed
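The job fails for two reasons visible above: the ppx libraries missing from the lint environment, and genuine formatting drift in the files whose diffs are shown. For the formatting drift, the usual local remediation is to let dune promote the formatted output (assuming ocamlformat 0.27.0 is installed in the local switch):

opam exec -- dune fmt
# equivalently: opam exec -- dune build @fmt --auto-promote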