(lint-fmt)


Logs

2025-08-09 12:30.05: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (d7bd41654ea41ccc1a937d78c3f134545769e220) (linux-x86_64:(lint-fmt))
Base: ocaml/opam:debian-12-ocaml-4.08@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a
ocamlformat version: 0.27.0 (from opam)


To reproduce locally:


git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard d7bd4165
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-4.08@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a
USER 1000:1000
RUN cd ~/opam-repository && (git cat-file -e b8021439f8c57ba6435bc2263f6596671f4f4466 || git fetch origin master) && git reset -q --hard b8021439f8c57ba6435bc2263f6596671f4f4466 && git log --no-decorate -n1 --oneline && opam update -u
RUN opam depext -i dune
WORKDIR /src
RUN opam depext -i ocamlformat=0.27.0
COPY --chown=1000:1000 . /src/
RUN opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)


END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK
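
To run the same check without the Docker wrapper (a sketch, not part of the CI output, assuming a local clone of the repository and an existing opam switch):

opam install ocamlformat.0.27.0                        # match the formatter version pinned by this job
opam exec -- dune build @fmt --ignore-promoted-rules   # the exact command the job runs
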


2025-08-09 12:30.05: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-4.08@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a-debian-12-4.08_opam-2.4-ocamlformat-b8021439f8c57ba6435bc2263f6596671f4f4466"
2025-08-09 12:30.05: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-4.08@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a)
(user (uid 1000) (gid 1000))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e b8021439f8c57ba6435bc2263f6596671f4f4466 || git fetch origin master) && git reset -q --hard b8021439f8c57ba6435bc2263f6596671f4f4466 && git log --no-decorate -n1 --oneline && opam update -u"))
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam depext -i dune"))
(workdir /src)
(run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam depext -i ocamlformat=0.27.0"))
(copy (src .) (dst /src/))
(run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)"))
)


2025-08-09 12:30.05: Waiting for resource in pool OCluster
2025-08-09 12:30.05: Waiting for worker…
2025-08-09 12:30.05: Got resource from pool OCluster
Building on eumache
All commits already cached
HEAD is now at d7bd4165 Top-down precision propagation, coding by Claude Opus


(from ocaml/opam:debian-12-ocaml-4.08@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a)
Unable to find image 'ocaml/opam:debian-12-ocaml-4.08@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a' locally
docker.io/ocaml/opam@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a: Pulling from ocaml/opam
2b437dab448b: Already exists
348b54b4f842: Already exists
d67c1d522a4e: Already exists
dea856d1a4d8: Already exists
b580a9d7b5b1: Already exists
Digest: sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a
Status: Downloaded newer image for ocaml/opam@sha256:474656ea1593a299054f8966c700443fa0944c9534de3da94ca6dfab4a44c47a
2025-08-09 12:30.06 ---> using "1d0024db739bd078f91b2384c47919652a4b72a425e3e24ce24cfd1f6debdfbc" from cache


/: (user (uid 1000) (gid 1000))


/: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "cd ~/opam-repository && (git cat-file -e b8021439f8c57ba6435bc2263f6596671f4f4466 || git fetch origin master) && git reset -q --hard b8021439f8c57ba6435bc2263f6596671f4f4466 && git log --no-decorate -n1 --oneline && opam update -u"))
b8021439f8 Merge pull request #28261 from kit-ty-kate/deploy-fix-25819


<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
default (at git+file:///home/opam/opam-repository):
[INFO] opam 2.1 and 2.2 include many performance and security improvements over 2.0; please consider upgrading (https://opam.ocaml.org/doc/Install.html)


Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# Run eval $(opam env) to update the current shell environment
2025-08-09 12:30.06 ---> using "76d9d96bb26da3c78200d383fd35f876d80571baf05962331a1fff5f47db0e2e" from cache


/: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam depext -i dune"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages
The following actions will be performed:
- install dune 3.19.1


<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[dune.3.19.1] found in cache


<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed dune.3.19.1
Done.
# Run eval $(opam env) to update the current shell environment
2025-08-09 12:30.06 ---> using "da0888a20a067de19f6183f0b497dcc2d1ea7f7036861cc50f633c662efdce4f" from cache


/: (workdir /src)


/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
(network host)
(shell "opam depext -i ocamlformat=0.27.0"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages
The following actions will be performed:
- install sexplib0          v0.14.0  [required by base]
- install menhirLib         20240715 [required by ocamlformat-lib]
- install menhirCST         20240715 [required by menhir]
- install ocamlbuild        0.16.1   [required by fpath, astring, uuseg]
- install cmdliner          1.3.0    [required by ocamlformat]
- install menhirSdk         20240715 [required by ocamlformat-lib]
- install either            1.0.0    [required by ocamlformat-lib]
- install csexp             1.5.2    [required by ocamlformat]
- install dune-build-info   3.19.1   [required by ocamlformat-lib]
- install camlp-streams     5.0.1    [required by ocamlformat-lib]
- install seq               base     [required by re]
- install fix               20250428 [required by ocamlformat-lib]
- install ocaml-version     4.0.1    [required by ocamlformat-lib]
- install ocamlfind         1.9.8    [required by ocp-indent, astring, fpath, uuseg]
- install menhir            20240715 [required by ocamlformat-lib]
- install dune-configurator 3.19.1   [required by base]
- install re                1.11.0   [required by ocamlformat]
- install topkg             1.1.0    [required by fpath, astring, uuseg]
- install base-bytes        base     [required by ocp-indent]
- install base              v0.14.3  [required by ocamlformat-lib]
- install uutf              1.0.4    [required by ocamlformat-lib]
- install astring           0.8.5    [required by ocamlformat-lib]
- install ocp-indent        1.8.1    [required by ocamlformat-lib]
- install stdio             v0.14.0  [required by ocamlformat-lib]
- install uucp              15.0.0   [required by uuseg]
- install fpath             0.7.3    [required by ocamlformat-lib]
- install uuseg             15.0.0   [required by ocamlformat-lib]
- install ocamlformat-lib   0.27.0   [required by ocamlformat]
- install ocamlformat       0.27.0
===== 29 to install =====


<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[astring.0.8.5] found in cache
[base.v0.14.3] found in cache
[camlp-streams.5.0.1] found in cache
[cmdliner.1.3.0] found in cache
[csexp.1.5.2] found in cache
[dune-build-info.3.19.1] found in cache
[dune-configurator.3.19.1] found in cache
[either.1.0.0] found in cache
[fix.20250428] found in cache
[fpath.0.7.3] found in cache
[menhir.20240715] found in cache
[menhirCST.20240715] found in cache
[menhirLib.20240715] found in cache
[menhirSdk.20240715] found in cache
[ocaml-version.4.0.1] found in cache
[ocamlbuild.0.16.1] found in cache
[ocamlfind.1.9.8] found in cache
[ocamlformat.0.27.0] found in cache
[ocamlformat-lib.0.27.0] found in cache
[ocp-indent.1.8.1] found in cache
[re.1.11.0] found in cache
[sexplib0.v0.14.0] found in cache
[stdio.v0.14.0] found in cache
[topkg.1.1.0] found in cache
[uucp.15.0.0] found in cache
[uuseg.15.0.0] found in cache
[uutf.1.0.4] found in cache


<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed seq.base
-> installed camlp-streams.5.0.1
-> installed csexp.1.5.2
-> installed either.1.0.0
-> installed fix.20250428
-> installed cmdliner.1.3.0
-> installed menhirCST.20240715
-> installed menhirLib.20240715
-> installed menhirSdk.20240715
-> installed ocaml-version.4.0.1
-> installed re.1.11.0
-> installed sexplib0.v0.14.0
-> installed dune-build-info.3.19.1
-> installed dune-configurator.3.19.1
-> installed ocamlfind.1.9.8
-> installed base-bytes.base
-> installed ocp-indent.1.8.1
-> installed ocamlbuild.0.16.1
-> installed base.v0.14.3
-> installed stdio.v0.14.0
-> installed topkg.1.1.0
-> installed uutf.1.0.4
-> installed astring.0.8.5
-> installed menhir.20240715
-> installed fpath.0.7.3
-> installed uucp.15.0.0
-> installed uuseg.15.0.0
-> installed ocamlformat-lib.0.27.0
-> installed ocamlformat.0.27.0
Done.


<><> ocp-indent.1.8.1 installed successfully ><><><><><><><><><><><><><><><><><>
=> This package requires additional configuration for use in editors. Install package 'user-setup', or manually:


* for Emacs, add these lines to ~/.emacs:
(add-to-list 'load-path "/home/opam/.opam/4.08/share/emacs/site-lisp")
(require 'ocp-indent)


* for Vim, add this line to ~/.vimrc:
set rtp^="/home/opam/.opam/4.08/share/ocp-indent/vim"
# Run eval $(opam env) to update the current shell environment
2025-08-09 12:30.06 ---> using "86ec8dcb8046a1e5dacfb1841e8c026d30cfead67649bcb6d6a8f9ddd6fb153d" from cache


/src: (copy (src .) (dst /src/))
2025-08-09 12:30.07 ---> saved as "8e5a2df231ce386f57b2d6659100ebb54c4e79cb8ded2ad679a130135311cc7c"


/src: (run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)"))
File "arrayjit/bin/dune", line 6, characters 30-43:
6 |   (pps ppx_here ppx_minidebug ppx_sexp_conv))
^^^^^^^^^^^^^
Error: Library "ppx_sexp_conv" not found.
-> required by _build/default/arrayjit/bin/read_config.exe
-> required by %{dep:../../../arrayjit/bin/read_config.exe} at
test/operations/dune:17
-> required by _build/default/test/operations/config/ocannl_backend.txt
-> required by %{read:config/ocannl_backend.txt} at test/operations/dune:33
-> required by Computing directory contents of _build/default/test/operations
File "datasets/dune", line 1, characters 0-0:
diff --git a/_build/default/datasets/dune b/_build/default/datasets/.formatted/dune
index 38209fc..51fd1fc 100644
--- a/_build/default/datasets/dune
+++ b/_build/default/datasets/.formatted/dune
@@ -7,8 +7,11 @@


(install
(package neural_nets_lib)
- (section (site (neural_nets_lib data)))
- (files (names.txt as names.txt)))
+ (section
+  (site
+   (neural_nets_lib data)))
+ (files
+  (names.txt as names.txt)))


(generate_sites_module
(module dataset_sites)
File "arrayjit/bin/dune", line 6, characters 7-15:
6 |   (pps ppx_here ppx_minidebug ppx_sexp_conv))
^^^^^^^^
Error: Library "ppx_here" not found.
-> required by _build/default/arrayjit/bin/.merlin-conf/exe-read_config
-> required by _build/default/arrayjit/bin/read_config.exe
-> required by %{dep:../../../arrayjit/bin/read_config.exe} at
test/operations/dune:17
-> required by _build/default/test/operations/config/ocannl_backend.txt
-> required by %{read:config/ocannl_backend.txt} at test/operations/dune:33
-> required by Computing directory contents of _build/default/test/operations
File "test/training/dune", line 1, characters 0-0:
diff --git a/_build/default/test/training/dune b/_build/default/test/training/.formatted/dune
index 680c825..abfc59d 100644
--- a/_build/default/test/training/dune
+++ b/_build/default/test/training/.formatted/dune
@@ -26,4 +26,4 @@
(deps ocannl_config ../../datasets/names.txt)
(libraries ocannl datasets)
(preprocess
-  (pps ppx_here ppx_ocannl)))
\ No newline at end of file
+  (pps ppx_here ppx_ocannl)))
File "arrayjit/lib/builtins_cuda_large.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/builtins_cuda_large.ml b/_build/default/arrayjit/lib/.formatted/builtins_cuda_large.ml
index 9f8e884..9a7add2 100644
--- a/_build/default/arrayjit/lib/builtins_cuda_large.ml
+++ b/_build/default/arrayjit/lib/.formatted/builtins_cuda_large.ml
@@ -1,4 +1,5 @@
-let source = {|
+let source =
+  {|
#include <cuda_runtime.h>
#include <stdint.h>


@@ -110,4 +111,4 @@ __device__ uint4x32_t arrayjit_threefry4x32_impl(uint4x32_t key, uint4x32_t coun


__device__ uint4x32_t ( *arrayjit_threefry4x32)(uint4x32_t key, uint4x32_t counter) = arrayjit_threefry4x32_impl;


-|}
\ No newline at end of file
+|}
File "arrayjit/lib/builtins_cuda_small.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/builtins_cuda_small.ml b/_build/default/arrayjit/lib/.formatted/builtins_cuda_small.ml
index 94b807a..f69f9fd 100644
--- a/_build/default/arrayjit/lib/builtins_cuda_small.ml
+++ b/_build/default/arrayjit/lib/.formatted/builtins_cuda_small.ml
@@ -1,4 +1,5 @@
-let source = {|
+let source =
+  {|
typedef struct {
uint32_t v[4];
} uint4x32_t;
@@ -182,4 +183,4 @@ __device__ uint8x16_t uint4x32_to_u8_uniform_vec(uint4x32_t x) {
}
return result;
}
-|}
\ No newline at end of file
+|}
File "arrayjit/lib/builtins_metal.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/builtins_metal.ml b/_build/default/arrayjit/lib/.formatted/builtins_metal.ml
index ae0e1b1..e87f3a5 100644
--- a/_build/default/arrayjit/lib/builtins_metal.ml
+++ b/_build/default/arrayjit/lib/.formatted/builtins_metal.ml
@@ -1,4 +1,5 @@
-let source = {|
+let source =
+  {|
#include <metal_stdlib>
using namespace metal;


@@ -369,4 +370,4 @@ uint4 half_to_uint4x32(uint16_t x) {
uint4 fp8_to_uint4x32(uint8_t x) {
return uint4(uint32_t(x), 0, 0, 0);
}
-|}
\ No newline at end of file
+|}
File "arrayjit/lib/task.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/task.ml b/_build/default/arrayjit/lib/.formatted/task.ml
index cbc6764..cb456f8 100644
--- a/_build/default/arrayjit/lib/task.ml
+++ b/_build/default/arrayjit/lib/.formatted/task.ml
@@ -36,8 +36,7 @@ let append ~work (Task task) =
work ());
}


-let enschedule ~schedule_task ~get_stream_name stream (Task { description; _ } as task)
-    =
+let enschedule ~schedule_task ~get_stream_name stream (Task { description; _ } as task) =
(* [%log_result "enschedule", description, "on", get_stream_name stream]; *)
let work () = schedule_task stream task in
Task
File "datasets/names.ml", line 1, characters 0-0:
diff --git a/_build/default/datasets/names.ml b/_build/default/datasets/.formatted/names.ml
index 3d9f30c..4314c95 100644
--- a/_build/default/datasets/names.ml
+++ b/_build/default/datasets/.formatted/names.ml
@@ -2,31 +2,38 @@ open Base
open Stdio


(* Access the site locations to find names.txt *)
-let read_names () =
+let read_names () =
let data_locations : string list = Dataset_sites.Sites.data in
let names_file = "names.txt" in
let rec find_file = function
| [] -> None
| dir :: rest ->
let filepath = Stdlib.Filename.concat dir names_file in
-        if Stdlib.Sys.file_exists filepath then Some filepath
-        else find_file rest
+        if Stdlib.Sys.file_exists filepath then Some filepath else find_file rest
in
-  let filepath = match find_file data_locations with
+  let filepath =
+    match find_file data_locations with
| Some path -> path
| None ->
(* Fallback for testing: try to find the file in common locations *)
-        let fallback_paths = [
-          names_file;  (* current directory *)
-          "../../datasets/names.txt";  (* from test/training/ *)
-          "../datasets/names.txt";     (* from test/ *)
-          "datasets/names.txt";        (* from project root *)
-        ] in
+        let fallback_paths =
+          [
+            names_file;
+            (* current directory *)
+            "../../datasets/names.txt";
+            (* from test/training/ *)
+            "../datasets/names.txt";
+            (* from test/ *)
+            "datasets/names.txt";
+            (* from project root *)
+          ]
+        in
let rec try_fallbacks = function
-          | [] -> failwith (Printf.sprintf "Could not find %s in any location (sites: %s)" names_file (String.concat ~sep:"; " data_locations))
-          | path :: rest ->
-              if Stdlib.Sys.file_exists path then path
-              else try_fallbacks rest
+          | [] ->
+              failwith
+                (Printf.sprintf "Could not find %s in any location (sites: %s)" names_file
+                   (String.concat ~sep:"; " data_locations))
+          | path :: rest -> if Stdlib.Sys.file_exists path then path else try_fallbacks rest
in
try_fallbacks fallback_paths
in
@@ -63,4 +70,4 @@ let char_to_one_hot c =
let c_index = char_index c in
let arr = Array.create ~len:dict_size 0. in
arr.(c_index) <- 1.;
-  arr
\ No newline at end of file
+  arr
File "datasets/half_moons.ml", line 1, characters 0-0:
diff --git a/_build/default/datasets/half_moons.ml b/_build/default/datasets/.formatted/half_moons.ml
index dea5c70..2256b9e 100644
--- a/_build/default/datasets/half_moons.ml
+++ b/_build/default/datasets/.formatted/half_moons.ml
@@ -51,7 +51,7 @@ let generate_with_kind kind ?(config = Config.default) ~len () =
let c = Float.cos v and s = Float.sin v in
let x = 1.0 -. c +. noise () in
let y = 0.5 -. s +. noise () in
-    let idx = i * 2 + 1 in
+    let idx = (i * 2) + 1 in
Genarray.set coordinates [| idx; 0 |] x;
Genarray.set coordinates [| idx; 1 |] y;
Genarray.set labels [| idx; 0 |] (-1.0)
File "test/training/bigram.ml", line 1, characters 0-0:
diff --git a/_build/default/test/training/bigram.ml b/_build/default/test/training/.formatted/bigram.ml
index 239b59a..dee9735 100644
--- a/_build/default/test/training/bigram.ml
+++ b/_build/default/test/training/.formatted/bigram.ml
@@ -83,7 +83,10 @@ let () =


let counter_n, bindings = IDX.get_static_symbol IDX.empty in
let%cd infer_probs = mlp "cha" in
-  let%cd infer_step = infer_probs.forward; "dice" =: uniform_at !@counter_n in
+  let%cd infer_step =
+    infer_probs.forward;
+    "dice" =: uniform_at !@counter_n
+  in
Train.set_on_host infer_probs.value;
let infer_step = Train.to_routine (module Backend) sgd_step.context bindings infer_step in
let counter_ref = IDX.find_exn infer_step.bindings counter_n in
File "lib/tensor.mli", line 1, characters 0-0:
diff --git a/_build/default/lib/tensor.mli b/_build/default/lib/.formatted/tensor.mli
index 86c3c0b..209ec6d 100644
--- a/_build/default/lib/tensor.mli
+++ b/_build/default/lib/.formatted/tensor.mli
@@ -134,7 +134,11 @@ type param_op_fun =
t


type op_fun =
-  ?label:string list -> ?batch_dims:int list -> ?batch_axes:(string * int) list -> ?top_down_prec:bool -> param_op_fun
+  ?label:string list ->
+  ?batch_dims:int list ->
+  ?batch_axes:(string * int) list ->
+  ?top_down_prec:bool ->
+  param_op_fun


val binop :
?compose_op:Shape.compose_type ->
@@ -191,8 +195,7 @@ val ndarray : ?grad_spec:grad_spec -> float array -> op_fun
given values must fill the tensor's [value] node precisely; otherwise, the values will be looped
over to populate the [value] node. *)


-val param :
-  t:op_fun -> string -> ?more_label:string list -> param_op_fun
+val param : t:op_fun -> string -> ?more_label:string list -> param_op_fun
(** For proper parameters, [t] should produce a tensor with no batch axes; input and output axes
should by default be inferred; [grad_spec] should be [Require_grad]. [t]'s label is the passed
string, appended by [more_label] if any, other parameters are forwarded to [t]. This function
File "test/training/moons_demo.ml", line 1, characters 0-0:
diff --git a/_build/default/test/training/moons_demo.ml b/_build/default/test/training/.formatted/moons_demo.ml
index 06554bb..2a10c7a 100644
--- a/_build/default/test/training/moons_demo.ml
+++ b/_build/default/test/training/.formatted/moons_demo.ml
@@ -29,7 +29,7 @@ let main () =
let step_n, bindings = IDX.get_static_symbol bindings in
let moons_flat = TDSL.rebatch ~l:"moons_flat" moons_flat_ndarray () in
let moons_classes = TDSL.rebatch ~l:"moons_classes" moons_classes_ndarray () in
-  let%op mlp x = ("w3" * relu ("b2" 16 + ("w2" * relu ("b1" 16 + ("w1" * x))))) in
+  let%op mlp x = "w3" * relu ("b2" 16 + ("w2" * relu ("b1" 16 + ("w1" * x)))) in
(* Don't decay the learning rate too quickly, it behaves better than in the original. *)
let%op moons_input = moons_flat @| batch_n in
let%op moons_class = moons_classes @| batch_n in
File "arrayjit/lib/assignments.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/assignments.ml b/_build/default/arrayjit/lib/.formatted/assignments.ml
index c8d6476..ddfafe2 100644
--- a/_build/default/arrayjit/lib/assignments.ml
+++ b/_build/default/arrayjit/lib/.formatted/assignments.ml
@@ -111,10 +111,11 @@ let%debug3_sexp context_nodes ~(use_host_memory : 'a option) (asgns : t) : Tn.t_
| Seq (t1, t2) -> loop t1 + loop t2
| Block_comment (_, t) -> loop t
| Accum_op { lhs; rhs; _ } ->
-        let rhses = match rhs with
-        | Unop { rhs; _ } -> [ of_node rhs ]
-        | Binop { rhs1; rhs2; _ } -> [ of_node rhs1; of_node rhs2 ]
-        | Ternop { rhs1; rhs2; rhs3; _ } -> [ of_node rhs1; of_node rhs2; of_node rhs3 ]
+        let rhses =
+          match rhs with
+          | Unop { rhs; _ } -> [ of_node rhs ]
+          | Binop { rhs1; rhs2; _ } -> [ of_node rhs1; of_node rhs2 ]
+          | Ternop { rhs1; rhs2; rhs3; _ } -> [ of_node rhs1; of_node rhs2; of_node rhs3 ]
in
Set.union_list (module Tn) (one lhs :: rhses)
| Set_vec_unop { lhs; rhs; _ } -> Set.union (one lhs) (of_node rhs)
@@ -136,10 +137,11 @@ let%debug3_sexp guess_output_nodes (asgns : t) : Tn.t_set =
(i1 + i2, o1 + o2 - (i1 + i2))
| Block_comment (_, t) -> loop t
| Accum_op { lhs; rhs; _ } ->
-        let inputs = match rhs with
-        | Unop { rhs; _ } -> of_node rhs
-        | Binop { rhs1; rhs2; _ } -> of_node rhs1 + of_node rhs2
-        | Ternop { rhs1; rhs2; rhs3; _ } -> of_node rhs1 + of_node rhs2 + of_node rhs3
+        let inputs =
+          match rhs with
+          | Unop { rhs; _ } -> of_node rhs
+          | Binop { rhs1; rhs2; _ } -> of_node rhs1 + of_node rhs2
+          | Ternop { rhs1; rhs2; rhs3; _ } -> of_node rhs1 + of_node rhs2 + of_node rhs3
in
(inputs, one lhs)
| Set_vec_unop { lhs; rhs; _ } -> (of_node rhs, one lhs)
@@ -250,10 +252,11 @@ let%track4_sexp to_low_level code =
and loop (code : t) : Low_level.t =
match code with
| Accum_op { initialize_neutral; accum; lhs; rhs; projections; _ } ->
-        let op, rhses = match rhs with
-        | Unop { op; rhs } -> (Ops.Unop op, [| rhs |])
-        | Binop { op; rhs1; rhs2 } -> (Ops.Binop op, [| rhs1; rhs2 |])
-        | Ternop { op; rhs1; rhs2; rhs3 } -> (Ops.Ternop op, [| rhs1; rhs2; rhs3 |])
+        let op, rhses =
+          match rhs with
+          | Unop { op; rhs } -> (Ops.Unop op, [| rhs |])
+          | Binop { op; rhs1; rhs2 } -> (Ops.Binop op, [| rhs1; rhs2 |])
+          | Ternop { op; rhs1; rhs2; rhs3 } -> (Ops.Ternop op, [| rhs1; rhs2; rhs3 |])
in
loop_accum ~initialize_neutral ~accum ~op ~lhs ~rhses projections
| Set_vec_unop { op; lhs; rhs; projections; _ } ->
@@ -390,13 +393,15 @@ let get_ident_within_code ?no_dots c =
loop c2
| Block_comment (_, c) -> loop c
| Accum_op { lhs; rhs; _ } ->
-        let rhses = match rhs with
-        | Unop { rhs; _ } -> [ tn rhs ]
-        | Binop { rhs1; rhs2; _ } -> [ tn rhs1; tn rhs2 ]
-        | Ternop { rhs1; rhs2; rhs3; _ } -> [ tn rhs1; tn rhs2; tn rhs3 ]
+        let rhses =
+          match rhs with
+          | Unop { rhs; _ } -> [ tn rhs ]
+          | Binop { rhs1; rhs2; _ } -> [ tn rhs1; tn rhs2 ]
+          | Ternop { rhs1; rhs2; rhs3; _ } -> [ tn rhs1; tn rhs2; tn rhs3 ]
in
List.iter ~f:visit (lhs :: rhses)
-    | Set_vec_unop { op = _; lhs; rhs; projections = _; projections_debug = _ } -> List.iter ~f:visit [ lhs; tn rhs ]
+    | Set_vec_unop { op = _; lhs; rhs; projections = _; projections_debug = _ } ->
+        List.iter ~f:visit [ lhs; tn rhs ]
| Fetch { array; fetch_op = _; dims = _ } -> visit array
in
loop c;
@@ -437,9 +442,9 @@ let to_doc ?name ?static_indices () c =
| Seq (c1, c2) -> doc_of_code c1 ^^ doc_of_code c2
| Block_comment (s, Noop) -> string ("# \"" ^ s ^ "\";") ^^ break 1
| Block_comment (s, c) -> string ("# \"" ^ s ^ "\";") ^^ break 1 ^^ doc_of_code c
-    | Accum_op { initialize_neutral; accum; lhs; rhs; projections_debug; _ } ->
+    | Accum_op { initialize_neutral; accum; lhs; rhs; projections_debug; _ } -> (
let proj_spec = projections_debug in
-        (match rhs with
+        match rhs with
| Ternop { op; rhs1; rhs2; rhs3 } ->
(* Uncurried syntax for ternary operations. *)
string (ident lhs)
@@ -485,8 +490,7 @@ let to_doc ?name ?static_indices () c =
else empty)
^^ string ";" ^^ break 1)
| Set_vec_unop { op; lhs; rhs; projections = _; projections_debug } ->
-        let proj_spec = projections_debug
-        in
+        let proj_spec = projections_debug in
string (ident lhs)
^^ space
^^ string (Ops.assign_op_cd_syntax ~initialize_neutral:false Arg2)
File "arrayjit/lib/ops.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/ops.ml b/_build/default/arrayjit/lib/.formatted/ops.ml
index 8c41fed..54fd554 100644
--- a/_build/default/arrayjit/lib/ops.ml
+++ b/_build/default/arrayjit/lib/.formatted/ops.ml
@@ -586,7 +586,7 @@ let unop_c_syntax prec op =
| Not, _ -> ("(", " == 0.0 ? 1.0 : 0.0)")


let vec_unop_c_syntax prec op =
-  match op, prec with
+  match (op, prec) with
| Uint4x32_to_prec_uniform, Uint4x32_prec _ ->
invalid_arg "Ops.vec_unop_c_syntax: Uint4x32_to_prec_uniform not supported for Uint4x32"
| Uint4x32_to_prec_uniform, _ -> ("uint4x32_to_" ^ prec_string prec ^ "_uniform_vec(", ")")
File "lib/operation.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/operation.ml b/_build/default/lib/.formatted/operation.ml
index 4593ee2..707be0f 100644
--- a/_build/default/lib/operation.ml
+++ b/_build/default/lib/.formatted/operation.ml
@@ -559,12 +559,34 @@ module TDSL = struct
let t =
match (value, values) with
| Some _, Some _ -> invalid_arg "TDSL.param: both value and values are set"
-      | Some value, None ->
-          fun ?label ?batch_dims ?batch_axes ?top_down_prec ?input_dims ?output_dims ?input_axes ?output_axes ?deduced () ->
-            Tensor.term_init ~grad_spec:Require_grad [| value |] ?label ?batch_dims ?batch_axes ?top_down_prec ?input_dims ?output_dims ?input_axes ?output_axes ?deduced ()
-      | None, Some values ->
-          fun ?label ?batch_dims ?batch_axes ?top_down_prec ?input_dims ?output_dims ?input_axes ?output_axes ?deduced () ->
-            Tensor.term_init ~grad_spec:Require_grad values ?label ?batch_dims ?batch_axes ?top_down_prec ?input_dims ?output_dims ?input_axes ?output_axes ?deduced ()
+      | Some value, None ->
+          fun ?label
+            ?batch_dims
+            ?batch_axes
+            ?top_down_prec
+            ?input_dims
+            ?output_dims
+            ?input_axes
+            ?output_axes
+            ?deduced
+            ()
+          ->
+            Tensor.term_init ~grad_spec:Require_grad [| value |] ?label ?batch_dims ?batch_axes
+              ?top_down_prec ?input_dims ?output_dims ?input_axes ?output_axes ?deduced ()
+      | None, Some values ->
+          fun ?label
+            ?batch_dims
+            ?batch_axes
+            ?top_down_prec
+            ?input_dims
+            ?output_dims
+            ?input_axes
+            ?output_axes
+            ?deduced
+            ()
+          ->
+            Tensor.term_init ~grad_spec:Require_grad values ?label ?batch_dims ?batch_axes
+              ?top_down_prec ?input_dims ?output_dims ?input_axes ?output_axes ?deduced ()
| None, None -> !default_param_init ()
in
Tensor.param ~t
@@ -583,17 +605,21 @@ module TDSL = struct


(** The input and output dimensions will be inferred if omitted. See {!reshape}. *)
let reshape_param ~l ?i ?o ndarray =
-    let t ?label ?batch_dims ?batch_axes ?top_down_prec ?input_dims ?output_dims ?input_axes ?output_axes ?deduced () =
-      Tensor.term ~grad_spec:Require_grad ~init_data:(Reshape ndarray) ?fetch_op:None
-        ?label ?batch_dims ?batch_axes ?top_down_prec ?input_dims ?output_dims ?input_axes ?output_axes ?deduced ()
+    let t ?label ?batch_dims ?batch_axes ?top_down_prec ?input_dims ?output_dims ?input_axes
+        ?output_axes ?deduced () =
+      Tensor.term ~grad_spec:Require_grad ~init_data:(Reshape ndarray) ?fetch_op:None ?label
+        ?batch_dims ?batch_axes ?top_down_prec ?input_dims ?output_dims ?input_axes ?output_axes
+        ?deduced ()
in
Tensor.param ?input_dims:i ?output_dims:o ~t l


(** See {!wrap}. *)
let wrap_param ~l ?i ?o ndarray =
-    let t ?label ?batch_dims ?batch_axes ?top_down_prec ?input_dims ?output_dims ?input_axes ?output_axes ?deduced () =
+    let t ?label ?batch_dims ?batch_axes ?top_down_prec ?input_dims ?output_dims ?input_axes
+        ?output_axes ?deduced () =
Tensor.term ~grad_spec:Require_grad ~init_data:(Keep_shape_no_padding ndarray) ?fetch_op:None
-        ?label ?batch_dims ?batch_axes ?top_down_prec ?input_dims ?output_dims ?input_axes ?output_axes ?deduced ()
+        ?label ?batch_dims ?batch_axes ?top_down_prec ?input_dims ?output_dims ?input_axes
+        ?output_axes ?deduced ()
in
Tensor.param ?input_dims:i ?output_dims:o ~t l
end
File "arrayjit/lib/tnode.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/tnode.ml b/_build/default/arrayjit/lib/.formatted/tnode.ml
index 9407957..dc1d3d4 100644
--- a/_build/default/arrayjit/lib/tnode.ml
+++ b/_build/default/arrayjit/lib/.formatted/tnode.ml
@@ -432,9 +432,7 @@ let update_infer_prec tn delayed_prec =
| Default_spec old_prec ->
(* Combine with existing default precision via promotion *)
tn.delayed_prec_unsafe <-
-          Default_spec
-            (lazy
-               (Ops.promote_prec (Lazy.force old_prec) (Lazy.force delayed_prec)))
+          Default_spec (lazy (Ops.promote_prec (Lazy.force old_prec) (Lazy.force delayed_prec)))


let exceeds_fp16_cutoff tn c =
match Utils.settings.check_half_prec_constants_cutoff with
File "arrayjit/lib/low_level.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/low_level.ml b/_build/default/arrayjit/lib/.formatted/low_level.ml
index 0359a76..79f5802 100644
--- a/_build/default/arrayjit/lib/low_level.ml
+++ b/_build/default/arrayjit/lib/.formatted/low_level.ml
@@ -1152,12 +1152,13 @@ let get_ident_within_code ?no_dots ?(blacklist = []) llcs =
let to_doc_cstyle ?name ?static_indices () llc =
let ident_label = get_ident_within_code [| llc |] in
let open PPrint in
-  let doc_ident la =
+  let doc_ident la =
let base = string (ident_label la) in
if Utils.get_global_flag ~default:false ~arg_name:"output_prec_in_ll_files" then
let prec_str = Ops.prec_string (Lazy.force la.prec) in
base ^^ string ("<" ^ prec_str ^ ">")
-    else base in
+    else base
+  in
let doc_local { tn; scope_id } = string ("v" ^ Int.to_string scope_id ^ "_") ^^ doc_ident tn in


let rec doc_of_code c =
@@ -1249,12 +1250,13 @@ let to_doc_cstyle ?name ?static_indices () llc =
let to_doc ?name ?static_indices () llc =
let ident_label = get_ident_within_code [| llc |] in
let open PPrint in
-  let doc_ident la =
+  let doc_ident la =
let base = string (ident_label la) in
if Utils.get_global_flag ~default:false ~arg_name:"output_prec_in_ll_files" then
let prec_str = Ops.prec_string (Lazy.force la.prec) in
base ^^ string ("<" ^ prec_str ^ ">")
-    else base in
+    else base
+  in
let doc_local { tn; scope_id } = string ("v" ^ Int.to_string scope_id ^ "_") ^^ doc_ident tn in


let rec doc_of_code c =
File "lib/row.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/row.ml b/_build/default/lib/.formatted/row.ml
index 2f4a9f4..864b2bb 100644
--- a/_build/default/lib/row.ml
+++ b/_build/default/lib/.formatted/row.ml
@@ -896,7 +896,8 @@ let subst_row_constraint_impl ~subst_in_dim ~get_dim_val stage constr =
when is_stage2_up stage && Option.is_some (get_dim_val var) ->
if Utils.settings.debug_log_from_routines then
Stdio.printf "Line 895 case: stage=%s, is_stage2_up=%b\n"
-          (Sexp.to_string @@ sexp_of_stage stage) (is_stage2_up stage);
+          (Sexp.to_string @@ sexp_of_stage stage)
+          (is_stage2_up stage);
let dim = Option.value_exn (get_dim_val var) in
let tot = Utils.safe_force coeff * dim in
reapply_rows_constr := true;
@@ -914,7 +915,7 @@ let subst_row_constraint_impl ~subst_in_dim ~get_dim_val stage constr =
match subst_in_dim (Var var) with
| Dim { d; _ } as value when is_stage2_up stage ->
(* Stage 2+: Replace (coeff * v / denom) with (coeff * d / denom) *)
-         let new_num = Utils.safe_force coeff * d in
+          let new_num = Utils.safe_force coeff * d in
if new_num % denom = 0 then
Total_elems { numerator = Num_elems (new_num / denom); divided_by }
else
dune build @fmt failed
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)" failed with exit status 2
2025-08-09 12:30.08: Job failed: Failed: Build failed
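
Two kinds of problems are visible in the output above: dune cannot find ppx_here and ppx_sexp_conv (the job image installs only dune and ocamlformat, while test/operations/dune reads the output of arrayjit/bin/read_config.exe, whose dune file lists those preprocessors), and ocamlformat 0.27.0 proposes the reformattings shown in the diffs. A minimal local fix sketch, assuming the repository's opam files declare the missing libraries as dependencies:

opam install . --deps-only --with-test   # make ppx_here and ppx_sexp_conv available in the switch
opam exec -- dune fmt                    # equivalent to `dune build @fmt --auto-promote`; applies the .formatted diffs above

Once the promoted formatting changes are committed, the @fmt check should pass.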