2025-07-14 12:11.53: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (51bc961c154b76725fef1377b47cbc6ea33ffc85) (linux-x86_64:(lint-fmt))
Base: ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e
ocamlformat version: version 0.27.0 (from opam)

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 51bc961c
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e
USER 1000:1000
RUN cd ~/opam-repository && (git cat-file -e 0eea63ad71af2b1116c556023bedc6bf083e6125 || git fetch origin master) && git reset -q --hard 0eea63ad71af2b1116c556023bedc6bf083e6125 && git log --no-decorate -n1 --oneline && opam update -u
RUN opam depext -i dune
WORKDIR /src
RUN opam depext -i ocamlformat=0.27.0
COPY --chown=1000:1000 . /src/
RUN opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK

2025-07-14 12:11.53: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e-debian-12-4.08_opam-2.3-ocamlformat-0eea63ad71af2b1116c556023bedc6bf083e6125"
2025-07-14 12:11.53: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e)
 (user (uid 1000) (gid 1000))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "cd ~/opam-repository && (git cat-file -e 0eea63ad71af2b1116c556023bedc6bf083e6125 || git fetch origin master) && git reset -q --hard 0eea63ad71af2b1116c556023bedc6bf083e6125 && git log --no-decorate -n1 --oneline && opam update -u"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam depext -i dune"))
 (workdir /src)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam depext -i ocamlformat=0.27.0"))
 (copy (src .) (dst /src/))
 (run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)"))
)

2025-07-14 12:11.53: Waiting for resource in pool OCluster
2025-07-14 12:11.53: Waiting for worker…
2025-07-14 12:11.53: Got resource from pool OCluster
Building on toxis.caelum.ci.dev
HEAD is now at 89301cec Missing from the previous commit
HEAD is now at 51bc961c Postpone setting dim variables to their lower bounds till the very end.

(from ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e)
2025-07-14 12:11.54 ---> using "4ea5038d254cfd14663698deb665a2dc4ce1e1383d544c063adebb02ed15ce16" from cache

/: (user (uid 1000) (gid 1000))

/: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
        (network host)
        (shell "cd ~/opam-repository && (git cat-file -e 0eea63ad71af2b1116c556023bedc6bf083e6125 || git fetch origin master) && git reset -q --hard 0eea63ad71af2b1116c556023bedc6bf083e6125 && git log --no-decorate -n1 --oneline && opam update -u"))
0eea63ad71 Merge pull request #27946 from mtelvers/opam-publish-ocaml-version.4.0.1

<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
default (at git+file:///home/opam/opam-repository):
[INFO] opam 2.1 and 2.2 include many performance and security improvements over 2.0; please consider upgrading (https://opam.ocaml.org/doc/Install.html)
Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# Run eval $(opam env) to update the current shell environment
2025-07-14 12:11.54 ---> using "ed14c208b709d50e26e290f8f669e0b0b6a2456751ee5b87b29d71221f0a9fc1" from cache

/: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
        (network host)
        (shell "opam depext -i dune"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages
The following actions will be performed:
- install dune 3.19.1

<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[dune.3.19.1] found in cache

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed dune.3.19.1
Done.
# Run eval $(opam env) to update the current shell environment
2025-07-14 12:11.54 ---> using "249f8284e2625ce869f4c794eb6ff1eab40d82e0af762bb312d97ad81bf8d4f1" from cache

/: (workdir /src)

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
           (network host)
           (shell "opam depext -i ocamlformat=0.27.0"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages
The following actions will be performed:
- install sexplib0 v0.14.0 [required by base]
- install cmdliner 1.3.0 [required by ocamlformat]
- install menhirLib 20240715 [required by ocamlformat-lib]
- install menhirCST 20240715 [required by menhir]
- install ocamlbuild 0.16.1 [required by fpath, astring, uuseg]
- install dune-build-info 3.19.1 [required by ocamlformat-lib]
- install menhirSdk 20240715 [required by ocamlformat-lib]
- install either 1.0.0 [required by ocamlformat-lib]
- install ocaml-version 4.0.1 [required by ocamlformat-lib]
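The commands below are a lighter-weight sketch for running the same formatting check locally without Docker. They assume an existing opam switch with dune available; only the repository URL, commit, and ocamlformat version come from the job configuration above, everything else is an assumption rather than part of this job.

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard 51bc961c
# Match the formatter version used by CI (0.27.0, per the job header).
opam install ocamlformat.0.27.0
# Same alias and flag as the Dockerfile's final RUN step.
opam exec -- dune build @fmt --ignore-promoted-rules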
- install camlp-streams 5.0.1 [required by ocamlformat-lib]
- install csexp 1.5.2 [required by ocamlformat]
- install seq base [required by re]
- install fix 20250428 [required by ocamlformat-lib]
- install ocamlfind 1.9.8 [required by ocp-indent, astring, fpath, uuseg]
- install menhir 20240715 [required by ocamlformat-lib]
- install dune-configurator 3.19.1 [required by base]
- install re 1.11.0 [required by ocamlformat]
- install topkg 1.0.8 [required by fpath, astring, uuseg]
- install base-bytes base [required by ocp-indent]
- install base v0.14.3 [required by ocamlformat-lib]
- install uutf 1.0.4 [required by ocamlformat-lib]
- install astring 0.8.5 [required by ocamlformat-lib]
- install ocp-indent 1.8.1 [required by ocamlformat-lib]
- install stdio v0.14.0 [required by ocamlformat-lib]
- install uucp 15.0.0 [required by uuseg]
- install fpath 0.7.3 [required by ocamlformat-lib]
- install uuseg 15.0.0 [required by ocamlformat-lib]
- install ocamlformat-lib 0.27.0 [required by ocamlformat]
- install ocamlformat 0.27.0
===== 29 to install =====

<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[astring.0.8.5] found in cache
[base.v0.14.3] found in cache
[camlp-streams.5.0.1] found in cache
[cmdliner.1.3.0] found in cache
[csexp.1.5.2] found in cache
[dune-build-info.3.19.1] found in cache
[dune-configurator.3.19.1] found in cache
[either.1.0.0] found in cache
[fix.20250428] found in cache
[fpath.0.7.3] found in cache
[menhir.20240715] found in cache
[menhirCST.20240715] found in cache
[menhirLib.20240715] found in cache
[menhirSdk.20240715] found in cache
[ocaml-version.4.0.1] found in cache
[ocamlbuild.0.16.1] found in cache
[ocamlfind.1.9.8] found in cache
[ocamlformat.0.27.0] found in cache
[ocamlformat-lib.0.27.0] found in cache
[ocp-indent.1.8.1] found in cache
[re.1.11.0] found in cache
[sexplib0.v0.14.0] found in cache
[stdio.v0.14.0] found in cache
[topkg.1.0.8] found in cache
[uucp.15.0.0] found in cache
[uuseg.15.0.0] found in cache
[uutf.1.0.4] found in cache

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed seq.base
-> installed camlp-streams.5.0.1
-> installed csexp.1.5.2
-> installed either.1.0.0
-> installed fix.20250428
-> installed cmdliner.1.3.0
-> installed menhirCST.20240715
-> installed menhirLib.20240715
-> installed menhirSdk.20240715
-> installed ocaml-version.4.0.1
-> installed re.1.11.0
-> installed sexplib0.v0.14.0
-> installed dune-build-info.3.19.1
-> installed dune-configurator.3.19.1
-> installed ocamlfind.1.9.8
-> installed base-bytes.base
-> installed ocp-indent.1.8.1
-> installed ocamlbuild.0.16.1
-> installed base.v0.14.3
-> installed stdio.v0.14.0
-> installed topkg.1.0.8
-> installed uutf.1.0.4
-> installed astring.0.8.5
-> installed fpath.0.7.3
-> installed menhir.20240715
-> installed uucp.15.0.0
-> installed uuseg.15.0.0
-> installed ocamlformat-lib.0.27.0
-> installed ocamlformat.0.27.0
Done.

<><> ocp-indent.1.8.1 installed successfully ><><><><><><><><><><><><><><><><><>
=> This package requires additional configuration for use in editors. Install package 'user-setup', or manually:

   * for Emacs, add these lines to ~/.emacs:
     (add-to-list 'load-path "/home/opam/.opam/4.08/share/emacs/site-lisp")
     (require 'ocp-indent)

   * for Vim, add this line to ~/.vimrc:
     set rtp^="/home/opam/.opam/4.08/share/ocp-indent/vim"

# Run eval $(opam env) to update the current shell environment
2025-07-14 12:11.54 ---> using "7fd5d4ac1bb4f93065934b08bd6aad3989793fd62a884f257d9f7ac1e147ec1a" from cache
/src: (copy (src .)
            (dst /src/))
2025-07-14 12:11.54 ---> saved as "e14b3a0f93d63efb4af4c1a51d55dbeb451278881f80e51859029c50adc0f2c7"

/src: (run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)"))
File "arrayjit/bin/dune", line 6, characters 30-43:
6 |   (pps ppx_here ppx_minidebug ppx_sexp_conv))
                                  ^^^^^^^^^^^^^
Error: Library "ppx_sexp_conv" not found.
-> required by _build/default/arrayjit/bin/read_config.exe
-> required by %{dep:../../../arrayjit/bin/read_config.exe} at test/operations/dune:17
-> required by _build/default/test/operations/config/ocannl_backend.txt
-> required by %{read:config/ocannl_backend.txt} at test/operations/dune:33
-> required by Computing directory contents of _build/default/test/operations
File "arrayjit/bin/dune", line 6, characters 7-15:
6 |   (pps ppx_here ppx_minidebug ppx_sexp_conv))
           ^^^^^^^^
Error: Library "ppx_here" not found.
-> required by _build/default/arrayjit/bin/.merlin-conf/exe-read_config
-> required by _build/default/arrayjit/bin/read_config.exe
-> required by %{dep:../../../arrayjit/bin/read_config.exe} at test/operations/dune:17
-> required by _build/default/test/operations/config/ocannl_backend.txt
-> required by %{read:config/ocannl_backend.txt} at test/operations/dune:33
-> required by Computing directory contents of _build/default/test/operations
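The two "Library ... not found" errors are consistent with the environment built earlier in this log: only dune and ocamlformat are installed, while arrayjit/bin/dune asks for ppx preprocessors and the @fmt alias ends up depending on read_config.exe through test/operations/dune (see the "required by" chains). A possible local workaround, not part of this job and assuming the packages resolve on the current switch, is to install the missing preprocessors before re-running the check:

# Assumed fix for a local run: provide the preprocessors named in arrayjit/bin/dune.
opam install ppx_here ppx_minidebug ppx_sexp_conv
opam exec -- dune build @fmt --ignore-promoted-rules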
File "lib/row.mli", line 1, characters 0-0:
diff --git a/_build/default/lib/row.mli b/_build/default/lib/.formatted/row.mli
index b57358a..7d504b5 100644
--- a/_build/default/lib/row.mli
+++ b/_build/default/lib/.formatted/row.mli
@@ -86,9 +86,9 @@ type total_elems =
 type row_constraint =
   | Unconstrained
   | Total_elems of { numerator : total_elems; divided_by : dim_var list }
-      (** The rows, inclusive of the further row spec, have this many elements.
-          The total is numerator / (product of divided_by variables).
-          divided_by has multiset semantics - the same variable can appear multiple times. *)
+      (** The rows, inclusive of the further row spec, have this many elements. The total is
+          numerator / (product of divided_by variables). divided_by has multiset semantics - the
+          same variable can appear multiple times. *)
   | Exact of dim list  (** The concatenated rows have these axes. *)
 [@@deriving equal, hash, compare, sexp_of]
File "datasets/half_moons.ml", line 1, characters 0-0:
diff --git a/_build/default/datasets/half_moons.ml b/_build/default/datasets/.formatted/half_moons.ml
index cd6bcd3..b40461a 100644
--- a/_build/default/datasets/half_moons.ml
+++ b/_build/default/datasets/.formatted/half_moons.ml
@@ -6,38 +6,33 @@ open Bigarray
 module Config = struct
   type t = {
     noise_range : float;  (** Range of noise to add to the coordinates *)
-    seed : int option; (** Optional random seed for reproducibility *)
+    seed : int option;  (** Optional random seed for reproducibility *)
   }
 
-  let default = {
-    noise_range = 0.1;
-    seed = None;
-  }
+  let default = { noise_range = 0.1; seed = None }
 end
 
 (** Internal helper function to generate half moons with specified precision.
-
+
    @param kind The bigarray kind (float32 or float64)
    @param config Configuration for noise and randomization
    @param len Number of samples per moon (total samples = len * 2)
-   @return A tuple of (coordinates, labels) where:
-   - coordinates is a bigarray of shape [len*2; 2] (batch_axis, output_axis)
-   - labels is a bigarray of shape [len*2; 1] (batch_axis, output_axis)
-   - First moon has label 1.0, second moon has label -1.0
-*)
+    @return
+      A tuple of (coordinates, labels) where:
+      - coordinates is a bigarray of shape [len*2; 2] (batch_axis, output_axis)
+      - labels is a bigarray of shape [len*2; 1] (batch_axis, output_axis)
+      - First moon has label 1.0, second moon has label -1.0 *)
 let generate_with_kind kind ?(config = Config.default) ~len () =
   (* Initialize random seed if specified *)
-  (match config.seed with
-  | Some seed -> Random.init seed
-  | None -> ());
+  (match config.seed with Some seed -> Random.init seed | None -> ());
   let noise () = Random.float (2.0 *. config.noise_range) -. config.noise_range in
   let total_samples = len * 2 in
-
+
   (* Create bigarrays with batch axis first, then output axis *)
   let coordinates = Genarray.create kind c_layout [| total_samples; 2 |] in
   let labels = Genarray.create kind c_layout [| total_samples; 1 |] in
-
+
   (* Generate first moon (label = 1.0) *)
   for i = 0 to len - 1 do
     let v = Float.of_int i *. Float.pi /. Float.of_int len in
@@ -46,9 +41,9 @@ let generate_with_kind kind ?(config = Config.default) ~len () =
     let y = s +. noise () in
     Genarray.set coordinates [| i; 0 |] x;
     Genarray.set coordinates [| i; 1 |] y;
-    Genarray.set labels [| i; 0 |] 1.0;
+    Genarray.set labels [| i; 0 |] 1.0
   done;
-
+
   (* Generate second moon (label = -1.0) *)
   for i = 0 to len - 1 do
     let v = Float.of_int i *. Float.pi /. Float.of_int len in
@@ -58,50 +53,51 @@ let generate_with_kind kind ?(config = Config.default) ~len () =
     let idx = len + i in
     Genarray.set coordinates [| idx; 0 |] x;
     Genarray.set coordinates [| idx; 1 |] y;
-    Genarray.set labels [| idx; 0 |] (-1.0);
+    Genarray.set labels [| idx; 0 |] (-1.0)
   done;
-
+
   (coordinates, labels)
 
 (** Generate the half moons dataset with the specified parameters.
-
+
    @param config Configuration for noise and randomization
    @param len Number of samples per moon (total samples = len * 2)
-   @return A tuple of (coordinates, labels) where:
-   - coordinates is a bigarray of shape [len*2; 2] (batch_axis, output_axis)
-   - labels is a bigarray of shape [len*2; 1] (batch_axis, output_axis)
-   - First moon has label 1.0, second moon has label -1.0
-*)
-let generate ?(config = Config.default) ~len () =
-  generate_with_kind float64 ~config ~len ()
+    @return
+      A tuple of (coordinates, labels) where:
+      - coordinates is a bigarray of shape [len*2; 2] (batch_axis, output_axis)
+      - labels is a bigarray of shape [len*2; 1] (batch_axis, output_axis)
+      - First moon has label 1.0, second moon has label -1.0 *)
+let generate ?(config = Config.default) ~len () = generate_with_kind float64 ~config ~len ()
 
 (** Generate the half moons dataset with single precision floats.
-
+
    @param config Configuration for noise and randomization
    @param len Number of samples per moon (total samples = len * 2)
-   @return A tuple of (coordinates, labels) where:
-   - coordinates is a bigarray of shape [len*2; 2] (batch_axis, output_axis) with float32 elements
-   - labels is a bigarray of shape [len*2; 1] (batch_axis, output_axis) with float32 elements
-   - First moon has label 1.0, second moon has label -1.0
-*)
+    @return
+      A tuple of (coordinates, labels) where:
+      - coordinates is a bigarray of shape [len*2; 2] (batch_axis, output_axis) with float32
+        elements
+      - labels is a bigarray of shape [len*2; 1] (batch_axis, output_axis) with float32 elements
+      - First moon has label 1.0, second moon has label -1.0 *)
 let generate_single_prec ?(config = Config.default) ~len () =
   generate_with_kind float32 ~config ~len ()
 
-(** Generate half moons dataset using the old array-based approach for compatibility.
-    This function is deprecated and provided for backwards compatibility.
-
+(** Generate half moons dataset using the old array-based approach for compatibility. This function
+    is deprecated and provided for backwards compatibility.
+
    @param len Number of samples per moon
-   @param noise_range Range of noise to add
-   @return A tuple of (coordinates_array, labels_array) as flat arrays
-*)
+    @param noise_range Range of noise to add
+    @return A tuple of (coordinates_array, labels_array) as flat arrays *)
 let generate_arrays ?(noise_range = 0.1) ~len () =
   let noise () = Random.float (2.0 *. noise_range) -. noise_range in
-  let coordinates =
-    Array.concat (Array.to_list (Array.init len (fun _ ->
-      let i = Random.int len in
-      let v = Float.of_int i *. Float.pi /. Float.of_int len in
-      let c = Float.cos v and s = Float.sin v in
-      [| c +. noise (); s +. noise (); 1.0 -. c +. noise (); 0.5 -. s +. noise () |])))
+  let coordinates =
+    Array.concat
+      (Array.to_list
+         (Array.init len (fun _ ->
+              let i = Random.int len in
+              let v = Float.of_int i *. Float.pi /. Float.of_int len in
+              let c = Float.cos v and s = Float.sin v in
+              [| c +. noise (); s +. noise (); 1.0 -. c +. noise (); 0.5 -. s +. noise () |])))
   in
   let labels = Array.init (len * 2) (fun i -> if i mod 2 = 0 then 1. else -1.) in
-  (coordinates, labels)
\ No newline at end of file
+  (coordinates, labels)
File "bin/primitive_ops.ml", line 1, characters 0-0:
diff --git a/_build/default/bin/primitive_ops.ml b/_build/default/bin/.formatted/primitive_ops.ml
index 52f4c78..d04550f 100644
--- a/_build/default/bin/primitive_ops.ml
+++ b/_build/default/bin/.formatted/primitive_ops.ml
@@ -26,9 +26,7 @@ let%debug_sexp graph_t () : unit =
   let size = 50 in
   let xs = Array.init size ~f:Float.(fun i -> (of_int i / 10.) + 0.1) in
   let x_flat =
-    Tensor.term ~grad_spec:Require_grad ~label:[ "x_flat" ]
-      ~fetch_op:(Constant_fill xs)
-      ()
+    Tensor.term ~grad_spec:Require_grad ~label:[ "x_flat" ] ~fetch_op:(Constant_fill xs) ()
   in
   let step_sym, bindings = IDX.get_static_symbol ~static_range:size IDX.empty in
   let%op xkcd = x_flat @| step_sym in
File "bin/moons_benchmark.ml", line 1, characters 0-0:
diff --git a/_build/default/bin/moons_benchmark.ml b/_build/default/bin/.formatted/moons_benchmark.ml
index 31d245e..f0fb1af 100644
--- a/_build/default/bin/moons_benchmark.ml
+++ b/_build/default/bin/.formatted/moons_benchmark.ml
@@ -57,7 +57,9 @@ let classify_moons ~seed ~on_device ~inlining_cutoff ~num_streams ~batch_size ~b
   (* let init_lr = 0.1 in *)
   let init_lr = 0.01 in
   let moons_config = Datasets.Half_moons.Config.{ noise_range = 0.1; seed = Some seed } in
-  let moons_coordinates, moons_labels = Datasets.Half_moons.generate ~config:moons_config ~len:flat_len () in
+  let moons_coordinates, moons_labels =
+    Datasets.Half_moons.generate ~config:moons_config ~len:flat_len ()
+  in
   let moons_flat_ndarray = Ir.Ndarray.as_array Ir.Ops.Double moons_coordinates in
   let moons_classes_ndarray = Ir.Ndarray.as_array Ir.Ops.Double moons_labels in
   let moons_flat ~b:_ = TDSL.rebatch ~l:"moons_flat" moons_flat_ndarray in
@@ -83,14 +85,14 @@ let classify_moons ~seed ~on_device ~inlining_cutoff ~num_streams ~batch_size ~b
   @@ Backend.get_global_debug_info ();
   let per_batch_callback ~at_batch ~at_step ~learning_rate ~batch_loss ~epoch_loss =
     Stdio.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n%!" at_batch at_step
-     learning_rate batch_loss epoch_loss;
+      learning_rate batch_loss epoch_loss;
     if Option.is_none !start_time then start_time := Some (Time_now.nanoseconds_since_unix_epoch ())
   in
   (* Tn.print_accessible_headers (); *)
   let per_epoch_callback ~at_step ~at_epoch ~learning_rate ~epoch_loss =
     (* if at_epoch % 10 = 9 then *)
-    Stdio.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n%!" at_epoch at_step learning_rate
-     epoch_loss
+    Stdio.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n%!" at_epoch at_step learning_rate
+      epoch_loss
   in
   let {
File "bin/zero2hero_1of7.ml", line 1, characters 0-0:
diff --git a/_build/default/bin/zero2hero_1of7.ml b/_build/default/bin/.formatted/zero2hero_1of7.ml
index 4212f12..babcfc2 100644
--- a/_build/default/bin/zero2hero_1of7.ml
+++ b/_build/default/bin/.formatted/zero2hero_1of7.ml
@@ -57,8 +57,7 @@ let _suspended () =
   let x_flat =
     Tensor.term ~grad_spec:Tensor.Require_grad
      ~label:[ "x_flat" ] (* ~input_dims:[] ~output_dims:[ 1 ] *)
-      ~fetch_op:(Constant_fill values)
-      ()
+      ~fetch_op:(Constant_fill values) ()
   in
   let step_sym, bindings = IDX.get_static_symbol ~static_range:size IDX.empty in
   (* The [let x =] line is the same as this except [let%op x =] uses [~grad_spec:If_needed]. *)
@@ -110,9 +109,7 @@ let _suspended () =
   let xs = Array.init size ~f:Float.(fun i -> (of_int i / 10.) - 5.) in
   (* Yay, the whole shape gets inferred! *)
   let x_flat =
-    Tensor.term ~grad_spec:Require_grad ~label:[ "x_flat" ]
-      ~fetch_op:(Constant_fill xs)
-      ()
+    Tensor.term ~grad_spec:Require_grad ~label:[ "x_flat" ] ~fetch_op:(Constant_fill xs) ()
   in
   let step_sym, bindings = IDX.get_static_symbol ~static_range:size IDX.empty in
   let%op x = x_flat @| step_sym in
File "arrayjit/lib/assignments.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/assignments.ml b/_build/default/arrayjit/lib/.formatted/assignments.ml
index b7df201..a1de0e3 100644
--- a/_build/default/arrayjit/lib/assignments.ml
+++ b/_build/default/arrayjit/lib/.formatted/assignments.ml
@@ -171,7 +171,7 @@ let%diagn2_sexp to_low_level code =
         assert (Array.length idcs = Array.length (Lazy.force tn.Tn.dims));
         match buffer with
         | Node tn -> Low_level.Get (tn, idcs)
-        | Merge_buffer tn -> 
+        | Merge_buffer tn ->
            (* FIXME: NOT IMPLEMENTED YET - need to handle merge buffer access differently now *)
            Low_level.Get (tn, idcs)
   in
@@ -267,7 +267,6 @@ let%diagn2_sexp to_low_level code =
   | Fetch { array; fetch_op = Embed_symbol s; dims } ->
      Low_level.loop_over_dims (Lazy.force dims) ~body:(fun idcs ->
          set array idcs @@ Embed_index (Iterator s.static_symbol))
-
   | Fetch { array; fetch_op = Range_over_offsets; dims = (lazy dims) } ->
      Low_level.loop_over_dims dims ~body:(fun idcs ->
          let offset = Indexing.reflect_projection ~dims ~projection:idcs in
File "lib/ppx_op.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/ppx_op.ml b/_build/default/lib/.formatted/ppx_op.ml
index fe5d727..24aa423 100644
--- a/_build/default/lib/ppx_op.ml
+++ b/_build/default/lib/.formatted/ppx_op.ml
@@ -21,7 +21,7 @@ let make_p ~has_config ~loc =
 
 let make_vb ?value ~has_config ~loc ~str_loc ~ident string =
   let pat = Ast_helper.Pat.var ~loc { loc = str_loc; txt = ident } in
-  let value = match value with Some c -> [%expr Some [%e c] ] | None -> [%expr None] in
+  let value = match value with Some c -> [%expr Some [%e c]] | None -> [%expr None] in
   let v = [%expr [%e make_p ~has_config ~loc] ?value:[%e value] [%e string]] in
   let vb = Ast_helper.Vb.mk ~loc pat v in
   (pat, vb)
File "lib/operation.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/operation.ml b/_build/default/lib/.formatted/operation.ml
index bb5851b..eb36e4c 100644
--- a/_build/default/lib/operation.ml
+++ b/_build/default/lib/.formatted/operation.ml
@@ -447,8 +447,8 @@ end
     omitted. Note: the data should have no padding and if padding is inferred, the data will be
     copied; otherwise, the resulting tensor value shares host memory with the ndarray. *)
 let reshape ~l ?b ?(i = []) ?o ndarray =
-  Tensor.term ~label:[ l ] ?batch_dims:b ~input_dims:i ?output_dims:o ~init_data:(Asgns.Reshape ndarray)
-    ()
+  Tensor.term ~label:[ l ] ?batch_dims:b ~input_dims:i ?output_dims:o
+    ~init_data:(Asgns.Reshape ndarray) ()
 
 (** The dimensions are taken from the provided ndarray, but the split into axis kinds still needs to
     be inferred (or provided). Assumes no padding. See also: {!reshape} and {!TDSL.wrap_param}. *)
*)File "arrayjit/lib/metal_backend.ml", line 1, characters 0-0:diff --git a/_build/default/arrayjit/lib/metal_backend.ml b/_build/default/arrayjit/lib/.formatted/metal_backend.mlindex 5bad5b7..5eb7e2f 100644--- a/_build/default/arrayjit/lib/metal_backend.ml+++ b/_build/default/arrayjit/lib/.formatted/metal_backend.ml@@ -526,8 +526,8 @@ end) : Ir.Backend_impl.Lowered_backend = struct^^ string ("0.0" ^ s)))| ToPowOf, _ -> func "pow"| Threefry4x32, _ ->- (* FIXME: NOT IMPLEMENTED YET *)- func "threefry4x32" (* Metal implementation of Threefry4x32 *)+ (* FIXME: NOT IMPLEMENTED YET *)+ func "threefry4x32" (* Metal implementation of Threefry4x32 *)| Arg1, _ | Arg2, _ -> invalid_arg "Metal C_syntax_config: Arg1/Arg2 not operators"let unop_syntax prec op =@@ -559,7 +559,11 @@ end) : Ir.Backend_impl.Lowered_backend = struct| Not, _ -> fun v -> string "!" ^^ v| Uint4x32_to_prec_uniform target_prec, _ ->(* FIXME: NOT IMPLEMENTED YET - placeholder for Uint4x32_to_prec_uniform conversion *)- fun _v -> string ("/* FIXME: uint4x32_to_" ^ Ops.prec_string target_prec ^ "_uniform */ (0.0" ^ metal_prec_suffix_float target_prec ^ ")")+ fun _v ->+ string+ ("/* FIXME: uint4x32_to_" ^ Ops.prec_string target_prec ^ "_uniform */ (0.0"+ ^ metal_prec_suffix_float target_prec+ ^ ")")(* Logical not *)let convert_precision ~from ~to_ =File "arrayjit/lib/c_syntax.ml", line 1, characters 0-0:diff --git a/_build/default/arrayjit/lib/c_syntax.ml b/_build/default/arrayjit/lib/.formatted/c_syntax.mlindex 776da2e..722f427 100644--- a/_build/default/arrayjit/lib/c_syntax.ml+++ b/_build/default/arrayjit/lib/.formatted/c_syntax.ml@@ -646,9 +646,9 @@ module C_syntax (B : C_syntax_config) = struct(defs, expr)| Unop (Ops.Uint4x32_to_prec_uniform target_prec, v) ->let defs, expr_v = pp_float prec v in- let expr =- string ("uint4x32_to_" ^ Ops.prec_string target_prec ^ "_uniform(") ^^- expr_v ^^ string ")"+ let expr =+ string ("uint4x32_to_" ^ Ops.prec_string target_prec ^ "_uniform(")+ ^^ expr_v ^^ string ")"in(defs, expr)| Unop (op, v) ->@@ -720,9 +720,9 @@ module C_syntax (B : C_syntax_config) = struct(B.binop_syntax prec op v1_doc v2_doc, idcs1 @ idcs2)| Unop (Ops.Uint4x32_to_prec_uniform target_prec, v) ->let v_doc, idcs = debug_float prec v in- let expr_doc =- string ("uint4x32_to_" ^ Ops.prec_string target_prec ^ "_uniform(") ^^- v_doc ^^ string "){=" ^^ string B.float_log_style ^^ string "}"+ let expr_doc =+ string ("uint4x32_to_" ^ Ops.prec_string target_prec ^ "_uniform(")+ ^^ v_doc ^^ string "){=" ^^ string B.float_log_style ^^ string "}"in(expr_doc, idcs)| Unop (op, v) ->File "arrayjit/lib/low_level.ml", line 1, characters 0-0:diff --git a/_build/default/arrayjit/lib/low_level.ml b/_build/default/arrayjit/lib/.formatted/low_level.mlindex e36eeb6..18d0be0 100644--- a/_build/default/arrayjit/lib/low_level.ml+++ b/_build/default/arrayjit/lib/.formatted/low_level.ml@@ -158,7 +158,6 @@ let is_constexpr_comp traced_store llv =| Get_local { tn; _ } | Local_scope { id = { tn; _ }; _ } ->let traced = get_node traced_store tn intraced.is_scalar_constexpr-| Get (tn, _) ->let traced = get_node traced_store tn intraced.is_scalar_constexprdune build @fmt failed"/usr/bin/env" "bash" "-c" "opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)" failed with exit status 22025-07-14 12:11.56: Job failed: Failed: Build failed