2025-07-05 20:26.13: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/master (ad9a53e26542273c4da93ac063ee677b3e88e41e) (linux-x86_64:(lint-fmt))
Base: ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e
ocamlformat version: version 0.27.0 (from opam)

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl" && git reset --hard ad9a53e2
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e
USER 1000:1000
RUN cd ~/opam-repository && (git cat-file -e 0eea63ad71af2b1116c556023bedc6bf083e6125 || git fetch origin master) && git reset -q --hard 0eea63ad71af2b1116c556023bedc6bf083e6125 && git log --no-decorate -n1 --oneline && opam update -u
RUN opam depext -i dune
WORKDIR /src
RUN opam depext -i ocamlformat=0.27.0
COPY --chown=1000:1000 . /src/
RUN opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK

2025-07-05 20:26.13: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e-debian-12-4.08_opam-2.3-ocamlformat-0eea63ad71af2b1116c556023bedc6bf083e6125"

2025-07-05 20:26.13: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e)
 (user (uid 1000) (gid 1000))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
      (shell "cd ~/opam-repository && (git cat-file -e 0eea63ad71af2b1116c556023bedc6bf083e6125 || git fetch origin master) && git reset -q --hard 0eea63ad71af2b1116c556023bedc6bf083e6125 && git log --no-decorate -n1 --oneline && opam update -u"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
      (shell "opam depext -i dune"))
 (workdir /src)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
      (shell "opam depext -i ocamlformat=0.27.0"))
 (copy (src .) (dst /src/))
 (run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)"))
)

2025-07-05 20:26.13: Waiting for resource in pool OCluster
2025-07-05 20:26.13: Waiting for worker…
2025-07-05 20:26.14: Got resource from pool OCluster
Building on asteria.caelum.ci.dev
All commits already cached
HEAD is now at ad9a53e2 Big refactoring: Uint4x32_to_prec_uniform moves from a fetch op to a proper unary op (Ops) with dedicated shape and projections inference support (once done); getting rid of the remaining dedicated_access fetch ops with a migration of Merge_buffer to a stand-alone Get_merge_buffer variant in float_t (Low_level); and better consistency with the new terminal_type (Shape).
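The reproduction block above targets Docker. The same check can also be run directly in an opam switch; the following is a sketch assembled from the commands in this log (installing the formatter via opam and reusing an existing switch are assumptions, not part of the CI recipe):

  # Sketch: run the lint-fmt check without Docker (assumes an existing opam switch).
  git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "master" && cd "ocannl"
  git reset --hard ad9a53e2
  opam install ocamlformat.0.27.0   # match the formatter version used by the job
  opam exec -- dune build @fmt --ignore-promoted-rules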
(from ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e)
Unable to find image 'ocaml/opam:debian-12-ocaml-4.08@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e' locally
docker.io/ocaml/opam@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e: Pulling from ocaml/opam
8a97734bcf6d: Pulling fs layer
8a97734bcf6d: Verifying Checksum
8a97734bcf6d: Download complete
8a97734bcf6d: Pull complete
Digest: sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e
Status: Downloaded newer image for ocaml/opam@sha256:cc4b148b1fa1916574df02fcec0956fedbec6798bedacd9bfd4417c1c098ce8e
2025-07-05 20:26.15 ---> using "4ea5038d254cfd14663698deb665a2dc4ce1e1383d544c063adebb02ed15ce16" from cache

/: (user (uid 1000) (gid 1000))
/: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
        (shell "cd ~/opam-repository && (git cat-file -e 0eea63ad71af2b1116c556023bedc6bf083e6125 || git fetch origin master) && git reset -q --hard 0eea63ad71af2b1116c556023bedc6bf083e6125 && git log --no-decorate -n1 --oneline && opam update -u"))
0eea63ad71 Merge pull request #27946 from mtelvers/opam-publish-ocaml-version.4.0.1

<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
default (at git+file:///home/opam/opam-repository):
    [INFO] opam 2.1 and 2.2 include many performance and security improvements over 2.0; please consider upgrading (https://opam.ocaml.org/doc/Install.html)

Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# Run eval $(opam env) to update the current shell environment
2025-07-05 20:26.15 ---> using "ed14c208b709d50e26e290f8f669e0b0b6a2456751ee5b87b29d71221f0a9fc1" from cache

/: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
        (shell "opam depext -i dune"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages

The following actions will be performed:
  - install dune 3.19.1

<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[dune.3.19.1] found in cache

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed dune.3.19.1
Done.
# Run eval $(opam env) to update the current shell environment
2025-07-05 20:26.15 ---> using "249f8284e2625ce869f4c794eb6ff1eab40d82e0af762bb312d97ad81bf8d4f1" from cache

/: (workdir /src)
/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host)
           (shell "opam depext -i ocamlformat=0.27.0"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages

The following actions will be performed:
  - install sexplib0 v0.14.0            [required by base]
  - install cmdliner 1.3.0              [required by ocamlformat]
  - install menhirLib 20240715          [required by ocamlformat-lib]
  - install menhirCST 20240715          [required by menhir]
  - install ocamlbuild 0.16.1           [required by fpath, astring, uuseg]
  - install dune-build-info 3.19.1      [required by ocamlformat-lib]
  - install menhirSdk 20240715          [required by ocamlformat-lib]
  - install either 1.0.0                [required by ocamlformat-lib]
  - install ocaml-version 4.0.1         [required by ocamlformat-lib]
  - install camlp-streams 5.0.1         [required by ocamlformat-lib]
  - install csexp 1.5.2                 [required by ocamlformat]
  - install seq base                    [required by re]
  - install fix 20250428                [required by ocamlformat-lib]
  - install ocamlfind 1.9.8             [required by ocp-indent, astring, fpath, uuseg]
  - install menhir 20240715             [required by ocamlformat-lib]
  - install dune-configurator 3.19.1    [required by base]
  - install re 1.11.0                   [required by ocamlformat]
  - install topkg 1.0.8                 [required by fpath, astring, uuseg]
  - install base-bytes base             [required by ocp-indent]
  - install base v0.14.3                [required by ocamlformat-lib]
  - install uutf 1.0.4                  [required by ocamlformat-lib]
  - install astring 0.8.5               [required by ocamlformat-lib]
  - install ocp-indent 1.8.1            [required by ocamlformat-lib]
  - install stdio v0.14.0               [required by ocamlformat-lib]
  - install uucp 15.0.0                 [required by uuseg]
  - install fpath 0.7.3                 [required by ocamlformat-lib]
  - install uuseg 15.0.0                [required by ocamlformat-lib]
  - install ocamlformat-lib 0.27.0      [required by ocamlformat]
  - install ocamlformat 0.27.0
===== 29 to install =====

<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[astring.0.8.5] found in cache
[base.v0.14.3] found in cache
[camlp-streams.5.0.1] found in cache
[cmdliner.1.3.0] found in cache
[csexp.1.5.2] found in cache
[dune-build-info.3.19.1] found in cache
[dune-configurator.3.19.1] found in cache
[either.1.0.0] found in cache
[fix.20250428] found in cache
[fpath.0.7.3] found in cache
[menhir.20240715] found in cache
[menhirCST.20240715] found in cache
[menhirLib.20240715] found in cache
[menhirSdk.20240715] found in cache
[ocaml-version.4.0.1] found in cache
[ocamlbuild.0.16.1] found in cache
[ocamlfind.1.9.8] found in cache
[ocamlformat.0.27.0] found in cache
[ocamlformat-lib.0.27.0] found in cache
[ocp-indent.1.8.1] found in cache
[re.1.11.0] found in cache
[sexplib0.v0.14.0] found in cache
[stdio.v0.14.0] found in cache
[topkg.1.0.8] found in cache
[uucp.15.0.0] found in cache
[uuseg.15.0.0] found in cache
[uutf.1.0.4] found in cache

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed seq.base
-> installed camlp-streams.5.0.1
-> installed csexp.1.5.2
-> installed cmdliner.1.3.0
-> installed either.1.0.0
-> installed fix.20250428
-> installed menhirCST.20240715
-> installed menhirLib.20240715
-> installed menhirSdk.20240715
-> installed ocaml-version.4.0.1
-> installed re.1.11.0
-> installed sexplib0.v0.14.0
-> installed dune-build-info.3.19.1
-> installed dune-configurator.3.19.1
-> installed ocamlfind.1.9.8
-> installed base-bytes.base
-> installed ocamlbuild.0.16.1
-> installed ocp-indent.1.8.1
-> installed base.v0.14.3
-> installed topkg.1.0.8
-> installed stdio.v0.14.0
-> installed uutf.1.0.4
-> installed astring.0.8.5
-> installed fpath.0.7.3
-> installed menhir.20240715
-> installed uucp.15.0.0
-> installed uuseg.15.0.0
-> installed ocamlformat-lib.0.27.0
-> installed ocamlformat.0.27.0
Done.
<><> ocp-indent.1.8.1 installed successfully ><><><><><><><><><><><><><><><><><>
=> This package requires additional configuration for use in editors. Install package 'user-setup', or manually:
   * for Emacs, add these lines to ~/.emacs:
     (add-to-list 'load-path "/home/opam/.opam/4.08/share/emacs/site-lisp")
     (require 'ocp-indent)
   * for Vim, add this line to ~/.vimrc:
     set rtp^="/home/opam/.opam/4.08/share/ocp-indent/vim"

# Run eval $(opam env) to update the current shell environment
2025-07-05 20:26.15 ---> using "7fd5d4ac1bb4f93065934b08bd6aad3989793fd62a884f257d9f7ac1e147ec1a" from cache

/src: (copy (src .) (dst /src/))
2025-07-05 20:26.15 ---> saved as "0af969811e6d3c4ce3b2ef987ee16ad2b5cfdebfc925a793f76351d41cb3e136"

/src: (run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)"))
File "arrayjit/bin/dune", line 6, characters 21-34:
6 |   (pps ppx_minidebug ppx_sexp_conv))
                          ^^^^^^^^^^^^^
Error: Library "ppx_sexp_conv" not found.
-> required by _build/default/arrayjit/bin/read_config.exe
-> required by %{dep:../../arrayjit/bin/read_config.exe} at test/dune:25
-> required by _build/default/test/config/ocannl_backend.txt
-> required by %{read:config/ocannl_backend.txt} at test/dune:44
-> required by Computing directory contents of _build/default/test
File "arrayjit/bin/dune", line 6, characters 7-20:
6 |   (pps ppx_minidebug ppx_sexp_conv))
            ^^^^^^^^^^^^^
Error: Library "ppx_minidebug" not found.
-> required by _build/default/arrayjit/bin/.merlin-conf/exe-read_config
-> required by _build/default/arrayjit/bin/read_config.exe
-> required by %{dep:../../arrayjit/bin/read_config.exe} at test/dune:25
-> required by _build/default/test/config/ocannl_backend.txt
-> required by %{read:config/ocannl_backend.txt} at test/dune:44
-> required by Computing directory contents of _build/default/test
File "datasets/half_moons.ml", line 1, characters 0-0:
diff --git a/_build/default/datasets/half_moons.ml b/_build/default/datasets/.formatted/half_moons.ml
index 5e62df2..d860921 100644
--- a/_build/default/datasets/half_moons.ml
+++ b/_build/default/datasets/.formatted/half_moons.ml
@@ -6,37 +6,32 @@ open Bigarray
 module Config = struct
   type t = {
     noise_range : float;  (** Range of noise to add to the coordinates *)
-    seed : int option; (** Optional random seed for reproducibility *)
+    seed : int option;  (** Optional random seed for reproducibility *)
   }
 
-  let default = {
-    noise_range = 0.1;
-    seed = None;
-  }
+  let default = { noise_range = 0.1; seed = None }
 end
 
 (** Generate the half moons dataset with the specified parameters.
-
+
     @param config Configuration for noise and randomization
     @param len Number of samples per moon (total samples = len * 2)
-    @return A tuple of (coordinates, labels) where:
-    - coordinates is a bigarray of shape [len*2; 2] (batch_axis, output_axis)
-    - labels is a bigarray of shape [len*2; 1] (batch_axis, output_axis)
-    - First moon has label 1.0, second moon has label -1.0
-*)
+    @return
+      A tuple of (coordinates, labels) where:
+      - coordinates is a bigarray of shape [len*2; 2] (batch_axis, output_axis)
+      - labels is a bigarray of shape [len*2; 1] (batch_axis, output_axis)
+      - First moon has label 1.0, second moon has label -1.0 *)
 let generate ?(config = Config.default) ~len () =
   (* Initialize random seed if specified *)
-  (match config.seed with
-  | Some seed -> Random.init seed
-  | None -> ());
+  (match config.seed with Some seed -> Random.init seed | None -> ());
   let noise () = Random.float (2.0 *. config.noise_range) -. config.noise_range in
   let total_samples = len * 2 in
-
+
   (* Create bigarrays with batch axis first, then output axis *)
   let coordinates = Genarray.create float64 c_layout [| total_samples; 2 |] in
   let labels = Genarray.create float64 c_layout [| total_samples; 1 |] in
-
+
   (* Generate first moon (label = 1.0) *)
   for i = 0 to len - 1 do
     let v = Float.of_int i *. Float.pi /. Float.of_int len in
@@ -45,9 +40,9 @@ let generate ?(config = Config.default) ~len () =
     let y = s +. noise () in
     Genarray.set coordinates [| i; 0 |] x;
     Genarray.set coordinates [| i; 1 |] y;
-    Genarray.set labels [| i; 0 |] 1.0;
+    Genarray.set labels [| i; 0 |] 1.0
   done;
-
+
   (* Generate second moon (label = -1.0) *)
   for i = 0 to len - 1 do
     let v = Float.of_int i *. Float.pi /. Float.of_int len in
@@ -57,26 +52,27 @@ let generate ?(config = Config.default) ~len () =
     let idx = len + i in
     Genarray.set coordinates [| idx; 0 |] x;
     Genarray.set coordinates [| idx; 1 |] y;
-    Genarray.set labels [| idx; 0 |] (-1.0);
+    Genarray.set labels [| idx; 0 |] (-1.0)
  done;
-
+
   (coordinates, labels)
 
-(** Generate half moons dataset using the old array-based approach for compatibility.
-    This function is deprecated and provided for backwards compatibility.
-
+(** Generate half moons dataset using the old array-based approach for compatibility. This function
+    is deprecated and provided for backwards compatibility.
+
     @param len Number of samples per moon
-    @param noise_range Range of noise to add
-    @return A tuple of (coordinates_array, labels_array) as flat arrays
-*)
+    @param noise_range Range of noise to add
+    @return A tuple of (coordinates_array, labels_array) as flat arrays *)
 let generate_arrays ?(noise_range = 0.1) ~len () =
   let noise () = Random.float (2.0 *. noise_range) -. noise_range in
-  let coordinates =
-    Array.concat (Array.to_list (Array.init len (fun _ ->
-      let i = Random.int len in
-      let v = Float.of_int i *. Float.pi /. Float.of_int len in
-      let c = Float.cos v and s = Float.sin v in
-      [| c +. noise (); s +. noise (); 1.0 -. c +. noise (); 0.5 -. s +. noise () |])))
+  let coordinates =
+    Array.concat
+      (Array.to_list
+         (Array.init len (fun _ ->
+              let i = Random.int len in
+              let v = Float.of_int i *. Float.pi /. Float.of_int len in
+              let c = Float.cos v and s = Float.sin v in
+              [| c +. noise (); s +. noise (); 1.0 -. c +. noise (); 0.5 -. s +. noise () |])))
   in
   let labels = Array.init (len * 2) (fun i -> if i mod 2 = 0 then 1. else -1.) in
-  (coordinates, labels)
\ No newline at end of file
+  (coordinates, labels)
File "lib/row.mli", line 1, characters 0-0:
diff --git a/_build/default/lib/row.mli b/_build/default/lib/.formatted/row.mli
index 5920258..fa3dec9 100644
--- a/_build/default/lib/row.mli
+++ b/_build/default/lib/.formatted/row.mli
@@ -81,8 +81,7 @@ type row_constraint =
   | Unconstrained
   | Total_elems of { nominator : int; divided_by : dim_var_set }
       (** The rows, inclusive of the further row spec, have this many elements. *)
-  | Exact of dim list
-      (** The concatenated rows have these axes. *)
+  | Exact of dim list  (** The concatenated rows have these axes. *)
 [@@deriving equal, hash, compare, sexp, variants]
 
 (** An entry implements inequalities [cur >= v >= subr] and/or an equality [v = solved]. [cur] and
File "bin/primitive_ops.ml", line 1, characters 0-0:
diff --git a/_build/default/bin/primitive_ops.ml b/_build/default/bin/.formatted/primitive_ops.ml
index dbe51e3..59f2f35 100644
--- a/_build/default/bin/primitive_ops.ml
+++ b/_build/default/bin/.formatted/primitive_ops.ml
@@ -26,9 +26,7 @@ let%debug_sexp graph_t () : unit =
   let size = 50 in
   let xs = Array.init size ~f:Float.(fun i -> (of_int i / 10.) + 0.1) in
   let x_flat =
-    Tensor.term ~grad_spec:Require_grad ~label:[ "x_flat" ]
-      ~fetch_op:(Constant_fill xs)
-      ()
+    Tensor.term ~grad_spec:Require_grad ~label:[ "x_flat" ] ~fetch_op:(Constant_fill xs) ()
   in
   let step_sym, bindings = IDX.get_static_symbol ~static_range:size IDX.empty in
   let%op xkcd = x_flat @| step_sym in
File "bin/moons_demo.ml", line 1, characters 0-0:
diff --git a/_build/default/bin/moons_demo.ml b/_build/default/bin/.formatted/moons_demo.ml
index 6c0a3c3..4191613 100644
--- a/_build/default/bin/moons_demo.ml
+++ b/_build/default/bin/.formatted/moons_demo.ml
@@ -32,8 +32,12 @@ let demo () =
   let config = Datasets.Half_moons.Config.{ noise_range = 0.1; seed = Some seed } in
   let moons_coordinates, moons_labels = Datasets.Half_moons.generate ~config ~len () in
-  let moons_flat = TDSL.rebatch ~l:"moons_flat" (Ir.Ndarray.as_array Ir.Ops.Double moons_coordinates) in
-  let moons_classes = TDSL.rebatch ~l:"moons_classes" (Ir.Ndarray.as_array Ir.Ops.Double moons_labels) in
+  let moons_flat =
+    TDSL.rebatch ~l:"moons_flat" (Ir.Ndarray.as_array Ir.Ops.Double moons_coordinates)
+  in
+  let moons_classes =
+    TDSL.rebatch ~l:"moons_classes" (Ir.Ndarray.as_array Ir.Ops.Double moons_labels)
+  in
   let batch_n, bindings = IDX.get_static_symbol ~static_range:n_batches IDX.empty in
   let step_n, bindings = IDX.get_static_symbol bindings in
File "bin/zero2hero_1of7.ml", line 1, characters 0-0:
diff --git a/_build/default/bin/zero2hero_1of7.ml b/_build/default/bin/.formatted/zero2hero_1of7.ml
index 4212f12..babcfc2 100644
--- a/_build/default/bin/zero2hero_1of7.ml
+++ b/_build/default/bin/.formatted/zero2hero_1of7.ml
@@ -57,8 +57,7 @@ let _suspended () =
   let x_flat =
     Tensor.term ~grad_spec:Tensor.Require_grad ~label:[ "x_flat" ]
       (* ~input_dims:[] ~output_dims:[ 1 ] *)
-      ~fetch_op:(Constant_fill values)
-      ()
+      ~fetch_op:(Constant_fill values) ()
   in
   let step_sym, bindings = IDX.get_static_symbol ~static_range:size IDX.empty in
   (* The [let x =] line is the same as this except [let%op x =] uses [~grad_spec:If_needed]. *)
@@ -110,9 +109,7 @@ let _suspended () =
   let xs = Array.init size ~f:Float.(fun i -> (of_int i / 10.) - 5.) in
   (* Yay, the whole shape gets inferred! *)
   let x_flat =
-    Tensor.term ~grad_spec:Require_grad ~label:[ "x_flat" ]
-      ~fetch_op:(Constant_fill xs)
-      ()
+    Tensor.term ~grad_spec:Require_grad ~label:[ "x_flat" ] ~fetch_op:(Constant_fill xs) ()
   in
   let step_sym, bindings = IDX.get_static_symbol ~static_range:size IDX.empty in
   let%op x = x_flat @| step_sym in
File "lib/ppx_op.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/ppx_op.ml b/_build/default/lib/.formatted/ppx_op.ml
index fe5d727..24aa423 100644
--- a/_build/default/lib/ppx_op.ml
+++ b/_build/default/lib/.formatted/ppx_op.ml
@@ -21,7 +21,7 @@ let make_p ~has_config ~loc =
 let make_vb ?value ~has_config ~loc ~str_loc ~ident string =
   let pat = Ast_helper.Pat.var ~loc { loc = str_loc; txt = ident } in
-  let value = match value with Some c -> [%expr Some [%e c] ] | None -> [%expr None] in
+  let value = match value with Some c -> [%expr Some [%e c]] | None -> [%expr None] in
   let v = [%expr [%e make_p ~has_config ~loc] ?value:[%e value] [%e string]] in
   let vb = Ast_helper.Vb.mk ~loc pat v in
   (pat, vb)
File "bin/moons_benchmark.ml", line 1, characters 0-0:
diff --git a/_build/default/bin/moons_benchmark.ml b/_build/default/bin/.formatted/moons_benchmark.ml
index 31d245e..f0fb1af 100644
--- a/_build/default/bin/moons_benchmark.ml
+++ b/_build/default/bin/.formatted/moons_benchmark.ml
@@ -57,7 +57,9 @@ let classify_moons ~seed ~on_device ~inlining_cutoff ~num_streams ~batch_size ~b
   (* let init_lr = 0.1 in *)
   let init_lr = 0.01 in
   let moons_config = Datasets.Half_moons.Config.{ noise_range = 0.1; seed = Some seed } in
-  let moons_coordinates, moons_labels = Datasets.Half_moons.generate ~config:moons_config ~len:flat_len () in
+  let moons_coordinates, moons_labels =
+    Datasets.Half_moons.generate ~config:moons_config ~len:flat_len ()
+  in
   let moons_flat_ndarray = Ir.Ndarray.as_array Ir.Ops.Double moons_coordinates in
   let moons_classes_ndarray = Ir.Ndarray.as_array Ir.Ops.Double moons_labels in
   let moons_flat ~b:_ = TDSL.rebatch ~l:"moons_flat" moons_flat_ndarray in
@@ -83,14 +85,14 @@ let classify_moons ~seed ~on_device ~inlining_cutoff ~num_streams ~batch_size ~b
   @@ Backend.get_global_debug_info ();
   let per_batch_callback ~at_batch ~at_step ~learning_rate ~batch_loss ~epoch_loss =
     Stdio.printf "Batch=%d, step=%d, lr=%f, batch loss=%f, epoch loss=%f\n%!" at_batch at_step
-      learning_rate batch_loss epoch_loss;
+      learning_rate batch_loss epoch_loss;
     if Option.is_none !start_time then start_time := Some (Time_now.nanoseconds_since_unix_epoch ())
   in
   (* Tn.print_accessible_headers (); *)
   let per_epoch_callback ~at_step ~at_epoch ~learning_rate ~epoch_loss =
     (* if at_epoch % 10 = 9 then *)
-    Stdio.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n%!" at_epoch at_step learning_rate
-      epoch_loss
+    Stdio.printf "Epoch=%d, step=%d, lr=%f, epoch loss=%f\n%!" at_epoch at_step learning_rate
+      epoch_loss
   in
   let {
File "arrayjit/lib/tnode.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/tnode.ml b/_build/default/arrayjit/lib/.formatted/tnode.ml
index c170d68..d3280eb 100644
--- a/_build/default/arrayjit/lib/tnode.ml
+++ b/_build/default/arrayjit/lib/.formatted/tnode.ml
@@ -647,11 +647,11 @@ let create_with_reshape ~id ~label ~base_ndarray ~dims ~padding ~from_padded ()
         dim - left - right)
   in
   (* Check total elements match, allowing shape differences *)
-  let source_total =
-    if Array.is_empty source_dims then 0 else Array.reduce_exn source_dims ~f:( * )
+  let source_total =
+    if Array.is_empty source_dims then 0 else Array.reduce_exn source_dims ~f:( * )
   in
-  let data_total =
-    if Array.is_empty data_dims then 0 else Array.reduce_exn data_dims ~f:( * )
+  let data_total =
+    if Array.is_empty data_dims then 0 else Array.reduce_exn data_dims ~f:( * )
   in
   if source_total <> data_total then
     invalid_arg
File "arrayjit/lib/ndarray.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/ndarray.ml b/_build/default/arrayjit/lib/.formatted/ndarray.ml
index 2776fbc..27322ea 100644
--- a/_build/default/arrayjit/lib/ndarray.ml
+++ b/_build/default/arrayjit/lib/.formatted/ndarray.ml
@@ -385,10 +385,10 @@ let hash nd = Nativeint.hash (to_native nd)
 let hash_fold_t acc nd = hash_fold_nativeint acc (to_native nd)
 let hash_t nd = Nativeint.hash @@ to_native nd
 
-(** C function declarations for efficient copying *)
-external copy_with_padding_c :
+external copy_with_padding_c :
   ('a, 'b) bigarray -> ('a, 'b) bigarray -> int array -> axis_padding array -> unit
   = "arrayjit_copy_with_padding"
+(** C function declarations for efficient copying *)
 
 let copy_with_padding ~source ~target ~padding =
   let source_dims = dims source in
File "lib/shape.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/shape.ml b/_build/default/lib/.formatted/shape.ml
index 6b1b327..b8d77f2 100644
--- a/_build/default/lib/shape.ml
+++ b/_build/default/lib/.formatted/shape.ml
@@ -97,9 +97,7 @@ type transpose_type =
   | Uint4x32_to_prec of Ir.Ops.prec Lazy.t
 [@@deriving equal, sexp]
 
-type terminal_type =
-  | Data of Ir.Assignments.init_data
-  | Fetch of Ir.Assignments.fetch_op
+type terminal_type = Data of Ir.Assignments.init_data | Fetch of Ir.Assignments.fetch_op
 [@@deriving equal, sexp_of]
 
 type ternary_type = Pointwise_tern | Compose_accumulate [@@deriving sexp, equal]
@@ -450,8 +448,8 @@ let%debug4_sexp get_inequalities ({ shape = cur_sh; logic; id = _ } as _upd : up
   | Terminal (`Fetch (Access (Merge_buffer _))) -> (Row.dim_map_empty, mark_terminal ())
   | Terminal (`Fetch (Access (Uint4x32_to_prec_uniform _))) ->
       (* FIXME: NOT IMPLEMENTED YET -- we need to propagate the precision-adjusted dimensions
-         between the source tensor and the target tensor. This is tricky because the dimensions
-         are not known at the time of the shape inference. *)
+         between the source tensor and the target tensor. This is tricky because the dimensions are
+         not known at the time of the shape inference. *)
       (Row.dim_map_empty, mark_terminal ())
   | Terminal (`Fetch (Slice { sliced = tn; batch_idx = _ })) ->
      if Lazy.is_val tn.dims then
File "arrayjit/lib/utils.ml", line 1, characters 0-0:
diff --git a/_build/default/arrayjit/lib/utils.ml b/_build/default/arrayjit/lib/.formatted/utils.ml
index 3940f87..d80a30d 100644
--- a/_build/default/arrayjit/lib/utils.ml
+++ b/_build/default/arrayjit/lib/.formatted/utils.ml
@@ -315,12 +315,8 @@ let get_local_debug_runtime =
         @@ "ocannl_debug_backend setting should be text, html, markdown or flushing; found: " ^ s
   in
   let hyperlink = get_global_arg ~default:"./" ~arg_name:"hyperlink_prefix" in
-  let print_entry_ids =
-    get_global_flag ~default:false ~arg_name:"logs_print_entry_ids"
-  in
-  let verbose_entry_ids =
-    get_global_flag ~default:false ~arg_name:"logs_verbose_entry_ids"
-  in
+  let print_entry_ids = get_global_flag ~default:false ~arg_name:"logs_print_entry_ids" in
+  let verbose_entry_ids = get_global_flag ~default:false ~arg_name:"logs_verbose_entry_ids" in
   let log_main_domain_to_stdout =
     get_global_flag ~default:false ~arg_name:"log_main_domain_to_stdout"
   in
@@ -444,9 +440,7 @@ let restore_settings () =
 let () = restore_settings ()
 let with_runtime_debug () = settings.output_debug_files_in_build_directory && settings.log_level > 1
 let debug_log_from_routines () = settings.debug_log_from_routines && settings.log_level > 1
-
-let never_capture_stdout () =
-  get_global_flag ~default:false ~arg_name:"never_capture_stdout"
+let never_capture_stdout () = get_global_flag ~default:false ~arg_name:"never_capture_stdout"
 
 let enable_runtime_debug () =
   settings.output_debug_files_in_build_directory <- true;
File "lib/row.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/row.ml b/_build/default/lib/.formatted/row.ml
index 59a1f43..6f53852 100644
--- a/_build/default/lib/row.ml
+++ b/_build/default/lib/.formatted/row.ml
@@ -485,10 +485,9 @@ let _lift_row_constraint (constr : row_constraint) ~(beg_dims : dim list) ~(dims
   | Unconstrained -> Unconstrained
   | Exact exact_dims -> Exact (beg_dims @ exact_dims @ dims)
 
-(** Helper function to convert a list of rows to a single row option.
-    Returns None if there is more than one row variable among the rows.
-    Otherwise, concatenates the leading dims to the beg_dims of the variable,
-    and the dims of the variable's row with the dims of the following rows. *)
+(** Helper function to convert a list of rows to a single row option. Returns None if there is more
+    than one row variable among the rows. Otherwise, concatenates the leading dims to the beg_dims
+    of the variable, and the dims of the variable's row with the dims of the following rows. *)
 let rows_to_row (rows : row list) : row option =
   let find_row_vars rows =
     List.foldi rows ~init:([], []) ~f:(fun idx (var_indices, vars) row ->
@@ -505,9 +504,10 @@ let rows_to_row (rows : row list) : row option =
       Some { dims = all_dims; bcast = Broadcastable; id }
   | [ _ ] ->
       (* Exactly one row variable *)
-      let (var_idx, var) = List.hd_exn var_indices in
+      let var_idx, var = List.hd_exn var_indices in
       let var_row = List.nth_exn rows var_idx in
-      let (var_beg_dims, var_dims) = match var_row.bcast with
+      let var_beg_dims, var_dims =
+        match var_row.bcast with
         | Row_var { beg_dims; _ } -> (beg_dims, var_row.dims)
         | Broadcastable -> assert false (* We know there's a row variable *)
       in
@@ -517,12 +517,14 @@ let rows_to_row (rows : row list) : row option =
       let after_dims = List.concat_map after_rows ~f:(fun r -> r.dims) in
       let new_beg_dims = before_dims @ var_beg_dims in
       let new_dims = var_dims @ after_dims in
-      Some { dims = new_dims; bcast = Row_var { v = var; beg_dims = new_beg_dims }; id = var_row.id }
+      Some
+        { dims = new_dims; bcast = Row_var { v = var; beg_dims = new_beg_dims }; id = var_row.id }
   | _ :: _ :: _ ->
       (* More than one row variable *)
      None
 
-let rec apply_rows_constraint ~stage (rows : row list) (constr : row_constraint) (env : environment) : constraint_ list * environment =
+let rec apply_rows_constraint ~stage (rows : row list) (constr : row_constraint) (env : environment)
+    : constraint_ list * environment =
   match rows_to_row rows with
   | Some single_row -> apply_row_constraint ~stage single_row constr env
   | None -> ([ Rows_constr { r = rows; constr } ], env)
@@ -1473,20 +1475,19 @@ let%debug5_sexp close_dim_terminal ~(stage : stage) (env : environment) (dim : d
 let last_dim_is dims d2 = match List.last dims with Some (Dim { d; _ }) -> d = d2 | _ -> false
 
-let rec eliminate_rows_constraint ~lub (rows : row list) (constr : row_constraint) (env : environment) : constraint_ list =
+let rec eliminate_rows_constraint ~lub (rows : row list) (constr : row_constraint)
+    (env : environment) : constraint_ list =
   match rows_to_row rows with
   | Some single_row -> eliminate_row_constraint ~lub single_row constr env
   | None -> [ Rows_constr { r = rows; constr } ]
 
-and eliminate_row_constraint ~lub (r : row) (constr : row_constraint) env :
-    constraint_ list =
+and eliminate_row_constraint ~lub (r : row) (constr : row_constraint) env : constraint_ list =
   match r with
   | { bcast = Broadcastable; _ } ->
       (* The environment is unchanged, as apply_row_constraint would update only the constr. *)
       let ineqs, _env = apply_row_constraint ~stage:Stage5 r constr env in
       List.concat_map ineqs ~f:(function
-        | Rows_constr { r = rows; constr } ->
-            eliminate_rows_constraint ~lub:None rows constr env
+        | Rows_constr { r = rows; constr } -> eliminate_rows_constraint ~lub:None rows constr env
        | ineq -> [ ineq ])
  | { bcast = Row_var { v; beg_dims }; dims; id } -> (
      let r1 = row_of_var v id in
dune build @fmt failed
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)" failed with exit status 2
2025-07-05 20:26.17: Job failed: Failed: Build failed
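The job fails for two independent reasons visible above: dune cannot resolve the ppx_minidebug and ppx_sexp_conv preprocessors referenced by arrayjit/bin/dune (the lint image installs only dune and ocamlformat), and several files diverge from ocamlformat 0.27.0 output. A minimal sketch of how one might clear both locally, assuming an opam switch for the project; the exact dependency commands are an assumed workflow, not taken from this log:

  # Sketch: address both failure causes locally (assumed workflow, not from this log).
  opam install . --deps-only --with-test   # provides ppx_minidebug, ppx_sexp_conv, etc.
  opam install ocamlformat.0.27.0          # match the formatter version pinned by CI
  opam exec -- dune fmt                    # equivalent to: dune build @fmt --auto-promote
  git diff                                 # review the promoted formatting changes before committing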