2025-08-30 07:18.27: New job: test ahrefs/ocannl https://github.com/ahrefs/ocannl.git#refs/heads/concat-and-split-along-axes (07092c0a6c37daa3cb8dc3067ea8b2da62bfd55d) (linux-x86_64:(lint-fmt))
Base: ocaml/opam:debian-12-ocaml-4.08@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d
ocamlformat version: version 0.27.0 (from opam)

To reproduce locally:

git clone --recursive "https://github.com/ahrefs/ocannl.git" -b "concat-and-split-along-axes" && cd "ocannl" && git reset --hard 07092c0a
cat > Dockerfile <<'END-OF-DOCKERFILE'
FROM ocaml/opam:debian-12-ocaml-4.08@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d
USER 1000:1000
RUN cd ~/opam-repository && (git cat-file -e b8021439f8c57ba6435bc2263f6596671f4f4466 || git fetch origin master) && git reset -q --hard b8021439f8c57ba6435bc2263f6596671f4f4466 && git log --no-decorate -n1 --oneline && opam update -u
RUN opam depext -i dune
WORKDIR /src
RUN opam depext -i ocamlformat=0.27.0
COPY --chown=1000:1000 . /src/
RUN opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)
END-OF-DOCKERFILE
docker build .
END-REPRO-BLOCK

2025-08-30 07:18.27: Using cache hint "ahrefs/ocannl-ocaml/opam:debian-12-ocaml-4.08@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d-debian-12-4.08_opam-2.4-ocamlformat-b8021439f8c57ba6435bc2263f6596671f4f4466"

2025-08-30 07:18.27: Using OBuilder spec:
((from ocaml/opam:debian-12-ocaml-4.08@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d)
 (user (uid 1000) (gid 1000))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "cd ~/opam-repository && (git cat-file -e b8021439f8c57ba6435bc2263f6596671f4f4466 || git fetch origin master) && git reset -q --hard b8021439f8c57ba6435bc2263f6596671f4f4466 && git log --no-decorate -n1 --oneline && opam update -u"))
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam depext -i dune"))
 (workdir /src)
 (run (cache (opam-archives (target /home/opam/.opam/download-cache)))
      (network host)
      (shell "opam depext -i ocamlformat=0.27.0"))
 (copy (src .)
       (dst /src/))
 (run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)")))
2025-08-30 07:18.27: Waiting for resource in pool OCluster
2025-08-30 07:18.46: Waiting for worker…
2025-08-30 07:18.46: Got resource from pool OCluster
Building on doris.caelum.ci.dev
All commits already cached
HEAD is now at 07092c0a Some more progress on concatenation-along-axes before we give up

(from ocaml/opam:debian-12-ocaml-4.08@sha256:1f8f583356b37c1cbcde061de13bfdb1a8963b80127e721c7ca17f4e884e669d)
2025-08-30 07:18.30 ---> using "b69d5f1106f8a1cfb35e904f37f2ccc38216aa61a22ee32dd7c324a91608cf44" from cache

/: (user (uid 1000) (gid 1000))

/: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "cd ~/opam-repository && (git cat-file -e b8021439f8c57ba6435bc2263f6596671f4f4466 || git fetch origin master) && git reset -q --hard b8021439f8c57ba6435bc2263f6596671f4f4466 && git log --no-decorate -n1 --oneline && opam update -u"))
b8021439f8 Merge pull request #28261 from kit-ty-kate/deploy-fix-25819

<><> Updating package repositories ><><><><><><><><><><><><><><><><><><><><><><>
[default] Initialised
default (at git+file:///home/opam/opam-repository):
[INFO] opam 2.1 and 2.2 include many performance and security improvements over 2.0; please consider upgrading (https://opam.ocaml.org/doc/Install.html)

Everything as up-to-date as possible (run with --verbose to show unavailable upgrades).
However, you may "opam upgrade" these packages explicitly, which will ask permission to downgrade or uninstall the conflicting packages.
Nothing to do.
# Run eval $(opam env) to update the current shell environment
2025-08-30 07:19.16 ---> saved as "1be76adbf83434462493f6d60d3a403af1e722bb13c1f26df1ed667dd3ad008c"

/: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam depext -i dune"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages
The following actions will be performed:
  - install dune 3.20.0

<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[dune.3.20.0] found in cache

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed dune.3.20.0
Done.
# Run eval $(opam env) to update the current shell environment
2025-08-30 07:20.09 ---> saved as "76384e7e1078506ef4c7e0c049f9a49f543e9fdc75f2cd2f57b1b769d15b1cc0"

/: (workdir /src)

/src: (run (cache (opam-archives (target /home/opam/.opam/download-cache))) (network host) (shell "opam depext -i ocamlformat=0.27.0"))
# Detecting depexts using vars: arch=x86_64, os=linux, os-distribution=debian, os-family=debian
# No extra OS packages requirements found.
# All required OS packages found.
# Now letting opam install the packages
The following actions will be performed:
  - install sexplib0 v0.14.0 [required by base]
  - install menhirLib 20240715 [required by ocamlformat-lib]
  - install menhirCST 20240715 [required by menhir]
  - install ocamlbuild 0.16.1 [required by fpath, astring, uuseg]
  - install cmdliner 1.3.0 [required by ocamlformat]
  - install menhirSdk 20240715 [required by ocamlformat-lib]
  - install either 1.0.0 [required by ocamlformat-lib]
  - install dune-build-info 3.20.0 [required by ocamlformat-lib]
  - install csexp 1.5.2 [required by ocamlformat]
  - install camlp-streams 5.0.1 [required by ocamlformat-lib]
  - install seq base [required by re]
  - install fix 20250428 [required by ocamlformat-lib]
  - install ocaml-version 4.0.1 [required by ocamlformat-lib]
  - install ocamlfind 1.9.8 [required by ocp-indent, astring, fpath, uuseg]
  - install menhir 20240715 [required by ocamlformat-lib]
  - install dune-configurator 3.20.0 [required by base]
  - install re 1.11.0 [required by ocamlformat]
  - install topkg 1.1.0 [required by fpath, astring, uuseg]
  - install base-bytes base [required by ocp-indent]
  - install base v0.14.3 [required by ocamlformat-lib]
  - install uutf 1.0.4 [required by ocamlformat-lib]
  - install astring 0.8.5 [required by ocamlformat-lib]
  - install ocp-indent 1.8.1 [required by ocamlformat-lib]
  - install stdio v0.14.0 [required by ocamlformat-lib]
  - install uucp 15.0.0 [required by uuseg]
  - install fpath 0.7.3 [required by ocamlformat-lib]
  - install uuseg 15.0.0 [required by ocamlformat-lib]
  - install ocamlformat-lib 0.27.0 [required by ocamlformat]
  - install ocamlformat 0.27.0
===== 29 to install =====
<><> Gathering sources ><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
[astring.0.8.5] found in cache
[base.v0.14.3] found in cache
[camlp-streams.5.0.1] found in cache
[cmdliner.1.3.0] found in cache
[csexp.1.5.2] found in cache
[dune-build-info.3.20.0] found in cache
[dune-configurator.3.20.0] found in cache
[either.1.0.0] found in cache
[fix.20250428] found in cache
[fpath.0.7.3] found in cache
[menhir.20240715] found in cache
[menhirCST.20240715] found in cache
[menhirLib.20240715] found in cache
[menhirSdk.20240715] found in cache
[ocaml-version.4.0.1] found in cache
[ocamlbuild.0.16.1] found in cache
[ocamlfind.1.9.8] found in cache
[ocamlformat.0.27.0] found in cache
[ocamlformat-lib.0.27.0] found in cache
[ocp-indent.1.8.1] found in cache
[re.1.11.0] found in cache
[sexplib0.v0.14.0] found in cache
[stdio.v0.14.0] found in cache
[topkg.1.1.0] found in cache
[uucp.15.0.0] found in cache
[uuseg.15.0.0] found in cache
[uutf.1.0.4] found in cache

<><> Processing actions <><><><><><><><><><><><><><><><><><><><><><><><><><><><>
-> installed seq.base
-> installed camlp-streams.5.0.1
-> installed csexp.1.5.2
-> installed cmdliner.1.3.0
-> installed either.1.0.0
-> installed fix.20250428
-> installed menhirCST.20240715
-> installed menhirLib.20240715
-> installed menhirSdk.20240715
-> installed ocaml-version.4.0.1
-> installed re.1.11.0
-> installed sexplib0.v0.14.0
-> installed dune-build-info.3.20.0
-> installed dune-configurator.3.20.0
-> installed ocamlfind.1.9.8
-> installed base-bytes.base
-> installed ocamlbuild.0.16.1
-> installed ocp-indent.1.8.1
-> installed base.v0.14.3
-> installed topkg.1.1.0
-> installed stdio.v0.14.0
-> installed uutf.1.0.4
-> installed astring.0.8.5
-> installed menhir.20240715
-> installed fpath.0.7.3
-> installed uucp.15.0.0
-> installed uuseg.15.0.0
-> installed ocamlformat-lib.0.27.0
-> installed ocamlformat.0.27.0
Done.

<><> ocp-indent.1.8.1 installed successfully ><><><><><><><><><><><><><><><><><>
=> This package requires additional configuration for use in editors. Install package 'user-setup', or manually:
   * for Emacs, add these lines to ~/.emacs:
     (add-to-list 'load-path "/home/opam/.opam/4.08/share/emacs/site-lisp")
     (require 'ocp-indent)
   * for Vim, add this line to ~/.vimrc:
     set rtp^="/home/opam/.opam/4.08/share/ocp-indent/vim"

# Run eval $(opam env) to update the current shell environment
2025-08-30 07:21.14 ---> saved as "ee5f36fa10328bf844b6feeab861ac0336aa35be5e288d81331a0c9c26b8859a"

/src: (copy (src .) (dst /src/))
2025-08-30 07:21.17 ---> saved as "7e4b2be36b4c116842d6d120ae97773f75731e770bb43d36790f0ce58e28dec2"
/src: (run (shell "opam exec -- dune build @fmt --ignore-promoted-rules || (echo \"dune build @fmt failed\"; exit 2)"))
File "arrayjit/bin/dune", line 6, characters 21-34:
6 | (pps ppx_minidebug ppx_sexp_conv))
                       ^^^^^^^^^^^^^
Error: Library "ppx_sexp_conv" not found.
-> required by _build/default/arrayjit/bin/read_config.exe
-> required by %{dep:../../arrayjit/bin/read_config.exe} at test/dune:25
-> required by _build/default/test/config/ocannl_backend.txt
-> required by %{read:config/ocannl_backend.txt} at test/dune:44
-> required by Computing directory contents of _build/default/test
File "arrayjit/bin/dune", line 6, characters 7-20:
6 | (pps ppx_minidebug ppx_sexp_conv))
         ^^^^^^^^^^^^^
Error: Library "ppx_minidebug" not found.
-> required by _build/default/arrayjit/bin/.merlin-conf/exe-read_config
-> required by _build/default/arrayjit/bin/read_config.exe
-> required by %{dep:../../arrayjit/bin/read_config.exe} at test/dune:25
-> required by _build/default/test/config/ocannl_backend.txt
-> required by %{read:config/ocannl_backend.txt} at test/dune:44
-> required by Computing directory contents of _build/default/test
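The two "Library ... not found" errors above are environment failures rather than formatting failures: this lint-fmt job installs only dune and ocamlformat, so the ppx rewriters that arrayjit/bin/dune requests are absent, and dune fails while elaborating the test/dune rules that depend on read_config.exe (note the "Computing directory contents" step in the dependency chains). As a rough illustration of why a missing rewriter is fatal before anything is formatted or compiled, here is a minimal OCaml sketch of a file that depends on ppx_sexp_conv; the backend type and its values are hypothetical, not taken from this repository:

  (* [@@deriving sexp] asks the ppx_sexp_conv rewriter to generate
     sexp_of_backend and backend_of_sexp during preprocessing, so the
     rewriter must be installed before the file can be preprocessed
     at all. *)
  type backend = Cc | Cuda [@@deriving sexp]

  let () = print_endline (Sexplib0.Sexp.to_string_hum (sexp_of_backend Cc))

Building such a file needs both the rewriter, via (preprocess (pps ppx_sexp_conv)) in the dune stanza, and the sexplib0 runtime library.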
File "lib/nn_blocks.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/nn_blocks.ml b/_build/default/lib/.formatted/nn_blocks.ml
index 22d8dea..9cfea3b 100644
--- a/_build/default/lib/nn_blocks.ml
+++ b/_build/default/lib/.formatted/nn_blocks.ml
@@ -18,140 +18,93 @@ let mlp ~config =
   in
   fun x -> List.fold layers ~init:x ~f:(fun x layer -> layer x)
-  (* Claude's cold-start take on the transformer architecture:
-(** Transformer components for decoder-only architectures *)
-
-(** Embedding layer configuration *)
-type embedding_config = {
-  label : string list;
-  vocab_size : int;
-  embed_dim : int;
-}
-
-(** Embedding layer - uses matrix multiplication as a workaround *)
-let%op embedding ~config token_ids =
-  (* In a real implementation, token_ids should be one-hot encoded
-     Missing: gather/embedding operation *)
-  "embed_matrix" (config.vocab_size, config.embed_dim) * token_ids
-
-(** Simple layer normalization implementation *)
-let%op simple_layer_norm x =
-  (* This is a simplified version without learnable parameters
-     Real layer norm would need gamma and beta parameters *)
-  let mean = TDSL.einsum1 "b,s,d => b,s,0" x in
-  let x_centered = x - mean in
-  let variance = TDSL.einsum1 "b,s,d => b,s,1" (x_centered *. x_centered) in
-  let eps = !.1e-6 in
-  let std = sqrt (variance + eps) in
-  x_centered /. std
-
-(** Simplified attention mechanism *)
-let%op simple_attention q k v =
-  (* Shape: q, k, v are all [batch, seq, dim] *)
-  (* Compute attention scores *)
-  let scores = TDSL.einsum "b,s,d;b,t,d => b,s,t" q k in
-
-  (* Scale scores *)
-  let scale = !.0.1 in (* Should be 1/sqrt(head_dim) *)
-  let scaled_scores = scores *. scale in
-
-  (* Apply softmax approximation (missing: real softmax) *)
-  let scores_exp = exp scaled_scores in
-  let scores_sum = TDSL.einsum1 "b,s,t => b,s,1" scores_exp in
-  let attention_weights = scores_exp /. scores_sum in
-
-  (* Apply attention to values *)
-  TDSL.einsum "b,s,t;b,t,d => b,s,d" attention_weights v
-
-(** Simple transformer block *)
-type transformer_block_config = {
-  label : string list;
-  hidden_dim : int;
-  embed_dim : int;
-}
-
-let%op simple_transformer_block ~config x =
-  (* Self-attention *)
-  let q = "q_proj" (config.embed_dim, config.embed_dim) * x in
-  let k = "k_proj" (config.embed_dim, config.embed_dim) * x in
-  let v = "v_proj" (config.embed_dim, config.embed_dim) * x in
-
-  let attn_out = simple_attention q k v in
-  let attn_out = "o_proj" (config.embed_dim, config.embed_dim) * attn_out in
-
-  (* Residual connection *)
-  let x = x + attn_out in
-  let x = simple_layer_norm x in
-
-  (* Feed-forward network *)
-  let ffn = relu ("ffn_w1" (config.embed_dim, config.hidden_dim) * x + "ffn_b1" config.hidden_dim) in
-  let ffn_out = "ffn_w2" (config.hidden_dim, config.embed_dim) * ffn + "ffn_b2" config.embed_dim in
-
-  (* Residual connection *)
-  let x = x + ffn_out in
-  simple_layer_norm x
-
-(** Minimal transformer model *)
-type transformer_config = {
-  label : string list;
-  num_layers : int;
-  vocab_size : int;
-  embed_dim : int;
-  hidden_dim : int;
-}
-
-let simple_transformer ~config =
-  let embed = embedding ~config:{
-    label = "embed" :: config.label;
-    vocab_size = config.vocab_size;
-    embed_dim = config.embed_dim;
-  } in
-
-  let blocks = List.init config.num_layers ~f:(fun i ->
-    simple_transformer_block ~config:{
-      label = ["layer"; Int.to_string i] @ config.label;
-      hidden_dim = config.hidden_dim;
-      embed_dim = config.embed_dim;
-    }
-  ) in
-
-  fun token_ids ->
-    let x = embed token_ids in
-    (* Missing: positional encoding *)
-    let x = List.fold blocks ~init:x ~f:(fun x block -> block x) in
-    (* Output projection *)
-    "lm_head" (config.embed_dim, config.vocab_size) * x
-*)
-
-(**
-  Key missing functionality in OCANNL for implementing transformers:
-
-  1. **Embedding/Gather**: No way to index into embedding matrices efficiently.
-     Workaround requires one-hot encoding which doesn't scale.
-
-  2. **Softmax**: Critical for attention. Current exp/sum workaround may have
-     numerical stability issues.
-
-  3. **Layer Normalization**: No built-in layer/batch norm. Had to implement
-     simplified version without learnable affine parameters.
-
-  4. **Reshape/View**: Cannot reshape tensors to handle multi-head attention
-     properly (splitting head dimension).
-
-  5. **Positional Encoding**: No sin/cos-based positional encodings. Would need
-     to pre-compute and pass as constants.
-
-  6. **Masking**: No way to apply causal masks with -inf values for softmax.
-
-  7. **Dropout**: No dropout for regularization.
-
-  8. **Advanced activations**: Only ReLU available, no GELU/SiLU/Swish.
-
-  9. **Indexing operations**: No advanced indexing for KV-caching in inference.
-
-  10. **Data types**: No explicit support for int tensors (for token IDs).
-
-  Despite these limitations, OCANNL's automatic differentiation and einsum notation
-  provide good foundations. The framework could support transformers well with
-  these additional operations.
-*)
+(* Claude's cold-start take on the transformer architecture: (** Transformer components for
+   decoder-only architectures *)
+
+   (** Embedding layer configuration *) type embedding_config = { label : string list; vocab_size :
+   int; embed_dim : int; }
+
+   (** Embedding layer - uses matrix multiplication as a workaround *) let%op embedding ~config
+   token_ids = (* In a real implementation, token_ids should be one-hot encoded Missing:
+   gather/embedding operation *) "embed_matrix" (config.vocab_size, config.embed_dim) * token_ids
+
+   (** Simple layer normalization implementation *) let%op simple_layer_norm x = (* This is a
+   simplified version without learnable parameters Real layer norm would need gamma and beta
+   parameters *) let mean = TDSL.einsum1 "b,s,d => b,s,0" x in let x_centered = x - mean in let
+   variance = TDSL.einsum1 "b,s,d => b,s,1" (x_centered *. x_centered) in let eps = !.1e-6 in let
+   std = sqrt (variance + eps) in x_centered /. std
+
+   (** Simplified attention mechanism *) let%op simple_attention q k v = (* Shape: q, k, v are all
+   [batch, seq, dim] *) (* Compute attention scores *) let scores = TDSL.einsum "b,s,d;b,t,d =>
+   b,s,t" q k in
+
+   (* Scale scores *) let scale = !.0.1 in (* Should be 1/sqrt(head_dim) *) let scaled_scores =
+   scores *. scale in
+
+   (* Apply softmax approximation (missing: real softmax) *) let scores_exp = exp scaled_scores in
+   let scores_sum = TDSL.einsum1 "b,s,t => b,s,1" scores_exp in let attention_weights = scores_exp
+   /. scores_sum in
+
+   (* Apply attention to values *) TDSL.einsum "b,s,t;b,t,d => b,s,d" attention_weights v
+
+   (** Simple transformer block *) type transformer_block_config = { label : string list; hidden_dim
+   : int; embed_dim : int; }
+
+   let%op simple_transformer_block ~config x = (* Self-attention *) let q = "q_proj"
+   (config.embed_dim, config.embed_dim) * x in let k = "k_proj" (config.embed_dim, config.embed_dim)
+   * x in let v = "v_proj" (config.embed_dim, config.embed_dim) * x in
+
+   let attn_out = simple_attention q k v in let attn_out = "o_proj" (config.embed_dim,
+   config.embed_dim) * attn_out in
+
+   (* Residual connection *) let x = x + attn_out in let x = simple_layer_norm x in
+
+   (* Feed-forward network *) let ffn = relu ("ffn_w1" (config.embed_dim, config.hidden_dim) * x +
+   "ffn_b1" config.hidden_dim) in let ffn_out = "ffn_w2" (config.hidden_dim, config.embed_dim) * ffn
+   + "ffn_b2" config.embed_dim in
+
+   (* Residual connection *) let x = x + ffn_out in simple_layer_norm x
+
+   (** Minimal transformer model *) type transformer_config = { label : string list; num_layers :
+   int; vocab_size : int; embed_dim : int; hidden_dim : int; }
+
+   let simple_transformer ~config = let embed = embedding ~config:{ label = "embed" :: config.label;
+   vocab_size = config.vocab_size; embed_dim = config.embed_dim; } in
+
+   let blocks = List.init config.num_layers ~f:(fun i -> simple_transformer_block ~config:{ label =
+   ["layer"; Int.to_string i] @ config.label; hidden_dim = config.hidden_dim; embed_dim =
+   config.embed_dim; } ) in
+
+   fun token_ids -> let x = embed token_ids in (* Missing: positional encoding *) let x = List.fold
+   blocks ~init:x ~f:(fun x block -> block x) in (* Output projection *) "lm_head"
+   (config.embed_dim, config.vocab_size) * x *)
+
+(** Key missing functionality in OCANNL for implementing transformers:
+
+    1. **Embedding/Gather**: No way to index into embedding matrices efficiently. Workaround
+       requires one-hot encoding which doesn't scale.
+
+    2. **Softmax**: Critical for attention. Current exp/sum workaround may have numerical stability
+       issues.
+
+    3. **Layer Normalization**: No built-in layer/batch norm. Had to implement simplified version
+       without learnable affine parameters.
+
+    4. **Reshape/View**: Cannot reshape tensors to handle multi-head attention properly (splitting
+       head dimension).
+
+    5. **Positional Encoding**: No sin/cos-based positional encodings. Would need to pre-compute and
+       pass as constants.
+
+    6. **Masking**: No way to apply causal masks with -inf values for softmax.
+
+    7. **Dropout**: No dropout for regularization.
+
+    8. **Advanced activations**: Only ReLU available, no GELU/SiLU/Swish.
+
+    9. **Indexing operations**: No advanced indexing for KV-caching in inference.
+
+    10. **Data types**: No explicit support for int tensors (for token IDs).
+
+    Despite these limitations, OCANNL's automatic differentiation and einsum notation provide good
+    foundations. The framework could support transformers well with these additional operations. *)
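Item 2 in the comment above (the exp/sum softmax workaround) deserves a concrete note: exponentiating raw logits overflows as soon as they grow large, and the standard remedy is to subtract the maximum before exponentiating. A minimal sketch in plain OCaml over float arrays, not OCANNL tensors; the function name is ours:

  (* Numerically stable softmax: shifting by the maximum makes every
     exponent at most 0, so exp cannot overflow, and the shift cancels
     out in the normalization, leaving the result unchanged. *)
  let stable_softmax (xs : float array) : float array =
    let m = Array.fold_left max neg_infinity xs in
    let exps = Array.map (fun x -> exp (x -. m)) xs in
    let sum = Array.fold_left ( +. ) 0.0 exps in
    Array.map (fun e -> e /. sum) exps

In OCANNL's einsum notation the same trick would presumably subtract a "b,s,t => b,s,1" max-reduction before the exp, if such a reduction were available.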
File "lib/row.mli", line 1, characters 0-0:
diff --git a/_build/default/lib/row.mli b/_build/default/lib/.formatted/row.mli
index eb87f95..79ebc11 100644
--- a/_build/default/lib/row.mli
+++ b/_build/default/lib/.formatted/row.mli
@@ -28,11 +28,11 @@ val get_dim : d:int -> ?label:string -> unit -> dim
 val dim_to_int_exn : dim -> int
 val dim_to_string : [> `Only_labels ] -> dim -> string
-(** Extracts all dimension variables from a dim, including from nested products. *)
 val dim_vars : dim -> dim_var list
+(** Extracts all dimension variables from a dim, including from nested products. *)
-(** Checks if a dimension is fully solved (no variables). *)
 val is_solved_dim : dim -> bool
+(** Checks if a dimension is fully solved (no variables). *)
 
 type row_id [@@deriving sexp, compare, equal, hash]
 type row_cmp
File "lib/shape.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/shape.ml b/_build/default/lib/.formatted/shape.ml
index 3e3ee29..fc77de4 100644
--- a/_build/default/lib/shape.ml
+++ b/_build/default/lib/.formatted/shape.ml
@@ -627,7 +627,7 @@ let%debug4_sexp finish_inference (() : unit) : unit =
   (* There should not be any shape variables remaining in any inference-undergoing update steps. *)
   state := Row.empty_env
 
-let row_to_dims (row : Row.t) : int array=
+let row_to_dims (row : Row.t) : int array =
   let open Row in
   let rec f = function
     | Dim { d; _ } -> d
File "lib/row.ml", line 1, characters 0-0:
diff --git a/_build/default/lib/row.ml b/_build/default/lib/.formatted/row.ml
index b485652..1c3e385 100644
--- a/_build/default/lib/row.ml
+++ b/_build/default/lib/.formatted/row.ml
@@ -875,7 +875,7 @@ let%debug5_sexp solve_dim_ineq ~(stage : stage) ~(cur : dim) ~(subr : dim) (env
        @@ Shape_error
             ( "Cannot compare Prod with unresolved variables in inequality",
               [ Dim_mismatch [ cur; subr ] ] )
-    | Var cur_v, Var subr_v -> (
+    | Var cur_v, Var subr_v -> (
        match (Map.find env.dim_env cur_v, Map.find env.dim_env subr_v) with
        | Some (Bounds_dim { cur = cur1; _ }), _ when List.mem ~equal:equal_dim_var cur1 subr_v ->
            ([ Dim_eq { d1 = cur; d2 = subr } ], env)
dune build @fmt failed
"/usr/bin/env" "bash" "-c" "opam exec -- dune build @fmt --ignore-promoted-rules || (echo "dune build @fmt failed"; exit 2)" failed with exit status 2
2025-08-30 07:21.43: Job failed: Failed: Build failed
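For reference, the lib/row.mli hunks above are purely about doc-comment placement (this project's ocamlformat configuration evidently attaches a (** ... *) comment after the val it documents rather than before), while the lib/shape.ml and lib/row.ml hunks are whitespace-only. A minimal interface sketch of the row.mli convention; the stub types are simplified, and only dim_vars comes from the diff:

  (* Both placements attach the doc comment to dim_vars; ocamlformat
     normalizes to the comment-after form shown in the .formatted
     output above. *)
  type dim
  type dim_var

  val dim_vars : dim -> dim_var list
  (** Extracts all dimension variables from a dim, including from nested products. *)

So the formatting part of this failure is mechanical to resolve: the diffs above are exactly the changes ocamlformat wants to make.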