diff --git a/cr-examples/onnx/opgen/src/main/java/oracle/code/onnx/OpSchema.java b/cr-examples/onnx/opgen/src/main/java/oracle/code/onnx/OpSchema.java
index 4114af78b2b..f9cab2a6f1c 100644
--- a/cr-examples/onnx/opgen/src/main/java/oracle/code/onnx/OpSchema.java
+++ b/cr-examples/onnx/opgen/src/main/java/oracle/code/onnx/OpSchema.java
@@ -58,7 +58,7 @@ public enum SupportLevel implements Serializable {
 
     public enum AttributeType implements Serializable {
         FLOAT(float.class),
-        INT(int.class),
+        INT(long.class),
         STRING(String.class),
         // @@@ proto
         TENSOR(byte[].class),
@@ -69,7 +69,7 @@ public enum AttributeType implements Serializable {
         // OnnxTypeElement?
         TYPE_PROTO(Object.class),
         FLOATS(float[].class),
-        INTS(int[].class),
+        INTS(long[].class),
         STRINGS(String[].class),
         // @@@ proto
         TENSORS(byte[][].class),
diff --git a/cr-examples/onnx/src/main/java/oracle/code/onnx/ExplicitOnnxOperators.java b/cr-examples/onnx/src/main/java/oracle/code/onnx/ExplicitOnnxOperators.java
index e93890f8239..ff8c0ce9bab 100644
--- a/cr-examples/onnx/src/main/java/oracle/code/onnx/ExplicitOnnxOperators.java
+++ b/cr-examples/onnx/src/main/java/oracle/code/onnx/ExplicitOnnxOperators.java
@@ -32,14 +32,14 @@ class ExplicitOnnxOperators {
     // Explicit constant operators
 
     public static Tensor<Long> Constant(
-            Integer c) {
+            Long c) {
         return OnnxOperators.Constant(
                 Optional.of(c),Optional.empty(), Optional.empty(), Optional.empty(),
                 Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty());
     }
 
     public static Tensor<Long> Constant(
-            int[] c) {
+            long[] c) {
         return OnnxOperators.Constant(
                 Optional.empty(),Optional.empty(), Optional.empty(), Optional.empty(),
                 Optional.empty(), Optional.of(c), Optional.empty(), Optional.empty());
diff --git a/cr-examples/onnx/src/main/java/oracle/code/onnx/OnnxOperators.java b/cr-examples/onnx/src/main/java/oracle/code/onnx/OnnxOperators.java
index 916ddf689f6..76ce9c6d4c0 100644
--- a/cr-examples/onnx/src/main/java/oracle/code/onnx/OnnxOperators.java
+++ b/cr-examples/onnx/src/main/java/oracle/code/onnx/OnnxOperators.java
@@ -67,7 +67,7 @@ public static <T> Tensor<T> Add(Tensor<T> A, Tensor<T> B) {
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T1> AffineGrid(Tensor<T1> theta, Tensor<Long> size, Optional<Integer> align_corners) {
+    public static <T1, T2> Tensor<T1> AffineGrid(Tensor<T1> theta, Tensor<Long> size, Optional<Long> align_corners) {
         Object result = OnnxInterpreter.interpret(OnnxOps.AffineGrid.class, List.of(theta, size), List.of(align_corners));
         return (Tensor<T1>) result;
     }
@@ -77,12 +77,12 @@ public static <T, T1> Tensor<Boolean> And(Tensor<Boolean> A, Tensor<Boolean> B)
         return (Tensor<Boolean>) result;
     }
 
-    public static <T> Tensor<Long> ArgMax(Tensor<T> data, Optional<Integer> keepdims, Optional<Integer> select_last_index, Optional<Integer> axis) {
+    public static <T> Tensor<Long> ArgMax(Tensor<T> data, Optional<Long> keepdims, Optional<Long> select_last_index, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ArgMax.class, List.of(data), List.of(keepdims, select_last_index, axis));
         return (Tensor<Long>) result;
     }
 
-    public static <T> Tensor<Long> ArgMin(Tensor<T> data, Optional<Integer> keepdims, Optional<Integer> select_last_index, Optional<Integer> axis) {
+    public static <T> Tensor<Long> ArgMin(Tensor<T> data, Optional<Long> keepdims, Optional<Long> select_last_index, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ArgMin.class, List.of(data), List.of(keepdims, select_last_index, axis));
         return (Tensor<Long>) result;
     }
@@ -112,19 +112,19 @@ public static <T> Tensor<T> Atanh(Tensor<T> input) {
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> AveragePool(Tensor<T> X, Optional<int[]> pads, Optional<int[]> dilations, Optional<String> auto_pad, Optional<Integer> count_include_pad, Optional<Integer> ceil_mode, Optional<int[]> strides, int[] kernel_shape) {
+    public static <T> Tensor<T> AveragePool(Tensor<T> X, Optional<long[]> pads, Optional<long[]> dilations, Optional<String> auto_pad, Optional<Long> count_include_pad, Optional<Long> ceil_mode, Optional<long[]> strides, long[] kernel_shape) {
         Object result = OnnxInterpreter.interpret(OnnxOps.AveragePool.class, List.of(X), List.of(pads, dilations, auto_pad, count_include_pad, ceil_mode, strides, kernel_shape));
         return (Tensor<T>) result;
     }
 
     public record BatchNormalizationResult<T, T1, T2>(Tensor<T> Y, Tensor<T2> running_mean, Tensor<T2> running_var) { }
-    public static <T, T1, T2> BatchNormalizationResult<T, T1, T2> BatchNormalization(Tensor<T> X, Tensor<T1> scale, Tensor<T1> B, Tensor<T2> input_mean, Tensor<T2> input_var, Optional<Float> epsilon, Optional<Integer> training_mode, Optional<Float> momentum) {
+    public static <T, T1, T2> BatchNormalizationResult<T, T1, T2> BatchNormalization(Tensor<T> X, Tensor<T1> scale, Tensor<T1> B, Tensor<T2> input_mean, Tensor<T2> input_var, Optional<Float> epsilon, Optional<Long> training_mode, Optional<Float> momentum) {
         Object result = OnnxInterpreter.interpret(OnnxOps.BatchNormalization.class, List.of(X, scale, B, input_mean, input_var), List.of(epsilon, training_mode, momentum));
         Object[] resultArray = (Object[]) result;
         return new BatchNormalizationResult<>((Tensor<T>)resultArray[0], (Tensor<T2>)resultArray[1], (Tensor<T2>)resultArray[2]);
     }
 
-    public static <T1, T2> Tensor<T2> Bernoulli(Tensor<T1> input, Optional<Float> seed, Optional<Integer> dtype) {
+    public static <T1, T2> Tensor<T2> Bernoulli(Tensor<T1> input, Optional<Float> seed, Optional<Long> dtype) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Bernoulli.class, List.of(input), List.of(seed, dtype));
         return (Tensor<T2>) result;
     }
@@ -159,27 +159,27 @@ public static <T> Tensor<T> BitwiseXor(Tensor<T> A, Tensor<T> B) {
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T2> BlackmanWindow(Tensor<T1> size, Optional<Integer> periodic, Optional<Integer> output_datatype) {
+    public static <T1, T2> Tensor<T2> BlackmanWindow(Tensor<T1> size, Optional<Long> periodic, Optional<Long> output_datatype) {
         Object result = OnnxInterpreter.interpret(OnnxOps.BlackmanWindow.class, List.of(size), List.of(periodic, output_datatype));
         return (Tensor<T2>) result;
     }
 
-    public static <T1, T2> Tensor<T2> Cast(Tensor<T1> input, Optional<Integer> saturate, int to) {
+    public static <T1, T2> Tensor<T2> Cast(Tensor<T1> input, Optional<Long> saturate, long to) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Cast.class, List.of(input), List.of(saturate, to));
         return (Tensor<T2>) result;
     }
 
-    public static <T1, T2> Tensor<T2> CastLike(Tensor<T1> input, Tensor<T2> target_type, Optional<Integer> saturate) {
+    public static <T1, T2> Tensor<T2> CastLike(Tensor<T1> input, Tensor<T2> target_type, Optional<Long> saturate) {
         Object result = OnnxInterpreter.interpret(OnnxOps.CastLike.class, List.of(input, target_type), List.of(saturate));
         return (Tensor<T2>) result;
     }
 
-    public static <T1, T2> Tensor<T2> CastMap(Map<Long, T1> X, Optional<String> map_form, Optional<String> cast_to, Optional<Integer> max_map) {
+    public static <T1, T2> Tensor<T2> CastMap(Map<Long, T1> X, Optional<String> map_form, Optional<String> cast_to, Optional<Long> max_map) {
         Object result = OnnxInterpreter.interpret(OnnxOps.CastMap.class, List.of(X), List.of(map_form, cast_to, max_map));
         return (Tensor<T2>) result;
     }
 
-    public static <T1, T2> Tensor<T2> CategoryMapper(Tensor<T1> X, Optional<int[]> cats_int64s, Optional<String[]> cats_strings, Optional<Integer> default_int64, Optional<String> default_string) {
+    public static <T1, T2> Tensor<T2> CategoryMapper(Tensor<T1> X, Optional<long[]> cats_int64s, Optional<String[]> cats_strings, Optional<Long> default_int64, Optional<String> default_string) {
         Object result = OnnxInterpreter.interpret(OnnxOps.CategoryMapper.class, List.of(X), List.of(cats_int64s, cats_strings, default_int64, default_string));
         return (Tensor<T2>) result;
     }
@@ -194,7 +194,7 @@ public static <T> Tensor<Float> Celu(Tensor<Float> X, Optional<Float> alpha) {
         return (Tensor<Float>) result;
     }
 
-    public static <T, Tind> Tensor<T> CenterCropPad(Tensor<T> input_data, Tensor<Tind> shape, Optional<int[]> axes) {
+    public static <T, Tind> Tensor<T> CenterCropPad(Tensor<T> input_data, Tensor<Tind> shape, Optional<long[]> axes) {
         Object result = OnnxInterpreter.interpret(OnnxOps.CenterCropPad.class, List.of(input_data, shape), List.of(axes));
         return (Tensor<T>) result;
     }
@@ -204,27 +204,27 @@ public static <T> Tensor<T> Clip(Tensor<T> input, Optional<Tensor<T>> min, Optio
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> Col2Im(Tensor<T> input, Tensor<Long> image_shape, Tensor<Long> block_shape, Optional<int[]> pads, Optional<int[]> dilations, Optional<int[]> strides) {
+    public static <T> Tensor<T> Col2Im(Tensor<T> input, Tensor<Long> image_shape, Tensor<Long> block_shape, Optional<long[]> pads, Optional<long[]> dilations, Optional<long[]> strides) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Col2Im.class, List.of(input, image_shape, block_shape), List.of(pads, dilations, strides));
         return (Tensor<T>) result;
     }
 
-    public static <T, T1> Tensor<T> Compress(Tensor<T> input, Tensor<Boolean> condition, Optional<Integer> axis) {
+    public static <T, T1> Tensor<T> Compress(Tensor<T> input, Tensor<Boolean> condition, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Compress.class, List.of(input, condition), List.of(axis));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> Concat(List<Tensor<T>> inputs, int axis) {
+    public static <T> Tensor<T> Concat(List<Tensor<T>> inputs, long axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Concat.class, List.of(inputs), List.of(axis));
         return (Tensor<T>) result;
     }
 
-    public static <S, T> Tensor<T> ConcatFromSequence(List<Tensor<S>> input_sequence, int axis, Optional<Integer> new_axis) {
+    public static <S, T> Tensor<T> ConcatFromSequence(List<Tensor<S>> input_sequence, long axis, Optional<Long> new_axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ConcatFromSequence.class, List.of(input_sequence), List.of(axis, new_axis));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> Constant(Optional<Integer> value_int, Optional<float[]> value_floats, Optional<String[]> value_strings, Optional<Float> value_float, Optional<String> value_string, Optional<int[]> value_ints, Optional<byte[]> sparse_value, Optional<byte[]> value) {
+    public static <T> Tensor<T> Constant(Optional<Long> value_int, Optional<float[]> value_floats, Optional<String[]> value_strings, Optional<Float> value_float, Optional<String> value_string, Optional<long[]> value_ints, Optional<byte[]> sparse_value, Optional<byte[]> value) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Constant.class, List.of(), List.of(value_int, value_floats, value_strings, value_float, value_string, value_ints, sparse_value, value));
         return (Tensor<T>) result;
     }
@@ -234,17 +234,17 @@ public static <T1, T2> Tensor<T2> ConstantOfShape(Tensor<Long> input, Optional<b
         return (Tensor<T2>) result;
     }
 
-    public static <T> Tensor<T> Conv(Tensor<T> X, Tensor<T> W, Optional<Tensor<T>> B, Optional<int[]> pads, Optional<int[]> dilations, Optional<String> auto_pad, Optional<int[]> strides, Optional<Integer> group, Optional<int[]> kernel_shape) {
+    public static <T> Tensor<T> Conv(Tensor<T> X, Tensor<T> W, Optional<Tensor<T>> B, Optional<long[]> pads, Optional<long[]> dilations, Optional<String> auto_pad, Optional<long[]> strides, Optional<Long> group, Optional<long[]> kernel_shape) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Conv.class, List.of(X, W, B), List.of(pads, dilations, auto_pad, strides, group, kernel_shape));
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2, T3> Tensor<Integer> ConvInteger(Tensor<T1> x, Tensor<T2> w, Optional<Tensor<T1>> x_zero_point, Optional<Tensor<T2>> w_zero_point, Optional<int[]> pads, Optional<int[]> dilations, Optional<String> auto_pad, Optional<int[]> strides, Optional<Integer> group, Optional<int[]> kernel_shape) {
+    public static <T1, T2, T3> Tensor<Integer> ConvInteger(Tensor<T1> x, Tensor<T2> w, Optional<Tensor<T1>> x_zero_point, Optional<Tensor<T2>> w_zero_point, Optional<long[]> pads, Optional<long[]> dilations, Optional<String> auto_pad, Optional<long[]> strides, Optional<Long> group, Optional<long[]> kernel_shape) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ConvInteger.class, List.of(x, w, x_zero_point, w_zero_point), List.of(pads, dilations, auto_pad, strides, group, kernel_shape));
         return (Tensor<Integer>) result;
     }
 
-    public static <T> Tensor<T> ConvTranspose(Tensor<T> X, Tensor<T> W, Optional<Tensor<T>> B, Optional<int[]> output_shape, Optional<int[]> pads, Optional<int[]> dilations, Optional<String> auto_pad, Optional<int[]> strides, Optional<Integer> group, Optional<int[]> kernel_shape, Optional<int[]> output_padding) {
+    public static <T> Tensor<T> ConvTranspose(Tensor<T> X, Tensor<T> W, Optional<Tensor<T>> B, Optional<long[]> output_shape, Optional<long[]> pads, Optional<long[]> dilations, Optional<String> auto_pad, Optional<long[]> strides, Optional<Long> group, Optional<long[]> kernel_shape, Optional<long[]> output_padding) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ConvTranspose.class, List.of(X, W, B), List.of(output_shape, pads, dilations, auto_pad, strides, group, kernel_shape, output_padding));
         return (Tensor<T>) result;
     }
@@ -259,27 +259,27 @@ public static <T> Tensor<T> Cosh(Tensor<T> input) {
         return (Tensor<T>) result;
     }
 
-    public static <T, T2> Tensor<T> CumSum(Tensor<T> x, Tensor<T2> axis, Optional<Integer> exclusive, Optional<Integer> reverse) {
+    public static <T, T2> Tensor<T> CumSum(Tensor<T> x, Tensor<T2> axis, Optional<Long> exclusive, Optional<Long> reverse) {
         Object result = OnnxInterpreter.interpret(OnnxOps.CumSum.class, List.of(x, axis), List.of(exclusive, reverse));
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T1> DFT(Tensor<T1> input, Optional<Tensor<T2>> dft_length, Optional<Tensor<Long>> axis, Optional<Integer> inverse, Optional<Integer> onesided) {
+    public static <T1, T2> Tensor<T1> DFT(Tensor<T1> input, Optional<Tensor<T2>> dft_length, Optional<Tensor<Long>> axis, Optional<Long> inverse, Optional<Long> onesided) {
         Object result = OnnxInterpreter.interpret(OnnxOps.DFT.class, List.of(input, dft_length, axis), List.of(inverse, onesided));
         return (Tensor<T1>) result;
     }
 
-    public static <T> Tensor<T> DeformConv(Tensor<T> X, Tensor<T> W, Tensor<T> offset, Optional<Tensor<T>> B, Optional<Tensor<T>> mask, Optional<int[]> pads, Optional<int[]> dilations, Optional<int[]> strides, Optional<Integer> offset_group, Optional<Integer> group, Optional<int[]> kernel_shape) {
+    public static <T> Tensor<T> DeformConv(Tensor<T> X, Tensor<T> W, Tensor<T> offset, Optional<Tensor<T>> B, Optional<Tensor<T>> mask, Optional<long[]> pads, Optional<long[]> dilations, Optional<long[]> strides, Optional<Long> offset_group, Optional<Long> group, Optional<long[]> kernel_shape) {
         Object result = OnnxInterpreter.interpret(OnnxOps.DeformConv.class, List.of(X, W, offset, B, mask), List.of(pads, dilations, strides, offset_group, group, kernel_shape));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> DepthToSpace(Tensor<T> input, Optional<String> mode, int blocksize) {
+    public static <T> Tensor<T> DepthToSpace(Tensor<T> input, Optional<String> mode, long blocksize) {
         Object result = OnnxInterpreter.interpret(OnnxOps.DepthToSpace.class, List.of(input), List.of(mode, blocksize));
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T2> DequantizeLinear(Tensor<T1> x, Tensor<T2> x_scale, Optional<Tensor<T1>> x_zero_point, Optional<Integer> axis, Optional<Integer> block_size) {
+    public static <T1, T2> Tensor<T2> DequantizeLinear(Tensor<T1> x, Tensor<T2> x_scale, Optional<Tensor<T1>> x_zero_point, Optional<Long> axis, Optional<Long> block_size) {
         Object result = OnnxInterpreter.interpret(OnnxOps.DequantizeLinear.class, List.of(x, x_scale, x_zero_point), List.of(axis, block_size));
         return (Tensor<T2>) result;
     }
@@ -289,7 +289,7 @@ public static <T> Tensor<T> Det(Tensor<T> X) {
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T2> DictVectorizer(Map<?, ?> X, Optional<String[]> string_vocabulary, Optional<int[]> int64_vocabulary) {
+    public static <T1, T2> Tensor<T2> DictVectorizer(Map<?, ?> X, Optional<String[]> string_vocabulary, Optional<long[]> int64_vocabulary) {
         Object result = OnnxInterpreter.interpret(OnnxOps.DictVectorizer.class, List.of(X), List.of(string_vocabulary, int64_vocabulary));
         return (Tensor<T2>) result;
     }
@@ -300,7 +300,7 @@ public static <T> Tensor<T> Div(Tensor<T> A, Tensor<T> B) {
     }
 
     public record DropoutResult<T, T1, T2>(Tensor<T> output, Tensor<Boolean> mask) { }
-    public static <T, T1, T2> DropoutResult<T, T1, T2> Dropout(Tensor<T> data, Optional<Tensor<T1>> ratio, Optional<Tensor<Boolean>> training_mode, Optional<Integer> seed) {
+    public static <T, T1, T2> DropoutResult<T, T1, T2> Dropout(Tensor<T> data, Optional<Tensor<T1>> ratio, Optional<Tensor<Boolean>> training_mode, Optional<Long> seed) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Dropout.class, List.of(data, ratio, training_mode), List.of(seed));
         Object[] resultArray = (Object[]) result;
         return new DropoutResult<>((Tensor<T>)resultArray[0], (Tensor<Boolean>)resultArray[1]);
@@ -343,17 +343,17 @@ public static <T> Tensor<T> Expand(Tensor<T> input, Tensor<Long> shape) {
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T2> EyeLike(Tensor<T1> input, Optional<Integer> dtype, Optional<Integer> k) {
+    public static <T1, T2> Tensor<T2> EyeLike(Tensor<T1> input, Optional<Long> dtype, Optional<Long> k) {
         Object result = OnnxInterpreter.interpret(OnnxOps.EyeLike.class, List.of(input), List.of(dtype, k));
         return (Tensor<T2>) result;
     }
 
-    public static <T1> Tensor<Float> FeatureVectorizer(List<Tensor<T1>> X, Optional<int[]> inputdimensions) {
+    public static <T1> Tensor<Float> FeatureVectorizer(List<Tensor<T1>> X, Optional<long[]> inputdimensions) {
         Object result = OnnxInterpreter.interpret(OnnxOps.FeatureVectorizer.class, List.of(X), List.of(inputdimensions));
         return (Tensor<Float>) result;
     }
 
-    public static <T> Tensor<T> Flatten(Tensor<T> input, Optional<Integer> axis) {
+    public static <T> Tensor<T> Flatten(Tensor<T> input, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Flatten.class, List.of(input), List.of(axis));
         return (Tensor<T>) result;
     }
@@ -364,23 +364,23 @@ public static <T> Tensor<T> Floor(Tensor<T> X) {
     }
 
     public record GRUResult<T, T1>(Tensor<T> Y, Tensor<T> Y_h) { }
-    public static <T, T1> GRUResult<T, T1> GRU(Tensor<T> X, Tensor<T> W, Tensor<T> R, Optional<Tensor<T>> B, Optional<Tensor<Integer>> sequence_lens, Optional<Tensor<T>> initial_h, Optional<Integer> layout, Optional<float[]> activation_alpha, Optional<Integer> hidden_size, Optional<float[]> activation_beta, Optional<String[]> activations, Optional<Integer> linear_before_reset, Optional<Float> clip, Optional<String> direction) {
+    public static <T, T1> GRUResult<T, T1> GRU(Tensor<T> X, Tensor<T> W, Tensor<T> R, Optional<Tensor<T>> B, Optional<Tensor<Integer>> sequence_lens, Optional<Tensor<T>> initial_h, Optional<Long> layout, Optional<float[]> activation_alpha, Optional<Long> hidden_size, Optional<float[]> activation_beta, Optional<String[]> activations, Optional<Long> linear_before_reset, Optional<Float> clip, Optional<String> direction) {
         Object result = OnnxInterpreter.interpret(OnnxOps.GRU.class, List.of(X, W, R, B, sequence_lens, initial_h), List.of(layout, activation_alpha, hidden_size, activation_beta, activations, linear_before_reset, clip, direction));
         Object[] resultArray = (Object[]) result;
         return new GRUResult<>((Tensor<T>)resultArray[0], (Tensor<T>)resultArray[1]);
     }
 
-    public static <T, Tind> Tensor<T> Gather(Tensor<T> data, Tensor<Tind> indices, Optional<Integer> axis) {
+    public static <T, Tind> Tensor<T> Gather(Tensor<T> data, Tensor<Tind> indices, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Gather.class, List.of(data, indices), List.of(axis));
         return (Tensor<T>) result;
     }
 
-    public static <T, Tind> Tensor<T> GatherElements(Tensor<T> data, Tensor<Tind> indices, Optional<Integer> axis) {
+    public static <T, Tind> Tensor<T> GatherElements(Tensor<T> data, Tensor<Tind> indices, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.GatherElements.class, List.of(data, indices), List.of(axis));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> GatherND(Tensor<T> data, Tensor<Long> indices, Optional<Integer> batch_dims) {
+    public static <T> Tensor<T> GatherND(Tensor<T> data, Tensor<Long> indices, Optional<Long> batch_dims) {
         Object result = OnnxInterpreter.interpret(OnnxOps.GatherND.class, List.of(data, indices), List.of(batch_dims));
         return (Tensor<T>) result;
     }
@@ -390,7 +390,7 @@ public static <T> Tensor<T> Gelu(Tensor<T> X, Optional<String> approximate) {
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> Gemm(Tensor<T> A, Tensor<T> B, Optional<Tensor<T>> C, Optional<Float> alpha, Optional<Integer> transB, Optional<Float> beta, Optional<Integer> transA) {
+    public static <T> Tensor<T> Gemm(Tensor<T> A, Tensor<T> B, Optional<Tensor<T>> C, Optional<Float> alpha, Optional<Long> transB, Optional<Float> beta, Optional<Long> transA) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Gemm.class, List.of(A, B, C), List.of(alpha, transB, beta, transA));
         return (Tensor<T>) result;
     }
@@ -400,7 +400,7 @@ public static <T> Tensor<T> GlobalAveragePool(Tensor<T> X) {
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> GlobalLpPool(Tensor<T> X, Optional<Integer> p) {
+    public static <T> Tensor<T> GlobalLpPool(Tensor<T> X, Optional<Long> p) {
         Object result = OnnxInterpreter.interpret(OnnxOps.GlobalLpPool.class, List.of(X), List.of(p));
         return (Tensor<T>) result;
     }
@@ -425,22 +425,22 @@ public static <T, T1> Tensor<Boolean> GreaterOrEqual(Tensor<T> A, Tensor<T> B) {
         return (Tensor<Boolean>) result;
     }
 
-    public static <T1, T2> Tensor<T1> GridSample(Tensor<T1> X, Tensor<T2> grid, Optional<String> mode, Optional<Integer> align_corners, Optional<String> padding_mode) {
+    public static <T1, T2> Tensor<T1> GridSample(Tensor<T1> X, Tensor<T2> grid, Optional<String> mode, Optional<Long> align_corners, Optional<String> padding_mode) {
         Object result = OnnxInterpreter.interpret(OnnxOps.GridSample.class, List.of(X, grid), List.of(mode, align_corners, padding_mode));
         return (Tensor<T1>) result;
     }
 
-    public static <T> Tensor<T> GroupNormalization(Tensor<T> X, Tensor<T> scale, Tensor<T> bias, Optional<Float> epsilon, Optional<Integer> stash_type, int num_groups) {
+    public static <T> Tensor<T> GroupNormalization(Tensor<T> X, Tensor<T> scale, Tensor<T> bias, Optional<Float> epsilon, Optional<Long> stash_type, long num_groups) {
         Object result = OnnxInterpreter.interpret(OnnxOps.GroupNormalization.class, List.of(X, scale, bias), List.of(epsilon, stash_type, num_groups));
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T2> HammingWindow(Tensor<T1> size, Optional<Integer> periodic, Optional<Integer> output_datatype) {
+    public static <T1, T2> Tensor<T2> HammingWindow(Tensor<T1> size, Optional<Long> periodic, Optional<Long> output_datatype) {
         Object result = OnnxInterpreter.interpret(OnnxOps.HammingWindow.class, List.of(size), List.of(periodic, output_datatype));
         return (Tensor<T2>) result;
     }
 
-    public static <T1, T2> Tensor<T2> HannWindow(Tensor<T1> size, Optional<Integer> periodic, Optional<Integer> output_datatype) {
+    public static <T1, T2> Tensor<T2> HannWindow(Tensor<T1> size, Optional<Long> periodic, Optional<Long> output_datatype) {
         Object result = OnnxInterpreter.interpret(OnnxOps.HannWindow.class, List.of(size), List.of(periodic, output_datatype));
         return (Tensor<T2>) result;
     }
@@ -455,7 +455,7 @@ public static <T> Tensor<T> HardSwish(Tensor<T> X) {
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> Hardmax(Tensor<T> input, Optional<Integer> axis) {
+    public static <T> Tensor<T> Hardmax(Tensor<T> input, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Hardmax.class, List.of(input), List.of(axis));
         return (Tensor<T>) result;
     }
@@ -470,7 +470,7 @@ public static <T1, T2> Tensor<Byte> ImageDecoder(Tensor<Byte> encoded_stream, Op
         return (Tensor<Byte>) result;
     }
 
-    public static <T> Tensor<T> Imputer(Tensor<T> X, Optional<Integer> replaced_value_int64, Optional<Float> replaced_value_float, Optional<int[]> imputed_value_int64s, Optional<float[]> imputed_value_floats) {
+    public static <T> Tensor<T> Imputer(Tensor<T> X, Optional<Long> replaced_value_int64, Optional<Float> replaced_value_float, Optional<long[]> imputed_value_int64s, Optional<float[]> imputed_value_floats) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Imputer.class, List.of(X), List.of(replaced_value_int64, replaced_value_float, imputed_value_int64s, imputed_value_floats));
         return (Tensor<T>) result;
     }
@@ -480,7 +480,7 @@ public static <T> Tensor<T> InstanceNormalization(Tensor<T> input, Tensor<T> sca
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<Boolean> IsInf(Tensor<T1> X, Optional<Integer> detect_negative, Optional<Integer> detect_positive) {
+    public static <T1, T2> Tensor<Boolean> IsInf(Tensor<T1> X, Optional<Long> detect_negative, Optional<Long> detect_positive) {
         Object result = OnnxInterpreter.interpret(OnnxOps.IsInf.class, List.of(X), List.of(detect_negative, detect_positive));
         return (Tensor<Boolean>) result;
     }
@@ -490,25 +490,25 @@ public static <T1, T2> Tensor<Boolean> IsNaN(Tensor<T1> X) {
         return (Tensor<Boolean>) result;
     }
 
-    public static <T> Tensor<T> LRN(Tensor<T> X, int size, Optional<Float> alpha, Optional<Float> bias, Optional<Float> beta) {
+    public static <T> Tensor<T> LRN(Tensor<T> X, long size, Optional<Float> alpha, Optional<Float> bias, Optional<Float> beta) {
         Object result = OnnxInterpreter.interpret(OnnxOps.LRN.class, List.of(X), List.of(size, alpha, bias, beta));
         return (Tensor<T>) result;
     }
 
     public record LSTMResult<T, T1>(Tensor<T> Y, Tensor<T> Y_h, Tensor<T> Y_c) { }
-    public static <T, T1> LSTMResult<T, T1> LSTM(Tensor<T> X, Tensor<T> W, Tensor<T> R, Optional<Tensor<T>> B, Optional<Tensor<Integer>> sequence_lens, Optional<Tensor<T>> initial_h, Optional<Tensor<T>> initial_c, Optional<Tensor<T>> P, Optional<Integer> layout, Optional<Integer> input_forget, Optional<float[]> activation_alpha, Optional<Integer> hidden_size, Optional<float[]> activation_beta, Optional<String[]> activations, Optional<Float> clip, Optional<String> direction) {
+    public static <T, T1> LSTMResult<T, T1> LSTM(Tensor<T> X, Tensor<T> W, Tensor<T> R, Optional<Tensor<T>> B, Optional<Tensor<Integer>> sequence_lens, Optional<Tensor<T>> initial_h, Optional<Tensor<T>> initial_c, Optional<Tensor<T>> P, Optional<Long> layout, Optional<Long> input_forget, Optional<float[]> activation_alpha, Optional<Long> hidden_size, Optional<float[]> activation_beta, Optional<String[]> activations, Optional<Float> clip, Optional<String> direction) {
         Object result = OnnxInterpreter.interpret(OnnxOps.LSTM.class, List.of(X, W, R, B, sequence_lens, initial_h, initial_c, P), List.of(layout, input_forget, activation_alpha, hidden_size, activation_beta, activations, clip, direction));
         Object[] resultArray = (Object[]) result;
         return new LSTMResult<>((Tensor<T>)resultArray[0], (Tensor<T>)resultArray[1], (Tensor<T>)resultArray[2]);
     }
 
-    public static <T1, T2> Tensor<T2> LabelEncoder(Tensor<T1> X, Optional<String[]> values_strings, Optional<int[]> keys_int64s, Optional<byte[]> keys_tensor, Optional<String[]> keys_strings, Optional<Float> default_float, Optional<float[]> keys_floats, Optional<byte[]> default_tensor, Optional<Integer> default_int64, Optional<byte[]> values_tensor, Optional<int[]> values_int64s, Optional<String> default_string, Optional<float[]> values_floats) {
+    public static <T1, T2> Tensor<T2> LabelEncoder(Tensor<T1> X, Optional<String[]> values_strings, Optional<long[]> keys_int64s, Optional<byte[]> keys_tensor, Optional<String[]> keys_strings, Optional<Float> default_float, Optional<float[]> keys_floats, Optional<byte[]> default_tensor, Optional<Long> default_int64, Optional<byte[]> values_tensor, Optional<long[]> values_int64s, Optional<String> default_string, Optional<float[]> values_floats) {
         Object result = OnnxInterpreter.interpret(OnnxOps.LabelEncoder.class, List.of(X), List.of(values_strings, keys_int64s, keys_tensor, keys_strings, default_float, keys_floats, default_tensor, default_int64, values_tensor, values_int64s, default_string, values_floats));
         return (Tensor<T2>) result;
     }
 
     public record LayerNormalizationResult<T, U>(Tensor<T> Y, Tensor<U> Mean, Tensor<U> InvStdDev) { }
-    public static <T, U> LayerNormalizationResult<T, U> LayerNormalization(Tensor<T> X, Tensor<T> Scale, Optional<Tensor<T>> B, Optional<Float> epsilon, Optional<Integer> stash_type, Optional<Integer> axis) {
+    public static <T, U> LayerNormalizationResult<T, U> LayerNormalization(Tensor<T> X, Tensor<T> Scale, Optional<Tensor<T>> B, Optional<Float> epsilon, Optional<Long> stash_type, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.LayerNormalization.class, List.of(X, Scale, B), List.of(epsilon, stash_type, axis));
         Object[] resultArray = (Object[]) result;
         return new LayerNormalizationResult<>((Tensor<T>)resultArray[0], (Tensor<U>)resultArray[1], (Tensor<U>)resultArray[2]);
@@ -530,13 +530,13 @@ public static <T, T1> Tensor<Boolean> LessOrEqual(Tensor<T> A, Tensor<T> B) {
     }
 
     public record LinearClassifierResult<T1, T2>(Tensor<T2> Y, Tensor<Float> Z) { }
-    public static <T1, T2> LinearClassifierResult<T1, T2> LinearClassifier(Tensor<T1> X, Optional<int[]> classlabels_ints, Optional<String> post_transform, float[] coefficients, Optional<Integer> multi_class, Optional<float[]> intercepts, Optional<String[]> classlabels_strings) {
+    public static <T1, T2> LinearClassifierResult<T1, T2> LinearClassifier(Tensor<T1> X, Optional<long[]> classlabels_ints, Optional<String> post_transform, float[] coefficients, Optional<Long> multi_class, Optional<float[]> intercepts, Optional<String[]> classlabels_strings) {
         Object result = OnnxInterpreter.interpret(OnnxOps.LinearClassifier.class, List.of(X), List.of(classlabels_ints, post_transform, coefficients, multi_class, intercepts, classlabels_strings));
         Object[] resultArray = (Object[]) result;
         return new LinearClassifierResult<>((Tensor<T2>)resultArray[0], (Tensor<Float>)resultArray[1]);
     }
 
-    public static <T> Tensor<Float> LinearRegressor(Tensor<T> X, Optional<String> post_transform, Optional<float[]> coefficients, Optional<Integer> targets, Optional<float[]> intercepts) {
+    public static <T> Tensor<Float> LinearRegressor(Tensor<T> X, Optional<String> post_transform, Optional<float[]> coefficients, Optional<Long> targets, Optional<float[]> intercepts) {
         Object result = OnnxInterpreter.interpret(OnnxOps.LinearRegressor.class, List.of(X), List.of(post_transform, coefficients, targets, intercepts));
         return (Tensor<Float>) result;
     }
@@ -546,17 +546,17 @@ public static <T> Tensor<T> Log(Tensor<T> input) {
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> LogSoftmax(Tensor<T> input, Optional<Integer> axis) {
+    public static <T> Tensor<T> LogSoftmax(Tensor<T> input, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.LogSoftmax.class, List.of(input), List.of(axis));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> LpNormalization(Tensor<T> input, Optional<Integer> p, Optional<Integer> axis) {
+    public static <T> Tensor<T> LpNormalization(Tensor<T> input, Optional<Long> p, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.LpNormalization.class, List.of(input), List.of(p, axis));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> LpPool(Tensor<T> X, Optional<Integer> p, Optional<int[]> pads, Optional<int[]> dilations, Optional<String> auto_pad, Optional<Integer> ceil_mode, Optional<int[]> strides, int[] kernel_shape) {
+    public static <T> Tensor<T> LpPool(Tensor<T> X, Optional<Long> p, Optional<long[]> pads, Optional<long[]> dilations, Optional<String> auto_pad, Optional<Long> ceil_mode, Optional<long[]> strides, long[] kernel_shape) {
         Object result = OnnxInterpreter.interpret(OnnxOps.LpPool.class, List.of(X), List.of(p, pads, dilations, auto_pad, ceil_mode, strides, kernel_shape));
         return (Tensor<T>) result;
     }
@@ -577,18 +577,18 @@ public static <T> Tensor<T> Max(List<Tensor<T>> data_0) {
     }
 
     public record MaxPoolResult<T, I>(Tensor<T> Y, Tensor<Long> Indices) { }
-    public static <T, I> MaxPoolResult<T, I> MaxPool(Tensor<T> X, Optional<int[]> pads, Optional<int[]> dilations, Optional<String> auto_pad, Optional<Integer> ceil_mode, Optional<Integer> storage_order, Optional<int[]> strides, int[] kernel_shape) {
+    public static <T, I> MaxPoolResult<T, I> MaxPool(Tensor<T> X, Optional<long[]> pads, Optional<long[]> dilations, Optional<String> auto_pad, Optional<Long> ceil_mode, Optional<Long> storage_order, Optional<long[]> strides, long[] kernel_shape) {
         Object result = OnnxInterpreter.interpret(OnnxOps.MaxPool.class, List.of(X), List.of(pads, dilations, auto_pad, ceil_mode, storage_order, strides, kernel_shape));
         Object[] resultArray = (Object[]) result;
         return new MaxPoolResult<>((Tensor<T>)resultArray[0], (Tensor<Long>)resultArray[1]);
     }
 
-    public static <T> Tensor<T> MaxRoiPool(Tensor<T> X, Tensor<T> rois, Optional<Float> spatial_scale, int[] pooled_shape) {
+    public static <T> Tensor<T> MaxRoiPool(Tensor<T> X, Tensor<T> rois, Optional<Float> spatial_scale, long[] pooled_shape) {
         Object result = OnnxInterpreter.interpret(OnnxOps.MaxRoiPool.class, List.of(X, rois), List.of(spatial_scale, pooled_shape));
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T1> MaxUnpool(Tensor<T1> X, Tensor<Long> I, Optional<Tensor<Long>> output_shape, Optional<int[]> pads, Optional<int[]> strides, int[] kernel_shape) {
+    public static <T1, T2> Tensor<T1> MaxUnpool(Tensor<T1> X, Tensor<Long> I, Optional<Tensor<Long>> output_shape, Optional<long[]> pads, Optional<long[]> strides, long[] kernel_shape) {
         Object result = OnnxInterpreter.interpret(OnnxOps.MaxUnpool.class, List.of(X, I, output_shape), List.of(pads, strides, kernel_shape));
         return (Tensor<T1>) result;
     }
@@ -598,12 +598,12 @@ public static <T> Tensor<T> Mean(List<Tensor<T>> data_0) {
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> MeanVarianceNormalization(Tensor<T> X, Optional<int[]> axes) {
+    public static <T> Tensor<T> MeanVarianceNormalization(Tensor<T> X, Optional<long[]> axes) {
         Object result = OnnxInterpreter.interpret(OnnxOps.MeanVarianceNormalization.class, List.of(X), List.of(axes));
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2, T3> Tensor<T3> MelWeightMatrix(Tensor<T1> num_mel_bins, Tensor<T1> dft_length, Tensor<T1> sample_rate, Tensor<T2> lower_edge_hertz, Tensor<T2> upper_edge_hertz, Optional<Integer> output_datatype) {
+    public static <T1, T2, T3> Tensor<T3> MelWeightMatrix(Tensor<T1> num_mel_bins, Tensor<T1> dft_length, Tensor<T1> sample_rate, Tensor<T2> lower_edge_hertz, Tensor<T2> upper_edge_hertz, Optional<Long> output_datatype) {
         Object result = OnnxInterpreter.interpret(OnnxOps.MelWeightMatrix.class, List.of(num_mel_bins, dft_length, sample_rate, lower_edge_hertz, upper_edge_hertz), List.of(output_datatype));
         return (Tensor<T3>) result;
     }
@@ -618,7 +618,7 @@ public static <T> Tensor<T> Mish(Tensor<T> X) {
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> Mod(Tensor<T> A, Tensor<T> B, Optional<Integer> fmod) {
+    public static <T> Tensor<T> Mod(Tensor<T> A, Tensor<T> B, Optional<Long> fmod) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Mod.class, List.of(A, B), List.of(fmod));
         return (Tensor<T>) result;
     }
@@ -633,7 +633,7 @@ public static <T> Tensor<T> Mul(Tensor<T> A, Tensor<T> B) {
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T2> Multinomial(Tensor<T1> input, Optional<Float> seed, Optional<Integer> sample_size, Optional<Integer> dtype) {
+    public static <T1, T2> Tensor<T2> Multinomial(Tensor<T1> input, Optional<Float> seed, Optional<Long> sample_size, Optional<Long> dtype) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Multinomial.class, List.of(input), List.of(seed, sample_size, dtype));
         return (Tensor<T2>) result;
     }
@@ -643,12 +643,12 @@ public static <T> Tensor<T> Neg(Tensor<T> X) {
         return (Tensor<T>) result;
     }
 
-    public static <T, Tind> Tensor<T> NegativeLogLikelihoodLoss(Tensor<T> input, Tensor<Tind> target, Optional<Tensor<T>> weight, Optional<Integer> ignore_index, Optional<String> reduction) {
+    public static <T, Tind> Tensor<T> NegativeLogLikelihoodLoss(Tensor<T> input, Tensor<Tind> target, Optional<Tensor<T>> weight, Optional<Long> ignore_index, Optional<String> reduction) {
         Object result = OnnxInterpreter.interpret(OnnxOps.NegativeLogLikelihoodLoss.class, List.of(input, target, weight), List.of(ignore_index, reduction));
         return (Tensor<T>) result;
     }
 
-    public static Tensor<Long> NonMaxSuppression(Tensor<Float> boxes, Tensor<Float> scores, Optional<Tensor<Long>> max_output_boxes_per_class, Optional<Tensor<Float>> iou_threshold, Optional<Tensor<Float>> score_threshold, Optional<Integer> center_point_box) {
+    public static Tensor<Long> NonMaxSuppression(Tensor<Float> boxes, Tensor<Float> scores, Optional<Tensor<Long>> max_output_boxes_per_class, Optional<Tensor<Float>> iou_threshold, Optional<Tensor<Float>> score_threshold, Optional<Long> center_point_box) {
         Object result = OnnxInterpreter.interpret(OnnxOps.NonMaxSuppression.class, List.of(boxes, scores, max_output_boxes_per_class, iou_threshold, score_threshold), List.of(center_point_box));
         return (Tensor<Long>) result;
     }
@@ -668,12 +668,12 @@ public static <T> Tensor<Boolean> Not(Tensor<Boolean> X) {
         return (Tensor<Boolean>) result;
     }
 
-    public static <T1, T2, T3> Tensor<T3> OneHot(Tensor<T1> indices, Tensor<T2> depth, Tensor<T3> values, Optional<Integer> axis) {
+    public static <T1, T2, T3> Tensor<T3> OneHot(Tensor<T1> indices, Tensor<T2> depth, Tensor<T3> values, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.OneHot.class, List.of(indices, depth, values), List.of(axis));
         return (Tensor<T3>) result;
     }
 
-    public static <T> Tensor<Float> OneHotEncoder(Tensor<T> X, Optional<String[]> cats_strings, Optional<int[]> cats_int64s, Optional<Integer> zeros) {
+    public static <T> Tensor<Float> OneHotEncoder(Tensor<T> X, Optional<String[]> cats_strings, Optional<long[]> cats_int64s, Optional<Long> zeros) {
         Object result = OnnxInterpreter.interpret(OnnxOps.OneHotEncoder.class, List.of(X), List.of(cats_strings, cats_int64s, zeros));
         return (Tensor<Float>) result;
     }
@@ -713,7 +713,7 @@ public static <T, T1> Tensor<T> Pow(Tensor<T> X, Tensor<T1> Y) {
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2, T3, T4> Tensor<T3> QLinearConv(Tensor<T1> x, Tensor<Float> x_scale, Tensor<T1> x_zero_point, Tensor<T2> w, Tensor<Float> w_scale, Tensor<T2> w_zero_point, Tensor<Float> y_scale, Tensor<T3> y_zero_point, Optional<Tensor<Integer>> B, Optional<int[]> pads, Optional<int[]> dilations, Optional<String> auto_pad, Optional<int[]> strides, Optional<Integer> group, Optional<int[]> kernel_shape) {
+    public static <T1, T2, T3, T4> Tensor<T3> QLinearConv(Tensor<T1> x, Tensor<Float> x_scale, Tensor<T1> x_zero_point, Tensor<T2> w, Tensor<Float> w_scale, Tensor<T2> w_zero_point, Tensor<Float> y_scale, Tensor<T3> y_zero_point, Optional<Tensor<Integer>> B, Optional<long[]> pads, Optional<long[]> dilations, Optional<String> auto_pad, Optional<long[]> strides, Optional<Long> group, Optional<long[]> kernel_shape) {
         Object result = OnnxInterpreter.interpret(OnnxOps.QLinearConv.class, List.of(x, x_scale, x_zero_point, w, w_scale, w_zero_point, y_scale, y_zero_point, B), List.of(pads, dilations, auto_pad, strides, group, kernel_shape));
         return (Tensor<T3>) result;
     }
@@ -723,34 +723,34 @@ public static <TS, T1, T2, T3> Tensor<T3> QLinearMatMul(Tensor<T1> a, Tensor<TS>
         return (Tensor<T3>) result;
     }
 
-    public static <T1, T2> Tensor<T2> QuantizeLinear(Tensor<T1> x, Tensor<T1> y_scale, Optional<Tensor<T2>> y_zero_point, Optional<Integer> output_dtype, Optional<Integer> saturate, Optional<Integer> axis, Optional<Integer> block_size) {
+    public static <T1, T2> Tensor<T2> QuantizeLinear(Tensor<T1> x, Tensor<T1> y_scale, Optional<Tensor<T2>> y_zero_point, Optional<Long> output_dtype, Optional<Long> saturate, Optional<Long> axis, Optional<Long> block_size) {
         Object result = OnnxInterpreter.interpret(OnnxOps.QuantizeLinear.class, List.of(x, y_scale, y_zero_point), List.of(output_dtype, saturate, axis, block_size));
         return (Tensor<T2>) result;
     }
 
     public record RNNResult<T, T1>(Tensor<T> Y, Tensor<T> Y_h) { }
-    public static <T, T1> RNNResult<T, T1> RNN(Tensor<T> X, Tensor<T> W, Tensor<T> R, Optional<Tensor<T>> B, Optional<Tensor<Integer>> sequence_lens, Optional<Tensor<T>> initial_h, Optional<Integer> layout, Optional<float[]> activation_alpha, Optional<Integer> hidden_size, Optional<float[]> activation_beta, Optional<String[]> activations, Optional<Float> clip, Optional<String> direction) {
+    public static <T, T1> RNNResult<T, T1> RNN(Tensor<T> X, Tensor<T> W, Tensor<T> R, Optional<Tensor<T>> B, Optional<Tensor<Integer>> sequence_lens, Optional<Tensor<T>> initial_h, Optional<Long> layout, Optional<float[]> activation_alpha, Optional<Long> hidden_size, Optional<float[]> activation_beta, Optional<String[]> activations, Optional<Float> clip, Optional<String> direction) {
         Object result = OnnxInterpreter.interpret(OnnxOps.RNN.class, List.of(X, W, R, B, sequence_lens, initial_h), List.of(layout, activation_alpha, hidden_size, activation_beta, activations, clip, direction));
         Object[] resultArray = (Object[]) result;
         return new RNNResult<>((Tensor<T>)resultArray[0], (Tensor<T>)resultArray[1]);
     }
 
-    public static <T> Tensor<T> RandomNormal(int[] shape, Optional<Float> seed, Optional<Float> mean, Optional<Float> scale, Optional<Integer> dtype) {
+    public static <T> Tensor<T> RandomNormal(long[] shape, Optional<Float> seed, Optional<Float> mean, Optional<Float> scale, Optional<Long> dtype) {
         Object result = OnnxInterpreter.interpret(OnnxOps.RandomNormal.class, List.of(), List.of(shape, seed, mean, scale, dtype));
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T2> RandomNormalLike(Tensor<T1> input, Optional<Float> seed, Optional<Float> mean, Optional<Float> scale, Optional<Integer> dtype) {
+    public static <T1, T2> Tensor<T2> RandomNormalLike(Tensor<T1> input, Optional<Float> seed, Optional<Float> mean, Optional<Float> scale, Optional<Long> dtype) {
         Object result = OnnxInterpreter.interpret(OnnxOps.RandomNormalLike.class, List.of(input), List.of(seed, mean, scale, dtype));
         return (Tensor<T2>) result;
     }
 
-    public static <T> Tensor<T> RandomUniform(Optional<Float> high, int[] shape, Optional<Float> seed, Optional<Float> low, Optional<Integer> dtype) {
+    public static <T> Tensor<T> RandomUniform(Optional<Float> high, long[] shape, Optional<Float> seed, Optional<Float> low, Optional<Long> dtype) {
         Object result = OnnxInterpreter.interpret(OnnxOps.RandomUniform.class, List.of(), List.of(high, shape, seed, low, dtype));
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T2> RandomUniformLike(Tensor<T1> input, Optional<Float> high, Optional<Float> seed, Optional<Float> low, Optional<Integer> dtype) {
+    public static <T1, T2> Tensor<T2> RandomUniformLike(Tensor<T1> input, Optional<Float> high, Optional<Float> seed, Optional<Float> low, Optional<Long> dtype) {
         Object result = OnnxInterpreter.interpret(OnnxOps.RandomUniformLike.class, List.of(input), List.of(high, seed, low, dtype));
         return (Tensor<T2>) result;
     }
@@ -765,52 +765,52 @@ public static <T> Tensor<T> Reciprocal(Tensor<T> X) {
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> ReduceL1(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Integer> noop_with_empty_axes, Optional<Integer> keepdims) {
+    public static <T> Tensor<T> ReduceL1(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Long> noop_with_empty_axes, Optional<Long> keepdims) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ReduceL1.class, List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> ReduceL2(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Integer> noop_with_empty_axes, Optional<Integer> keepdims) {
+    public static <T> Tensor<T> ReduceL2(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Long> noop_with_empty_axes, Optional<Long> keepdims) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ReduceL2.class, List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> ReduceLogSum(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Integer> noop_with_empty_axes, Optional<Integer> keepdims) {
+    public static <T> Tensor<T> ReduceLogSum(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Long> noop_with_empty_axes, Optional<Long> keepdims) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ReduceLogSum.class, List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> ReduceLogSumExp(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Integer> noop_with_empty_axes, Optional<Integer> keepdims) {
+    public static <T> Tensor<T> ReduceLogSumExp(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Long> noop_with_empty_axes, Optional<Long> keepdims) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ReduceLogSumExp.class, List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> ReduceMax(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Integer> noop_with_empty_axes, Optional<Integer> keepdims) {
+    public static <T> Tensor<T> ReduceMax(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Long> noop_with_empty_axes, Optional<Long> keepdims) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ReduceMax.class, List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> ReduceMean(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Integer> noop_with_empty_axes, Optional<Integer> keepdims) {
+    public static <T> Tensor<T> ReduceMean(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Long> noop_with_empty_axes, Optional<Long> keepdims) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ReduceMean.class, List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> ReduceMin(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Integer> noop_with_empty_axes, Optional<Integer> keepdims) {
+    public static <T> Tensor<T> ReduceMin(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Long> noop_with_empty_axes, Optional<Long> keepdims) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ReduceMin.class, List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> ReduceProd(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Integer> noop_with_empty_axes, Optional<Integer> keepdims) {
+    public static <T> Tensor<T> ReduceProd(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Long> noop_with_empty_axes, Optional<Long> keepdims) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ReduceProd.class, List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> ReduceSum(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Integer> noop_with_empty_axes, Optional<Integer> keepdims) {
+    public static <T> Tensor<T> ReduceSum(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Long> noop_with_empty_axes, Optional<Long> keepdims) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ReduceSum.class, List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> ReduceSumSquare(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Integer> noop_with_empty_axes, Optional<Integer> keepdims) {
+    public static <T> Tensor<T> ReduceSumSquare(Tensor<T> data, Optional<Tensor<Long>> axes, Optional<Long> noop_with_empty_axes, Optional<Long> keepdims) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ReduceSumSquare.class, List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         return (Tensor<T>) result;
     }
@@ -825,22 +825,22 @@ public static <T> Tensor<T> Relu(Tensor<T> X) {
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> Reshape(Tensor<T> data, Tensor<Long> shape, Optional<Integer> allowzero) {
+    public static <T> Tensor<T> Reshape(Tensor<T> data, Tensor<Long> shape, Optional<Long> allowzero) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Reshape.class, List.of(data, shape), List.of(allowzero));
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T1> Resize(Tensor<T1> X, Optional<Tensor<T2>> roi, Optional<Tensor<Float>> scales, Optional<Tensor<Long>> sizes, Optional<String> mode, Optional<Float> extrapolation_value, Optional<String> nearest_mode, Optional<Integer> antialias, Optional<Float> cubic_coeff_a, Optional<int[]> axes, Optional<String> coordinate_transformation_mode, Optional<String> keep_aspect_ratio_policy, Optional<Integer> exclude_outside) {
+    public static <T1, T2> Tensor<T1> Resize(Tensor<T1> X, Optional<Tensor<T2>> roi, Optional<Tensor<Float>> scales, Optional<Tensor<Long>> sizes, Optional<String> mode, Optional<Float> extrapolation_value, Optional<String> nearest_mode, Optional<Long> antialias, Optional<Float> cubic_coeff_a, Optional<long[]> axes, Optional<String> coordinate_transformation_mode, Optional<String> keep_aspect_ratio_policy, Optional<Long> exclude_outside) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Resize.class, List.of(X, roi, scales, sizes), List.of(mode, extrapolation_value, nearest_mode, antialias, cubic_coeff_a, axes, coordinate_transformation_mode, keep_aspect_ratio_policy, exclude_outside));
         return (Tensor<T1>) result;
     }
 
-    public static <T> Tensor<T> ReverseSequence(Tensor<T> input, Tensor<Long> sequence_lens, Optional<Integer> time_axis, Optional<Integer> batch_axis) {
+    public static <T> Tensor<T> ReverseSequence(Tensor<T> input, Tensor<Long> sequence_lens, Optional<Long> time_axis, Optional<Long> batch_axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ReverseSequence.class, List.of(input, sequence_lens), List.of(time_axis, batch_axis));
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T1> RoiAlign(Tensor<T1> X, Tensor<T1> rois, Tensor<Long> batch_indices, Optional<String> mode, Optional<Integer> output_width, Optional<Float> spatial_scale, Optional<String> coordinate_transformation_mode, Optional<Integer> sampling_ratio, Optional<Integer> output_height) {
+    public static <T1, T2> Tensor<T1> RoiAlign(Tensor<T1> X, Tensor<T1> rois, Tensor<Long> batch_indices, Optional<String> mode, Optional<Long> output_width, Optional<Float> spatial_scale, Optional<String> coordinate_transformation_mode, Optional<Long> sampling_ratio, Optional<Long> output_height) {
         Object result = OnnxInterpreter.interpret(OnnxOps.RoiAlign.class, List.of(X, rois, batch_indices), List.of(mode, output_width, spatial_scale, coordinate_transformation_mode, sampling_ratio, output_height));
         return (Tensor<T1>) result;
     }
@@ -850,19 +850,19 @@ public static <T> Tensor<T> Round(Tensor<T> X) {
         return (Tensor<T>) result;
     }
 
-    public static <T1, T2> Tensor<T1> STFT(Tensor<T1> signal, Tensor<T2> frame_step, Optional<Tensor<T1>> window, Optional<Tensor<T2>> frame_length, Optional<Integer> onesided) {
+    public static <T1, T2> Tensor<T1> STFT(Tensor<T1> signal, Tensor<T2> frame_step, Optional<Tensor<T1>> window, Optional<Tensor<T2>> frame_length, Optional<Long> onesided) {
         Object result = OnnxInterpreter.interpret(OnnxOps.STFT.class, List.of(signal, frame_step, window, frame_length), List.of(onesided));
         return (Tensor<T1>) result;
     }
 
     public record SVMClassifierResult<T1, T2>(Tensor<T2> Y, Tensor<Float> Z) { }
-    public static <T1, T2> SVMClassifierResult<T1, T2> SVMClassifier(Tensor<T1> X, Optional<float[]> prob_b, Optional<float[]> kernel_params, Optional<String> kernel_type, Optional<int[]> classlabels_ints, Optional<String> post_transform, Optional<float[]> rho, Optional<float[]> coefficients, Optional<float[]> support_vectors, Optional<int[]> vectors_per_class, Optional<float[]> prob_a, Optional<String[]> classlabels_strings) {
+    public static <T1, T2> SVMClassifierResult<T1, T2> SVMClassifier(Tensor<T1> X, Optional<float[]> prob_b, Optional<float[]> kernel_params, Optional<String> kernel_type, Optional<long[]> classlabels_ints, Optional<String> post_transform, Optional<float[]> rho, Optional<float[]> coefficients, Optional<float[]> support_vectors, Optional<long[]> vectors_per_class, Optional<float[]> prob_a, Optional<String[]> classlabels_strings) {
         Object result = OnnxInterpreter.interpret(OnnxOps.SVMClassifier.class, List.of(X), List.of(prob_b, kernel_params, kernel_type, classlabels_ints, post_transform, rho, coefficients, support_vectors, vectors_per_class, prob_a, classlabels_strings));
         Object[] resultArray = (Object[]) result;
         return new SVMClassifierResult<>((Tensor<T2>)resultArray[0], (Tensor<Float>)resultArray[1]);
     }
 
-    public static <T> Tensor<Float> SVMRegressor(Tensor<T> X, Optional<String> kernel_type, Optional<float[]> kernel_params, Optional<Integer> n_supports, Optional<float[]> rho, Optional<String> post_transform, Optional<float[]> coefficients, Optional<float[]> support_vectors, Optional<Integer> one_class) {
+    public static <T> Tensor<Float> SVMRegressor(Tensor<T> X, Optional<String> kernel_type, Optional<float[]> kernel_params, Optional<Long> n_supports, Optional<float[]> rho, Optional<String> post_transform, Optional<float[]> coefficients, Optional<float[]> support_vectors, Optional<Long> one_class) {
         Object result = OnnxInterpreter.interpret(OnnxOps.SVMRegressor.class, List.of(X), List.of(kernel_type, kernel_params, n_supports, rho, post_transform, coefficients, support_vectors, one_class));
         return (Tensor<Float>) result;
     }
@@ -872,12 +872,12 @@ public static <T> Tensor<Float> Scaler(Tensor<T> X, Optional<float[]> offset, Op
         return (Tensor<Float>) result;
     }
 
-    public static <T, Tind> Tensor<T> Scatter(Tensor<T> data, Tensor<Tind> indices, Tensor<T> updates, Optional<Integer> axis) {
+    public static <T, Tind> Tensor<T> Scatter(Tensor<T> data, Tensor<Tind> indices, Tensor<T> updates, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Scatter.class, List.of(data, indices, updates), List.of(axis));
         return (Tensor<T>) result;
     }
 
-    public static <T, Tind> Tensor<T> ScatterElements(Tensor<T> data, Tensor<Tind> indices, Tensor<T> updates, Optional<String> reduction, Optional<Integer> axis) {
+    public static <T, Tind> Tensor<T> ScatterElements(Tensor<T> data, Tensor<Tind> indices, Tensor<T> updates, Optional<String> reduction, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ScatterElements.class, List.of(data, indices, updates), List.of(reduction, axis));
         return (Tensor<T>) result;
     }
@@ -902,7 +902,7 @@ public static <T, S> List<Tensor<S>> SequenceConstruct(List<Tensor<T>> inputs) {
         return (List<Tensor<S>>) result;
     }
 
-    public static <S> List<Tensor<S>> SequenceEmpty(Optional<Integer> dtype) {
+    public static <S> List<Tensor<S>> SequenceEmpty(Optional<Long> dtype) {
         Object result = OnnxInterpreter.interpret(OnnxOps.SequenceEmpty.class, List.of(), List.of(dtype));
         return (List<Tensor<S>>) result;
     }
@@ -922,7 +922,7 @@ public static <S, I> Tensor<Long> SequenceLength(List<Tensor<S>> input_sequence)
         return (Tensor<Long>) result;
     }
 
-    public static <T, T1> Tensor<Long> Shape(Tensor<T> data, Optional<Integer> start, Optional<Integer> end) {
+    public static <T, T1> Tensor<Long> Shape(Tensor<T> data, Optional<Long> start, Optional<Long> end) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Shape.class, List.of(data), List.of(start, end));
         return (Tensor<Long>) result;
     }
@@ -962,13 +962,13 @@ public static <T, Tind> Tensor<T> Slice(Tensor<T> data, Tensor<Tind> starts, Ten
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> Softmax(Tensor<T> input, Optional<Integer> axis) {
+    public static <T> Tensor<T> Softmax(Tensor<T> input, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Softmax.class, List.of(input), List.of(axis));
         return (Tensor<T>) result;
     }
 
     public record SoftmaxCrossEntropyLossResult<T, Tind>(Tensor<T> output, Tensor<T> log_prob) { }
-    public static <T, Tind> SoftmaxCrossEntropyLossResult<T, Tind> SoftmaxCrossEntropyLoss(Tensor<T> scores, Tensor<Tind> labels, Optional<Tensor<T>> weights, Optional<Integer> ignore_index, Optional<String> reduction) {
+    public static <T, Tind> SoftmaxCrossEntropyLossResult<T, Tind> SoftmaxCrossEntropyLoss(Tensor<T> scores, Tensor<Tind> labels, Optional<Tensor<T>> weights, Optional<Long> ignore_index, Optional<String> reduction) {
         Object result = OnnxInterpreter.interpret(OnnxOps.SoftmaxCrossEntropyLoss.class, List.of(scores, labels, weights), List.of(ignore_index, reduction));
         Object[] resultArray = (Object[]) result;
         return new SoftmaxCrossEntropyLossResult<>((Tensor<T>)resultArray[0], (Tensor<T>)resultArray[1]);
@@ -984,17 +984,17 @@ public static <T> Tensor<T> Softsign(Tensor<T> input) {
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> SpaceToDepth(Tensor<T> input, int blocksize) {
+    public static <T> Tensor<T> SpaceToDepth(Tensor<T> input, long blocksize) {
         Object result = OnnxInterpreter.interpret(OnnxOps.SpaceToDepth.class, List.of(input), List.of(blocksize));
         return (Tensor<T>) result;
     }
 
-    public static <T> List<Tensor<T>> Split(Tensor<T> input, Optional<Tensor<Long>> split, Optional<Integer> num_outputs, Optional<Integer> axis) {
+    public static <T> List<Tensor<T>> Split(Tensor<T> input, Optional<Tensor<Long>> split, Optional<Long> num_outputs, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Split.class, List.of(input, split), List.of(num_outputs, axis));
         return (List<Tensor<T>>) result;
     }
 
-    public static <T, I, S> List<Tensor<S>> SplitToSequence(Tensor<T> input, Optional<Tensor<I>> split, Optional<Integer> keepdims, Optional<Integer> axis) {
+    public static <T, I, S> List<Tensor<S>> SplitToSequence(Tensor<T> input, Optional<Tensor<I>> split, Optional<Long> keepdims, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.SplitToSequence.class, List.of(input, split), List.of(keepdims, axis));
         return (List<Tensor<S>>) result;
     }
@@ -1014,13 +1014,13 @@ public static <T> Tensor<String> StringConcat(Tensor<String> X, Tensor<String> Y
         return (Tensor<String>) result;
     }
 
-    public static Tensor<String> StringNormalizer(Tensor<String> X, Optional<Integer> is_case_sensitive, Optional<String> locale, Optional<String[]> stopwords, Optional<String> case_change_action) {
+    public static Tensor<String> StringNormalizer(Tensor<String> X, Optional<Long> is_case_sensitive, Optional<String> locale, Optional<String[]> stopwords, Optional<String> case_change_action) {
         Object result = OnnxInterpreter.interpret(OnnxOps.StringNormalizer.class, List.of(X), List.of(is_case_sensitive, locale, stopwords, case_change_action));
         return (Tensor<String>) result;
     }
 
     public record StringSplitResult<T1, T2, T3>(Tensor<String> Y, Tensor<Long> Z) { }
-    public static <T1, T2, T3> StringSplitResult<T1, T2, T3> StringSplit(Tensor<String> X, Optional<String> delimiter, Optional<Integer> maxsplit) {
+    public static <T1, T2, T3> StringSplitResult<T1, T2, T3> StringSplit(Tensor<String> X, Optional<String> delimiter, Optional<Long> maxsplit) {
         Object result = OnnxInterpreter.interpret(OnnxOps.StringSplit.class, List.of(X), List.of(delimiter, maxsplit));
         Object[] resultArray = (Object[]) result;
         return new StringSplitResult<>((Tensor<String>)resultArray[0], (Tensor<Long>)resultArray[1]);
@@ -1046,7 +1046,7 @@ public static <T> Tensor<T> Tanh(Tensor<T> input) {
         return (Tensor<T>) result;
     }
 
-    public static <T, T1> Tensor<Float> TfIdfVectorizer(Tensor<T> X, int[] ngram_counts, int min_gram_length, Optional<String[]> pool_strings, String mode, int max_gram_length, int max_skip_count, Optional<int[]> pool_int64s, Optional<float[]> weights, int[] ngram_indexes) {
+    public static <T, T1> Tensor<Float> TfIdfVectorizer(Tensor<T> X, long[] ngram_counts, long min_gram_length, Optional<String[]> pool_strings, String mode, long max_gram_length, long max_skip_count, Optional<long[]> pool_int64s, Optional<float[]> weights, long[] ngram_indexes) {
         Object result = OnnxInterpreter.interpret(OnnxOps.TfIdfVectorizer.class, List.of(X), List.of(ngram_counts, min_gram_length, pool_strings, mode, max_gram_length, max_skip_count, pool_int64s, weights, ngram_indexes));
         return (Tensor<Float>) result;
     }
@@ -1062,41 +1062,41 @@ public static <T, T1> Tensor<T> Tile(Tensor<T> input, Tensor<Long> repeats) {
     }
 
     public record TopKResult<T, I>(Tensor<T> Values, Tensor<Long> Indices) { }
-    public static <T, I> TopKResult<T, I> TopK(Tensor<T> X, Tensor<Long> K, Optional<Integer> largest, Optional<Integer> sorted, Optional<Integer> axis) {
+    public static <T, I> TopKResult<T, I> TopK(Tensor<T> X, Tensor<Long> K, Optional<Long> largest, Optional<Long> sorted, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.TopK.class, List.of(X, K), List.of(largest, sorted, axis));
         Object[] resultArray = (Object[]) result;
         return new TopKResult<>((Tensor<T>)resultArray[0], (Tensor<Long>)resultArray[1]);
     }
 
-    public static <T> Tensor<T> Transpose(Tensor<T> data, Optional<int[]> perm) {
+    public static <T> Tensor<T> Transpose(Tensor<T> data, Optional<long[]> perm) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Transpose.class, List.of(data), List.of(perm));
         return (Tensor<T>) result;
     }
 
-    public static <T> Tensor<T> TreeEnsemble(Tensor<T> X, Optional<Integer> aggregate_function, Optional<byte[]> nodes_hitrates, int[] nodes_featureids, int[] nodes_falseleafs, Optional<Integer> post_transform, int[] nodes_trueleafs, byte[] nodes_modes, int[] nodes_falsenodeids, int[] nodes_truenodeids, byte[] leaf_weights, int[] leaf_targetids, int[] tree_roots, Optional<Integer> n_targets, Optional<int[]> nodes_missing_value_tracks_true, Optional<byte[]> membership_values, byte[] nodes_splits) {
+    public static <T> Tensor<T> TreeEnsemble(Tensor<T> X, Optional<Long> aggregate_function, Optional<byte[]> nodes_hitrates, long[] nodes_featureids, long[] nodes_falseleafs, Optional<Long> post_transform, long[] nodes_trueleafs, byte[] nodes_modes, long[] nodes_falsenodeids, long[] nodes_truenodeids, byte[] leaf_weights, long[] leaf_targetids, long[] tree_roots, Optional<Long> n_targets, Optional<long[]> nodes_missing_value_tracks_true, Optional<byte[]> membership_values, byte[] nodes_splits) {
         Object result = OnnxInterpreter.interpret(OnnxOps.TreeEnsemble.class, List.of(X), List.of(aggregate_function, nodes_hitrates, nodes_featureids, nodes_falseleafs, post_transform, nodes_trueleafs, nodes_modes, nodes_falsenodeids, nodes_truenodeids, leaf_weights, leaf_targetids, tree_roots, n_targets, nodes_missing_value_tracks_true, membership_values, nodes_splits));
         return (Tensor<T>) result;
     }
 
     public record TreeEnsembleClassifierResult<T1, T2>(Tensor<T2> Y, Tensor<Float> Z) { }
-    public static <T1, T2> TreeEnsembleClassifierResult<T1, T2> TreeEnsembleClassifier(Tensor<T1> X, Optional<int[]> classlabels_int64s, Optional<int[]> class_ids, Optional<float[]> nodes_hitrates, Optional<int[]> nodes_featureids, Optional<int[]> nodes_treeids, Optional<byte[]> class_weights_as_tensor, Optional<String> post_transform, Optional<String[]> nodes_modes, Optional<int[]> nodes_falsenodeids, Optional<String[]> classlabels_strings, Optional<int[]> nodes_truenodeids, Optional<int[]> nodes_nodeids, Optional<byte[]> nodes_hitrates_as_tensor, Optional<float[]> class_weights, Optional<byte[]> base_values_as_tensor, Optional<int[]> nodes_missing_value_tracks_true, Optional<int[]> class_nodeids, Optional<int[]> class_treeids, Optional<float[]> base_values, Optional<float[]> nodes_values, Optional<byte[]> nodes_values_as_tensor) {
+    public static <T1, T2> TreeEnsembleClassifierResult<T1, T2> TreeEnsembleClassifier(Tensor<T1> X, Optional<long[]> classlabels_int64s, Optional<long[]> class_ids, Optional<float[]> nodes_hitrates, Optional<long[]> nodes_featureids, Optional<long[]> nodes_treeids, Optional<byte[]> class_weights_as_tensor, Optional<String> post_transform, Optional<String[]> nodes_modes, Optional<long[]> nodes_falsenodeids, Optional<String[]> classlabels_strings, Optional<long[]> nodes_truenodeids, Optional<long[]> nodes_nodeids, Optional<byte[]> nodes_hitrates_as_tensor, Optional<float[]> class_weights, Optional<byte[]> base_values_as_tensor, Optional<long[]> nodes_missing_value_tracks_true, Optional<long[]> class_nodeids, Optional<long[]> class_treeids, Optional<float[]> base_values, Optional<float[]> nodes_values, Optional<byte[]> nodes_values_as_tensor) {
         Object result = OnnxInterpreter.interpret(OnnxOps.TreeEnsembleClassifier.class, List.of(X), List.of(classlabels_int64s, class_ids, nodes_hitrates, nodes_featureids, nodes_treeids, class_weights_as_tensor, post_transform, nodes_modes, nodes_falsenodeids, classlabels_strings, nodes_truenodeids, nodes_nodeids, nodes_hitrates_as_tensor, class_weights, base_values_as_tensor, nodes_missing_value_tracks_true, class_nodeids, class_treeids, base_values, nodes_values, nodes_values_as_tensor));
         Object[] resultArray = (Object[]) result;
         return new TreeEnsembleClassifierResult<>((Tensor<T2>)resultArray[0], (Tensor<Float>)resultArray[1]);
     }
 
-    public static <T> Tensor<Float> TreeEnsembleRegressor(Tensor<T> X, Optional<String> aggregate_function, Optional<float[]> nodes_hitrates, Optional<byte[]> target_weights_as_tensor, Optional<int[]> nodes_featureids, Optional<int[]> target_treeids, Optional<int[]> nodes_treeids, Optional<String> post_transform, Optional<String[]> nodes_modes, Optional<float[]> target_weights, Optional<int[]> nodes_falsenodeids, Optional<int[]> target_ids, Optional<int[]> nodes_truenodeids, Optional<int[]> target_nodeids, Optional<int[]> nodes_nodeids, Optional<byte[]> nodes_hitrates_as_tensor, Optional<byte[]> base_values_as_tensor, Optional<Integer> n_targets, Optional<int[]> nodes_missing_value_tracks_true, Optional<float[]> base_values, Optional<float[]> nodes_values, Optional<byte[]> nodes_values_as_tensor) {
+    public static <T> Tensor<Float> TreeEnsembleRegressor(Tensor<T> X, Optional<String> aggregate_function, Optional<float[]> nodes_hitrates, Optional<byte[]> target_weights_as_tensor, Optional<long[]> nodes_featureids, Optional<long[]> target_treeids, Optional<long[]> nodes_treeids, Optional<String> post_transform, Optional<String[]> nodes_modes, Optional<float[]> target_weights, Optional<long[]> nodes_falsenodeids, Optional<long[]> target_ids, Optional<long[]> nodes_truenodeids, Optional<long[]> target_nodeids, Optional<long[]> nodes_nodeids, Optional<byte[]> nodes_hitrates_as_tensor, Optional<byte[]> base_values_as_tensor, Optional<Long> n_targets, Optional<long[]> nodes_missing_value_tracks_true, Optional<float[]> base_values, Optional<float[]> nodes_values, Optional<byte[]> nodes_values_as_tensor) {
         Object result = OnnxInterpreter.interpret(OnnxOps.TreeEnsembleRegressor.class, List.of(X), List.of(aggregate_function, nodes_hitrates, target_weights_as_tensor, nodes_featureids, target_treeids, nodes_treeids, post_transform, nodes_modes, target_weights, nodes_falsenodeids, target_ids, nodes_truenodeids, target_nodeids, nodes_nodeids, nodes_hitrates_as_tensor, base_values_as_tensor, n_targets, nodes_missing_value_tracks_true, base_values, nodes_values, nodes_values_as_tensor));
         return (Tensor<Float>) result;
     }
 
-    public static <T> Tensor<T> Trilu(Tensor<T> input, Optional<Tensor<Long>> k, Optional<Integer> upper) {
+    public static <T> Tensor<T> Trilu(Tensor<T> input, Optional<Tensor<Long>> k, Optional<Long> upper) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Trilu.class, List.of(input, k), List.of(upper));
         return (Tensor<T>) result;
     }
 
     public record UniqueResult<T>(Tensor<T> Y, Tensor<Long> indices, Tensor<Long> inverse_indices, Tensor<Long> counts) { }
-    public static <T> UniqueResult<T> Unique(Tensor<T> X, Optional<Integer> sorted, Optional<Integer> axis) {
+    public static <T> UniqueResult<T> Unique(Tensor<T> X, Optional<Long> sorted, Optional<Long> axis) {
         Object result = OnnxInterpreter.interpret(OnnxOps.Unique.class, List.of(X), List.of(sorted, axis));
         Object[] resultArray = (Object[]) result;
         return new UniqueResult<>((Tensor<T>)resultArray[0], (Tensor<Long>)resultArray[1], (Tensor<Long>)resultArray[2], (Tensor<Long>)resultArray[3]);
@@ -1122,7 +1122,7 @@ public static <T, T1> Tensor<Boolean> Xor(Tensor<Boolean> A, Tensor<Boolean> B)
         return (Tensor<Boolean>) result;
     }
 
-    public static <T> List<Map<T, Float>> ZipMap(Tensor<Float> X, Optional<int[]> classlabels_int64s, Optional<String[]> classlabels_strings) {
+    public static <T> List<Map<T, Float>> ZipMap(Tensor<Float> X, Optional<long[]> classlabels_int64s, Optional<String[]> classlabels_strings) {
         Object result = OnnxInterpreter.interpret(OnnxOps.ZipMap.class, List.of(X), List.of(classlabels_int64s, classlabels_strings));
         return (List<Map<T, Float>>) result;
     }
diff --git a/cr-examples/onnx/src/main/java/oracle/code/onnx/compiler/InvokableLeafOps.java b/cr-examples/onnx/src/main/java/oracle/code/onnx/compiler/InvokableLeafOps.java
new file mode 100644
index 00000000000..2bf60e9daac
--- /dev/null
+++ b/cr-examples/onnx/src/main/java/oracle/code/onnx/compiler/InvokableLeafOps.java
@@ -0,0 +1,876 @@
+/*
+ * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package oracle.code.onnx.compiler;
+
+final class InvokableLeafOps {
+
+    public static String add(String a, String b) {
+        return a.concat(b);
+    }
+
+
+    public static boolean eq(Object a, Object b) {
+        return a == b;
+    }
+
+    public static boolean neq(Object a, Object b) {
+        return a != b;
+    }
+
+
+    public static boolean not(boolean l) {
+        return !l;
+    }
+
+    // int
+
+    public static int neg(int l) {
+        return -l;
+    }
+
+    public static int compl(int l) {
+        return ~l;
+    }
+
+    public static int add(int l, int r) {
+        return l + r;
+    }
+
+    public static int sub(int l, int r) {
+        return l - r;
+    }
+
+    public static int mul(int l, int r) {
+        return l * r;
+    }
+
+    public static int div(int l, int r) {
+        return l / r;
+    }
+
+    public static int mod(int l, int r) {
+        return l % r;
+    }
+
+    public static int or(int l, int r) {
+        return l | r;
+    }
+
+    public static int and(int l, int r) {
+        return l & r;
+    }
+
+    public static int xor(int l, int r) {
+        return l ^ r;
+    }
+
+    public static int lshl(int l, int r) {
+        return l << r;
+    }
+
+    public static int ashr(int l, int r) {
+        return l >> r;
+    }
+
+    public static int lshr(int l, int r) {
+        return l >>> r;
+    }
+
+    public static int lshl(int l, long r) {
+        return l << r;
+    }
+
+    public static int ashr(int l, long r) {
+        return l >> r;
+    }
+
+    public static int lshr(int l, long r) {
+        return l >>> r;
+    }
+
+    public static boolean eq(int l, int r) {
+        return l == r;
+    }
+
+    public static boolean neq(int l, int r) {
+        return l != r;
+    }
+
+    public static boolean gt(int l, int r) {
+        return l > r;
+    }
+
+    public static boolean ge(int l, int r) {
+        return l >= r;
+    }
+
+    public static boolean lt(int l, int r) {
+        return l < r;
+    }
+
+    public static boolean le(int l, int r) {
+        return l <= r;
+    }
+
+    // byte
+
+    public static byte neg(byte l) {
+        return (byte) -l;
+    }
+
+    public static byte compl(byte l) {
+        return (byte) ~l;
+    }
+
+    public static byte add(byte l, byte r) {
+        return (byte) (l + r);
+    }
+
+    public static byte sub(byte l, byte r) {
+        return (byte) (l - r);
+    }
+
+    public static byte mul(byte l, byte r) {
+        return (byte) (l * r);
+    }
+
+    public static byte div(byte l, byte r) {
+        return (byte) (l / r);
+    }
+
+    public static byte mod(byte l, byte r) {
+        return (byte) (l % r);
+    }
+
+    public static byte or(byte l, byte r) {
+        return (byte) (l | r);
+    }
+
+    public static byte and(byte l, byte r) {
+        return (byte) (l & r);
+    }
+
+    public static byte xor(byte l, byte r) {
+        return (byte) (l ^ r);
+    }
+
+    public static byte ashr(byte l, long r) {
+        return (byte) (l >> r);
+    }
+
+    public static byte lshr(byte l, long r) {
+        return (byte) (l >>> r);
+    }
+
+    public static byte lshl(byte l, int r) {
+        return (byte) (l << r);
+    }
+
+    public static byte ashr(byte l, int r) {
+        return (byte) (l >> r);
+    }
+
+    public static byte lshr(byte l, int r) {
+        return (byte) (l >>> r);
+    }
+
+    public static boolean eq(byte l, byte r) {
+        return l == r;
+    }
+
+    public static boolean neq(byte l, byte r) {
+        return l != r;
+    }
+
+    public static boolean gt(byte l, byte r) {
+        return l > r;
+    }
+
+    public static boolean ge(byte l, byte r) {
+        return l >= r;
+    }
+
+    public static boolean lt(byte l, byte r) {
+        return l < r;
+    }
+
+    public static boolean le(byte l, byte r) {
+        return l <= r;
+    }
+
+    // short
+
+    public static short neg(short l) {
+        return (short) -l;
+    }
+
+    public static short compl(short l) {
+        return (short) ~l;
+    }
+
+    public static short add(short l, short r) {
+        return (short) (l + r);
+    }
+
+    public static short sub(short l, short r) {
+        return (short) (l - r);
+    }
+
+    public static short mul(short l, short r) {
+        return (short) (l * r);
+    }
+
+    public static short div(short l, short r) {
+        return (short) (l / r);
+    }
+
+    public static short mod(short l, short r) {
+        return (short) (l % r);
+    }
+
+    public static short or(short l, short r) {
+        return (short) (l | r);
+    }
+
+    public static short and(short l, short r) {
+        return (short) (l & r);
+    }
+
+    public static short xor(short l, short r) {
+        return (short) (l ^ r);
+    }
+
+    public static short ashr(short l, long r) {
+        return (short) (l >> r);
+    }
+
+    public static short lshr(short l, long r) {
+        return (short) (l >>> r);
+    }
+
+    public static short lshl(short l, int r) {
+        return (short) (l << r);
+    }
+
+    public static short ashr(short l, int r) {
+        return (short) (l >> r);
+    }
+
+    public static short lshr(short l, int r) {
+        return (short) (l >>> r);
+    }
+
+    public static boolean eq(short l, short r) {
+        return l == r;
+    }
+
+    public static boolean neq(short l, short r) {
+        return l != r;
+    }
+
+    public static boolean gt(short l, short r) {
+        return l > r;
+    }
+
+    public static boolean ge(short l, short r) {
+        return l >= r;
+    }
+
+    public static boolean lt(short l, short r) {
+        return l < r;
+    }
+
+    public static boolean le(short l, short r) {
+        return l <= r;
+    }
+
+    // char
+
+    public static char neg(char l) {
+        return (char) -l;
+    }
+
+    public static char compl(char l) {
+        return (char) ~l;
+    }
+
+    public static char add(char l, char r) {
+        return (char) (l + r);
+    }
+
+    public static char sub(char l, char r) {
+        return (char) (l - r);
+    }
+
+    public static char mul(char l, char r) {
+        return (char) (l * r);
+    }
+
+    public static char div(char l, char r) {
+        return (char) (l / r);
+    }
+
+    public static char mod(char l, char r) {
+        return (char) (l % r);
+    }
+
+    public static char or(char l, char r) {
+        return (char) (l | r);
+    }
+
+    public static char and(char l, char r) {
+        return (char) (l & r);
+    }
+
+    public static char xor(char l, char r) {
+        return (char) (l ^ r);
+    }
+
+    public static char ashr(char l, long r) {
+        return (char) (l >> r);
+    }
+
+    public static char lshr(char l, long r) {
+        return (char) (l >>> r);
+    }
+
+    public static char lshl(char l, int r) {
+        return (char) (l << r);
+    }
+
+    public static char ashr(char l, int r) {
+        return (char) (l >> r);
+    }
+
+    public static char lshr(char l, int r) {
+        return (char) (l >>> r);
+    }
+
+    public static boolean eq(char l, char r) {
+        return l == r;
+    }
+
+    public static boolean neq(char l, char r) {
+        return l != r;
+    }
+
+    public static boolean gt(char l, char r) {
+        return l > r;
+    }
+
+    public static boolean ge(char l, char r) {
+        return l >= r;
+    }
+
+    public static boolean lt(char l, char r) {
+        return l < r;
+    }
+
+    public static boolean le(char l, char r) {
+        return l <= r;
+    }
+    // long
+
+    public static long neg(long l) {
+        return -l;
+    }
+
+    public static long compl(long l) {
+        return ~l;
+    }
+
+    public static long add(long l, long r) {
+        return l + r;
+    }
+
+    public static long sub(long l, long r) {
+        return l - r;
+    }
+
+    public static long mul(long l, long r) {
+        return l * r;
+    }
+
+    public static long div(long l, long r) {
+        return l / r;
+    }
+
+    public static long mod(long l, long r) {
+        return l % r;
+    }
+
+    public static long or(long l, long r) {
+        return l | r;
+    }
+
+    public static long and(long l, long r) {
+        return l & r;
+    }
+
+    public static long xor(long l, long r) {
+        return l ^ r;
+    }
+
+    public static long lshl(long l, long r) {
+        return l << r;
+    }
+
+    public static long ashr(long l, long r) {
+        return l >> r;
+    }
+
+    public static long lshr(long l, long r) {
+        return l >>> r;
+    }
+
+    public static long lshl(long l, int r) {
+        return l << r;
+    }
+
+    public static long ashr(long l, int r) {
+        return l >> r;
+    }
+
+    public static long lshr(long l, int r) {
+        return l >>> r;
+    }
+
+    public static boolean eq(long l, long r) {
+        return l == r;
+    }
+
+    public static boolean neq(long l, long r) {
+        return l != r;
+    }
+
+    public static boolean gt(long l, long r) {
+        return l > r;
+    }
+
+    public static boolean ge(long l, long r) {
+        return l >= r;
+    }
+
+    public static boolean lt(long l, long r) {
+        return l < r;
+    }
+
+    public static boolean le(long l, long r) {
+        return l <= r;
+    }
+
+
+    // float
+
+    static float neg(float l) {
+        return -l;
+    }
+
+    static float add(float l, float r) {
+        return l + r;
+    }
+
+    static float sub(float l, float r) {
+        return l - r;
+    }
+
+    static float mul(float l, float r) {
+        return l * r;
+    }
+
+    static float div(float l, float r) {
+        return l / r;
+    }
+
+    static float mod(float l, float r) {
+        return l % r;
+    }
+
+    public static boolean eq(float l, float r) {
+        return l == r;
+    }
+
+    public static boolean neq(float l, float r) {
+        return l != r;
+    }
+
+    public static boolean gt(float l, float r) {
+        return l > r;
+    }
+
+    public static boolean ge(float l, float r) {
+        return l >= r;
+    }
+
+    public static boolean lt(float l, float r) {
+        return l < r;
+    }
+
+    public static boolean le(float l, float r) {
+        return l <= r;
+    }
+
+
+    // double
+
+    static double neg(double l) {
+        return -l;
+    }
+
+    static double add(double l, double r) {
+        return l + r;
+    }
+
+    static double sub(double l, double r) {
+        return l - r;
+    }
+
+    static double mul(double l, double r) {
+        return l * r;
+    }
+
+    static double div(double l, double r) {
+        return l / r;
+    }
+
+    static double mod(double l, double r) {
+        return l % r;
+    }
+
+    public static boolean eq(double l, double r) {
+        return l == r;
+    }
+
+    public static boolean neq(double l, double r) {
+        return l != r;
+    }
+
+    public static boolean gt(double l, double r) {
+        return l > r;
+    }
+
+    public static boolean ge(double l, double r) {
+        return l >= r;
+    }
+
+    public static boolean lt(double l, double r) {
+        return l < r;
+    }
+
+    public static boolean le(double l, double r) {
+        return l <= r;
+    }
+
+
+    // boolean
+
+    static boolean eq(boolean l, boolean r) {
+        return l == r;
+    }
+
+    static boolean neq(boolean l, boolean r) {
+        return l != r;
+    }
+
+    static boolean and(boolean l, boolean r) {
+        return l & r;
+    }
+
+    static boolean or(boolean l, boolean r) {
+        return l | r;
+    }
+
+    static boolean xor(boolean l, boolean r) {
+        return l ^ r;
+    }
+
+
+    // Primitive conversions
+
+    // double conversion
+    static double conv_double(double i) {
+        return i;
+    }
+
+    static float conv_float(double i) {
+        return (float) i;
+    }
+
+    static long conv_long(double i) {
+        return (long) i;
+    }
+
+    static int conv_int(double i) {
+        return (int) i;
+    }
+
+    static short conv_short(double i) {
+        return (short) i;
+    }
+
+    static char conv_char(double i) {
+        return (char) i;
+    }
+
+    static byte conv_byte(double i) {
+        return (byte) i;
+    }
+
+    static boolean conv_boolean(double i) {
+        return ((int) i & 1) == 1;
+    }
+
+    // float conversion
+    static double conv_double(float i) {
+        return i;
+    }
+
+    static float conv_float(float i) {
+        return i;
+    }
+
+    static long conv_long(float i) {
+        return (long) i;
+    }
+
+    static int conv_int(float i) {
+        return (int) i;
+    }
+
+    static short conv_short(float i) {
+        return (short) i;
+    }
+
+    static char conv_char(float i) {
+        return (char) i;
+    }
+
+    static byte conv_byte(float i) {
+        return (byte) i;
+    }
+
+    static boolean conv_boolean(float i) {
+        return ((int) i & 1) == 1;
+    }
+
+    // long conversion
+    static double conv_double(long i) {
+        return (double) i;
+    }
+
+    static float conv_float(long i) {
+        return (float) i;
+    }
+
+    static long conv_long(long i) {
+        return i;
+    }
+
+    static int conv_int(long i) {
+        return (int) i;
+    }
+
+    static short conv_short(long i) {
+        return (short) i;
+    }
+
+    static char conv_char(long i) {
+        return (char) i;
+    }
+
+    static byte conv_byte(long i) {
+        return (byte) i;
+    }
+
+    static boolean conv_boolean(long i) {
+        return (i & 1) == 1;
+    }
+
+    // int conversion
+    static double conv_double(int i) {
+        return (double) i;
+    }
+
+    static float conv_float(int i) {
+        return (float) i;
+    }
+
+    static long conv_long(int i) {
+        return i;
+    }
+
+    static int conv_int(int i) {
+        return i;
+    }
+
+    static short conv_short(int i) {
+        return (short) i;
+    }
+
+    static char conv_char(int i) {
+        return (char) i;
+    }
+
+    static byte conv_byte(int i) {
+        return (byte) i;
+    }
+
+    static boolean conv_boolean(int i) {
+        return (i & 1) == 1;
+    }
+
+    // short conversion
+    static double conv_double(short i) {
+        return i;
+    }
+
+    static float conv_float(short i) {
+        return i;
+    }
+
+    static long conv_long(short i) {
+        return i;
+    }
+
+    static int conv_int(short i) {
+        return i;
+    }
+
+    static short conv_short(short i) {
+        return i;
+    }
+
+    static char conv_char(short i) {
+        return (char) i;
+    }
+
+    static byte conv_byte(short i) {
+        return (byte) i;
+    }
+
+    static boolean conv_boolean(short i) {
+        return (i & 1) == 1;
+    }
+
+    // char conversion
+    static double conv_double(char i) {
+        return i;
+    }
+
+    static float conv_float(char i) {
+        return i;
+    }
+
+    static long conv_long(char i) {
+        return i;
+    }
+
+    static int conv_int(char i) {
+        return i;
+    }
+
+    static short conv_short(char i) {
+        return (short) i;
+    }
+
+    static char conv_char(char i) {
+        return i;
+    }
+
+    static byte conv_byte(char i) {
+        return (byte) i;
+    }
+
+    static boolean conv_boolean(char i) {
+        return (i & 1) == 1;
+    }
+
+    // byte conversion
+    static double conv_double(byte i) {
+        return i;
+    }
+
+    static float conv_float(byte i) {
+        return i;
+    }
+
+    static long conv_long(byte i) {
+        return i;
+    }
+
+    static int conv_int(byte i) {
+        return i;
+    }
+
+    static short conv_short(byte i) {
+        return i;
+    }
+
+    static char conv_char(byte i) {
+        return (char) i;
+    }
+
+    static byte conv_byte(byte i) {
+        return i;
+    }
+
+    static boolean conv_boolean(byte i) {
+        return (i & 1) == 1;
+    }
+
+    // boolean conversion
+    static double conv_double(boolean i) {
+        return i ? 1d : 0d;
+    }
+
+    static float conv_float(boolean i) {
+        return i ? 1f : 0f;
+    }
+
+    static long conv_long(boolean i) {
+        return i ? 1l : 0l;
+    }
+
+    static int conv_int(boolean i) {
+        return i ? 1 : 0;
+    }
+
+    static short conv_short(boolean i) {
+        return i ? (short) 1 : 0;
+    }
+
+    static char conv_char(boolean i) {
+        return i ? (char) 1 : 0;
+    }
+
+    static byte conv_byte(boolean i) {
+        return i ? (byte) 1 : 0;
+    }
+
+    static boolean conv_boolean(boolean i) {
+        return i;
+    }
+}
diff --git a/cr-examples/onnx/src/main/java/oracle/code/onnx/compiler/OnnxPartialEvaluator.java b/cr-examples/onnx/src/main/java/oracle/code/onnx/compiler/OnnxPartialEvaluator.java
new file mode 100644
index 00000000000..3df3ff971a8
--- /dev/null
+++ b/cr-examples/onnx/src/main/java/oracle/code/onnx/compiler/OnnxPartialEvaluator.java
@@ -0,0 +1,565 @@
+/*
+ * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.  Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the LICENSE file that accompanied this code.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package oracle.code.onnx.compiler;
+
+import jdk.incubator.code.*;
+import jdk.incubator.code.op.CoreOp;
+import jdk.incubator.code.type.*;
+import oracle.code.onnx.OnnxOperators;
+import oracle.code.onnx.ir.OnnxOp;
+import oracle.code.onnx.ir.OnnxOps;
+
+import java.lang.invoke.*;
+import java.lang.reflect.Array;
+import java.util.*;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+final class OnnxPartialEvaluator {
+
+    static final JavaType ONNX_OPERATORS_CLASS = JavaType.type(OnnxOperators.class);
+
+    // Map from ONNX operator invocation to evaluated attributes
+    Map<CoreOp.InvokeOp, List<Object>> evaluatedAttributes;
+
+    // Operations that depend directly or indirectly on input parameters
+    // The operations' results are not evaluated
+    Set<Op> unevaluatedOperations;
+
+    public OnnxPartialEvaluator() {
+        this.evaluatedAttributes = new HashMap<>();
+        this.unevaluatedOperations = new HashSet<>();
+    }
+
+    public <T extends Op & Op.Invokable>
+    void evaluate(MethodHandles.Lookup l, T op) {
+        Map<Value, Object> evaluatedValues = new HashMap<>();
+        interpretEntryBlock(l, op.body().entryBlock(), new OpContext(), evaluatedValues);
+
+        evaluatedAttributes.forEach((invokeOp, objects) -> {
+            System.out.println(invokeOp.invokeDescriptor().name() + " -> " + objects);
+        });
+    }
+
+
+    @SuppressWarnings("serial")
+    public static final class InterpreterException extends RuntimeException {
+        private InterpreterException(Throwable cause) {
+            super(cause);
+        }
+    }
+
+    static InterpreterException interpreterException(Throwable cause) {
+        return new InterpreterException(cause);
+    }
+
+    record BlockContext(Block b, Map<Value, Object> evaluatedValues) {
+    }
+
+    static final class OpContext {
+        final Deque<BlockContext> stack = new ArrayDeque<>();
+
+        boolean isValueDefined(Value v) {
+            // @@@ Only dominating values are accessible
+            BlockContext bc = findContext(v);
+            return bc != null;
+        }
+
+        Object getValue(Value v) {
+            // @@@ Only dominating values are accessible
+            BlockContext bc = findContext(v);
+            if (bc != null) {
+                return bc.evaluatedValues.get(v);
+            } else {
+                throw interpreterException(new IllegalArgumentException("Undefined value: " + v));
+            }
+        }
+
+        Object setValue(Value v, Object o) {
+            BlockContext bc = findContext(v);
+            if (bc != null) {
+                throw interpreterException(new IllegalArgumentException("Value already defined: " + v));
+            }
+            stack.peek().evaluatedValues.put(v, o);
+            return o;
+        }
+
+        BlockContext findContext(Value v) {
+            Optional<BlockContext> ob = stack.stream().filter(b -> b.evaluatedValues.containsKey(v))
+                    .findFirst();
+            return ob.orElse(null);
+        }
+
+        boolean contains(Block.Reference s) {
+            Block sb = s.targetBlock();
+            return stack.stream().anyMatch(bc -> bc.b.equals(sb));
+        }
+
+        void successor(Block.Reference sb) {
+            List<Object> sbValues = sb.arguments().stream().map(this::getValue).toList();
+
+            Block b = sb.targetBlock();
+            Map<Value, Object> bValues = new HashMap<>();
+            for (int i = 0; i < sbValues.size(); i++) {
+                bValues.put(b.parameters().get(i), sbValues.get(i));
+            }
+
+            if (contains(sb)) {
+                // if block is already dominating pop back up from the back branch to the block
+                // before the successor block
+                while (!stack.peek().b.equals(sb.targetBlock())) {
+                    stack.pop();
+                }
+                stack.pop();
+            }
+            stack.push(new BlockContext(b, bValues));
+        }
+
+        void popTo(BlockContext bc) {
+            while (!stack.peek().equals(bc)) {
+                stack.pop();
+            }
+        }
+    }
+
+    static final class VarBox
+            implements CoreOp.Var<Object> {
+        Object value;
+
+        public Object value() {
+            return value;
+        }
+
+        VarBox(Object value) {
+            this.value = value;
+        }
+
+        static final Object UINITIALIZED = new Object();
+    }
+
+    record TupleRecord(List<Object> components) {
+        Object getComponent(int index) {
+            return components.get(index);
+        }
+
+        TupleRecord with(int index, Object value) {
+            List<Object> copy = new ArrayList<>(components);
+            copy.set(index, value);
+            return new TupleRecord(copy);
+        }
+    }
+
+    void interpretBody(MethodHandles.Lookup l, Body body,
+                       OpContext oc,
+                       List<Object> args) {
+        List<Block.Parameter> parameters = body.entryBlock().parameters();
+        if (parameters.size() != args.size()) {
+            throw interpreterException(new IllegalArgumentException(
+                    "Incorrect number of arguments arguments"));
+        }
+
+        // Map symbolic parameters to runtime arguments
+        Map<Value, Object> arguments = new HashMap<>();
+        for (int i = 0; i < parameters.size(); i++) {
+            arguments.put(parameters.get(i), args.get(i));
+        }
+
+        interpretEntryBlock(l, body.entryBlock(), oc, arguments);
+    }
+
+    void interpretEntryBlock(MethodHandles.Lookup l, Block entry,
+                             OpContext oc,
+                             Map<Value, Object> evaluatedValues) {
+        assert entry.isEntryBlock();
+
+        // If the stack is not empty it means we are interpreting
+        // an entry block with a parent body whose nearest ancestor body
+        // is the current context block's parent body
+        BlockContext yieldContext = oc.stack.peek();
+        assert yieldContext == null ||
+                yieldContext.b().parentBody() == entry.parentBody().parentOp().ancestorBody();
+
+        // Note that first block cannot have any successors so the queue will have at least one entry
+        oc.stack.push(new BlockContext(entry, evaluatedValues));
+        while (true) {
+            BlockContext bc = oc.stack.peek();
+
+            // Execute all but the terminating operation
+            int nops = bc.b.ops().size();
+            try {
+                for (int i = 0; i < nops - 1; i++) {
+                    Op op = bc.b.ops().get(i);
+                    assert !(op instanceof Op.Terminating) : op.opName();
+
+                    Object result = interpretOp(l, oc, op);
+                    if (result != null) {
+                        oc.setValue(op.result(), result);
+                    }
+                }
+            } catch (InterpreterException e) {
+                throw e;
+            }
+
+            // Execute the terminating operation
+            Op to = bc.b.terminatingOp();
+            if (!to.operands().stream().allMatch(oc::isValueDefined)) {
+                // Ignore operation if any value is undefined, meaning it is not part of the attribute value space
+                unevaluatedOperations.add(to);
+            }
+
+            if (to instanceof CoreOp.ConditionalBranchOp cb) {
+                boolean p;
+                Object bop = oc.getValue(cb.predicate());
+                if (bop instanceof Boolean bp) {
+                    p = bp;
+                } else if (bop instanceof Integer ip) {
+                    // @@@ This is required when lifting up from bytecode, since boolean values
+            // are erased to int values, and the bytecode lifting implementation is not currently
+                    // sophisticated enough to recover the type information
+                    p = ip != 0;
+                } else {
+                    throw interpreterException(
+                            new UnsupportedOperationException("Unsupported type input to operation: " + cb));
+                }
+                Block.Reference sb = p ? cb.trueBranch() : cb.falseBranch();
+                oc.successor(sb);
+            } else if (to instanceof CoreOp.BranchOp b) {
+                Block.Reference sb = b.branch();
+
+                oc.successor(sb);
+            } else if (to instanceof CoreOp.ReturnOp ret) {
+                // @@@ value should not be in scope
+                // return rv == null ? null : oc.getValue(rv);
+                return;
+            } else {
+                throw interpreterException(
+                        new UnsupportedOperationException("Unsupported terminating operation: " + to.opName()));
+            }
+        }
+    }
+
+
+    @SuppressWarnings("unchecked")
+    public static <E extends Throwable> void eraseAndThrow(Throwable e) throws E {
+        throw (E) e;
+    }
+
+    @SuppressWarnings({"rawtypes", "unchecked"})
+    static Class<? extends OnnxOp> onnxOpClassFromName(String operatorName) {
+        Class<? extends OnnxOp> opClass;
+        try {
+            return (Class) Class.forName(OnnxOps.class.getName() + "$" + operatorName);
+        } catch (ClassNotFoundException e) {
+            throw new InternalError(e);
+        }
+    }
+
+    static OnnxOp.OnnxSchema schemaFromOnnxOpClass(Class<? extends OnnxOp> opClass) {
+        try {
+            return (OnnxOp.OnnxSchema) opClass.getField("SCHEMA").get(null);
+        } catch (ReflectiveOperationException e) {
+            throw new InternalError(e);
+        }
+    }
+
+    Object interpretOp(MethodHandles.Lookup l, OpContext oc, Op o) {
+        // Invocation to ONNX operator
+        // The input operands will be left unevaluated
+        // The attribute operands will be evaluated
+        // @@@ Clone attributes or disallow subsequent operation
+        if (o instanceof CoreOp.InvokeOp io && io.invokeDescriptor().refType().equals(ONNX_OPERATORS_CLASS)) {
+            String operatorName = io.invokeDescriptor().name();
+
+            Class<? extends OnnxOp> opClass = onnxOpClassFromName(operatorName);
+            OnnxOp.OnnxSchema schema = schemaFromOnnxOpClass(opClass);
+
+            List<OnnxOp.OnnxParameter> inputs = schema.inputs();
+            assert o.operands().subList(0, inputs.size()).stream().noneMatch(oc::isValueDefined);
+            List<OnnxOp.OnnxAttribute> attributes = schema.attributes();
+
+            if (opClass == OnnxOps.Constant.class && o.operands().size() == 1) {
+                // Specialized one argument invocations
+                List<Object> attrs = new ArrayList<>();
+                for (OnnxOp.OnnxAttribute attribute : attributes) {
+                    if (JavaType.type(attribute.type()).equals(o.operands().getFirst().type())) {
+                        attrs.add(Optional.of(oc.getValue(o.operands().getFirst())));
+                    } else {
+                        attrs.add(Optional.empty());
+                    }
+                }
+                evaluatedAttributes.put(io, attrs);
+            } else {
+                for (int i = 0; i < attributes.size(); i++) {
+                    assert oc.isValueDefined(o.operands().get(inputs.size() + i)) : operatorName;
+                }
+                List<Object> attrs = o.operands().subList(inputs.size(), inputs.size() + attributes.size()).stream()
+                        .map(oc::getValue)
+                        .toList();
+                evaluatedAttributes.put(io, attrs);
+            }
+
+            unevaluatedOperations.add(o);
+            return null;
+        } else if (!o.operands().stream().allMatch(oc::isValueDefined)) {
+            // Ignore operation if any value is undefined, meaning it is not part of the attribute value space
+            unevaluatedOperations.add(o);
+            return null;
+        }
+
+        switch (o) {
+            case CoreOp.ConstantOp co -> {
+                if (co.resultType().equals(JavaType.J_L_CLASS)) {
+                    return resolveToClass(l, (JavaType) co.value());
+                } else {
+                    return co.value();
+                }
+            }
+            case CoreOp.InvokeOp co -> {
+                MethodType target = resolveToMethodType(l, o.opType());
+                MethodHandles.Lookup il = switch (co.invokeKind()) {
+                    case STATIC, INSTANCE -> l;
+                    case SUPER -> l.in(target.parameterType(0));
+                };
+                MethodHandle mh = resolveToMethodHandle(il, co.invokeDescriptor(), co.invokeKind());
+
+                mh = mh.asType(target).asFixedArity();
+                Object[] values = o.operands().stream().map(oc::getValue).toArray();
+                return invoke(mh, values);
+            }
+            case CoreOp.NewOp no -> {
+                Object[] values = o.operands().stream().map(oc::getValue).toArray();
+                JavaType nType = (JavaType) no.constructorType().returnType();
+                if (nType instanceof ArrayType at) {
+                    if (values.length > at.dimensions()) {
+                        throw interpreterException(new IllegalArgumentException("Bad constructor NewOp: " + no));
+                    }
+                    int[] lengths = Stream.of(values).mapToInt(v -> (int) v).toArray();
+                    for (int length : lengths) {
+                        nType = ((ArrayType) nType).componentType();
+                    }
+                    return Array.newInstance(resolveToClass(l, nType), lengths);
+                } else {
+                    MethodHandle mh = constructorHandle(l, no.constructorType());
+                    return invoke(mh, values);
+                }
+            }
+            case CoreOp.VarOp vo -> {
+                Object v = vo.isUninitialized()
+                        ? VarBox.UINITIALIZED
+                        : oc.getValue(o.operands().get(0));
+                return new VarBox(v);
+            }
+            case CoreOp.VarAccessOp.VarLoadOp vlo -> {
+                // Cast to CoreOp.Var, since the instance may have originated as an external instance
+                // via a captured value map
+                CoreOp.Var<?> vb = (CoreOp.Var<?>) oc.getValue(o.operands().get(0));
+                Object value = vb.value();
+                if (value == VarBox.UINITIALIZED) {
+                    throw interpreterException(new IllegalStateException("Loading from uninitialized variable"));
+                }
+                return value;
+            }
+            case CoreOp.VarAccessOp.VarStoreOp vso -> {
+                VarBox vb = (VarBox) oc.getValue(o.operands().get(0));
+                vb.value = oc.getValue(o.operands().get(1));
+                return null;
+            }
+            case CoreOp.TupleOp to -> {
+                List<Object> values = o.operands().stream().map(oc::getValue).toList();
+                return new TupleRecord(values);
+            }
+            case CoreOp.TupleLoadOp tlo -> {
+                TupleRecord tb = (TupleRecord) oc.getValue(o.operands().get(0));
+                return tb.getComponent(tlo.index());
+            }
+            case CoreOp.TupleWithOp two -> {
+                TupleRecord tb = (TupleRecord) oc.getValue(o.operands().get(0));
+                return tb.with(two.index(), oc.getValue(o.operands().get(1)));
+            }
+            case CoreOp.FieldAccessOp.FieldLoadOp fo -> {
+                if (fo.operands().isEmpty()) {
+                    VarHandle vh = fieldStaticHandle(l, fo.fieldDescriptor());
+                    return vh.get();
+                } else {
+                    Object v = oc.getValue(o.operands().get(0));
+                    VarHandle vh = fieldHandle(l, fo.fieldDescriptor());
+                    return vh.get(v);
+                }
+            }
+            case CoreOp.FieldAccessOp.FieldStoreOp fo -> {
+                if (fo.operands().size() == 1) {
+                    Object v = oc.getValue(o.operands().get(0));
+                    VarHandle vh = fieldStaticHandle(l, fo.fieldDescriptor());
+                    vh.set(v);
+                } else {
+                    Object r = oc.getValue(o.operands().get(0));
+                    Object v = oc.getValue(o.operands().get(1));
+                    VarHandle vh = fieldHandle(l, fo.fieldDescriptor());
+                    vh.set(r, v);
+                }
+                return null;
+            }
+            case CoreOp.InstanceOfOp io -> {
+                Object v = oc.getValue(o.operands().get(0));
+                return isInstance(l, io.type(), v);
+            }
+            case CoreOp.CastOp co -> {
+                Object v = oc.getValue(o.operands().get(0));
+                return cast(l, co.type(), v);
+            }
+            case CoreOp.ArrayLengthOp arrayLengthOp -> {
+                Object a = oc.getValue(o.operands().get(0));
+                return Array.getLength(a);
+            }
+            case CoreOp.ArrayAccessOp.ArrayLoadOp arrayLoadOp -> {
+                Object a = oc.getValue(o.operands().get(0));
+                Object index = oc.getValue(o.operands().get(1));
+                return Array.get(a, (int) index);
+            }
+            case CoreOp.ArrayAccessOp.ArrayStoreOp arrayStoreOp -> {
+                Object a = oc.getValue(o.operands().get(0));
+                Object index = oc.getValue(o.operands().get(1));
+                Object v = oc.getValue(o.operands().get(2));
+                Array.set(a, (int) index, v);
+                return null;
+            }
+            case CoreOp.ArithmeticOperation arithmeticOperation -> {
+                MethodHandle mh = opHandle(l, o.opName(), o.opType());
+                Object[] values = o.operands().stream().map(oc::getValue).toArray();
+                return invoke(mh, values);
+            }
+            case CoreOp.TestOperation testOperation -> {
+                MethodHandle mh = opHandle(l, o.opName(), o.opType());
+                Object[] values = o.operands().stream().map(oc::getValue).toArray();
+                return invoke(mh, values);
+            }
+            case CoreOp.ConvOp convOp -> {
+                MethodHandle mh = opHandle(l, o.opName() + "_" + o.opType().returnType(), o.opType());
+                Object[] values = o.operands().stream().map(oc::getValue).toArray();
+                return invoke(mh, values);
+            }
+            case CoreOp.ConcatOp concatOp -> {
+                return o.operands().stream()
+                        .map(oc::getValue)
+                        .map(String::valueOf)
+                        .collect(Collectors.joining());
+            }
+            case null, default -> throw interpreterException(
+                    new UnsupportedOperationException("Unsupported operation: " + o.opName()));
+        }
+    }
+
+    static MethodHandle opHandle(MethodHandles.Lookup l, String opName, FunctionType ft) {
+        MethodType mt = resolveToMethodType(l, ft).erase();
+        try {
+            return MethodHandles.lookup().findStatic(InvokableLeafOps.class, opName, mt);
+        } catch (NoSuchMethodException | IllegalAccessException e) {
+            throw interpreterException(e);
+        }
+    }
+
+    static MethodHandle constructorHandle(MethodHandles.Lookup l, FunctionType ft) {
+        MethodType mt = resolveToMethodType(l, ft);
+
+        if (mt.returnType().isArray()) {
+            if (mt.parameterCount() != 1 || mt.parameterType(0) != int.class) {
+                throw interpreterException(new IllegalArgumentException("Bad constructor descriptor: " + ft));
+            }
+            return MethodHandles.arrayConstructor(mt.returnType());
+        } else {
+            try {
+                return l.findConstructor(mt.returnType(), mt.changeReturnType(void.class));
+            } catch (NoSuchMethodException | IllegalAccessException e) {
+                throw interpreterException(e);
+            }
+        }
+    }
+
+    static VarHandle fieldStaticHandle(MethodHandles.Lookup l, FieldRef d) {
+        return resolveToVarHandle(l, d);
+    }
+
+    static VarHandle fieldHandle(MethodHandles.Lookup l, FieldRef d) {
+        return resolveToVarHandle(l, d);
+    }
+
+    static Object isInstance(MethodHandles.Lookup l, TypeElement d, Object v) {
+        Class<?> c = resolveToClass(l, d);
+        return c.isInstance(v);
+    }
+
+    static Object cast(MethodHandles.Lookup l, TypeElement d, Object v) {
+        Class<?> c = resolveToClass(l, d);
+        return c.cast(v);
+    }
+
+    static MethodHandle resolveToMethodHandle(MethodHandles.Lookup l, MethodRef d, CoreOp.InvokeOp.InvokeKind kind) {
+        try {
+            return d.resolveToHandle(l, kind);
+        } catch (ReflectiveOperationException e) {
+            throw interpreterException(e);
+        }
+    }
+
+    static VarHandle resolveToVarHandle(MethodHandles.Lookup l, FieldRef d) {
+        try {
+            return d.resolveToHandle(l);
+        } catch (ReflectiveOperationException e) {
+            throw interpreterException(e);
+        }
+    }
+
+    public static MethodType resolveToMethodType(MethodHandles.Lookup l, FunctionType ft) {
+        try {
+            return MethodRef.toNominalDescriptor(ft).resolveConstantDesc(l);
+        } catch (ReflectiveOperationException e) {
+            throw interpreterException(e);
+        }
+    }
+
+    public static Class<?> resolveToClass(MethodHandles.Lookup l, TypeElement d) {
+        try {
+            if (d instanceof JavaType jt) {
+                return (Class<?>) jt.erasure().resolve(l);
+            } else {
+                throw new ReflectiveOperationException();
+            }
+        } catch (ReflectiveOperationException e) {
+            throw interpreterException(e);
+        }
+    }
+
+    static Object invoke(MethodHandle m, Object... args) {
+        try {
+            return m.invokeWithArguments(args);
+        } catch (RuntimeException | Error e) {
+            throw e;
+        } catch (Throwable e) {
+            eraseAndThrow(e);
+            throw new InternalError("should not reach here");
+        }
+    }
+}
diff --git a/cr-examples/onnx/src/main/java/oracle/code/onnx/compiler/OnnxTransformer.java b/cr-examples/onnx/src/main/java/oracle/code/onnx/compiler/OnnxTransformer.java
new file mode 100644
index 00000000000..5b6a44e4573
--- /dev/null
+++ b/cr-examples/onnx/src/main/java/oracle/code/onnx/compiler/OnnxTransformer.java
@@ -0,0 +1,229 @@
+package oracle.code.onnx.compiler;
+
+import jdk.incubator.code.Op;
+import jdk.incubator.code.TypeElement;
+import jdk.incubator.code.Value;
+import jdk.incubator.code.analysis.SSA;
+import jdk.incubator.code.op.CoreOp;
+import jdk.incubator.code.type.*;
+import oracle.code.onnx.OnnxOperators;
+import oracle.code.onnx.Tensor;
+import oracle.code.onnx.ir.OnnxOp;
+import oracle.code.onnx.ir.OnnxOps;
+import oracle.code.onnx.ir.OnnxType;
+
+import java.lang.invoke.MethodHandles;
+import java.lang.reflect.*;
+import java.util.*;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+// Transform the Java code model of an ONNX function to an ONNX code model
+public class OnnxTransformer {
+
+    static final JavaType ONNX_OPERATORS_CLASS = JavaType.type(OnnxOperators.class);
+
+    private OnnxTransformer() {
+    }
+
+    public static CoreOp.FuncOp transform(MethodHandles.Lookup l, CoreOp.FuncOp in) {
+        OnnxPartialEvaluator pe = new OnnxPartialEvaluator();
+        pe.evaluate(l, in);
+
+        FunctionType ft = FunctionType.functionType(
+                type(in.invokableType().returnType()),
+                in.invokableType().parameterTypes().stream().map(OnnxTransformer::type).toList()
+        );
+
+        CoreOp.FuncOp onnxModel = CoreOp.func(in.funcName(), ft).body(b -> {
+            b.transformBody(in.body(), b.parameters(), (bb, op) -> {
+                if (!pe.unevaluatedOperations.contains(op)) {
+                    return bb;
+                }
+                switch (op) {
+                    // Transform invocation to ONNX operator to operation modeling the operator
+                    case CoreOp.InvokeOp io when io.invokeDescriptor().refType().equals(ONNX_OPERATORS_CLASS) -> {
+                        String operatorName = io.invokeDescriptor().name();
+                        Class<? extends OnnxOp> opClass = onnxOpClassFromName(operatorName);
+                        OnnxOp.OnnxSchema schema = schemaFromOnnxOpClass(opClass);
+
+                        List<Object> attributes = pe.evaluatedAttributes.get(io);
+
+                        Method opMethod = Stream.of(OnnxOps.class.getMethods())
+                                .filter(m -> m.getName().equals(operatorName))
+                                .findFirst().orElseThrow();
+
+                        List<Object> opArgs = new ArrayList<>();
+
+                        // @@@ Operator API currently requires all optional output parameters are required
+                        if (schema.outputs().stream().anyMatch(p -> p.quantifier().isOptional())) {
+                            opArgs.add(recordTypeToTupleType(l, (ClassType) op.resultType()));
+                            Set<? extends OnnxOp.OnnxParameter> optionalOutputs = schema.outputs().stream()
+                                    .filter(p -> p.quantifier().isOptional())
+                                    .collect(Collectors.toSet());
+                            opArgs.add(optionalOutputs);
+                        } else {
+                            opArgs.add(type(op.resultType()));
+                        }
+
+                        for (int i = 0; i < schema.inputs().size(); i++) {
+                            OnnxOp.OnnxParameter p = schema.inputs().get(i);
+                            Value v = io.operands().get(i);
+
+                            switch (p.quantifier()) {
+                                case REQUIRED -> {
+                                    opArgs.add(bb.context().getValue(v));
+                                }
+                                case OPTIONAL -> {
+                                    // Evaluation of expressions Optional.empty and Optional.of() with symbolic values
+                                    if (v instanceof Op.Result r && r.op() instanceof CoreOp.InvokeOp optionalInvoke
+                                            && optionalInvoke.invokeDescriptor().refType().equals(JavaType.type(Optional.class))) {
+                                        switch (optionalInvoke.invokeDescriptor().name()) {
+                                            case "of" -> {
+                                                opArgs.add(Optional.of(bb.context().getValue(optionalInvoke.operands().getFirst())));
+                                            }
+                                            case "empty" -> {
+                                                opArgs.add(Optional.empty());
+                                            }
+                                            default -> throw new UnsupportedOperationException();
+                                        }
+                                    } else {
+                                        throw new UnsupportedOperationException();
+                                    }
+                                }
+                                case VARIADIC -> {
+                                    throw new UnsupportedOperationException();
+                                }
+                            }
+                        }
+                        opArgs.addAll(attributes);
+
+                        OnnxOp onnxOp;
+                        try {
+                            onnxOp = (OnnxOp) opMethod.invoke(null, opArgs.toArray());
+                        } catch (ReflectiveOperationException | RuntimeException e) {
+                            throw new RuntimeException(e);
+                        }
+                        Op.Result result = bb.op(onnxOp);
+                        bb.context().mapValue(io.result(), result);
+                    }
+                    // Transform access to the result of an operator that is a record access
+                    case CoreOp.InvokeOp io when
+                            recordComponentAccessToTupleIndex(l, io.invokeDescriptor()) instanceof Integer index -> {
+                        Op.Result result = bb.op(CoreOp.tupleLoad(bb.context().getValue(io.operands().getFirst()), index));
+                        bb.context().mapValue(io.result(), result);
+                    }
+                    // Copy remaining operations, which may be removed by later transformations
+                    default -> bb.op(op);
+                }
+                return bb;
+            });
+        });
+
+        return SSA.transform(onnxModel).transform((b, op) -> {
+            // Drop any non-terminating operation whose result is not used
+            if (op instanceof Op.Terminating || !op.result().uses().isEmpty()) {
+                b.op(op);
+            }
+            return b;
+        });
+    }
+
+    @SuppressWarnings({"rawtypes", "unchecked"})
+    static Class<? extends OnnxOp> onnxOpClassFromName(String operatorName) {
+        Class<? extends OnnxOp> opClass;
+        try {
+            return (Class) Class.forName(OnnxOps.class.getName() + "$" + operatorName);
+        } catch (ClassNotFoundException e) {
+            throw new InternalError(e);
+        }
+    }
+
+    static OnnxOp.OnnxSchema schemaFromOnnxOpClass(Class<? extends OnnxOp> opClass) {
+        try {
+            return (OnnxOp.OnnxSchema) opClass.getField("SCHEMA").get(null);
+        } catch (ReflectiveOperationException e) {
+            throw new InternalError(e);
+        }
+    }
+
+    static TupleType recordTypeToTupleType(MethodHandles.Lookup l, ClassType recordType) {
+        Class<?> recordClass;
+        try {
+            recordClass = (Class<?>) recordType.rawType().resolve(l);
+        } catch (ReflectiveOperationException e) {
+            throw new RuntimeException(e);
+        }
+        assert recordClass.isRecord();
+
+        List<TypeElement> tupleComponentTypes = new ArrayList<>();
+        for (RecordComponent rc : recordClass.getRecordComponents()) {
+            switch (rc.getGenericType()) {
+                case ParameterizedType pt when pt.getRawType().equals(Tensor.class) -> {
+                    Type elementType = pt.getActualTypeArguments()[0];
+                    switch (elementType) {
+                        case Class<?> _ -> {
+                            tupleComponentTypes.add(type(JavaType.type(pt)));
+                        }
+                        case TypeVariable<?> tv -> {
+                            // Resolve type variable
+                            JavaType e = null;
+                            for (int j = 0; j < recordClass.getTypeParameters().length; j++) {
+                                if (recordClass.getTypeParameters()[j].getName().equals(tv.getName())) {
+                                    e = recordType.typeArguments().get(j);
+                                    break;
+                                }
+                            }
+                            tupleComponentTypes.add(type(JavaType.parameterized(JavaType.type(Tensor.class), e)));
+                        }
+                        default -> throw new IllegalStateException("Unexpected value: " + elementType);
+                    }
+                }
+                default -> throw new IllegalStateException("Unexpected value: " + rc.getGenericType());
+            }
+        }
+
+        return TupleType.tupleType(tupleComponentTypes);
+    }
+
+    static Integer recordComponentAccessToTupleIndex(MethodHandles.Lookup l, MethodRef ref) {
+        if (ref.refType() instanceof ClassType ct && ct.toClassName().startsWith("oracle.code.onnx.OnnxOperators$")) {
+            Class<?> refClass;
+            try {
+                refClass = (Class<?>) ct.resolve(l);
+            } catch (ReflectiveOperationException e) {
+                throw new RuntimeException(e);
+            }
+
+            if (refClass.isRecord()) {
+                RecordComponent[] recordComponents = refClass.getRecordComponents();
+                for (int i = 0; i < recordComponents.length; i++) {
+                    if (recordComponents[i].getName().equals(ref.name())) {
+                        return i;
+                    }
+                }
+                throw new InternalError();
+            }
+        }
+        return null;
+    }
+
+    static final TypeElement TENSOR_RAW_CLASS = JavaType.type(Tensor.class);
+
+    // @@@ Map of Java tensor types to ONNX tensor types
+    // @@@ Shape??
+    static OnnxType type(TypeElement type) {
+        if (type instanceof ClassType ct && ct.rawType().equals(TENSOR_RAW_CLASS)) {
+            JavaType elementType = ct.typeArguments().getFirst();
+            if (elementType.equals(JavaType.J_L_INTEGER)) {
+                return OnnxType.TENSOR_INT32;
+            } else if (elementType.equals(JavaType.J_L_FLOAT)) {
+                return OnnxType.TENSOR_FLOAT32;
+            } else if (elementType.equals(JavaType.J_L_LONG)) {
+                return OnnxType.TENSOR_INT64;
+            }
+        }
+        throw new UnsupportedOperationException("Unknown type: " + type);
+    }
+
+}
diff --git a/cr-examples/onnx/src/main/java/oracle/code/onnx/ir/OnnxOps.java b/cr-examples/onnx/src/main/java/oracle/code/onnx/ir/OnnxOps.java
index 940f0b23202..1c584bb07e3 100644
--- a/cr-examples/onnx/src/main/java/oracle/code/onnx/ir/OnnxOps.java
+++ b/cr-examples/onnx/src/main/java/oracle/code/onnx/ir/OnnxOps.java
@@ -852,7 +852,7 @@ public static final class AffineGrid extends OnnxOp {
         public static final String NAME = "AffineGrid";
 
         public enum Attribute implements OnnxAttribute {
-            align_corners(Integer.class, true, 0),
+            align_corners(Long.class, true, 0L),
             ;
 
                 final Class<?> t;
@@ -965,7 +965,7 @@ public AffineGrid transform(CopyContext cc, OpTransformer ot) {
             return new AffineGrid(this, cc);
         }
 
-        AffineGrid(TypeElement resultType, Value theta, Value size, java.util.Optional<Integer> align_corners) {
+        AffineGrid(TypeElement resultType, Value theta, Value size, java.util.Optional<Long> align_corners) {
             super(SCHEMA, resultType, Set.of(), List.of(theta, size), List.of(align_corners));
         }
 
@@ -987,14 +987,14 @@ public Value size() {
             return operands().get(1);
         }
 
-        public java.util.Optional<Integer> align_corners() {
-            Integer align_corners = Attribute.align_corners.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> align_corners() {
+            Long align_corners = Attribute.align_corners.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(align_corners);
         }
 
     }
 
-    public static AffineGrid AffineGrid(TypeElement resultType, Value theta, Value size, java.util.Optional<Integer> align_corners) {
+    public static AffineGrid AffineGrid(TypeElement resultType, Value theta, Value size, java.util.Optional<Long> align_corners) {
         return new AffineGrid(resultType, theta, size, align_corners);
     }
 
@@ -1123,9 +1123,9 @@ public static final class ArgMax extends OnnxOp {
         public static final String NAME = "ArgMax";
 
         public enum Attribute implements OnnxAttribute {
-            keepdims(Integer.class, true, 1),
-            select_last_index(Integer.class, true, 0),
-            axis(Integer.class, true, 0),
+            keepdims(Long.class, true, 1L),
+            select_last_index(Long.class, true, 0L),
+            axis(Long.class, true, 0L),
             ;
 
                 final Class<?> t;
@@ -1236,7 +1236,7 @@ public ArgMax transform(CopyContext cc, OpTransformer ot) {
             return new ArgMax(this, cc);
         }
 
-        ArgMax(TypeElement resultType, Value data, java.util.Optional<Integer> keepdims, java.util.Optional<Integer> select_last_index, java.util.Optional<Integer> axis) {
+        ArgMax(TypeElement resultType, Value data, java.util.Optional<Long> keepdims, java.util.Optional<Long> select_last_index, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(data), List.of(keepdims, select_last_index, axis));
         }
 
@@ -1254,24 +1254,24 @@ public Value data() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
-        public java.util.Optional<Integer> select_last_index() {
-            Integer select_last_index = Attribute.select_last_index.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> select_last_index() {
+            Long select_last_index = Attribute.select_last_index.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(select_last_index);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static ArgMax ArgMax(TypeElement resultType, Value data, java.util.Optional<Integer> keepdims, java.util.Optional<Integer> select_last_index, java.util.Optional<Integer> axis) {
+    public static ArgMax ArgMax(TypeElement resultType, Value data, java.util.Optional<Long> keepdims, java.util.Optional<Long> select_last_index, java.util.Optional<Long> axis) {
         return new ArgMax(resultType, data, keepdims, select_last_index, axis);
     }
 
@@ -1280,9 +1280,9 @@ public static final class ArgMin extends OnnxOp {
         public static final String NAME = "ArgMin";
 
         public enum Attribute implements OnnxAttribute {
-            keepdims(Integer.class, true, 1),
-            select_last_index(Integer.class, true, 0),
-            axis(Integer.class, true, 0),
+            keepdims(Long.class, true, 1L),
+            select_last_index(Long.class, true, 0L),
+            axis(Long.class, true, 0L),
             ;
 
                 final Class<?> t;
@@ -1393,7 +1393,7 @@ public ArgMin transform(CopyContext cc, OpTransformer ot) {
             return new ArgMin(this, cc);
         }
 
-        ArgMin(TypeElement resultType, Value data, java.util.Optional<Integer> keepdims, java.util.Optional<Integer> select_last_index, java.util.Optional<Integer> axis) {
+        ArgMin(TypeElement resultType, Value data, java.util.Optional<Long> keepdims, java.util.Optional<Long> select_last_index, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(data), List.of(keepdims, select_last_index, axis));
         }
 
@@ -1411,24 +1411,24 @@ public Value data() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
-        public java.util.Optional<Integer> select_last_index() {
-            Integer select_last_index = Attribute.select_last_index.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> select_last_index() {
+            Long select_last_index = Attribute.select_last_index.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(select_last_index);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static ArgMin ArgMin(TypeElement resultType, Value data, java.util.Optional<Integer> keepdims, java.util.Optional<Integer> select_last_index, java.util.Optional<Integer> axis) {
+    public static ArgMin ArgMin(TypeElement resultType, Value data, java.util.Optional<Long> keepdims, java.util.Optional<Long> select_last_index, java.util.Optional<Long> axis) {
         return new ArgMin(resultType, data, keepdims, select_last_index, axis);
     }
 
@@ -2012,13 +2012,13 @@ public static final class AveragePool extends OnnxOp {
         public static final String NAME = "AveragePool";
 
         public enum Attribute implements OnnxAttribute {
-            pads(int[].class, true, null),
-            dilations(int[].class, true, null),
+            pads(long[].class, true, null),
+            dilations(long[].class, true, null),
             auto_pad(String.class, true, "NOTSET"),
-            count_include_pad(Integer.class, true, 0),
-            ceil_mode(Integer.class, true, 0),
-            strides(int[].class, true, null),
-            kernel_shape(int[].class, false, null),
+            count_include_pad(Long.class, true, 0L),
+            ceil_mode(Long.class, true, 0L),
+            strides(long[].class, true, null),
+            kernel_shape(long[].class, false, null),
             ;
 
                 final Class<?> t;
@@ -2129,7 +2129,7 @@ public AveragePool transform(CopyContext cc, OpTransformer ot) {
             return new AveragePool(this, cc);
         }
 
-        AveragePool(TypeElement resultType, Value X, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<Integer> count_include_pad, java.util.Optional<Integer> ceil_mode, java.util.Optional<int[]> strides, int[] kernel_shape) {
+        AveragePool(TypeElement resultType, Value X, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<Long> count_include_pad, java.util.Optional<Long> ceil_mode, java.util.Optional<long[]> strides, long[] kernel_shape) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(pads, dilations, auto_pad, count_include_pad, ceil_mode, strides, kernel_shape));
         }
 
@@ -2147,14 +2147,14 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<int[]> pads() {
-            int[] pads = Attribute.pads.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(pads).map(int[]::clone);
+        public java.util.Optional<long[]> pads() {
+            long[] pads = Attribute.pads.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(pads).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> dilations() {
-            int[] dilations = Attribute.dilations.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(dilations).map(int[]::clone);
+        public java.util.Optional<long[]> dilations() {
+            long[] dilations = Attribute.dilations.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(dilations).map(long[]::clone);
         }
 
         public java.util.Optional<String> auto_pad() {
@@ -2162,29 +2162,29 @@ public java.util.Optional<String> auto_pad() {
             return java.util.Optional.ofNullable(auto_pad);
         }
 
-        public java.util.Optional<Integer> count_include_pad() {
-            Integer count_include_pad = Attribute.count_include_pad.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> count_include_pad() {
+            Long count_include_pad = Attribute.count_include_pad.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(count_include_pad);
         }
 
-        public java.util.Optional<Integer> ceil_mode() {
-            Integer ceil_mode = Attribute.ceil_mode.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> ceil_mode() {
+            Long ceil_mode = Attribute.ceil_mode.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(ceil_mode);
         }
 
-        public java.util.Optional<int[]> strides() {
-            int[] strides = Attribute.strides.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(strides).map(int[]::clone);
+        public java.util.Optional<long[]> strides() {
+            long[] strides = Attribute.strides.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(strides).map(long[]::clone);
         }
 
-        public int[] kernel_shape() {
-            int[] kernel_shape = Attribute.kernel_shape.access(int[].class, onnxAttributes);
+        public long[] kernel_shape() {
+            long[] kernel_shape = Attribute.kernel_shape.access(long[].class, onnxAttributes);
             return kernel_shape.clone();
         }
 
     }
 
-    public static AveragePool AveragePool(TypeElement resultType, Value X, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<Integer> count_include_pad, java.util.Optional<Integer> ceil_mode, java.util.Optional<int[]> strides, int[] kernel_shape) {
+    public static AveragePool AveragePool(TypeElement resultType, Value X, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<Long> count_include_pad, java.util.Optional<Long> ceil_mode, java.util.Optional<long[]> strides, long[] kernel_shape) {
         return new AveragePool(resultType, X, pads, dilations, auto_pad, count_include_pad, ceil_mode, strides, kernel_shape);
     }
 
@@ -2194,7 +2194,7 @@ public static final class BatchNormalization extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             epsilon(Float.class, true, 1.0E-5f),
-            training_mode(Integer.class, true, 0),
+            training_mode(Long.class, true, 0L),
             momentum(Float.class, true, 0.9f),
             ;
 
@@ -2314,7 +2314,7 @@ public BatchNormalization transform(CopyContext cc, OpTransformer ot) {
             return new BatchNormalization(this, cc);
         }
 
-        BatchNormalization(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, Value scale, Value B, Value input_mean, Value input_var, java.util.Optional<Float> epsilon, java.util.Optional<Integer> training_mode, java.util.Optional<Float> momentum) {
+        BatchNormalization(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, Value scale, Value B, Value input_mean, Value input_var, java.util.Optional<Float> epsilon, java.util.Optional<Long> training_mode, java.util.Optional<Float> momentum) {
             super(SCHEMA, resultType, optionalOutputs, List.of(X, scale, B, input_mean, input_var), List.of(epsilon, training_mode, momentum));
         }
 
@@ -2353,8 +2353,8 @@ public java.util.Optional<Float> epsilon() {
             return java.util.Optional.ofNullable(epsilon);
         }
 
-        public java.util.Optional<Integer> training_mode() {
-            Integer training_mode = Attribute.training_mode.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> training_mode() {
+            Long training_mode = Attribute.training_mode.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(training_mode);
         }
 
@@ -2365,7 +2365,7 @@ public java.util.Optional<Float> momentum() {
 
     }
 
-    public static BatchNormalization BatchNormalization(TypeElement resultType, Set<BatchNormalization.OutputParameter> optionalOutputs, Value X, Value scale, Value B, Value input_mean, Value input_var, java.util.Optional<Float> epsilon, java.util.Optional<Integer> training_mode, java.util.Optional<Float> momentum) {
+    public static BatchNormalization BatchNormalization(TypeElement resultType, Set<BatchNormalization.OutputParameter> optionalOutputs, Value X, Value scale, Value B, Value input_mean, Value input_var, java.util.Optional<Float> epsilon, java.util.Optional<Long> training_mode, java.util.Optional<Float> momentum) {
         return new BatchNormalization(resultType, optionalOutputs, X, scale, B, input_mean, input_var, epsilon, training_mode, momentum);
     }
 
@@ -2375,7 +2375,7 @@ public static final class Bernoulli extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             seed(Float.class, true, null),
-            dtype(Integer.class, true, null),
+            dtype(Long.class, true, null),
             ;
 
                 final Class<?> t;
@@ -2487,7 +2487,7 @@ public Bernoulli transform(CopyContext cc, OpTransformer ot) {
             return new Bernoulli(this, cc);
         }
 
-        Bernoulli(TypeElement resultType, Value input, java.util.Optional<Float> seed, java.util.Optional<Integer> dtype) {
+        Bernoulli(TypeElement resultType, Value input, java.util.Optional<Float> seed, java.util.Optional<Long> dtype) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(seed, dtype));
         }
 
@@ -2510,14 +2510,14 @@ public java.util.Optional<Float> seed() {
             return java.util.Optional.ofNullable(seed);
         }
 
-        public java.util.Optional<Integer> dtype() {
-            Integer dtype = Attribute.dtype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> dtype() {
+            Long dtype = Attribute.dtype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(dtype);
         }
 
     }
 
-    public static Bernoulli Bernoulli(TypeElement resultType, Value input, java.util.Optional<Float> seed, java.util.Optional<Integer> dtype) {
+    public static Bernoulli Bernoulli(TypeElement resultType, Value input, java.util.Optional<Float> seed, java.util.Optional<Long> dtype) {
         return new Bernoulli(resultType, input, seed, dtype);
     }
 
@@ -3292,8 +3292,8 @@ public static final class BlackmanWindow extends OnnxOp {
         public static final String NAME = "BlackmanWindow";
 
         public enum Attribute implements OnnxAttribute {
-            periodic(Integer.class, true, 1),
-            output_datatype(Integer.class, true, 1),
+            periodic(Long.class, true, 1L),
+            output_datatype(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -3405,7 +3405,7 @@ public BlackmanWindow transform(CopyContext cc, OpTransformer ot) {
             return new BlackmanWindow(this, cc);
         }
 
-        BlackmanWindow(TypeElement resultType, Value size, java.util.Optional<Integer> periodic, java.util.Optional<Integer> output_datatype) {
+        BlackmanWindow(TypeElement resultType, Value size, java.util.Optional<Long> periodic, java.util.Optional<Long> output_datatype) {
             super(SCHEMA, resultType, Set.of(), List.of(size), List.of(periodic, output_datatype));
         }
 
@@ -3423,19 +3423,19 @@ public Value size() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> periodic() {
-            Integer periodic = Attribute.periodic.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> periodic() {
+            Long periodic = Attribute.periodic.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(periodic);
         }
 
-        public java.util.Optional<Integer> output_datatype() {
-            Integer output_datatype = Attribute.output_datatype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> output_datatype() {
+            Long output_datatype = Attribute.output_datatype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(output_datatype);
         }
 
     }
 
-    public static BlackmanWindow BlackmanWindow(TypeElement resultType, Value size, java.util.Optional<Integer> periodic, java.util.Optional<Integer> output_datatype) {
+    public static BlackmanWindow BlackmanWindow(TypeElement resultType, Value size, java.util.Optional<Long> periodic, java.util.Optional<Long> output_datatype) {
         return new BlackmanWindow(resultType, size, periodic, output_datatype);
     }
 
@@ -3444,8 +3444,8 @@ public static final class Cast extends OnnxOp {
         public static final String NAME = "Cast";
 
         public enum Attribute implements OnnxAttribute {
-            saturate(Integer.class, true, 1),
-            to(Integer.class, false, null),
+            saturate(Long.class, true, 1L),
+            to(Long.class, false, null),
             ;
 
                 final Class<?> t;
@@ -3557,7 +3557,7 @@ public Cast transform(CopyContext cc, OpTransformer ot) {
             return new Cast(this, cc);
         }
 
-        Cast(TypeElement resultType, Value input, java.util.Optional<Integer> saturate, int to) {
+        Cast(TypeElement resultType, Value input, java.util.Optional<Long> saturate, long to) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(saturate, to));
         }
 
@@ -3575,19 +3575,19 @@ public Value input() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> saturate() {
-            Integer saturate = Attribute.saturate.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> saturate() {
+            Long saturate = Attribute.saturate.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(saturate);
         }
 
-        public int to() {
-            int to = Attribute.to.access(int.class, onnxAttributes);
+        public long to() {
+            long to = Attribute.to.access(long.class, onnxAttributes);
             return to;
         }
 
     }
 
-    public static Cast Cast(TypeElement resultType, Value input, java.util.Optional<Integer> saturate, int to) {
+    public static Cast Cast(TypeElement resultType, Value input, java.util.Optional<Long> saturate, long to) {
         return new Cast(resultType, input, saturate, to);
     }
 
@@ -3596,7 +3596,7 @@ public static final class CastLike extends OnnxOp {
         public static final String NAME = "CastLike";
 
         public enum Attribute implements OnnxAttribute {
-            saturate(Integer.class, true, 1),
+            saturate(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -3709,7 +3709,7 @@ public CastLike transform(CopyContext cc, OpTransformer ot) {
             return new CastLike(this, cc);
         }
 
-        CastLike(TypeElement resultType, Value input, Value target_type, java.util.Optional<Integer> saturate) {
+        CastLike(TypeElement resultType, Value input, Value target_type, java.util.Optional<Long> saturate) {
             super(SCHEMA, resultType, Set.of(), List.of(input, target_type), List.of(saturate));
         }
 
@@ -3731,14 +3731,14 @@ public Value target_type() {
             return operands().get(1);
         }
 
-        public java.util.Optional<Integer> saturate() {
-            Integer saturate = Attribute.saturate.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> saturate() {
+            Long saturate = Attribute.saturate.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(saturate);
         }
 
     }
 
-    public static CastLike CastLike(TypeElement resultType, Value input, Value target_type, java.util.Optional<Integer> saturate) {
+    public static CastLike CastLike(TypeElement resultType, Value input, Value target_type, java.util.Optional<Long> saturate) {
         return new CastLike(resultType, input, target_type, saturate);
     }
 
@@ -3749,7 +3749,7 @@ public static final class CastMap extends OnnxOp {
         public enum Attribute implements OnnxAttribute {
             map_form(String.class, true, "DENSE"),
             cast_to(String.class, true, "TO_FLOAT"),
-            max_map(Integer.class, true, 1),
+            max_map(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -3861,7 +3861,7 @@ public CastMap transform(CopyContext cc, OpTransformer ot) {
             return new CastMap(this, cc);
         }
 
-        CastMap(TypeElement resultType, Value X, java.util.Optional<String> map_form, java.util.Optional<String> cast_to, java.util.Optional<Integer> max_map) {
+        CastMap(TypeElement resultType, Value X, java.util.Optional<String> map_form, java.util.Optional<String> cast_to, java.util.Optional<Long> max_map) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(map_form, cast_to, max_map));
         }
 
@@ -3889,14 +3889,14 @@ public java.util.Optional<String> cast_to() {
             return java.util.Optional.ofNullable(cast_to);
         }
 
-        public java.util.Optional<Integer> max_map() {
-            Integer max_map = Attribute.max_map.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> max_map() {
+            Long max_map = Attribute.max_map.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(max_map);
         }
 
     }
 
-    public static CastMap CastMap(TypeElement resultType, Value X, java.util.Optional<String> map_form, java.util.Optional<String> cast_to, java.util.Optional<Integer> max_map) {
+    public static CastMap CastMap(TypeElement resultType, Value X, java.util.Optional<String> map_form, java.util.Optional<String> cast_to, java.util.Optional<Long> max_map) {
         return new CastMap(resultType, X, map_form, cast_to, max_map);
     }
 
@@ -3905,9 +3905,9 @@ public static final class CategoryMapper extends OnnxOp {
         public static final String NAME = "CategoryMapper";
 
         public enum Attribute implements OnnxAttribute {
-            cats_int64s(int[].class, true, null),
+            cats_int64s(long[].class, true, null),
             cats_strings(String[].class, true, null),
-            default_int64(Integer.class, true, -1),
+            default_int64(Long.class, true, -1L),
             default_string(String.class, true, "_Unused"),
             ;
 
@@ -4020,7 +4020,7 @@ public CategoryMapper transform(CopyContext cc, OpTransformer ot) {
             return new CategoryMapper(this, cc);
         }
 
-        CategoryMapper(TypeElement resultType, Value X, java.util.Optional<int[]> cats_int64s, java.util.Optional<String[]> cats_strings, java.util.Optional<Integer> default_int64, java.util.Optional<String> default_string) {
+        CategoryMapper(TypeElement resultType, Value X, java.util.Optional<long[]> cats_int64s, java.util.Optional<String[]> cats_strings, java.util.Optional<Long> default_int64, java.util.Optional<String> default_string) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(cats_int64s, cats_strings, default_int64, default_string));
         }
 
@@ -4038,9 +4038,9 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<int[]> cats_int64s() {
-            int[] cats_int64s = Attribute.cats_int64s.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(cats_int64s).map(int[]::clone);
+        public java.util.Optional<long[]> cats_int64s() {
+            long[] cats_int64s = Attribute.cats_int64s.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(cats_int64s).map(long[]::clone);
         }
 
         public java.util.Optional<String[]> cats_strings() {
@@ -4048,8 +4048,8 @@ public java.util.Optional<String[]> cats_strings() {
             return java.util.Optional.ofNullable(cats_strings).map(String[]::clone);
         }
 
-        public java.util.Optional<Integer> default_int64() {
-            Integer default_int64 = Attribute.default_int64.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> default_int64() {
+            Long default_int64 = Attribute.default_int64.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(default_int64);
         }
 
@@ -4060,7 +4060,7 @@ public java.util.Optional<String> default_string() {
 
     }
 
-    public static CategoryMapper CategoryMapper(TypeElement resultType, Value X, java.util.Optional<int[]> cats_int64s, java.util.Optional<String[]> cats_strings, java.util.Optional<Integer> default_int64, java.util.Optional<String> default_string) {
+    public static CategoryMapper CategoryMapper(TypeElement resultType, Value X, java.util.Optional<long[]> cats_int64s, java.util.Optional<String[]> cats_strings, java.util.Optional<Long> default_int64, java.util.Optional<String> default_string) {
         return new CategoryMapper(resultType, X, cats_int64s, cats_strings, default_int64, default_string);
     }
 
@@ -4328,7 +4328,7 @@ public static final class CenterCropPad extends OnnxOp {
         public static final String NAME = "CenterCropPad";
 
         public enum Attribute implements OnnxAttribute {
-            axes(int[].class, true, null),
+            axes(long[].class, true, null),
             ;
 
                 final Class<?> t;
@@ -4441,7 +4441,7 @@ public CenterCropPad transform(CopyContext cc, OpTransformer ot) {
             return new CenterCropPad(this, cc);
         }
 
-        CenterCropPad(TypeElement resultType, Value input_data, Value shape, java.util.Optional<int[]> axes) {
+        CenterCropPad(TypeElement resultType, Value input_data, Value shape, java.util.Optional<long[]> axes) {
             super(SCHEMA, resultType, Set.of(), List.of(input_data, shape), List.of(axes));
         }
 
@@ -4463,14 +4463,14 @@ public Value shape() {
             return operands().get(1);
         }
 
-        public java.util.Optional<int[]> axes() {
-            int[] axes = Attribute.axes.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(axes).map(int[]::clone);
+        public java.util.Optional<long[]> axes() {
+            long[] axes = Attribute.axes.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(axes).map(long[]::clone);
         }
 
     }
 
-    public static CenterCropPad CenterCropPad(TypeElement resultType, Value input_data, Value shape, java.util.Optional<int[]> axes) {
+    public static CenterCropPad CenterCropPad(TypeElement resultType, Value input_data, Value shape, java.util.Optional<long[]> axes) {
         return new CenterCropPad(resultType, input_data, shape, axes);
     }
 
@@ -4605,9 +4605,9 @@ public static final class Col2Im extends OnnxOp {
         public static final String NAME = "Col2Im";
 
         public enum Attribute implements OnnxAttribute {
-            pads(int[].class, true, null),
-            dilations(int[].class, true, null),
-            strides(int[].class, true, null),
+            pads(long[].class, true, null),
+            dilations(long[].class, true, null),
+            strides(long[].class, true, null),
             ;
 
                 final Class<?> t;
@@ -4720,7 +4720,7 @@ public Col2Im transform(CopyContext cc, OpTransformer ot) {
             return new Col2Im(this, cc);
         }
 
-        Col2Im(TypeElement resultType, Value input, Value image_shape, Value block_shape, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<int[]> strides) {
+        Col2Im(TypeElement resultType, Value input, Value image_shape, Value block_shape, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<long[]> strides) {
             super(SCHEMA, resultType, Set.of(), List.of(input, image_shape, block_shape), List.of(pads, dilations, strides));
         }
 
@@ -4746,24 +4746,24 @@ public Value block_shape() {
             return operands().get(2);
         }
 
-        public java.util.Optional<int[]> pads() {
-            int[] pads = Attribute.pads.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(pads).map(int[]::clone);
+        public java.util.Optional<long[]> pads() {
+            long[] pads = Attribute.pads.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(pads).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> dilations() {
-            int[] dilations = Attribute.dilations.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(dilations).map(int[]::clone);
+        public java.util.Optional<long[]> dilations() {
+            long[] dilations = Attribute.dilations.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(dilations).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> strides() {
-            int[] strides = Attribute.strides.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(strides).map(int[]::clone);
+        public java.util.Optional<long[]> strides() {
+            long[] strides = Attribute.strides.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(strides).map(long[]::clone);
         }
 
     }
 
-    public static Col2Im Col2Im(TypeElement resultType, Value input, Value image_shape, Value block_shape, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<int[]> strides) {
+    public static Col2Im Col2Im(TypeElement resultType, Value input, Value image_shape, Value block_shape, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<long[]> strides) {
         return new Col2Im(resultType, input, image_shape, block_shape, pads, dilations, strides);
     }
 
@@ -4772,7 +4772,7 @@ public static final class Compress extends OnnxOp {
         public static final String NAME = "Compress";
 
         public enum Attribute implements OnnxAttribute {
-            axis(Integer.class, true, null),
+            axis(Long.class, true, null),
             ;
 
                 final Class<?> t;
@@ -4885,7 +4885,7 @@ public Compress transform(CopyContext cc, OpTransformer ot) {
             return new Compress(this, cc);
         }
 
-        Compress(TypeElement resultType, Value input, Value condition, java.util.Optional<Integer> axis) {
+        Compress(TypeElement resultType, Value input, Value condition, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(input, condition), List.of(axis));
         }
 
@@ -4907,14 +4907,14 @@ public Value condition() {
             return operands().get(1);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static Compress Compress(TypeElement resultType, Value input, Value condition, java.util.Optional<Integer> axis) {
+    public static Compress Compress(TypeElement resultType, Value input, Value condition, java.util.Optional<Long> axis) {
         return new Compress(resultType, input, condition, axis);
     }
 
@@ -4923,7 +4923,7 @@ public static final class Concat extends OnnxOp {
         public static final String NAME = "Concat";
 
         public enum Attribute implements OnnxAttribute {
-            axis(Integer.class, false, null),
+            axis(Long.class, false, null),
             ;
 
                 final Class<?> t;
@@ -5034,7 +5034,7 @@ public Concat transform(CopyContext cc, OpTransformer ot) {
             return new Concat(this, cc);
         }
 
-        Concat(TypeElement resultType, List<Value> inputs, int axis) {
+        Concat(TypeElement resultType, List<Value> inputs, long axis) {
             super(SCHEMA, resultType, Set.of(), List.of(inputs), List.of(axis));
         }
 
@@ -5052,14 +5052,14 @@ public List<Value> inputs() {
             return operands();
         }
 
-        public int axis() {
-            int axis = Attribute.axis.access(int.class, onnxAttributes);
+        public long axis() {
+            long axis = Attribute.axis.access(long.class, onnxAttributes);
             return axis;
         }
 
     }
 
-    public static Concat Concat(TypeElement resultType, List<Value> inputs, int axis) {
+    public static Concat Concat(TypeElement resultType, List<Value> inputs, long axis) {
         return new Concat(resultType, inputs, axis);
     }
 
@@ -5068,8 +5068,8 @@ public static final class ConcatFromSequence extends OnnxOp {
         public static final String NAME = "ConcatFromSequence";
 
         public enum Attribute implements OnnxAttribute {
-            axis(Integer.class, false, null),
-            new_axis(Integer.class, true, 0),
+            axis(Long.class, false, null),
+            new_axis(Long.class, true, 0L),
             ;
 
                 final Class<?> t;
@@ -5181,7 +5181,7 @@ public ConcatFromSequence transform(CopyContext cc, OpTransformer ot) {
             return new ConcatFromSequence(this, cc);
         }
 
-        ConcatFromSequence(TypeElement resultType, Value input_sequence, int axis, java.util.Optional<Integer> new_axis) {
+        ConcatFromSequence(TypeElement resultType, Value input_sequence, long axis, java.util.Optional<Long> new_axis) {
             super(SCHEMA, resultType, Set.of(), List.of(input_sequence), List.of(axis, new_axis));
         }
 
@@ -5199,19 +5199,19 @@ public Value input_sequence() {
             return operands().get(0);
         }
 
-        public int axis() {
-            int axis = Attribute.axis.access(int.class, onnxAttributes);
+        public long axis() {
+            long axis = Attribute.axis.access(long.class, onnxAttributes);
             return axis;
         }
 
-        public java.util.Optional<Integer> new_axis() {
-            Integer new_axis = Attribute.new_axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> new_axis() {
+            Long new_axis = Attribute.new_axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(new_axis);
         }
 
     }
 
-    public static ConcatFromSequence ConcatFromSequence(TypeElement resultType, Value input_sequence, int axis, java.util.Optional<Integer> new_axis) {
+    public static ConcatFromSequence ConcatFromSequence(TypeElement resultType, Value input_sequence, long axis, java.util.Optional<Long> new_axis) {
         return new ConcatFromSequence(resultType, input_sequence, axis, new_axis);
     }
 
@@ -5220,12 +5220,12 @@ public static final class Constant extends OnnxOp {
         public static final String NAME = "Constant";
 
         public enum Attribute implements OnnxAttribute {
-            value_int(Integer.class, true, null),
+            value_int(Long.class, true, null),
             value_floats(float[].class, true, null),
             value_strings(String[].class, true, null),
             value_float(Float.class, true, null),
             value_string(String.class, true, null),
-            value_ints(int[].class, true, null),
+            value_ints(long[].class, true, null),
             sparse_value(byte[].class, true, null),
             value(byte[].class, true, null),
             ;
@@ -5317,7 +5317,7 @@ public Constant transform(CopyContext cc, OpTransformer ot) {
             return new Constant(this, cc);
         }
 
-        Constant(TypeElement resultType, java.util.Optional<Integer> value_int, java.util.Optional<float[]> value_floats, java.util.Optional<String[]> value_strings, java.util.Optional<Float> value_float, java.util.Optional<String> value_string, java.util.Optional<int[]> value_ints, java.util.Optional<byte[]> sparse_value, java.util.Optional<byte[]> value) {
+        Constant(TypeElement resultType, java.util.Optional<Long> value_int, java.util.Optional<float[]> value_floats, java.util.Optional<String[]> value_strings, java.util.Optional<Float> value_float, java.util.Optional<String> value_string, java.util.Optional<long[]> value_ints, java.util.Optional<byte[]> sparse_value, java.util.Optional<byte[]> value) {
             super(SCHEMA, resultType, Set.of(), List.of(), List.of(value_int, value_floats, value_strings, value_float, value_string, value_ints, sparse_value, value));
         }
 
@@ -5331,8 +5331,8 @@ public SequencedMap<OnnxParameter, Object> onnxInputs() {
             return onnxInputs(SCHEMA, List.of());
         }
 
-        public java.util.Optional<Integer> value_int() {
-            Integer value_int = Attribute.value_int.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> value_int() {
+            Long value_int = Attribute.value_int.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(value_int);
         }
 
@@ -5356,9 +5356,9 @@ public java.util.Optional<String> value_string() {
             return java.util.Optional.ofNullable(value_string);
         }
 
-        public java.util.Optional<int[]> value_ints() {
-            int[] value_ints = Attribute.value_ints.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(value_ints).map(int[]::clone);
+        public java.util.Optional<long[]> value_ints() {
+            long[] value_ints = Attribute.value_ints.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(value_ints).map(long[]::clone);
         }
 
         public java.util.Optional<byte[]> sparse_value() {
@@ -5373,7 +5373,7 @@ public java.util.Optional<byte[]> value() {
 
     }
 
-    public static Constant Constant(TypeElement resultType, java.util.Optional<Integer> value_int, java.util.Optional<float[]> value_floats, java.util.Optional<String[]> value_strings, java.util.Optional<Float> value_float, java.util.Optional<String> value_string, java.util.Optional<int[]> value_ints, java.util.Optional<byte[]> sparse_value, java.util.Optional<byte[]> value) {
+    public static Constant Constant(TypeElement resultType, java.util.Optional<Long> value_int, java.util.Optional<float[]> value_floats, java.util.Optional<String[]> value_strings, java.util.Optional<Float> value_float, java.util.Optional<String> value_string, java.util.Optional<long[]> value_ints, java.util.Optional<byte[]> sparse_value, java.util.Optional<byte[]> value) {
         return new Constant(resultType, value_int, value_floats, value_strings, value_float, value_string, value_ints, sparse_value, value);
     }
 
@@ -5528,12 +5528,12 @@ public static final class Conv extends OnnxOp {
         public static final String NAME = "Conv";
 
         public enum Attribute implements OnnxAttribute {
-            pads(int[].class, true, null),
-            dilations(int[].class, true, null),
+            pads(long[].class, true, null),
+            dilations(long[].class, true, null),
             auto_pad(String.class, true, "NOTSET"),
-            strides(int[].class, true, null),
-            group(Integer.class, true, 1),
-            kernel_shape(int[].class, true, null),
+            strides(long[].class, true, null),
+            group(Long.class, true, 1L),
+            kernel_shape(long[].class, true, null),
             ;
 
                 final Class<?> t;
@@ -5646,7 +5646,7 @@ public Conv transform(CopyContext cc, OpTransformer ot) {
             return new Conv(this, cc);
         }
 
-        Conv(TypeElement resultType, Value X, Value W, java.util.Optional<Value> B, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<int[]> strides, java.util.Optional<Integer> group, java.util.Optional<int[]> kernel_shape) {
+        Conv(TypeElement resultType, Value X, Value W, java.util.Optional<Value> B, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<long[]> strides, java.util.Optional<Long> group, java.util.Optional<long[]> kernel_shape) {
             super(SCHEMA, resultType, Set.of(), List.of(X, W, B), List.of(pads, dilations, auto_pad, strides, group, kernel_shape));
         }
 
@@ -5673,14 +5673,14 @@ public java.util.Optional<Value> B() {
             return i != -1 ? java.util.Optional.of(operands().get(2 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<int[]> pads() {
-            int[] pads = Attribute.pads.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(pads).map(int[]::clone);
+        public java.util.Optional<long[]> pads() {
+            long[] pads = Attribute.pads.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(pads).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> dilations() {
-            int[] dilations = Attribute.dilations.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(dilations).map(int[]::clone);
+        public java.util.Optional<long[]> dilations() {
+            long[] dilations = Attribute.dilations.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(dilations).map(long[]::clone);
         }
 
         public java.util.Optional<String> auto_pad() {
@@ -5688,24 +5688,24 @@ public java.util.Optional<String> auto_pad() {
             return java.util.Optional.ofNullable(auto_pad);
         }
 
-        public java.util.Optional<int[]> strides() {
-            int[] strides = Attribute.strides.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(strides).map(int[]::clone);
+        public java.util.Optional<long[]> strides() {
+            long[] strides = Attribute.strides.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(strides).map(long[]::clone);
         }
 
-        public java.util.Optional<Integer> group() {
-            Integer group = Attribute.group.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> group() {
+            Long group = Attribute.group.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(group);
         }
 
-        public java.util.Optional<int[]> kernel_shape() {
-            int[] kernel_shape = Attribute.kernel_shape.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(kernel_shape).map(int[]::clone);
+        public java.util.Optional<long[]> kernel_shape() {
+            long[] kernel_shape = Attribute.kernel_shape.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(kernel_shape).map(long[]::clone);
         }
 
     }
 
-    public static Conv Conv(TypeElement resultType, Value X, Value W, java.util.Optional<Value> B, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<int[]> strides, java.util.Optional<Integer> group, java.util.Optional<int[]> kernel_shape) {
+    public static Conv Conv(TypeElement resultType, Value X, Value W, java.util.Optional<Value> B, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<long[]> strides, java.util.Optional<Long> group, java.util.Optional<long[]> kernel_shape) {
         return new Conv(resultType, X, W, B, pads, dilations, auto_pad, strides, group, kernel_shape);
     }
 
@@ -5714,12 +5714,12 @@ public static final class ConvInteger extends OnnxOp {
         public static final String NAME = "ConvInteger";
 
         public enum Attribute implements OnnxAttribute {
-            pads(int[].class, true, null),
-            dilations(int[].class, true, null),
+            pads(long[].class, true, null),
+            dilations(long[].class, true, null),
             auto_pad(String.class, true, "NOTSET"),
-            strides(int[].class, true, null),
-            group(Integer.class, true, 1),
-            kernel_shape(int[].class, true, null),
+            strides(long[].class, true, null),
+            group(Long.class, true, 1L),
+            kernel_shape(long[].class, true, null),
             ;
 
                 final Class<?> t;
@@ -5835,7 +5835,7 @@ public ConvInteger transform(CopyContext cc, OpTransformer ot) {
             return new ConvInteger(this, cc);
         }
 
-        ConvInteger(TypeElement resultType, Value x, Value w, java.util.Optional<Value> x_zero_point, java.util.Optional<Value> w_zero_point, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<int[]> strides, java.util.Optional<Integer> group, java.util.Optional<int[]> kernel_shape) {
+        ConvInteger(TypeElement resultType, Value x, Value w, java.util.Optional<Value> x_zero_point, java.util.Optional<Value> w_zero_point, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<long[]> strides, java.util.Optional<Long> group, java.util.Optional<long[]> kernel_shape) {
             super(SCHEMA, resultType, Set.of(), List.of(x, w, x_zero_point, w_zero_point), List.of(pads, dilations, auto_pad, strides, group, kernel_shape));
         }
 
@@ -5867,14 +5867,14 @@ public java.util.Optional<Value> w_zero_point() {
             return i != -1 ? java.util.Optional.of(operands().get(2 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<int[]> pads() {
-            int[] pads = Attribute.pads.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(pads).map(int[]::clone);
+        public java.util.Optional<long[]> pads() {
+            long[] pads = Attribute.pads.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(pads).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> dilations() {
-            int[] dilations = Attribute.dilations.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(dilations).map(int[]::clone);
+        public java.util.Optional<long[]> dilations() {
+            long[] dilations = Attribute.dilations.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(dilations).map(long[]::clone);
         }
 
         public java.util.Optional<String> auto_pad() {
@@ -5882,24 +5882,24 @@ public java.util.Optional<String> auto_pad() {
             return java.util.Optional.ofNullable(auto_pad);
         }
 
-        public java.util.Optional<int[]> strides() {
-            int[] strides = Attribute.strides.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(strides).map(int[]::clone);
+        public java.util.Optional<long[]> strides() {
+            long[] strides = Attribute.strides.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(strides).map(long[]::clone);
         }
 
-        public java.util.Optional<Integer> group() {
-            Integer group = Attribute.group.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> group() {
+            Long group = Attribute.group.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(group);
         }
 
-        public java.util.Optional<int[]> kernel_shape() {
-            int[] kernel_shape = Attribute.kernel_shape.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(kernel_shape).map(int[]::clone);
+        public java.util.Optional<long[]> kernel_shape() {
+            long[] kernel_shape = Attribute.kernel_shape.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(kernel_shape).map(long[]::clone);
         }
 
     }
 
-    public static ConvInteger ConvInteger(TypeElement resultType, Value x, Value w, java.util.Optional<Value> x_zero_point, java.util.Optional<Value> w_zero_point, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<int[]> strides, java.util.Optional<Integer> group, java.util.Optional<int[]> kernel_shape) {
+    public static ConvInteger ConvInteger(TypeElement resultType, Value x, Value w, java.util.Optional<Value> x_zero_point, java.util.Optional<Value> w_zero_point, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<long[]> strides, java.util.Optional<Long> group, java.util.Optional<long[]> kernel_shape) {
         return new ConvInteger(resultType, x, w, x_zero_point, w_zero_point, pads, dilations, auto_pad, strides, group, kernel_shape);
     }
 
@@ -5908,14 +5908,14 @@ public static final class ConvTranspose extends OnnxOp {
         public static final String NAME = "ConvTranspose";
 
         public enum Attribute implements OnnxAttribute {
-            output_shape(int[].class, true, null),
-            pads(int[].class, true, null),
-            dilations(int[].class, true, null),
+            output_shape(long[].class, true, null),
+            pads(long[].class, true, null),
+            dilations(long[].class, true, null),
             auto_pad(String.class, true, "NOTSET"),
-            strides(int[].class, true, null),
-            group(Integer.class, true, 1),
-            kernel_shape(int[].class, true, null),
-            output_padding(int[].class, true, null),
+            strides(long[].class, true, null),
+            group(Long.class, true, 1L),
+            kernel_shape(long[].class, true, null),
+            output_padding(long[].class, true, null),
             ;
 
                 final Class<?> t;
@@ -6028,7 +6028,7 @@ public ConvTranspose transform(CopyContext cc, OpTransformer ot) {
             return new ConvTranspose(this, cc);
         }
 
-        ConvTranspose(TypeElement resultType, Value X, Value W, java.util.Optional<Value> B, java.util.Optional<int[]> output_shape, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<int[]> strides, java.util.Optional<Integer> group, java.util.Optional<int[]> kernel_shape, java.util.Optional<int[]> output_padding) {
+        ConvTranspose(TypeElement resultType, Value X, Value W, java.util.Optional<Value> B, java.util.Optional<long[]> output_shape, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<long[]> strides, java.util.Optional<Long> group, java.util.Optional<long[]> kernel_shape, java.util.Optional<long[]> output_padding) {
             super(SCHEMA, resultType, Set.of(), List.of(X, W, B), List.of(output_shape, pads, dilations, auto_pad, strides, group, kernel_shape, output_padding));
         }
 
@@ -6055,19 +6055,19 @@ public java.util.Optional<Value> B() {
             return i != -1 ? java.util.Optional.of(operands().get(2 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<int[]> output_shape() {
-            int[] output_shape = Attribute.output_shape.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(output_shape).map(int[]::clone);
+        public java.util.Optional<long[]> output_shape() {
+            long[] output_shape = Attribute.output_shape.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(output_shape).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> pads() {
-            int[] pads = Attribute.pads.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(pads).map(int[]::clone);
+        public java.util.Optional<long[]> pads() {
+            long[] pads = Attribute.pads.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(pads).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> dilations() {
-            int[] dilations = Attribute.dilations.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(dilations).map(int[]::clone);
+        public java.util.Optional<long[]> dilations() {
+            long[] dilations = Attribute.dilations.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(dilations).map(long[]::clone);
         }
 
         public java.util.Optional<String> auto_pad() {
@@ -6075,29 +6075,29 @@ public java.util.Optional<String> auto_pad() {
             return java.util.Optional.ofNullable(auto_pad);
         }
 
-        public java.util.Optional<int[]> strides() {
-            int[] strides = Attribute.strides.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(strides).map(int[]::clone);
+        public java.util.Optional<long[]> strides() {
+            long[] strides = Attribute.strides.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(strides).map(long[]::clone);
         }
 
-        public java.util.Optional<Integer> group() {
-            Integer group = Attribute.group.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> group() {
+            Long group = Attribute.group.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(group);
         }
 
-        public java.util.Optional<int[]> kernel_shape() {
-            int[] kernel_shape = Attribute.kernel_shape.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(kernel_shape).map(int[]::clone);
+        public java.util.Optional<long[]> kernel_shape() {
+            long[] kernel_shape = Attribute.kernel_shape.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(kernel_shape).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> output_padding() {
-            int[] output_padding = Attribute.output_padding.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(output_padding).map(int[]::clone);
+        public java.util.Optional<long[]> output_padding() {
+            long[] output_padding = Attribute.output_padding.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(output_padding).map(long[]::clone);
         }
 
     }
 
-    public static ConvTranspose ConvTranspose(TypeElement resultType, Value X, Value W, java.util.Optional<Value> B, java.util.Optional<int[]> output_shape, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<int[]> strides, java.util.Optional<Integer> group, java.util.Optional<int[]> kernel_shape, java.util.Optional<int[]> output_padding) {
+    public static ConvTranspose ConvTranspose(TypeElement resultType, Value X, Value W, java.util.Optional<Value> B, java.util.Optional<long[]> output_shape, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<long[]> strides, java.util.Optional<Long> group, java.util.Optional<long[]> kernel_shape, java.util.Optional<long[]> output_padding) {
         return new ConvTranspose(resultType, X, W, B, output_shape, pads, dilations, auto_pad, strides, group, kernel_shape, output_padding);
     }
 
@@ -6334,8 +6334,8 @@ public static final class CumSum extends OnnxOp {
         public static final String NAME = "CumSum";
 
         public enum Attribute implements OnnxAttribute {
-            exclusive(Integer.class, true, 0),
-            reverse(Integer.class, true, 0),
+            exclusive(Long.class, true, 0L),
+            reverse(Long.class, true, 0L),
             ;
 
                 final Class<?> t;
@@ -6448,7 +6448,7 @@ public CumSum transform(CopyContext cc, OpTransformer ot) {
             return new CumSum(this, cc);
         }
 
-        CumSum(TypeElement resultType, Value x, Value axis, java.util.Optional<Integer> exclusive, java.util.Optional<Integer> reverse) {
+        CumSum(TypeElement resultType, Value x, Value axis, java.util.Optional<Long> exclusive, java.util.Optional<Long> reverse) {
             super(SCHEMA, resultType, Set.of(), List.of(x, axis), List.of(exclusive, reverse));
         }
 
@@ -6470,19 +6470,19 @@ public Value axis() {
             return operands().get(1);
         }
 
-        public java.util.Optional<Integer> exclusive() {
-            Integer exclusive = Attribute.exclusive.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> exclusive() {
+            Long exclusive = Attribute.exclusive.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(exclusive);
         }
 
-        public java.util.Optional<Integer> reverse() {
-            Integer reverse = Attribute.reverse.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> reverse() {
+            Long reverse = Attribute.reverse.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(reverse);
         }
 
     }
 
-    public static CumSum CumSum(TypeElement resultType, Value x, Value axis, java.util.Optional<Integer> exclusive, java.util.Optional<Integer> reverse) {
+    public static CumSum CumSum(TypeElement resultType, Value x, Value axis, java.util.Optional<Long> exclusive, java.util.Optional<Long> reverse) {
         return new CumSum(resultType, x, axis, exclusive, reverse);
     }
 
@@ -6491,8 +6491,8 @@ public static final class DFT extends OnnxOp {
         public static final String NAME = "DFT";
 
         public enum Attribute implements OnnxAttribute {
-            inverse(Integer.class, true, 0),
-            onesided(Integer.class, true, 0),
+            inverse(Long.class, true, 0L),
+            onesided(Long.class, true, 0L),
             ;
 
                 final Class<?> t;
@@ -6606,7 +6606,7 @@ public DFT transform(CopyContext cc, OpTransformer ot) {
             return new DFT(this, cc);
         }
 
-        DFT(TypeElement resultType, Value input, java.util.Optional<Value> dft_length, java.util.Optional<Value> axis, java.util.Optional<Integer> inverse, java.util.Optional<Integer> onesided) {
+        DFT(TypeElement resultType, Value input, java.util.Optional<Value> dft_length, java.util.Optional<Value> axis, java.util.Optional<Long> inverse, java.util.Optional<Long> onesided) {
             super(SCHEMA, resultType, Set.of(), List.of(input, dft_length, axis), List.of(inverse, onesided));
         }
 
@@ -6634,19 +6634,19 @@ public java.util.Optional<Value> axis() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> inverse() {
-            Integer inverse = Attribute.inverse.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> inverse() {
+            Long inverse = Attribute.inverse.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(inverse);
         }
 
-        public java.util.Optional<Integer> onesided() {
-            Integer onesided = Attribute.onesided.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> onesided() {
+            Long onesided = Attribute.onesided.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(onesided);
         }
 
     }
 
-    public static DFT DFT(TypeElement resultType, Value input, java.util.Optional<Value> dft_length, java.util.Optional<Value> axis, java.util.Optional<Integer> inverse, java.util.Optional<Integer> onesided) {
+    public static DFT DFT(TypeElement resultType, Value input, java.util.Optional<Value> dft_length, java.util.Optional<Value> axis, java.util.Optional<Long> inverse, java.util.Optional<Long> onesided) {
         return new DFT(resultType, input, dft_length, axis, inverse, onesided);
     }
 
@@ -6655,12 +6655,12 @@ public static final class DeformConv extends OnnxOp {
         public static final String NAME = "DeformConv";
 
         public enum Attribute implements OnnxAttribute {
-            pads(int[].class, true, null),
-            dilations(int[].class, true, null),
-            strides(int[].class, true, null),
-            offset_group(Integer.class, true, 1),
-            group(Integer.class, true, 1),
-            kernel_shape(int[].class, true, null),
+            pads(long[].class, true, null),
+            dilations(long[].class, true, null),
+            strides(long[].class, true, null),
+            offset_group(Long.class, true, 1L),
+            group(Long.class, true, 1L),
+            kernel_shape(long[].class, true, null),
             ;
 
                 final Class<?> t;
@@ -6775,7 +6775,7 @@ public DeformConv transform(CopyContext cc, OpTransformer ot) {
             return new DeformConv(this, cc);
         }
 
-        DeformConv(TypeElement resultType, Value X, Value W, Value offset, java.util.Optional<Value> B, java.util.Optional<Value> mask, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<int[]> strides, java.util.Optional<Integer> offset_group, java.util.Optional<Integer> group, java.util.Optional<int[]> kernel_shape) {
+        DeformConv(TypeElement resultType, Value X, Value W, Value offset, java.util.Optional<Value> B, java.util.Optional<Value> mask, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<long[]> strides, java.util.Optional<Long> offset_group, java.util.Optional<Long> group, java.util.Optional<long[]> kernel_shape) {
             super(SCHEMA, resultType, Set.of(), List.of(X, W, offset, B, mask), List.of(pads, dilations, strides, offset_group, group, kernel_shape));
         }
 
@@ -6811,39 +6811,39 @@ public java.util.Optional<Value> mask() {
             return i != -1 ? java.util.Optional.of(operands().get(3 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<int[]> pads() {
-            int[] pads = Attribute.pads.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(pads).map(int[]::clone);
+        public java.util.Optional<long[]> pads() {
+            long[] pads = Attribute.pads.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(pads).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> dilations() {
-            int[] dilations = Attribute.dilations.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(dilations).map(int[]::clone);
+        public java.util.Optional<long[]> dilations() {
+            long[] dilations = Attribute.dilations.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(dilations).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> strides() {
-            int[] strides = Attribute.strides.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(strides).map(int[]::clone);
+        public java.util.Optional<long[]> strides() {
+            long[] strides = Attribute.strides.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(strides).map(long[]::clone);
         }
 
-        public java.util.Optional<Integer> offset_group() {
-            Integer offset_group = Attribute.offset_group.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> offset_group() {
+            Long offset_group = Attribute.offset_group.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(offset_group);
         }
 
-        public java.util.Optional<Integer> group() {
-            Integer group = Attribute.group.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> group() {
+            Long group = Attribute.group.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(group);
         }
 
-        public java.util.Optional<int[]> kernel_shape() {
-            int[] kernel_shape = Attribute.kernel_shape.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(kernel_shape).map(int[]::clone);
+        public java.util.Optional<long[]> kernel_shape() {
+            long[] kernel_shape = Attribute.kernel_shape.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(kernel_shape).map(long[]::clone);
         }
 
     }
 
-    public static DeformConv DeformConv(TypeElement resultType, Value X, Value W, Value offset, java.util.Optional<Value> B, java.util.Optional<Value> mask, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<int[]> strides, java.util.Optional<Integer> offset_group, java.util.Optional<Integer> group, java.util.Optional<int[]> kernel_shape) {
+    public static DeformConv DeformConv(TypeElement resultType, Value X, Value W, Value offset, java.util.Optional<Value> B, java.util.Optional<Value> mask, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<long[]> strides, java.util.Optional<Long> offset_group, java.util.Optional<Long> group, java.util.Optional<long[]> kernel_shape) {
         return new DeformConv(resultType, X, W, offset, B, mask, pads, dilations, strides, offset_group, group, kernel_shape);
     }
 
@@ -6853,7 +6853,7 @@ public static final class DepthToSpace extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             mode(String.class, true, "DCR"),
-            blocksize(Integer.class, false, null),
+            blocksize(Long.class, false, null),
             ;
 
                 final Class<?> t;
@@ -6964,7 +6964,7 @@ public DepthToSpace transform(CopyContext cc, OpTransformer ot) {
             return new DepthToSpace(this, cc);
         }
 
-        DepthToSpace(TypeElement resultType, Value input, java.util.Optional<String> mode, int blocksize) {
+        DepthToSpace(TypeElement resultType, Value input, java.util.Optional<String> mode, long blocksize) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(mode, blocksize));
         }
 
@@ -6987,14 +6987,14 @@ public java.util.Optional<String> mode() {
             return java.util.Optional.ofNullable(mode);
         }
 
-        public int blocksize() {
-            int blocksize = Attribute.blocksize.access(int.class, onnxAttributes);
+        public long blocksize() {
+            long blocksize = Attribute.blocksize.access(long.class, onnxAttributes);
             return blocksize;
         }
 
     }
 
-    public static DepthToSpace DepthToSpace(TypeElement resultType, Value input, java.util.Optional<String> mode, int blocksize) {
+    public static DepthToSpace DepthToSpace(TypeElement resultType, Value input, java.util.Optional<String> mode, long blocksize) {
         return new DepthToSpace(resultType, input, mode, blocksize);
     }
 
@@ -7003,8 +7003,8 @@ public static final class DequantizeLinear extends OnnxOp {
         public static final String NAME = "DequantizeLinear";
 
         public enum Attribute implements OnnxAttribute {
-            axis(Integer.class, true, 1),
-            block_size(Integer.class, true, 0),
+            axis(Long.class, true, 1),
+            block_size(Long.class, true, 0),
             ;
 
                 final Class<?> t;
@@ -7118,7 +7118,7 @@ public DequantizeLinear transform(CopyContext cc, OpTransformer ot) {
             return new DequantizeLinear(this, cc);
         }
 
-        DequantizeLinear(TypeElement resultType, Value x, Value x_scale, java.util.Optional<Value> x_zero_point, java.util.Optional<Integer> axis, java.util.Optional<Integer> block_size) {
+        DequantizeLinear(TypeElement resultType, Value x, Value x_scale, java.util.Optional<Value> x_zero_point, java.util.Optional<Long> axis, java.util.Optional<Long> block_size) {
             super(SCHEMA, resultType, Set.of(), List.of(x, x_scale, x_zero_point), List.of(axis, block_size));
         }
 
@@ -7145,19 +7145,19 @@ public java.util.Optional<Value> x_zero_point() {
             return i != -1 ? java.util.Optional.of(operands().get(2 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
-        public java.util.Optional<Integer> block_size() {
-            Integer block_size = Attribute.block_size.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> block_size() {
+            Long block_size = Attribute.block_size.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(block_size);
         }
 
     }
 
-    public static DequantizeLinear DequantizeLinear(TypeElement resultType, Value x, Value x_scale, java.util.Optional<Value> x_zero_point, java.util.Optional<Integer> axis, java.util.Optional<Integer> block_size) {
+    public static DequantizeLinear DequantizeLinear(TypeElement resultType, Value x, Value x_scale, java.util.Optional<Value> x_zero_point, java.util.Optional<Long> axis, java.util.Optional<Long> block_size) {
         return new DequantizeLinear(resultType, x, x_scale, x_zero_point, axis, block_size);
     }
 
@@ -7281,7 +7281,7 @@ public static final class DictVectorizer extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             string_vocabulary(String[].class, true, null),
-            int64_vocabulary(int[].class, true, null),
+            int64_vocabulary(long[].class, true, null),
             ;
 
                 final Class<?> t;
@@ -7393,7 +7393,7 @@ public DictVectorizer transform(CopyContext cc, OpTransformer ot) {
             return new DictVectorizer(this, cc);
         }
 
-        DictVectorizer(TypeElement resultType, Value X, java.util.Optional<String[]> string_vocabulary, java.util.Optional<int[]> int64_vocabulary) {
+        DictVectorizer(TypeElement resultType, Value X, java.util.Optional<String[]> string_vocabulary, java.util.Optional<long[]> int64_vocabulary) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(string_vocabulary, int64_vocabulary));
         }
 
@@ -7416,14 +7416,14 @@ public java.util.Optional<String[]> string_vocabulary() {
             return java.util.Optional.ofNullable(string_vocabulary).map(String[]::clone);
         }
 
-        public java.util.Optional<int[]> int64_vocabulary() {
-            int[] int64_vocabulary = Attribute.int64_vocabulary.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(int64_vocabulary).map(int[]::clone);
+        public java.util.Optional<long[]> int64_vocabulary() {
+            long[] int64_vocabulary = Attribute.int64_vocabulary.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(int64_vocabulary).map(long[]::clone);
         }
 
     }
 
-    public static DictVectorizer DictVectorizer(TypeElement resultType, Value X, java.util.Optional<String[]> string_vocabulary, java.util.Optional<int[]> int64_vocabulary) {
+    public static DictVectorizer DictVectorizer(TypeElement resultType, Value X, java.util.Optional<String[]> string_vocabulary, java.util.Optional<long[]> int64_vocabulary) {
         return new DictVectorizer(resultType, X, string_vocabulary, int64_vocabulary);
     }
 
@@ -7551,7 +7551,7 @@ public static final class Dropout extends OnnxOp {
         public static final String NAME = "Dropout";
 
         public enum Attribute implements OnnxAttribute {
-            seed(Integer.class, true, null),
+            seed(Long.class, true, null),
             ;
 
                 final Class<?> t;
@@ -7667,7 +7667,7 @@ public Dropout transform(CopyContext cc, OpTransformer ot) {
             return new Dropout(this, cc);
         }
 
-        Dropout(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value data, java.util.Optional<Value> ratio, java.util.Optional<Value> training_mode, java.util.Optional<Integer> seed) {
+        Dropout(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value data, java.util.Optional<Value> ratio, java.util.Optional<Value> training_mode, java.util.Optional<Long> seed) {
             super(SCHEMA, resultType, optionalOutputs, List.of(data, ratio, training_mode), List.of(seed));
         }
 
@@ -7695,14 +7695,14 @@ public java.util.Optional<Value> training_mode() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> seed() {
-            Integer seed = Attribute.seed.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> seed() {
+            Long seed = Attribute.seed.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(seed);
         }
 
     }
 
-    public static Dropout Dropout(TypeElement resultType, Set<Dropout.OutputParameter> optionalOutputs, Value data, java.util.Optional<Value> ratio, java.util.Optional<Value> training_mode, java.util.Optional<Integer> seed) {
+    public static Dropout Dropout(TypeElement resultType, Set<Dropout.OutputParameter> optionalOutputs, Value data, java.util.Optional<Value> ratio, java.util.Optional<Value> training_mode, java.util.Optional<Long> seed) {
         return new Dropout(resultType, optionalOutputs, data, ratio, training_mode, seed);
     }
 
@@ -8585,8 +8585,8 @@ public static final class EyeLike extends OnnxOp {
         public static final String NAME = "EyeLike";
 
         public enum Attribute implements OnnxAttribute {
-            dtype(Integer.class, true, null),
-            k(Integer.class, true, 0),
+            dtype(Long.class, true, null),
+            k(Long.class, true, 0),
             ;
 
                 final Class<?> t;
@@ -8698,7 +8698,7 @@ public EyeLike transform(CopyContext cc, OpTransformer ot) {
             return new EyeLike(this, cc);
         }
 
-        EyeLike(TypeElement resultType, Value input, java.util.Optional<Integer> dtype, java.util.Optional<Integer> k) {
+        EyeLike(TypeElement resultType, Value input, java.util.Optional<Long> dtype, java.util.Optional<Long> k) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(dtype, k));
         }
 
@@ -8716,19 +8716,19 @@ public Value input() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> dtype() {
-            Integer dtype = Attribute.dtype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> dtype() {
+            Long dtype = Attribute.dtype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(dtype);
         }
 
-        public java.util.Optional<Integer> k() {
-            Integer k = Attribute.k.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> k() {
+            Long k = Attribute.k.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(k);
         }
 
     }
 
-    public static EyeLike EyeLike(TypeElement resultType, Value input, java.util.Optional<Integer> dtype, java.util.Optional<Integer> k) {
+    public static EyeLike EyeLike(TypeElement resultType, Value input, java.util.Optional<Long> dtype, java.util.Optional<Long> k) {
         return new EyeLike(resultType, input, dtype, k);
     }
 
@@ -8737,7 +8737,7 @@ public static final class FeatureVectorizer extends OnnxOp {
         public static final String NAME = "FeatureVectorizer";
 
         public enum Attribute implements OnnxAttribute {
-            inputdimensions(int[].class, true, null),
+            inputdimensions(long[].class, true, null),
             ;
 
                 final Class<?> t;
@@ -8848,7 +8848,7 @@ public FeatureVectorizer transform(CopyContext cc, OpTransformer ot) {
             return new FeatureVectorizer(this, cc);
         }
 
-        FeatureVectorizer(TypeElement resultType, List<Value> X, java.util.Optional<int[]> inputdimensions) {
+        FeatureVectorizer(TypeElement resultType, List<Value> X, java.util.Optional<long[]> inputdimensions) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(inputdimensions));
         }
 
@@ -8866,14 +8866,14 @@ public List<Value> X() {
             return operands();
         }
 
-        public java.util.Optional<int[]> inputdimensions() {
-            int[] inputdimensions = Attribute.inputdimensions.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(inputdimensions).map(int[]::clone);
+        public java.util.Optional<long[]> inputdimensions() {
+            long[] inputdimensions = Attribute.inputdimensions.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(inputdimensions).map(long[]::clone);
         }
 
     }
 
-    public static FeatureVectorizer FeatureVectorizer(TypeElement resultType, List<Value> X, java.util.Optional<int[]> inputdimensions) {
+    public static FeatureVectorizer FeatureVectorizer(TypeElement resultType, List<Value> X, java.util.Optional<long[]> inputdimensions) {
         return new FeatureVectorizer(resultType, X, inputdimensions);
     }
 
@@ -8882,7 +8882,7 @@ public static final class Flatten extends OnnxOp {
         public static final String NAME = "Flatten";
 
         public enum Attribute implements OnnxAttribute {
-            axis(Integer.class, true, 1),
+            axis(Long.class, true, 1),
             ;
 
                 final Class<?> t;
@@ -8993,7 +8993,7 @@ public Flatten transform(CopyContext cc, OpTransformer ot) {
             return new Flatten(this, cc);
         }
 
-        Flatten(TypeElement resultType, Value input, java.util.Optional<Integer> axis) {
+        Flatten(TypeElement resultType, Value input, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(axis));
         }
 
@@ -9011,14 +9011,14 @@ public Value input() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static Flatten Flatten(TypeElement resultType, Value input, java.util.Optional<Integer> axis) {
+    public static Flatten Flatten(TypeElement resultType, Value input, java.util.Optional<Long> axis) {
         return new Flatten(resultType, input, axis);
     }
 
@@ -9141,12 +9141,12 @@ public static final class GRU extends OnnxOp {
         public static final String NAME = "GRU";
 
         public enum Attribute implements OnnxAttribute {
-            layout(Integer.class, true, 0),
+            layout(Long.class, true, 0),
             activation_alpha(float[].class, true, null),
-            hidden_size(Integer.class, true, null),
+            hidden_size(Long.class, true, null),
             activation_beta(float[].class, true, null),
             activations(String[].class, true, null),
-            linear_before_reset(Integer.class, true, 0),
+            linear_before_reset(Long.class, true, 0),
             clip(Float.class, true, null),
             direction(String.class, true, "forward"),
             ;
@@ -9266,7 +9266,7 @@ public GRU transform(CopyContext cc, OpTransformer ot) {
             return new GRU(this, cc);
         }
 
-        GRU(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, Value W, Value R, java.util.Optional<Value> B, java.util.Optional<Value> sequence_lens, java.util.Optional<Value> initial_h, java.util.Optional<Integer> layout, java.util.Optional<float[]> activation_alpha, java.util.Optional<Integer> hidden_size, java.util.Optional<float[]> activation_beta, java.util.Optional<String[]> activations, java.util.Optional<Integer> linear_before_reset, java.util.Optional<Float> clip, java.util.Optional<String> direction) {
+        GRU(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, Value W, Value R, java.util.Optional<Value> B, java.util.Optional<Value> sequence_lens, java.util.Optional<Value> initial_h, java.util.Optional<Long> layout, java.util.Optional<float[]> activation_alpha, java.util.Optional<Long> hidden_size, java.util.Optional<float[]> activation_beta, java.util.Optional<String[]> activations, java.util.Optional<Long> linear_before_reset, java.util.Optional<Float> clip, java.util.Optional<String> direction) {
             super(SCHEMA, resultType, optionalOutputs, List.of(X, W, R, B, sequence_lens, initial_h), List.of(layout, activation_alpha, hidden_size, activation_beta, activations, linear_before_reset, clip, direction));
         }
 
@@ -9307,8 +9307,8 @@ public java.util.Optional<Value> initial_h() {
             return i != -1 ? java.util.Optional.of(operands().get(3 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> layout() {
-            Integer layout = Attribute.layout.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> layout() {
+            Long layout = Attribute.layout.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(layout);
         }
 
@@ -9317,8 +9317,8 @@ public java.util.Optional<float[]> activation_alpha() {
             return java.util.Optional.ofNullable(activation_alpha).map(float[]::clone);
         }
 
-        public java.util.Optional<Integer> hidden_size() {
-            Integer hidden_size = Attribute.hidden_size.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> hidden_size() {
+            Long hidden_size = Attribute.hidden_size.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(hidden_size);
         }
 
@@ -9332,8 +9332,8 @@ public java.util.Optional<String[]> activations() {
             return java.util.Optional.ofNullable(activations).map(String[]::clone);
         }
 
-        public java.util.Optional<Integer> linear_before_reset() {
-            Integer linear_before_reset = Attribute.linear_before_reset.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> linear_before_reset() {
+            Long linear_before_reset = Attribute.linear_before_reset.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(linear_before_reset);
         }
 
@@ -9349,7 +9349,7 @@ public java.util.Optional<String> direction() {
 
     }
 
-    public static GRU GRU(TypeElement resultType, Set<GRU.OutputParameter> optionalOutputs, Value X, Value W, Value R, java.util.Optional<Value> B, java.util.Optional<Value> sequence_lens, java.util.Optional<Value> initial_h, java.util.Optional<Integer> layout, java.util.Optional<float[]> activation_alpha, java.util.Optional<Integer> hidden_size, java.util.Optional<float[]> activation_beta, java.util.Optional<String[]> activations, java.util.Optional<Integer> linear_before_reset, java.util.Optional<Float> clip, java.util.Optional<String> direction) {
+    public static GRU GRU(TypeElement resultType, Set<GRU.OutputParameter> optionalOutputs, Value X, Value W, Value R, java.util.Optional<Value> B, java.util.Optional<Value> sequence_lens, java.util.Optional<Value> initial_h, java.util.Optional<Long> layout, java.util.Optional<float[]> activation_alpha, java.util.Optional<Long> hidden_size, java.util.Optional<float[]> activation_beta, java.util.Optional<String[]> activations, java.util.Optional<Long> linear_before_reset, java.util.Optional<Float> clip, java.util.Optional<String> direction) {
         return new GRU(resultType, optionalOutputs, X, W, R, B, sequence_lens, initial_h, layout, activation_alpha, hidden_size, activation_beta, activations, linear_before_reset, clip, direction);
     }
 
@@ -9358,7 +9358,7 @@ public static final class Gather extends OnnxOp {
         public static final String NAME = "Gather";
 
         public enum Attribute implements OnnxAttribute {
-            axis(Integer.class, true, 0),
+            axis(Long.class, true, 0),
             ;
 
                 final Class<?> t;
@@ -9471,7 +9471,7 @@ public Gather transform(CopyContext cc, OpTransformer ot) {
             return new Gather(this, cc);
         }
 
-        Gather(TypeElement resultType, Value data, Value indices, java.util.Optional<Integer> axis) {
+        Gather(TypeElement resultType, Value data, Value indices, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(data, indices), List.of(axis));
         }
 
@@ -9493,14 +9493,14 @@ public Value indices() {
             return operands().get(1);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static Gather Gather(TypeElement resultType, Value data, Value indices, java.util.Optional<Integer> axis) {
+    public static Gather Gather(TypeElement resultType, Value data, Value indices, java.util.Optional<Long> axis) {
         return new Gather(resultType, data, indices, axis);
     }
 
@@ -9509,7 +9509,7 @@ public static final class GatherElements extends OnnxOp {
         public static final String NAME = "GatherElements";
 
         public enum Attribute implements OnnxAttribute {
-            axis(Integer.class, true, 0),
+            axis(Long.class, true, 0),
             ;
 
                 final Class<?> t;
@@ -9622,7 +9622,7 @@ public GatherElements transform(CopyContext cc, OpTransformer ot) {
             return new GatherElements(this, cc);
         }
 
-        GatherElements(TypeElement resultType, Value data, Value indices, java.util.Optional<Integer> axis) {
+        GatherElements(TypeElement resultType, Value data, Value indices, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(data, indices), List.of(axis));
         }
 
@@ -9644,14 +9644,14 @@ public Value indices() {
             return operands().get(1);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static GatherElements GatherElements(TypeElement resultType, Value data, Value indices, java.util.Optional<Integer> axis) {
+    public static GatherElements GatherElements(TypeElement resultType, Value data, Value indices, java.util.Optional<Long> axis) {
         return new GatherElements(resultType, data, indices, axis);
     }
 
@@ -9660,7 +9660,7 @@ public static final class GatherND extends OnnxOp {
         public static final String NAME = "GatherND";
 
         public enum Attribute implements OnnxAttribute {
-            batch_dims(Integer.class, true, 0),
+            batch_dims(Long.class, true, 0),
             ;
 
                 final Class<?> t;
@@ -9772,7 +9772,7 @@ public GatherND transform(CopyContext cc, OpTransformer ot) {
             return new GatherND(this, cc);
         }
 
-        GatherND(TypeElement resultType, Value data, Value indices, java.util.Optional<Integer> batch_dims) {
+        GatherND(TypeElement resultType, Value data, Value indices, java.util.Optional<Long> batch_dims) {
             super(SCHEMA, resultType, Set.of(), List.of(data, indices), List.of(batch_dims));
         }
 
@@ -9794,14 +9794,14 @@ public Value indices() {
             return operands().get(1);
         }
 
-        public java.util.Optional<Integer> batch_dims() {
-            Integer batch_dims = Attribute.batch_dims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> batch_dims() {
+            Long batch_dims = Attribute.batch_dims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(batch_dims);
         }
 
     }
 
-    public static GatherND GatherND(TypeElement resultType, Value data, Value indices, java.util.Optional<Integer> batch_dims) {
+    public static GatherND GatherND(TypeElement resultType, Value data, Value indices, java.util.Optional<Long> batch_dims) {
         return new GatherND(resultType, data, indices, batch_dims);
     }
 
@@ -9956,9 +9956,9 @@ public static final class Gemm extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             alpha(Float.class, true, 1.0f),
-            transB(Integer.class, true, 0),
+            transB(Long.class, true, 0),
             beta(Float.class, true, 1.0f),
-            transA(Integer.class, true, 0),
+            transA(Long.class, true, 0),
             ;
 
                 final Class<?> t;
@@ -10071,7 +10071,7 @@ public Gemm transform(CopyContext cc, OpTransformer ot) {
             return new Gemm(this, cc);
         }
 
-        Gemm(TypeElement resultType, Value A, Value B, java.util.Optional<Value> C, java.util.Optional<Float> alpha, java.util.Optional<Integer> transB, java.util.Optional<Float> beta, java.util.Optional<Integer> transA) {
+        Gemm(TypeElement resultType, Value A, Value B, java.util.Optional<Value> C, java.util.Optional<Float> alpha, java.util.Optional<Long> transB, java.util.Optional<Float> beta, java.util.Optional<Long> transA) {
             super(SCHEMA, resultType, Set.of(), List.of(A, B, C), List.of(alpha, transB, beta, transA));
         }
 
@@ -10103,8 +10103,8 @@ public java.util.Optional<Float> alpha() {
             return java.util.Optional.ofNullable(alpha);
         }
 
-        public java.util.Optional<Integer> transB() {
-            Integer transB = Attribute.transB.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> transB() {
+            Long transB = Attribute.transB.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(transB);
         }
 
@@ -10113,14 +10113,14 @@ public java.util.Optional<Float> beta() {
             return java.util.Optional.ofNullable(beta);
         }
 
-        public java.util.Optional<Integer> transA() {
-            Integer transA = Attribute.transA.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> transA() {
+            Long transA = Attribute.transA.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(transA);
         }
 
     }
 
-    public static Gemm Gemm(TypeElement resultType, Value A, Value B, java.util.Optional<Value> C, java.util.Optional<Float> alpha, java.util.Optional<Integer> transB, java.util.Optional<Float> beta, java.util.Optional<Integer> transA) {
+    public static Gemm Gemm(TypeElement resultType, Value A, Value B, java.util.Optional<Value> C, java.util.Optional<Float> alpha, java.util.Optional<Long> transB, java.util.Optional<Float> beta, java.util.Optional<Long> transA) {
         return new Gemm(resultType, A, B, C, alpha, transB, beta, transA);
     }
 
@@ -10243,7 +10243,7 @@ public static final class GlobalLpPool extends OnnxOp {
         public static final String NAME = "GlobalLpPool";
 
         public enum Attribute implements OnnxAttribute {
-            p(Integer.class, true, 2),
+            p(Long.class, true, 2),
             ;
 
                 final Class<?> t;
@@ -10354,7 +10354,7 @@ public GlobalLpPool transform(CopyContext cc, OpTransformer ot) {
             return new GlobalLpPool(this, cc);
         }
 
-        GlobalLpPool(TypeElement resultType, Value X, java.util.Optional<Integer> p) {
+        GlobalLpPool(TypeElement resultType, Value X, java.util.Optional<Long> p) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(p));
         }
 
@@ -10372,14 +10372,14 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> p() {
-            Integer p = Attribute.p.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> p() {
+            Long p = Attribute.p.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(p);
         }
 
     }
 
-    public static GlobalLpPool GlobalLpPool(TypeElement resultType, Value X, java.util.Optional<Integer> p) {
+    public static GlobalLpPool GlobalLpPool(TypeElement resultType, Value X, java.util.Optional<Long> p) {
         return new GlobalLpPool(resultType, X, p);
     }
 
@@ -10901,7 +10901,7 @@ public static final class GridSample extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             mode(String.class, true, "linear"),
-            align_corners(Integer.class, true, 0),
+            align_corners(Long.class, true, 0),
             padding_mode(String.class, true, "zeros"),
             ;
 
@@ -11015,7 +11015,7 @@ public GridSample transform(CopyContext cc, OpTransformer ot) {
             return new GridSample(this, cc);
         }
 
-        GridSample(TypeElement resultType, Value X, Value grid, java.util.Optional<String> mode, java.util.Optional<Integer> align_corners, java.util.Optional<String> padding_mode) {
+        GridSample(TypeElement resultType, Value X, Value grid, java.util.Optional<String> mode, java.util.Optional<Long> align_corners, java.util.Optional<String> padding_mode) {
             super(SCHEMA, resultType, Set.of(), List.of(X, grid), List.of(mode, align_corners, padding_mode));
         }
 
@@ -11042,8 +11042,8 @@ public java.util.Optional<String> mode() {
             return java.util.Optional.ofNullable(mode);
         }
 
-        public java.util.Optional<Integer> align_corners() {
-            Integer align_corners = Attribute.align_corners.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> align_corners() {
+            Long align_corners = Attribute.align_corners.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(align_corners);
         }
 
@@ -11054,7 +11054,7 @@ public java.util.Optional<String> padding_mode() {
 
     }
 
-    public static GridSample GridSample(TypeElement resultType, Value X, Value grid, java.util.Optional<String> mode, java.util.Optional<Integer> align_corners, java.util.Optional<String> padding_mode) {
+    public static GridSample GridSample(TypeElement resultType, Value X, Value grid, java.util.Optional<String> mode, java.util.Optional<Long> align_corners, java.util.Optional<String> padding_mode) {
         return new GridSample(resultType, X, grid, mode, align_corners, padding_mode);
     }
 
@@ -11064,8 +11064,8 @@ public static final class GroupNormalization extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             epsilon(Float.class, true, 1.0E-5f),
-            stash_type(Integer.class, true, 1),
-            num_groups(Integer.class, false, null),
+            stash_type(Long.class, true, 1L),
+            num_groups(Long.class, false, null),
             ;
 
                 final Class<?> t;
@@ -11178,7 +11178,7 @@ public GroupNormalization transform(CopyContext cc, OpTransformer ot) {
             return new GroupNormalization(this, cc);
         }
 
-        GroupNormalization(TypeElement resultType, Value X, Value scale, Value bias, java.util.Optional<Float> epsilon, java.util.Optional<Integer> stash_type, int num_groups) {
+        GroupNormalization(TypeElement resultType, Value X, Value scale, Value bias, java.util.Optional<Float> epsilon, java.util.Optional<Long> stash_type, long num_groups) {
             super(SCHEMA, resultType, Set.of(), List.of(X, scale, bias), List.of(epsilon, stash_type, num_groups));
         }
 
@@ -11209,19 +11209,19 @@ public java.util.Optional<Float> epsilon() {
             return java.util.Optional.ofNullable(epsilon);
         }
 
-        public java.util.Optional<Integer> stash_type() {
-            Integer stash_type = Attribute.stash_type.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> stash_type() {
+            Long stash_type = Attribute.stash_type.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(stash_type);
         }
 
-        public int num_groups() {
-            int num_groups = Attribute.num_groups.access(int.class, onnxAttributes);
+        public long num_groups() {
+            long num_groups = Attribute.num_groups.access(long.class, onnxAttributes);
             return num_groups;
         }
 
     }
 
-    public static GroupNormalization GroupNormalization(TypeElement resultType, Value X, Value scale, Value bias, java.util.Optional<Float> epsilon, java.util.Optional<Integer> stash_type, int num_groups) {
+    public static GroupNormalization GroupNormalization(TypeElement resultType, Value X, Value scale, Value bias, java.util.Optional<Float> epsilon, java.util.Optional<Long> stash_type, long num_groups) {
         return new GroupNormalization(resultType, X, scale, bias, epsilon, stash_type, num_groups);
     }
 
@@ -11230,8 +11230,8 @@ public static final class HammingWindow extends OnnxOp {
         public static final String NAME = "HammingWindow";
 
         public enum Attribute implements OnnxAttribute {
-            periodic(Integer.class, true, 1),
-            output_datatype(Integer.class, true, 1),
+            periodic(Long.class, true, 1L),
+            output_datatype(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -11343,7 +11343,7 @@ public HammingWindow transform(CopyContext cc, OpTransformer ot) {
             return new HammingWindow(this, cc);
         }
 
-        HammingWindow(TypeElement resultType, Value size, java.util.Optional<Integer> periodic, java.util.Optional<Integer> output_datatype) {
+        HammingWindow(TypeElement resultType, Value size, java.util.Optional<Long> periodic, java.util.Optional<Long> output_datatype) {
             super(SCHEMA, resultType, Set.of(), List.of(size), List.of(periodic, output_datatype));
         }
 
@@ -11361,19 +11361,19 @@ public Value size() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> periodic() {
-            Integer periodic = Attribute.periodic.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> periodic() {
+            Long periodic = Attribute.periodic.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(periodic);
         }
 
-        public java.util.Optional<Integer> output_datatype() {
-            Integer output_datatype = Attribute.output_datatype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> output_datatype() {
+            Long output_datatype = Attribute.output_datatype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(output_datatype);
         }
 
     }
 
-    public static HammingWindow HammingWindow(TypeElement resultType, Value size, java.util.Optional<Integer> periodic, java.util.Optional<Integer> output_datatype) {
+    public static HammingWindow HammingWindow(TypeElement resultType, Value size, java.util.Optional<Long> periodic, java.util.Optional<Long> output_datatype) {
         return new HammingWindow(resultType, size, periodic, output_datatype);
     }
 
@@ -11382,8 +11382,8 @@ public static final class HannWindow extends OnnxOp {
         public static final String NAME = "HannWindow";
 
         public enum Attribute implements OnnxAttribute {
-            periodic(Integer.class, true, 1),
-            output_datatype(Integer.class, true, 1),
+            periodic(Long.class, true, 1L),
+            output_datatype(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -11495,7 +11495,7 @@ public HannWindow transform(CopyContext cc, OpTransformer ot) {
             return new HannWindow(this, cc);
         }
 
-        HannWindow(TypeElement resultType, Value size, java.util.Optional<Integer> periodic, java.util.Optional<Integer> output_datatype) {
+        HannWindow(TypeElement resultType, Value size, java.util.Optional<Long> periodic, java.util.Optional<Long> output_datatype) {
             super(SCHEMA, resultType, Set.of(), List.of(size), List.of(periodic, output_datatype));
         }
 
@@ -11513,19 +11513,19 @@ public Value size() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> periodic() {
-            Integer periodic = Attribute.periodic.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> periodic() {
+            Long periodic = Attribute.periodic.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(periodic);
         }
 
-        public java.util.Optional<Integer> output_datatype() {
-            Integer output_datatype = Attribute.output_datatype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> output_datatype() {
+            Long output_datatype = Attribute.output_datatype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(output_datatype);
         }
 
     }
 
-    public static HannWindow HannWindow(TypeElement resultType, Value size, java.util.Optional<Integer> periodic, java.util.Optional<Integer> output_datatype) {
+    public static HannWindow HannWindow(TypeElement resultType, Value size, java.util.Optional<Long> periodic, java.util.Optional<Long> output_datatype) {
         return new HannWindow(resultType, size, periodic, output_datatype);
     }
 
@@ -11799,7 +11799,7 @@ public static final class Hardmax extends OnnxOp {
         public static final String NAME = "Hardmax";
 
         public enum Attribute implements OnnxAttribute {
-            axis(Integer.class, true, -1),
+            axis(Long.class, true, -1L),
             ;
 
                 final Class<?> t;
@@ -11910,7 +11910,7 @@ public Hardmax transform(CopyContext cc, OpTransformer ot) {
             return new Hardmax(this, cc);
         }
 
-        Hardmax(TypeElement resultType, Value input, java.util.Optional<Integer> axis) {
+        Hardmax(TypeElement resultType, Value input, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(axis));
         }
 
@@ -11928,14 +11928,14 @@ public Value input() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static Hardmax Hardmax(TypeElement resultType, Value input, java.util.Optional<Integer> axis) {
+    public static Hardmax Hardmax(TypeElement resultType, Value input, java.util.Optional<Long> axis) {
         return new Hardmax(resultType, input, axis);
     }
 
@@ -12204,9 +12204,9 @@ public static final class Imputer extends OnnxOp {
         public static final String NAME = "Imputer";
 
         public enum Attribute implements OnnxAttribute {
-            replaced_value_int64(Integer.class, true, 0),
+            replaced_value_int64(Long.class, true, 0L),
             replaced_value_float(Float.class, true, 0.0f),
-            imputed_value_int64s(int[].class, true, null),
+            imputed_value_int64s(long[].class, true, null),
             imputed_value_floats(float[].class, true, null),
             ;
 
@@ -12318,7 +12318,7 @@ public Imputer transform(CopyContext cc, OpTransformer ot) {
             return new Imputer(this, cc);
         }
 
-        Imputer(TypeElement resultType, Value X, java.util.Optional<Integer> replaced_value_int64, java.util.Optional<Float> replaced_value_float, java.util.Optional<int[]> imputed_value_int64s, java.util.Optional<float[]> imputed_value_floats) {
+        Imputer(TypeElement resultType, Value X, java.util.Optional<Long> replaced_value_int64, java.util.Optional<Float> replaced_value_float, java.util.Optional<long[]> imputed_value_int64s, java.util.Optional<float[]> imputed_value_floats) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(replaced_value_int64, replaced_value_float, imputed_value_int64s, imputed_value_floats));
         }
 
@@ -12336,8 +12336,8 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> replaced_value_int64() {
-            Integer replaced_value_int64 = Attribute.replaced_value_int64.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> replaced_value_int64() {
+            Long replaced_value_int64 = Attribute.replaced_value_int64.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(replaced_value_int64);
         }
 
@@ -12346,9 +12346,9 @@ public java.util.Optional<Float> replaced_value_float() {
             return java.util.Optional.ofNullable(replaced_value_float);
         }
 
-        public java.util.Optional<int[]> imputed_value_int64s() {
-            int[] imputed_value_int64s = Attribute.imputed_value_int64s.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(imputed_value_int64s).map(int[]::clone);
+        public java.util.Optional<long[]> imputed_value_int64s() {
+            long[] imputed_value_int64s = Attribute.imputed_value_int64s.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(imputed_value_int64s).map(long[]::clone);
         }
 
         public java.util.Optional<float[]> imputed_value_floats() {
@@ -12358,7 +12358,7 @@ public java.util.Optional<float[]> imputed_value_floats() {
 
     }
 
-    public static Imputer Imputer(TypeElement resultType, Value X, java.util.Optional<Integer> replaced_value_int64, java.util.Optional<Float> replaced_value_float, java.util.Optional<int[]> imputed_value_int64s, java.util.Optional<float[]> imputed_value_floats) {
+    public static Imputer Imputer(TypeElement resultType, Value X, java.util.Optional<Long> replaced_value_int64, java.util.Optional<Float> replaced_value_float, java.util.Optional<long[]> imputed_value_int64s, java.util.Optional<float[]> imputed_value_floats) {
         return new Imputer(resultType, X, replaced_value_int64, replaced_value_float, imputed_value_int64s, imputed_value_floats);
     }
 
@@ -12522,8 +12522,8 @@ public static final class IsInf extends OnnxOp {
         public static final String NAME = "IsInf";
 
         public enum Attribute implements OnnxAttribute {
-            detect_negative(Integer.class, true, 1),
-            detect_positive(Integer.class, true, 1),
+            detect_negative(Long.class, true, 1L),
+            detect_positive(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -12635,7 +12635,7 @@ public IsInf transform(CopyContext cc, OpTransformer ot) {
             return new IsInf(this, cc);
         }
 
-        IsInf(TypeElement resultType, Value X, java.util.Optional<Integer> detect_negative, java.util.Optional<Integer> detect_positive) {
+        IsInf(TypeElement resultType, Value X, java.util.Optional<Long> detect_negative, java.util.Optional<Long> detect_positive) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(detect_negative, detect_positive));
         }
 
@@ -12653,19 +12653,19 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> detect_negative() {
-            Integer detect_negative = Attribute.detect_negative.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> detect_negative() {
+            Long detect_negative = Attribute.detect_negative.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(detect_negative);
         }
 
-        public java.util.Optional<Integer> detect_positive() {
-            Integer detect_positive = Attribute.detect_positive.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> detect_positive() {
+            Long detect_positive = Attribute.detect_positive.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(detect_positive);
         }
 
     }
 
-    public static IsInf IsInf(TypeElement resultType, Value X, java.util.Optional<Integer> detect_negative, java.util.Optional<Integer> detect_positive) {
+    public static IsInf IsInf(TypeElement resultType, Value X, java.util.Optional<Long> detect_negative, java.util.Optional<Long> detect_positive) {
         return new IsInf(resultType, X, detect_negative, detect_positive);
     }
 
@@ -12789,7 +12789,7 @@ public static final class LRN extends OnnxOp {
         public static final String NAME = "LRN";
 
         public enum Attribute implements OnnxAttribute {
-            size(Integer.class, false, null),
+            size(Long.class, false, null),
             alpha(Float.class, true, 1.0E-4f),
             bias(Float.class, true, 1.0f),
             beta(Float.class, true, 0.75f),
@@ -12903,7 +12903,7 @@ public LRN transform(CopyContext cc, OpTransformer ot) {
             return new LRN(this, cc);
         }
 
-        LRN(TypeElement resultType, Value X, int size, java.util.Optional<Float> alpha, java.util.Optional<Float> bias, java.util.Optional<Float> beta) {
+        LRN(TypeElement resultType, Value X, long size, java.util.Optional<Float> alpha, java.util.Optional<Float> bias, java.util.Optional<Float> beta) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(size, alpha, bias, beta));
         }
 
@@ -12921,8 +12921,8 @@ public Value X() {
             return operands().get(0);
         }
 
-        public int size() {
-            int size = Attribute.size.access(int.class, onnxAttributes);
+        public long size() {
+            long size = Attribute.size.access(long.class, onnxAttributes);
             return size;
         }
 
@@ -12943,7 +12943,7 @@ public java.util.Optional<Float> beta() {
 
     }
 
-    public static LRN LRN(TypeElement resultType, Value X, int size, java.util.Optional<Float> alpha, java.util.Optional<Float> bias, java.util.Optional<Float> beta) {
+    public static LRN LRN(TypeElement resultType, Value X, long size, java.util.Optional<Float> alpha, java.util.Optional<Float> bias, java.util.Optional<Float> beta) {
         return new LRN(resultType, X, size, alpha, bias, beta);
     }
 
@@ -12952,10 +12952,10 @@ public static final class LSTM extends OnnxOp {
         public static final String NAME = "LSTM";
 
         public enum Attribute implements OnnxAttribute {
-            layout(Integer.class, true, 0),
-            input_forget(Integer.class, true, 0),
+            layout(Long.class, true, 0L),
+            input_forget(Long.class, true, 0L),
             activation_alpha(float[].class, true, null),
-            hidden_size(Integer.class, true, null),
+            hidden_size(Long.class, true, null),
             activation_beta(float[].class, true, null),
             activations(String[].class, true, null),
             clip(Float.class, true, null),
@@ -13080,7 +13080,7 @@ public LSTM transform(CopyContext cc, OpTransformer ot) {
             return new LSTM(this, cc);
         }
 
-        LSTM(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, Value W, Value R, java.util.Optional<Value> B, java.util.Optional<Value> sequence_lens, java.util.Optional<Value> initial_h, java.util.Optional<Value> initial_c, java.util.Optional<Value> P, java.util.Optional<Integer> layout, java.util.Optional<Integer> input_forget, java.util.Optional<float[]> activation_alpha, java.util.Optional<Integer> hidden_size, java.util.Optional<float[]> activation_beta, java.util.Optional<String[]> activations, java.util.Optional<Float> clip, java.util.Optional<String> direction) {
+        LSTM(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, Value W, Value R, java.util.Optional<Value> B, java.util.Optional<Value> sequence_lens, java.util.Optional<Value> initial_h, java.util.Optional<Value> initial_c, java.util.Optional<Value> P, java.util.Optional<Long> layout, java.util.Optional<Long> input_forget, java.util.Optional<float[]> activation_alpha, java.util.Optional<Long> hidden_size, java.util.Optional<float[]> activation_beta, java.util.Optional<String[]> activations, java.util.Optional<Float> clip, java.util.Optional<String> direction) {
             super(SCHEMA, resultType, optionalOutputs, List.of(X, W, R, B, sequence_lens, initial_h, initial_c, P), List.of(layout, input_forget, activation_alpha, hidden_size, activation_beta, activations, clip, direction));
         }
 
@@ -13131,13 +13131,13 @@ public java.util.Optional<Value> P() {
             return i != -1 ? java.util.Optional.of(operands().get(3 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> layout() {
-            Integer layout = Attribute.layout.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> layout() {
+            Long layout = Attribute.layout.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(layout);
         }
 
-        public java.util.Optional<Integer> input_forget() {
-            Integer input_forget = Attribute.input_forget.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> input_forget() {
+            Long input_forget = Attribute.input_forget.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(input_forget);
         }
 
@@ -13146,8 +13146,8 @@ public java.util.Optional<float[]> activation_alpha() {
             return java.util.Optional.ofNullable(activation_alpha).map(float[]::clone);
         }
 
-        public java.util.Optional<Integer> hidden_size() {
-            Integer hidden_size = Attribute.hidden_size.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> hidden_size() {
+            Long hidden_size = Attribute.hidden_size.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(hidden_size);
         }
 
@@ -13173,7 +13173,7 @@ public java.util.Optional<String> direction() {
 
     }
 
-    public static LSTM LSTM(TypeElement resultType, Set<LSTM.OutputParameter> optionalOutputs, Value X, Value W, Value R, java.util.Optional<Value> B, java.util.Optional<Value> sequence_lens, java.util.Optional<Value> initial_h, java.util.Optional<Value> initial_c, java.util.Optional<Value> P, java.util.Optional<Integer> layout, java.util.Optional<Integer> input_forget, java.util.Optional<float[]> activation_alpha, java.util.Optional<Integer> hidden_size, java.util.Optional<float[]> activation_beta, java.util.Optional<String[]> activations, java.util.Optional<Float> clip, java.util.Optional<String> direction) {
+    public static LSTM LSTM(TypeElement resultType, Set<LSTM.OutputParameter> optionalOutputs, Value X, Value W, Value R, java.util.Optional<Value> B, java.util.Optional<Value> sequence_lens, java.util.Optional<Value> initial_h, java.util.Optional<Value> initial_c, java.util.Optional<Value> P, java.util.Optional<Long> layout, java.util.Optional<Long> input_forget, java.util.Optional<float[]> activation_alpha, java.util.Optional<Long> hidden_size, java.util.Optional<float[]> activation_beta, java.util.Optional<String[]> activations, java.util.Optional<Float> clip, java.util.Optional<String> direction) {
         return new LSTM(resultType, optionalOutputs, X, W, R, B, sequence_lens, initial_h, initial_c, P, layout, input_forget, activation_alpha, hidden_size, activation_beta, activations, clip, direction);
     }
 
@@ -13183,15 +13183,15 @@ public static final class LabelEncoder extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             values_strings(String[].class, true, null),
-            keys_int64s(int[].class, true, null),
+            keys_int64s(long[].class, true, null),
             keys_tensor(byte[].class, true, null),
             keys_strings(String[].class, true, null),
             default_float(Float.class, true, -0.0f),
             keys_floats(float[].class, true, null),
             default_tensor(byte[].class, true, null),
-            default_int64(Integer.class, true, -1),
+            default_int64(Long.class, true, -1L),
             values_tensor(byte[].class, true, null),
-            values_int64s(int[].class, true, null),
+            values_int64s(long[].class, true, null),
             default_string(String.class, true, "_Unused"),
             values_floats(float[].class, true, null),
             ;
@@ -13305,7 +13305,7 @@ public LabelEncoder transform(CopyContext cc, OpTransformer ot) {
             return new LabelEncoder(this, cc);
         }
 
-        LabelEncoder(TypeElement resultType, Value X, java.util.Optional<String[]> values_strings, java.util.Optional<int[]> keys_int64s, java.util.Optional<byte[]> keys_tensor, java.util.Optional<String[]> keys_strings, java.util.Optional<Float> default_float, java.util.Optional<float[]> keys_floats, java.util.Optional<byte[]> default_tensor, java.util.Optional<Integer> default_int64, java.util.Optional<byte[]> values_tensor, java.util.Optional<int[]> values_int64s, java.util.Optional<String> default_string, java.util.Optional<float[]> values_floats) {
+        LabelEncoder(TypeElement resultType, Value X, java.util.Optional<String[]> values_strings, java.util.Optional<long[]> keys_int64s, java.util.Optional<byte[]> keys_tensor, java.util.Optional<String[]> keys_strings, java.util.Optional<Float> default_float, java.util.Optional<float[]> keys_floats, java.util.Optional<byte[]> default_tensor, java.util.Optional<Long> default_int64, java.util.Optional<byte[]> values_tensor, java.util.Optional<long[]> values_int64s, java.util.Optional<String> default_string, java.util.Optional<float[]> values_floats) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(values_strings, keys_int64s, keys_tensor, keys_strings, default_float, keys_floats, default_tensor, default_int64, values_tensor, values_int64s, default_string, values_floats));
         }
 
@@ -13328,9 +13328,9 @@ public java.util.Optional<String[]> values_strings() {
             return java.util.Optional.ofNullable(values_strings).map(String[]::clone);
         }
 
-        public java.util.Optional<int[]> keys_int64s() {
-            int[] keys_int64s = Attribute.keys_int64s.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(keys_int64s).map(int[]::clone);
+        public java.util.Optional<long[]> keys_int64s() {
+            long[] keys_int64s = Attribute.keys_int64s.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(keys_int64s).map(long[]::clone);
         }
 
         public java.util.Optional<byte[]> keys_tensor() {
@@ -13358,8 +13358,8 @@ public java.util.Optional<byte[]> default_tensor() {
             return java.util.Optional.ofNullable(default_tensor).map(byte[]::clone);
         }
 
-        public java.util.Optional<Integer> default_int64() {
-            Integer default_int64 = Attribute.default_int64.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> default_int64() {
+            Long default_int64 = Attribute.default_int64.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(default_int64);
         }
 
@@ -13368,9 +13368,9 @@ public java.util.Optional<byte[]> values_tensor() {
             return java.util.Optional.ofNullable(values_tensor).map(byte[]::clone);
         }
 
-        public java.util.Optional<int[]> values_int64s() {
-            int[] values_int64s = Attribute.values_int64s.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(values_int64s).map(int[]::clone);
+        public java.util.Optional<long[]> values_int64s() {
+            long[] values_int64s = Attribute.values_int64s.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(values_int64s).map(long[]::clone);
         }
 
         public java.util.Optional<String> default_string() {
@@ -13385,7 +13385,7 @@ public java.util.Optional<float[]> values_floats() {
 
     }
 
-    public static LabelEncoder LabelEncoder(TypeElement resultType, Value X, java.util.Optional<String[]> values_strings, java.util.Optional<int[]> keys_int64s, java.util.Optional<byte[]> keys_tensor, java.util.Optional<String[]> keys_strings, java.util.Optional<Float> default_float, java.util.Optional<float[]> keys_floats, java.util.Optional<byte[]> default_tensor, java.util.Optional<Integer> default_int64, java.util.Optional<byte[]> values_tensor, java.util.Optional<int[]> values_int64s, java.util.Optional<String> default_string, java.util.Optional<float[]> values_floats) {
+    public static LabelEncoder LabelEncoder(TypeElement resultType, Value X, java.util.Optional<String[]> values_strings, java.util.Optional<long[]> keys_int64s, java.util.Optional<byte[]> keys_tensor, java.util.Optional<String[]> keys_strings, java.util.Optional<Float> default_float, java.util.Optional<float[]> keys_floats, java.util.Optional<byte[]> default_tensor, java.util.Optional<Long> default_int64, java.util.Optional<byte[]> values_tensor, java.util.Optional<long[]> values_int64s, java.util.Optional<String> default_string, java.util.Optional<float[]> values_floats) {
         return new LabelEncoder(resultType, X, values_strings, keys_int64s, keys_tensor, keys_strings, default_float, keys_floats, default_tensor, default_int64, values_tensor, values_int64s, default_string, values_floats);
     }
 
@@ -13395,8 +13395,8 @@ public static final class LayerNormalization extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             epsilon(Float.class, true, 1.0E-5f),
-            stash_type(Integer.class, true, 1),
-            axis(Integer.class, true, -1),
+            stash_type(Long.class, true, 1L),
+            axis(Long.class, true, -1L),
             ;
 
                 final Class<?> t;
@@ -13512,7 +13512,7 @@ public LayerNormalization transform(CopyContext cc, OpTransformer ot) {
             return new LayerNormalization(this, cc);
         }
 
-        LayerNormalization(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, Value Scale, java.util.Optional<Value> B, java.util.Optional<Float> epsilon, java.util.Optional<Integer> stash_type, java.util.Optional<Integer> axis) {
+        LayerNormalization(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, Value Scale, java.util.Optional<Value> B, java.util.Optional<Float> epsilon, java.util.Optional<Long> stash_type, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, optionalOutputs, List.of(X, Scale, B), List.of(epsilon, stash_type, axis));
         }
 
@@ -13544,19 +13544,19 @@ public java.util.Optional<Float> epsilon() {
             return java.util.Optional.ofNullable(epsilon);
         }
 
-        public java.util.Optional<Integer> stash_type() {
-            Integer stash_type = Attribute.stash_type.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> stash_type() {
+            Long stash_type = Attribute.stash_type.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(stash_type);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static LayerNormalization LayerNormalization(TypeElement resultType, Set<LayerNormalization.OutputParameter> optionalOutputs, Value X, Value Scale, java.util.Optional<Value> B, java.util.Optional<Float> epsilon, java.util.Optional<Integer> stash_type, java.util.Optional<Integer> axis) {
+    public static LayerNormalization LayerNormalization(TypeElement resultType, Set<LayerNormalization.OutputParameter> optionalOutputs, Value X, Value Scale, java.util.Optional<Value> B, java.util.Optional<Float> epsilon, java.util.Optional<Long> stash_type, java.util.Optional<Long> axis) {
         return new LayerNormalization(resultType, optionalOutputs, X, Scale, B, epsilon, stash_type, axis);
     }
 
@@ -13950,10 +13950,10 @@ public static final class LinearClassifier extends OnnxOp {
         public static final String NAME = "LinearClassifier";
 
         public enum Attribute implements OnnxAttribute {
-            classlabels_ints(int[].class, true, null),
+            classlabels_ints(long[].class, true, null),
             post_transform(String.class, true, "NONE"),
             coefficients(float[].class, false, null),
-            multi_class(Integer.class, true, 0),
+            multi_class(Long.class, true, 0L),
             intercepts(float[].class, true, null),
             classlabels_strings(String[].class, true, null),
             ;
@@ -14068,7 +14068,7 @@ public LinearClassifier transform(CopyContext cc, OpTransformer ot) {
             return new LinearClassifier(this, cc);
         }
 
-        LinearClassifier(TypeElement resultType, Value X, java.util.Optional<int[]> classlabels_ints, java.util.Optional<String> post_transform, float[] coefficients, java.util.Optional<Integer> multi_class, java.util.Optional<float[]> intercepts, java.util.Optional<String[]> classlabels_strings) {
+        LinearClassifier(TypeElement resultType, Value X, java.util.Optional<long[]> classlabels_ints, java.util.Optional<String> post_transform, float[] coefficients, java.util.Optional<Long> multi_class, java.util.Optional<float[]> intercepts, java.util.Optional<String[]> classlabels_strings) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(classlabels_ints, post_transform, coefficients, multi_class, intercepts, classlabels_strings));
         }
 
@@ -14086,9 +14086,9 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<int[]> classlabels_ints() {
-            int[] classlabels_ints = Attribute.classlabels_ints.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(classlabels_ints).map(int[]::clone);
+        public java.util.Optional<long[]> classlabels_ints() {
+            long[] classlabels_ints = Attribute.classlabels_ints.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(classlabels_ints).map(long[]::clone);
         }
 
         public java.util.Optional<String> post_transform() {
@@ -14101,8 +14101,8 @@ public float[] coefficients() {
             return coefficients.clone();
         }
 
-        public java.util.Optional<Integer> multi_class() {
-            Integer multi_class = Attribute.multi_class.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> multi_class() {
+            Long multi_class = Attribute.multi_class.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(multi_class);
         }
 
@@ -14118,7 +14118,7 @@ public java.util.Optional<String[]> classlabels_strings() {
 
     }
 
-    public static LinearClassifier LinearClassifier(TypeElement resultType, Value X, java.util.Optional<int[]> classlabels_ints, java.util.Optional<String> post_transform, float[] coefficients, java.util.Optional<Integer> multi_class, java.util.Optional<float[]> intercepts, java.util.Optional<String[]> classlabels_strings) {
+    public static LinearClassifier LinearClassifier(TypeElement resultType, Value X, java.util.Optional<long[]> classlabels_ints, java.util.Optional<String> post_transform, float[] coefficients, java.util.Optional<Long> multi_class, java.util.Optional<float[]> intercepts, java.util.Optional<String[]> classlabels_strings) {
         return new LinearClassifier(resultType, X, classlabels_ints, post_transform, coefficients, multi_class, intercepts, classlabels_strings);
     }
 
@@ -14129,7 +14129,7 @@ public static final class LinearRegressor extends OnnxOp {
         public enum Attribute implements OnnxAttribute {
             post_transform(String.class, true, "NONE"),
             coefficients(float[].class, true, null),
-            targets(Integer.class, true, 1),
+            targets(Long.class, true, 1L),
             intercepts(float[].class, true, null),
             ;
 
@@ -14241,7 +14241,7 @@ public LinearRegressor transform(CopyContext cc, OpTransformer ot) {
             return new LinearRegressor(this, cc);
         }
 
-        LinearRegressor(TypeElement resultType, Value X, java.util.Optional<String> post_transform, java.util.Optional<float[]> coefficients, java.util.Optional<Integer> targets, java.util.Optional<float[]> intercepts) {
+        LinearRegressor(TypeElement resultType, Value X, java.util.Optional<String> post_transform, java.util.Optional<float[]> coefficients, java.util.Optional<Long> targets, java.util.Optional<float[]> intercepts) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(post_transform, coefficients, targets, intercepts));
         }
 
@@ -14269,8 +14269,8 @@ public java.util.Optional<float[]> coefficients() {
             return java.util.Optional.ofNullable(coefficients).map(float[]::clone);
         }
 
-        public java.util.Optional<Integer> targets() {
-            Integer targets = Attribute.targets.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> targets() {
+            Long targets = Attribute.targets.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(targets);
         }
 
@@ -14281,7 +14281,7 @@ public java.util.Optional<float[]> intercepts() {
 
     }
 
-    public static LinearRegressor LinearRegressor(TypeElement resultType, Value X, java.util.Optional<String> post_transform, java.util.Optional<float[]> coefficients, java.util.Optional<Integer> targets, java.util.Optional<float[]> intercepts) {
+    public static LinearRegressor LinearRegressor(TypeElement resultType, Value X, java.util.Optional<String> post_transform, java.util.Optional<float[]> coefficients, java.util.Optional<Long> targets, java.util.Optional<float[]> intercepts) {
         return new LinearRegressor(resultType, X, post_transform, coefficients, targets, intercepts);
     }
 
@@ -14404,7 +14404,7 @@ public static final class LogSoftmax extends OnnxOp {
         public static final String NAME = "LogSoftmax";
 
         public enum Attribute implements OnnxAttribute {
-            axis(Integer.class, true, -1),
+            axis(Long.class, true, -1L),
             ;
 
                 final Class<?> t;
@@ -14515,7 +14515,7 @@ public LogSoftmax transform(CopyContext cc, OpTransformer ot) {
             return new LogSoftmax(this, cc);
         }
 
-        LogSoftmax(TypeElement resultType, Value input, java.util.Optional<Integer> axis) {
+        LogSoftmax(TypeElement resultType, Value input, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(axis));
         }
 
@@ -14533,14 +14533,14 @@ public Value input() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static LogSoftmax LogSoftmax(TypeElement resultType, Value input, java.util.Optional<Integer> axis) {
+    public static LogSoftmax LogSoftmax(TypeElement resultType, Value input, java.util.Optional<Long> axis) {
         return new LogSoftmax(resultType, input, axis);
     }
 
@@ -14549,8 +14549,8 @@ public static final class LpNormalization extends OnnxOp {
         public static final String NAME = "LpNormalization";
 
         public enum Attribute implements OnnxAttribute {
-            p(Integer.class, true, 2),
-            axis(Integer.class, true, -1),
+            p(Long.class, true, 2L),
+            axis(Long.class, true, -1L),
             ;
 
                 final Class<?> t;
@@ -14661,7 +14661,7 @@ public LpNormalization transform(CopyContext cc, OpTransformer ot) {
             return new LpNormalization(this, cc);
         }
 
-        LpNormalization(TypeElement resultType, Value input, java.util.Optional<Integer> p, java.util.Optional<Integer> axis) {
+        LpNormalization(TypeElement resultType, Value input, java.util.Optional<Long> p, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(p, axis));
         }
 
@@ -14679,19 +14679,19 @@ public Value input() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> p() {
-            Integer p = Attribute.p.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> p() {
+            Long p = Attribute.p.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(p);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static LpNormalization LpNormalization(TypeElement resultType, Value input, java.util.Optional<Integer> p, java.util.Optional<Integer> axis) {
+    public static LpNormalization LpNormalization(TypeElement resultType, Value input, java.util.Optional<Long> p, java.util.Optional<Long> axis) {
         return new LpNormalization(resultType, input, p, axis);
     }
 
@@ -14700,13 +14700,13 @@ public static final class LpPool extends OnnxOp {
         public static final String NAME = "LpPool";
 
         public enum Attribute implements OnnxAttribute {
-            p(Integer.class, true, 2),
-            pads(int[].class, true, null),
-            dilations(int[].class, true, null),
+            p(Long.class, true, 2L),
+            pads(long[].class, true, null),
+            dilations(long[].class, true, null),
             auto_pad(String.class, true, "NOTSET"),
-            ceil_mode(Integer.class, true, 0),
-            strides(int[].class, true, null),
-            kernel_shape(int[].class, false, null),
+            ceil_mode(Long.class, true, 0L),
+            strides(long[].class, true, null),
+            kernel_shape(long[].class, false, null),
             ;
 
                 final Class<?> t;
@@ -14817,7 +14817,7 @@ public LpPool transform(CopyContext cc, OpTransformer ot) {
             return new LpPool(this, cc);
         }
 
-        LpPool(TypeElement resultType, Value X, java.util.Optional<Integer> p, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<Integer> ceil_mode, java.util.Optional<int[]> strides, int[] kernel_shape) {
+        LpPool(TypeElement resultType, Value X, java.util.Optional<Long> p, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<Long> ceil_mode, java.util.Optional<long[]> strides, long[] kernel_shape) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(p, pads, dilations, auto_pad, ceil_mode, strides, kernel_shape));
         }
 
@@ -14835,19 +14835,19 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> p() {
-            Integer p = Attribute.p.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> p() {
+            Long p = Attribute.p.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(p);
         }
 
-        public java.util.Optional<int[]> pads() {
-            int[] pads = Attribute.pads.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(pads).map(int[]::clone);
+        public java.util.Optional<long[]> pads() {
+            long[] pads = Attribute.pads.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(pads).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> dilations() {
-            int[] dilations = Attribute.dilations.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(dilations).map(int[]::clone);
+        public java.util.Optional<long[]> dilations() {
+            long[] dilations = Attribute.dilations.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(dilations).map(long[]::clone);
         }
 
         public java.util.Optional<String> auto_pad() {
@@ -14855,24 +14855,24 @@ public java.util.Optional<String> auto_pad() {
             return java.util.Optional.ofNullable(auto_pad);
         }
 
-        public java.util.Optional<Integer> ceil_mode() {
-            Integer ceil_mode = Attribute.ceil_mode.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> ceil_mode() {
+            Long ceil_mode = Attribute.ceil_mode.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(ceil_mode);
         }
 
-        public java.util.Optional<int[]> strides() {
-            int[] strides = Attribute.strides.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(strides).map(int[]::clone);
+        public java.util.Optional<long[]> strides() {
+            long[] strides = Attribute.strides.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(strides).map(long[]::clone);
         }
 
-        public int[] kernel_shape() {
-            int[] kernel_shape = Attribute.kernel_shape.access(int[].class, onnxAttributes);
+        public long[] kernel_shape() {
+            long[] kernel_shape = Attribute.kernel_shape.access(long[].class, onnxAttributes);
             return kernel_shape.clone();
         }
 
     }
 
-    public static LpPool LpPool(TypeElement resultType, Value X, java.util.Optional<Integer> p, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<Integer> ceil_mode, java.util.Optional<int[]> strides, int[] kernel_shape) {
+    public static LpPool LpPool(TypeElement resultType, Value X, java.util.Optional<Long> p, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<Long> ceil_mode, java.util.Optional<long[]> strides, long[] kernel_shape) {
         return new LpPool(resultType, X, p, pads, dilations, auto_pad, ceil_mode, strides, kernel_shape);
     }
 
@@ -15247,13 +15247,13 @@ public static final class MaxPool extends OnnxOp {
         public static final String NAME = "MaxPool";
 
         public enum Attribute implements OnnxAttribute {
-            pads(int[].class, true, null),
-            dilations(int[].class, true, null),
+            pads(long[].class, true, null),
+            dilations(long[].class, true, null),
             auto_pad(String.class, true, "NOTSET"),
-            ceil_mode(Integer.class, true, 0),
-            storage_order(Integer.class, true, 0),
-            strides(int[].class, true, null),
-            kernel_shape(int[].class, false, null),
+            ceil_mode(Long.class, true, 0L),
+            storage_order(Long.class, true, 0L),
+            strides(long[].class, true, null),
+            kernel_shape(long[].class, false, null),
             ;
 
                 final Class<?> t;
@@ -15366,7 +15366,7 @@ public MaxPool transform(CopyContext cc, OpTransformer ot) {
             return new MaxPool(this, cc);
         }
 
-        MaxPool(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<Integer> ceil_mode, java.util.Optional<Integer> storage_order, java.util.Optional<int[]> strides, int[] kernel_shape) {
+        MaxPool(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<Long> ceil_mode, java.util.Optional<Long> storage_order, java.util.Optional<long[]> strides, long[] kernel_shape) {
             super(SCHEMA, resultType, optionalOutputs, List.of(X), List.of(pads, dilations, auto_pad, ceil_mode, storage_order, strides, kernel_shape));
         }
 
@@ -15384,14 +15384,14 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<int[]> pads() {
-            int[] pads = Attribute.pads.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(pads).map(int[]::clone);
+        public java.util.Optional<long[]> pads() {
+            long[] pads = Attribute.pads.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(pads).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> dilations() {
-            int[] dilations = Attribute.dilations.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(dilations).map(int[]::clone);
+        public java.util.Optional<long[]> dilations() {
+            long[] dilations = Attribute.dilations.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(dilations).map(long[]::clone);
         }
 
         public java.util.Optional<String> auto_pad() {
@@ -15399,29 +15399,29 @@ public java.util.Optional<String> auto_pad() {
             return java.util.Optional.ofNullable(auto_pad);
         }
 
-        public java.util.Optional<Integer> ceil_mode() {
-            Integer ceil_mode = Attribute.ceil_mode.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> ceil_mode() {
+            Long ceil_mode = Attribute.ceil_mode.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(ceil_mode);
         }
 
-        public java.util.Optional<Integer> storage_order() {
-            Integer storage_order = Attribute.storage_order.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> storage_order() {
+            Long storage_order = Attribute.storage_order.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(storage_order);
         }
 
-        public java.util.Optional<int[]> strides() {
-            int[] strides = Attribute.strides.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(strides).map(int[]::clone);
+        public java.util.Optional<long[]> strides() {
+            long[] strides = Attribute.strides.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(strides).map(long[]::clone);
         }
 
-        public int[] kernel_shape() {
-            int[] kernel_shape = Attribute.kernel_shape.access(int[].class, onnxAttributes);
+        public long[] kernel_shape() {
+            long[] kernel_shape = Attribute.kernel_shape.access(long[].class, onnxAttributes);
             return kernel_shape.clone();
         }
 
     }
 
-    public static MaxPool MaxPool(TypeElement resultType, Set<MaxPool.OutputParameter> optionalOutputs, Value X, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<Integer> ceil_mode, java.util.Optional<Integer> storage_order, java.util.Optional<int[]> strides, int[] kernel_shape) {
+    public static MaxPool MaxPool(TypeElement resultType, Set<MaxPool.OutputParameter> optionalOutputs, Value X, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<Long> ceil_mode, java.util.Optional<Long> storage_order, java.util.Optional<long[]> strides, long[] kernel_shape) {
         return new MaxPool(resultType, optionalOutputs, X, pads, dilations, auto_pad, ceil_mode, storage_order, strides, kernel_shape);
     }
 
@@ -15431,7 +15431,7 @@ public static final class MaxRoiPool extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             spatial_scale(Float.class, true, 1.0f),
-            pooled_shape(int[].class, false, null),
+            pooled_shape(long[].class, false, null),
             ;
 
                 final Class<?> t;
@@ -15543,7 +15543,7 @@ public MaxRoiPool transform(CopyContext cc, OpTransformer ot) {
             return new MaxRoiPool(this, cc);
         }
 
-        MaxRoiPool(TypeElement resultType, Value X, Value rois, java.util.Optional<Float> spatial_scale, int[] pooled_shape) {
+        MaxRoiPool(TypeElement resultType, Value X, Value rois, java.util.Optional<Float> spatial_scale, long[] pooled_shape) {
             super(SCHEMA, resultType, Set.of(), List.of(X, rois), List.of(spatial_scale, pooled_shape));
         }
 
@@ -15570,14 +15570,14 @@ public java.util.Optional<Float> spatial_scale() {
             return java.util.Optional.ofNullable(spatial_scale);
         }
 
-        public int[] pooled_shape() {
-            int[] pooled_shape = Attribute.pooled_shape.access(int[].class, onnxAttributes);
+        public long[] pooled_shape() {
+            long[] pooled_shape = Attribute.pooled_shape.access(long[].class, onnxAttributes);
             return pooled_shape.clone();
         }
 
     }
 
-    public static MaxRoiPool MaxRoiPool(TypeElement resultType, Value X, Value rois, java.util.Optional<Float> spatial_scale, int[] pooled_shape) {
+    public static MaxRoiPool MaxRoiPool(TypeElement resultType, Value X, Value rois, java.util.Optional<Float> spatial_scale, long[] pooled_shape) {
         return new MaxRoiPool(resultType, X, rois, spatial_scale, pooled_shape);
     }
 
@@ -15586,9 +15586,9 @@ public static final class MaxUnpool extends OnnxOp {
         public static final String NAME = "MaxUnpool";
 
         public enum Attribute implements OnnxAttribute {
-            pads(int[].class, true, null),
-            strides(int[].class, true, null),
-            kernel_shape(int[].class, false, null),
+            pads(long[].class, true, null),
+            strides(long[].class, true, null),
+            kernel_shape(long[].class, false, null),
             ;
 
                 final Class<?> t;
@@ -15702,7 +15702,7 @@ public MaxUnpool transform(CopyContext cc, OpTransformer ot) {
             return new MaxUnpool(this, cc);
         }
 
-        MaxUnpool(TypeElement resultType, Value X, Value I, java.util.Optional<Value> output_shape, java.util.Optional<int[]> pads, java.util.Optional<int[]> strides, int[] kernel_shape) {
+        MaxUnpool(TypeElement resultType, Value X, Value I, java.util.Optional<Value> output_shape, java.util.Optional<long[]> pads, java.util.Optional<long[]> strides, long[] kernel_shape) {
             super(SCHEMA, resultType, Set.of(), List.of(X, I, output_shape), List.of(pads, strides, kernel_shape));
         }
 
@@ -15729,24 +15729,24 @@ public java.util.Optional<Value> output_shape() {
             return i != -1 ? java.util.Optional.of(operands().get(2 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<int[]> pads() {
-            int[] pads = Attribute.pads.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(pads).map(int[]::clone);
+        public java.util.Optional<long[]> pads() {
+            long[] pads = Attribute.pads.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(pads).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> strides() {
-            int[] strides = Attribute.strides.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(strides).map(int[]::clone);
+        public java.util.Optional<long[]> strides() {
+            long[] strides = Attribute.strides.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(strides).map(long[]::clone);
         }
 
-        public int[] kernel_shape() {
-            int[] kernel_shape = Attribute.kernel_shape.access(int[].class, onnxAttributes);
+        public long[] kernel_shape() {
+            long[] kernel_shape = Attribute.kernel_shape.access(long[].class, onnxAttributes);
             return kernel_shape.clone();
         }
 
     }
 
-    public static MaxUnpool MaxUnpool(TypeElement resultType, Value X, Value I, java.util.Optional<Value> output_shape, java.util.Optional<int[]> pads, java.util.Optional<int[]> strides, int[] kernel_shape) {
+    public static MaxUnpool MaxUnpool(TypeElement resultType, Value X, Value I, java.util.Optional<Value> output_shape, java.util.Optional<long[]> pads, java.util.Optional<long[]> strides, long[] kernel_shape) {
         return new MaxUnpool(resultType, X, I, output_shape, pads, strides, kernel_shape);
     }
 
@@ -15869,7 +15869,7 @@ public static final class MeanVarianceNormalization extends OnnxOp {
         public static final String NAME = "MeanVarianceNormalization";
 
         public enum Attribute implements OnnxAttribute {
-            axes(int[].class, true, null),
+            axes(long[].class, true, null),
             ;
 
                 final Class<?> t;
@@ -15980,7 +15980,7 @@ public MeanVarianceNormalization transform(CopyContext cc, OpTransformer ot) {
             return new MeanVarianceNormalization(this, cc);
         }
 
-        MeanVarianceNormalization(TypeElement resultType, Value X, java.util.Optional<int[]> axes) {
+        MeanVarianceNormalization(TypeElement resultType, Value X, java.util.Optional<long[]> axes) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(axes));
         }
 
@@ -15998,14 +15998,14 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<int[]> axes() {
-            int[] axes = Attribute.axes.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(axes).map(int[]::clone);
+        public java.util.Optional<long[]> axes() {
+            long[] axes = Attribute.axes.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(axes).map(long[]::clone);
         }
 
     }
 
-    public static MeanVarianceNormalization MeanVarianceNormalization(TypeElement resultType, Value X, java.util.Optional<int[]> axes) {
+    public static MeanVarianceNormalization MeanVarianceNormalization(TypeElement resultType, Value X, java.util.Optional<long[]> axes) {
         return new MeanVarianceNormalization(resultType, X, axes);
     }
 
@@ -16014,7 +16014,7 @@ public static final class MelWeightMatrix extends OnnxOp {
         public static final String NAME = "MelWeightMatrix";
 
         public enum Attribute implements OnnxAttribute {
-            output_datatype(Integer.class, true, 1),
+            output_datatype(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -16131,7 +16131,7 @@ public MelWeightMatrix transform(CopyContext cc, OpTransformer ot) {
             return new MelWeightMatrix(this, cc);
         }
 
-        MelWeightMatrix(TypeElement resultType, Value num_mel_bins, Value dft_length, Value sample_rate, Value lower_edge_hertz, Value upper_edge_hertz, java.util.Optional<Integer> output_datatype) {
+        MelWeightMatrix(TypeElement resultType, Value num_mel_bins, Value dft_length, Value sample_rate, Value lower_edge_hertz, Value upper_edge_hertz, java.util.Optional<Long> output_datatype) {
             super(SCHEMA, resultType, Set.of(), List.of(num_mel_bins, dft_length, sample_rate, lower_edge_hertz, upper_edge_hertz), List.of(output_datatype));
         }
 
@@ -16165,14 +16165,14 @@ public Value upper_edge_hertz() {
             return operands().get(4);
         }
 
-        public java.util.Optional<Integer> output_datatype() {
-            Integer output_datatype = Attribute.output_datatype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> output_datatype() {
+            Long output_datatype = Attribute.output_datatype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(output_datatype);
         }
 
     }
 
-    public static MelWeightMatrix MelWeightMatrix(TypeElement resultType, Value num_mel_bins, Value dft_length, Value sample_rate, Value lower_edge_hertz, Value upper_edge_hertz, java.util.Optional<Integer> output_datatype) {
+    public static MelWeightMatrix MelWeightMatrix(TypeElement resultType, Value num_mel_bins, Value dft_length, Value sample_rate, Value lower_edge_hertz, Value upper_edge_hertz, java.util.Optional<Long> output_datatype) {
         return new MelWeightMatrix(resultType, num_mel_bins, dft_length, sample_rate, lower_edge_hertz, upper_edge_hertz, output_datatype);
     }
 
@@ -16409,7 +16409,7 @@ public static final class Mod extends OnnxOp {
         public static final String NAME = "Mod";
 
         public enum Attribute implements OnnxAttribute {
-            fmod(Integer.class, true, 0),
+            fmod(Long.class, true, 0L),
             ;
 
                 final Class<?> t;
@@ -16521,7 +16521,7 @@ public Mod transform(CopyContext cc, OpTransformer ot) {
             return new Mod(this, cc);
         }
 
-        Mod(TypeElement resultType, Value A, Value B, java.util.Optional<Integer> fmod) {
+        Mod(TypeElement resultType, Value A, Value B, java.util.Optional<Long> fmod) {
             super(SCHEMA, resultType, Set.of(), List.of(A, B), List.of(fmod));
         }
 
@@ -16543,14 +16543,14 @@ public Value B() {
             return operands().get(1);
         }
 
-        public java.util.Optional<Integer> fmod() {
-            Integer fmod = Attribute.fmod.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> fmod() {
+            Long fmod = Attribute.fmod.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(fmod);
         }
 
     }
 
-    public static Mod Mod(TypeElement resultType, Value A, Value B, java.util.Optional<Integer> fmod) {
+    public static Mod Mod(TypeElement resultType, Value A, Value B, java.util.Optional<Long> fmod) {
         return new Mod(resultType, A, B, fmod);
     }
 
@@ -16854,8 +16854,8 @@ public static final class Multinomial extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             seed(Float.class, true, null),
-            sample_size(Integer.class, true, 1),
-            dtype(Integer.class, true, 6),
+            sample_size(Long.class, true, 1L),
+            dtype(Long.class, true, 6L),
             ;
 
                 final Class<?> t;
@@ -16967,7 +16967,7 @@ public Multinomial transform(CopyContext cc, OpTransformer ot) {
             return new Multinomial(this, cc);
         }
 
-        Multinomial(TypeElement resultType, Value input, java.util.Optional<Float> seed, java.util.Optional<Integer> sample_size, java.util.Optional<Integer> dtype) {
+        Multinomial(TypeElement resultType, Value input, java.util.Optional<Float> seed, java.util.Optional<Long> sample_size, java.util.Optional<Long> dtype) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(seed, sample_size, dtype));
         }
 
@@ -16990,19 +16990,19 @@ public java.util.Optional<Float> seed() {
             return java.util.Optional.ofNullable(seed);
         }
 
-        public java.util.Optional<Integer> sample_size() {
-            Integer sample_size = Attribute.sample_size.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> sample_size() {
+            Long sample_size = Attribute.sample_size.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(sample_size);
         }
 
-        public java.util.Optional<Integer> dtype() {
-            Integer dtype = Attribute.dtype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> dtype() {
+            Long dtype = Attribute.dtype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(dtype);
         }
 
     }
 
-    public static Multinomial Multinomial(TypeElement resultType, Value input, java.util.Optional<Float> seed, java.util.Optional<Integer> sample_size, java.util.Optional<Integer> dtype) {
+    public static Multinomial Multinomial(TypeElement resultType, Value input, java.util.Optional<Float> seed, java.util.Optional<Long> sample_size, java.util.Optional<Long> dtype) {
         return new Multinomial(resultType, input, seed, sample_size, dtype);
     }
 
@@ -17125,7 +17125,7 @@ public static final class NegativeLogLikelihoodLoss extends OnnxOp {
         public static final String NAME = "NegativeLogLikelihoodLoss";
 
         public enum Attribute implements OnnxAttribute {
-            ignore_index(Integer.class, true, null),
+            ignore_index(Long.class, true, null),
             reduction(String.class, true, "mean"),
             ;
 
@@ -17240,7 +17240,7 @@ public NegativeLogLikelihoodLoss transform(CopyContext cc, OpTransformer ot) {
             return new NegativeLogLikelihoodLoss(this, cc);
         }
 
-        NegativeLogLikelihoodLoss(TypeElement resultType, Value input, Value target, java.util.Optional<Value> weight, java.util.Optional<Integer> ignore_index, java.util.Optional<String> reduction) {
+        NegativeLogLikelihoodLoss(TypeElement resultType, Value input, Value target, java.util.Optional<Value> weight, java.util.Optional<Long> ignore_index, java.util.Optional<String> reduction) {
             super(SCHEMA, resultType, Set.of(), List.of(input, target, weight), List.of(ignore_index, reduction));
         }
 
@@ -17267,8 +17267,8 @@ public java.util.Optional<Value> weight() {
             return i != -1 ? java.util.Optional.of(operands().get(2 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> ignore_index() {
-            Integer ignore_index = Attribute.ignore_index.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> ignore_index() {
+            Long ignore_index = Attribute.ignore_index.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(ignore_index);
         }
 
@@ -17279,7 +17279,7 @@ public java.util.Optional<String> reduction() {
 
     }
 
-    public static NegativeLogLikelihoodLoss NegativeLogLikelihoodLoss(TypeElement resultType, Value input, Value target, java.util.Optional<Value> weight, java.util.Optional<Integer> ignore_index, java.util.Optional<String> reduction) {
+    public static NegativeLogLikelihoodLoss NegativeLogLikelihoodLoss(TypeElement resultType, Value input, Value target, java.util.Optional<Value> weight, java.util.Optional<Long> ignore_index, java.util.Optional<String> reduction) {
         return new NegativeLogLikelihoodLoss(resultType, input, target, weight, ignore_index, reduction);
     }
 
@@ -17288,7 +17288,7 @@ public static final class NonMaxSuppression extends OnnxOp {
         public static final String NAME = "NonMaxSuppression";
 
         public enum Attribute implements OnnxAttribute {
-            center_point_box(Integer.class, true, 0),
+            center_point_box(Long.class, true, 0L),
             ;
 
                 final Class<?> t;
@@ -17388,7 +17388,7 @@ public NonMaxSuppression transform(CopyContext cc, OpTransformer ot) {
             return new NonMaxSuppression(this, cc);
         }
 
-        NonMaxSuppression(TypeElement resultType, Value boxes, Value scores, java.util.Optional<Value> max_output_boxes_per_class, java.util.Optional<Value> iou_threshold, java.util.Optional<Value> score_threshold, java.util.Optional<Integer> center_point_box) {
+        NonMaxSuppression(TypeElement resultType, Value boxes, Value scores, java.util.Optional<Value> max_output_boxes_per_class, java.util.Optional<Value> iou_threshold, java.util.Optional<Value> score_threshold, java.util.Optional<Long> center_point_box) {
             super(SCHEMA, resultType, Set.of(), List.of(boxes, scores, max_output_boxes_per_class, iou_threshold, score_threshold), List.of(center_point_box));
         }
 
@@ -17425,14 +17425,14 @@ public java.util.Optional<Value> score_threshold() {
             return i != -1 ? java.util.Optional.of(operands().get(2 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> center_point_box() {
-            Integer center_point_box = Attribute.center_point_box.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> center_point_box() {
+            Long center_point_box = Attribute.center_point_box.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(center_point_box);
         }
 
     }
 
-    public static NonMaxSuppression NonMaxSuppression(TypeElement resultType, Value boxes, Value scores, java.util.Optional<Value> max_output_boxes_per_class, java.util.Optional<Value> iou_threshold, java.util.Optional<Value> score_threshold, java.util.Optional<Integer> center_point_box) {
+    public static NonMaxSuppression NonMaxSuppression(TypeElement resultType, Value boxes, Value scores, java.util.Optional<Value> max_output_boxes_per_class, java.util.Optional<Value> iou_threshold, java.util.Optional<Value> score_threshold, java.util.Optional<Long> center_point_box) {
         return new NonMaxSuppression(resultType, boxes, scores, max_output_boxes_per_class, iou_threshold, score_threshold, center_point_box);
     }
 
@@ -17814,7 +17814,7 @@ public static final class OneHot extends OnnxOp {
         public static final String NAME = "OneHot";
 
         public enum Attribute implements OnnxAttribute {
-            axis(Integer.class, true, -1),
+            axis(Long.class, true, -1L),
             ;
 
                 final Class<?> t;
@@ -17929,7 +17929,7 @@ public OneHot transform(CopyContext cc, OpTransformer ot) {
             return new OneHot(this, cc);
         }
 
-        OneHot(TypeElement resultType, Value indices, Value depth, Value values, java.util.Optional<Integer> axis) {
+        OneHot(TypeElement resultType, Value indices, Value depth, Value values, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(indices, depth, values), List.of(axis));
         }
 
@@ -17955,14 +17955,14 @@ public Value values() {
             return operands().get(2);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static OneHot OneHot(TypeElement resultType, Value indices, Value depth, Value values, java.util.Optional<Integer> axis) {
+    public static OneHot OneHot(TypeElement resultType, Value indices, Value depth, Value values, java.util.Optional<Long> axis) {
         return new OneHot(resultType, indices, depth, values, axis);
     }
 
@@ -17972,8 +17972,8 @@ public static final class OneHotEncoder extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             cats_strings(String[].class, true, null),
-            cats_int64s(int[].class, true, null),
-            zeros(Integer.class, true, 1),
+            cats_int64s(long[].class, true, null),
+            zeros(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -18084,7 +18084,7 @@ public OneHotEncoder transform(CopyContext cc, OpTransformer ot) {
             return new OneHotEncoder(this, cc);
         }
 
-        OneHotEncoder(TypeElement resultType, Value X, java.util.Optional<String[]> cats_strings, java.util.Optional<int[]> cats_int64s, java.util.Optional<Integer> zeros) {
+        OneHotEncoder(TypeElement resultType, Value X, java.util.Optional<String[]> cats_strings, java.util.Optional<long[]> cats_int64s, java.util.Optional<Long> zeros) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(cats_strings, cats_int64s, zeros));
         }
 
@@ -18107,19 +18107,19 @@ public java.util.Optional<String[]> cats_strings() {
             return java.util.Optional.ofNullable(cats_strings).map(String[]::clone);
         }
 
-        public java.util.Optional<int[]> cats_int64s() {
-            int[] cats_int64s = Attribute.cats_int64s.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(cats_int64s).map(int[]::clone);
+        public java.util.Optional<long[]> cats_int64s() {
+            long[] cats_int64s = Attribute.cats_int64s.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(cats_int64s).map(long[]::clone);
         }
 
-        public java.util.Optional<Integer> zeros() {
-            Integer zeros = Attribute.zeros.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> zeros() {
+            Long zeros = Attribute.zeros.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(zeros);
         }
 
     }
 
-    public static OneHotEncoder OneHotEncoder(TypeElement resultType, Value X, java.util.Optional<String[]> cats_strings, java.util.Optional<int[]> cats_int64s, java.util.Optional<Integer> zeros) {
+    public static OneHotEncoder OneHotEncoder(TypeElement resultType, Value X, java.util.Optional<String[]> cats_strings, java.util.Optional<long[]> cats_int64s, java.util.Optional<Long> zeros) {
         return new OneHotEncoder(resultType, X, cats_strings, cats_int64s, zeros);
     }
 
@@ -19028,12 +19028,12 @@ public static final class QLinearConv extends OnnxOp {
         public static final String NAME = "QLinearConv";
 
         public enum Attribute implements OnnxAttribute {
-            pads(int[].class, true, null),
-            dilations(int[].class, true, null),
+            pads(long[].class, true, null),
+            dilations(long[].class, true, null),
             auto_pad(String.class, true, "NOTSET"),
-            strides(int[].class, true, null),
-            group(Integer.class, true, 1),
-            kernel_shape(int[].class, true, null),
+            strides(long[].class, true, null),
+            group(Long.class, true, 1L),
+            kernel_shape(long[].class, true, null),
             ;
 
                 final Class<?> t;
@@ -19155,7 +19155,7 @@ public QLinearConv transform(CopyContext cc, OpTransformer ot) {
             return new QLinearConv(this, cc);
         }
 
-        QLinearConv(TypeElement resultType, Value x, Value x_scale, Value x_zero_point, Value w, Value w_scale, Value w_zero_point, Value y_scale, Value y_zero_point, java.util.Optional<Value> B, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<int[]> strides, java.util.Optional<Integer> group, java.util.Optional<int[]> kernel_shape) {
+        QLinearConv(TypeElement resultType, Value x, Value x_scale, Value x_zero_point, Value w, Value w_scale, Value w_zero_point, Value y_scale, Value y_zero_point, java.util.Optional<Value> B, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<long[]> strides, java.util.Optional<Long> group, java.util.Optional<long[]> kernel_shape) {
             super(SCHEMA, resultType, Set.of(), List.of(x, x_scale, x_zero_point, w, w_scale, w_zero_point, y_scale, y_zero_point, B), List.of(pads, dilations, auto_pad, strides, group, kernel_shape));
         }
 
@@ -19206,14 +19206,14 @@ public java.util.Optional<Value> B() {
             return i != -1 ? java.util.Optional.of(operands().get(8 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<int[]> pads() {
-            int[] pads = Attribute.pads.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(pads).map(int[]::clone);
+        public java.util.Optional<long[]> pads() {
+            long[] pads = Attribute.pads.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(pads).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> dilations() {
-            int[] dilations = Attribute.dilations.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(dilations).map(int[]::clone);
+        public java.util.Optional<long[]> dilations() {
+            long[] dilations = Attribute.dilations.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(dilations).map(long[]::clone);
         }
 
         public java.util.Optional<String> auto_pad() {
@@ -19221,24 +19221,24 @@ public java.util.Optional<String> auto_pad() {
             return java.util.Optional.ofNullable(auto_pad);
         }
 
-        public java.util.Optional<int[]> strides() {
-            int[] strides = Attribute.strides.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(strides).map(int[]::clone);
+        public java.util.Optional<long[]> strides() {
+            long[] strides = Attribute.strides.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(strides).map(long[]::clone);
         }
 
-        public java.util.Optional<Integer> group() {
-            Integer group = Attribute.group.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> group() {
+            Long group = Attribute.group.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(group);
         }
 
-        public java.util.Optional<int[]> kernel_shape() {
-            int[] kernel_shape = Attribute.kernel_shape.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(kernel_shape).map(int[]::clone);
+        public java.util.Optional<long[]> kernel_shape() {
+            long[] kernel_shape = Attribute.kernel_shape.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(kernel_shape).map(long[]::clone);
         }
 
     }
 
-    public static QLinearConv QLinearConv(TypeElement resultType, Value x, Value x_scale, Value x_zero_point, Value w, Value w_scale, Value w_zero_point, Value y_scale, Value y_zero_point, java.util.Optional<Value> B, java.util.Optional<int[]> pads, java.util.Optional<int[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<int[]> strides, java.util.Optional<Integer> group, java.util.Optional<int[]> kernel_shape) {
+    public static QLinearConv QLinearConv(TypeElement resultType, Value x, Value x_scale, Value x_zero_point, Value w, Value w_scale, Value w_zero_point, Value y_scale, Value y_zero_point, java.util.Optional<Value> B, java.util.Optional<long[]> pads, java.util.Optional<long[]> dilations, java.util.Optional<String> auto_pad, java.util.Optional<long[]> strides, java.util.Optional<Long> group, java.util.Optional<long[]> kernel_shape) {
         return new QLinearConv(resultType, x, x_scale, x_zero_point, w, w_scale, w_zero_point, y_scale, y_zero_point, B, pads, dilations, auto_pad, strides, group, kernel_shape);
     }
 
@@ -19399,10 +19399,10 @@ public static final class QuantizeLinear extends OnnxOp {
         public static final String NAME = "QuantizeLinear";
 
         public enum Attribute implements OnnxAttribute {
-            output_dtype(Integer.class, true, 0),
-            saturate(Integer.class, true, 1),
-            axis(Integer.class, true, 1),
-            block_size(Integer.class, true, 0),
+            output_dtype(Long.class, true, 0L),
+            saturate(Long.class, true, 1L),
+            axis(Long.class, true, 1L),
+            block_size(Long.class, true, 0L),
             ;
 
                 final Class<?> t;
@@ -19516,7 +19516,7 @@ public QuantizeLinear transform(CopyContext cc, OpTransformer ot) {
             return new QuantizeLinear(this, cc);
         }
 
-        QuantizeLinear(TypeElement resultType, Value x, Value y_scale, java.util.Optional<Value> y_zero_point, java.util.Optional<Integer> output_dtype, java.util.Optional<Integer> saturate, java.util.Optional<Integer> axis, java.util.Optional<Integer> block_size) {
+        QuantizeLinear(TypeElement resultType, Value x, Value y_scale, java.util.Optional<Value> y_zero_point, java.util.Optional<Long> output_dtype, java.util.Optional<Long> saturate, java.util.Optional<Long> axis, java.util.Optional<Long> block_size) {
             super(SCHEMA, resultType, Set.of(), List.of(x, y_scale, y_zero_point), List.of(output_dtype, saturate, axis, block_size));
         }
 
@@ -19543,29 +19543,29 @@ public java.util.Optional<Value> y_zero_point() {
             return i != -1 ? java.util.Optional.of(operands().get(2 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> output_dtype() {
-            Integer output_dtype = Attribute.output_dtype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> output_dtype() {
+            Long output_dtype = Attribute.output_dtype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(output_dtype);
         }
 
-        public java.util.Optional<Integer> saturate() {
-            Integer saturate = Attribute.saturate.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> saturate() {
+            Long saturate = Attribute.saturate.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(saturate);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
-        public java.util.Optional<Integer> block_size() {
-            Integer block_size = Attribute.block_size.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> block_size() {
+            Long block_size = Attribute.block_size.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(block_size);
         }
 
     }
 
-    public static QuantizeLinear QuantizeLinear(TypeElement resultType, Value x, Value y_scale, java.util.Optional<Value> y_zero_point, java.util.Optional<Integer> output_dtype, java.util.Optional<Integer> saturate, java.util.Optional<Integer> axis, java.util.Optional<Integer> block_size) {
+    public static QuantizeLinear QuantizeLinear(TypeElement resultType, Value x, Value y_scale, java.util.Optional<Value> y_zero_point, java.util.Optional<Long> output_dtype, java.util.Optional<Long> saturate, java.util.Optional<Long> axis, java.util.Optional<Long> block_size) {
         return new QuantizeLinear(resultType, x, y_scale, y_zero_point, output_dtype, saturate, axis, block_size);
     }
 
@@ -19574,9 +19574,9 @@ public static final class RNN extends OnnxOp {
         public static final String NAME = "RNN";
 
         public enum Attribute implements OnnxAttribute {
-            layout(Integer.class, true, 0),
+            layout(Long.class, true, 0L),
             activation_alpha(float[].class, true, null),
-            hidden_size(Integer.class, true, null),
+            hidden_size(Long.class, true, null),
             activation_beta(float[].class, true, null),
             activations(String[].class, true, null),
             clip(Float.class, true, null),
@@ -19698,7 +19698,7 @@ public RNN transform(CopyContext cc, OpTransformer ot) {
             return new RNN(this, cc);
         }
 
-        RNN(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, Value W, Value R, java.util.Optional<Value> B, java.util.Optional<Value> sequence_lens, java.util.Optional<Value> initial_h, java.util.Optional<Integer> layout, java.util.Optional<float[]> activation_alpha, java.util.Optional<Integer> hidden_size, java.util.Optional<float[]> activation_beta, java.util.Optional<String[]> activations, java.util.Optional<Float> clip, java.util.Optional<String> direction) {
+        RNN(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, Value W, Value R, java.util.Optional<Value> B, java.util.Optional<Value> sequence_lens, java.util.Optional<Value> initial_h, java.util.Optional<Long> layout, java.util.Optional<float[]> activation_alpha, java.util.Optional<Long> hidden_size, java.util.Optional<float[]> activation_beta, java.util.Optional<String[]> activations, java.util.Optional<Float> clip, java.util.Optional<String> direction) {
             super(SCHEMA, resultType, optionalOutputs, List.of(X, W, R, B, sequence_lens, initial_h), List.of(layout, activation_alpha, hidden_size, activation_beta, activations, clip, direction));
         }
 
@@ -19739,8 +19739,8 @@ public java.util.Optional<Value> initial_h() {
             return i != -1 ? java.util.Optional.of(operands().get(3 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> layout() {
-            Integer layout = Attribute.layout.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> layout() {
+            Long layout = Attribute.layout.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(layout);
         }
 
@@ -19749,8 +19749,8 @@ public java.util.Optional<float[]> activation_alpha() {
             return java.util.Optional.ofNullable(activation_alpha).map(float[]::clone);
         }
 
-        public java.util.Optional<Integer> hidden_size() {
-            Integer hidden_size = Attribute.hidden_size.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> hidden_size() {
+            Long hidden_size = Attribute.hidden_size.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(hidden_size);
         }
 
@@ -19776,7 +19776,7 @@ public java.util.Optional<String> direction() {
 
     }
 
-    public static RNN RNN(TypeElement resultType, Set<RNN.OutputParameter> optionalOutputs, Value X, Value W, Value R, java.util.Optional<Value> B, java.util.Optional<Value> sequence_lens, java.util.Optional<Value> initial_h, java.util.Optional<Integer> layout, java.util.Optional<float[]> activation_alpha, java.util.Optional<Integer> hidden_size, java.util.Optional<float[]> activation_beta, java.util.Optional<String[]> activations, java.util.Optional<Float> clip, java.util.Optional<String> direction) {
+    public static RNN RNN(TypeElement resultType, Set<RNN.OutputParameter> optionalOutputs, Value X, Value W, Value R, java.util.Optional<Value> B, java.util.Optional<Value> sequence_lens, java.util.Optional<Value> initial_h, java.util.Optional<Long> layout, java.util.Optional<float[]> activation_alpha, java.util.Optional<Long> hidden_size, java.util.Optional<float[]> activation_beta, java.util.Optional<String[]> activations, java.util.Optional<Float> clip, java.util.Optional<String> direction) {
         return new RNN(resultType, optionalOutputs, X, W, R, B, sequence_lens, initial_h, layout, activation_alpha, hidden_size, activation_beta, activations, clip, direction);
     }
 
@@ -19785,11 +19785,11 @@ public static final class RandomNormal extends OnnxOp {
         public static final String NAME = "RandomNormal";
 
         public enum Attribute implements OnnxAttribute {
-            shape(int[].class, false, null),
+            shape(long[].class, false, null),
             seed(Float.class, true, null),
             mean(Float.class, true, 0.0f),
             scale(Float.class, true, 1.0f),
-            dtype(Integer.class, true, 1),
+            dtype(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -19879,7 +19879,7 @@ public RandomNormal transform(CopyContext cc, OpTransformer ot) {
             return new RandomNormal(this, cc);
         }
 
-        RandomNormal(TypeElement resultType, int[] shape, java.util.Optional<Float> seed, java.util.Optional<Float> mean, java.util.Optional<Float> scale, java.util.Optional<Integer> dtype) {
+        RandomNormal(TypeElement resultType, long[] shape, java.util.Optional<Float> seed, java.util.Optional<Float> mean, java.util.Optional<Float> scale, java.util.Optional<Long> dtype) {
             super(SCHEMA, resultType, Set.of(), List.of(), List.of(shape, seed, mean, scale, dtype));
         }
 
@@ -19893,8 +19893,8 @@ public SequencedMap<OnnxParameter, Object> onnxInputs() {
             return onnxInputs(SCHEMA, List.of());
         }
 
-        public int[] shape() {
-            int[] shape = Attribute.shape.access(int[].class, onnxAttributes);
+        public long[] shape() {
+            long[] shape = Attribute.shape.access(long[].class, onnxAttributes);
             return shape.clone();
         }
 
@@ -19913,14 +19913,14 @@ public java.util.Optional<Float> scale() {
             return java.util.Optional.ofNullable(scale);
         }
 
-        public java.util.Optional<Integer> dtype() {
-            Integer dtype = Attribute.dtype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> dtype() {
+            Long dtype = Attribute.dtype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(dtype);
         }
 
     }
 
-    public static RandomNormal RandomNormal(TypeElement resultType, int[] shape, java.util.Optional<Float> seed, java.util.Optional<Float> mean, java.util.Optional<Float> scale, java.util.Optional<Integer> dtype) {
+    public static RandomNormal RandomNormal(TypeElement resultType, long[] shape, java.util.Optional<Float> seed, java.util.Optional<Float> mean, java.util.Optional<Float> scale, java.util.Optional<Long> dtype) {
         return new RandomNormal(resultType, shape, seed, mean, scale, dtype);
     }
 
@@ -19932,7 +19932,7 @@ public enum Attribute implements OnnxAttribute {
             seed(Float.class, true, null),
             mean(Float.class, true, 0.0f),
             scale(Float.class, true, 1.0f),
-            dtype(Integer.class, true, null),
+            dtype(Long.class, true, null),
             ;
 
                 final Class<?> t;
@@ -20044,7 +20044,7 @@ public RandomNormalLike transform(CopyContext cc, OpTransformer ot) {
             return new RandomNormalLike(this, cc);
         }
 
-        RandomNormalLike(TypeElement resultType, Value input, java.util.Optional<Float> seed, java.util.Optional<Float> mean, java.util.Optional<Float> scale, java.util.Optional<Integer> dtype) {
+        RandomNormalLike(TypeElement resultType, Value input, java.util.Optional<Float> seed, java.util.Optional<Float> mean, java.util.Optional<Float> scale, java.util.Optional<Long> dtype) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(seed, mean, scale, dtype));
         }
 
@@ -20077,14 +20077,14 @@ public java.util.Optional<Float> scale() {
             return java.util.Optional.ofNullable(scale);
         }
 
-        public java.util.Optional<Integer> dtype() {
-            Integer dtype = Attribute.dtype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> dtype() {
+            Long dtype = Attribute.dtype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(dtype);
         }
 
     }
 
-    public static RandomNormalLike RandomNormalLike(TypeElement resultType, Value input, java.util.Optional<Float> seed, java.util.Optional<Float> mean, java.util.Optional<Float> scale, java.util.Optional<Integer> dtype) {
+    public static RandomNormalLike RandomNormalLike(TypeElement resultType, Value input, java.util.Optional<Float> seed, java.util.Optional<Float> mean, java.util.Optional<Float> scale, java.util.Optional<Long> dtype) {
         return new RandomNormalLike(resultType, input, seed, mean, scale, dtype);
     }
 
@@ -20094,10 +20094,10 @@ public static final class RandomUniform extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             high(Float.class, true, 1.0f),
-            shape(int[].class, false, null),
+            shape(long[].class, false, null),
             seed(Float.class, true, null),
             low(Float.class, true, 0.0f),
-            dtype(Integer.class, true, 1),
+            dtype(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -20187,7 +20187,7 @@ public RandomUniform transform(CopyContext cc, OpTransformer ot) {
             return new RandomUniform(this, cc);
         }
 
-        RandomUniform(TypeElement resultType, java.util.Optional<Float> high, int[] shape, java.util.Optional<Float> seed, java.util.Optional<Float> low, java.util.Optional<Integer> dtype) {
+        RandomUniform(TypeElement resultType, java.util.Optional<Float> high, long[] shape, java.util.Optional<Float> seed, java.util.Optional<Float> low, java.util.Optional<Long> dtype) {
             super(SCHEMA, resultType, Set.of(), List.of(), List.of(high, shape, seed, low, dtype));
         }
 
@@ -20206,8 +20206,8 @@ public java.util.Optional<Float> high() {
             return java.util.Optional.ofNullable(high);
         }
 
-        public int[] shape() {
-            int[] shape = Attribute.shape.access(int[].class, onnxAttributes);
+        public long[] shape() {
+            long[] shape = Attribute.shape.access(long[].class, onnxAttributes);
             return shape.clone();
         }
 
@@ -20221,14 +20221,14 @@ public java.util.Optional<Float> low() {
             return java.util.Optional.ofNullable(low);
         }
 
-        public java.util.Optional<Integer> dtype() {
-            Integer dtype = Attribute.dtype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> dtype() {
+            Long dtype = Attribute.dtype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(dtype);
         }
 
     }
 
-    public static RandomUniform RandomUniform(TypeElement resultType, java.util.Optional<Float> high, int[] shape, java.util.Optional<Float> seed, java.util.Optional<Float> low, java.util.Optional<Integer> dtype) {
+    public static RandomUniform RandomUniform(TypeElement resultType, java.util.Optional<Float> high, long[] shape, java.util.Optional<Float> seed, java.util.Optional<Float> low, java.util.Optional<Long> dtype) {
         return new RandomUniform(resultType, high, shape, seed, low, dtype);
     }
 
@@ -20240,7 +20240,7 @@ public enum Attribute implements OnnxAttribute {
             high(Float.class, true, 1.0f),
             seed(Float.class, true, null),
             low(Float.class, true, 0.0f),
-            dtype(Integer.class, true, null),
+            dtype(Long.class, true, null),
             ;
 
                 final Class<?> t;
@@ -20352,7 +20352,7 @@ public RandomUniformLike transform(CopyContext cc, OpTransformer ot) {
             return new RandomUniformLike(this, cc);
         }
 
-        RandomUniformLike(TypeElement resultType, Value input, java.util.Optional<Float> high, java.util.Optional<Float> seed, java.util.Optional<Float> low, java.util.Optional<Integer> dtype) {
+        RandomUniformLike(TypeElement resultType, Value input, java.util.Optional<Float> high, java.util.Optional<Float> seed, java.util.Optional<Float> low, java.util.Optional<Long> dtype) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(high, seed, low, dtype));
         }
 
@@ -20385,14 +20385,14 @@ public java.util.Optional<Float> low() {
             return java.util.Optional.ofNullable(low);
         }
 
-        public java.util.Optional<Integer> dtype() {
-            Integer dtype = Attribute.dtype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> dtype() {
+            Long dtype = Attribute.dtype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(dtype);
         }
 
     }
 
-    public static RandomUniformLike RandomUniformLike(TypeElement resultType, Value input, java.util.Optional<Float> high, java.util.Optional<Float> seed, java.util.Optional<Float> low, java.util.Optional<Integer> dtype) {
+    public static RandomUniformLike RandomUniformLike(TypeElement resultType, Value input, java.util.Optional<Float> high, java.util.Optional<Float> seed, java.util.Optional<Float> low, java.util.Optional<Long> dtype) {
         return new RandomUniformLike(resultType, input, high, seed, low, dtype);
     }
 
@@ -20639,8 +20639,8 @@ public static final class ReduceL1 extends OnnxOp {
         public static final String NAME = "ReduceL1";
 
         public enum Attribute implements OnnxAttribute {
-            noop_with_empty_axes(Integer.class, true, 0),
-            keepdims(Integer.class, true, 1),
+            noop_with_empty_axes(Long.class, true, 0L),
+            keepdims(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -20752,7 +20752,7 @@ public ReduceL1 transform(CopyContext cc, OpTransformer ot) {
             return new ReduceL1(this, cc);
         }
 
-        ReduceL1(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+        ReduceL1(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
             super(SCHEMA, resultType, Set.of(), List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         }
 
@@ -20775,19 +20775,19 @@ public java.util.Optional<Value> axes() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> noop_with_empty_axes() {
-            Integer noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> noop_with_empty_axes() {
+            Long noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(noop_with_empty_axes);
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
     }
 
-    public static ReduceL1 ReduceL1(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+    public static ReduceL1 ReduceL1(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
         return new ReduceL1(resultType, data, axes, noop_with_empty_axes, keepdims);
     }
 
@@ -20796,8 +20796,8 @@ public static final class ReduceL2 extends OnnxOp {
         public static final String NAME = "ReduceL2";
 
         public enum Attribute implements OnnxAttribute {
-            noop_with_empty_axes(Integer.class, true, 0),
-            keepdims(Integer.class, true, 1),
+            noop_with_empty_axes(Long.class, true, 0L),
+            keepdims(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -20909,7 +20909,7 @@ public ReduceL2 transform(CopyContext cc, OpTransformer ot) {
             return new ReduceL2(this, cc);
         }
 
-        ReduceL2(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+        ReduceL2(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
             super(SCHEMA, resultType, Set.of(), List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         }
 
@@ -20932,19 +20932,19 @@ public java.util.Optional<Value> axes() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> noop_with_empty_axes() {
-            Integer noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> noop_with_empty_axes() {
+            Long noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(noop_with_empty_axes);
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
     }
 
-    public static ReduceL2 ReduceL2(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+    public static ReduceL2 ReduceL2(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
         return new ReduceL2(resultType, data, axes, noop_with_empty_axes, keepdims);
     }
 
@@ -20953,8 +20953,8 @@ public static final class ReduceLogSum extends OnnxOp {
         public static final String NAME = "ReduceLogSum";
 
         public enum Attribute implements OnnxAttribute {
-            noop_with_empty_axes(Integer.class, true, 0),
-            keepdims(Integer.class, true, 1),
+            noop_with_empty_axes(Long.class, true, 0L),
+            keepdims(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -21066,7 +21066,7 @@ public ReduceLogSum transform(CopyContext cc, OpTransformer ot) {
             return new ReduceLogSum(this, cc);
         }
 
-        ReduceLogSum(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+        ReduceLogSum(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
             super(SCHEMA, resultType, Set.of(), List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         }
 
@@ -21089,19 +21089,19 @@ public java.util.Optional<Value> axes() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> noop_with_empty_axes() {
-            Integer noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> noop_with_empty_axes() {
+            Long noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(noop_with_empty_axes);
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
     }
 
-    public static ReduceLogSum ReduceLogSum(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+    public static ReduceLogSum ReduceLogSum(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
         return new ReduceLogSum(resultType, data, axes, noop_with_empty_axes, keepdims);
     }
 
@@ -21110,8 +21110,8 @@ public static final class ReduceLogSumExp extends OnnxOp {
         public static final String NAME = "ReduceLogSumExp";
 
         public enum Attribute implements OnnxAttribute {
-            noop_with_empty_axes(Integer.class, true, 0),
-            keepdims(Integer.class, true, 1),
+            noop_with_empty_axes(Long.class, true, 0L),
+            keepdims(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -21223,7 +21223,7 @@ public ReduceLogSumExp transform(CopyContext cc, OpTransformer ot) {
             return new ReduceLogSumExp(this, cc);
         }
 
-        ReduceLogSumExp(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+        ReduceLogSumExp(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
             super(SCHEMA, resultType, Set.of(), List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         }
 
@@ -21246,19 +21246,19 @@ public java.util.Optional<Value> axes() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> noop_with_empty_axes() {
-            Integer noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> noop_with_empty_axes() {
+            Long noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(noop_with_empty_axes);
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
     }
 
-    public static ReduceLogSumExp ReduceLogSumExp(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+    public static ReduceLogSumExp ReduceLogSumExp(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
         return new ReduceLogSumExp(resultType, data, axes, noop_with_empty_axes, keepdims);
     }
 
@@ -21267,8 +21267,8 @@ public static final class ReduceMax extends OnnxOp {
         public static final String NAME = "ReduceMax";
 
         public enum Attribute implements OnnxAttribute {
-            noop_with_empty_axes(Integer.class, true, 0),
-            keepdims(Integer.class, true, 1),
+            noop_with_empty_axes(Long.class, true, 0L),
+            keepdims(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -21380,7 +21380,7 @@ public ReduceMax transform(CopyContext cc, OpTransformer ot) {
             return new ReduceMax(this, cc);
         }
 
-        ReduceMax(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+        ReduceMax(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
             super(SCHEMA, resultType, Set.of(), List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         }
 
@@ -21403,19 +21403,19 @@ public java.util.Optional<Value> axes() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> noop_with_empty_axes() {
-            Integer noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> noop_with_empty_axes() {
+            Long noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(noop_with_empty_axes);
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
     }
 
-    public static ReduceMax ReduceMax(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+    public static ReduceMax ReduceMax(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
         return new ReduceMax(resultType, data, axes, noop_with_empty_axes, keepdims);
     }
 
@@ -21424,8 +21424,8 @@ public static final class ReduceMean extends OnnxOp {
         public static final String NAME = "ReduceMean";
 
         public enum Attribute implements OnnxAttribute {
-            noop_with_empty_axes(Integer.class, true, 0),
-            keepdims(Integer.class, true, 1),
+            noop_with_empty_axes(Long.class, true, 0L),
+            keepdims(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -21537,7 +21537,7 @@ public ReduceMean transform(CopyContext cc, OpTransformer ot) {
             return new ReduceMean(this, cc);
         }
 
-        ReduceMean(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+        ReduceMean(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
             super(SCHEMA, resultType, Set.of(), List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         }
 
@@ -21560,19 +21560,19 @@ public java.util.Optional<Value> axes() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> noop_with_empty_axes() {
-            Integer noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> noop_with_empty_axes() {
+            Long noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(noop_with_empty_axes);
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
     }
 
-    public static ReduceMean ReduceMean(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+    public static ReduceMean ReduceMean(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
         return new ReduceMean(resultType, data, axes, noop_with_empty_axes, keepdims);
     }
 
@@ -21581,8 +21581,8 @@ public static final class ReduceMin extends OnnxOp {
         public static final String NAME = "ReduceMin";
 
         public enum Attribute implements OnnxAttribute {
-            noop_with_empty_axes(Integer.class, true, 0),
-            keepdims(Integer.class, true, 1),
+            noop_with_empty_axes(Long.class, true, 0L),
+            keepdims(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -21694,7 +21694,7 @@ public ReduceMin transform(CopyContext cc, OpTransformer ot) {
             return new ReduceMin(this, cc);
         }
 
-        ReduceMin(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+        ReduceMin(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
             super(SCHEMA, resultType, Set.of(), List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         }
 
@@ -21717,19 +21717,19 @@ public java.util.Optional<Value> axes() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> noop_with_empty_axes() {
-            Integer noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> noop_with_empty_axes() {
+            Long noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(noop_with_empty_axes);
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
     }
 
-    public static ReduceMin ReduceMin(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+    public static ReduceMin ReduceMin(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
         return new ReduceMin(resultType, data, axes, noop_with_empty_axes, keepdims);
     }
 
@@ -21738,8 +21738,8 @@ public static final class ReduceProd extends OnnxOp {
         public static final String NAME = "ReduceProd";
 
         public enum Attribute implements OnnxAttribute {
-            noop_with_empty_axes(Integer.class, true, 0),
-            keepdims(Integer.class, true, 1),
+            noop_with_empty_axes(Long.class, true, 0L),
+            keepdims(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -21851,7 +21851,7 @@ public ReduceProd transform(CopyContext cc, OpTransformer ot) {
             return new ReduceProd(this, cc);
         }
 
-        ReduceProd(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+        ReduceProd(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
             super(SCHEMA, resultType, Set.of(), List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         }
 
@@ -21874,19 +21874,19 @@ public java.util.Optional<Value> axes() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> noop_with_empty_axes() {
-            Integer noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> noop_with_empty_axes() {
+            Long noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(noop_with_empty_axes);
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
     }
 
-    public static ReduceProd ReduceProd(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+    public static ReduceProd ReduceProd(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
         return new ReduceProd(resultType, data, axes, noop_with_empty_axes, keepdims);
     }
 
@@ -21895,8 +21895,8 @@ public static final class ReduceSum extends OnnxOp {
         public static final String NAME = "ReduceSum";
 
         public enum Attribute implements OnnxAttribute {
-            noop_with_empty_axes(Integer.class, true, 0),
-            keepdims(Integer.class, true, 1),
+            noop_with_empty_axes(Long.class, true, 0L),
+            keepdims(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -22008,7 +22008,7 @@ public ReduceSum transform(CopyContext cc, OpTransformer ot) {
             return new ReduceSum(this, cc);
         }
 
-        ReduceSum(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+        ReduceSum(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
             super(SCHEMA, resultType, Set.of(), List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         }
 
@@ -22031,19 +22031,19 @@ public java.util.Optional<Value> axes() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> noop_with_empty_axes() {
-            Integer noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> noop_with_empty_axes() {
+            Long noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(noop_with_empty_axes);
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
     }
 
-    public static ReduceSum ReduceSum(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+    public static ReduceSum ReduceSum(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
         return new ReduceSum(resultType, data, axes, noop_with_empty_axes, keepdims);
     }
 
@@ -22052,8 +22052,8 @@ public static final class ReduceSumSquare extends OnnxOp {
         public static final String NAME = "ReduceSumSquare";
 
         public enum Attribute implements OnnxAttribute {
-            noop_with_empty_axes(Integer.class, true, 0),
-            keepdims(Integer.class, true, 1),
+            noop_with_empty_axes(Long.class, true, 0L),
+            keepdims(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -22165,7 +22165,7 @@ public ReduceSumSquare transform(CopyContext cc, OpTransformer ot) {
             return new ReduceSumSquare(this, cc);
         }
 
-        ReduceSumSquare(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+        ReduceSumSquare(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
             super(SCHEMA, resultType, Set.of(), List.of(data, axes), List.of(noop_with_empty_axes, keepdims));
         }
 
@@ -22188,19 +22188,19 @@ public java.util.Optional<Value> axes() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> noop_with_empty_axes() {
-            Integer noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> noop_with_empty_axes() {
+            Long noop_with_empty_axes = Attribute.noop_with_empty_axes.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(noop_with_empty_axes);
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
     }
 
-    public static ReduceSumSquare ReduceSumSquare(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Integer> noop_with_empty_axes, java.util.Optional<Integer> keepdims) {
+    public static ReduceSumSquare ReduceSumSquare(TypeElement resultType, Value data, java.util.Optional<Value> axes, java.util.Optional<Long> noop_with_empty_axes, java.util.Optional<Long> keepdims) {
         return new ReduceSumSquare(resultType, data, axes, noop_with_empty_axes, keepdims);
     }
 
@@ -22469,7 +22469,7 @@ public static final class Reshape extends OnnxOp {
         public static final String NAME = "Reshape";
 
         public enum Attribute implements OnnxAttribute {
-            allowzero(Integer.class, true, 0),
+            allowzero(Long.class, true, 0L),
             ;
 
                 final Class<?> t;
@@ -22581,7 +22581,7 @@ public Reshape transform(CopyContext cc, OpTransformer ot) {
             return new Reshape(this, cc);
         }
 
-        Reshape(TypeElement resultType, Value data, Value shape, java.util.Optional<Integer> allowzero) {
+        Reshape(TypeElement resultType, Value data, Value shape, java.util.Optional<Long> allowzero) {
             super(SCHEMA, resultType, Set.of(), List.of(data, shape), List.of(allowzero));
         }
 
@@ -22603,14 +22603,14 @@ public Value shape() {
             return operands().get(1);
         }
 
-        public java.util.Optional<Integer> allowzero() {
-            Integer allowzero = Attribute.allowzero.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> allowzero() {
+            Long allowzero = Attribute.allowzero.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(allowzero);
         }
 
     }
 
-    public static Reshape Reshape(TypeElement resultType, Value data, Value shape, java.util.Optional<Integer> allowzero) {
+    public static Reshape Reshape(TypeElement resultType, Value data, Value shape, java.util.Optional<Long> allowzero) {
         return new Reshape(resultType, data, shape, allowzero);
     }
 
@@ -22622,12 +22622,12 @@ public enum Attribute implements OnnxAttribute {
             mode(String.class, true, "nearest"),
             extrapolation_value(Float.class, true, 0.0f),
             nearest_mode(String.class, true, "round_prefer_floor"),
-            antialias(Integer.class, true, 0),
+            antialias(Long.class, true, 0L),
             cubic_coeff_a(Float.class, true, -0.75f),
-            axes(int[].class, true, null),
+            axes(long[].class, true, null),
             coordinate_transformation_mode(String.class, true, "half_pixel"),
             keep_aspect_ratio_policy(String.class, true, "stretch"),
-            exclude_outside(Integer.class, true, 0),
+            exclude_outside(Long.class, true, 0L),
             ;
 
                 final Class<?> t;
@@ -22742,7 +22742,7 @@ public Resize transform(CopyContext cc, OpTransformer ot) {
             return new Resize(this, cc);
         }
 
-        Resize(TypeElement resultType, Value X, java.util.Optional<Value> roi, java.util.Optional<Value> scales, java.util.Optional<Value> sizes, java.util.Optional<String> mode, java.util.Optional<Float> extrapolation_value, java.util.Optional<String> nearest_mode, java.util.Optional<Integer> antialias, java.util.Optional<Float> cubic_coeff_a, java.util.Optional<int[]> axes, java.util.Optional<String> coordinate_transformation_mode, java.util.Optional<String> keep_aspect_ratio_policy, java.util.Optional<Integer> exclude_outside) {
+        Resize(TypeElement resultType, Value X, java.util.Optional<Value> roi, java.util.Optional<Value> scales, java.util.Optional<Value> sizes, java.util.Optional<String> mode, java.util.Optional<Float> extrapolation_value, java.util.Optional<String> nearest_mode, java.util.Optional<Long> antialias, java.util.Optional<Float> cubic_coeff_a, java.util.Optional<long[]> axes, java.util.Optional<String> coordinate_transformation_mode, java.util.Optional<String> keep_aspect_ratio_policy, java.util.Optional<Long> exclude_outside) {
             super(SCHEMA, resultType, Set.of(), List.of(X, roi, scales, sizes), List.of(mode, extrapolation_value, nearest_mode, antialias, cubic_coeff_a, axes, coordinate_transformation_mode, keep_aspect_ratio_policy, exclude_outside));
         }
 
@@ -22790,8 +22790,8 @@ public java.util.Optional<String> nearest_mode() {
             return java.util.Optional.ofNullable(nearest_mode);
         }
 
-        public java.util.Optional<Integer> antialias() {
-            Integer antialias = Attribute.antialias.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> antialias() {
+            Long antialias = Attribute.antialias.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(antialias);
         }
 
@@ -22800,9 +22800,9 @@ public java.util.Optional<Float> cubic_coeff_a() {
             return java.util.Optional.ofNullable(cubic_coeff_a);
         }
 
-        public java.util.Optional<int[]> axes() {
-            int[] axes = Attribute.axes.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(axes).map(int[]::clone);
+        public java.util.Optional<long[]> axes() {
+            long[] axes = Attribute.axes.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(axes).map(long[]::clone);
         }
 
         public java.util.Optional<String> coordinate_transformation_mode() {
@@ -22815,14 +22815,14 @@ public java.util.Optional<String> keep_aspect_ratio_policy() {
             return java.util.Optional.ofNullable(keep_aspect_ratio_policy);
         }
 
-        public java.util.Optional<Integer> exclude_outside() {
-            Integer exclude_outside = Attribute.exclude_outside.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> exclude_outside() {
+            Long exclude_outside = Attribute.exclude_outside.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(exclude_outside);
         }
 
     }
 
-    public static Resize Resize(TypeElement resultType, Value X, java.util.Optional<Value> roi, java.util.Optional<Value> scales, java.util.Optional<Value> sizes, java.util.Optional<String> mode, java.util.Optional<Float> extrapolation_value, java.util.Optional<String> nearest_mode, java.util.Optional<Integer> antialias, java.util.Optional<Float> cubic_coeff_a, java.util.Optional<int[]> axes, java.util.Optional<String> coordinate_transformation_mode, java.util.Optional<String> keep_aspect_ratio_policy, java.util.Optional<Integer> exclude_outside) {
+    public static Resize Resize(TypeElement resultType, Value X, java.util.Optional<Value> roi, java.util.Optional<Value> scales, java.util.Optional<Value> sizes, java.util.Optional<String> mode, java.util.Optional<Float> extrapolation_value, java.util.Optional<String> nearest_mode, java.util.Optional<Long> antialias, java.util.Optional<Float> cubic_coeff_a, java.util.Optional<long[]> axes, java.util.Optional<String> coordinate_transformation_mode, java.util.Optional<String> keep_aspect_ratio_policy, java.util.Optional<Long> exclude_outside) {
         return new Resize(resultType, X, roi, scales, sizes, mode, extrapolation_value, nearest_mode, antialias, cubic_coeff_a, axes, coordinate_transformation_mode, keep_aspect_ratio_policy, exclude_outside);
     }
 
@@ -22831,8 +22831,8 @@ public static final class ReverseSequence extends OnnxOp {
         public static final String NAME = "ReverseSequence";
 
         public enum Attribute implements OnnxAttribute {
-            time_axis(Integer.class, true, 0),
-            batch_axis(Integer.class, true, 1),
+            time_axis(Long.class, true, 0L),
+            batch_axis(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -22944,7 +22944,7 @@ public ReverseSequence transform(CopyContext cc, OpTransformer ot) {
             return new ReverseSequence(this, cc);
         }
 
-        ReverseSequence(TypeElement resultType, Value input, Value sequence_lens, java.util.Optional<Integer> time_axis, java.util.Optional<Integer> batch_axis) {
+        ReverseSequence(TypeElement resultType, Value input, Value sequence_lens, java.util.Optional<Long> time_axis, java.util.Optional<Long> batch_axis) {
             super(SCHEMA, resultType, Set.of(), List.of(input, sequence_lens), List.of(time_axis, batch_axis));
         }
 
@@ -22966,19 +22966,19 @@ public Value sequence_lens() {
             return operands().get(1);
         }
 
-        public java.util.Optional<Integer> time_axis() {
-            Integer time_axis = Attribute.time_axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> time_axis() {
+            Long time_axis = Attribute.time_axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(time_axis);
         }
 
-        public java.util.Optional<Integer> batch_axis() {
-            Integer batch_axis = Attribute.batch_axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> batch_axis() {
+            Long batch_axis = Attribute.batch_axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(batch_axis);
         }
 
     }
 
-    public static ReverseSequence ReverseSequence(TypeElement resultType, Value input, Value sequence_lens, java.util.Optional<Integer> time_axis, java.util.Optional<Integer> batch_axis) {
+    public static ReverseSequence ReverseSequence(TypeElement resultType, Value input, Value sequence_lens, java.util.Optional<Long> time_axis, java.util.Optional<Long> batch_axis) {
         return new ReverseSequence(resultType, input, sequence_lens, time_axis, batch_axis);
     }
 
@@ -22988,11 +22988,11 @@ public static final class RoiAlign extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             mode(String.class, true, "avg"),
-            output_width(Integer.class, true, 1),
+            output_width(Long.class, true, 1),
             spatial_scale(Float.class, true, 1.0f),
             coordinate_transformation_mode(String.class, true, "half_pixel"),
-            sampling_ratio(Integer.class, true, 0),
-            output_height(Integer.class, true, 1),
+            sampling_ratio(Long.class, true, 0),
+            output_height(Long.class, true, 1),
             ;
 
                 final Class<?> t;
@@ -23106,7 +23106,7 @@ public RoiAlign transform(CopyContext cc, OpTransformer ot) {
             return new RoiAlign(this, cc);
         }
 
-        RoiAlign(TypeElement resultType, Value X, Value rois, Value batch_indices, java.util.Optional<String> mode, java.util.Optional<Integer> output_width, java.util.Optional<Float> spatial_scale, java.util.Optional<String> coordinate_transformation_mode, java.util.Optional<Integer> sampling_ratio, java.util.Optional<Integer> output_height) {
+        RoiAlign(TypeElement resultType, Value X, Value rois, Value batch_indices, java.util.Optional<String> mode, java.util.Optional<Long> output_width, java.util.Optional<Float> spatial_scale, java.util.Optional<String> coordinate_transformation_mode, java.util.Optional<Long> sampling_ratio, java.util.Optional<Long> output_height) {
             super(SCHEMA, resultType, Set.of(), List.of(X, rois, batch_indices), List.of(mode, output_width, spatial_scale, coordinate_transformation_mode, sampling_ratio, output_height));
         }
 
@@ -23137,8 +23137,8 @@ public java.util.Optional<String> mode() {
             return java.util.Optional.ofNullable(mode);
         }
 
-        public java.util.Optional<Integer> output_width() {
-            Integer output_width = Attribute.output_width.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> output_width() {
+            Long output_width = Attribute.output_width.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(output_width);
         }
 
@@ -23152,19 +23152,19 @@ public java.util.Optional<String> coordinate_transformation_mode() {
             return java.util.Optional.ofNullable(coordinate_transformation_mode);
         }
 
-        public java.util.Optional<Integer> sampling_ratio() {
-            Integer sampling_ratio = Attribute.sampling_ratio.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> sampling_ratio() {
+            Long sampling_ratio = Attribute.sampling_ratio.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(sampling_ratio);
         }
 
-        public java.util.Optional<Integer> output_height() {
-            Integer output_height = Attribute.output_height.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> output_height() {
+            Long output_height = Attribute.output_height.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(output_height);
         }
 
     }
 
-    public static RoiAlign RoiAlign(TypeElement resultType, Value X, Value rois, Value batch_indices, java.util.Optional<String> mode, java.util.Optional<Integer> output_width, java.util.Optional<Float> spatial_scale, java.util.Optional<String> coordinate_transformation_mode, java.util.Optional<Integer> sampling_ratio, java.util.Optional<Integer> output_height) {
+    public static RoiAlign RoiAlign(TypeElement resultType, Value X, Value rois, Value batch_indices, java.util.Optional<String> mode, java.util.Optional<Long> output_width, java.util.Optional<Float> spatial_scale, java.util.Optional<String> coordinate_transformation_mode, java.util.Optional<Long> sampling_ratio, java.util.Optional<Long> output_height) {
         return new RoiAlign(resultType, X, rois, batch_indices, mode, output_width, spatial_scale, coordinate_transformation_mode, sampling_ratio, output_height);
     }
 
@@ -23287,7 +23287,7 @@ public static final class STFT extends OnnxOp {
         public static final String NAME = "STFT";
 
         public enum Attribute implements OnnxAttribute {
-            onesided(Integer.class, true, 1),
+            onesided(Long.class, true, 1),
             ;
 
                 final Class<?> t;
@@ -23402,7 +23402,7 @@ public STFT transform(CopyContext cc, OpTransformer ot) {
             return new STFT(this, cc);
         }
 
-        STFT(TypeElement resultType, Value signal, Value frame_step, java.util.Optional<Value> window, java.util.Optional<Value> frame_length, java.util.Optional<Integer> onesided) {
+        STFT(TypeElement resultType, Value signal, Value frame_step, java.util.Optional<Value> window, java.util.Optional<Value> frame_length, java.util.Optional<Long> onesided) {
             super(SCHEMA, resultType, Set.of(), List.of(signal, frame_step, window, frame_length), List.of(onesided));
         }
 
@@ -23434,14 +23434,14 @@ public java.util.Optional<Value> frame_length() {
             return i != -1 ? java.util.Optional.of(operands().get(2 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> onesided() {
-            Integer onesided = Attribute.onesided.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> onesided() {
+            Long onesided = Attribute.onesided.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(onesided);
         }
 
     }
 
-    public static STFT STFT(TypeElement resultType, Value signal, Value frame_step, java.util.Optional<Value> window, java.util.Optional<Value> frame_length, java.util.Optional<Integer> onesided) {
+    public static STFT STFT(TypeElement resultType, Value signal, Value frame_step, java.util.Optional<Value> window, java.util.Optional<Value> frame_length, java.util.Optional<Long> onesided) {
         return new STFT(resultType, signal, frame_step, window, frame_length, onesided);
     }
 
@@ -23453,12 +23453,12 @@ public enum Attribute implements OnnxAttribute {
             prob_b(float[].class, true, null),
             kernel_params(float[].class, true, null),
             kernel_type(String.class, true, "LINEAR"),
-            classlabels_ints(int[].class, true, null),
+            classlabels_ints(long[].class, true, null),
             post_transform(String.class, true, "NONE"),
             rho(float[].class, true, null),
             coefficients(float[].class, true, null),
             support_vectors(float[].class, true, null),
-            vectors_per_class(int[].class, true, null),
+            vectors_per_class(long[].class, true, null),
             prob_a(float[].class, true, null),
             classlabels_strings(String[].class, true, null),
             ;
@@ -23573,7 +23573,7 @@ public SVMClassifier transform(CopyContext cc, OpTransformer ot) {
             return new SVMClassifier(this, cc);
         }
 
-        SVMClassifier(TypeElement resultType, Value X, java.util.Optional<float[]> prob_b, java.util.Optional<float[]> kernel_params, java.util.Optional<String> kernel_type, java.util.Optional<int[]> classlabels_ints, java.util.Optional<String> post_transform, java.util.Optional<float[]> rho, java.util.Optional<float[]> coefficients, java.util.Optional<float[]> support_vectors, java.util.Optional<int[]> vectors_per_class, java.util.Optional<float[]> prob_a, java.util.Optional<String[]> classlabels_strings) {
+        SVMClassifier(TypeElement resultType, Value X, java.util.Optional<float[]> prob_b, java.util.Optional<float[]> kernel_params, java.util.Optional<String> kernel_type, java.util.Optional<long[]> classlabels_ints, java.util.Optional<String> post_transform, java.util.Optional<float[]> rho, java.util.Optional<float[]> coefficients, java.util.Optional<float[]> support_vectors, java.util.Optional<long[]> vectors_per_class, java.util.Optional<float[]> prob_a, java.util.Optional<String[]> classlabels_strings) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(prob_b, kernel_params, kernel_type, classlabels_ints, post_transform, rho, coefficients, support_vectors, vectors_per_class, prob_a, classlabels_strings));
         }
 
@@ -23606,9 +23606,9 @@ public java.util.Optional<String> kernel_type() {
             return java.util.Optional.ofNullable(kernel_type);
         }
 
-        public java.util.Optional<int[]> classlabels_ints() {
-            int[] classlabels_ints = Attribute.classlabels_ints.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(classlabels_ints).map(int[]::clone);
+        public java.util.Optional<long[]> classlabels_ints() {
+            long[] classlabels_ints = Attribute.classlabels_ints.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(classlabels_ints).map(long[]::clone);
         }
 
         public java.util.Optional<String> post_transform() {
@@ -23631,9 +23631,9 @@ public java.util.Optional<float[]> support_vectors() {
             return java.util.Optional.ofNullable(support_vectors).map(float[]::clone);
         }
 
-        public java.util.Optional<int[]> vectors_per_class() {
-            int[] vectors_per_class = Attribute.vectors_per_class.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(vectors_per_class).map(int[]::clone);
+        public java.util.Optional<long[]> vectors_per_class() {
+            long[] vectors_per_class = Attribute.vectors_per_class.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(vectors_per_class).map(long[]::clone);
         }
 
         public java.util.Optional<float[]> prob_a() {
@@ -23648,7 +23648,7 @@ public java.util.Optional<String[]> classlabels_strings() {
 
     }
 
-    public static SVMClassifier SVMClassifier(TypeElement resultType, Value X, java.util.Optional<float[]> prob_b, java.util.Optional<float[]> kernel_params, java.util.Optional<String> kernel_type, java.util.Optional<int[]> classlabels_ints, java.util.Optional<String> post_transform, java.util.Optional<float[]> rho, java.util.Optional<float[]> coefficients, java.util.Optional<float[]> support_vectors, java.util.Optional<int[]> vectors_per_class, java.util.Optional<float[]> prob_a, java.util.Optional<String[]> classlabels_strings) {
+    public static SVMClassifier SVMClassifier(TypeElement resultType, Value X, java.util.Optional<float[]> prob_b, java.util.Optional<float[]> kernel_params, java.util.Optional<String> kernel_type, java.util.Optional<long[]> classlabels_ints, java.util.Optional<String> post_transform, java.util.Optional<float[]> rho, java.util.Optional<float[]> coefficients, java.util.Optional<float[]> support_vectors, java.util.Optional<long[]> vectors_per_class, java.util.Optional<float[]> prob_a, java.util.Optional<String[]> classlabels_strings) {
         return new SVMClassifier(resultType, X, prob_b, kernel_params, kernel_type, classlabels_ints, post_transform, rho, coefficients, support_vectors, vectors_per_class, prob_a, classlabels_strings);
     }
 
@@ -23659,12 +23659,12 @@ public static final class SVMRegressor extends OnnxOp {
         public enum Attribute implements OnnxAttribute {
             kernel_type(String.class, true, "LINEAR"),
             kernel_params(float[].class, true, null),
-            n_supports(Integer.class, true, 0),
+            n_supports(Long.class, true, 0),
             rho(float[].class, true, null),
             post_transform(String.class, true, "NONE"),
             coefficients(float[].class, true, null),
             support_vectors(float[].class, true, null),
-            one_class(Integer.class, true, 0),
+            one_class(Long.class, true, 0),
             ;
 
                 final Class<?> t;
@@ -23775,7 +23775,7 @@ public SVMRegressor transform(CopyContext cc, OpTransformer ot) {
             return new SVMRegressor(this, cc);
         }
 
-        SVMRegressor(TypeElement resultType, Value X, java.util.Optional<String> kernel_type, java.util.Optional<float[]> kernel_params, java.util.Optional<Integer> n_supports, java.util.Optional<float[]> rho, java.util.Optional<String> post_transform, java.util.Optional<float[]> coefficients, java.util.Optional<float[]> support_vectors, java.util.Optional<Integer> one_class) {
+        SVMRegressor(TypeElement resultType, Value X, java.util.Optional<String> kernel_type, java.util.Optional<float[]> kernel_params, java.util.Optional<Long> n_supports, java.util.Optional<float[]> rho, java.util.Optional<String> post_transform, java.util.Optional<float[]> coefficients, java.util.Optional<float[]> support_vectors, java.util.Optional<Long> one_class) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(kernel_type, kernel_params, n_supports, rho, post_transform, coefficients, support_vectors, one_class));
         }
 
@@ -23803,8 +23803,8 @@ public java.util.Optional<float[]> kernel_params() {
             return java.util.Optional.ofNullable(kernel_params).map(float[]::clone);
         }
 
-        public java.util.Optional<Integer> n_supports() {
-            Integer n_supports = Attribute.n_supports.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> n_supports() {
+            Long n_supports = Attribute.n_supports.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(n_supports);
         }
 
@@ -23828,14 +23828,14 @@ public java.util.Optional<float[]> support_vectors() {
             return java.util.Optional.ofNullable(support_vectors).map(float[]::clone);
         }
 
-        public java.util.Optional<Integer> one_class() {
-            Integer one_class = Attribute.one_class.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> one_class() {
+            Long one_class = Attribute.one_class.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(one_class);
         }
 
     }
 
-    public static SVMRegressor SVMRegressor(TypeElement resultType, Value X, java.util.Optional<String> kernel_type, java.util.Optional<float[]> kernel_params, java.util.Optional<Integer> n_supports, java.util.Optional<float[]> rho, java.util.Optional<String> post_transform, java.util.Optional<float[]> coefficients, java.util.Optional<float[]> support_vectors, java.util.Optional<Integer> one_class) {
+    public static SVMRegressor SVMRegressor(TypeElement resultType, Value X, java.util.Optional<String> kernel_type, java.util.Optional<float[]> kernel_params, java.util.Optional<Long> n_supports, java.util.Optional<float[]> rho, java.util.Optional<String> post_transform, java.util.Optional<float[]> coefficients, java.util.Optional<float[]> support_vectors, java.util.Optional<Long> one_class) {
         return new SVMRegressor(resultType, X, kernel_type, kernel_params, n_supports, rho, post_transform, coefficients, support_vectors, one_class);
     }
 
@@ -23995,7 +23995,7 @@ public static final class Scatter extends OnnxOp {
         public static final String NAME = "Scatter";
 
         public enum Attribute implements OnnxAttribute {
-            axis(Integer.class, true, 0),
+            axis(Long.class, true, 0),
             ;
 
                 final Class<?> t;
@@ -24109,7 +24109,7 @@ public Scatter transform(CopyContext cc, OpTransformer ot) {
             return new Scatter(this, cc);
         }
 
-        Scatter(TypeElement resultType, Value data, Value indices, Value updates, java.util.Optional<Integer> axis) {
+        Scatter(TypeElement resultType, Value data, Value indices, Value updates, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(data, indices, updates), List.of(axis));
         }
 
@@ -24135,14 +24135,14 @@ public Value updates() {
             return operands().get(2);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static Scatter Scatter(TypeElement resultType, Value data, Value indices, Value updates, java.util.Optional<Integer> axis) {
+    public static Scatter Scatter(TypeElement resultType, Value data, Value indices, Value updates, java.util.Optional<Long> axis) {
         return new Scatter(resultType, data, indices, updates, axis);
     }
 
@@ -24152,7 +24152,7 @@ public static final class ScatterElements extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             reduction(String.class, true, "none"),
-            axis(Integer.class, true, 0),
+            axis(Long.class, true, 0),
             ;
 
                 final Class<?> t;
@@ -24266,7 +24266,7 @@ public ScatterElements transform(CopyContext cc, OpTransformer ot) {
             return new ScatterElements(this, cc);
         }
 
-        ScatterElements(TypeElement resultType, Value data, Value indices, Value updates, java.util.Optional<String> reduction, java.util.Optional<Integer> axis) {
+        ScatterElements(TypeElement resultType, Value data, Value indices, Value updates, java.util.Optional<String> reduction, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(data, indices, updates), List.of(reduction, axis));
         }
 
@@ -24297,14 +24297,14 @@ public java.util.Optional<String> reduction() {
             return java.util.Optional.ofNullable(reduction);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static ScatterElements ScatterElements(TypeElement resultType, Value data, Value indices, Value updates, java.util.Optional<String> reduction, java.util.Optional<Integer> axis) {
+    public static ScatterElements ScatterElements(TypeElement resultType, Value data, Value indices, Value updates, java.util.Optional<String> reduction, java.util.Optional<Long> axis) {
         return new ScatterElements(resultType, data, indices, updates, reduction, axis);
     }
 
@@ -24855,7 +24855,7 @@ public static final class SequenceEmpty extends OnnxOp {
         public static final String NAME = "SequenceEmpty";
 
         public enum Attribute implements OnnxAttribute {
-            dtype(Integer.class, true, null),
+            dtype(Long.class, true, null),
             ;
 
                 final Class<?> t;
@@ -24945,7 +24945,7 @@ public SequenceEmpty transform(CopyContext cc, OpTransformer ot) {
             return new SequenceEmpty(this, cc);
         }
 
-        SequenceEmpty(TypeElement resultType, java.util.Optional<Integer> dtype) {
+        SequenceEmpty(TypeElement resultType, java.util.Optional<Long> dtype) {
             super(SCHEMA, resultType, Set.of(), List.of(), List.of(dtype));
         }
 
@@ -24959,14 +24959,14 @@ public SequencedMap<OnnxParameter, Object> onnxInputs() {
             return onnxInputs(SCHEMA, List.of());
         }
 
-        public java.util.Optional<Integer> dtype() {
-            Integer dtype = Attribute.dtype.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> dtype() {
+            Long dtype = Attribute.dtype.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(dtype);
         }
 
     }
 
-    public static SequenceEmpty SequenceEmpty(TypeElement resultType, java.util.Optional<Integer> dtype) {
+    public static SequenceEmpty SequenceEmpty(TypeElement resultType, java.util.Optional<Long> dtype) {
         return new SequenceEmpty(resultType, dtype);
     }
 
@@ -25338,8 +25338,8 @@ public static final class Shape extends OnnxOp {
         public static final String NAME = "Shape";
 
         public enum Attribute implements OnnxAttribute {
-            start(Integer.class, true, 0),
-            end(Integer.class, true, null),
+            start(Long.class, true, 0),
+            end(Long.class, true, null),
             ;
 
                 final Class<?> t;
@@ -25451,7 +25451,7 @@ public Shape transform(CopyContext cc, OpTransformer ot) {
             return new Shape(this, cc);
         }
 
-        Shape(TypeElement resultType, Value data, java.util.Optional<Integer> start, java.util.Optional<Integer> end) {
+        Shape(TypeElement resultType, Value data, java.util.Optional<Long> start, java.util.Optional<Long> end) {
             super(SCHEMA, resultType, Set.of(), List.of(data), List.of(start, end));
         }
 
@@ -25469,19 +25469,19 @@ public Value data() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> start() {
-            Integer start = Attribute.start.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> start() {
+            Long start = Attribute.start.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(start);
         }
 
-        public java.util.Optional<Integer> end() {
-            Integer end = Attribute.end.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> end() {
+            Long end = Attribute.end.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(end);
         }
 
     }
 
-    public static Shape Shape(TypeElement resultType, Value data, java.util.Optional<Integer> start, java.util.Optional<Integer> end) {
+    public static Shape Shape(TypeElement resultType, Value data, java.util.Optional<Long> start, java.util.Optional<Long> end) {
         return new Shape(resultType, data, start, end);
     }
 
@@ -26349,7 +26349,7 @@ public static final class Softmax extends OnnxOp {
         public static final String NAME = "Softmax";
 
         public enum Attribute implements OnnxAttribute {
-            axis(Integer.class, true, -1),
+            axis(Long.class, true, -1),
             ;
 
                 final Class<?> t;
@@ -26460,7 +26460,7 @@ public Softmax transform(CopyContext cc, OpTransformer ot) {
             return new Softmax(this, cc);
         }
 
-        Softmax(TypeElement resultType, Value input, java.util.Optional<Integer> axis) {
+        Softmax(TypeElement resultType, Value input, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(axis));
         }
 
@@ -26478,14 +26478,14 @@ public Value input() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static Softmax Softmax(TypeElement resultType, Value input, java.util.Optional<Integer> axis) {
+    public static Softmax Softmax(TypeElement resultType, Value input, java.util.Optional<Long> axis) {
         return new Softmax(resultType, input, axis);
     }
 
@@ -26494,7 +26494,7 @@ public static final class SoftmaxCrossEntropyLoss extends OnnxOp {
         public static final String NAME = "SoftmaxCrossEntropyLoss";
 
         public enum Attribute implements OnnxAttribute {
-            ignore_index(Integer.class, true, null),
+            ignore_index(Long.class, true, null),
             reduction(String.class, true, "mean"),
             ;
 
@@ -26610,7 +26610,7 @@ public SoftmaxCrossEntropyLoss transform(CopyContext cc, OpTransformer ot) {
             return new SoftmaxCrossEntropyLoss(this, cc);
         }
 
-        SoftmaxCrossEntropyLoss(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value scores, Value labels, java.util.Optional<Value> weights, java.util.Optional<Integer> ignore_index, java.util.Optional<String> reduction) {
+        SoftmaxCrossEntropyLoss(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value scores, Value labels, java.util.Optional<Value> weights, java.util.Optional<Long> ignore_index, java.util.Optional<String> reduction) {
             super(SCHEMA, resultType, optionalOutputs, List.of(scores, labels, weights), List.of(ignore_index, reduction));
         }
 
@@ -26637,8 +26637,8 @@ public java.util.Optional<Value> weights() {
             return i != -1 ? java.util.Optional.of(operands().get(2 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> ignore_index() {
-            Integer ignore_index = Attribute.ignore_index.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> ignore_index() {
+            Long ignore_index = Attribute.ignore_index.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(ignore_index);
         }
 
@@ -26649,7 +26649,7 @@ public java.util.Optional<String> reduction() {
 
     }
 
-    public static SoftmaxCrossEntropyLoss SoftmaxCrossEntropyLoss(TypeElement resultType, Set<SoftmaxCrossEntropyLoss.OutputParameter> optionalOutputs, Value scores, Value labels, java.util.Optional<Value> weights, java.util.Optional<Integer> ignore_index, java.util.Optional<String> reduction) {
+    public static SoftmaxCrossEntropyLoss SoftmaxCrossEntropyLoss(TypeElement resultType, Set<SoftmaxCrossEntropyLoss.OutputParameter> optionalOutputs, Value scores, Value labels, java.util.Optional<Value> weights, java.util.Optional<Long> ignore_index, java.util.Optional<String> reduction) {
         return new SoftmaxCrossEntropyLoss(resultType, optionalOutputs, scores, labels, weights, ignore_index, reduction);
     }
 
@@ -26886,7 +26886,7 @@ public static final class SpaceToDepth extends OnnxOp {
         public static final String NAME = "SpaceToDepth";
 
         public enum Attribute implements OnnxAttribute {
-            blocksize(Integer.class, false, null),
+            blocksize(Long.class, false, null),
             ;
 
                 final Class<?> t;
@@ -26997,7 +26997,7 @@ public SpaceToDepth transform(CopyContext cc, OpTransformer ot) {
             return new SpaceToDepth(this, cc);
         }
 
-        SpaceToDepth(TypeElement resultType, Value input, int blocksize) {
+        SpaceToDepth(TypeElement resultType, Value input, long blocksize) {
             super(SCHEMA, resultType, Set.of(), List.of(input), List.of(blocksize));
         }
 
@@ -27015,14 +27015,14 @@ public Value input() {
             return operands().get(0);
         }
 
-        public int blocksize() {
-            int blocksize = Attribute.blocksize.access(int.class, onnxAttributes);
+        public long blocksize() {
+            long blocksize = Attribute.blocksize.access(long.class, onnxAttributes);
             return blocksize;
         }
 
     }
 
-    public static SpaceToDepth SpaceToDepth(TypeElement resultType, Value input, int blocksize) {
+    public static SpaceToDepth SpaceToDepth(TypeElement resultType, Value input, long blocksize) {
         return new SpaceToDepth(resultType, input, blocksize);
     }
 
@@ -27031,8 +27031,8 @@ public static final class Split extends OnnxOp {
         public static final String NAME = "Split";
 
         public enum Attribute implements OnnxAttribute {
-            num_outputs(Integer.class, true, null),
-            axis(Integer.class, true, 0),
+            num_outputs(Long.class, true, null),
+            axis(Long.class, true, 0),
             ;
 
                 final Class<?> t;
@@ -27144,7 +27144,7 @@ public Split transform(CopyContext cc, OpTransformer ot) {
             return new Split(this, cc);
         }
 
-        Split(TypeElement resultType, Value input, java.util.Optional<Value> split, java.util.Optional<Integer> num_outputs, java.util.Optional<Integer> axis) {
+        Split(TypeElement resultType, Value input, java.util.Optional<Value> split, java.util.Optional<Long> num_outputs, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(input, split), List.of(num_outputs, axis));
         }
 
@@ -27167,19 +27167,19 @@ public java.util.Optional<Value> split() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> num_outputs() {
-            Integer num_outputs = Attribute.num_outputs.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> num_outputs() {
+            Long num_outputs = Attribute.num_outputs.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(num_outputs);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static Split Split(TypeElement resultType, Value input, java.util.Optional<Value> split, java.util.Optional<Integer> num_outputs, java.util.Optional<Integer> axis) {
+    public static Split Split(TypeElement resultType, Value input, java.util.Optional<Value> split, java.util.Optional<Long> num_outputs, java.util.Optional<Long> axis) {
         return new Split(resultType, input, split, num_outputs, axis);
     }
 
@@ -27188,8 +27188,8 @@ public static final class SplitToSequence extends OnnxOp {
         public static final String NAME = "SplitToSequence";
 
         public enum Attribute implements OnnxAttribute {
-            keepdims(Integer.class, true, 1),
-            axis(Integer.class, true, 0),
+            keepdims(Long.class, true, 1L),
+            axis(Long.class, true, 0L),
             ;
 
                 final Class<?> t;
@@ -27303,7 +27303,7 @@ public SplitToSequence transform(CopyContext cc, OpTransformer ot) {
             return new SplitToSequence(this, cc);
         }
 
-        SplitToSequence(TypeElement resultType, Value input, java.util.Optional<Value> split, java.util.Optional<Integer> keepdims, java.util.Optional<Integer> axis) {
+        SplitToSequence(TypeElement resultType, Value input, java.util.Optional<Value> split, java.util.Optional<Long> keepdims, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(input, split), List.of(keepdims, axis));
         }
 
@@ -27326,19 +27326,19 @@ public java.util.Optional<Value> split() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> keepdims() {
-            Integer keepdims = Attribute.keepdims.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> keepdims() {
+            Long keepdims = Attribute.keepdims.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(keepdims);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static SplitToSequence SplitToSequence(TypeElement resultType, Value input, java.util.Optional<Value> split, java.util.Optional<Integer> keepdims, java.util.Optional<Integer> axis) {
+    public static SplitToSequence SplitToSequence(TypeElement resultType, Value input, java.util.Optional<Value> split, java.util.Optional<Long> keepdims, java.util.Optional<Long> axis) {
         return new SplitToSequence(resultType, input, split, keepdims, axis);
     }
 
@@ -27700,7 +27700,7 @@ public static final class StringNormalizer extends OnnxOp {
         public static final String NAME = "StringNormalizer";
 
         public enum Attribute implements OnnxAttribute {
-            is_case_sensitive(Integer.class, true, 0),
+            is_case_sensitive(Long.class, true, 0L),
             locale(String.class, true, null),
             stopwords(String[].class, true, null),
             case_change_action(String.class, true, "NONE"),
@@ -27799,7 +27799,7 @@ public StringNormalizer transform(CopyContext cc, OpTransformer ot) {
             return new StringNormalizer(this, cc);
         }
 
-        StringNormalizer(TypeElement resultType, Value X, java.util.Optional<Integer> is_case_sensitive, java.util.Optional<String> locale, java.util.Optional<String[]> stopwords, java.util.Optional<String> case_change_action) {
+        StringNormalizer(TypeElement resultType, Value X, java.util.Optional<Long> is_case_sensitive, java.util.Optional<String> locale, java.util.Optional<String[]> stopwords, java.util.Optional<String> case_change_action) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(is_case_sensitive, locale, stopwords, case_change_action));
         }
 
@@ -27817,8 +27817,8 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> is_case_sensitive() {
-            Integer is_case_sensitive = Attribute.is_case_sensitive.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> is_case_sensitive() {
+            Long is_case_sensitive = Attribute.is_case_sensitive.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(is_case_sensitive);
         }
 
@@ -27839,7 +27839,7 @@ public java.util.Optional<String> case_change_action() {
 
     }
 
-    public static StringNormalizer StringNormalizer(TypeElement resultType, Value X, java.util.Optional<Integer> is_case_sensitive, java.util.Optional<String> locale, java.util.Optional<String[]> stopwords, java.util.Optional<String> case_change_action) {
+    public static StringNormalizer StringNormalizer(TypeElement resultType, Value X, java.util.Optional<Long> is_case_sensitive, java.util.Optional<String> locale, java.util.Optional<String[]> stopwords, java.util.Optional<String> case_change_action) {
         return new StringNormalizer(resultType, X, is_case_sensitive, locale, stopwords, case_change_action);
     }
 
@@ -27849,7 +27849,7 @@ public static final class StringSplit extends OnnxOp {
 
         public enum Attribute implements OnnxAttribute {
             delimiter(String.class, true, null),
-            maxsplit(Integer.class, true, null),
+            maxsplit(Long.class, true, null),
             ;
 
                 final Class<?> t;
@@ -27963,7 +27963,7 @@ public StringSplit transform(CopyContext cc, OpTransformer ot) {
             return new StringSplit(this, cc);
         }
 
-        StringSplit(TypeElement resultType, Value X, java.util.Optional<String> delimiter, java.util.Optional<Integer> maxsplit) {
+        StringSplit(TypeElement resultType, Value X, java.util.Optional<String> delimiter, java.util.Optional<Long> maxsplit) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(delimiter, maxsplit));
         }
 
@@ -27986,14 +27986,14 @@ public java.util.Optional<String> delimiter() {
             return java.util.Optional.ofNullable(delimiter);
         }
 
-        public java.util.Optional<Integer> maxsplit() {
-            Integer maxsplit = Attribute.maxsplit.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> maxsplit() {
+            Long maxsplit = Attribute.maxsplit.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(maxsplit);
         }
 
     }
 
-    public static StringSplit StringSplit(TypeElement resultType, Value X, java.util.Optional<String> delimiter, java.util.Optional<Integer> maxsplit) {
+    public static StringSplit StringSplit(TypeElement resultType, Value X, java.util.Optional<String> delimiter, java.util.Optional<Long> maxsplit) {
         return new StringSplit(resultType, X, delimiter, maxsplit);
     }
 
@@ -28463,15 +28463,15 @@ public static final class TfIdfVectorizer extends OnnxOp {
         public static final String NAME = "TfIdfVectorizer";
 
         public enum Attribute implements OnnxAttribute {
-            ngram_counts(int[].class, false, null),
-            min_gram_length(Integer.class, false, null),
+            ngram_counts(long[].class, false, null),
+            min_gram_length(Long.class, false, null),
             pool_strings(String[].class, true, null),
             mode(String.class, false, null),
-            max_gram_length(Integer.class, false, null),
-            max_skip_count(Integer.class, false, null),
-            pool_int64s(int[].class, true, null),
+            max_gram_length(Long.class, false, null),
+            max_skip_count(Long.class, false, null),
+            pool_int64s(long[].class, true, null),
             weights(float[].class, true, null),
-            ngram_indexes(int[].class, false, null),
+            ngram_indexes(long[].class, false, null),
             ;
 
                 final Class<?> t;
@@ -28583,7 +28583,7 @@ public TfIdfVectorizer transform(CopyContext cc, OpTransformer ot) {
             return new TfIdfVectorizer(this, cc);
         }
 
-        TfIdfVectorizer(TypeElement resultType, Value X, int[] ngram_counts, int min_gram_length, java.util.Optional<String[]> pool_strings, String mode, int max_gram_length, int max_skip_count, java.util.Optional<int[]> pool_int64s, java.util.Optional<float[]> weights, int[] ngram_indexes) {
+        TfIdfVectorizer(TypeElement resultType, Value X, long[] ngram_counts, long min_gram_length, java.util.Optional<String[]> pool_strings, String mode, long max_gram_length, long max_skip_count, java.util.Optional<long[]> pool_int64s, java.util.Optional<float[]> weights, long[] ngram_indexes) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(ngram_counts, min_gram_length, pool_strings, mode, max_gram_length, max_skip_count, pool_int64s, weights, ngram_indexes));
         }
 
@@ -28601,13 +28601,13 @@ public Value X() {
             return operands().get(0);
         }
 
-        public int[] ngram_counts() {
-            int[] ngram_counts = Attribute.ngram_counts.access(int[].class, onnxAttributes);
+        public long[] ngram_counts() {
+            long[] ngram_counts = Attribute.ngram_counts.access(long[].class, onnxAttributes);
             return ngram_counts.clone();
         }
 
-        public int min_gram_length() {
-            int min_gram_length = Attribute.min_gram_length.access(int.class, onnxAttributes);
+        public long min_gram_length() {
+            long min_gram_length = Attribute.min_gram_length.access(long.class, onnxAttributes);
             return min_gram_length;
         }
 
@@ -28621,19 +28621,19 @@ public String mode() {
             return mode;
         }
 
-        public int max_gram_length() {
-            int max_gram_length = Attribute.max_gram_length.access(int.class, onnxAttributes);
+        public long max_gram_length() {
+            long max_gram_length = Attribute.max_gram_length.access(long.class, onnxAttributes);
             return max_gram_length;
         }
 
-        public int max_skip_count() {
-            int max_skip_count = Attribute.max_skip_count.access(int.class, onnxAttributes);
+        public long max_skip_count() {
+            long max_skip_count = Attribute.max_skip_count.access(long.class, onnxAttributes);
             return max_skip_count;
         }
 
-        public java.util.Optional<int[]> pool_int64s() {
-            int[] pool_int64s = Attribute.pool_int64s.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(pool_int64s).map(int[]::clone);
+        public java.util.Optional<long[]> pool_int64s() {
+            long[] pool_int64s = Attribute.pool_int64s.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(pool_int64s).map(long[]::clone);
         }
 
         public java.util.Optional<float[]> weights() {
@@ -28641,14 +28641,14 @@ public java.util.Optional<float[]> weights() {
             return java.util.Optional.ofNullable(weights).map(float[]::clone);
         }
 
-        public int[] ngram_indexes() {
-            int[] ngram_indexes = Attribute.ngram_indexes.access(int[].class, onnxAttributes);
+        public long[] ngram_indexes() {
+            long[] ngram_indexes = Attribute.ngram_indexes.access(long[].class, onnxAttributes);
             return ngram_indexes.clone();
         }
 
     }
 
-    public static TfIdfVectorizer TfIdfVectorizer(TypeElement resultType, Value X, int[] ngram_counts, int min_gram_length, java.util.Optional<String[]> pool_strings, String mode, int max_gram_length, int max_skip_count, java.util.Optional<int[]> pool_int64s, java.util.Optional<float[]> weights, int[] ngram_indexes) {
+    public static TfIdfVectorizer TfIdfVectorizer(TypeElement resultType, Value X, long[] ngram_counts, long min_gram_length, java.util.Optional<String[]> pool_strings, String mode, long max_gram_length, long max_skip_count, java.util.Optional<long[]> pool_int64s, java.util.Optional<float[]> weights, long[] ngram_indexes) {
         return new TfIdfVectorizer(resultType, X, ngram_counts, min_gram_length, pool_strings, mode, max_gram_length, max_skip_count, pool_int64s, weights, ngram_indexes);
     }
 
@@ -28922,9 +28922,9 @@ public static final class TopK extends OnnxOp {
         public static final String NAME = "TopK";
 
         public enum Attribute implements OnnxAttribute {
-            largest(Integer.class, true, 1),
-            sorted(Integer.class, true, 1),
-            axis(Integer.class, true, -1),
+            largest(Long.class, true, 1L),
+            sorted(Long.class, true, 1L),
+            axis(Long.class, true, -1L),
             ;
 
                 final Class<?> t;
@@ -29038,7 +29038,7 @@ public TopK transform(CopyContext cc, OpTransformer ot) {
             return new TopK(this, cc);
         }
 
-        TopK(TypeElement resultType, Value X, Value K, java.util.Optional<Integer> largest, java.util.Optional<Integer> sorted, java.util.Optional<Integer> axis) {
+        TopK(TypeElement resultType, Value X, Value K, java.util.Optional<Long> largest, java.util.Optional<Long> sorted, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, Set.of(), List.of(X, K), List.of(largest, sorted, axis));
         }
 
@@ -29060,24 +29060,24 @@ public Value K() {
             return operands().get(1);
         }
 
-        public java.util.Optional<Integer> largest() {
-            Integer largest = Attribute.largest.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> largest() {
+            Long largest = Attribute.largest.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(largest);
         }
 
-        public java.util.Optional<Integer> sorted() {
-            Integer sorted = Attribute.sorted.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> sorted() {
+            Long sorted = Attribute.sorted.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(sorted);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static TopK TopK(TypeElement resultType, Value X, Value K, java.util.Optional<Integer> largest, java.util.Optional<Integer> sorted, java.util.Optional<Integer> axis) {
+    public static TopK TopK(TypeElement resultType, Value X, Value K, java.util.Optional<Long> largest, java.util.Optional<Long> sorted, java.util.Optional<Long> axis) {
         return new TopK(resultType, X, K, largest, sorted, axis);
     }
 
@@ -29086,7 +29086,7 @@ public static final class Transpose extends OnnxOp {
         public static final String NAME = "Transpose";
 
         public enum Attribute implements OnnxAttribute {
-            perm(int[].class, true, null),
+            perm(long[].class, true, null),
             ;
 
                 final Class<?> t;
@@ -29197,7 +29197,7 @@ public Transpose transform(CopyContext cc, OpTransformer ot) {
             return new Transpose(this, cc);
         }
 
-        Transpose(TypeElement resultType, Value data, java.util.Optional<int[]> perm) {
+        Transpose(TypeElement resultType, Value data, java.util.Optional<long[]> perm) {
             super(SCHEMA, resultType, Set.of(), List.of(data), List.of(perm));
         }
 
@@ -29215,14 +29215,14 @@ public Value data() {
             return operands().get(0);
         }
 
-        public java.util.Optional<int[]> perm() {
-            int[] perm = Attribute.perm.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(perm).map(int[]::clone);
+        public java.util.Optional<long[]> perm() {
+            long[] perm = Attribute.perm.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(perm).map(long[]::clone);
         }
 
     }
 
-    public static Transpose Transpose(TypeElement resultType, Value data, java.util.Optional<int[]> perm) {
+    public static Transpose Transpose(TypeElement resultType, Value data, java.util.Optional<long[]> perm) {
         return new Transpose(resultType, data, perm);
     }
 
@@ -29231,20 +29231,20 @@ public static final class TreeEnsemble extends OnnxOp {
         public static final String NAME = "TreeEnsemble";
 
         public enum Attribute implements OnnxAttribute {
-            aggregate_function(Integer.class, true, 1),
+            aggregate_function(Long.class, true, 1L),
             nodes_hitrates(byte[].class, true, null),
-            nodes_featureids(int[].class, false, null),
-            nodes_falseleafs(int[].class, false, null),
-            post_transform(Integer.class, true, 0),
-            nodes_trueleafs(int[].class, false, null),
+            nodes_featureids(long[].class, false, null),
+            nodes_falseleafs(long[].class, false, null),
+            post_transform(Long.class, true, 0L),
+            nodes_trueleafs(long[].class, false, null),
             nodes_modes(byte[].class, false, null),
-            nodes_falsenodeids(int[].class, false, null),
-            nodes_truenodeids(int[].class, false, null),
+            nodes_falsenodeids(long[].class, false, null),
+            nodes_truenodeids(long[].class, false, null),
             leaf_weights(byte[].class, false, null),
-            leaf_targetids(int[].class, false, null),
-            tree_roots(int[].class, false, null),
-            n_targets(Integer.class, true, null),
-            nodes_missing_value_tracks_true(int[].class, true, null),
+            leaf_targetids(long[].class, false, null),
+            tree_roots(long[].class, false, null),
+            n_targets(Long.class, true, null),
+            nodes_missing_value_tracks_true(long[].class, true, null),
             membership_values(byte[].class, true, null),
             nodes_splits(byte[].class, false, null),
             ;
@@ -29357,7 +29357,7 @@ public TreeEnsemble transform(CopyContext cc, OpTransformer ot) {
             return new TreeEnsemble(this, cc);
         }
 
-        TreeEnsemble(TypeElement resultType, Value X, java.util.Optional<Integer> aggregate_function, java.util.Optional<byte[]> nodes_hitrates, int[] nodes_featureids, int[] nodes_falseleafs, java.util.Optional<Integer> post_transform, int[] nodes_trueleafs, byte[] nodes_modes, int[] nodes_falsenodeids, int[] nodes_truenodeids, byte[] leaf_weights, int[] leaf_targetids, int[] tree_roots, java.util.Optional<Integer> n_targets, java.util.Optional<int[]> nodes_missing_value_tracks_true, java.util.Optional<byte[]> membership_values, byte[] nodes_splits) {
+        TreeEnsemble(TypeElement resultType, Value X, java.util.Optional<Long> aggregate_function, java.util.Optional<byte[]> nodes_hitrates, long[] nodes_featureids, long[] nodes_falseleafs, java.util.Optional<Long> post_transform, long[] nodes_trueleafs, byte[] nodes_modes, long[] nodes_falsenodeids, long[] nodes_truenodeids, byte[] leaf_weights, long[] leaf_targetids, long[] tree_roots, java.util.Optional<Long> n_targets, java.util.Optional<long[]> nodes_missing_value_tracks_true, java.util.Optional<byte[]> membership_values, byte[] nodes_splits) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(aggregate_function, nodes_hitrates, nodes_featureids, nodes_falseleafs, post_transform, nodes_trueleafs, nodes_modes, nodes_falsenodeids, nodes_truenodeids, leaf_weights, leaf_targetids, tree_roots, n_targets, nodes_missing_value_tracks_true, membership_values, nodes_splits));
         }
 
@@ -29375,8 +29375,8 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> aggregate_function() {
-            Integer aggregate_function = Attribute.aggregate_function.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> aggregate_function() {
+            Long aggregate_function = Attribute.aggregate_function.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(aggregate_function);
         }
 
@@ -29385,23 +29385,23 @@ public java.util.Optional<byte[]> nodes_hitrates() {
             return java.util.Optional.ofNullable(nodes_hitrates).map(byte[]::clone);
         }
 
-        public int[] nodes_featureids() {
-            int[] nodes_featureids = Attribute.nodes_featureids.access(int[].class, onnxAttributes);
+        public long[] nodes_featureids() {
+            long[] nodes_featureids = Attribute.nodes_featureids.access(long[].class, onnxAttributes);
             return nodes_featureids.clone();
         }
 
-        public int[] nodes_falseleafs() {
-            int[] nodes_falseleafs = Attribute.nodes_falseleafs.access(int[].class, onnxAttributes);
+        public long[] nodes_falseleafs() {
+            long[] nodes_falseleafs = Attribute.nodes_falseleafs.access(long[].class, onnxAttributes);
             return nodes_falseleafs.clone();
         }
 
-        public java.util.Optional<Integer> post_transform() {
-            Integer post_transform = Attribute.post_transform.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> post_transform() {
+            Long post_transform = Attribute.post_transform.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(post_transform);
         }
 
-        public int[] nodes_trueleafs() {
-            int[] nodes_trueleafs = Attribute.nodes_trueleafs.access(int[].class, onnxAttributes);
+        public long[] nodes_trueleafs() {
+            long[] nodes_trueleafs = Attribute.nodes_trueleafs.access(long[].class, onnxAttributes);
             return nodes_trueleafs.clone();
         }
 
@@ -29410,13 +29410,13 @@ public byte[] nodes_modes() {
             return nodes_modes.clone();
         }
 
-        public int[] nodes_falsenodeids() {
-            int[] nodes_falsenodeids = Attribute.nodes_falsenodeids.access(int[].class, onnxAttributes);
+        public long[] nodes_falsenodeids() {
+            long[] nodes_falsenodeids = Attribute.nodes_falsenodeids.access(long[].class, onnxAttributes);
             return nodes_falsenodeids.clone();
         }
 
-        public int[] nodes_truenodeids() {
-            int[] nodes_truenodeids = Attribute.nodes_truenodeids.access(int[].class, onnxAttributes);
+        public long[] nodes_truenodeids() {
+            long[] nodes_truenodeids = Attribute.nodes_truenodeids.access(long[].class, onnxAttributes);
             return nodes_truenodeids.clone();
         }
 
@@ -29425,24 +29425,24 @@ public byte[] leaf_weights() {
             return leaf_weights.clone();
         }
 
-        public int[] leaf_targetids() {
-            int[] leaf_targetids = Attribute.leaf_targetids.access(int[].class, onnxAttributes);
+        public long[] leaf_targetids() {
+            long[] leaf_targetids = Attribute.leaf_targetids.access(long[].class, onnxAttributes);
             return leaf_targetids.clone();
         }
 
-        public int[] tree_roots() {
-            int[] tree_roots = Attribute.tree_roots.access(int[].class, onnxAttributes);
+        public long[] tree_roots() {
+            long[] tree_roots = Attribute.tree_roots.access(long[].class, onnxAttributes);
             return tree_roots.clone();
         }
 
-        public java.util.Optional<Integer> n_targets() {
-            Integer n_targets = Attribute.n_targets.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> n_targets() {
+            Long n_targets = Attribute.n_targets.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(n_targets);
         }
 
-        public java.util.Optional<int[]> nodes_missing_value_tracks_true() {
-            int[] nodes_missing_value_tracks_true = Attribute.nodes_missing_value_tracks_true.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_missing_value_tracks_true).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_missing_value_tracks_true() {
+            long[] nodes_missing_value_tracks_true = Attribute.nodes_missing_value_tracks_true.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_missing_value_tracks_true).map(long[]::clone);
         }
 
         public java.util.Optional<byte[]> membership_values() {
@@ -29457,7 +29457,7 @@ public byte[] nodes_splits() {
 
     }
 
-    public static TreeEnsemble TreeEnsemble(TypeElement resultType, Value X, java.util.Optional<Integer> aggregate_function, java.util.Optional<byte[]> nodes_hitrates, int[] nodes_featureids, int[] nodes_falseleafs, java.util.Optional<Integer> post_transform, int[] nodes_trueleafs, byte[] nodes_modes, int[] nodes_falsenodeids, int[] nodes_truenodeids, byte[] leaf_weights, int[] leaf_targetids, int[] tree_roots, java.util.Optional<Integer> n_targets, java.util.Optional<int[]> nodes_missing_value_tracks_true, java.util.Optional<byte[]> membership_values, byte[] nodes_splits) {
+    public static TreeEnsemble TreeEnsemble(TypeElement resultType, Value X, java.util.Optional<Long> aggregate_function, java.util.Optional<byte[]> nodes_hitrates, long[] nodes_featureids, long[] nodes_falseleafs, java.util.Optional<Long> post_transform, long[] nodes_trueleafs, byte[] nodes_modes, long[] nodes_falsenodeids, long[] nodes_truenodeids, byte[] leaf_weights, long[] leaf_targetids, long[] tree_roots, java.util.Optional<Long> n_targets, java.util.Optional<long[]> nodes_missing_value_tracks_true, java.util.Optional<byte[]> membership_values, byte[] nodes_splits) {
         return new TreeEnsemble(resultType, X, aggregate_function, nodes_hitrates, nodes_featureids, nodes_falseleafs, post_transform, nodes_trueleafs, nodes_modes, nodes_falsenodeids, nodes_truenodeids, leaf_weights, leaf_targetids, tree_roots, n_targets, nodes_missing_value_tracks_true, membership_values, nodes_splits);
     }
 
@@ -29466,24 +29466,24 @@ public static final class TreeEnsembleClassifier extends OnnxOp {
         public static final String NAME = "TreeEnsembleClassifier";
 
         public enum Attribute implements OnnxAttribute {
-            classlabels_int64s(int[].class, true, null),
-            class_ids(int[].class, true, null),
+            classlabels_int64s(long[].class, true, null),
+            class_ids(long[].class, true, null),
             nodes_hitrates(float[].class, true, null),
-            nodes_featureids(int[].class, true, null),
-            nodes_treeids(int[].class, true, null),
+            nodes_featureids(long[].class, true, null),
+            nodes_treeids(long[].class, true, null),
             class_weights_as_tensor(byte[].class, true, null),
             post_transform(String.class, true, "NONE"),
             nodes_modes(String[].class, true, null),
-            nodes_falsenodeids(int[].class, true, null),
+            nodes_falsenodeids(long[].class, true, null),
             classlabels_strings(String[].class, true, null),
-            nodes_truenodeids(int[].class, true, null),
-            nodes_nodeids(int[].class, true, null),
+            nodes_truenodeids(long[].class, true, null),
+            nodes_nodeids(long[].class, true, null),
             nodes_hitrates_as_tensor(byte[].class, true, null),
             class_weights(float[].class, true, null),
             base_values_as_tensor(byte[].class, true, null),
-            nodes_missing_value_tracks_true(int[].class, true, null),
-            class_nodeids(int[].class, true, null),
-            class_treeids(int[].class, true, null),
+            nodes_missing_value_tracks_true(long[].class, true, null),
+            class_nodeids(long[].class, true, null),
+            class_treeids(long[].class, true, null),
             base_values(float[].class, true, null),
             nodes_values(float[].class, true, null),
             nodes_values_as_tensor(byte[].class, true, null),
@@ -29599,7 +29599,7 @@ public TreeEnsembleClassifier transform(CopyContext cc, OpTransformer ot) {
             return new TreeEnsembleClassifier(this, cc);
         }
 
-        TreeEnsembleClassifier(TypeElement resultType, Value X, java.util.Optional<int[]> classlabels_int64s, java.util.Optional<int[]> class_ids, java.util.Optional<float[]> nodes_hitrates, java.util.Optional<int[]> nodes_featureids, java.util.Optional<int[]> nodes_treeids, java.util.Optional<byte[]> class_weights_as_tensor, java.util.Optional<String> post_transform, java.util.Optional<String[]> nodes_modes, java.util.Optional<int[]> nodes_falsenodeids, java.util.Optional<String[]> classlabels_strings, java.util.Optional<int[]> nodes_truenodeids, java.util.Optional<int[]> nodes_nodeids, java.util.Optional<byte[]> nodes_hitrates_as_tensor, java.util.Optional<float[]> class_weights, java.util.Optional<byte[]> base_values_as_tensor, java.util.Optional<int[]> nodes_missing_value_tracks_true, java.util.Optional<int[]> class_nodeids, java.util.Optional<int[]> class_treeids, java.util.Optional<float[]> base_values, java.util.Optional<float[]> nodes_values, java.util.Optional<byte[]> nodes_values_as_tensor) {
+        TreeEnsembleClassifier(TypeElement resultType, Value X, java.util.Optional<long[]> classlabels_int64s, java.util.Optional<long[]> class_ids, java.util.Optional<float[]> nodes_hitrates, java.util.Optional<long[]> nodes_featureids, java.util.Optional<long[]> nodes_treeids, java.util.Optional<byte[]> class_weights_as_tensor, java.util.Optional<String> post_transform, java.util.Optional<String[]> nodes_modes, java.util.Optional<long[]> nodes_falsenodeids, java.util.Optional<String[]> classlabels_strings, java.util.Optional<long[]> nodes_truenodeids, java.util.Optional<long[]> nodes_nodeids, java.util.Optional<byte[]> nodes_hitrates_as_tensor, java.util.Optional<float[]> class_weights, java.util.Optional<byte[]> base_values_as_tensor, java.util.Optional<long[]> nodes_missing_value_tracks_true, java.util.Optional<long[]> class_nodeids, java.util.Optional<long[]> class_treeids, java.util.Optional<float[]> base_values, java.util.Optional<float[]> nodes_values, java.util.Optional<byte[]> nodes_values_as_tensor) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(classlabels_int64s, class_ids, nodes_hitrates, nodes_featureids, nodes_treeids, class_weights_as_tensor, post_transform, nodes_modes, nodes_falsenodeids, classlabels_strings, nodes_truenodeids, nodes_nodeids, nodes_hitrates_as_tensor, class_weights, base_values_as_tensor, nodes_missing_value_tracks_true, class_nodeids, class_treeids, base_values, nodes_values, nodes_values_as_tensor));
         }
 
@@ -29617,14 +29617,14 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<int[]> classlabels_int64s() {
-            int[] classlabels_int64s = Attribute.classlabels_int64s.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(classlabels_int64s).map(int[]::clone);
+        public java.util.Optional<long[]> classlabels_int64s() {
+            long[] classlabels_int64s = Attribute.classlabels_int64s.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(classlabels_int64s).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> class_ids() {
-            int[] class_ids = Attribute.class_ids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(class_ids).map(int[]::clone);
+        public java.util.Optional<long[]> class_ids() {
+            long[] class_ids = Attribute.class_ids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(class_ids).map(long[]::clone);
         }
 
         public java.util.Optional<float[]> nodes_hitrates() {
@@ -29632,14 +29632,14 @@ public java.util.Optional<float[]> nodes_hitrates() {
             return java.util.Optional.ofNullable(nodes_hitrates).map(float[]::clone);
         }
 
-        public java.util.Optional<int[]> nodes_featureids() {
-            int[] nodes_featureids = Attribute.nodes_featureids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_featureids).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_featureids() {
+            long[] nodes_featureids = Attribute.nodes_featureids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_featureids).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> nodes_treeids() {
-            int[] nodes_treeids = Attribute.nodes_treeids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_treeids).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_treeids() {
+            long[] nodes_treeids = Attribute.nodes_treeids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_treeids).map(long[]::clone);
         }
 
         public java.util.Optional<byte[]> class_weights_as_tensor() {
@@ -29657,9 +29657,9 @@ public java.util.Optional<String[]> nodes_modes() {
             return java.util.Optional.ofNullable(nodes_modes).map(String[]::clone);
         }
 
-        public java.util.Optional<int[]> nodes_falsenodeids() {
-            int[] nodes_falsenodeids = Attribute.nodes_falsenodeids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_falsenodeids).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_falsenodeids() {
+            long[] nodes_falsenodeids = Attribute.nodes_falsenodeids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_falsenodeids).map(long[]::clone);
         }
 
         public java.util.Optional<String[]> classlabels_strings() {
@@ -29667,14 +29667,14 @@ public java.util.Optional<String[]> classlabels_strings() {
             return java.util.Optional.ofNullable(classlabels_strings).map(String[]::clone);
         }
 
-        public java.util.Optional<int[]> nodes_truenodeids() {
-            int[] nodes_truenodeids = Attribute.nodes_truenodeids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_truenodeids).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_truenodeids() {
+            long[] nodes_truenodeids = Attribute.nodes_truenodeids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_truenodeids).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> nodes_nodeids() {
-            int[] nodes_nodeids = Attribute.nodes_nodeids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_nodeids).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_nodeids() {
+            long[] nodes_nodeids = Attribute.nodes_nodeids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_nodeids).map(long[]::clone);
         }
 
         public java.util.Optional<byte[]> nodes_hitrates_as_tensor() {
@@ -29692,19 +29692,19 @@ public java.util.Optional<byte[]> base_values_as_tensor() {
             return java.util.Optional.ofNullable(base_values_as_tensor).map(byte[]::clone);
         }
 
-        public java.util.Optional<int[]> nodes_missing_value_tracks_true() {
-            int[] nodes_missing_value_tracks_true = Attribute.nodes_missing_value_tracks_true.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_missing_value_tracks_true).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_missing_value_tracks_true() {
+            long[] nodes_missing_value_tracks_true = Attribute.nodes_missing_value_tracks_true.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_missing_value_tracks_true).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> class_nodeids() {
-            int[] class_nodeids = Attribute.class_nodeids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(class_nodeids).map(int[]::clone);
+        public java.util.Optional<long[]> class_nodeids() {
+            long[] class_nodeids = Attribute.class_nodeids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(class_nodeids).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> class_treeids() {
-            int[] class_treeids = Attribute.class_treeids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(class_treeids).map(int[]::clone);
+        public java.util.Optional<long[]> class_treeids() {
+            long[] class_treeids = Attribute.class_treeids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(class_treeids).map(long[]::clone);
         }
 
         public java.util.Optional<float[]> base_values() {
@@ -29724,7 +29724,7 @@ public java.util.Optional<byte[]> nodes_values_as_tensor() {
 
     }
 
-    public static TreeEnsembleClassifier TreeEnsembleClassifier(TypeElement resultType, Value X, java.util.Optional<int[]> classlabels_int64s, java.util.Optional<int[]> class_ids, java.util.Optional<float[]> nodes_hitrates, java.util.Optional<int[]> nodes_featureids, java.util.Optional<int[]> nodes_treeids, java.util.Optional<byte[]> class_weights_as_tensor, java.util.Optional<String> post_transform, java.util.Optional<String[]> nodes_modes, java.util.Optional<int[]> nodes_falsenodeids, java.util.Optional<String[]> classlabels_strings, java.util.Optional<int[]> nodes_truenodeids, java.util.Optional<int[]> nodes_nodeids, java.util.Optional<byte[]> nodes_hitrates_as_tensor, java.util.Optional<float[]> class_weights, java.util.Optional<byte[]> base_values_as_tensor, java.util.Optional<int[]> nodes_missing_value_tracks_true, java.util.Optional<int[]> class_nodeids, java.util.Optional<int[]> class_treeids, java.util.Optional<float[]> base_values, java.util.Optional<float[]> nodes_values, java.util.Optional<byte[]> nodes_values_as_tensor) {
+    public static TreeEnsembleClassifier TreeEnsembleClassifier(TypeElement resultType, Value X, java.util.Optional<long[]> classlabels_int64s, java.util.Optional<long[]> class_ids, java.util.Optional<float[]> nodes_hitrates, java.util.Optional<long[]> nodes_featureids, java.util.Optional<long[]> nodes_treeids, java.util.Optional<byte[]> class_weights_as_tensor, java.util.Optional<String> post_transform, java.util.Optional<String[]> nodes_modes, java.util.Optional<long[]> nodes_falsenodeids, java.util.Optional<String[]> classlabels_strings, java.util.Optional<long[]> nodes_truenodeids, java.util.Optional<long[]> nodes_nodeids, java.util.Optional<byte[]> nodes_hitrates_as_tensor, java.util.Optional<float[]> class_weights, java.util.Optional<byte[]> base_values_as_tensor, java.util.Optional<long[]> nodes_missing_value_tracks_true, java.util.Optional<long[]> class_nodeids, java.util.Optional<long[]> class_treeids, java.util.Optional<float[]> base_values, java.util.Optional<float[]> nodes_values, java.util.Optional<byte[]> nodes_values_as_tensor) {
         return new TreeEnsembleClassifier(resultType, X, classlabels_int64s, class_ids, nodes_hitrates, nodes_featureids, nodes_treeids, class_weights_as_tensor, post_transform, nodes_modes, nodes_falsenodeids, classlabels_strings, nodes_truenodeids, nodes_nodeids, nodes_hitrates_as_tensor, class_weights, base_values_as_tensor, nodes_missing_value_tracks_true, class_nodeids, class_treeids, base_values, nodes_values, nodes_values_as_tensor);
     }
 
@@ -29736,21 +29736,21 @@ public enum Attribute implements OnnxAttribute {
             aggregate_function(String.class, true, "SUM"),
             nodes_hitrates(float[].class, true, null),
             target_weights_as_tensor(byte[].class, true, null),
-            nodes_featureids(int[].class, true, null),
-            target_treeids(int[].class, true, null),
-            nodes_treeids(int[].class, true, null),
+            nodes_featureids(long[].class, true, null),
+            target_treeids(long[].class, true, null),
+            nodes_treeids(long[].class, true, null),
             post_transform(String.class, true, "NONE"),
             nodes_modes(String[].class, true, null),
             target_weights(float[].class, true, null),
-            nodes_falsenodeids(int[].class, true, null),
-            target_ids(int[].class, true, null),
-            nodes_truenodeids(int[].class, true, null),
-            target_nodeids(int[].class, true, null),
-            nodes_nodeids(int[].class, true, null),
+            nodes_falsenodeids(long[].class, true, null),
+            target_ids(long[].class, true, null),
+            nodes_truenodeids(long[].class, true, null),
+            target_nodeids(long[].class, true, null),
+            nodes_nodeids(long[].class, true, null),
             nodes_hitrates_as_tensor(byte[].class, true, null),
             base_values_as_tensor(byte[].class, true, null),
-            n_targets(Integer.class, true, null),
-            nodes_missing_value_tracks_true(int[].class, true, null),
+            n_targets(Long.class, true, null),
+            nodes_missing_value_tracks_true(long[].class, true, null),
             base_values(float[].class, true, null),
             nodes_values(float[].class, true, null),
             nodes_values_as_tensor(byte[].class, true, null),
@@ -29864,7 +29864,7 @@ public TreeEnsembleRegressor transform(CopyContext cc, OpTransformer ot) {
             return new TreeEnsembleRegressor(this, cc);
         }
 
-        TreeEnsembleRegressor(TypeElement resultType, Value X, java.util.Optional<String> aggregate_function, java.util.Optional<float[]> nodes_hitrates, java.util.Optional<byte[]> target_weights_as_tensor, java.util.Optional<int[]> nodes_featureids, java.util.Optional<int[]> target_treeids, java.util.Optional<int[]> nodes_treeids, java.util.Optional<String> post_transform, java.util.Optional<String[]> nodes_modes, java.util.Optional<float[]> target_weights, java.util.Optional<int[]> nodes_falsenodeids, java.util.Optional<int[]> target_ids, java.util.Optional<int[]> nodes_truenodeids, java.util.Optional<int[]> target_nodeids, java.util.Optional<int[]> nodes_nodeids, java.util.Optional<byte[]> nodes_hitrates_as_tensor, java.util.Optional<byte[]> base_values_as_tensor, java.util.Optional<Integer> n_targets, java.util.Optional<int[]> nodes_missing_value_tracks_true, java.util.Optional<float[]> base_values, java.util.Optional<float[]> nodes_values, java.util.Optional<byte[]> nodes_values_as_tensor) {
+        TreeEnsembleRegressor(TypeElement resultType, Value X, java.util.Optional<String> aggregate_function, java.util.Optional<float[]> nodes_hitrates, java.util.Optional<byte[]> target_weights_as_tensor, java.util.Optional<long[]> nodes_featureids, java.util.Optional<long[]> target_treeids, java.util.Optional<long[]> nodes_treeids, java.util.Optional<String> post_transform, java.util.Optional<String[]> nodes_modes, java.util.Optional<float[]> target_weights, java.util.Optional<long[]> nodes_falsenodeids, java.util.Optional<long[]> target_ids, java.util.Optional<long[]> nodes_truenodeids, java.util.Optional<long[]> target_nodeids, java.util.Optional<long[]> nodes_nodeids, java.util.Optional<byte[]> nodes_hitrates_as_tensor, java.util.Optional<byte[]> base_values_as_tensor, java.util.Optional<Long> n_targets, java.util.Optional<long[]> nodes_missing_value_tracks_true, java.util.Optional<float[]> base_values, java.util.Optional<float[]> nodes_values, java.util.Optional<byte[]> nodes_values_as_tensor) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(aggregate_function, nodes_hitrates, target_weights_as_tensor, nodes_featureids, target_treeids, nodes_treeids, post_transform, nodes_modes, target_weights, nodes_falsenodeids, target_ids, nodes_truenodeids, target_nodeids, nodes_nodeids, nodes_hitrates_as_tensor, base_values_as_tensor, n_targets, nodes_missing_value_tracks_true, base_values, nodes_values, nodes_values_as_tensor));
         }
 
@@ -29897,19 +29897,19 @@ public java.util.Optional<byte[]> target_weights_as_tensor() {
             return java.util.Optional.ofNullable(target_weights_as_tensor).map(byte[]::clone);
         }
 
-        public java.util.Optional<int[]> nodes_featureids() {
-            int[] nodes_featureids = Attribute.nodes_featureids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_featureids).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_featureids() {
+            long[] nodes_featureids = Attribute.nodes_featureids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_featureids).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> target_treeids() {
-            int[] target_treeids = Attribute.target_treeids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(target_treeids).map(int[]::clone);
+        public java.util.Optional<long[]> target_treeids() {
+            long[] target_treeids = Attribute.target_treeids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(target_treeids).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> nodes_treeids() {
-            int[] nodes_treeids = Attribute.nodes_treeids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_treeids).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_treeids() {
+            long[] nodes_treeids = Attribute.nodes_treeids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_treeids).map(long[]::clone);
         }
 
         public java.util.Optional<String> post_transform() {
@@ -29927,29 +29927,29 @@ public java.util.Optional<float[]> target_weights() {
             return java.util.Optional.ofNullable(target_weights).map(float[]::clone);
         }
 
-        public java.util.Optional<int[]> nodes_falsenodeids() {
-            int[] nodes_falsenodeids = Attribute.nodes_falsenodeids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_falsenodeids).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_falsenodeids() {
+            long[] nodes_falsenodeids = Attribute.nodes_falsenodeids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_falsenodeids).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> target_ids() {
-            int[] target_ids = Attribute.target_ids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(target_ids).map(int[]::clone);
+        public java.util.Optional<long[]> target_ids() {
+            long[] target_ids = Attribute.target_ids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(target_ids).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> nodes_truenodeids() {
-            int[] nodes_truenodeids = Attribute.nodes_truenodeids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_truenodeids).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_truenodeids() {
+            long[] nodes_truenodeids = Attribute.nodes_truenodeids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_truenodeids).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> target_nodeids() {
-            int[] target_nodeids = Attribute.target_nodeids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(target_nodeids).map(int[]::clone);
+        public java.util.Optional<long[]> target_nodeids() {
+            long[] target_nodeids = Attribute.target_nodeids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(target_nodeids).map(long[]::clone);
         }
 
-        public java.util.Optional<int[]> nodes_nodeids() {
-            int[] nodes_nodeids = Attribute.nodes_nodeids.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_nodeids).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_nodeids() {
+            long[] nodes_nodeids = Attribute.nodes_nodeids.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_nodeids).map(long[]::clone);
         }
 
         public java.util.Optional<byte[]> nodes_hitrates_as_tensor() {
@@ -29962,14 +29962,14 @@ public java.util.Optional<byte[]> base_values_as_tensor() {
             return java.util.Optional.ofNullable(base_values_as_tensor).map(byte[]::clone);
         }
 
-        public java.util.Optional<Integer> n_targets() {
-            Integer n_targets = Attribute.n_targets.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> n_targets() {
+            Long n_targets = Attribute.n_targets.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(n_targets);
         }
 
-        public java.util.Optional<int[]> nodes_missing_value_tracks_true() {
-            int[] nodes_missing_value_tracks_true = Attribute.nodes_missing_value_tracks_true.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(nodes_missing_value_tracks_true).map(int[]::clone);
+        public java.util.Optional<long[]> nodes_missing_value_tracks_true() {
+            long[] nodes_missing_value_tracks_true = Attribute.nodes_missing_value_tracks_true.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(nodes_missing_value_tracks_true).map(long[]::clone);
         }
 
         public java.util.Optional<float[]> base_values() {
@@ -29989,7 +29989,7 @@ public java.util.Optional<byte[]> nodes_values_as_tensor() {
 
     }
 
-    public static TreeEnsembleRegressor TreeEnsembleRegressor(TypeElement resultType, Value X, java.util.Optional<String> aggregate_function, java.util.Optional<float[]> nodes_hitrates, java.util.Optional<byte[]> target_weights_as_tensor, java.util.Optional<int[]> nodes_featureids, java.util.Optional<int[]> target_treeids, java.util.Optional<int[]> nodes_treeids, java.util.Optional<String> post_transform, java.util.Optional<String[]> nodes_modes, java.util.Optional<float[]> target_weights, java.util.Optional<int[]> nodes_falsenodeids, java.util.Optional<int[]> target_ids, java.util.Optional<int[]> nodes_truenodeids, java.util.Optional<int[]> target_nodeids, java.util.Optional<int[]> nodes_nodeids, java.util.Optional<byte[]> nodes_hitrates_as_tensor, java.util.Optional<byte[]> base_values_as_tensor, java.util.Optional<Integer> n_targets, java.util.Optional<int[]> nodes_missing_value_tracks_true, java.util.Optional<float[]> base_values, java.util.Optional<float[]> nodes_values, java.util.Optional<byte[]> nodes_values_as_tensor) {
+    public static TreeEnsembleRegressor TreeEnsembleRegressor(TypeElement resultType, Value X, java.util.Optional<String> aggregate_function, java.util.Optional<float[]> nodes_hitrates, java.util.Optional<byte[]> target_weights_as_tensor, java.util.Optional<long[]> nodes_featureids, java.util.Optional<long[]> target_treeids, java.util.Optional<long[]> nodes_treeids, java.util.Optional<String> post_transform, java.util.Optional<String[]> nodes_modes, java.util.Optional<float[]> target_weights, java.util.Optional<long[]> nodes_falsenodeids, java.util.Optional<long[]> target_ids, java.util.Optional<long[]> nodes_truenodeids, java.util.Optional<long[]> target_nodeids, java.util.Optional<long[]> nodes_nodeids, java.util.Optional<byte[]> nodes_hitrates_as_tensor, java.util.Optional<byte[]> base_values_as_tensor, java.util.Optional<Long> n_targets, java.util.Optional<long[]> nodes_missing_value_tracks_true, java.util.Optional<float[]> base_values, java.util.Optional<float[]> nodes_values, java.util.Optional<byte[]> nodes_values_as_tensor) {
         return new TreeEnsembleRegressor(resultType, X, aggregate_function, nodes_hitrates, target_weights_as_tensor, nodes_featureids, target_treeids, nodes_treeids, post_transform, nodes_modes, target_weights, nodes_falsenodeids, target_ids, nodes_truenodeids, target_nodeids, nodes_nodeids, nodes_hitrates_as_tensor, base_values_as_tensor, n_targets, nodes_missing_value_tracks_true, base_values, nodes_values, nodes_values_as_tensor);
     }
 
@@ -29998,7 +29998,7 @@ public static final class Trilu extends OnnxOp {
         public static final String NAME = "Trilu";
 
         public enum Attribute implements OnnxAttribute {
-            upper(Integer.class, true, 1),
+            upper(Long.class, true, 1L),
             ;
 
                 final Class<?> t;
@@ -30110,7 +30110,7 @@ public Trilu transform(CopyContext cc, OpTransformer ot) {
             return new Trilu(this, cc);
         }
 
-        Trilu(TypeElement resultType, Value input, java.util.Optional<Value> k, java.util.Optional<Integer> upper) {
+        Trilu(TypeElement resultType, Value input, java.util.Optional<Value> k, java.util.Optional<Long> upper) {
             super(SCHEMA, resultType, Set.of(), List.of(input, k), List.of(upper));
         }
 
@@ -30133,14 +30133,14 @@ public java.util.Optional<Value> k() {
             return i != -1 ? java.util.Optional.of(operands().get(1 + i)) : java.util.Optional.empty();
         }
 
-        public java.util.Optional<Integer> upper() {
-            Integer upper = Attribute.upper.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> upper() {
+            Long upper = Attribute.upper.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(upper);
         }
 
     }
 
-    public static Trilu Trilu(TypeElement resultType, Value input, java.util.Optional<Value> k, java.util.Optional<Integer> upper) {
+    public static Trilu Trilu(TypeElement resultType, Value input, java.util.Optional<Value> k, java.util.Optional<Long> upper) {
         return new Trilu(resultType, input, k, upper);
     }
 
@@ -30149,8 +30149,8 @@ public static final class Unique extends OnnxOp {
         public static final String NAME = "Unique";
 
         public enum Attribute implements OnnxAttribute {
-            sorted(Integer.class, true, 1),
-            axis(Integer.class, true, null),
+            sorted(Long.class, true, 1L),
+            axis(Long.class, true, null),
             ;
 
                 final Class<?> t;
@@ -30264,7 +30264,7 @@ public Unique transform(CopyContext cc, OpTransformer ot) {
             return new Unique(this, cc);
         }
 
-        Unique(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, java.util.Optional<Integer> sorted, java.util.Optional<Integer> axis) {
+        Unique(TypeElement resultType, Set<OutputParameter> optionalOutputs, Value X, java.util.Optional<Long> sorted, java.util.Optional<Long> axis) {
             super(SCHEMA, resultType, optionalOutputs, List.of(X), List.of(sorted, axis));
         }
 
@@ -30282,19 +30282,19 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<Integer> sorted() {
-            Integer sorted = Attribute.sorted.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> sorted() {
+            Long sorted = Attribute.sorted.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(sorted);
         }
 
-        public java.util.Optional<Integer> axis() {
-            Integer axis = Attribute.axis.access(Integer.class, onnxAttributes);
+        public java.util.Optional<Long> axis() {
+            Long axis = Attribute.axis.access(Long.class, onnxAttributes);
             return java.util.Optional.ofNullable(axis);
         }
 
     }
 
-    public static Unique Unique(TypeElement resultType, Set<Unique.OutputParameter> optionalOutputs, Value X, java.util.Optional<Integer> sorted, java.util.Optional<Integer> axis) {
+    public static Unique Unique(TypeElement resultType, Set<Unique.OutputParameter> optionalOutputs, Value X, java.util.Optional<Long> sorted, java.util.Optional<Long> axis) {
         return new Unique(resultType, optionalOutputs, X, sorted, axis);
     }
 
@@ -30817,7 +30817,7 @@ public static final class ZipMap extends OnnxOp {
         public static final String NAME = "ZipMap";
 
         public enum Attribute implements OnnxAttribute {
-            classlabels_int64s(int[].class, true, null),
+            classlabels_int64s(long[].class, true, null),
             classlabels_strings(String[].class, true, null),
             ;
 
@@ -30929,7 +30929,7 @@ public ZipMap transform(CopyContext cc, OpTransformer ot) {
             return new ZipMap(this, cc);
         }
 
-        ZipMap(TypeElement resultType, Value X, java.util.Optional<int[]> classlabels_int64s, java.util.Optional<String[]> classlabels_strings) {
+        ZipMap(TypeElement resultType, Value X, java.util.Optional<long[]> classlabels_int64s, java.util.Optional<String[]> classlabels_strings) {
             super(SCHEMA, resultType, Set.of(), List.of(X), List.of(classlabels_int64s, classlabels_strings));
         }
 
@@ -30947,9 +30947,9 @@ public Value X() {
             return operands().get(0);
         }
 
-        public java.util.Optional<int[]> classlabels_int64s() {
-            int[] classlabels_int64s = Attribute.classlabels_int64s.access(int[].class, onnxAttributes);
-            return java.util.Optional.ofNullable(classlabels_int64s).map(int[]::clone);
+        public java.util.Optional<long[]> classlabels_int64s() {
+            long[] classlabels_int64s = Attribute.classlabels_int64s.access(long[].class, onnxAttributes);
+            return java.util.Optional.ofNullable(classlabels_int64s).map(long[]::clone);
         }
 
         public java.util.Optional<String[]> classlabels_strings() {
@@ -30959,7 +30959,7 @@ public java.util.Optional<String[]> classlabels_strings() {
 
     }
 
-    public static ZipMap ZipMap(TypeElement resultType, Value X, java.util.Optional<int[]> classlabels_int64s, java.util.Optional<String[]> classlabels_strings) {
+    public static ZipMap ZipMap(TypeElement resultType, Value X, java.util.Optional<long[]> classlabels_int64s, java.util.Optional<String[]> classlabels_strings) {
         return new ZipMap(resultType, X, classlabels_int64s, classlabels_strings);
     }
 
diff --git a/cr-examples/onnx/src/test/java/oracle/code/onnx/CNNTest.java b/cr-examples/onnx/src/test/java/oracle/code/onnx/CNNTest.java
index caa3d15a8c1..232d23234d6 100644
--- a/cr-examples/onnx/src/test/java/oracle/code/onnx/CNNTest.java
+++ b/cr-examples/onnx/src/test/java/oracle/code/onnx/CNNTest.java
@@ -28,12 +28,20 @@
 import jdk.incubator.code.Op;
 import jdk.incubator.code.op.CoreOp;
 import jdk.incubator.code.type.FunctionType;
+import jdk.incubator.code.type.TupleType;
+import jdk.incubator.code.writer.OpWriter;
+import oracle.code.onnx.compiler.OnnxTransformer;
 import oracle.code.onnx.ir.OnnxOps;
 import oracle.code.onnx.ir.OnnxType;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Assertions;
 
+import java.io.StringWriter;
+import java.lang.invoke.MethodHandles;
 import java.lang.reflect.Method;
+import java.util.Optional;
 import java.util.Set;
+import java.util.stream.Stream;
 
 import static java.util.Optional.empty;
 import static java.util.Optional.of;
@@ -49,26 +57,28 @@ public class CNNTest {
     private static final int IMAGE_SIZE = 28;
     private static final int NUM_LABELS = 10;
 
-    // (5, 5, NUM_CHANNELS, 32)
-    private Tensor<Float> conv1Weights;
-    // (32)
-    private Tensor<Float> conv1Biases;
-    // (5, 5, 32, 64)
-    private Tensor<Float> conv2Weights;
-    // (64)
-    private Tensor<Float> conv2Biases;
-    // (IMAGE_SIZE * IMAGE_SIZE * 4, 512)
-    private Tensor<Float> fc1Weights;
-    // (512)
-    private Tensor<Float> fc1Biases;
-    // (512, NUM_LABELS)
-    private Tensor<Float> fc2Weights;
-    // (NUM_LABELS)
-    private Tensor<Float> fc2Biases;
-
     @CodeReflection
-    public Tensor<Float> cnn(Tensor<Float> inputImage) {
-        var shape = Constant(new int[]{-1, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS});
+    public static Tensor<Float> cnn(
+            // Weights and biases
+            // (5, 5, NUM_CHANNELS, 32)
+            Tensor<Float> conv1Weights,
+            // (32)
+            Tensor<Float> conv1Biases,
+            // (5, 5, 32, 64)
+            Tensor<Float> conv2Weights,
+            // (64)
+            Tensor<Float> conv2Biases,
+            // (IMAGE_SIZE * IMAGE_SIZE * 4, 512)
+            Tensor<Float> fc1Weights,
+            // (512)
+            Tensor<Float> fc1Biases,
+            // (512, NUM_LABELS)
+            Tensor<Float> fc2Weights,
+            // (NUM_LABELS)
+            Tensor<Float> fc2Biases,
+            // Inputs
+            Tensor<Float> inputImage) {
+        var shape = Constant(new long[]{-1, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS});
         var inputReshaped = Reshape(inputImage, shape, empty());
 
         // Scaling the features
@@ -78,35 +88,35 @@ public Tensor<Float> cnn(Tensor<Float> inputImage) {
 
         // First conv layer
         var conv1 = Conv(scaledInput, conv1Weights, of(conv1Biases), empty(),
-                empty(), of("SAME_UPPER"), of(new int[]{1, 1, 1, 1}),
+                empty(), of("SAME_UPPER"), of(new long[]{1, 1, 1, 1}),
                 empty(), empty());
         var relu1 = Relu(conv1);
 
         // First pooling layer
         var pool1 = MaxPool(relu1, empty(), empty(), of("SAME_UPPER"),
-                empty(), empty(), of(new int[]{1, 2, 2, 1}), new int[]{1, 2, 2, 1});
+                empty(), empty(), of(new long[]{1, 2, 2, 1}), new long[]{1, 2, 2, 1});
 
         // Second conv layer
         var conv2 = Conv(pool1.Y(), conv2Weights, of(conv2Biases), empty(),
-                empty(), of("SAME_UPPER"), of(new int[]{1, 1, 1, 1}),
+                empty(), of("SAME_UPPER"), of(new long[]{1, 1, 1, 1}),
                 empty(), empty());
         var relu2 = Relu(conv2);
 
         // Second pooling layer
         var pool2 = MaxPool(relu2, empty(), empty(), of("SAME_UPPER"),
-                empty(), empty(), of(new int[]{1, 2, 2, 1}), new int[]{1, 2, 2, 1});
+                empty(), empty(), of(new long[]{1, 2, 2, 1}), new long[]{1, 2, 2, 1});
 
         // Flatten inputs
-        var flatShape = Constant(new int[]{0, 3136});
+        var flatShape = Constant(new long[]{0, 3136});
         var flatten = Reshape(pool2.Y(), flatShape, empty());
 
         // Fully connected layer
-        var fc1 = Gemm(flatten, fc1Weights, of(fc1Biases), of(1f), of(1), of(1f), empty());
+        var fc1 = Gemm(flatten, fc1Weights, of(fc1Biases), of(1f), of(1L), of(1f), empty());
         var relu3 = Relu(fc1);
 
         // Softmax layer
-        var fc2 = Gemm(relu3, fc2Weights, of(fc2Biases), of(1f), of(1), of(1f), empty());
-        var prediction = Softmax(fc2, of(1));
+        var fc2 = Gemm(relu3, fc2Weights, of(fc2Biases), of(1f), of(1L), of(1f), empty());
+        var prediction = Softmax(fc2, of(1L));
 
         return prediction;
     }
@@ -116,7 +126,6 @@ CoreOp.FuncOp cnnModel() {
 
         FunctionType functionType = FunctionType.functionType(
                 OnnxType.TENSOR_FLOAT32, // return
-                OnnxType.TENSOR_FLOAT32, // input arg
                 // weights & biases
                 OnnxType.TENSOR_FLOAT32,
                 OnnxType.TENSOR_FLOAT32,
@@ -125,21 +134,23 @@ CoreOp.FuncOp cnnModel() {
                 OnnxType.TENSOR_FLOAT32,
                 OnnxType.TENSOR_FLOAT32,
                 OnnxType.TENSOR_FLOAT32,
+                OnnxType.TENSOR_FLOAT32,
+                // input
                 OnnxType.TENSOR_FLOAT32
         );
 
         return CoreOp.func("cnn", functionType).body(b -> {
-            Block.Parameter inputImage = b.parameters().get(0);
-
             // weights & biases
-            Block.Parameter conv1Weights = b.parameters().get(1);
-            Block.Parameter conv1Biases = b.parameters().get(2);
-            Block.Parameter conv2Weights = b.parameters().get(3);
-            Block.Parameter conv2Biases = b.parameters().get(4);
-            Block.Parameter fc1Weights = b.parameters().get(5);
-            Block.Parameter fc1Biases = b.parameters().get(6);
-            Block.Parameter fc2Weights = b.parameters().get(7);
-            Block.Parameter fc2Biases = b.parameters().get(8);
+            Block.Parameter conv1Weights = b.parameters().get(0);
+            Block.Parameter conv1Biases = b.parameters().get(1);
+            Block.Parameter conv2Weights = b.parameters().get(2);
+            Block.Parameter conv2Biases = b.parameters().get(3);
+            Block.Parameter fc1Weights = b.parameters().get(4);
+            Block.Parameter fc1Biases = b.parameters().get(5);
+            Block.Parameter fc2Weights = b.parameters().get(6);
+            Block.Parameter fc2Biases = b.parameters().get(7);
+
+            Block.Parameter inputImage = b.parameters().get(8);
 
             var shape = b.op(OnnxOps.Constant(OnnxType.TENSOR_INT64,
                     empty(),
@@ -147,7 +158,7 @@ CoreOp.FuncOp cnnModel() {
                     empty(),
                     empty(),
                     empty(),
-                    of(new int[]{-1, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS}),
+                    of(new long[]{-1, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS}),
                     empty(),
                     empty()));
             var inputReshaped = b.op(OnnxOps.Reshape(inputImage.type(),
@@ -184,7 +195,7 @@ CoreOp.FuncOp cnnModel() {
                     empty(),
                     empty(),
                     of("SAME_UPPER"),
-                    of(new int[]{1, 1, 1, 1}),
+                    of(new long[]{1, 1, 1, 1}),
                     empty(),
                     empty()));
             var relu1 = b.op(OnnxOps.Relu(conv1.type(),
@@ -192,17 +203,19 @@ CoreOp.FuncOp cnnModel() {
 
             // First pooling layer
             // @@@ multiple results?
-            var pool1 = b.op(OnnxOps.MaxPool(relu1.type(), Set.of(),
+            var pool1Result = b.op(OnnxOps.MaxPool(TupleType.tupleType(relu1.type(), OnnxType.TENSOR_INT64),
+                    Set.of(OnnxOps.MaxPool.OutputParameter.Indices),
                     relu1,
                     empty(),
                     empty(),
                     of("SAME_UPPER"),
                     empty(),
                     empty(),
-                    of(new int[]{1, 2, 2, 1}),
-                    new int[]{1, 2, 2, 1}));
+                    of(new long[]{1, 2, 2, 1}),
+                    new long[]{1, 2, 2, 1}));
 
             // Second conv layer
+            var pool1 = b.op(CoreOp.tupleLoad(pool1Result, 0));
             var conv2 = b.op(OnnxOps.Conv(pool1.type(),
                     pool1,
                     conv2Weights,
@@ -210,7 +223,7 @@ CoreOp.FuncOp cnnModel() {
                     empty(),
                     empty(),
                     of("SAME_UPPER"),
-                    of(new int[]{1, 1, 1, 1}),
+                    of(new long[]{1, 1, 1, 1}),
                     empty(),
                     empty()));
             var relu2 = b.op(OnnxOps.Relu(conv2.type(),
@@ -218,15 +231,16 @@ CoreOp.FuncOp cnnModel() {
 
             // Second pooling layer
             // @@@ multiple results?
-            var pool2 = b.op(OnnxOps.MaxPool(relu2.type(), Set.of(),
+            var pool2Result = b.op(OnnxOps.MaxPool(TupleType.tupleType(relu2.type(), OnnxType.TENSOR_INT64),
+                    Set.of(OnnxOps.MaxPool.OutputParameter.Indices),
                     relu2,
                     empty(),
                     empty(),
                     of("SAME_UPPER"),
                     empty(),
                     empty(),
-                    of(new int[]{1, 2, 2, 1}),
-                    new int[]{1, 2, 2, 1}));
+                    of(new long[]{1, 2, 2, 1}),
+                    new long[]{1, 2, 2, 1}));
 
             // Flatten inputs
             var flatShape = b.op(OnnxOps.Constant(OnnxType.TENSOR_INT64,
@@ -235,9 +249,10 @@ CoreOp.FuncOp cnnModel() {
                     empty(),
                     empty(),
                     empty(),
-                    of(new int[]{0, 3136}),
+                    of(new long[]{0, 3136}),
                     empty(),
                     empty()));
+            var pool2 = b.op(CoreOp.tupleLoad(pool2Result, 0));
             var flatten = b.op(OnnxOps.Reshape(pool2.type(),
                     pool2,
                     flatShape,
@@ -249,7 +264,7 @@ CoreOp.FuncOp cnnModel() {
                     fc1Weights,
                     of(fc1Biases),
                     of(1f),
-                    of(1),
+                    of(1L),
                     of(1f),
                     empty()));
             var relu3 = b.op(OnnxOps.Relu(fc1.type(),
@@ -261,31 +276,43 @@ CoreOp.FuncOp cnnModel() {
                     fc2Weights,
                     of(fc2Biases),
                     of(1f),
-                    of(1),
+                    of(1L),
                     of(1f),
                     empty()));
             var prediction = b.op(OnnxOps.Softmax(fc2.type(),
                     fc2,
-                    of(1)));
+                    of(1L)));
 
             b.op(CoreOp._return(prediction));
         });
     }
 
     @Test
-    public void test() throws Exception {
-        {
-            Method cnn = CNNTest.class.getMethod("cnn", Tensor.class);
-            CoreOp.FuncOp funcOp = Op.ofMethod(cnn).get();
-            System.out.println(funcOp.toText());
-        }
-
-        {
-            CoreOp.FuncOp funcOp = cnnModel();
-            System.out.println(funcOp.toText());
-        }
+    public void test() {
+        CoreOp.FuncOp f = getFuncOp("cnn");
+        CoreOp.FuncOp onnxModel = OnnxTransformer.transform(MethodHandles.lookup(), f);
+        System.out.println(onnxModel.toText());
+
+        CoreOp.FuncOp expectedOnnxModel = cnnModel();
+        System.out.println(expectedOnnxModel.toText());
+
+        Assertions.assertEquals(serialize(expectedOnnxModel), serialize(onnxModel));
+    }
+
+    static String serialize(Op o) {
+        StringWriter w = new StringWriter();
+        OpWriter.writeTo(w, o, OpWriter.LocationOption.DROP_LOCATION);
+        return w.toString();
     }
 
+    static CoreOp.FuncOp getFuncOp(String name) {
+        Optional<Method> om = Stream.of(CNNTest.class.getDeclaredMethods())
+                .filter(m -> m.getName().equals(name))
+                .findFirst();
+
+        Method m = om.get();
+        return Op.ofMethod(m).get();
+    }
 
     /*
     ONNX code model