Skip to content

Commit 33afebf

Browse files
Sunny-Anand, titaiwangms, and Copilot
authored
Cherry pick commit (#7740)
### Motivation and Context: Fixes — [Fix Slice shape inference crashes with SIGABRT on empty dimensions by titaiwangms · Pull Request #7738 · onnx/onnx](#7738); [Rename misleading input_rank param to input_dim_value in processSlice… by titaiwangms · Pull Request #7739 · onnx/onnx](#7739). --------- Signed-off-by: Ti-Tai Wang <titaiwang@microsoft.com> Co-authored-by: Ti-Tai Wang <titaiwang@microsoft.com> Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
1 parent a51ac07 commit 33afebf

3 files changed

Lines changed: 108 additions & 7 deletions

File tree

onnx/defs/tensor/defs.cc

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -789,25 +789,32 @@ result = [
789789
```
790790
)DOC";
791791

792-
static void processSliceInputs(const int64_t input_rank, int64_t& start, int64_t& end, int64_t step) {
792+
static void processSliceInputs(const int64_t input_dim_size_or_value, int64_t& start, int64_t& end, int64_t step) {
793793
// process step
794794
if (step == 0) {
795795
fail_shape_inference("'step' cannot be 0 for Slice");
796796
}
797+
// Empty dimension: clamp bounds are invalid when dimension size is 0,
798+
// so short-circuit to produce a zero-length output.
799+
if (input_dim_size_or_value == 0) {
800+
start = 0;
801+
end = 0;
802+
return;
803+
}
797804
// process start
798805
if (start < 0)
799-
start += input_rank;
806+
start += input_dim_size_or_value;
800807
if (step < 0)
801-
start = std::clamp(start, static_cast<int64_t>(0), input_rank - 1);
808+
start = std::clamp(start, static_cast<int64_t>(0), input_dim_size_or_value - 1);
802809
else
803-
start = std::clamp(start, static_cast<int64_t>(0), input_rank);
810+
start = std::clamp(start, static_cast<int64_t>(0), input_dim_size_or_value);
804811
// process end
805812
if (end < 0)
806-
end += input_rank;
813+
end += input_dim_size_or_value;
807814
if (step < 0)
808-
end = std::clamp(end, static_cast<int64_t>(-1), input_rank - 1);
815+
end = std::clamp(end, static_cast<int64_t>(-1), input_dim_size_or_value - 1);
809816
else
810-
end = std::clamp(end, static_cast<int64_t>(0), input_rank);
817+
end = std::clamp(end, static_cast<int64_t>(0), input_dim_size_or_value);
811818
}
812819

813820
ONNX_OPERATOR_SET_SCHEMA(

onnx/defs/tensor/old.cc

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2632,6 +2632,17 @@ ONNX_OPERATOR_SET_SCHEMA(
26322632

26332633
const auto input_dim_value = input_dim.dim_value();
26342634

2635+
// Empty dimension: clamp bounds are invalid when dimension size is 0,
2636+
// so short-circuit to produce a zero-length output.
2637+
if (input_dim_value == 0) {
2638+
ctx.getOutputType(0)
2639+
->mutable_tensor_type()
2640+
->mutable_shape()
2641+
->mutable_dim(static_cast<int>(axis))
2642+
->set_dim_value(0);
2643+
continue;
2644+
}
2645+
26352646
// process step
26362647
auto step = steps[axis_index];
26372648
if (step == 0) {
@@ -6315,6 +6326,13 @@ ONNX_OPERATOR_SET_SCHEMA(
63156326

63166327
const auto input_dim_value = input_dim.dim_value();
63176328

6329+
// Empty dimension: clamp bounds are invalid when dimension size is 0,
6330+
// so short-circuit to produce a zero-length output.
6331+
if (input_dim_value == 0) {
6332+
ctx.getOutputType(0)->mutable_tensor_type()->mutable_shape()->mutable_dim(axis)->set_dim_value(0);
6333+
continue;
6334+
}
6335+
63186336
// process step
63196337
auto step = steps[axis_index];
63206338
if (step == 0) {

onnx/test/shape_inference_test.py

Lines changed: 76 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2142,6 +2142,82 @@ def test_slice_variable_input_types(self) -> None:
21422142
graph, [make_tensor_value_info("y", TensorProto.DOUBLE, (2, 2))]
21432143
)
21442144

2145+
def test_slice_empty_dim_positive_step(self) -> None:
2146+
"""Slice on empty dimension with positive step should produce dim_value=0."""
2147+
graph = self._make_graph(
2148+
[
2149+
("x", TensorProto.FLOAT, (0, 6)),
2150+
("starts", TensorProto.INT64, (1,)),
2151+
("ends", TensorProto.INT64, (1,)),
2152+
("axes", TensorProto.INT64, (1,)),
2153+
("steps", TensorProto.INT64, (1,)),
2154+
],
2155+
[make_node("Slice", ["x", "starts", "ends", "axes", "steps"], "y")],
2156+
[],
2157+
initializer=[
2158+
make_tensor("starts", TensorProto.INT64, (1,), (0,)),
2159+
make_tensor("ends", TensorProto.INT64, (1,), (0,)),
2160+
make_tensor("axes", TensorProto.INT64, (1,), (0,)),
2161+
make_tensor("steps", TensorProto.INT64, (1,), (1,)),
2162+
],
2163+
)
2164+
self._assert_inferred(
2165+
graph, [make_tensor_value_info("y", TensorProto.FLOAT, (0, 6))]
2166+
)
2167+
2168+
def test_slice_empty_dim_negative_step(self) -> None:
2169+
"""Regression test for issue #7735: std::clamp UB on empty dim with step=-1."""
2170+
graph = self._make_graph(
2171+
[
2172+
("x", TensorProto.FLOAT, (0, 6)),
2173+
("starts", TensorProto.INT64, (1,)),
2174+
("ends", TensorProto.INT64, (1,)),
2175+
("axes", TensorProto.INT64, (1,)),
2176+
("steps", TensorProto.INT64, (1,)),
2177+
],
2178+
[make_node("Slice", ["x", "starts", "ends", "axes", "steps"], "y")],
2179+
[],
2180+
initializer=[
2181+
make_tensor("starts", TensorProto.INT64, (1,), (1,)),
2182+
make_tensor("ends", TensorProto.INT64, (1,), (0,)),
2183+
make_tensor("axes", TensorProto.INT64, (1,), (0,)),
2184+
make_tensor("steps", TensorProto.INT64, (1,), (-1,)),
2185+
],
2186+
)
2187+
self._assert_inferred(
2188+
graph, [make_tensor_value_info("y", TensorProto.FLOAT, (0, 6))]
2189+
)
2190+
2191+
def test_slice_scalar_shape_output(self) -> None:
2192+
"""Shape(scalar) produces 0-length output; Slice on it should not crash."""
2193+
graph = self._make_graph(
2194+
[
2195+
("x", TensorProto.FLOAT, ()),
2196+
("starts", TensorProto.INT64, (1,)),
2197+
("ends", TensorProto.INT64, (1,)),
2198+
("axes", TensorProto.INT64, (1,)),
2199+
("steps", TensorProto.INT64, (1,)),
2200+
],
2201+
[
2202+
make_node("Shape", ["x"], ["shape"]),
2203+
make_node("Slice", ["shape", "starts", "ends", "axes", "steps"], ["y"]),
2204+
],
2205+
[],
2206+
initializer=[
2207+
make_tensor("starts", TensorProto.INT64, (1,), (0,)),
2208+
make_tensor("ends", TensorProto.INT64, (1,), (0,)),
2209+
make_tensor("axes", TensorProto.INT64, (1,), (0,)),
2210+
make_tensor("steps", TensorProto.INT64, (1,), (1,)),
2211+
],
2212+
)
2213+
self._assert_inferred(
2214+
graph,
2215+
[
2216+
make_tensor_value_info("shape", TensorProto.INT64, (0,)),
2217+
make_tensor_value_info("y", TensorProto.INT64, (0,)),
2218+
],
2219+
)
2220+
21452221
def test_conv(self) -> None:
21462222
graph = self._make_graph(
21472223
[

0 commit comments

Comments (0)