[GPU] Update strided_slice for partially dynamic shape case (#19467)

Kelvin Choi authored on 2023-09-11 14:44:13 +09:00, committed by GitHub
parent 5833e7d55d
commit 2f4f76070f
2 changed files with 96 additions and 60 deletions


@@ -44,8 +44,20 @@ std::vector<layout> strided_slice_inst::calc_output_layouts(strided_slice_node c
if ((begin_data.empty() && !constant_mem.count(1))
|| (end_data.empty() && !constant_mem.count(2))
|| (strides_data.empty() && !constant_mem.count(3))) {
auto out_shape = ov::PartialShape::dynamic(input0_layout.get_partial_shape().size());
return { layout{out_shape, input0_layout.data_type, format::get_default_format(out_shape.rank().get_length())} };
auto input0_pshape = input0_layout.get_partial_shape();
auto input0_len = input0_pshape.size();
auto out_shape = ov::PartialShape::dynamic(input0_len);
if (input0_layout.is_dynamic()) {
// copy leading static dims until the first dynamic dim is found
for (size_t i = 0; i < input0_len; i++) {
if (input0_pshape[i].is_static())
out_shape[i] = input0_pshape[i];
else
break;
}
}
return { layout{out_shape, input0_layout.data_type, format::get_default_format(input0_len)} };
}
ov::op::v1::StridedSlice op;
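
Note: when begin/end/stride are not yet available, the hunk above now keeps the input's leading static dimensions instead of falling back to a fully dynamic output shape. A minimal standalone sketch of that rule, not the plugin code itself, with -1 standing in for ov::Dimension::dynamic():

#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>

// -1 marks a dynamic dimension, mirroring ov::Dimension::dynamic().
std::vector<int64_t> fallback_output_shape(const std::vector<int64_t>& input_pshape) {
    // Start from a fully dynamic shape of the same rank as the input.
    std::vector<int64_t> out(input_pshape.size(), -1);
    const bool input_is_dynamic =
        std::any_of(input_pshape.begin(), input_pshape.end(), [](int64_t d) { return d < 0; });
    if (input_is_dynamic) {
        // Copy leading static dimensions; stop at the first dynamic one.
        for (size_t i = 0; i < input_pshape.size(); ++i) {
            if (input_pshape[i] < 0)
                break;
            out[i] = input_pshape[i];
        }
    }
    return out;
}

int main() {
    // {1, -1, 16, 32} -> {1, -1, -1, -1}: only the leading static dim is preserved.
    for (int64_t d : fallback_output_shape({1, -1, 16, 32}))
        std::cout << d << ' ';
    std::cout << '\n';
    return 0;
}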


@@ -25,11 +25,11 @@ struct StridedSliceParams {
};
typedef std::tuple<
InputShape, // Input shapes
StridedSliceParams,
ElementType, // Element type
ngraph::helpers::InputLayerType, // begin/end/stride input type
std::map<std::string, std::string> // Additional network configuration
ElementType, // Element type
std::vector<ngraph::helpers::InputLayerType>, // begin/end/stride input type
std::map<std::string, std::string> // Additional network configuration
> StridedSliceLayerParamSet;
class StridedSliceLayerGPUTest : public testing::WithParamInterface<StridedSliceLayerParamSet>,
@@ -39,7 +39,7 @@ public:
InputShape shapes;
StridedSliceParams params;
ElementType elementType;
ngraph::helpers::InputLayerType restInputType;
std::vector<ngraph::helpers::InputLayerType> restInputType;
TargetDevice targetDevice;
std::map<std::string, std::string> additionalConfig;
std::tie(shapes, params, elementType, restInputType, additionalConfig) = obj.param;
@@ -59,7 +59,9 @@ public:
results << "new_axis_m=" << (params.newAxisMask.empty() ? "def" : ov::test::utils::vec2str(params.newAxisMask)) << "_";
results << "shrink_m=" << (params.shrinkAxisMask.empty() ? "def" : ov::test::utils::vec2str(params.shrinkAxisMask)) << "_";
results << "ellipsis_m=" << (params.ellipsisAxisMask.empty() ? "def" : ov::test::utils::vec2str(params.ellipsisAxisMask)) << "_";
results << "restInputType=" << restInputType << "_";
results << "beginType=" << restInputType[0] << "_";
results << "endType=" << restInputType[1] << "_";
results << "strideType=" << restInputType[2] << "_";
results << "config=(";
for (const auto& configEntry : additionalConfig) {
results << configEntry.first << ", " << configEntry.second << ":";
@@ -72,32 +74,46 @@ public:
void generate_inputs(const std::vector<ngraph::Shape>& targetInputStaticShapes) override {
inputs.clear();
const auto& funcInputs = function->inputs();
for (size_t i = 0; i < funcInputs.size(); ++i) {
const auto& funcInput = funcInputs[i];
ov::Tensor tensor;
if (i == 1) {
tensor = ov::Tensor(funcInput.get_element_type(), targetInputStaticShapes[i]);
auto *dataPtr = tensor.data<float>();
for (size_t i = 0; i < begin.size(); i++) {
dataPtr[i] = static_cast<float>(begin[i]);
}
} else if (i == 2) {
tensor = ov::Tensor(funcInput.get_element_type(), targetInputStaticShapes[i]);
auto *dataPtr = tensor.data<float>();
for (size_t i = 0; i < end.size(); i++) {
dataPtr[i] = static_cast<float>(end[i]);
}
} else if (i == 3) {
tensor = ov::Tensor(funcInput.get_element_type(), targetInputStaticShapes[i]);
auto *dataPtr = tensor.data<float>();
for (size_t i = 0; i < stride.size(); i++) {
dataPtr[i] = static_cast<float>(stride[i]);
}
} else {
tensor = ov::test::utils::create_and_fill_tensor(funcInput.get_element_type(), targetInputStaticShapes[i]);
ov::Tensor tensor;
// input0: data
int32_t idx = 0;
tensor = ov::test::utils::create_and_fill_tensor(funcInputs[idx].get_element_type(), targetInputStaticShapes[idx]);
inputs.insert({funcInputs[idx].get_node_shared_ptr(), tensor});
// input1: begin
if (restInputType[0] == ngraph::helpers::InputLayerType::PARAMETER) {
idx += 1;
tensor = ov::Tensor(funcInputs[idx].get_element_type(), targetInputStaticShapes[idx]);
auto *dataPtr = tensor.data<float>();
for (size_t i = 0; i < begin.size(); i++) {
dataPtr[i] = static_cast<float>(begin[i]);
}
inputs.insert({funcInput.get_node_shared_ptr(), tensor});
inputs.insert({funcInputs[idx].get_node_shared_ptr(), tensor});
}
// input2: end
if (restInputType[1] == ngraph::helpers::InputLayerType::PARAMETER) {
idx += 1;
tensor = ov::Tensor(funcInputs[idx].get_element_type(), targetInputStaticShapes[idx]);
auto *dataPtr = tensor.data<float>();
for (size_t i = 0; i < end.size(); i++) {
dataPtr[i] = static_cast<float>(end[i]);
}
inputs.insert({funcInputs[idx].get_node_shared_ptr(), tensor});
}
// input3: stride
if (restInputType[2] == ngraph::helpers::InputLayerType::PARAMETER) {
idx += 1;
tensor = ov::Tensor(funcInputs[idx].get_element_type(), targetInputStaticShapes[idx]);
auto *dataPtr = tensor.data<float>();
for (size_t i = 0; i < stride.size(); i++) {
dataPtr[i] = static_cast<float>(stride[i]);
}
inputs.insert({funcInputs[idx].get_node_shared_ptr(), tensor});
}
inferRequestNum++;
}
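
Note: in the rewritten generate_inputs above, the data tensor is always model input 0, and begin/end/stride become extra model inputs only when their InputLayerType is PARAMETER, which is why idx is advanced conditionally rather than fixed at 1/2/3. A small illustrative sketch of that bookkeeping, using a stand-in enum rather than the test's ngraph::helpers type:

#include <array>
#include <cstdio>

// Stand-in for ngraph::helpers::InputLayerType used by the test above.
enum class InputLayerType { CONSTANT, PARAMETER };

int main() {
    // Example combination: begin = PARAMETER, end = CONSTANT, stride = PARAMETER.
    const std::array<InputLayerType, 3> rest = {InputLayerType::PARAMETER,
                                                InputLayerType::CONSTANT,
                                                InputLayerType::PARAMETER};
    const char* names[3] = {"begin", "end", "stride"};
    int idx = 0;  // model input 0 is always the data tensor
    for (int i = 0; i < 3; ++i) {
        if (rest[i] == InputLayerType::PARAMETER)
            std::printf("%s is model input %d\n", names[i], ++idx);
        else
            std::printf("%s is baked into the graph as a Constant\n", names[i]);
    }
    return 0;
}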
@@ -105,12 +121,12 @@ protected:
std::vector<int64_t> begin;
std::vector<int64_t> end;
std::vector<int64_t> stride;
std::vector<ngraph::helpers::InputLayerType> restInputType;
size_t inferRequestNum = 0;
void SetUp() override {
InputShape shapes;
StridedSliceParams ssParams;
ngraph::helpers::InputLayerType restInputType;
std::map<std::string, std::string> additionalConfig;
std::tie(shapes, ssParams, inType, restInputType, additionalConfig) = this->GetParam();
@@ -122,32 +138,42 @@ protected:
std::vector<InputShape> inputShapes;
inputShapes.push_back(shapes);
if (restInputType == ngraph::helpers::InputLayerType::PARAMETER) {
if (restInputType[0] == ngraph::helpers::InputLayerType::PARAMETER)
inputShapes.push_back(InputShape({static_cast<int64_t>(begin.size())}, std::vector<ov::Shape>(shapes.second.size(), {begin.size()})));
if (restInputType[1] == ngraph::helpers::InputLayerType::PARAMETER)
inputShapes.push_back(InputShape({static_cast<int64_t>(end.size())}, std::vector<ov::Shape>(shapes.second.size(), {end.size()})));
if (restInputType[2] == ngraph::helpers::InputLayerType::PARAMETER)
inputShapes.push_back(InputShape({static_cast<int64_t>(stride.size())}, std::vector<ov::Shape>(shapes.second.size(), {stride.size()})));
}
init_input_shapes(inputShapes);
ov::ParameterVector params{std::make_shared<ov::op::v0::Parameter>(inType, inputDynamicShapes.front())};
// auto paramNode = std::make_shared<ngraph::opset1::Parameter>(type, ngraph::Shape(shape));
std::shared_ptr<ov::Node> beginInput, endInput, strideInput;
if (restInputType == ngraph::helpers::InputLayerType::PARAMETER) {
if (restInputType[0] == ngraph::helpers::InputLayerType::PARAMETER) {
auto beginNode = std::make_shared<ngraph::opset1::Parameter>(ngraph::element::Type_t::i64, ov::Shape{begin.size()});
auto endNode = std::make_shared<ngraph::opset1::Parameter>(ngraph::element::Type_t::i64, ov::Shape{end.size()});
auto strideNode = std::make_shared<ngraph::opset1::Parameter>(ngraph::element::Type_t::i64, ov::Shape{stride.size()});
params.push_back(beginNode);
params.push_back(endNode);
params.push_back(strideNode);
beginInput = beginNode;
endInput = endNode;
strideInput = strideNode;
} else {
beginInput = std::make_shared<ngraph::opset1::Constant>(ngraph::element::Type_t::i64, ov::Shape{begin.size()}, begin);
}
if (restInputType[1] == ngraph::helpers::InputLayerType::PARAMETER) {
auto endNode = std::make_shared<ngraph::opset1::Parameter>(ngraph::element::Type_t::i64, ov::Shape{end.size()});
params.push_back(endNode);
endInput = endNode;
} else {
endInput = std::make_shared<ngraph::opset1::Constant>(ngraph::element::Type_t::i64, ov::Shape{end.size()}, end);
}
if (restInputType[2] == ngraph::helpers::InputLayerType::PARAMETER) {
auto strideNode = std::make_shared<ngraph::opset1::Parameter>(ngraph::element::Type_t::i64, ov::Shape{stride.size()});
params.push_back(strideNode);
strideInput = strideNode;
} else {
strideInput = std::make_shared<ngraph::opset1::Constant>(ngraph::element::Type_t::i64, ov::Shape{stride.size()}, stride);
}
auto ss = std::make_shared<ngraph::op::v1::StridedSlice>(params[0], beginInput, endInput, strideInput, ssParams.beginMask, ssParams.endMask,
ssParams.newAxisMask, ssParams.shrinkAxisMask, ssParams.ellipsisAxisMask);
@@ -174,9 +200,15 @@ const std::vector<ElementType> inputPrecisions = {
ElementType::f32
};
const std::vector<ngraph::helpers::InputLayerType> restInputTypes = {
ngraph::helpers::InputLayerType::CONSTANT,
ngraph::helpers::InputLayerType::PARAMETER
const std::vector<std::vector<ngraph::helpers::InputLayerType>> restInputTypes = {
{ngraph::helpers::InputLayerType::CONSTANT, ngraph::helpers::InputLayerType::CONSTANT, ngraph::helpers::InputLayerType::CONSTANT},
{ngraph::helpers::InputLayerType::PARAMETER, ngraph::helpers::InputLayerType::PARAMETER, ngraph::helpers::InputLayerType::PARAMETER},
{ngraph::helpers::InputLayerType::PARAMETER, ngraph::helpers::InputLayerType::CONSTANT, ngraph::helpers::InputLayerType::CONSTANT},
{ngraph::helpers::InputLayerType::CONSTANT, ngraph::helpers::InputLayerType::PARAMETER, ngraph::helpers::InputLayerType::CONSTANT},
{ngraph::helpers::InputLayerType::CONSTANT, ngraph::helpers::InputLayerType::CONSTANT, ngraph::helpers::InputLayerType::PARAMETER},
{ngraph::helpers::InputLayerType::CONSTANT, ngraph::helpers::InputLayerType::PARAMETER, ngraph::helpers::InputLayerType::PARAMETER},
{ngraph::helpers::InputLayerType::PARAMETER, ngraph::helpers::InputLayerType::CONSTANT, ngraph::helpers::InputLayerType::PARAMETER},
{ngraph::helpers::InputLayerType::PARAMETER, ngraph::helpers::InputLayerType::PARAMETER, ngraph::helpers::InputLayerType::CONSTANT},
};
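
Note: restInputTypes[0] is the all-CONSTANT entry and matches the previous single-type behaviour (it is what the static suites below select), restInputTypes[1] makes begin, end and stride all runtime Parameters, and the remaining entries cover every mixed constant/parameter combination of the three inputs.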
const std::vector<InputShape> inputShapesDynamic2D = {
@@ -185,14 +217,10 @@ const std::vector<InputShape> inputShapesDynamic2D = {
{{-1, 16},
{{16, 16}, {20, 16}, {32, 16}}},
{{{16, 32}, {16, 32}},
{{16, 32}, {32, 16}, {24, 24}}},
};
const std::vector<StridedSliceParams> paramsPlain2D = {
StridedSliceParams{ { 0, 10 }, { 16, 16 }, { 1, 1 }, { 0, 0 }, { 0, 0 }, { }, { }, { } },
StridedSliceParams{ { 2, 5 }, { 16, 8 }, { 1, 1 }, { 0, 0 }, { 0, 0 }, { }, { }, { } },
StridedSliceParams{ { 2, 5 }, { 16, 16 }, { 1, 2 }, { 0, 1 }, { 1, 0 }, { }, { }, { } },
StridedSliceParams{ { 0, 0 }, { 16, 16 }, { 2, 1 }, { 0, 0 }, { 1, 0 }, { }, { }, { } },
};
@@ -202,7 +230,7 @@ INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs_Plain_Static_2D, StridedSliceLaye
::testing::ValuesIn(static_shapes_to_test_representation({{32, 20}})),
::testing::ValuesIn(paramsPlain2D),
::testing::ValuesIn(inputPrecisions),
::testing::Values(ngraph::helpers::InputLayerType::CONSTANT),
::testing::Values(restInputTypes[0]),
::testing::Values(emptyAdditionalConfig)),
StridedSliceLayerGPUTest::getTestCaseName);
@@ -217,12 +245,8 @@ INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs_Plain_Dynamic_2D, StridedSliceLay
const std::vector<StridedSliceParams> testCasesCommon4D = {
StridedSliceParams{ { 0, 2, 5, 4 }, { 1, 4, 28, 27 }, { 1, 1, 1, 1 }, { 0, 0, 0, 0 }, { 0, 0, 0, 0 }, { }, { }, { } },
StridedSliceParams{ { 0, 1, 0, 0 }, { 1, 3, 32, 20 }, { 1, 1, 1, 1 }, { 0, 0, 0, 0 }, { 0, 0, 0, 0 }, { }, { }, { } },
StridedSliceParams{ { 0, 0, 10, 0 }, { 1, 3, 20, 20 }, { 1, 1, 1, 1 }, { 0, 0, 0, 0 }, { 0, 1, 0, 0 }, { }, { }, { } },
StridedSliceParams{ { 0, 0, 20, 20 }, { 1, 5, 25, 26 }, { 1, 1, 1, 2 }, { 0, 0, 0, 0 }, { 0, 0, 0, 0 }, { }, { }, { } },
StridedSliceParams{ { 0, 0, 10, 20 }, { 1, 5, 28, 26 }, { 1, 1, 1, 2 }, { 0, 0, 0, 0 }, { 0, 0, 0, 0 }, { }, { }, { } },
StridedSliceParams{ { 0, 0, 0, 20 }, { 1, 2, 30, 30 }, { 1, 1, 2, 1 }, { 0, 0, 0, 1 }, { 0, 1, 0, 1 }, { }, { }, { } },
StridedSliceParams{ { 0, 0, 2, 10 }, { 1, 3, 32, 20 }, { 1, 1, 1, 1 }, { 0, 0, 1, 1 }, { 0, 0, 0, 0 }, { }, { }, { } },
StridedSliceParams{ { 0, 1, 0, 10 }, { 1, 5, 32, 30 }, { 1, 1, 1, 1 }, { 0, 1, 0, 0 }, { 0, 0, 0, 0 }, { }, { }, { } },
StridedSliceParams{ { 0, 1, 2, 10 }, { 1, 5, 32, 18 }, { 1, 1, 1, 2 }, { 0, 0, 1, 0 }, { 0, 0, 0, 1 }, { }, { }, { } },
StridedSliceParams{ { 0, 0, 2, 10 }, { 1, 8, 32, 18 }, { 1, 2, 1, 2 }, { 0, 0, 1, 0 }, { 0, 0, 0, 1 }, { }, { }, { } },
};
@@ -231,11 +255,11 @@ const std::vector<InputShape> inputShapesDynamic4D = {
{{-1, -1, -1, -1},
{{ 1, 5, 32, 32 }, { 2, 5, 32, 32 }, { 1, 5, 64, 64 }}},
{{-1, 5, -1, -1},
{{ 1, 5, 32, 32 }, { 2, 5, 32, 32 }, { 3, 5, 32, 36 }}},
{{1, 64, -1, -1},
{{ 1, 64, 16, 32 }, { 1, 64, 32, 64 }, { 1, 64, 64, 64 }}},
{{{1, 5}, 5, {32, 64}, {32, 64}},
{{ 2, 5, 32, 32 }, { 1, 5, 48, 32 }, { 5, 5, 32, 32 }}},
{{1, -1, 16, 32},
{{ 1, 16, 16, 32 }, { 1, 32, 16, 32 }, { 1, 64, 16, 32 }}},
};
INSTANTIATE_TEST_SUITE_P(smoke_CompareWithRefs_Common_Dynamic_4D, StridedSliceLayerGPUTest,