[GPU] Fix Pad Primitive shape inference (#14282)

Sergey Shlyapnikov 2022-12-13 16:59:55 +04:00 committed by GitHub
parent 43808d9da4
commit 9f481e8eaa
6 changed files with 68 additions and 22 deletions


@@ -25,8 +25,8 @@ layout border_inst::calc_output_layout(border_node const& node, kernel_impl_para
     auto new_dims = input_layout.get_dims();
     for (size_t i = 0; i < new_dims.size(); ++i) {
-        new_dims[i] += desc->pads_begin[i];
-        new_dims[i] += desc->pads_end[i];
+        new_dims[i] += (i < desc->pads_begin.size()) ? desc->pads_begin[i] : 0;
+        new_dims[i] += (i < desc->pads_end.size()) ? desc->pads_end[i] : 0;
     }
     return layout{ input_layout.data_type, input_format, tensor(dims_format, new_dims) };
 }
@@ -129,7 +129,7 @@ border_inst::typed_primitive_inst(network& network, border_node const& node) : p
     if (pad_mode == ov::op::PadMode::SYMMETRIC) {
         bool valid_pads = true;
-        for (size_t i = 0; i < input_sizes.size(); ++i) {
+        for (size_t i = 0; i < argument->pads_begin.size(); ++i) {
             valid_pads &= argument->pads_begin[i] <= input_sizes[i];
             valid_pads &= argument->pads_end[i] <= input_sizes[i];
         }
@@ -140,7 +140,7 @@ border_inst::typed_primitive_inst(network& network, border_node const& node) : p
     } else if (pad_mode == ov::op::PadMode::REFLECT) {
         bool valid_pads = true;
-        for (size_t i = 0; i < input_sizes.size(); ++i) {
+        for (size_t i = 0; i < argument->pads_begin.size(); ++i) {
             valid_pads &= argument->pads_begin[i] < input_sizes[i];
             valid_pads &= argument->pads_end[i] < input_sizes[i];
         }


@@ -31,10 +31,17 @@ struct border_impl : typed_primitive_impl_ocl<border> {
         auto params = get_default_params<kernel_selector::border_params>(impl_param, 1);
         auto optional_params = get_default_optional_params<kernel_selector::border_optional_params>(impl_param.get_program());
-        format pads_format = format::adjust_to_rank(format::bfyx, impl_param.get_input_layout(0).get_rank());
+        size_t rank = impl_param.get_input_layout(0).get_rank();
+        format pads_format = format::adjust_to_rank(format::bfyx, rank);
         std::vector<tensor::value_type> pads_begin(primitive->pads_begin.begin(), primitive->pads_begin.end());
         std::vector<tensor::value_type> pads_end(primitive->pads_end.begin(), primitive->pads_end.end());
+        if (pads_begin.size() < rank) {
+            size_t zeros_to_add = rank - pads_begin.size();
+            pads_begin.insert(pads_begin.end(), zeros_to_add, 0);
+            pads_end.insert(pads_end.end(), zeros_to_add, 0);
+        }
         params.lt_sizes = convert_dim_vector(tensor(pads_format, pads_begin, 0));
         params.rb_sizes = convert_dim_vector(tensor(pads_format, pads_end, 0));
         params.border_value = primitive->pad_value;
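The kernel-parameter change applies the same idea from the other side: before the pad amounts are packed into a fixed-rank bfyx tensor, pad vectors shorter than the input rank are extended with trailing zeros. A rough sketch of that alignment step follows; align_pads_to_rank is a hypothetical free function used only for illustration.

#include <cstddef>
#include <vector>

// Extend pads_begin/pads_end with trailing zeros so that each of the
// `rank` dimensions has an explicit pad amount.
void align_pads_to_rank(std::vector<int>& pads_begin,
                        std::vector<int>& pads_end,
                        size_t rank) {
    if (pads_begin.size() < rank) {
        const size_t zeros_to_add = rank - pads_begin.size();
        pads_begin.insert(pads_begin.end(), zeros_to_add, 0);
        pads_end.insert(pads_end.end(), zeros_to_add, 0);
    }
}

// Example: with pads_begin = {0, 5}, pads_end = {1, 0} and rank = 4,
// the vectors become {0, 5, 0, 0} and {1, 0, 0, 0}.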


@@ -17,30 +17,18 @@ static void CreatePadOp(Program& p, const std::shared_ptr<ngraph::op::v1::Pad>&
     validate_inputs_count(op, {3, 4});
     auto inputs = p.GetInputInfo(op);
     std::string layerName = layer_type_name_ID(op);
-    size_t rank = std::max(op->get_input_partial_shape(0).size(), static_cast<size_t>(4));
     float pad_value = 0.f;
     if (op->get_input_size() == 4) {
         auto const_node = std::dynamic_pointer_cast<ngraph::op::v0::Constant>(op->get_input_node_shared_ptr(3));
-        if (!const_node) {
-            IE_THROW() << "Unsupported const node type in " << op->get_friendly_name() << " (" << op->get_type_name() << ")";
-        }
+        OPENVINO_ASSERT(const_node, "Unsupported const node type in ", op->get_friendly_name(), " (", op->get_type_name(), ")");
         ngraph::op::util::get_single_value(const_node, pad_value);
     }
-    auto pads_begin = op->get_pads_begin();
-    auto pads_end = op->get_pads_end();
-    if (pads_begin.size() < rank) {
-        size_t zeros_to_add = rank - pads_begin.size();
-        pads_begin.insert(pads_begin.end(), zeros_to_add, 0);
-        pads_end.insert(pads_end.end(), zeros_to_add, 0);
-    }
     auto tilePrim = cldnn::border(layerName,
                                   inputs[0],
-                                  pads_begin,
-                                  pads_end,
+                                  op->get_pads_begin(),
+                                  op->get_pads_end(),
                                   op->get_pad_mode(),
                                   pad_value);


@@ -125,6 +125,13 @@ INSTANTIATE_TEST_SUITE_P(smoke, pad_test_single_input,
             layout{ov::PartialShape{4}, data_types::i64, format::bfyx}, {1, 0, 3, 7},
             ov::op::PadMode::CONSTANT, 1.f,
             layout{ov::PartialShape{{1, -1},{5, -1},{5, -1},{8, -1}}, data_types::f32, format::bfyx}
         },
+        {
+            layout{ov::PartialShape::dynamic(2), data_types::f32, format::bfyx},
+            layout{ov::PartialShape{2}, data_types::i64, format::bfyx}, {0, 5},
+            layout{ov::PartialShape{2}, data_types::i64, format::bfyx}, {1, 0},
+            ov::op::PadMode::CONSTANT, 1.f,
+            layout{ov::PartialShape{{1, -1},{5, -1}}, data_types::f32, format::bfyx}
+        }
     }));
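As a quick sanity check on the new dynamic 2D case: in CONSTANT mode each output dimension is input + pads_begin + pads_end, so a fully dynamic rank-2 input with pads_begin {0, 5} and pads_end {1, 0} gets per-dimension lower bounds of 0 + 0 + 1 = 1 and 0 + 5 + 0 = 5, while the upper bounds stay unbounded; that is exactly the expected layout {{1,-1},{5,-1}} above.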


@@ -303,7 +303,7 @@ TEST(canSwapTensorsBetweenInferRequests, outputs) {
        } else {
            iter1++;
            ov::Tensor output_tensor = infer_request1.get_output_tensor();
-            compare_results(output_tensor, ref[iter1 % 2].data());
+            compare_results(output_tensor, ref[0].data());
            if (iter1 < niter_limit) {
                infer_request1.set_output_tensor(output_tensors[(iter1 + 1) % 2]);
                infer_request1.start_async();
@@ -317,7 +317,7 @@ TEST(canSwapTensorsBetweenInferRequests, outputs) {
        } else {
            iter2++;
            ov::Tensor output_tensor = infer_request2.get_output_tensor();
-            compare_results(output_tensor, ref[(iter2 + 1) % 2].data());
+            compare_results(output_tensor, ref[1].data());
            if (iter2 < niter_limit) {
                infer_request2.set_output_tensor(output_tensors[iter2 % 2]);
                infer_request2.start_async();


@@ -91,6 +91,50 @@ const std::vector<ngraph::helpers::PadMode> padMode = {
         ngraph::helpers::PadMode::SYMMETRIC
 };

+/* *======================* Dynamic Shapes Tests 2D *======================* */
+
+const std::vector<InputShape> inputShapesDynamic2D = {
+    {{-1, -1},              // dynamic
+     {{5, 36}, {3, 16}}},   // target
+    {{-1, 32},              // dynamic
+     {{5, 32}}},            // target
+    {{{1, 5}, {16, 32}},    // dynamic
+     {{3, 16}, {5, 24}}},   // target
+};
+
+const std::vector<std::vector<int64_t>> padsBegin2D_Smoke = {{0, 1}, {0, 2}};
+const std::vector<std::vector<int64_t>> padsEnd2D_Smoke = {{0, 2}, {0, 0}};
+
+INSTANTIATE_TEST_SUITE_P(
+        smoke_GPUPadDynamic2DConst,
+        PadLayerGPUTest,
+        ::testing::Combine(
+                ::testing::ValuesIn(inputShapesDynamic2D),
+                ::testing::ValuesIn(inputPrecisions),
+                ::testing::ValuesIn(padsBegin2D_Smoke),
+                ::testing::ValuesIn(padsEnd2D_Smoke),
+                ::testing::ValuesIn(argPadValue),
+                ::testing::Values(ngraph::helpers::PadMode::CONSTANT)),
+        PadLayerGPUTest::getTestCaseName
+);
+
+INSTANTIATE_TEST_SUITE_P(
+        smoke_GPUPadDynamic2D,
+        PadLayerGPUTest,
+        ::testing::Combine(
+                ::testing::ValuesIn(inputShapesDynamic2D),
+                ::testing::ValuesIn(inputPrecisions),
+                ::testing::ValuesIn(padsBegin2D_Smoke),
+                ::testing::ValuesIn(padsEnd2D_Smoke),
+                ::testing::Values(0),
+                ::testing::ValuesIn(padMode)),
+        PadLayerGPUTest::getTestCaseName
+);
+
+/* *======================* *=====================* *======================* */
+
 /* *======================* Dynamic Shapes Tests 4D *======================* */

 const std::vector<InputShape> inputShapesDynamic4D = {