Remove ops from Layer Creator/ Node Converter - part 6 (#3620)

* remove deformableconvolution op from layer creator

* remove deformablepsroipooling op from layer creator

* remove maxpool op from layer creator

* remove nonmaxsuppression from layer creator

* remove groupconvolutionbackpropdata op from layer creator

* remove groupconvolution op from layer creator

* fix code style
This commit is contained in:
Bartosz Lesniewski 2021-01-11 05:55:14 +01:00 committed by GitHub
parent 7e8cea672e
commit 11f1a0f671
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 48 additions and 388 deletions

View File

@ -1465,7 +1465,7 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
});
addSpecificCreator({"FakeQuantize"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "FakeQuantize", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::QuantizeLayer>(attrs);
res->params = params;
@ -1473,7 +1473,7 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
});
addSpecificCreator({"ConvolutionIE"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Convolution", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::ConvolutionLayer>(attrs);
res->params = params;
@ -1506,6 +1506,46 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});
// Creator mapping an nGraph DeformableConvolution node onto the legacy
// DeformableConvolutionLayer; serialized node attributes arrive pre-parsed in `params`.
addSpecificCreator({"DeformableConvolution"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "DeformableConvolution", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::DeformableConvolutionLayer>(attrs);
res->params = params;
// Input 2 is the weights tensor: dim 0 is the output channel count,
// dims 2..N-1 are the spatial kernel sizes.
auto shape = node->get_input_shape(2);
std::string value;
res->params["output"] = Builder::asString(shape[0]);
for (size_t i = 2; i < shape.size(); i++) {
if (!value.empty()) value += ",";
value += Builder::asString(shape[i]);
}
res->params["kernel"] = value;
// Legacy IR convention: omit auto_pad entirely when padding is explicit.
if (res->params["auto_pad"] == "explicit") {
res->params.erase("auto_pad");
}
// Attach the weights constant as the layer's weight blob.
const auto weightsNode = node->input_value(2).get_node_shared_ptr();
InferenceEngine::details::addBlob(weightsNode, res, InferenceEngine::details::weights);
return res;
});
// Creator mapping an nGraph DeformablePSROIPooling node onto the legacy
// generic "PSROIPooling" CNNLayer representation.
addSpecificCreator({"DeformablePSROIPooling"}, [](const std::shared_ptr<::ngraph::Node> &node,
const std::map<std::string, std::string> &params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "PSROIPooling", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);
res->params = params;
// With only 2 inputs there is no transformation input, hence no_trans=1.
res->params["no_trans"] = node->get_input_size() == 2 ? "1" : "0";
// temporary workaround due to incorrect usage of group_size in the nGraph operation for the DeformablePSROIPooling
res->params["pooled_height"] = params.at("group_size");
res->params["pooled_width"] = params.at("group_size");
return res;
});
addSpecificCreator({"CTCGreedyDecoder"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "CTCGreedyDecoder", details::convertPrecision(node->get_output_element_type(0))};
@ -1548,13 +1588,10 @@ void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function
};
const static std::vector<std::shared_ptr<Builder::INodeConverter>> convertors = {
std::make_shared<Builder::NodeConverter<::ngraph::op::CropIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::DeformableConvolution>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::DeformablePSROIPooling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Eltwise>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Ceiling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::FullyConnected>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GenericIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::MaxPool>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::PowerIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ReLUIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ResampleV2>>(),

View File

@ -479,142 +479,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::v0::Unsqueeze>::createLayer(const std::s
return res;
}
// Converts an nGraph v1::DeformableConvolution node into a legacy
// DeformableConvolutionLayer, serializing each attribute as a
// comma-separated string in the layer's params map.
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::DeformableConvolution>::createLayer(
const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "DeformableConvolution",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::DeformableConvolutionLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::v1::DeformableConvolution>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
// `value` is reused as a comma-joined accumulator for each vector attribute.
std::string value;
for (const auto& val : castedLayer->get_pads_begin()) {
if (!value.empty()) value += ",";
value += asString(val);
}
res->params["pads_begin"] = value;
value.clear();
for (const auto& val : castedLayer->get_pads_end()) {
if (!value.empty()) value += ",";
value += asString(val);
}
res->params["pads_end"] = value;
// EXPLICIT (the default case) deliberately writes no auto_pad entry.
switch (castedLayer->get_auto_pad()) {
case ngraph::op::PadType::SAME_UPPER:
res->params["auto_pad"] = "same_upper";
break;
case ngraph::op::PadType::SAME_LOWER:
res->params["auto_pad"] = "same_lower";
break;
case ngraph::op::PadType::VALID:
res->params["auto_pad"] = "valid";
break;
default:
break;
}
value.clear();
for (const auto& val : castedLayer->get_strides()) {
if (!value.empty()) value += ",";
value += asString(val);
}
res->params["strides"] = value;
value.clear();
for (const auto& val : castedLayer->get_dilations()) {
if (!value.empty()) value += ",";
value += asString(val);
}
res->params["dilations"] = value;
// Restore kernel size and output
// (input 2 is the weights tensor: dim 0 = output channels, dims 2.. = kernel).
const auto& shape = castedLayer->get_input_shape(2);
res->params["output"] = asString(shape[0]);
value.clear();
for (size_t i = 2; i < shape.size(); i++) {
if (!value.empty()) value += ",";
value += asString(shape[i]);
}
res->params["kernel"] = value;
res->params["group"] = asString(castedLayer->get_group());
res->params["deformable_group"] = asString(castedLayer->get_deformable_group());
// Attach the weights constant as the layer's weight blob.
const auto weightsNode = castedLayer->input_value(2).get_node_shared_ptr();
InferenceEngine::details::addBlob(weightsNode, res, InferenceEngine::details::weights);
return res;
}
// Converts an nGraph v1::MaxPool node into a legacy PoolingLayer
// (type "Pooling" with pool-method=max).
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::MaxPool>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Pooling",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::PoolingLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::v1::MaxPool>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
// `value` is reused as a comma-joined accumulator for each vector attribute.
std::string value;
for (const auto& val : castedLayer->get_pads_begin()) {
if (!value.empty()) value += ",";
value += asString(val);
}
res->params["pads_begin"] = value;
value.clear();
for (const auto& val : castedLayer->get_pads_end()) {
if (!value.empty()) value += ",";
value += asString(val);
}
res->params["pads_end"] = value;
value.clear();
for (const auto& val : castedLayer->get_strides()) {
if (!value.empty()) value += ",";
value += asString(val);
}
res->params["strides"] = value;
value.clear();
for (const auto& val : castedLayer->get_kernel()) {
if (!value.empty()) value += ",";
value += asString(val);
}
res->params["kernel"] = value;
res->params["pool-method"] = "max";
// EXPLICIT (the default case) deliberately writes no auto_pad entry.
switch (castedLayer->get_auto_pad()) {
case ngraph::op::PadType::VALID:
res->params["auto_pad"] = "valid";
break;
case ngraph::op::PadType::SAME_UPPER:
res->params["auto_pad"] = "same_upper";
break;
case ngraph::op::PadType::SAME_LOWER:
res->params["auto_pad"] = "same_lower";
break;
default:
break;
}
// Legacy IR only supports ceil/floor rounding; anything else is fatal.
switch (castedLayer->get_rounding_type()) {
case ngraph::op::RoundingType::CEIL:
res->params["rounding_type"] = "ceil";
break;
case ngraph::op::RoundingType::FLOOR:
res->params["rounding_type"] = "floor";
break;
default:
THROW_IE_EXCEPTION << "Unsupported ngraph rounding type.";
}
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::PSROIPooling>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "PSROIPooling",
@ -633,31 +497,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::PSROIPooling>::createLayer(const std::sh
return res;
}
// Converts an nGraph v1::DeformablePSROIPooling node into a legacy generic
// CNNLayer of type "PSROIPooling", copying every attribute into params.
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::DeformablePSROIPooling>::createLayer(
const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "PSROIPooling",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::v1::DeformablePSROIPooling>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
res->params["output_dim"] = asString(castedLayer->get_output_dim());
res->params["group_size"] = asString(castedLayer->get_group_size());
res->params["spatial_bins_x"] = asString(castedLayer->get_spatial_bins_x());
res->params["spatial_bins_y"] = asString(castedLayer->get_spatial_bins_y());
res->params["spatial_scale"] = asString(castedLayer->get_spatial_scale());
res->params["mode"] = castedLayer->get_mode();
res->params["trans_std"] = asString(castedLayer->get_trans_std());
res->params["part_size"] = asString(castedLayer->get_part_size());
// With only 2 inputs there is no transformation input, hence no_trans=1.
res->params["no_trans"] = layer->get_input_size() == 2 ? "1" : "0";
// temporary workaround due to incorrect usage of group_size in the nGraph operation for the DeformablePSROIPooling
res->params["pooled_height"] = asString(castedLayer->get_group_size());
res->params["pooled_width"] = asString(castedLayer->get_group_size());
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::VariadicSplit>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Split",

View File

@ -623,17 +623,11 @@ std::shared_ptr<ngraph::Node> V10Parser::XmlDeserializer::createNode(
const Blob::CPtr& weights,
const GenericLayerParams& params) {
static std::vector<std::shared_ptr<LayerBaseCreator>> creators = {
std::make_shared<LayerCreator<ngraph::op::v1::DeformableConvolution>>("DeformableConvolution"),
std::make_shared<LayerCreator<ngraph::op::v1::DeformablePSROIPooling>>("DeformablePSROIPooling"),
std::make_shared<LayerCreator<ngraph::op::v1::GreaterEqual>>("GreaterEqual"),
std::make_shared<LayerCreator<ngraph::op::v1::GroupConvolution>>("GroupConvolution"),
std::make_shared<LayerCreator<ngraph::op::v1::GroupConvolutionBackpropData>>("GroupConvolutionBackpropData"),
std::make_shared<LayerCreator<ngraph::op::SquaredDifference>>("SquaredDifference"),
std::make_shared<LayerCreator<ngraph::op::v1::LessEqual>>("LessEqual"),
std::make_shared<LayerCreator<ngraph::op::v1::Equal>>("Equal"),
std::make_shared<LayerCreator<ngraph::op::v0::LSTMCell>>("LSTMCell"),
std::make_shared<LayerCreator<ngraph::op::v1::MaxPool>>("MaxPool"),
std::make_shared<LayerCreator<ngraph::op::v1::NonMaxSuppression>>("NonMaxSuppression"),
std::make_shared<LayerCreator<ngraph::op::ReorgYolo>>("ReorgYolo"),
std::make_shared<LayerCreator<ngraph::op::RegionYolo>>("RegionYolo"),
std::make_shared<LayerCreator<ngraph::op::Result>>("Result"),
@ -1105,148 +1099,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::ReorgYolo>::cr
return std::make_shared<ngraph::op::ReorgYolo>(inputs[0], ngraph::Strides {stride});
}
// GroupConvolution layer
/// Deserializes a GroupConvolution node from its IR v10 <data> attributes.
/// Expects exactly two inputs (data, weights); throws when the <data>
/// element is missing.
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::GroupConvolution>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    checkParameters(inputs, layerParsePrms, 2);
    const pugi::xml_node dataNode = node.child("data");
    if (dataNode.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;

    // Map the textual auto_pad attribute onto the nGraph enum; an absent or
    // unrecognized value means explicit padding.
    ngraph::op::PadType padType = ngraph::op::PadType::EXPLICIT;
    const std::string padMode = GetStrAttr(dataNode, "auto_pad", "");
    if (padMode == "valid") {
        padType = ngraph::op::PadType::VALID;
    } else if (padMode == "same_upper") {
        padType = ngraph::op::PadType::SAME_UPPER;
    } else if (padMode == "same_lower") {
        padType = ngraph::op::PadType::SAME_LOWER;
    }

    // Geometry attributes; pads default to empty when not serialized.
    const auto strides = ngraph::Strides(getParameters<size_t>(dataNode, "strides"));
    const auto dilations = ngraph::Strides(getParameters<size_t>(dataNode, "dilations"));
    const auto padsBegin = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(dataNode, "pads_begin", {}));
    const auto padsEnd = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(dataNode, "pads_end", {}));

    return std::make_shared<ngraph::op::v1::GroupConvolution>(inputs[0], inputs[1], strides, padsBegin, padsEnd,
                                                              dilations, padType);
}
// DeformableConvolution layer
// Deserializes a DeformableConvolution node from its IR v10 <data> attributes.
// Expects exactly three inputs (data, deformable values, weights).
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::DeformableConvolution>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 3);
pugi::xml_node dn = node.child("data");
if (dn.empty())
THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
size_t group = GetUIntAttr(dn, "group");
size_t deformable_group = GetUIntAttr(dn, "deformable_group");
// Absent or unrecognized auto_pad means explicit padding.
ngraph::op::PadType pad_type = ngraph::op::PadType::EXPLICIT;
std::string auto_pad = GetStrAttr(dn, "auto_pad", "");
if (auto_pad == "same_lower") {
pad_type = ngraph::op::PadType::SAME_LOWER;
} else if (auto_pad == "same_upper") {
pad_type = ngraph::op::PadType::SAME_UPPER;
} else if (auto_pad == "valid") {
pad_type = ngraph::op::PadType::VALID;
}
// Note: pads_begin/pads_end have no default here, unlike GroupConvolution.
auto strides = ngraph::Strides(getParameters<size_t>(dn, "strides"));
auto dilations = ngraph::Strides(getParameters<size_t>(dn, "dilations"));
auto pads_begin = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(dn, "pads_begin"));
auto pads_end = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(dn, "pads_end"));
return std::make_shared<ngraph::op::v1::DeformableConvolution>(inputs[0], inputs[1], inputs[2], strides, pads_begin,
pads_end, dilations, pad_type, group, deformable_group);
}
// GroupConvolutionBackpropData layer
// Deserializes a GroupConvolutionBackpropData node from its IR v10 <data>
// attributes. Accepts 2 inputs (data, weights) or 3 (plus output shape).
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::GroupConvolutionBackpropData>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
pugi::xml_node dn = node.child("data");
if (dn.empty())
THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
// Absent or unrecognized auto_pad means explicit padding.
ngraph::op::PadType pad_type = ngraph::op::PadType::EXPLICIT;
std::string auto_pad = GetStrAttr(dn, "auto_pad", "");
if (auto_pad == "same_lower") {
pad_type = ngraph::op::PadType::SAME_LOWER;
} else if (auto_pad == "same_upper") {
pad_type = ngraph::op::PadType::SAME_UPPER;
} else if (auto_pad == "valid") {
pad_type = ngraph::op::PadType::VALID;
}
// Pads and output_padding default to empty when not serialized.
auto strides = ngraph::Strides(getParameters<size_t>(dn, "strides"));
auto dilations = ngraph::Strides(getParameters<size_t>(dn, "dilations"));
auto pads_begin = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(dn, "pads_begin", {}));
auto pads_end = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(dn, "pads_end", {}));
auto output_padding = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(dn, "output_padding", {}));
if (inputs.size() != 3 && inputs.size() != 2) {
THROW_IE_EXCEPTION << layerParsePrms.type << " layer " << layerParsePrms.name << " has incorrect number of input ports!";
}
// The 3-input overload additionally passes the explicit output shape tensor.
if (inputs.size() == 3) {
return std::make_shared<ngraph::op::v1::GroupConvolutionBackpropData>(inputs[0], inputs[1], inputs[2], strides, pads_begin, pads_end,
dilations, pad_type, output_padding);
} else {
return std::make_shared<ngraph::op::v1::GroupConvolutionBackpropData>(inputs[0], inputs[1], strides, pads_begin, pads_end,
dilations, pad_type, output_padding);
}
}
// MaxPool layer
/// Deserializes a MaxPool node from its IR v10 <data> attributes.
/// Expects exactly one input. Throws when the <data> element is missing
/// or when rounding_type is neither "floor" nor "ceil".
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::MaxPool>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    checkParameters(inputs, layerParsePrms, 1);
    pugi::xml_node dn = node.child("data");
    if (dn.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;

    auto strides = ngraph::Strides(getParameters<size_t>(dn, "strides"));
    auto kernel = ngraph::Shape(getParameters<size_t>(dn, "kernel"));
    auto pads_begin = ngraph::Shape(getParameters<std::size_t>(dn, "pads_begin"));
    auto pads_end = ngraph::Shape(getParameters<std::size_t>(dn, "pads_end"));

    // Absent or unrecognized auto_pad means explicit padding.
    auto pad_type = ngraph::op::PadType::EXPLICIT;
    auto pad_type_str = GetStrAttr(dn, "auto_pad", "");
    if (pad_type_str == "same_lower") {
        pad_type = ngraph::op::PadType::SAME_LOWER;
    } else if (pad_type_str == "same_upper") {
        pad_type = ngraph::op::PadType::SAME_UPPER;
    } else if (pad_type_str == "valid") {
        pad_type = ngraph::op::PadType::VALID;
    }

    // rounding_type defaults to "floor"; only floor/ceil are valid.
    ngraph::op::RoundingType rounding_type;
    auto str_rounding_type = GetStrAttr(dn, "rounding_type", "floor");
    if (str_rounding_type == "floor") {
        rounding_type = ngraph::op::RoundingType::FLOOR;
    } else if (str_rounding_type == "ceil") {
        rounding_type = ngraph::op::RoundingType::CEIL;
    } else {
        // Fixed typo in the user-facing message ("Unsuppored" -> "Unsupported").
        THROW_IE_EXCEPTION << "Unsupported rounding type: " << str_rounding_type;
    }

    return std::make_shared<ngraph::op::v1::MaxPool>(inputs[0], strides, pads_begin, pads_end, kernel, rounding_type,
                                                     pad_type);
}
// PSROIPooling layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::PSROIPooling>::createLayer(
@ -1270,42 +1122,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::PSROIPooling>:
spatial_bins_y, mode);
}
// DeformablePSROIPooling layer
// Deserializes a DeformablePSROIPooling node from its IR v10 <data>
// attributes. Accepts 2 inputs (data, ROIs) or 3 (plus transformation
// values); any other input count is an error.
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::DeformablePSROIPooling>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
pugi::xml_node dn = node.child("data");
if (dn.empty())
THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
// output_dim and spatial_scale are mandatory; the rest carry defaults.
auto output_dim = GetIntAttr(dn, "output_dim");
auto group_size = GetIntAttr(dn, "group_size", 1);
auto spatial_bins_x = GetIntAttr(dn, "spatial_bins_x", 1);
auto spatial_bins_y = GetIntAttr(dn, "spatial_bins_y", 1);
auto spatial_scale = GetFloatAttr(dn, "spatial_scale");
auto mode = GetStrAttr(dn, "mode", "bilinear_deformable");
auto trans_std = GetFloatAttr(dn, "trans_std", 1.0)
auto part_size = GetIntAttr(dn, "part_size", 1);
if (inputs.size() == 3) {
return std::make_shared<ngraph::op::v1::DeformablePSROIPooling>(inputs[0],
inputs[1],
inputs[2], output_dim,
spatial_scale, group_size, mode, spatial_bins_x,
spatial_bins_y, trans_std, part_size);
} else if (inputs.size() == 2) {
return std::make_shared<ngraph::op::v1::DeformablePSROIPooling>(inputs[0],
inputs[1], output_dim,
spatial_scale, group_size, mode, spatial_bins_x,
spatial_bins_y, trans_std, part_size);
} else {
THROW_IE_EXCEPTION << "Wrong number of inputs for " << getType() << " layer with name: " << layerParsePrms.name;
}
}
// LogicalAnd layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::LogicalAnd>::createLayer(
@ -1342,36 +1158,4 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::LogicalNot
return std::make_shared<ngraph::op::v1::LogicalNot>(inputs[0]);
}
// NonMaxSuppression layer
// Deserializes a NonMaxSuppression node from its IR v10 <data> attributes.
// Missing optional inputs are padded with scalar constants so the nGraph
// op always receives its full 5-input form.
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::NonMaxSuppression>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
pugi::xml_node dn = node.child("data");
if (dn.empty())
THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
// Only "corner" and "center" box encodings are defined for this op.
auto box_encoding_string = GetStrAttr(dn, "box_encoding");
ngraph::op::v1::NonMaxSuppression::BoxEncodingType box_enc_type;
if (box_encoding_string == "corner") {
box_enc_type = ngraph::op::v1::NonMaxSuppression::BoxEncodingType::CORNER;
} else if (box_encoding_string == "center") {
box_enc_type = ngraph::op::v1::NonMaxSuppression::BoxEncodingType::CENTER;
} else {
THROW_IE_EXCEPTION << "Unsupported box encoding type " << box_encoding_string << " for " << getType() <<
" layer with name: " << layerParsePrms.name;
}
auto sort_flag = GetBoolAttr(dn, "sort_result_descending");
std::vector<ngraph::Output<ngraph::Node>> new_inputs{inputs.begin(), inputs.end()};
// Pad missing inputs: input 2 (max_output_boxes_per_class) defaults to i64
// scalar 0; inputs 3-4 (iou/score thresholds) default to f32 scalar 0.
if (new_inputs.size() == 2)
new_inputs.push_back(ngraph::op::Constant::create(ngraph::element::i64, ngraph::Shape{}, {0}));
for (size_t ind = new_inputs.size(); ind < 5; ++ind)
new_inputs.push_back(ngraph::op::Constant::create(ngraph::element::f32, ngraph::Shape{}, {.0f}));
return std::make_shared<ngraph::op::v1::NonMaxSuppression>(new_inputs[0], new_inputs[1], new_inputs[2], new_inputs[3], new_inputs[4],
box_enc_type, sort_flag);
}
} // namespace InferenceEngine

View File

@ -94,12 +94,12 @@ namespace ngraph
private:
int64_t m_output_dim;
float m_spatial_scale;
int64_t m_group_size;
std::string m_mode;
int64_t m_spatial_bins_x;
int64_t m_spatial_bins_y;
float m_trans_std;
int64_t m_part_size;
int64_t m_group_size = 1;
std::string m_mode = "bilinear";
int64_t m_spatial_bins_x = 1;
int64_t m_spatial_bins_y = 1;
float m_trans_std = 1.f;
int64_t m_part_size = 1;
};
}
}