Remove ops from Layer Creator / Node Converter - part 5 (#3493)

* remove avgpool op from layer creator

* remove binaryconvolution op from layer creator

* remove broadcast op from layer creator

* remove ctcgreedydecoder op from layer creator

* remove stridedslice op from layer creator

* remove convolutionbackpropdata op from layer creator

* adjust broadcast op to deduce broadcast mode

* add default strides if not provided when creating stridedslice

* code review comments
This commit is contained in:
Bartosz Lesniewski 2020-12-16 17:51:28 +01:00 committed by GitHub
parent 261cb6ecf8
commit 5f9ef0cf26
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 44 additions and 255 deletions

View File

@ -1291,6 +1291,15 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
} }
return res; return res;
}); });
// Dedicated creator for CTCGreedyDecoder: builds a generic CNNLayer from the
// ngraph node and copies the visited attributes into the layer's params map.
addSpecificCreator({"CTCGreedyDecoder"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "CTCGreedyDecoder", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);
res->params = params;
// Legacy IR expects "0"/"1" rather than "false"/"true" for this attribute.
res->params["ctc_merge_repeated"] = res->getBoolStrParamAsIntStr("ctc_merge_repeated");
return res;
});
} }
CNNLayerPtr InferenceEngine::details::CNNLayerCreator::create() { CNNLayerPtr InferenceEngine::details::CNNLayerCreator::create() {
@ -1318,9 +1327,7 @@ void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function
} }
}; };
const static std::vector<std::shared_ptr<Builder::INodeConverter>> convertors = { const static std::vector<std::shared_ptr<Builder::INodeConverter>> convertors = {
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::AvgPool>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::CropIE>>(), std::make_shared<Builder::NodeConverter<::ngraph::op::CropIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::CTCGreedyDecoder>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::DeformableConvolution>>(), std::make_shared<Builder::NodeConverter<::ngraph::op::v1::DeformableConvolution>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::DeformablePSROIPooling>>(), std::make_shared<Builder::NodeConverter<::ngraph::op::v1::DeformablePSROIPooling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Eltwise>>(), std::make_shared<Builder::NodeConverter<::ngraph::op::Eltwise>>(),

View File

@ -550,72 +550,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::v1::DeformableConvolution>::createLayer(
return res; return res;
} }
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::AvgPool>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Converts an ngraph v1::AvgPool node into a legacy IE "Pooling" layer
    // with pool-method == "avg".
    LayerParams params = {layer->get_friendly_name(), "Pooling",
                          details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::PoolingLayer>(params);
    auto castedLayer = ngraph::as_type_ptr<ngraph::op::v1::AvgPool>(layer);
    if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;

    // Serialize a dimension list as comma-separated text, e.g. {1, 2} -> "1,2".
    // ngraph::Shape and ngraph::Strides both derive from std::vector<size_t>,
    // so a single helper replaces the four copy-pasted join loops.
    auto joinDims = [](const std::vector<size_t>& dims) -> std::string {
        std::string value;
        for (const auto& val : dims) {
            if (!value.empty()) value += ",";
            value += asString(val);
        }
        return value;
    };
    res->params["pads_begin"] = joinDims(castedLayer->get_pads_begin());
    res->params["pads_end"] = joinDims(castedLayer->get_pads_end());
    res->params["strides"] = joinDims(castedLayer->get_strides());
    res->params["kernel"] = joinDims(castedLayer->get_kernel());

    switch (castedLayer->get_auto_pad()) {
    case ngraph::op::PadType::VALID:
        res->params["auto_pad"] = "valid";
        break;
    case ngraph::op::PadType::SAME_UPPER:
        res->params["auto_pad"] = "same_upper";
        break;
    case ngraph::op::PadType::SAME_LOWER:
        res->params["auto_pad"] = "same_lower";
        break;
    default:
        // EXPLICIT padding: no "auto_pad" attribute is emitted.
        break;
    }

    auto exclude_pad = castedLayer->get_exclude_pad();
    res->params["exclude-pad"] = exclude_pad ? "true" : "false";
    res->params["pool-method"] = "avg";

    switch (castedLayer->get_rounding_type()) {
    case ngraph::op::RoundingType::CEIL:
        res->params["rounding_type"] = "ceil";
        break;
    case ngraph::op::RoundingType::FLOOR:
        res->params["rounding_type"] = "floor";
        break;
    default:
        THROW_IE_EXCEPTION << "Unsupported ngraph rounding type.";
    }
    return res;
}
template <> template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::MaxPool>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const { CNNLayer::Ptr NodeConverter<ngraph::op::v1::MaxPool>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Pooling", LayerParams params = {layer->get_friendly_name(), "Pooling",
@ -1162,19 +1096,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::NormalizeIE>::createLayer(const std::sha
return res; return res;
} }
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::CTCGreedyDecoder>::createLayer(
    const std::shared_ptr<ngraph::Node>& layer) const {
    // Converts an ngraph CTCGreedyDecoder node into a generic IE CNNLayer.
    LayerParams params = {layer->get_friendly_name(), "CTCGreedyDecoder",
                          details::convertPrecision(layer->get_output_element_type(0))};
    auto decoder = ngraph::as_type_ptr<ngraph::op::CTCGreedyDecoder>(layer);
    if (decoder == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
    auto res = std::make_shared<InferenceEngine::CNNLayer>(params);
    // Legacy IR serializes the boolean flag as "1"/"0".
    res->params["ctc_merge_repeated"] = decoder->get_ctc_merge_repeated() ? "1" : "0";
    return res;
}
template <> template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Erf>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const { CNNLayer::Ptr NodeConverter<ngraph::op::Erf>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Erf", LayerParams params = {layer->get_friendly_name(), "Erf",

View File

@ -16,7 +16,6 @@
#include <sstream> #include <sstream>
#include <string> #include <string>
#include <vector> #include <vector>
#include <ngraph/op/strided_slice.hpp>
#include <ngraph/ops.hpp> #include <ngraph/ops.hpp>
#include <ngraph/opsets/opset.hpp> #include <ngraph/opsets/opset.hpp>
#include <ngraph/opsets/opset2.hpp> #include <ngraph/opsets/opset2.hpp>
@ -397,17 +396,11 @@ std::shared_ptr<ngraph::Node> V10Parser::createNode(const std::vector<ngraph::Ou
const pugi::xml_node& node, const Blob::CPtr& weights, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& params) { const GenericLayerParams& params) {
static std::vector<std::shared_ptr<LayerBaseCreator>> creators = { static std::vector<std::shared_ptr<LayerBaseCreator>> creators = {
std::make_shared<LayerCreator<ngraph::op::v1::AvgPool>>("AvgPool"),
std::make_shared<LayerCreator<ngraph::op::CTCGreedyDecoder>>("CTCGreedyDecoder"),
std::make_shared<LayerCreator<ngraph::op::v1::DeformableConvolution>>("DeformableConvolution"), std::make_shared<LayerCreator<ngraph::op::v1::DeformableConvolution>>("DeformableConvolution"),
std::make_shared<LayerCreator<ngraph::op::v1::DeformablePSROIPooling>>("DeformablePSROIPooling"), std::make_shared<LayerCreator<ngraph::op::v1::DeformablePSROIPooling>>("DeformablePSROIPooling"),
std::make_shared<LayerCreator<ngraph::op::v1::Broadcast>>("Broadcast"),
std::make_shared<LayerCreator<ngraph::op::v1::StridedSlice>>("StridedSlice"),
std::make_shared<LayerCreator<ngraph::op::v1::GreaterEqual>>("GreaterEqual"), std::make_shared<LayerCreator<ngraph::op::v1::GreaterEqual>>("GreaterEqual"),
std::make_shared<LayerCreator<ngraph::op::v1::GroupConvolution>>("GroupConvolution"), std::make_shared<LayerCreator<ngraph::op::v1::GroupConvolution>>("GroupConvolution"),
std::make_shared<LayerCreator<ngraph::op::v1::ConvolutionBackpropData>>("ConvolutionBackpropData"),
std::make_shared<LayerCreator<ngraph::op::v1::GroupConvolutionBackpropData>>("GroupConvolutionBackpropData"), std::make_shared<LayerCreator<ngraph::op::v1::GroupConvolutionBackpropData>>("GroupConvolutionBackpropData"),
std::make_shared<LayerCreator<ngraph::op::v1::BinaryConvolution>>("BinaryConvolution"),
std::make_shared<LayerCreator<ngraph::op::SquaredDifference>>("SquaredDifference"), std::make_shared<LayerCreator<ngraph::op::SquaredDifference>>("SquaredDifference"),
std::make_shared<LayerCreator<ngraph::op::v1::LessEqual>>("LessEqual"), std::make_shared<LayerCreator<ngraph::op::v1::LessEqual>>("LessEqual"),
std::make_shared<LayerCreator<ngraph::op::v1::Equal>>("Equal"), std::make_shared<LayerCreator<ngraph::op::v1::Equal>>("Equal"),
@ -775,20 +768,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v0::LSTMCell>:
activations, activations_alpha, activations_beta, clip); activations, activations_alpha, activations_beta, clip);
} }
// CTCGreedyDecoder layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::CTCGreedyDecoder>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    // Exactly two inputs are expected: data and sequence masks.
    checkParameters(inputs, layerParsePrms, 2);
    const pugi::xml_node dn = node.child("data");
    if (dn.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
    const bool merge_repeated = GetBoolAttr(dn, "ctc_merge_repeated", true);
    return std::make_shared<ngraph::op::CTCGreedyDecoder>(inputs[0], inputs[1], merge_repeated);
}
// SquaredDifference layer // SquaredDifference layer
template <> template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::SquaredDifference>::createLayer( std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::SquaredDifference>::createLayer(
@ -857,44 +836,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::Result>::creat
return std::make_shared<ngraph::op::Result>(inputs[0]); return std::make_shared<ngraph::op::Result>(inputs[0]);
} }
// StridedSlice layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::StridedSlice>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    pugi::xml_node dn = node.child("data");
    // Per-axis mask attributes; a missing attribute yields an empty vector.
    const auto begin_mask = getParameters<int64_t>(dn, "begin_mask");
    const auto end_mask = getParameters<int64_t>(dn, "end_mask");
    const auto new_axis = getParameters<int64_t>(dn, "new_axis_mask");
    const auto shrink_axis = getParameters<int64_t>(dn, "shrink_axis_mask");
    const auto ellipsis_mask = getParameters<int64_t>(dn, "ellipsis_mask");
    switch (inputs.size()) {
    case 3:
        // strides input omitted
        return std::make_shared<ngraph::op::v1::StridedSlice>(inputs[0], inputs[1], inputs[2], begin_mask,
                                                              end_mask, new_axis, shrink_axis, ellipsis_mask);
    case 4:
        // explicit strides input present
        return std::make_shared<ngraph::op::v1::StridedSlice>(inputs[0], inputs[1], inputs[2], inputs[3], begin_mask,
                                                              end_mask, new_axis, shrink_axis, ellipsis_mask);
    default:
        THROW_IE_EXCEPTION << "Incorrect number of inputs " << inputs.size() << " for " << getType() << " layer with name: " << layerParsePrms.name;
    }
}
// Broadcast layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Broadcast>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    // Two inputs: data + target_shape.
    // Three inputs: data + target_shape + axes_mapping.
    if (inputs.size() == 2) {
        return std::make_shared<ngraph::op::v1::Broadcast>(inputs[0], inputs[1]);
    } else if (inputs.size() == 3) {
        // BUGFIX: this branch previously tested layerParsePrms.inputPorts.size()
        // while the first branch (and the indexing below) uses inputs.size();
        // check the actual input vector consistently.
        return std::make_shared<ngraph::op::v1::Broadcast>(inputs[0], inputs[1], inputs[2]);
    }
    THROW_IE_EXCEPTION << "Invalid number of inputs: " << inputs.size();
}
// RegionYolo layer // RegionYolo layer
template <> template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::RegionYolo>::createLayer( std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::RegionYolo>::createLayer(
@ -934,41 +875,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::ReorgYolo>::cr
return std::make_shared<ngraph::op::ReorgYolo>(inputs[0], ngraph::Strides {stride}); return std::make_shared<ngraph::op::ReorgYolo>(inputs[0], ngraph::Strides {stride});
} }
// BinaryConvolution layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::BinaryConvolution>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    checkParameters(inputs, layerParsePrms, 2);
    pugi::xml_node data = node.child("data");
    if (data.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;

    // Grouped binary convolutions are not representable by this op version.
    const size_t group = GetUIntAttr(data, "group", 1);
    if (group != 1) THROW_IE_EXCEPTION << "Cannot create grouped BinaryConvolution layer " << layerParsePrms.name;

    // Map the textual auto_pad attribute onto the ngraph PadType enum;
    // anything unrecognized (including "") means explicit padding.
    const std::string auto_pad = GetStrAttr(data, "auto_pad", "");
    ngraph::op::PadType pad_type = ngraph::op::PadType::EXPLICIT;
    if (auto_pad == "same_lower") {
        pad_type = ngraph::op::PadType::SAME_LOWER;
    } else if (auto_pad == "same_upper") {
        pad_type = ngraph::op::PadType::SAME_UPPER;
    } else if (auto_pad == "valid") {
        pad_type = ngraph::op::PadType::VALID;
    }

    const auto strides = ngraph::Strides(getParameters<size_t>(data, "strides"));
    const auto dilations = ngraph::Strides(getParameters<size_t>(data, "dilations"));
    const auto pads_begin = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(data, "pads_begin"));
    const auto pads_end = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(data, "pads_end"));
    const auto mode = GetStrAttr(data, "mode");
    const auto pad_value = GetFloatAttr(data, "pad_value");
    return std::make_shared<ngraph::op::v1::BinaryConvolution>(inputs[0], inputs[1], strides, pads_begin, pads_end,
                                                               dilations, mode, pad_value, pad_type);
}
// GroupConvolution layer // GroupConvolution layer
template <> template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::GroupConvolution>::createLayer( std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::GroupConvolution>::createLayer(
@ -1032,44 +938,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Deformable
pads_end, dilations, pad_type, group, deformable_group); pads_end, dilations, pad_type, group, deformable_group);
} }
// ConvolutionBackpropData layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::ConvolutionBackpropData>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    pugi::xml_node data = node.child("data");
    if (data.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;

    // Unrecognized or absent auto_pad means explicit padding.
    ngraph::op::PadType pad_type = ngraph::op::PadType::EXPLICIT;
    const std::string auto_pad = GetStrAttr(data, "auto_pad", "");
    if (auto_pad == "same_lower") {
        pad_type = ngraph::op::PadType::SAME_LOWER;
    } else if (auto_pad == "same_upper") {
        pad_type = ngraph::op::PadType::SAME_UPPER;
    } else if (auto_pad == "valid") {
        pad_type = ngraph::op::PadType::VALID;
    }

    const auto strides = ngraph::Strides(getParameters<size_t>(data, "strides"));
    const auto dilations = ngraph::Strides(getParameters<size_t>(data, "dilations"));
    // Padding and output_padding default to empty when absent.
    const auto pads_begin = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(data, "pads_begin", {}));
    const auto pads_end = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(data, "pads_end", {}));
    const auto output_padding = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(data, "output_padding", {}));

    if (inputs.size() != 2 && inputs.size() != 3) {
        THROW_IE_EXCEPTION << layerParsePrms.type << " layer " << layerParsePrms.name << " has incorrect number of input ports!";
    }
    // A third input carries the explicit output shape.
    if (inputs.size() == 3) {
        return std::make_shared<ngraph::op::v1::ConvolutionBackpropData>(inputs[0], inputs[1], inputs[2], strides, pads_begin, pads_end,
                                                                         dilations, pad_type, output_padding);
    }
    return std::make_shared<ngraph::op::v1::ConvolutionBackpropData>(inputs[0], inputs[1], strides, pads_begin, pads_end,
                                                                     dilations, pad_type, output_padding);
}
// GroupConvolutionBackpropData layer // GroupConvolutionBackpropData layer
template <> template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::GroupConvolutionBackpropData>::createLayer( std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::GroupConvolutionBackpropData>::createLayer(
@ -1109,47 +977,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::GroupConvo
} }
} }
// AvgPool layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::AvgPool>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    checkParameters(inputs, layerParsePrms, 1);
    pugi::xml_node dn = node.child("data");
    if (dn.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;

    auto exclude_pad = GetStrAttr(dn, "exclude-pad") == "true";
    auto strides = ngraph::Strides(getParameters<size_t>(dn, "strides"));
    auto kernel = ngraph::Shape(getParameters<size_t>(dn, "kernel"));
    auto pads_begin = ngraph::Shape(getParameters<std::size_t>(dn, "pads_begin"));
    auto pads_end = ngraph::Shape(getParameters<std::size_t>(dn, "pads_end"));

    // Missing/unknown auto_pad means explicit padding.
    auto pad_type = ngraph::op::PadType::EXPLICIT;
    auto pad_type_str = GetStrAttr(dn, "auto_pad", "");
    if (pad_type_str == "same_lower") {
        pad_type = ngraph::op::PadType::SAME_LOWER;
    } else if (pad_type_str == "same_upper") {
        pad_type = ngraph::op::PadType::SAME_UPPER;
    } else if (pad_type_str == "valid") {
        pad_type = ngraph::op::PadType::VALID;
    }

    ngraph::op::RoundingType rounding_type;
    auto str_rounding_type = GetStrAttr(dn, "rounding_type", "floor");
    if (str_rounding_type == "floor") {
        rounding_type = ngraph::op::RoundingType::FLOOR;
    } else if (str_rounding_type == "ceil") {
        rounding_type = ngraph::op::RoundingType::CEIL;
    } else {
        // BUGFIX: corrected typo "Unsuppored" in the thrown error message.
        THROW_IE_EXCEPTION << "Unsupported rounding type: " << str_rounding_type;
    }

    return std::make_shared<ngraph::op::v1::AvgPool>(inputs[0], strides, pads_begin, pads_end, kernel, exclude_pad,
                                                     rounding_type, pad_type);
}
// MaxPool layer // MaxPool layer
template <> template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::MaxPool>::createLayer( std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::MaxPool>::createLayer(

View File

@ -313,6 +313,12 @@ private:
adapter.set(value); adapter.set(value);
} }
// Visitor hook for std::vector<int64_t> attributes: reads the named attribute
// from the layer's <data> XML element and forwards it to the adapter; leaves
// the adapter untouched when the attribute is absent.
void on_adapter(const std::string& name, ngraph::ValueAccessor<std::vector<int64_t>>& adapter) override {
std::vector<int64_t> value;
if (!getParameters<int64_t>(node.child("data"), name, value)) return;
adapter.set(value);
}
void on_adapter(const std::string& name, ngraph::ValueAccessor<std::vector<float>>& adapter) override { void on_adapter(const std::string& name, ngraph::ValueAccessor<std::vector<float>>& adapter) override {
std::vector<float> value; std::vector<float> value;
if (!getParameters<float>(node.child("data"), name, value)) return; if (!getParameters<float>(node.child("data"), name, value)) return;

View File

@ -269,8 +269,28 @@ op::v1::Broadcast::Broadcast(const Output<Node>& arg,
void op::v1::Broadcast::validate_and_infer_types() void op::v1::Broadcast::validate_and_infer_types()
{ {
util::BroadcastBase::validate_and_infer_types(); // m_type is deduced and not always explicitly stated, for cases where broadcast
// has 2 inputs it's always NUMPY mode
if (m_broadcast_spec.m_type == AutoBroadcastType::NONE && get_input_size() < 3)
{
m_broadcast_spec.m_type = AutoBroadcastType::NUMPY;
}
// Mocking axes_mapping input for cases that don't require it
if (m_broadcast_spec.m_type == AutoBroadcastType::NUMPY && get_input_size() < 3)
{
auto output = op::v0::Constant::create(element::u8, Shape{}, {0})->output(0);
set_argument(2, output);
}
// update the base class' mode spec
auto base_spec = to_broadcast_mode(m_broadcast_spec);
if (util::BroadcastBase::m_mode.m_type != base_spec.m_type)
{
util::BroadcastBase::m_mode = base_spec;
}
util::BroadcastBase::validate_and_infer_types();
set_input_is_relevant_to_shape(0); // arg - Result element type set_input_is_relevant_to_shape(0); // arg - Result element type
set_input_is_relevant_to_shape(1); // target_shape - Result shape set_input_is_relevant_to_shape(1); // target_shape - Result shape
set_input_is_relevant_to_shape(2); // axes_mapping - Broadcast type set_input_is_relevant_to_shape(2); // axes_mapping - Broadcast type

View File

@ -172,6 +172,14 @@ void op::v1::StridedSlice::validate_and_infer_types()
")."); ").");
} }
// Fill up strides input with default strides if not set by this point.
if (get_input_size() < 4)
{
set_argument(3,
calculate_default_strides(get_input_node_ptr(1)->output(0),
get_input_node_ptr(2)->output(0)));
}
set_input_is_relevant_to_shape(1); set_input_is_relevant_to_shape(1);
set_input_is_relevant_to_shape(2); set_input_is_relevant_to_shape(2);
set_input_is_relevant_to_shape(3); set_input_is_relevant_to_shape(3);