Remove ops from Layer Creator / Node Converter - part 3 (#3356)

* remove convert op from layer creator

* remove depthtospace op from layer creator

* remove mvn op from layer creator

* remove normalizel2 op from layer creator

* remove notequal op from layer creator

* remove subtract op from layer creator

* correct mvn op behavior when copied with new input

* fix trying to get precision from empty output of normalize layer

* fix normalize layer not setting output type

* remove trailing whitespace

* add fp64 to possible convert op precision types

* use a function to translate bool string representation

* merge emergency opset changes for mvn and roipooling ops
This commit is contained in:
Bartosz Lesniewski 2020-12-08 04:42:47 +01:00 committed by GitHub
parent 305f005605
commit 86347bd909
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 121 additions and 170 deletions

View File

@ -33,8 +33,8 @@ public:
bool get_across_spatial() const { return m_across_spatial;}
void validate_and_infer_types() override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
bool visit_attributes(AttributeVisitor &visitor) override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector &new_args) const override;
protected:
float m_eps;

View File

@ -1029,6 +1029,93 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});
// Creator for the "Convert" op: builds a plain CNNLayer whose "precision"
// parameter records the op's output element type as a canonical string.
addSpecificCreator({"Convert"}, [](const std::shared_ptr<::ngraph::Node>& node,
                                   const std::map<std::string, std::string>& params) -> CNNLayerPtr {
    LayerParams attrs = {node->get_friendly_name(), "Convert",
                         details::convertPrecision(node->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);
    // Translate a supported precision into its canonical string name;
    // any precision outside this set is rejected.
    const auto precisionToString = [](const Precision& prec) -> std::string {
        switch (prec) {
        case Precision::FP16: return "FP16";
        case Precision::BF16: return "BF16";
        case Precision::FP32: return "FP32";
        case Precision::FP64: return "FP64";
        case Precision::I8:   return "I8";
        case Precision::I16:  return "I16";
        case Precision::I32:  return "I32";
        case Precision::I64:  return "I64";
        case Precision::U8:   return "U8";
        case Precision::U16:  return "U16";
        case Precision::U32:  return "U32";
        case Precision::U64:  return "U64";
        case Precision::BOOL: return "BOOL";
        default:
            THROW_IE_EXCEPTION << "Unsupported type";
        }
    };
    res->params["precision"] = precisionToString(details::convertPrecision(node->get_output_element_type(0)));
    return res;
});
// Creator for the "MVN" op: builds an MVNLayer and copies its attributes,
// normalizing the boolean-valued ones to the "0"/"1" form legacy IR expects.
addSpecificCreator({"MVN"}, [](const std::shared_ptr<::ngraph::Node>& node,
                               const std::map<std::string, std::string> &params) -> CNNLayerPtr {
    LayerParams attrs = {node->get_friendly_name(), "MVN",
                         details::convertPrecision(node->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::MVNLayer>(attrs);
    // Copy the raw attribute strings first ...
    for (const auto& key : {"normalize_variance", "eps", "across_channels"}) {
        res->params[key] = params.at(key);
    }
    // ... then convert "true"/"false" bool strings into "1"/"0".
    res->params["normalize_variance"] = res->getBoolStrParamAsIntStr("normalize_variance");
    res->params["across_channels"] = res->getBoolStrParamAsIntStr("across_channels");
    return res;
});
// Creator for "NormalizeIE": produces a legacy NormLayer, converts the
// boolean attributes to "0"/"1", and shares the constant scale weights.
addSpecificCreator({"NormalizeIE"}, [](const std::shared_ptr<::ngraph::Node> &node,
                                       const std::map<std::string, std::string> &params) -> CNNLayerPtr {
    LayerParams attrs = {node->get_friendly_name(), "Normalize",
                         details::convertPrecision(node->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::NormLayer>(attrs);
    res->params = params;
    // Boolean attributes arrive as "true"/"false"; legacy consumers want "1"/"0".
    for (const auto* boolAttr : {"channel_shared", "across_spatial"}) {
        res->params[boolAttr] = res->getBoolStrParamAsIntStr(boolAttr);
    }
    // Input 1 carries the scale weights; expose them as a blob when constant.
    const auto weightsNode = node->input_value(1).get_node_shared_ptr();
    if (const auto constWeights = ngraph::as_type_ptr<ngraph::op::Constant>(weightsNode)) {
        res->blobs["weights"] = InferenceEngine::details::shareWeights(constWeights);
    }
    return res;
});
addSpecificCreator({"Clamp"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Clamp", details::convertPrecision(node->get_output_element_type(0))};
@ -1138,6 +1225,15 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});
// Creator for "Subtract": legacy IR has no dedicated Subtract layer, so the
// op is lowered to an Eltwise layer whose operation is "sub".
addSpecificCreator({"Subtract"}, [](const std::shared_ptr<::ngraph::Node> &node,
                                    const std::map<std::string, std::string> &params) -> CNNLayerPtr {
    LayerParams attrs = {node->get_friendly_name(), "Eltwise",
                         details::convertPrecision(node->get_output_element_type(0))};
    auto eltwise = std::make_shared<InferenceEngine::EltwiseLayer>(attrs);
    eltwise->params["operation"] = "sub";
    return eltwise;
});
addSpecificCreator({"FakeQuantize"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "FakeQuantize", details::convertPrecision(node->get_output_element_type(0))};
@ -1208,19 +1304,16 @@ void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function
const static std::vector<std::shared_ptr<Builder::INodeConverter>> convertors = {
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::AvgPool>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::CropIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Convert>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::CTCGreedyDecoder>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::DeformableConvolution>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::DeformablePSROIPooling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Eltwise>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Ceiling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GatherIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::MVN>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::FullyConnected>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GenericIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::MaxPool>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Minimum>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::NormalizeIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::PowerIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ReLUIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ResampleV2>>(),
@ -1230,7 +1323,6 @@ void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function
std::make_shared<Builder::NodeConverter<::ngraph::op::ScaleShiftIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::SquaredDifference>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::VariadicSplit>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Subtract>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::TensorIterator>>(),
std::make_shared<Builder::NodeConverter<::ngraph::opset5::Loop>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ShuffleChannels>>(),

View File

@ -337,61 +337,6 @@ CNNLayer::Ptr NodeConverter<ngraph::opset5::Loop>::createLayer(const std::shared
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Convert>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Build a plain CNNLayer for Convert; its "precision" parameter stores
    // the output element type as a canonical string.
    LayerParams params = {layer->get_friendly_name(), "Convert",
                          details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::CNNLayer>(params);
    // Canonical string name for each supported precision; anything else is an error.
    const auto precisionToString = [](const Precision& prec) -> std::string {
        switch (prec) {
        case Precision::FP16: return "FP16";
        case Precision::BF16: return "BF16";
        case Precision::FP32: return "FP32";
        case Precision::FP64: return "FP64";
        case Precision::I8:   return "I8";
        case Precision::I16:  return "I16";
        case Precision::I32:  return "I32";
        case Precision::I64:  return "I64";
        case Precision::U8:   return "U8";
        case Precision::U16:  return "U16";
        case Precision::U32:  return "U32";
        case Precision::U64:  return "U64";
        case Precision::BOOL: return "BOOL";
        default:
            THROW_IE_EXCEPTION << "Unsupported type";
        }
    };
    res->params["precision"] = precisionToString(details::convertPrecision(layer->get_output_element_type(0)));
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Ceiling>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Ceiling",
@ -453,23 +398,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::Exp>::createLayer(const std::shared_ptr<
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::MVN>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    LayerParams params = {layer->get_friendly_name(), "MVN", details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::MVNLayer>(params);
    auto castedLayer = ngraph::as_type_ptr<ngraph::op::MVN>(layer);
    if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
    res->params["eps"] = asString(castedLayer->get_eps());
    // "across_channels" is true when the channel axis (axis 1, assuming NCHW-style
    // layout — TODO confirm with callers) is among the reduction axes.
    const size_t channelAxis = 1;  // fixed misspelled identifier (was "chanelAxis")
    ngraph::AxisSet reductionAxes = castedLayer->get_reduction_axes();
    res->params["across_channels"] = asString(reductionAxes.count(channelAxis) > 0);
    res->params["normalize_variance"] = asString(castedLayer->get_normalize_variance());
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::CropIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Crop",
@ -502,15 +430,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::CropIE>::createLayer(const std::shared_p
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::Subtract>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Subtract is represented in legacy IR as an Eltwise layer with operation "sub".
    LayerParams params = {layer->get_friendly_name(), "Eltwise",
                          details::convertPrecision(layer->get_output_element_type(0))};
    auto eltwise = std::make_shared<InferenceEngine::EltwiseLayer>(params);
    eltwise->params["operation"] = "sub";
    return eltwise;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::Maximum>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Eltwise",

View File

@ -35,3 +35,10 @@ shared_ptr<Node> op::NormalizeIE::clone_with_new_inputs(const OutputVector& new_
check_new_args_count(this, new_args);
return make_shared<op::NormalizeIE>(new_args.at(0), new_args.at(1), m_eps, m_across_spatial, m_channel_shared, m_output_type);
}
// Records the op's serializable attributes with the visitor (used for IR
// (de)serialization). Order of on_attribute calls is preserved deliberately.
bool op::NormalizeIE::visit_attributes(AttributeVisitor& visitor) {
visitor.on_attribute("eps", m_eps);
visitor.on_attribute("channel_shared", m_channel_shared);
visitor.on_attribute("across_spatial", m_across_spatial);
// Always succeeds; the visitor interface reports failure via the return value.
return true;
}

View File

@ -17,7 +17,6 @@
#include <string>
#include <vector>
#include <ngraph/op/strided_slice.hpp>
#include <ngraph/op/not_equal.hpp>
#include <ngraph/ops.hpp>
#include <ngraph/opsets/opset.hpp>
#include <ngraph/opsets/opset2.hpp>
@ -397,13 +396,10 @@ std::shared_ptr<ngraph::Node> V10Parser::createNode(const std::vector<ngraph::Ou
const GenericLayerParams& params) {
static std::vector<std::shared_ptr<LayerBaseCreator>> creators = {
std::make_shared<LayerCreator<ngraph::op::v1::AvgPool>>("AvgPool"),
std::make_shared<LayerCreator<ngraph::op::Convert>>("Convert"),
std::make_shared<LayerCreator<ngraph::op::CTCGreedyDecoder>>("CTCGreedyDecoder"),
std::make_shared<LayerCreator<ngraph::op::v1::DeformableConvolution>>("DeformableConvolution"),
std::make_shared<LayerCreator<ngraph::op::v1::DeformablePSROIPooling>>("DeformablePSROIPooling"),
std::make_shared<LayerCreator<ngraph::op::SpaceToDepth>>("SpaceToDepth"),
std::make_shared<LayerCreator<ngraph::op::DepthToSpace>>("DepthToSpace"),
std::make_shared<LayerCreator<ngraph::op::v1::Subtract>>("Subtract"),
std::make_shared<LayerCreator<ngraph::op::v1::Broadcast>>("Broadcast"),
std::make_shared<LayerCreator<ngraph::op::v1::StridedSlice>>("StridedSlice"),
std::make_shared<LayerCreator<ngraph::op::v1::Gather>>("Gather"),
@ -415,14 +411,11 @@ std::shared_ptr<ngraph::Node> V10Parser::createNode(const std::vector<ngraph::Ou
std::make_shared<LayerCreator<ngraph::op::SquaredDifference>>("SquaredDifference"),
std::make_shared<LayerCreator<ngraph::op::v1::LessEqual>>("LessEqual"),
std::make_shared<LayerCreator<ngraph::op::v1::Equal>>("Equal"),
std::make_shared<LayerCreator<ngraph::op::v1::NotEqual>>("NotEqual"),
std::make_shared<LayerCreator<ngraph::op::v1::FloorMod>>("FloorMod"),
std::make_shared<LayerCreator<ngraph::op::MVN>>("MVN"),
std::make_shared<LayerCreator<ngraph::op::v0::LSTMCell>>("LSTMCell"),
std::make_shared<LayerCreator<ngraph::op::v1::MaxPool>>("MaxPool"),
std::make_shared<LayerCreator<ngraph::op::v1::Minimum>>("Minimum"),
std::make_shared<LayerCreator<ngraph::op::v1::NonMaxSuppression>>("NonMaxSuppression"),
std::make_shared<LayerCreator<ngraph::op::NormalizeL2>>("NormalizeL2"),
std::make_shared<LayerCreator<ngraph::op::ReorgYolo>>("ReorgYolo"),
std::make_shared<LayerCreator<ngraph::op::RegionYolo>>("RegionYolo"),
std::make_shared<LayerCreator<ngraph::op::Result>>("Result"),
@ -480,12 +473,16 @@ std::shared_ptr<ngraph::Node> V10Parser::createNode(const std::vector<ngraph::Ou
if (!ngraphNode && opsets.count(params.version)) {
auto opset = opsets.at(params.version);
std::string type = params.type;
if (type == "Const") {
type = "Constant";
}
// ROIPooling was missing in opset1 and was added in opset2
if (type == "ROIPooling" && params.version == "opset1") {
opset = opsets.at("opset2");
if (params.version == "opset1") {
// MVN and ROIPooling were missing in opset1
if (type == "MVN" || type == "ROIPooling") {
opset = opsets.at("opset2");
}
}
if (!opset.contains_type_insensitive(type)) {
@ -761,20 +758,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::opset5::Loop>::cre
return fillSubGraphLayer(inputs, node, weights, layerParsePrms, loop);
}
// Convert layer (fixed comment typo: was "Covnert")
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::Convert>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    // Convert takes exactly one input; the target precision comes from the
    // mandatory "destination_type" attribute of the <data> element.
    checkParameters(inputs, layerParsePrms, 1);
    pugi::xml_node dn = node.child("data");
    if (dn.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
    return std::make_shared<ngraph::op::Convert>(inputs[0],
                                                 details::convertPrecision(GetStrAttr(dn, "destination_type")));
}
// LSTMCell layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v0::LSTMCell>::createLayer(
@ -844,15 +827,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Equal>::cr
return std::make_shared<ngraph::op::v1::Equal>(inputs[0], inputs[1]);
}
// NotEqual layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::NotEqual>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    // Element-wise comparison over exactly two inputs; the op has no XML attributes.
    checkParameters(inputs, layerParsePrms, 2);
    auto notEqualNode = std::make_shared<ngraph::op::v1::NotEqual>(inputs[0], inputs[1]);
    return notEqualNode;
}
// FloorMod layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::FloorMod>::createLayer(
@ -862,23 +836,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::FloorMod>:
return std::make_shared<ngraph::op::v1::FloorMod>(inputs[0], inputs[1]);
}
// MVN layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::MVN>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    checkParameters(inputs, layerParsePrms, 1);
    const auto dataNode = node.child("data");
    if (dataNode.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
    // "eps" is mandatory; the two boolean flags default to off unless set to 1.
    const double eps = GetFloatAttr(dataNode, "eps");
    const bool acrossChannels = GetUIntAttr(dataNode, "across_channels", 0) == 1;
    const bool normalizeVariance = GetUIntAttr(dataNode, "normalize_variance", 0) == 1;
    return std::make_shared<ngraph::op::MVN>(inputs[0], acrossChannels, normalizeVariance, eps);
}
// VariadicSplit layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::VariadicSplit>::createLayer(
@ -959,14 +916,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Minimum>::
return std::make_shared<ngraph::op::v1::Minimum>(inputs[0], inputs[1]);
}
// Subtract layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Subtract>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    // Binary element-wise subtraction; no attributes beyond the two inputs.
    checkParameters(inputs, layerParsePrms, 2);
    auto subtractNode = std::make_shared<ngraph::op::v1::Subtract>(inputs[0], inputs[1]);
    return subtractNode;
}
// Broadcast layer
template <>
@ -1344,31 +1293,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Gather>::c
return std::make_shared<ngraph::op::v1::Gather>(inputs[0], inputs[1], inputs[2]);
}
// NormalizeL2 layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::NormalizeL2>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    checkParameters(inputs, layerParsePrms, 2);
    const auto dataNode = node.child("data");
    if (dataNode.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
    const float eps = GetFloatAttr(dataNode, "eps");
    const std::string epsModeStr = GetStrAttr(dataNode, "eps_mode");
    // Translate the textual eps_mode attribute into the nGraph enum;
    // only "add" and "max" are valid.
    ngraph::op::EpsMode epsMode;
    if (epsModeStr == "add") {
        epsMode = ngraph::op::EpsMode::ADD;
    } else if (epsModeStr == "max") {
        epsMode = ngraph::op::EpsMode::MAX;
    } else {
        THROW_IE_EXCEPTION << "NormalizeL2 unsupported eps_mode: " << epsModeStr;
    }
    return std::make_shared<ngraph::op::NormalizeL2>(inputs[0], inputs[1], eps, epsMode);
}
// LogicalAnd layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::LogicalAnd>::createLayer(

View File

@ -49,6 +49,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
virtual bool visit_attributes(AttributeVisitor& visitor) override;
};
} // namespace v1
}

View File

@ -49,6 +49,8 @@ op::MVN::MVN(const Output<Node>& data, AxisSet reduction_axes, bool normalize_va
, m_reduction_axes{reduction_axes}
{
constructor_validate_and_infer_types();
const size_t chanelAxis = 1;
m_across_channels = (m_reduction_axes.count(chanelAxis) > 0);
}
// decompose_op() relies on knowing the data type of input data which might

View File

@ -84,6 +84,7 @@ void op::NormalizeL2::pre_validate_and_infer_types()
}
}
}
set_output_type(0, get_input_element_type(0), get_input_partial_shape(0));
}
AxisSet op::NormalizeL2::get_reduction_axes() const

View File

@ -94,3 +94,8 @@ bool op::v1::NotEqual::evaluate(const HostTensorVector& outputs,
OV_ITT_SCOPED_TASK(itt::domains::nGraphOp, "op::v1::NotEqual::evaluate");
return not_equalop::evaluate_not_equal(inputs[0], inputs[1], outputs[0], get_autob());
}
// NotEqual records no attributes of its own here; the visitor parameter is
// intentionally unused. NOTE(review): base-class attributes (if any, e.g.
// autobroadcast) are presumably handled elsewhere — confirm against the base op.
bool op::v1::NotEqual::visit_attributes(AttributeVisitor& visitor)
{
return true;
}