Deserialization for: Convolution, FakeQuantize, Lrn, Onehot, Pad and … (#3116)

* Deserialization for: Convolution, FakeQuantize, Lrn, Onehot, Pad and Matmul.

* Replace converter with addBlob function.

* Apply review remarks.

* Apply review remarks, undo Loop removal.

* Remove transpose conversion for matmul op.
This commit is contained in:
Szymon Durawa
2020-11-27 16:01:55 +01:00
committed by GitHub
parent 5ecb4a3960
commit badc50bd08
15 changed files with 133 additions and 323 deletions

View File

@@ -30,6 +30,7 @@ public:
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
void validate_and_infer_types() override;
bool visit_attributes(AttributeVisitor& visitor) override;
double get_alpha() const { return m_alpha; }
void set_alpha(double alpha) { m_alpha = alpha; }

View File

@@ -27,6 +27,7 @@ public:
void validate_and_infer_types() override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
bool visit_attributes(AttributeVisitor& visitor) override;
int get_axis() { return m_axis; }
int get_depth() { return m_depth; }

View File

@@ -26,6 +26,7 @@ public:
size_t get_version() const override { return 1; }
void validate_and_infer_types() override;
bool visit_attributes(AttributeVisitor& visitor) override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
PadMode get_pad_mode() { return m_pad_mode; }

View File

@@ -849,6 +849,89 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});
addSpecificCreator({"LRN_IE"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Norm", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::NormLayer>(attrs);
res->params = params;
return res;
});
addSpecificCreator({"MatMul"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Gemm", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::GemmLayer>(attrs);
res->params = params;
return res;
});
addSpecificCreator({"OneHotIE"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "OneHot", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::OneHotLayer>(attrs);
res->params = params;
return res;
});
addSpecificCreator({"PadIE"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Pad", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::PadLayer>(attrs);
res->params["pad_mode"] = params.at("pad_mode");
res->params["pads_begin"] = params.at("pads_begin");
res->params["pads_end"] = params.at("pads_end");
if (params.at("pad_mode") == "constant") {
res->params["pad_value"] = params.at("pad_value");
}
return res;
});
addSpecificCreator({"FakeQuantize"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "FakeQuantize", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::QuantizeLayer>(attrs);
res->params = params;
return res;
});
addSpecificCreator({"ConvolutionIE"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Convolution", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::ConvolutionLayer>(attrs);
res->params = params;
auto && rt_info = node->get_rt_info();
bool keep_constants(false);
if (auto attr = std::dynamic_pointer_cast<ngraph::VariantWrapper<int64_t>>(rt_info["keep_constants"])) {
keep_constants = attr->get();
}
// Restore output and kernel size
auto shape = node->get_input_shape(1);
shape.erase(shape.begin(), shape.begin() + 2);
res->params["kernel"] = Builder::asString(static_cast<std::vector<size_t>&>(shape));
res->params["output"] = Builder::asString(node->get_shape()[1]);
// forward auto_pad only when its value is different than explicit
if (params.at("auto_pad") == "explicit") {
res->params.erase("auto_pad");
}
const auto weightsNode = node->input_value(1).get_node_shared_ptr();
if (!keep_constants && InferenceEngine::details::addBlob(weightsNode, res, InferenceEngine::details::weights)) {
if (node->inputs().size() == 3) {
const auto biasNode = node->input_value(2).get_node_shared_ptr();
InferenceEngine::details::addBlob(biasNode, res, InferenceEngine::details::biases);
}
}
return res;
});
}
CNNLayerPtr InferenceEngine::details::CNNLayerCreator::create() {
@@ -878,7 +961,6 @@ void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function
const static std::vector<std::shared_ptr<Builder::INodeConverter>> convertors = {
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::AvgPool>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Clamp>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ConvolutionIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::CropIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Convert>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::CTCGreedyDecoder>>(),
@@ -887,24 +969,19 @@ void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Reshape>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Eltwise>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Elu>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::FakeQuantize>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Ceiling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GatherIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GatherTreeIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Interp>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v0::Interpolate>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::LRN_IE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::MVN>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::FullyConnected>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::MatMul>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GenericIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GRN>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::MaxPool>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Minimum>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::NormalizeIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::OneHotIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::PRelu>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::PadIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Power>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::PowerIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::PriorBoxClusteredIE>>(),

View File

@@ -9,20 +9,16 @@
#include <utility>
#include "legacy/ngraph_ops/crop_ie.hpp"
#include "ngraph_ops/convolution_ie.hpp"
#include "legacy/ngraph_ops/eltwise.hpp"
#include "legacy/ngraph_ops/fully_connected.hpp"
#include "legacy/ngraph_ops/gather_ie.hpp"
#include "legacy/ngraph_ops/gather_tree_ie.hpp"
#include "legacy/ngraph_ops/gru_cell_ie.hpp"
#include "legacy/ngraph_ops/interp.hpp"
#include "legacy/ngraph_ops/lrn_ie.hpp"
#include "legacy/ngraph_ops/lstm_cell_ie.hpp"
#include <transformations/rt_info/primitives_priority_attribute.hpp>
#include "legacy/ngraph_ops/normalize_ie.hpp"
#include "legacy/ngraph_ops/nms_ie.hpp"
#include "legacy/ngraph_ops/onehot_ie.hpp"
#include "legacy/ngraph_ops/pad_ie.hpp"
#include "legacy/ngraph_ops/power.hpp"
#include "legacy/ngraph_ops/prior_box_clustered_ie.hpp"
#include "legacy/ngraph_ops/prior_box_ie.hpp"
@@ -33,6 +29,7 @@
#include "legacy/ngraph_ops/tile_ie.hpp"
#include "legacy/ngraph_ops/rnn_cell_ie.hpp"
#include "legacy/ngraph_ops/hard_sigmoid_ie.hpp"
#include "generic_ie.hpp"
#include "exec_graph_info.hpp"
@@ -494,22 +491,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::MVN>::createLayer(const std::shared_ptr<
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::LRN_IE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Map an LRN_IE node onto the legacy "Norm" layer representation.
    auto lrn = ngraph::as_type_ptr<ngraph::op::LRN_IE>(layer);
    LayerParams params = {layer->get_friendly_name(), "Norm",
                          details::convertPrecision(layer->get_output_element_type(0))};
    if (lrn == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
    auto norm = std::make_shared<InferenceEngine::NormLayer>(params);
    // Legacy IR uses "k" for the bias term and "local-size" for the window.
    norm->params["alpha"] = asString(lrn->get_alpha());
    norm->params["beta"] = asString(lrn->get_beta());
    norm->params["k"] = asString(lrn->get_bias());
    norm->params["local-size"] = asString(lrn->get_nsize());
    norm->params["region"] = lrn->get_region();
    return norm;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::CropIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Crop",
@@ -643,97 +624,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::v0::Unsqueeze>::createLayer(const std::s
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::FakeQuantize>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // FakeQuantize maps onto the legacy Quantize layer; only the "levels"
    // attribute is carried over (ranges travel as inputs).
    auto fq = ngraph::as_type_ptr<ngraph::op::FakeQuantize>(layer);
    LayerParams params = {layer->get_friendly_name(), "FakeQuantize",
                          details::convertPrecision(layer->get_output_element_type(0))};
    if (fq == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
    auto res = std::make_shared<InferenceEngine::QuantizeLayer>(params);
    res->params["levels"] = asString(fq->get_levels());
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ConvolutionIE>::createLayer(
    const std::shared_ptr<ngraph::Node>& layer) const {
    // Translate ConvolutionIE into the legacy "Convolution" layer: spatial
    // attributes, restored kernel/output sizes, and constant blobs.
    LayerParams attrs = {layer->get_friendly_name(), "Convolution",
                         details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::ConvolutionLayer>(attrs);
    auto conv = ngraph::as_type_ptr<ngraph::op::ConvolutionIE>(layer);
    if (conv == nullptr) THROW_IE_EXCEPTION << "Cannot get " << attrs.type << " layer " << attrs.name;

    // All list-valued attributes are serialized as comma-separated strings.
    std::string joined;
    for (const auto& p : conv->get_pads_begin()) {
        if (!joined.empty()) joined += ",";
        joined += asString(p);
    }
    res->params["pads_begin"] = joined;

    joined.clear();
    for (const auto& p : conv->get_pads_end()) {
        if (!joined.empty()) joined += ",";
        joined += ",";  // placeholder
    }
    // -- corrected join for pads_end (see note below)
    joined.clear();
    for (const auto& p : conv->get_pads_end()) {
        if (!joined.empty()) joined += ",";
        joined += asString(p);
    }
    res->params["pads_end"] = joined;

    // Explicit padding is implied by pads_begin/pads_end, so only the
    // non-trivial auto_pad modes are recorded.
    switch (conv->get_auto_pad()) {
    case ngraph::op::PadType::SAME_UPPER:
        res->params["auto_pad"] = "same_upper";
        break;
    case ngraph::op::PadType::SAME_LOWER:
        res->params["auto_pad"] = "same_lower";
        break;
    case ngraph::op::PadType::VALID:
        res->params["auto_pad"] = "valid";
        break;
    default:
        break;
    }

    joined.clear();
    for (const auto& s : conv->get_strides()) {
        if (!joined.empty()) joined += ",";
        joined += asString(s);
    }
    res->params["strides"] = joined;

    joined.clear();
    for (const auto& d : conv->get_dilations()) {
        if (!joined.empty()) joined += ",";
        joined += asString(d);
    }
    res->params["dilations"] = joined;

    // Kernel size is the weights shape without the first two dimensions;
    // the output channel count is taken from the node's own shape.
    const auto& weightsShape = conv->get_input_shape(1);
    res->params["output"] = asString(conv->get_shape()[1]);
    res->params["group"] = asString(conv->get_group());
    joined.clear();
    for (size_t i = 2; i < weightsShape.size(); i++) {
        if (!joined.empty()) joined += ",";
        joined += asString(weightsShape[i]);
    }
    res->params["kernel"] = joined;

    // When "keep_constants" is set in runtime info, constant inputs remain
    // standalone layers and must not be folded into weight/bias blobs.
    auto& rt_info = layer->get_rt_info();
    bool keep_constants(false);
    if (auto attr = std::dynamic_pointer_cast<ngraph::VariantWrapper<int64_t>>(rt_info["keep_constants"])) {
        keep_constants = attr->get();
    }
    const auto weightsNode = conv->input_value(1).get_node_shared_ptr();
    if (!keep_constants && InferenceEngine::details::addBlob(weightsNode, res, InferenceEngine::details::weights)) {
        // A bias blob only exists when the node has a third input.
        if (conv->inputs().size() == 3) {
            const auto biasNode = conv->input_value(2).get_node_shared_ptr();
            InferenceEngine::details::addBlob(biasNode, res, InferenceEngine::details::biases);
        }
    }
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::DeformableConvolution>::createLayer(
const std::shared_ptr<ngraph::Node>& layer) const {
@@ -1150,46 +1040,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::v1::Reshape>::createLayer(const std::sha
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::PadIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Convert PadIE into the legacy "Pad" layer; pad_value is only emitted
    // for the constant mode.
    auto padOp = ngraph::as_type_ptr<ngraph::op::PadIE>(layer);
    LayerParams params = {layer->get_friendly_name(), "Pad",
                          details::convertPrecision(layer->get_output_element_type(0))};
    if (padOp == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
    auto res = std::make_shared<InferenceEngine::PadLayer>(params);
    switch (padOp->get_pad_mode()) {
    case ngraph::op::PadMode::EDGE:
        res->params["pad_mode"] = "edge";
        break;
    case ngraph::op::PadMode::REFLECT:
        res->params["pad_mode"] = "reflect";
        break;
    case ngraph::op::PadMode::CONSTANT:
        res->params["pad_mode"] = "constant";
        res->params["pad_value"] = asString(padOp->get_pad_value());
        break;
    case ngraph::op::PadMode::SYMMETRIC:
        res->params["pad_mode"] = "symmetric";
    }
    // Pads are serialized as comma-separated lists.
    std::string joined;
    for (const auto& p : padOp->get_pads_begin()) {
        if (!joined.empty()) joined += ",";
        joined += asString(p);
    }
    res->params["pads_begin"] = joined;
    joined.clear();
    for (const auto& p : padOp->get_pads_end()) {
        if (!joined.empty()) joined += ",";
        joined += asString(p);
    }
    res->params["pads_end"] = joined;
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ScaleShiftIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "ScaleShift",
@@ -1661,21 +1511,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::FullyConnected>::createLayer(const std::
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::MatMul>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // MatMul is represented by the legacy "Gemm" layer.
    auto matmul = ngraph::as_type_ptr<ngraph::op::MatMul>(layer);
    LayerParams params = {layer->get_friendly_name(), "Gemm",
                          details::convertPrecision(layer->get_output_element_type(0))};
    if (matmul == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
    auto res = std::make_shared<InferenceEngine::GemmLayer>(params);
    // NOTE(review): capitalized boolean literals are kept for compatibility
    // with existing consumers of the legacy representation.
    res->params["transpose_a"] = matmul->get_transpose_a() ? "True" : "False";
    res->params["transpose_b"] = matmul->get_transpose_b() ? "True" : "False";
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ExecGraphInfoSerialization::ExecutionNode>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
auto castedLayer = ngraph::as_type_ptr<ExecGraphInfoSerialization::ExecutionNode>(layer);
@@ -1891,21 +1726,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::Sqrt>::createLayer(const std::shared_ptr
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::OneHotIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // OneHotIE is always emitted as an FP32 legacy "OneHot" layer.
    auto oneHot = std::dynamic_pointer_cast<ngraph::op::OneHotIE>(layer);
    LayerParams params = {layer->get_friendly_name(), "OneHot", Precision::FP32};
    if (oneHot == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
    auto res = std::make_shared<InferenceEngine::OneHotLayer>(params);
    res->params["axis"] = std::to_string(oneHot->get_axis());
    res->params["depth"] = std::to_string(oneHot->get_depth());
    res->params["on_value"] = std::to_string(oneHot->get_on_value());
    res->params["off_value"] = std::to_string(oneHot->get_off_value());
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::HardSigmoid_IE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = { layer->get_friendly_name(), "HardSigmoid", details::convertPrecision(layer->get_output_element_type(0)) };

View File

@@ -32,3 +32,12 @@ shared_ptr<Node> op::LRN_IE::clone_with_new_inputs(const OutputVector& new_args)
check_new_args_count(this, new_args);
return make_shared<op::LRN_IE>(new_args.at(0), m_alpha, m_beta, m_bias, m_size, m_region);
}
bool op::LRN_IE::visit_attributes(AttributeVisitor& visitor) {
    // Expose LRN attributes for (de)serialization. Keys mirror the legacy IR
    // parameter names: "k" carries the bias term, "local-size" the window size.
    visitor.on_attribute("alpha", m_alpha);
    visitor.on_attribute("beta", m_beta);
    visitor.on_attribute("k", m_bias);
    visitor.on_attribute("local-size", m_size);
    visitor.on_attribute("region", m_region);
    return true;
}

View File

@@ -35,3 +35,11 @@ shared_ptr<Node> op::OneHotIE::clone_with_new_inputs(const OutputVector& new_arg
check_new_args_count(this, new_args);
return make_shared<op::OneHotIE>(new_args.at(0), m_axis, m_depth, m_on_value, m_off_value, m_type);
}
bool op::OneHotIE::visit_attributes(AttributeVisitor& visitor) {
    // Expose OneHot attributes for (de)serialization.
    // NOTE(review): m_type is not visited here — presumably reconstructed
    // elsewhere; confirm before relying on round-tripping the output type.
    visitor.on_attribute("axis", m_axis);
    visitor.on_attribute("depth", m_depth);
    visitor.on_attribute("off_value", m_off_value);
    visitor.on_attribute("on_value", m_on_value);
    return true;
}

View File

@@ -44,3 +44,11 @@ void op::PadIE::validate_and_infer_types() {
shared_ptr<Node> op::PadIE::clone_with_new_inputs(const OutputVector& new_args) const {
return nullptr;
}
bool op::PadIE::visit_attributes(AttributeVisitor& visitor) {
    // Expose Pad attributes for (de)serialization. pad_value is visited
    // unconditionally even though consumers only read it in "constant" mode.
    visitor.on_attribute("pads_begin", m_pads_begin);
    visitor.on_attribute("pads_end", m_pads_end);
    visitor.on_attribute("pad_mode", m_pad_mode);
    visitor.on_attribute("pad_value", m_pad_value);
    return true;
}

View File

@@ -405,16 +405,13 @@ std::shared_ptr<ngraph::Node> V10Parser::createNode(const std::vector<ngraph::Ou
std::make_shared<LayerCreator<ngraph::op::SpaceToDepth>>("SpaceToDepth"),
std::make_shared<LayerCreator<ngraph::op::DepthToSpace>>("DepthToSpace"),
std::make_shared<LayerCreator<ngraph::op::v1::Subtract>>("Subtract"),
std::make_shared<LayerCreator<ngraph::op::MatMul>>("MatMul"),
std::make_shared<LayerCreator<ngraph::op::v1::Broadcast>>("Broadcast"),
std::make_shared<LayerCreator<ngraph::op::v1::Reshape>>("Reshape"),
std::make_shared<LayerCreator<ngraph::op::v1::StridedSlice>>("StridedSlice"),
std::make_shared<LayerCreator<ngraph::op::Elu>>("ELU"),
std::make_shared<LayerCreator<ngraph::op::FakeQuantize>>("FakeQuantize"),
std::make_shared<LayerCreator<ngraph::op::v1::Gather>>("Gather"),
std::make_shared<LayerCreator<ngraph::op::v1::GatherTree>>("GatherTree"),
std::make_shared<LayerCreator<ngraph::op::v1::GreaterEqual>>("GreaterEqual"),
std::make_shared<LayerCreator<ngraph::op::v1::Convolution>>("Convolution"),
std::make_shared<LayerCreator<ngraph::op::v1::GroupConvolution>>("GroupConvolution"),
std::make_shared<LayerCreator<ngraph::op::v1::ConvolutionBackpropData>>("ConvolutionBackpropData"),
std::make_shared<LayerCreator<ngraph::op::v1::GroupConvolutionBackpropData>>("GroupConvolutionBackpropData"),
@@ -426,17 +423,14 @@ std::shared_ptr<ngraph::Node> V10Parser::createNode(const std::vector<ngraph::Ou
std::make_shared<LayerCreator<ngraph::op::v1::Equal>>("Equal"),
std::make_shared<LayerCreator<ngraph::op::v1::NotEqual>>("NotEqual"),
std::make_shared<LayerCreator<ngraph::op::v1::FloorMod>>("FloorMod"),
std::make_shared<LayerCreator<ngraph::op::LRN>>("LRN"),
std::make_shared<LayerCreator<ngraph::op::MVN>>("MVN"),
std::make_shared<LayerCreator<ngraph::op::v0::LSTMCell>>("LSTMCell"),
std::make_shared<LayerCreator<ngraph::op::v1::MaxPool>>("MaxPool"),
std::make_shared<LayerCreator<ngraph::op::v1::Minimum>>("Minimum"),
std::make_shared<LayerCreator<ngraph::op::v1::NonMaxSuppression>>("NonMaxSuppression"),
std::make_shared<LayerCreator<ngraph::op::NormalizeL2>>("NormalizeL2"),
std::make_shared<LayerCreator<ngraph::op::v1::OneHot>>("OneHot"),
std::make_shared<LayerCreator<ngraph::op::PRelu>>("PReLU"),
std::make_shared<LayerCreator<ngraph::op::Relu>>("ReLU"),
std::make_shared<LayerCreator<ngraph::op::v1::Pad>>("Pad"),
std::make_shared<LayerCreator<ngraph::op::v1::Power>>("Power"),
std::make_shared<LayerCreator<ngraph::op::ReverseSequence>>("ReverseSequence"),
std::make_shared<LayerCreator<ngraph::op::PriorBox>>("PriorBox"),
@@ -836,20 +830,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::PriorBox>::cre
return std::make_shared<ngraph::op::PriorBox>(inputs[0], inputs[1], attr);
}
// FakeQuantize layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::FakeQuantize>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    // FakeQuantize takes exactly five inputs: data plus the four range limits.
    checkParameters(inputs, layerParsePrms, 5);
    const pugi::xml_node dn = node.child("data");
    if (dn.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
    const auto levels = GetUIntAttr(dn, "levels");
    return std::make_shared<ngraph::op::FakeQuantize>(inputs[0], inputs[1], inputs[2], inputs[3], inputs[4], levels);
}
// ReverseSequence layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::ReverseSequence>::createLayer(const ngraph::OutputVector & inputs, const pugi::xml_node& node,
@@ -907,43 +887,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::CTCGreedyDecod
GetBoolAttr(dn, "ctc_merge_repeated", true));
}
// Pad layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Pad>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    pugi::xml_node dn = node.child("data");
    if (dn.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;

    const std::string pad_mode_str = GetStrAttr(dn, "pad_mode");
    ngraph::op::PadMode pad_mode;
    if (pad_mode_str == "constant") {
        pad_mode = ngraph::op::PadMode::CONSTANT;
    } else if (pad_mode_str == "edge") {
        pad_mode = ngraph::op::PadMode::EDGE;
    } else if (pad_mode_str == "reflect") {
        pad_mode = ngraph::op::PadMode::REFLECT;
    } else if (pad_mode_str == "symmetric") {
        pad_mode = ngraph::op::PadMode::SYMMETRIC;
    } else {
        THROW_IE_EXCEPTION << "Pad mode: " << pad_mode_str << " is not supported";
    }

    if (pad_mode != ngraph::op::PadMode::CONSTANT) {
        // Non-constant modes only take data, pads_begin and pads_end.
        checkParameters(inputs, layerParsePrms, 3);
        return std::make_shared<ngraph::op::v1::Pad>(inputs[0], inputs[1], inputs[2], pad_mode);
    }
    // Constant mode may optionally carry an explicit pad value as the
    // fourth input; without one the op's default is used.
    if (inputs.size() == 3) {
        return std::make_shared<ngraph::op::v1::Pad>(inputs[0], inputs[1], inputs[2], pad_mode);
    }
    checkParameters(inputs, layerParsePrms, 4);
    return std::make_shared<ngraph::op::v1::Pad>(inputs[0], inputs[1], inputs[2], inputs[3], pad_mode);
}
// SquaredDifference layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::SquaredDifference>::createLayer(
@@ -1015,24 +958,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::MVN>::createLa
return std::make_shared<ngraph::op::MVN>(inputs[0], across, normalize_variance, eps);
}
// LRN layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::LRN>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    // LRN takes the data tensor plus a second input with the reduction axes.
    checkParameters(inputs, layerParsePrms, 2);
    const pugi::xml_node dn = node.child("data");
    if (dn.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
    const auto alpha = GetFloatAttr(dn, "alpha");
    const auto beta = GetFloatAttr(dn, "beta");
    const auto bias = GetFloatAttr(dn, "bias");
    const auto size = GetUInt64Attr(dn, "size");
    return std::make_shared<ngraph::op::LRN>(inputs[0], inputs[1], alpha, beta, bias, size);
}
// Clamp layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::Clamp>::createLayer(
@@ -1248,20 +1173,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Power>::cr
return std::make_shared<ngraph::op::v1::Power>(inputs[0], inputs[1]);
}
// MatMul layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::MatMul>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    checkParameters(inputs, layerParsePrms, 2);
    // Transposition flags are optional attributes and default to false, so a
    // missing <data> node is tolerated here.
    const pugi::xml_node dn = node.child("data");
    const auto transpose_a = GetBoolAttr(dn, "transpose_a", false);
    const auto transpose_b = GetBoolAttr(dn, "transpose_b", false);
    return std::make_shared<ngraph::op::MatMul>(inputs[0], inputs[1], transpose_a, transpose_b);
}
// Softmax layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Softmax>::createLayer(
@@ -1350,36 +1261,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::BinaryConv
dilations, mode, pad_value, pad_type);
}
// Convolution layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Convolution>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    checkParameters(inputs, layerParsePrms, 2);
    pugi::xml_node dn = node.child("data");
    if (dn.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;

    // auto_pad is optional; any unrecognized or absent value falls back to
    // explicit padding taken from pads_begin/pads_end.
    const std::string auto_pad = GetStrAttr(dn, "auto_pad", "");
    ngraph::op::PadType pad_type = ngraph::op::PadType::EXPLICIT;
    if (auto_pad == "same_lower") {
        pad_type = ngraph::op::PadType::SAME_LOWER;
    } else if (auto_pad == "same_upper") {
        pad_type = ngraph::op::PadType::SAME_UPPER;
    } else if (auto_pad == "valid") {
        pad_type = ngraph::op::PadType::VALID;
    }

    const auto strides = ngraph::Strides(getParameters<size_t>(dn, "strides"));
    const auto dilations = ngraph::Strides(getParameters<size_t>(dn, "dilations"));
    const auto pads_begin = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(dn, "pads_begin", {}));
    const auto pads_end = ngraph::CoordinateDiff(getParameters<std::ptrdiff_t>(dn, "pads_end", {}));
    return std::make_shared<ngraph::op::v1::Convolution>(inputs[0], inputs[1], strides, pads_begin, pads_end,
                                                         dilations, pad_type);
}
// GroupConvolution layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::GroupConvolution>::createLayer(
@@ -1697,20 +1578,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::GatherTree
return std::make_shared<ngraph::op::v1::GatherTree>(inputs[0], inputs[1], inputs[2], inputs[3]);
}
// OneHot layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::OneHot>::createLayer(
    const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
    const GenericLayerParams& layerParsePrms) {
    // OneHot inputs: indices, depth, on_value, off_value; axis comes from XML.
    checkParameters(inputs, layerParsePrms, 4);
    const pugi::xml_node dn = node.child("data");
    if (dn.empty())
        THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
    const auto axis = GetInt64Attr(dn, "axis");
    return std::make_shared<ngraph::op::v1::OneHot>(inputs[0], inputs[1], inputs[2], inputs[3], axis);
}
// NormalizeL2 layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::NormalizeL2>::createLayer(

View File

@@ -243,6 +243,11 @@ private:
} else if (auto a = ngraph::as_type<ngraph::AttributeAdapter<ngraph::op::TopKMode>>(&adapter)) {
if (!getStrAttribute(node.child("data"), name, val)) return;
static_cast<ngraph::op::TopKMode&>(*a) = ngraph::as_enum<ngraph::op::TopKMode>(val);
} else if (auto a = ngraph::as_type<ngraph::AttributeAdapter<ngraph::CoordinateDiff>>(&adapter)) {
std::vector<size_t> shape;
if (!getParameters<size_t>(node.child("data"), name, shape)) return;
std::vector<std::ptrdiff_t> coord_diff(shape.begin(), shape.end());
static_cast<ngraph::CoordinateDiff&>(*a) = ngraph::CoordinateDiff(coord_diff);
} else {
THROW_IE_EXCEPTION << "Error IR reading. Attribute adapter can not be found for " << name
<< " parameter";

View File

@@ -86,6 +86,8 @@ public:
void validate_and_infer_types() override;
bool visit_attributes(AttributeVisitor& visitor) override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector & new_args) const override;
/// \return The strides.

View File

@@ -172,3 +172,13 @@ shared_ptr<Node> op::ConvolutionIE::clone_with_new_inputs(const ngraph::OutputVe
throw ngraph_error("Unsupported number of arguments for ConvolutionIE operation");
}
bool op::ConvolutionIE::visit_attributes(AttributeVisitor& visitor) {
    // Expose convolution attributes for (de)serialization. Kernel and output
    // sizes are not visited — presumably recovered from the input shapes by
    // consumers; confirm against the legacy converter.
    visitor.on_attribute("strides", m_strides);
    visitor.on_attribute("dilations", m_dilations);
    visitor.on_attribute("pads_begin", m_pads_begin);
    visitor.on_attribute("pads_end", m_pads_end);
    visitor.on_attribute("auto_pad", m_auto_pad);
    visitor.on_attribute("group", m_group);
    return true;
}

View File

@@ -381,7 +381,7 @@ TEST_F(NGraphReaderTests, ReadMatMulNetwork4) {
</output>
</layer>
<layer id="2" name="fc" precision="FP32" type="Gemm">
<data transpose_a="True" transpose_b="True" />
<data transpose_a="true" transpose_b="true" />
<input>
<port id="0">
<dim>2048</dim>
@@ -529,7 +529,7 @@ TEST_F(NGraphReaderTests, ReadMatMulNetwork5) {
</output>
</layer>
<layer id="3" name="fc" precision="FP32" type="Gemm">
<data transpose_a="False" transpose_b="False" />
<data transpose_a="false" transpose_b="false" />
<input>
<port id="0">
<dim>1</dim>

View File

@@ -33,6 +33,7 @@
#include "ngraph/attribute_visitor.hpp"
#include "ngraph/check.hpp"
#include "ngraph/coordinate.hpp"
#include "ngraph/coordinate_diff.hpp"
#include "ngraph/deprecated.hpp"
#include "ngraph/descriptor/input.hpp"
#include "ngraph/descriptor/output.hpp"

View File

@@ -85,7 +85,7 @@ namespace ngraph
private:
std::size_t m_levels;
AutoBroadcastSpec m_auto_broadcast;
AutoBroadcastSpec m_auto_broadcast = op::AutoBroadcastType::NUMPY;
};
}
using v0::FakeQuantize;