Remove ops from Node Converter- part 1 (#4166)

* remove CropIE op from node converter

* remove eltwise from node converter, enable visitor api

* remove ceiling op from node converter

* remove PowerIE, ReLUIE, FullyConnected from node converter

* remove unused node converters

* join relu and reluie specific creators

* add missing eltwise types to node conversion

* fix eltwise type

* change power ops scale parameter passing for better precision
This commit is contained in:
Bartosz Lesniewski
2021-02-08 05:21:54 +01:00
committed by GitHub
parent e7465d329e
commit 132b47394c
12 changed files with 149 additions and 446 deletions

View File

@@ -24,6 +24,8 @@ public:
std::vector<int64_t> dim,
std::vector<int64_t> offset);
bool visit_attributes(AttributeVisitor &visitor) override;
void validate_and_infer_types() override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;

View File

@@ -11,7 +11,6 @@
#include "ngraph/op/op.hpp"
enum class ELTWISE_TYPE {Sum, Prod, Max, Sub, Min, Div};
namespace ngraph {
namespace op {
@@ -25,6 +24,8 @@ public:
const ELTWISE_TYPE eltwise_type,
const element::Type output_type = element::undefined);
bool visit_attributes(AttributeVisitor &visitor) override;
void validate_and_infer_types() override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
@@ -32,8 +33,23 @@ public:
ELTWISE_TYPE eltwise_type;
private:
ELTWISE_TYPE type_from_string(const std::string &eltwise_type) const { return as_enum<ELTWISE_TYPE>(eltwise_type); }
element::Type m_output_type;
};
} // namespace op
} // namespace ngraph
} // namespace op
std::ostream &operator<<(std::ostream &s, const ELTWISE_TYPE &type);
template <>
class AttributeAdapter<ELTWISE_TYPE>
: public EnumAttributeAdapterBase<ELTWISE_TYPE> {
public:
AttributeAdapter(ELTWISE_TYPE &value)
: EnumAttributeAdapterBase<ELTWISE_TYPE>(value) {}
static constexpr DiscreteTypeInfo type_info{"AttributeAdapter<ELTWISE_TYPE>",
1};
const DiscreteTypeInfo &get_type_info() const override { return type_info; }
};
} // namespace ngraph

View File

@@ -32,6 +32,8 @@ public:
const Shape & output_shape,
const element::Type output_type = element::undefined);
bool visit_attributes(AttributeVisitor &visitor) override;
void validate_and_infer_types() override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;

View File

@@ -23,6 +23,8 @@ public:
void validate_and_infer_types() override;
bool visit_attributes(AttributeVisitor& visitor) override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
float scale, power, shift;

View File

@@ -22,6 +22,8 @@ public:
void validate_and_infer_types() override;
bool visit_attributes(AttributeVisitor &visitor) override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
float get_slope() { return m_negative_slope; }

View File

@@ -458,6 +458,9 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
case ELTWISE_TYPE::Sum:
type = "sum";
break;
case ELTWISE_TYPE::Sub:
type = "sub";
break;
case ELTWISE_TYPE::Prod:
type = "prod";
break;
@@ -860,7 +863,7 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});
addSpecificCreator({"Relu"},
addSpecificCreator({"Relu", "ReLUIE"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "ReLU",
@@ -1610,6 +1613,76 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
res->params["axis"] = Builder::asString(axis);
return res;
});
addSpecificCreator({"CropIE"}, [](const std::shared_ptr<::ngraph::Node> &node,
const std::map<std::string, std::string> &params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Crop", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CropLayer>(attrs);
res->params = params;
return res;
});
addSpecificCreator({"FullyConnected"}, [](const std::shared_ptr<::ngraph::Node> &node,
const std::map<std::string, std::string> &params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "FullyConnected", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::FullyConnectedLayer>(attrs);
res->params = params;
auto & rt_info = node->get_rt_info();
bool keep_constants(false);
if (auto attr = std::dynamic_pointer_cast<ngraph::VariantWrapper<int64_t>>(rt_info["keep_constants"])) {
keep_constants = attr->get();
}
const auto weightsNode = node->input_value(1).get_node_shared_ptr();
if (!keep_constants && InferenceEngine::details::addBlob(weightsNode, res, InferenceEngine::details::weights)) {
const auto biasNode = node->input_value(2).get_node_shared_ptr();
InferenceEngine::details::addBlob(biasNode, res, InferenceEngine::details::biases);
}
return res;
});
addSpecificCreator({"GenericIE"}, [](const std::shared_ptr<::ngraph::Node> &node,
const std::map<std::string, std::string> &params) -> CNNLayerPtr {
auto type = params.at("__generic_ie_type__");
auto castedLayer = ngraph::as_type_ptr<ngraph::op::GenericIE>(node);
LayerParams attrs = {node->get_friendly_name(), type, details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);
if (type == "RNNCell") {
res = std::make_shared<InferenceEngine::RNNCell>(attrs);
}
if (type == "GRUCell") {
res = std::make_shared<InferenceEngine::GRUCell>(attrs);
}
auto weightableLayer = std::dynamic_pointer_cast<InferenceEngine::WeightableLayer>(res);
for (const auto& param : castedLayer->getParameters()) {
if (param.second.is<Blob::Ptr>()) {
res->blobs[param.first] = param.second.as<Blob::Ptr>();
} else if (param.second.is<Blob::CPtr>()) {
res->blobs[param.first] = std::const_pointer_cast<Blob>(param.second.as<Blob::CPtr>());
} else if (param.second.is<std::string>()) {
res->params[param.first] = param.second.as<std::string>();
}
if (weightableLayer && param.first == "weights")
weightableLayer->_weights = res->blobs[param.first];
if (weightableLayer && param.first == "biases")
weightableLayer->_biases = res->blobs[param.first];
}
return res;
});
addSpecificCreator({"PowerIE"}, [](const std::shared_ptr<::ngraph::Node> &node,
const std::map<std::string, std::string> &params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Power", details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::PowerLayer>(attrs);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::PowerIE>(node);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << attrs.type << " layer " << attrs.name;
res->params = params;
// This is needed as scale parameter requires high precision
res->params["scale"] = Builder::asString(castedLayer->scale);
return res;
});
}
CNNLayerPtr InferenceEngine::details::CNNLayerCreator::create() {
@@ -1637,13 +1710,6 @@ void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function
}
};
static const std::vector<std::shared_ptr<Builder::INodeConverter>> convertors = {
std::make_shared<Builder::NodeConverter<::ngraph::op::CropIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Eltwise>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Ceiling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::FullyConnected>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GenericIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::PowerIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ReLUIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ResampleV2>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ScaleShiftIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ShuffleChannels>>(),

View File

@@ -8,27 +8,11 @@
#include <sstream>
#include <utility>
#include "legacy/ngraph_ops/crop_ie.hpp"
#include "legacy/ngraph_ops/eltwise.hpp"
#include "legacy/ngraph_ops/fully_connected.hpp"
#include "legacy/ngraph_ops/gather_ie.hpp"
#include "legacy/ngraph_ops/gru_cell_ie.hpp"
#include "legacy/ngraph_ops/interp.hpp"
#include "legacy/ngraph_ops/lstm_cell_ie.hpp"
#include <transformations/rt_info/primitives_priority_attribute.hpp>
#include "legacy/ngraph_ops/normalize_ie.hpp"
#include "legacy/ngraph_ops/nms_ie.hpp"
#include "legacy/ngraph_ops/power.hpp"
#include "legacy/ngraph_ops/prior_box_clustered_ie.hpp"
#include "legacy/ngraph_ops/prior_box_ie.hpp"
#include "legacy/ngraph_ops/proposal_ie.hpp"
#include "legacy/ngraph_ops/relu_ie.hpp"
#include "legacy/ngraph_ops/selu_ie.hpp"
#include "legacy/ngraph_ops/scaleshift.hpp"
#include "legacy/ngraph_ops/tile_ie.hpp"
#include "legacy/ngraph_ops/rnn_cell_ie.hpp"
#include "generic_ie.hpp"
#include "exec_graph_info.hpp"
#include <cnn_network_ngraph_impl.hpp>
@@ -69,238 +53,6 @@ std::string asString<float>(const float& value) {
return asString(static_cast<double>(value));
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Abs>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free unary op: a bare CNNLayer of type "Abs" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Abs",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
// Converts a GenericIE node to a CNNLayer whose type comes from the node
// itself, copying all stored parameters and blobs onto the layer.
CNNLayer::Ptr NodeConverter<ngraph::op::GenericIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
auto castedLayer = ngraph::as_type_ptr<ngraph::op::GenericIE>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get layer " << layer->get_friendly_name();
LayerParams params = {layer->get_friendly_name(), castedLayer->getType(),
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(params);
// RNNCell / GRUCell need their dedicated layer classes, not a plain CNNLayer.
if (castedLayer->getType() == "RNNCell")
res = std::make_shared<InferenceEngine::RNNCell>(params);
if (castedLayer->getType() == "GRUCell")
res = std::make_shared<InferenceEngine::GRUCell>(params);
// Non-null only for weightable layer kinds (e.g. the cells above).
auto weightableLayer = std::dynamic_pointer_cast<InferenceEngine::WeightableLayer>(res);
// Blob-valued parameters become layer blobs; string-valued ones become params.
for (const auto& param : castedLayer->getParameters()) {
if (param.second.is<Blob::Ptr>()) {
res->blobs[param.first] = param.second.as<Blob::Ptr>();
} else if (param.second.is<Blob::CPtr>()) {
res->blobs[param.first] = std::const_pointer_cast<Blob>(param.second.as<Blob::CPtr>());
} else if (param.second.is<std::string>()) {
res->params[param.first] = param.second.as<std::string>();
}
// Mirror the well-known blobs onto the weightable layer's members.
if (weightableLayer && param.first == "weights")
weightableLayer->_weights = res->blobs[param.first];
if (weightableLayer && param.first == "biases")
weightableLayer->_biases = res->blobs[param.first];
}
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Ceiling>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free unary op: a bare CNNLayer of type "Ceiling" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Ceiling",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Floor>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free unary op: a bare CNNLayer of type "Floor" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Floor",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Sigmoid>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free activation: a bare CNNLayer of type "Sigmoid" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Sigmoid",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Tanh>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Note the IE spelling "TanH" (legacy IR name) vs ngraph's "Tanh".
    LayerParams attrs = {layer->get_friendly_name(), "TanH",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ReLUIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // ReLUIE carries the leaky-ReLU slope; surface it as "negative_slope".
    LayerParams attrs = {layer->get_friendly_name(), "ReLU",
                         details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::ReLULayer>(attrs);
    auto relu = ngraph::as_type_ptr<ngraph::op::ReLUIE>(layer);
    if (relu == nullptr) THROW_IE_EXCEPTION << "Cannot get " << attrs.type << " layer " << attrs.name;
    res->params["negative_slope"] = asString(relu->get_slope());
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Range>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // No extra parameters are exported for Range at this layer level.
    LayerParams attrs = {layer->get_friendly_name(), "Range",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Exp>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free unary op: a bare CNNLayer of type "Exp" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Exp",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::CropIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Exports the crop geometry (axis / dim / offset) as comma-separated lists.
    LayerParams attrs = {layer->get_friendly_name(), "Crop",
                         details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::CropLayer>(attrs);
    auto crop = ngraph::as_type_ptr<ngraph::op::CropIE>(layer);
    if (crop == nullptr) THROW_IE_EXCEPTION << "Cannot get " << attrs.type << " layer " << attrs.name;
    // Shared serializer for the three int64 vectors.
    auto join = [](const std::vector<int64_t>& vals) {
        std::string joined;
        for (const auto& v : vals) {
            if (!joined.empty()) joined += ",";
            joined += asString(v);
        }
        return joined;
    };
    res->params["axis"] = join(crop->axes);
    res->params["dim"] = join(crop->dim);
    res->params["offset"] = join(crop->offset);
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::Maximum>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Lowered to an IE Eltwise layer with operation = "max".
    LayerParams attrs = {layer->get_friendly_name(), "Eltwise",
                         details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::EltwiseLayer>(attrs);
    res->params["operation"] = "max";
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::Divide>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Lowered to an IE Eltwise layer with operation = "div".
    LayerParams attrs = {layer->get_friendly_name(), "Eltwise",
                         details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::EltwiseLayer>(attrs);
    res->params["operation"] = "div";
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::Multiply>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Lowered to an IE Eltwise layer with operation = "prod".
    LayerParams attrs = {layer->get_friendly_name(), "Eltwise",
                         details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::EltwiseLayer>(attrs);
    res->params["operation"] = "prod";
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Squeeze>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Type-check the node; the resulting layer carries no extra parameters.
    LayerParams attrs = {layer->get_friendly_name(), "Squeeze",
                         details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);
    if (ngraph::as_type_ptr<ngraph::op::Squeeze>(layer) == nullptr)
        THROW_IE_EXCEPTION << "Cannot get " << attrs.type << " layer " << attrs.name;
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v0::Unsqueeze>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Type-check the node; the resulting layer carries no extra parameters.
    LayerParams attrs = {layer->get_friendly_name(), "Unsqueeze",
                         details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);
    if (ngraph::as_type_ptr<ngraph::op::v0::Unsqueeze>(layer) == nullptr)
        THROW_IE_EXCEPTION << "Cannot get " << attrs.type << " layer " << attrs.name;
    return res;
}
template <>
// Converts ngraph Concat to an IE ConcatLayer, exporting the resolved
// concatenation axis as the "axis" parameter.
CNNLayer::Ptr NodeConverter<ngraph::op::Concat>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Concat",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::ConcatLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::Concat>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
// NOTE(review): get_concatenation_axis() is the axis after normalization of
// negative values — presumably resolved during shape inference; verify.
res->params["axis"] = asString(castedLayer->get_concatenation_axis());
return res;
}
template <>
// Converts GatherIE to an IE GatherLayer, exporting the gather axis.
CNNLayer::Ptr NodeConverter<ngraph::op::GatherIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    LayerParams params = {layer->get_friendly_name(), "Gather",
                          details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::GatherLayer>(params);
    // Consistency fix: every other converter in this file downcasts with
    // ngraph::as_type_ptr (ngraph type-info based) rather than RTTI
    // std::dynamic_pointer_cast; GatherIE is an ngraph op, so do the same here.
    auto castedLayer = ngraph::as_type_ptr<ngraph::op::GatherIE>(layer);
    if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
    res->params["axis"] = asString(castedLayer->get_axis());
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ReverseSequence>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Exports the batch and sequence axes resolved by the ngraph op.
    LayerParams attrs = {layer->get_friendly_name(), "ReverseSequence", details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::ReverseSequenceLayer>(attrs);
    auto rev = ngraph::as_type_ptr<ngraph::op::ReverseSequence>(layer);
    if (rev == nullptr)
        THROW_IE_EXCEPTION << "Cannot get " << attrs.type << " layer " << attrs.name;
    res->params["batch_axis"] = asString(rev->get_batch_axis());
    res->params["seq_axis"] = asString(rev->get_sequence_axis());
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ShapeOf>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free op: a bare CNNLayer of type "ShapeOf" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "ShapeOf",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ScaleShiftIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "ScaleShift",
@@ -329,49 +81,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::ShuffleChannels>::createLayer(const std:
return res;
}
template <>
// Converts PowerIE (y = (scale * x + shift) ^ power) to an IE PowerLayer,
// exporting the three float attributes as string parameters.
CNNLayer::Ptr NodeConverter<ngraph::op::PowerIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Power",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::PowerLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::PowerIE>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
// NOTE(review): asString(float) forwards to the double overload (see the
// asString<float> specialization above) — scale precision depends on that.
res->params["power"] = asString(castedLayer->power);
res->params["scale"] = asString(castedLayer->scale);
res->params["shift"] = asString(castedLayer->shift);
return res;
}
template <>
// Converts the legacy Eltwise op to an IE EltwiseLayer, translating the
// ELTWISE_TYPE enum into the string "operation" parameter.
CNNLayer::Ptr NodeConverter<ngraph::op::Eltwise>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    LayerParams params = {layer->get_friendly_name(), "Eltwise",
                          details::convertPrecision(layer->get_output_element_type(0))};
    auto res = std::make_shared<InferenceEngine::EltwiseLayer>(params);
    auto castedLayer = ngraph::as_type_ptr<ngraph::op::Eltwise>(layer);
    if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
    std::string type;
    switch (castedLayer->eltwise_type) {
    case ELTWISE_TYPE::Sum:
        type = "sum";
        break;
    case ELTWISE_TYPE::Sub:
        type = "sub";
        break;
    case ELTWISE_TYPE::Prod:
        type = "prod";
        break;
    // Bug fix: ELTWISE_TYPE also defines Max, Min and Div (matching the
    // EnumNames registration), but these cases previously fell through to
    // the "Not supported" throw.
    case ELTWISE_TYPE::Max:
        type = "max";
        break;
    case ELTWISE_TYPE::Min:
        type = "min";
        break;
    case ELTWISE_TYPE::Div:
        type = "div";
        break;
    default:
        THROW_IE_EXCEPTION << "Not supported eltwise type!";
    }
    res->params["operation"] = type;
    return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ResampleV2>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Resample", details::convertPrecision(layer->get_output_element_type(0))};
@@ -518,32 +227,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::v4::Interpolate>::createLayer(const std:
return res;
}
template <>
// Converts FullyConnected to an IE FullyConnectedLayer: exports "out-size"
// and attaches weight/bias blobs unless the graph asked to keep them as
// Constant nodes.
CNNLayer::Ptr NodeConverter<ngraph::op::FullyConnected>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "FullyConnected",
details::convertPrecision(layer->get_output_element_type(0))};
auto castedLayer = ngraph::as_type_ptr<ngraph::op::FullyConnected>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
auto res = std::make_shared<InferenceEngine::FullyConnectedLayer>(params);
res->params["out-size"] = asString(castedLayer->get_out_size());
// "keep_constants" in rt_info means: leave inputs 1/2 as Constant layers
// instead of folding them into this layer's blobs.
auto & rt_info = layer->get_rt_info();
bool keep_constants(false);
if (auto attr = std::dynamic_pointer_cast<ngraph::VariantWrapper<int64_t>>(rt_info["keep_constants"])) {
keep_constants = attr->get();
}
// Input 1 = weights, input 2 = biases; biases are only attached when the
// weights blob was successfully added.
const auto weightsNode = layer->input_value(1).get_node_shared_ptr();
if (!keep_constants && InferenceEngine::details::addBlob(weightsNode, res, InferenceEngine::details::weights)) {
const auto biasNode = layer->input_value(2).get_node_shared_ptr();
InferenceEngine::details::addBlob(biasNode, res, InferenceEngine::details::biases);
}
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ExecGraphInfoSerialization::ExecutionNode>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
auto castedLayer = ngraph::as_type_ptr<ExecGraphInfoSerialization::ExecutionNode>(layer);
@@ -577,123 +260,5 @@ CNNLayer::Ptr NodeConverter<ExecGraphInfoSerialization::ExecutionNode>::createLa
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Log>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free unary op: a bare CNNLayer of type "Log" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Log",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
// Converts NormalizeIE to an IE NormLayer, exporting eps / channel_shared /
// across_spatial and sharing the scale weights when input 1 is a Constant.
CNNLayer::Ptr NodeConverter<ngraph::op::NormalizeIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Normalize",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::NormLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::NormalizeIE>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
res->params["eps"] = asString(castedLayer->get_eps());
// Booleans are serialized as "1"/"0" to match the legacy IR format.
res->params["channel_shared"] = castedLayer->get_channel_shared() ? "1" : "0";
res->params["across_spatial"] = castedLayer->get_across_spatial() ? "1" : "0";
// Weights are shared (not copied) from the Constant node; a non-Constant
// input simply leaves the layer without a "weights" blob.
const auto weightsNode = layer->input_value(1).get_node_shared_ptr();
if (auto constWeights = ngraph::as_type_ptr<ngraph::op::Constant>(weightsNode)) {
Blob::Ptr dataBlob = InferenceEngine::details::shareWeights(constWeights);
res->blobs["weights"] = dataBlob;
}
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Erf>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free unary op: a bare CNNLayer of type "Erf" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Erf",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Sign>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free unary op: a bare CNNLayer of type "Sign" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Sign",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Sin>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free trig op: a bare CNNLayer of type "Sin" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Sin",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Sinh>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free trig op: a bare CNNLayer of type "Sinh" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Sinh",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Asin>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free trig op: a bare CNNLayer of type "Asin" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Asin",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Cos>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free trig op: a bare CNNLayer of type "Cos" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Cos",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Cosh>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free trig op: a bare CNNLayer of type "Cosh" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Cosh",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Acos>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free trig op: a bare CNNLayer of type "Acos" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Acos",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Tan>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free trig op: a bare CNNLayer of type "Tan" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Tan",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Atan>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free trig op: a bare CNNLayer of type "Atan" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Atan",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Sqrt>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
    // Parameter-free unary op: a bare CNNLayer of type "Sqrt" suffices.
    LayerParams attrs = {layer->get_friendly_name(), "Sqrt",
                         details::convertPrecision(layer->get_output_element_type(0))};
    return std::make_shared<InferenceEngine::CNNLayer>(attrs);
}
} // namespace Builder
} // namespace InferenceEngine

View File

@@ -45,3 +45,10 @@ void op::CropIE::validate_and_infer_types() {
set_output_type(0, get_input_element_type(0), PartialShape(output_shape));
}
// Exposes the crop geometry to the AttributeVisitor API (serialization,
// comparison); attribute names match the legacy IR parameter names.
bool op::CropIE::visit_attributes(AttributeVisitor &visitor) {
visitor.on_attribute("axis", axes);
visitor.on_attribute("dim", dim);
visitor.on_attribute("offset", offset);
return true;
}

View File

@@ -75,3 +75,27 @@ void op::Eltwise::validate_and_infer_types() {
set_output_type(0, et_result, output_shape);
}
// Serializes the eltwise kind as its lower-case enum name ("sum", "prod",
// ...) via AttributeAdapter<ELTWISE_TYPE> and EnumNames below.
bool op::Eltwise::visit_attributes(AttributeVisitor &visitor) {
visitor.on_attribute("operation", eltwise_type);
return true;
}
namespace ngraph {
// Registers string names for all six ELTWISE_TYPE values; used by
// as_enum/as_string and hence by Eltwise::visit_attributes above.
template <> EnumNames<ELTWISE_TYPE> &EnumNames<ELTWISE_TYPE>::get() {
static auto enum_names =
EnumNames<ELTWISE_TYPE>("ELTWISE_TYPE", {{"sum", ELTWISE_TYPE::Sum},
{"prod", ELTWISE_TYPE::Prod},
{"max", ELTWISE_TYPE::Max},
{"sub", ELTWISE_TYPE::Sub},
{"min", ELTWISE_TYPE::Min},
{"div", ELTWISE_TYPE::Div}});
return enum_names;
}
// Out-of-class definition required for the constexpr static member (pre-C++17
// inline-variable rules).
constexpr DiscreteTypeInfo AttributeAdapter<ELTWISE_TYPE>::type_info;
// Stream output uses the registered enum name.
std::ostream &operator<<(std::ostream &s, const ELTWISE_TYPE &type) {
return s << as_string(type);
}
} // namespace ngraph

View File

@@ -34,3 +34,8 @@ void op::FullyConnected::validate_and_infer_types() {
m_output_type == element::undefined ? input_value(0).get_element_type() : m_output_type,
m_output_shape);
}
// Exposes the output size to the AttributeVisitor API; "out-size" matches the
// legacy IR attribute name for this layer.
bool op::FullyConnected::visit_attributes(AttributeVisitor &visitor) {
visitor.on_attribute("out-size", m_output_size);
return true;
}

View File

@@ -30,3 +30,10 @@ std::shared_ptr<Node> op::PowerIE::clone_with_new_inputs(const OutputVector& new
void op::PowerIE::validate_and_infer_types() {
// Shape passes through unchanged; element type falls back to the input's
// unless an explicit output-type override was configured.
set_output_type(0, m_output_type == element::undefined ? get_input_element_type(0) : m_output_type, get_input_partial_shape(0));
}
// Exposes the scale/power/shift floats to the AttributeVisitor API.
bool op::PowerIE::visit_attributes(AttributeVisitor& visitor) {
visitor.on_attribute("scale", scale);
visitor.on_attribute("power", power);
visitor.on_attribute("shift", shift);
return true;
}

View File

@@ -31,3 +31,8 @@ void op::ReLUIE::validate_and_infer_types() {
m_output_type == element::undefined ? get_input_element_type(0) : m_output_type,
get_input_partial_shape(0));
}
// Exposes the leaky-ReLU slope to the AttributeVisitor API.
bool op::ReLUIE::visit_attributes(AttributeVisitor& visitor) {
visitor.on_attribute("negative_slope", m_negative_slope);
return true;
}