Remove ops from Layer Creator / Node Converter - part 2 (#3226)

* remove power op from layer creator

* remove prelu op from layer creator

* remove tile op from layer creator

* remove relu op from layer creator

* remove selu op from layer creator

* remove softmax op from layer creator

* remove tanh op from layer creator

* remove split op from layer creator

* remove reshape op from layer creator

* remove reverse sequence op from layer creator

* remove proposal op from layer creator

* remove priorbox op from layer creator

* remove roipooling op from layer creator

* remove priorboxclustered op from layer creator

* style fix

* utility function to parse bool-containing strings

* align priorbox scale_all_sizes parameter to specification

* change location of getBoolStrParamAsIntStr function

* align prelu creator to new constant op changes

* adjust priorbox tests to align with scale_all_sizes default value

* adjust priorbox python tests to align with scale_all_sizes default value

* align priorboxclustered attributes initialization to specification

* fix check against wrong container's end iterator in opset name search

* improve comment on roipooling parameters

* Apply review suggestion 1

Co-authored-by: Ilya Churaev <ilyachur@gmail.com>

* Apply review suggestion 2

Co-authored-by: Ilya Churaev <ilyachur@gmail.com>

* align priorbox step initial value to specification

* align roipooling method attribute to specification

* remove roipooling specific creator

* align with review comments

Co-authored-by: Ilya Churaev <ilyachur@gmail.com>
Bartosz Lesniewski 2020-12-04 17:49:36 +01:00 committed by GitHub
parent 29b8ffa40b
commit 2da6546841
27 changed files with 353 additions and 562 deletions

View File

@@ -361,6 +361,15 @@ public:
*/
std::string GetParamAsString(const char* param) const;
/**
* @brief Returns a string containing an integer if the parameter's value was
* "true" or "false"
*
* @param param Name of the layer parameter
* @return A string containing an integer, or the parameter value as a string
*/
std::string getBoolStrParamAsIntStr(const char *param) const;
/**
* @brief Gets the parameter as a std::vector<std::string>
* @param param The parameter name

View File

@@ -31,7 +31,7 @@ public:
void validate_and_infer_types() override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
bool visit_attributes(AttributeVisitor& visitor) override;
const PriorBoxClusteredAttrs& get_attrs() const { return m_attrs; }
private:

View File

@@ -32,6 +32,7 @@ public:
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
const PriorBoxAttrs& get_attrs() const { return m_attrs; }
bool visit_attributes(AttributeVisitor& visitor) override;
private:
PriorBoxAttrs m_attrs;

View File

@@ -34,7 +34,7 @@ public:
std::shared_ptr<Node>
clone_with_new_inputs(const OutputVector& new_args) const override;
bool visit_attributes(AttributeVisitor& visitor) override;
const ProposalAttrs& get_attrs() const { return m_attrs; }
private:

View File

@@ -25,7 +25,7 @@ public:
void validate_and_infer_types() override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
bool visit_attributes(AttributeVisitor& visitor) override;
float gamma, alpha;
};

View File

@@ -23,7 +23,7 @@ public:
const int64_t tiles);
void validate_and_infer_types() override;
bool visit_attributes(AttributeVisitor& visitor) override;
std::shared_ptr<Node> clone_with_new_inputs(const OutputVector& new_args) const override;
int64_t axis, tiles;

View File

@@ -7,6 +7,7 @@
#include <vector>
#include <unordered_set>
#include <regex>
#include <sstream>
#include <cnn_network_ngraph_impl.hpp>
#include "ngraph_ops/convolution_ie.hpp"
@@ -105,7 +106,10 @@ public:
}
void on_adapter(const std::string& name, ::ngraph::ValueAccessor<double>& adapter) override {
params[name] = std::to_string(adapter.get());
std::ostringstream stream;
stream.precision(8);
stream << std::fixed << adapter.get();
params[name] = stream.str();
}
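// With precision(8) and std::fixed, a double attribute such as 0.5 is
// serialized as "0.50000000"; the replaced std::to_string call would have
// produced only six digits ("0.500000"). Creators below read these values
// back with std::stof.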
void on_adapter(const std::string& name, ::ngraph::ValueAccessor<int64_t>& adapter) override {
@@ -458,32 +462,25 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);
res->params = params;
auto parseBoolStrToIntStr = [](const std::string &param) -> const std::string {
if (param == "true") {
return "1";
}
else if (param == "false") {
return "0";
}
return param;
};
if (res->params["code_type"] == "caffe.priorboxparameter.center_size"){
res->params["code_type"] = "caffe.PriorBoxParameter.CENTER_SIZE";
}
else{
res->params["code_type"] = "caffe.PriorBoxParameter.CORNER";
}
res->params["variance_encoded_in_target"] = parseBoolStrToIntStr(res->params["variance_encoded_in_target"]);
res->params["share_location"] = parseBoolStrToIntStr(res->params["share_location"]);
res->params["clip_after_nms"] = parseBoolStrToIntStr(res->params["clip_after_nms"]);
res->params["clip_before_nms"] = parseBoolStrToIntStr(res->params["clip_before_nms"]);
res->params["decrease_label_id"] = parseBoolStrToIntStr(res->params["decrease_label_id"]);
res->params["normalized"] = parseBoolStrToIntStr(res->params["normalized"]);
res->params["variance_encoded_in_target"] = res->getBoolStrParamAsIntStr("variance_encoded_in_target");
res->params["share_location"] = res->getBoolStrParamAsIntStr("share_location");
res->params["clip_after_nms"] = res->getBoolStrParamAsIntStr("clip_after_nms");
res->params["clip_before_nms"] = res->getBoolStrParamAsIntStr("clip_before_nms");
res->params["decrease_label_id"] = res->getBoolStrParamAsIntStr("decrease_label_id");
res->params["normalized"] = res->getBoolStrParamAsIntStr("normalized");
return res;
});
addSpecificCreator({"LogicalNot"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string> params) -> CNNLayerPtr {
addSpecificCreator({"LogicalNot"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string> params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Activation",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(attrs);
@@ -491,8 +488,9 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});
addSpecificCreator({"LSTMCellIE"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string> params) -> CNNLayerPtr {
addSpecificCreator({"LSTMCellIE"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string> params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "LSTMCell",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<LSTMCell>(attrs);
@@ -506,8 +504,9 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});
addSpecificCreator({"RNNCellIE"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
addSpecificCreator({"RNNCellIE"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "RNNCell",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<RNNCell>(attrs);
@@ -522,8 +521,9 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});
addSpecificCreator({"GRUCellIE"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
addSpecificCreator({"GRUCellIE"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "GRUCell",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<GRUCell>(attrs);
@@ -538,6 +538,186 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
return res;
});
addSpecificCreator({"PRelu"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "PReLU",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<PReLULayer>(attrs);
res->params = params;
const auto weightsNode = node->input_value(1).get_node_shared_ptr();
InferenceEngine::details::addBlob(weightsNode, res, InferenceEngine::details::weights);
return res;
});
addSpecificCreator({"TileIE"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Tile",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<TileLayer>(attrs);
res->params = params;
return res;
});
addSpecificCreator({"PriorBoxIE"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "PriorBox",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<CNNLayer>(attrs);
res->params = params;
res->params["clip"] = res->getBoolStrParamAsIntStr("clip");
res->params["flip"] = res->getBoolStrParamAsIntStr("flip");
res->params["scale_all_sizes"] = res->getBoolStrParamAsIntStr("scale_all_sizes");
auto scale_all_sizes = std::stoi(res->params["scale_all_sizes"]);
if (!scale_all_sizes) {
auto data_pshape = node->get_input_partial_shape(0);
if (data_pshape.is_dynamic()) THROW_IE_EXCEPTION << "Dynamic 0-port input of PriorBox is not supported";
auto data_shape = data_pshape.to_shape();
if (data_shape.size() != 4) THROW_IE_EXCEPTION << "PriorBox has " << data_shape.size() << " items in 0-port input, 4 expected";
auto img_pshape = node->get_input_partial_shape(1);
if (img_pshape.is_dynamic()) THROW_IE_EXCEPTION << "Dynamic 1-port input of PriorBox is not supported";
auto img_shape = img_pshape.to_shape();
if (img_shape.size() != 4) THROW_IE_EXCEPTION << "PriorBox has " << data_shape.size() << " items in 1-port input, 4 expected";
// mxnet-like PriorBox
auto img_H = img_shape[2];
auto data_H = data_shape[2];
auto step = std::stof(res->params["step"]);
if (step == -1)
step = img_H / static_cast<float>(data_H);
else
step *= img_H;
res->params["step"] = Builder::asString(step);
auto min_size = details::split(res->params["min_size"], ",");
for (auto &size : min_size) {
size = Builder::asString(std::stof(size) * img_H);
}
res->params["min_size"] = details::joinVec(min_size);
}
return res;
});
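// A worked example of the mxnet-like branch above (hypothetical shapes):
// with a 1x256x10x10 data input and a 1x3x300x300 image input, img_H = 300
// and data_H = 10, so step == -1 becomes 300 / 10.f = 30, an explicit step
// of 0.5 becomes 0.5 * 300 = 150, and min_size entries of 0.1 and 0.2 are
// rescaled to 30 and 60.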
addSpecificCreator({"PriorBoxClusteredIE"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "PriorBoxClustered",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<CNNLayer>(attrs);
res->params = params;
res->params["clip"] =
res->getBoolStrParamAsIntStr("clip");
auto step_h = std::stof(res->params["step_h"]);
auto step_w = std::stof(res->params["step_w"]);
if (std::abs(step_h - step_w) < 1e-5) {
res->params["step"] = res->params["step_w"];
}
return res;
});
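// If step_w and step_h were serialized with (near-)equal values, e.g. both
// 16.0, a combined params["step"] entry is added alongside them; values
// differing by 1e-5 or more keep only the separate step_w/step_h entries.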
addSpecificCreator({"ProposalIE"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Proposal",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<CNNLayer>(attrs);
res->params = params;
res->params["clip_before_nms"] =
res->getBoolStrParamAsIntStr("clip_before_nms");
res->params["clip_after_nms"] =
res->getBoolStrParamAsIntStr("clip_after_nms");
res->params["normalize"] = res->getBoolStrParamAsIntStr("normalize");
return res;
});
addSpecificCreator({"Relu"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "ReLU",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<ReLULayer>(attrs);
res->params = params;
return res;
});
addSpecificCreator({"Reshape"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Reshape",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<ReshapeLayer>(attrs);
return res;
});
addSpecificCreator({"ReverseSequence"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "ReverseSequence",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<ReverseSequenceLayer>(attrs);
res->params = params;
return res;
});
addSpecificCreator({"SeluIE"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Selu",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<CNNLayer>(attrs);
res->params = params;
return res;
});
addSpecificCreator({"Softmax"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "SoftMax",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<SoftMaxLayer>(attrs);
res->params = params;
return res;
});
addSpecificCreator({"Split"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "Split",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<SplitLayer>(attrs);
auto axis_node = node->input_value(1).get_node_shared_ptr();
const auto axis_node_const = std::dynamic_pointer_cast<ngraph::op::Constant>(axis_node);
if (!axis_node_const) {
THROW_IE_EXCEPTION << "Split " << node->get_friendly_name() << " has no axes as Constant";
}
auto axis = axis_node_const->cast_vector<int64_t>()[0];
if (axis < 0) {
axis += node->get_input_shape(0).size();
}
res->params["axis"] = Builder::asString(axis);
return res;
});
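// Example of the axis normalization above: for a 4-D input, a Split axis
// constant of -1 is remapped to -1 + 4 = 3 before being stored in
// params["axis"].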
addSpecificCreator({"Tanh"},
[](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "TanH",
details::convertPrecision(node->get_output_element_type(0))};
auto res = std::make_shared<CNNLayer>(attrs);
res->params = params;
return res;
});
addSpecificCreator({"ScatterElementsUpdate"}, [](const std::shared_ptr<::ngraph::Node>& node,
const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), node->description(),
@@ -1032,7 +1212,6 @@ void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function
std::make_shared<Builder::NodeConverter<::ngraph::op::CTCGreedyDecoder>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::DeformableConvolution>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::DeformablePSROIPooling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Reshape>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Eltwise>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Ceiling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::GatherIE>>(),
@@ -1042,29 +1221,16 @@ void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::MaxPool>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Minimum>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::NormalizeIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::PRelu>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Power>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::PowerIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::PriorBoxClusteredIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::PriorBoxIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ProposalIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Relu>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::SeluIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ReLUIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ReverseSequence>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ResampleV2>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::RegionYolo>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ReorgYolo>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ROIPooling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::PSROIPooling>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ScaleShiftIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::SquaredDifference>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Softmax>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Split>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::VariadicSplit>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::v1::Subtract>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::Tanh>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::TileIE>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::TensorIterator>>(),
std::make_shared<Builder::NodeConverter<::ngraph::opset5::Loop>>(),
std::make_shared<Builder::NodeConverter<::ngraph::op::ShuffleChannels>>(),

View File

@@ -424,28 +424,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::Tanh>::createLayer(const std::shared_ptr
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::Relu>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "ReLU",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::ReLULayer>(params);
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::SeluIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Selu",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::SeluIE>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
res->params["alpha"] = asString(castedLayer->alpha);
res->params["gamma"] = asString(castedLayer->gamma);
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ReLUIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "ReLU",
@@ -524,18 +502,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::CropIE>::createLayer(const std::shared_p
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::Softmax>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "SoftMax",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::SoftMaxLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::v1::Softmax>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
res->params["axis"] = asString(castedLayer->get_axis());
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::Subtract>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Eltwise",
@@ -545,15 +511,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::v1::Subtract>::createLayer(const std::sh
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::Power>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Eltwise",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::EltwiseLayer>(params);
res->params["operation"] = "pow";
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::Maximum>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Eltwise",
@@ -814,22 +771,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::v1::MaxPool>::createLayer(const std::sha
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ROIPooling>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "ROIPooling",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::ROIPooling>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
res->params["pooled_h"] = asString(castedLayer->get_output_size()[0]);
res->params["pooled_w"] = asString(castedLayer->get_output_size()[1]);
res->params["spatial_scale"] = asString(castedLayer->get_spatial_scale());
res->params["method"] = castedLayer->get_method();
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::PSROIPooling>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "PSROIPooling",
@@ -873,56 +814,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::v1::DeformablePSROIPooling>::createLayer
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::PRelu>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "PReLU",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::PReLULayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::PRelu>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
const auto weightsNode = castedLayer->input_value(1).get_node_shared_ptr();
if (auto const_weights = ngraph::as_type_ptr<ngraph::op::Constant>(weightsNode)) {
SizeVector dataShape = const_weights->get_shape();
if (dataShape.size() >= 2 && ngraph::shape_size(dataShape) == dataShape[1]) {
dataShape = {dataShape[1]};
}
Blob::Ptr dataBlb = InferenceEngine::details::shareWeights(const_weights);
res->blobs["weights"] = dataBlb;
res->_weights = dataBlb;
}
auto const_shape = castedLayer->input(1).get_shape(), tensor_shape = castedLayer->input(0).get_shape();
if (const_shape.size() == 1 && const_shape[0] == 1) {
res->params["channel_shared"] = "true";
}
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::Split>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Split",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::SplitLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::v1::Split>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
auto axis_node = castedLayer->input_value(1).get_node_shared_ptr();
const auto axis_node_const = std::dynamic_pointer_cast<ngraph::op::Constant>(axis_node);
if (!axis_node_const) {
THROW_IE_EXCEPTION << "Split " << castedLayer->get_friendly_name() << " has no axes as Constant";
}
auto axis = axis_node_const->cast_vector<int64_t>()[0];
if (axis < 0) {
axis += castedLayer->get_input_shape(0).size();
}
res->params["axis"] = asString(axis);
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::VariadicSplit>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Split",
@@ -995,31 +886,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::ShapeOf>::createLayer(const std::shared_
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::v1::Reshape>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Reshape",
details::convertPrecision(layer->get_output_element_type(0))};
auto castedLayer = ngraph::as_type_ptr<ngraph::op::v1::Reshape>(layer);
if (castedLayer == nullptr)
THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
const auto constNode = castedLayer->input_value(1).get_node_shared_ptr();
if (auto constValue = ngraph::as_type_ptr<ngraph::op::Constant>(constNode)) {
auto value = constValue->cast_vector<int64_t>();
for (auto & i : value) {
if (i == 0 && !castedLayer->get_special_zero())
THROW_IE_EXCEPTION << "Reshape " << params.name << " has `special_zero`=False and zeros in second input. This combination is not supported";
}
} else {
THROW_IE_EXCEPTION << "Reshape " << params.name << " has dynamic second input!";
}
auto res = std::make_shared<InferenceEngine::ReshapeLayer>(params);
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ScaleShiftIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "ScaleShift",
@@ -1057,164 +923,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::ShuffleChannels>::createLayer(const std:
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ProposalIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Proposal",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::ProposalIE>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
auto attr = castedLayer->get_attrs();
std::string param;
for (const auto& val : attr.ratio) {
if (!param.empty()) param += ",";
param += asString(val);
}
res->params["ratio"] = param;
param.clear();
for (const auto& val : attr.scale) {
if (!param.empty()) param += ",";
param += asString(val);
}
res->params["scale"] = param;
res->params["base_size"] = asString(attr.base_size);
res->params["pre_nms_topn"] = asString(attr.pre_nms_topn);
res->params["post_nms_topn"] = asString(attr.post_nms_topn);
res->params["nms_thresh"] = asString(attr.nms_thresh);
res->params["feat_stride"] = asString(attr.feat_stride);
res->params["min_size"] = asString(attr.min_size);
res->params["box_size_scale"] = asString(attr.box_size_scale);
res->params["box_coordinate_scale"] = asString(attr.box_coordinate_scale);
res->params["clip_before_nms"] = asString(attr.clip_before_nms ? 1 : 0);
res->params["clip_after_nms"] = asString(attr.clip_after_nms ? 1 : 0);
res->params["normalize"] = asString(attr.normalize ? 1 : 0);
res->params["framework"] = attr.framework;
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::PriorBoxClusteredIE>::createLayer(
const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "PriorBoxClustered",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::PriorBoxClusteredIE>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
auto attr = castedLayer->get_attrs();
std::string param;
for (const auto& val : attr.widths) {
if (!param.empty()) param += ",";
param += asString(val);
}
res->params["width"] = param;
param.clear();
for (const auto& val : attr.heights) {
if (!param.empty()) param += ",";
param += asString(val);
}
res->params["height"] = param;
param.clear();
for (const auto& val : attr.variances) {
if (!param.empty()) param += ",";
param += asString(val);
}
res->params["variance"] = param;
if (std::abs(attr.step_heights - attr.step_widths) < 1e-5) {
res->params["step"] = asString(attr.step_widths);
} else {
res->params["step_w"] = asString(attr.step_widths);
res->params["step_h"] = asString(attr.step_heights);
}
res->params["offset"] = asString(attr.offset);
res->params["clip"] = asString(attr.clip ? 1 : 0);
res->params["flip"] = "1";
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::PriorBoxIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "PriorBox",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::CNNLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::PriorBoxIE>(layer);
auto layer_info = params.type + " layer " + params.name;
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << layer_info;
auto attr = castedLayer->get_attrs();
std::string param;
auto data_pshape = castedLayer->get_input_partial_shape(0);
if (data_pshape.is_dynamic()) THROW_IE_EXCEPTION << "Dynamic 0-port input of " << layer_info << " is not supported";
auto data_shape = data_pshape.to_shape();
if (data_shape.size() != 4) THROW_IE_EXCEPTION << layer_info << " has " << data_shape.size() << " items in 0-port input, 4 expected";
auto img_pshape = castedLayer->get_input_partial_shape(1);
if (img_pshape.is_dynamic()) THROW_IE_EXCEPTION << "Dynamic 1-port input of " << layer_info << " is not supported";
auto img_shape = img_pshape.to_shape();
if (img_shape.size() != 4) THROW_IE_EXCEPTION << layer_info << " has " << img_shape.size() << " items in 1-port input, 4 expected";
if (!attr.scale_all_sizes) {
// mxnet-like PriorBox
auto img_H = img_shape[2];
auto data_H = data_shape[2];
if (attr.step == -1)
attr.step = static_cast<float>(1. * img_H / data_H);
else
attr.step *= img_H;
for (auto& size : attr.min_size)
size *= img_H;
}
for (const auto& val : attr.max_size) {
if (!param.empty()) param += ",";
param += asString(val);
}
res->params["max_size"] = param;
param.clear();
for (const auto& val : attr.min_size) {
if (!param.empty()) param += ",";
param += asString(val);
}
res->params["min_size"] = param;
param.clear();
for (const auto& val : attr.aspect_ratio) {
if (!param.empty()) param += ",";
param += asString(val);
}
res->params["aspect_ratio"] = param;
param.clear();
for (const auto& val : attr.variance) {
if (!param.empty()) param += ",";
param += asString(val);
}
res->params["variance"] = param;
res->params["step"] = asString(attr.step);
res->params["offset"] = asString(attr.offset);
res->params["clip"] = asString(attr.clip ? 1 : 0);
res->params["flip"] = asString(attr.flip ? 1 : 0);
res->params["scale_all_sizes"] = asString(attr.scale_all_sizes ? 1 : 0);
res->params["density"] = asString(attr.density);
res->params["fixed_size"] = asString(attr.fixed_size);
res->params["fixed_ratio"] = asString(attr.fixed_ratio);
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::PowerIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Power",
@@ -1258,20 +966,6 @@ CNNLayer::Ptr NodeConverter<ngraph::op::Eltwise>::createLayer(const std::shared_
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::TileIE>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Tile",
details::convertPrecision(layer->get_output_element_type(0))};
auto res = std::make_shared<InferenceEngine::TileLayer>(params);
auto castedLayer = ngraph::as_type_ptr<ngraph::op::TileIE>(layer);
if (castedLayer == nullptr) THROW_IE_EXCEPTION << "Cannot get " << params.type << " layer " << params.name;
res->params["axis"] = asString(castedLayer->axis);
res->params["tiles"] = asString(castedLayer->tiles);
return res;
}
template <>
CNNLayer::Ptr NodeConverter<ngraph::op::ResampleV2>::createLayer(const std::shared_ptr<ngraph::Node>& layer) const {
LayerParams params = {layer->get_friendly_name(), "Resample", details::convertPrecision(layer->get_output_element_type(0))};

View File

@@ -306,6 +306,16 @@ std::string CNNLayer::GetParamAsString(const char* param) const {
return (*it).second;
}
std::string CNNLayer::getBoolStrParamAsIntStr(const char *param) const {
std::string val = GetParamAsString(param);
if (val == "true" || val == "True") {
return "1";
} else if (val == "false" || val == "False") {
return "0";
}
return val;
}
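// Usage sketch for a hypothetical layer parameter: "true"/"True" and
// "false"/"False" are normalized to "1"/"0", while anything else (e.g. an
// already-numeric "1") is returned unchanged:
//   layer->params["clip"] = "true";
//   layer->params["clip"] = layer->getBoolStrParamAsIntStr("clip"); // "1"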
std::vector<std::string> CNNLayer::GetParamAsStrings(const char* param, std::vector<std::string> def) const {
std::string vals = GetParamAsString(param, "");
std::vector<std::string> result;

View File

@@ -37,3 +37,27 @@ std::shared_ptr<Node> op::PriorBoxClusteredIE::clone_with_new_inputs(const Outpu
check_new_args_count(this, new_args);
return make_shared<PriorBoxClusteredIE>(new_args.at(0), new_args.at(1), m_attrs);
}
bool op::PriorBoxClusteredIE::visit_attributes(AttributeVisitor& visitor)
{
float step = 0;
visitor.on_attribute("step", step);
visitor.on_attribute("step_w", m_attrs.step_widths);
visitor.on_attribute("step_h", m_attrs.step_heights);
if(step != 0) {
// deserialization: if step_w/h is 0 replace it with step
if (m_attrs.step_widths == 0) {
m_attrs.step_widths = step;
}
if (m_attrs.step_heights == 0) {
m_attrs.step_heights = step;
}
}
visitor.on_attribute("width", m_attrs.widths);
visitor.on_attribute("height", m_attrs.heights);
visitor.on_attribute("clip", m_attrs.clip);
visitor.on_attribute("offset", m_attrs.offset);
visitor.on_attribute("variance", m_attrs.variances);
return true;
}
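// Deserialization sketch (hypothetical IR): an XML node carrying step="2.0"
// with step_w/step_h absent, and therefore left at their 0.0f defaults,
// ends up with step_widths == step_heights == 2.0 after this visitor runs.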

View File

@@ -34,3 +34,19 @@ std::shared_ptr<Node> op::PriorBoxIE::clone_with_new_inputs(const OutputVector&
check_new_args_count(this, new_args);
return make_shared<PriorBoxIE>(new_args.at(0), new_args.at(1), m_attrs);
}
bool op::PriorBoxIE::visit_attributes(AttributeVisitor& visitor) {
visitor.on_attribute("min_size", m_attrs.min_size);
visitor.on_attribute("max_size", m_attrs.max_size);
visitor.on_attribute("aspect_ratio", m_attrs.aspect_ratio);
visitor.on_attribute("density", m_attrs.density);
visitor.on_attribute("fixed_ratio", m_attrs.fixed_ratio);
visitor.on_attribute("fixed_size", m_attrs.fixed_size);
visitor.on_attribute("clip", m_attrs.clip);
visitor.on_attribute("flip", m_attrs.flip);
visitor.on_attribute("step", m_attrs.step);
visitor.on_attribute("offset", m_attrs.offset);
visitor.on_attribute("variance", m_attrs.variance);
visitor.on_attribute("scale_all_sizes", m_attrs.scale_all_sizes);
return true;
}

View File

@@ -60,3 +60,21 @@ shared_ptr<Node> op::ProposalIE::clone_with_new_inputs(const OutputVector& new_a
check_new_args_count(this, new_args);
return make_shared<ProposalIE>(new_args.at(0), new_args.at(1), new_args.at(2), m_attrs);
}
bool op::ProposalIE::visit_attributes(AttributeVisitor& visitor){
visitor.on_attribute("ratio", m_attrs.ratio);
visitor.on_attribute("scale", m_attrs.scale);
visitor.on_attribute("base_size", m_attrs.base_size);
visitor.on_attribute("pre_nms_topn", m_attrs.pre_nms_topn);
visitor.on_attribute("post_nms_topn", m_attrs.post_nms_topn);
visitor.on_attribute("nms_thresh", m_attrs.nms_thresh);
visitor.on_attribute("feat_stride", m_attrs.feat_stride);
visitor.on_attribute("min_size", m_attrs.min_size);
visitor.on_attribute("box_size_scale", m_attrs.box_size_scale);
visitor.on_attribute("box_coordinate_scale", m_attrs.box_coordinate_scale);
visitor.on_attribute("clip_before_nms", m_attrs.clip_before_nms);
visitor.on_attribute("clip_after_nms", m_attrs.clip_after_nms);
visitor.on_attribute("normalize", m_attrs.normalize);
visitor.on_attribute("framework", m_attrs.framework);
return true;
}

View File

@@ -30,3 +30,9 @@ std::shared_ptr<Node> op::SeluIE::clone_with_new_inputs(const OutputVector& new_
void op::SeluIE::validate_and_infer_types() {
set_output_type(0, get_input_element_type(0), get_input_partial_shape(0));
}
bool op::SeluIE::visit_attributes(AttributeVisitor& visitor) {
visitor.on_attribute("alpha", alpha);
visitor.on_attribute("gamma", gamma);
return true;
}
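// For reference, alpha and gamma parameterize the activation
// selu(x) = gamma * (x > 0 ? x : alpha * (exp(x) - 1)).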

View File

@@ -41,3 +41,9 @@ void op::TileIE::validate_and_infer_types() {
set_output_type(0, get_input_element_type(0), output_pshape);
}
bool op::TileIE::visit_attributes(AttributeVisitor& visitor){
visitor.on_attribute("axis", axis);
visitor.on_attribute("tiles", tiles);
return true;
}

View File

@@ -405,7 +405,6 @@ std::shared_ptr<ngraph::Node> V10Parser::createNode(const std::vector<ngraph::Ou
std::make_shared<LayerCreator<ngraph::op::DepthToSpace>>("DepthToSpace"),
std::make_shared<LayerCreator<ngraph::op::v1::Subtract>>("Subtract"),
std::make_shared<LayerCreator<ngraph::op::v1::Broadcast>>("Broadcast"),
std::make_shared<LayerCreator<ngraph::op::v1::Reshape>>("Reshape"),
std::make_shared<LayerCreator<ngraph::op::v1::StridedSlice>>("StridedSlice"),
std::make_shared<LayerCreator<ngraph::op::v1::Gather>>("Gather"),
std::make_shared<LayerCreator<ngraph::op::v1::GreaterEqual>>("GreaterEqual"),
@@ -424,23 +423,11 @@ std::shared_ptr<ngraph::Node> V10Parser::createNode(const std::vector<ngraph::Ou
std::make_shared<LayerCreator<ngraph::op::v1::Minimum>>("Minimum"),
std::make_shared<LayerCreator<ngraph::op::v1::NonMaxSuppression>>("NonMaxSuppression"),
std::make_shared<LayerCreator<ngraph::op::NormalizeL2>>("NormalizeL2"),
std::make_shared<LayerCreator<ngraph::op::PRelu>>("PReLU"),
std::make_shared<LayerCreator<ngraph::op::Relu>>("ReLU"),
std::make_shared<LayerCreator<ngraph::op::v1::Power>>("Power"),
std::make_shared<LayerCreator<ngraph::op::ReverseSequence>>("ReverseSequence"),
std::make_shared<LayerCreator<ngraph::op::PriorBox>>("PriorBox"),
std::make_shared<LayerCreator<ngraph::op::PriorBoxClustered>>("PriorBoxClustered"),
std::make_shared<LayerCreator<ngraph::op::ReorgYolo>>("ReorgYolo"),
std::make_shared<LayerCreator<ngraph::op::RegionYolo>>("RegionYolo"),
std::make_shared<LayerCreator<ngraph::op::Result>>("Result"),
std::make_shared<LayerCreator<ngraph::op::ROIPooling>>("ROIPooling"),
std::make_shared<LayerCreator<ngraph::op::PSROIPooling>>("PSROIPooling"),
std::make_shared<LayerCreator<ngraph::op::v0::Selu>>("Selu"),
std::make_shared<LayerCreator<ngraph::op::v1::Softmax>>("Softmax"),
std::make_shared<LayerCreator<ngraph::op::v1::Split>>("Split"),
std::make_shared<LayerCreator<ngraph::op::VariadicSplit>>("VariadicSplit"),
std::make_shared<LayerCreator<ngraph::op::Tanh>>("TanH"),
std::make_shared<LayerCreator<ngraph::op::v0::Tile>>("Tile"),
std::make_shared<LayerCreator<ngraph::op::TensorIterator>>("TensorIterator"),
std::make_shared<LayerCreator<ngraph::opset5::Loop>>("Loop"),
std::make_shared<LayerCreator<ngraph::op::v1::LogicalAnd>>("LogicalAnd"),
@@ -496,11 +483,16 @@ std::shared_ptr<ngraph::Node> V10Parser::createNode(const std::vector<ngraph::Ou
if (type == "Const") {
type = "Constant";
}
if (!opset.contains_type(type)) {
// ROIPooling was missing in opset1 and was added in opset2
if (type == "ROIPooling" && params.version == "opset1") {
opset = opsets.at("opset2");
}
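// e.g. an IR layer with type="ROIPooling" and version="opset1" is resolved
// against opset2, where the op is actually registered, instead of failing
// the lookup below.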
if (!opset.contains_type_insensitive(type)) {
THROW_IE_EXCEPTION << "Opset " << params.version << " doesn't contain the operation with type: " << type;
}
ngraphNode = std::shared_ptr<ngraph::Node>(opset.create(type));
ngraphNode = std::shared_ptr<ngraph::Node>(opset.create_insensitive(type));
ngraphNode->set_friendly_name(params.name);
ngraphNode->set_arguments(inputs);
XmlDeserializer visitor(node, weights);
@@ -769,72 +761,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::opset5::Loop>::cre
return fillSubGraphLayer(inputs, node, weights, layerParsePrms, loop);
}
// PriorBoxClustered layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::PriorBoxClustered>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 2);
pugi::xml_node dn = node.child("data");
if (dn.empty())
THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
ngraph::op::PriorBoxClusteredAttrs attr;
attr.widths = getParameters<float>(dn, "width");
attr.heights = getParameters<float>(dn, "height");
attr.variances = getParameters<float>(dn, "variance");
attr.offset = GetFloatAttr(dn, "offset");
float step = GetFloatAttr(dn, "step", 0);
attr.step_heights = GetFloatAttr(dn, "step_h", step);
attr.step_widths = GetFloatAttr(dn, "step_w", step);
if (step != 0) {
attr.step_heights = step;
attr.step_widths = step;
}
attr.clip = (GetIntAttr(dn, "clip") != 0);
return std::make_shared<ngraph::op::PriorBoxClustered>(inputs[0], inputs[1], attr);
}
// PriorBox layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::PriorBox>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 2);
pugi::xml_node dn = node.child("data");
if (dn.empty())
THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
ngraph::op::PriorBoxAttrs attr;
attr.min_size = getParameters<float>(dn, "min_size", {});
attr.max_size = getParameters<float>(dn, "max_size", {});
attr.density = getParameters<float>(dn, "density", {});
attr.fixed_size = getParameters<float>(dn, "fixed_size", {});
attr.fixed_ratio = getParameters<float>(dn, "fixed_ratio", {});
attr.aspect_ratio = getParameters<float>(dn, "aspect_ratio", {});
attr.variance = getParameters<float>(dn, "variance", {});
attr.step = GetFloatAttr(dn, "step", 0);
attr.offset = GetFloatAttr(dn, "offset");
attr.clip = (GetIntAttr(dn, "clip") != 0);
attr.flip = (GetIntAttr(dn, "flip") != 0);
attr.scale_all_sizes = (GetIntAttr(dn, "scale_all_sizes", 1) != 0);
return std::make_shared<ngraph::op::PriorBox>(inputs[0], inputs[1], attr);
}
// ReverseSequence layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::ReverseSequence>::createLayer(const ngraph::OutputVector & inputs, const pugi::xml_node& node,
const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 2);
pugi::xml_node dn = node.child("data");
return std::make_shared<ngraph::op::ReverseSequence>(inputs[0], inputs[1], GetIntAttr(dn, "batch_axis", 0), GetIntAttr(dn, "seq_axis", 1));
}
// Convert layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::Convert>::createLayer(
@@ -962,21 +888,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::VariadicSplit>
return std::make_shared<ngraph::op::VariadicSplit>(inputs[0], inputs[1], inputs[2]);
}
// Split layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Split>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
pugi::xml_node dn = node.child("data");
if (dn.empty())
THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
int num_splits = GetIntAttr(dn, "num_splits");
checkParameters(inputs, layerParsePrms, 2);
return std::make_shared<ngraph::op::v1::Split>(inputs[0], inputs[1], num_splits);
}
// SpaceToDepth layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::SpaceToDepth>::createLayer(
@@ -1005,42 +916,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::DepthToSpace>:
return std::make_shared<ngraph::op::DepthToSpace>(inputs[0], GetStrAttr(dn, "mode"), GetIntAttr(dn, "block_size", 1));
}
// SeLU layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v0::Selu>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 3);
return std::make_shared<ngraph::op::v0::Selu>(inputs[0], inputs[1], inputs[2]);
}
// PReLU layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::PRelu>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 2);
return std::make_shared<ngraph::op::PRelu>(inputs[0], inputs[1]);
}
// ReLU layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::Relu>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 1);
return std::make_shared<ngraph::op::Relu>(inputs[0]);
}
// Tanh layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::Tanh>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 1);
return std::make_shared<ngraph::op::Tanh>(inputs[0]);
}
// Result layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::Result>::createLayer(
@@ -1050,15 +925,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::Result>::creat
return std::make_shared<ngraph::op::Result>(inputs[0]);
}
// Tile layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v0::Tile>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 2);
return std::make_shared<ngraph::op::v0::Tile>(inputs[0], inputs[1]);
}
// StridedSlice layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::StridedSlice>::createLayer(
@@ -1084,20 +950,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::StridedSli
}
}
// Reshape layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Reshape>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 2);
pugi::xml_node dn = node.child("data");
if (dn.empty())
THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
return std::make_shared<ngraph::op::v1::Reshape>(inputs[0], inputs[1], GetBoolAttr(dn, "special_zero"));
}
// Minimum layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Minimum>::createLayer(
@@ -1129,29 +981,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Broadcast>
THROW_IE_EXCEPTION << "Invalid number of inputs: " << layerParsePrms.inputPorts.size();
}
// Power layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Power>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 2);
return std::make_shared<ngraph::op::v1::Power>(inputs[0], inputs[1]);
}
// Softmax layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::Softmax>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 1);
pugi::xml_node dn = node.child("data");
if (dn.empty())
THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
return std::make_shared<ngraph::op::v1::Softmax>(inputs[0], GetUIntAttr(dn, "axis"));
}
// RegionYolo layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::RegionYolo>::createLayer(
@@ -1447,25 +1276,6 @@ std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::v1::MaxPool>::
pad_type);
}
// ROIPooling layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::ROIPooling>::createLayer(
const ngraph::OutputVector& inputs, const pugi::xml_node& node, const Blob::CPtr& weights,
const GenericLayerParams& layerParsePrms) {
checkParameters(inputs, layerParsePrms, 2);
pugi::xml_node dn = node.child("data");
if (dn.empty())
THROW_IE_EXCEPTION << "Cannot read parameter for " << getType() << " layer with name: " << layerParsePrms.name;
auto pooled_h = GetUIntAttr(dn, "pooled_h");
auto pooled_w = GetUIntAttr(dn, "pooled_w");
auto spatial_scale = GetFloatAttr(dn, "spatial_scale");
auto method = GetStrAttr(dn, "method", "max");
return std::make_shared<ngraph::op::ROIPooling>(inputs[0], inputs[1],
ngraph::Shape {pooled_h, pooled_w}, spatial_scale, method);
}
// PSROIPooling layer
template <>
std::shared_ptr<ngraph::Node> V10Parser::LayerCreator<ngraph::op::PSROIPooling>::createLayer(

View File

@@ -159,6 +159,9 @@ namespace FuncTestUtils {
}
} else {
if (item.first == "originalLayersNames") continue;
// The ROIPooling specification says there should be two parameters: pooled_h and pooled_w.
// Our implementation of this op has a single parameter: output_size.
if (item.first == "output_size" && layer->type == "ROIPooling") continue;
// autob is a WA for nGraph ops
if ((item.first != "auto_broadcast" && item.first != "autob") || item.second != "numpy") {
success = false;

View File

@@ -41,10 +41,10 @@ namespace ngraph
std::vector<float> fixed_size;
bool clip = false;
bool flip = false;
float step = 1.0f;
float step = 0.0f;
float offset = 0.0f;
std::vector<float> variance;
bool scale_all_sizes = false;
bool scale_all_sizes = true;
};
namespace v0

View File

@@ -33,9 +33,9 @@ namespace ngraph
// variances Values to adjust prior boxes with
std::vector<float> widths;
std::vector<float> heights;
bool clip = false;
float step_widths = 1.0f;
float step_heights = 1.0f;
bool clip = true;
float step_widths = 0.0f;
float step_heights = 0.0f;
float offset = 0.0f;
std::vector<float> variances;
};

View File

@@ -46,6 +46,7 @@ namespace ngraph
bool evaluate(const HostTensorVector& outputs,
const HostTensorVector& inputs) const override;
bool visit_attributes(AttributeVisitor& visitor) override;
};
}
using v0::Relu;

View File

@@ -56,7 +56,7 @@ namespace ngraph
void set_sequence_axis(int64_t sequence_axis) { m_seq_axis = sequence_axis; }
private:
int64_t m_batch_axis;
int64_t m_seq_axis;
int64_t m_seq_axis = 1;
size_t m_normalized_batch_axis;
size_t m_normalized_seq_axis;
};

View File

@@ -54,9 +54,9 @@ namespace ngraph
bool visit_attributes(AttributeVisitor& visitor) override;
private:
Shape m_output_size;
Shape m_output_size{0, 0};
float m_spatial_scale;
std::string m_method;
std::string m_method = "max";
};
} // namespace v0

View File

@@ -98,7 +98,7 @@ namespace ngraph
{
std::lock_guard<std::mutex> guard(get_mutex());
return m_case_insensitive_type_info_map.find(to_upper_name(name)) !=
m_name_type_info_map.end();
m_case_insensitive_type_info_map.end();
}
/// \brief Return true if node's type is in the opset

View File

@@ -96,13 +96,31 @@ shared_ptr<Node> op::PriorBoxClustered::clone_with_new_inputs(const OutputVector
bool op::PriorBoxClustered::visit_attributes(AttributeVisitor& visitor)
{
visitor.on_attribute("widths", m_attrs.widths);
visitor.on_attribute("heights", m_attrs.heights);
float step = 0;
float step_w_tmp = m_attrs.step_widths;
float step_h_tmp = m_attrs.step_heights;
visitor.on_attribute("step", step);
visitor.on_attribute("step_w", m_attrs.step_widths);
visitor.on_attribute("step_h", m_attrs.step_heights);
if (step != 0)
{
// deserialization:
// if step_w/h is 0 or did not change, replace it with step
if (m_attrs.step_widths == 0 || m_attrs.step_widths == step_w_tmp)
{
m_attrs.step_widths = step;
}
if (m_attrs.step_heights == 0 || m_attrs.step_heights == step_h_tmp)
{
m_attrs.step_heights = step;
}
}
visitor.on_attribute("width", m_attrs.widths);
visitor.on_attribute("height", m_attrs.heights);
visitor.on_attribute("clip", m_attrs.clip);
visitor.on_attribute("step_widths", m_attrs.step_widths);
visitor.on_attribute("step_heights", m_attrs.step_heights);
visitor.on_attribute("offset", m_attrs.offset);
visitor.on_attribute("variances", m_attrs.variances);
visitor.on_attribute("variance", m_attrs.variances);
return true;
}
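// Round-trip sketch for the fallback above (hypothetical attributes): attrs
// serialized with step == 4.0 and step_w/step_h still at 0 deserialize to
// step_widths == step_heights == 4.0, while explicitly set step_w/step_h
// values are kept because they differ from the saved temporaries.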

View File

@@ -81,3 +81,8 @@ bool op::Relu::evaluate(const HostTensorVector& outputs, const HostTensorVector&
OV_ITT_SCOPED_TASK(itt::domains::nGraphOp, "op::Relu::evaluate");
return relu::evaluate_relu(inputs[0], outputs[0], shape_size(get_output_shape(0)));
}
bool op::Relu::visit_attributes(AttributeVisitor& visitor)
{
return true;
}

View File

@@ -147,6 +147,8 @@ shared_ptr<Node> op::ROIPooling::clone_with_new_inputs(const OutputVector& new_a
bool op::ROIPooling::visit_attributes(AttributeVisitor& visitor)
{
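// pooled_h/pooled_w alias m_output_size[0] and m_output_size[1] (the member
// is now default-initialized to {0, 0} so these references stay valid),
// letting the visitor emit and accept the attribute names used by the IR
// specification alongside output_size.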
visitor.on_attribute("output_size", m_output_size);
visitor.on_attribute("pooled_h", m_output_size[0]);
visitor.on_attribute("pooled_w", m_output_size[1]);
visitor.on_attribute("spatial_scale", m_spatial_scale);
visitor.on_attribute("method", m_method);
return true;

View File

@@ -866,6 +866,7 @@ def test_prior_box(int_dtype, fp_dtype):
"offset": fp_dtype(0),
"min_size": np.array([2, 3], dtype=fp_dtype),
"aspect_ratio": np.array([1.5, 2.0, 2.5], dtype=fp_dtype),
"scale_all_sizes": False
}
layer_shape = ng.constant(np.array([32, 32], dtype=int_dtype), int_dtype)
@@ -896,8 +897,8 @@ def test_prior_box_clustered(int_dtype, fp_dtype):
image_size = np.array([64, 64], dtype=int_dtype)
attributes = {
"offset": fp_dtype(0.5),
"widths": np.array([4.0, 2.0, 3.2], dtype=fp_dtype),
"heights": np.array([1.0, 2.0, 1.0], dtype=fp_dtype),
"width": np.array([4.0, 2.0, 3.2], dtype=fp_dtype),
"height": np.array([1.0, 2.0, 1.0], dtype=fp_dtype),
}
output_size = ng.constant(np.array([19, 19], dtype=int_dtype), int_dtype)

View File

@@ -79,6 +79,7 @@ TEST(type_prop_layers, prior_box1)
op::PriorBoxAttrs attrs;
attrs.min_size = {2.0f, 3.0f};
attrs.aspect_ratio = {1.5f, 2.0f, 2.5f};
attrs.scale_all_sizes = false;
auto layer_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {32, 32});
auto image_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {300, 300});
@@ -92,6 +93,7 @@ TEST(type_prop_layers, prior_box2)
attrs.min_size = {2.0f, 3.0f};
attrs.aspect_ratio = {1.5f, 2.0f, 2.5f};
attrs.flip = true;
attrs.scale_all_sizes = false;
auto layer_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {32, 32});
auto image_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {300, 300});
@@ -106,7 +108,6 @@ TEST(type_prop_layers, prior_box3)
attrs.max_size = {315.0f};
attrs.aspect_ratio = {2.0f};
attrs.flip = true;
attrs.scale_all_sizes = true;
auto layer_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {1, 1});
auto image_shape = op::Constant::create<int64_t>(element::i64, Shape{2}, {300, 300});