Conversion via CNNNetworkImpl ctor (#1222)
* Added a ctor for CNNNetworkImpl that converts from ngraphImpl
* Re-use it in all places instead of manual conversion
* Hide convertToCNNNetworkImpl usage
* Remove a useless test
* Fixed Gleb's review comments
parent c39e32a47b
commit 884389d869
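In short, call sites that previously ran the nGraph-to-legacy conversion pipeline by hand now construct details::CNNNetworkImpl directly from an ICNNNetwork that wraps an ngraph::Function. A minimal usage sketch, assuming only headers that already appear in this diff; the helper name toLegacyNetwork is illustrative and not part of the change:

#include <memory>
#include <cnn_network_impl.hpp>  // deprecated legacy API, as included by the tests below

// Sketch: the ctor added in this commit clones the wrapped ngraph::Function, runs
// CommonOptimizations and the opset3 -> opset2 -> opset1 -> legacy passes, and then
// fills the CNNNetworkImpl in place via convertFunctionToICNNNetwork(graph, network, this, false).
// Note: per the ctor body in the diff, the argument must really be a CNNNetworkNGraphImpl
// holding a non-null function (this is checked with IE_ASSERT).
std::shared_ptr<InferenceEngine::details::CNNNetworkImpl>
toLegacyNetwork(const InferenceEngine::ICNNNetwork& ngraphNetwork) {
    return std::make_shared<InferenceEngine::details::CNNNetworkImpl>(ngraphNetwork);
}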
@@ -15,12 +15,6 @@
#include "hetero/hetero_plugin_config.hpp"
#include <cpp_interfaces/base/ie_plugin_base.hpp>
#include "hetero_executable_network.hpp"
#include "convert_function_to_cnn_network.hpp"
#include <generic_ie.hpp>
#include <transformations/common_optimizations/common_optimizations.hpp>
#include <transformations/convert_opset1_to_legacy/convert_opset1_to_legacy.hpp>
#include <transformations/convert_opset2_to_opset1/convert_opset2_to_opset1.hpp>
#include <transformations/convert_opset3_to_opset2/convert_opset3_to_opset2.hpp>

using namespace InferenceEngine;
using namespace InferenceEngine::PluginConfigParams;
@@ -63,8 +57,7 @@ InferenceEngine::ExecutableNetworkInternal::Ptr Engine::LoadExeNetworkImpl(const
    }
    DeviceMetaInformationMap metaDevices = GetDevicePlugins(it->second, tconfig);

    auto function = network.getFunction();
    if (function != nullptr) {
    if (auto function = network.getFunction()) {
        auto anyDeviceDoNotSupportNgraph =
            std::any_of(std::begin(metaDevices), std::end(metaDevices),
                        [&] (const DeviceMetaInformationMap::value_type& metaDevice) {
@@ -74,15 +67,9 @@ InferenceEngine::ExecutableNetworkInternal::Ptr Engine::LoadExeNetworkImpl(const
                            return (clonedNetwork->getFunction() == nullptr);
                        });
        if (anyDeviceDoNotSupportNgraph) {
            auto clonedNetwork = cloneNetwork(network);
            auto function = clonedNetwork->getFunction();
            ::ngraph::op::GenericIE::DisableReshape noReshape(function);
            ::ngraph::pass::CommonOptimizations().run_on_function(function);
            ::ngraph::pass::ConvertOpSet3ToOpSet2().run_on_function(function);
            ::ngraph::pass::ConvertOpSet2ToOpSet1().run_on_function(function);
            ::ngraph::pass::ConvertOpSet1ToLegacy().run_on_function(function);
            auto cnnNetworkImpl = std::make_shared<details::CNNNetworkImpl>(network);
            return std::make_shared<HeteroExecutableNetwork>(
                *InferenceEngine::details::convertFunctionToICNNNetwork(function, *clonedNetwork),
                *cnnNetworkImpl,
                mergeConfigs(_config, config), this);
        } else {
            return std::make_shared<HeteroExecutableNetwork>(*cloneNetwork(network), mergeConfigs(_config, config), this);

@@ -17,13 +17,8 @@
#include <ngraph/ngraph.hpp>
#include <ngraph/pass/get_output_element_elimination.hpp>
#include <set>
// #include <shape_infer/ie_reshaper.hpp>
#include <string>

#include <transformations/common_optimizations/common_optimizations.hpp>
#include <transformations/convert_opset1_to_legacy/convert_opset1_to_legacy.hpp>
#include <transformations/convert_opset2_to_opset1/convert_opset2_to_opset1.hpp>
#include <transformations/convert_opset3_to_opset2/convert_opset3_to_opset2.hpp>
#include <transformations/convert_opset1_to_legacy/convert_one_hot_to_one_hot_ie.hpp>

#include "ngraph_ops/eltwise.hpp"
@@ -35,7 +30,6 @@
#include "ie_profiling.hpp"
#include "network_serializer.h"
#include "generic_ie.hpp"
#include "convert_function_to_cnn_network.hpp"
#include <shape_infer/built-in/ie_built_in_holder.hpp>

using namespace std;
@@ -110,12 +104,6 @@ void CNNNetworkNGraphImpl::createDataForResult(const ::ngraph::Output<::ngraph::
    }
}

std::shared_ptr<ICNNNetwork> CNNNetworkNGraphImpl::getCNNNetwork() {
    if (!cnnNetwork)
        convertToCNNNetworkImpl();
    return cnnNetwork;
}

CNNNetworkNGraphImpl::CNNNetworkNGraphImpl(const std::shared_ptr<Function>& nGraph)
    : _ngraph_function(nGraph) {
    // Restore usual attributes for ICNNNetwork
@@ -325,9 +313,7 @@ CNNNetworkNGraphImpl::reshape(const std::map<std::string, std::vector<size_t>>&
    }
    _ngraph_function->validate_nodes_and_infer_types();

    if (cnnNetwork) {
        convertToCNNNetworkImpl();
    } else {
    {
        auto specialized_ngraph_function = cloneFunction(true, inputShapes);
        // Call this transformation because OneHot IE and nGraph have different output precisions
        {
@@ -430,15 +416,7 @@ StatusCode CNNNetworkNGraphImpl::serialize(const std::string& xmlPath, const std
        return DescriptionBuffer(UNEXPECTED, resp);
    }

    auto graph = cloneFunction();
    // Disable shape inference (WA for generic operations)
    ::ngraph::op::GenericIE::DisableReshape noReshape(graph);

    ::ngraph::pass::CommonOptimizations().run_on_function(graph);
    ::ngraph::pass::ConvertOpSet3ToOpSet2().run_on_function(graph);
    ::ngraph::pass::ConvertOpSet2ToOpSet1().run_on_function(graph);
    ::ngraph::pass::ConvertOpSet1ToLegacy().run_on_function(graph);
    network = InferenceEngine::details::convertFunctionToICNNNetwork(graph, *this);
    network = std::make_shared<details::CNNNetworkImpl>(*this);
    }
    if (!network) return GENERAL_ERROR;
    return network->serialize(xmlPath, binPath, resp);
@@ -492,15 +470,6 @@ StatusCode CNNNetworkNGraphImpl::setBatchSizeReshape(size_t size, ResponseDesc*

void CNNNetworkNGraphImpl::convertToCNNNetworkImpl() {
    IE_PROFILING_AUTO_SCOPE(convertToCNNNetworkImpl)
    if (cnnNetwork)
        return;
    auto graph = cloneFunction();
    // Disable shape inference (WA for generic operations)
    ::ngraph::op::GenericIE::DisableReshape noReshape(graph);

    ::ngraph::pass::CommonOptimizations().run_on_function(graph);
    ::ngraph::pass::ConvertOpSet3ToOpSet2().run_on_function(graph);
    ::ngraph::pass::ConvertOpSet2ToOpSet1().run_on_function(graph);
    ::ngraph::pass::ConvertOpSet1ToLegacy().run_on_function(graph);
    cnnNetwork = InferenceEngine::details::convertFunctionToICNNNetwork(graph, *this);
    if (!cnnNetwork)
        cnnNetwork = std::make_shared<details::CNNNetworkImpl>(*this);
}

@@ -56,8 +56,6 @@ public:

    void setInputInfo(InputInfo::Ptr data);

    std::shared_ptr<ICNNNetwork> getCNNNetwork();

    void addLayer(const CNNLayerPtr& layer) noexcept;

    // public version
@@ -91,11 +89,11 @@ public:
    StatusCode serialize(const std::string& xmlPath, const std::string& binPath, ResponseDesc* resp) const
        noexcept override;

    void convertToCNNNetworkImpl();
protected:
    std::shared_ptr<::ngraph::Function> _ngraph_function;
    virtual std::shared_ptr<::ngraph::Function> cloneFunction(bool constFolding = false, const std::map<std::string,
        std::vector<size_t>>& inputShapes = {}) const;
protected:
    std::shared_ptr<::ngraph::Function> _ngraph_function;

private:
    std::map<std::string, DataPtr> _data;
    InferenceEngine::InputsDataMap _inputData;
@@ -111,10 +109,18 @@ private:
     */
    void createDataForResult(const ::ngraph::Output<::ngraph::Node>& output, const std::string& outName, DataPtr& ptr);

    friend INFERENCE_ENGINE_API_CPP(std::shared_ptr<CNNNetworkImpl>)
    convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function>& graph,
                                 const ICNNNetwork& nGraphImpl, bool keep_constant_inputs);
    /**
     * @brief Converts ngraph::Function to old CNNNetworkImpl representation
     */
    void convertToCNNNetworkImpl();

    friend INFERENCE_ENGINE_API_CPP(void)
    convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function>& graph,
                                 const ICNNNetwork& nGraphImpl,
                                 CNNNetworkImpl* cnnNetworkImpl,
                                 bool keep_constant_inputs);

    friend class NGraphData;

    /**
     * @brief Reshape on the same shape
@@ -126,7 +132,6 @@ class TINGraphBody : public CNNNetworkNGraphImpl {
public:
    explicit TINGraphBody(const std::shared_ptr<::ngraph::Function>& func): CNNNetworkNGraphImpl(func) {}

protected:
    std::shared_ptr<::ngraph::Function> cloneFunction(bool constFolding, const std::map<std::string, std::vector<size_t>>& inputShapes) const override {
        return _ngraph_function;
    }

@@ -30,6 +30,7 @@ namespace details {
class INFERENCE_ENGINE_API_CLASS(CNNNetworkImpl): public ICNNNetwork {
public:
    CNNNetworkImpl();
    explicit CNNNetworkImpl(const ICNNNetwork & ngraphImpl);
    ~CNNNetworkImpl() override;

    std::shared_ptr<::ngraph::Function> getFunction() noexcept override {

@@ -18,6 +18,12 @@ INFERENCE_ENGINE_API_CPP(std::shared_ptr<CNNNetworkImpl>)
convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function>& graph,
                             const ICNNNetwork &network, bool keep_constant_inputs = false);

INFERENCE_ENGINE_API_CPP(void)
convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function>& graph,
                             const ICNNNetwork &ngraphNetwork,
                             CNNNetworkImpl* cnnNetworkImpl,
                             bool keep_constant_inputs = false);


} // namespace details
} // namespace InferenceEngine

@@ -24,11 +24,7 @@ namespace InferenceEngine {
 */
class INFERENCE_ENGINE_API_CLASS(ConstTransformer) {
public:
    explicit ConstTransformer(ICNNNetwork* _network);
    explicit ConstTransformer(details::CNNNetworkImpl* _network);
    explicit ConstTransformer(std::vector<DataPtr> &_inputs, std::vector<DataPtr> &_outputs);

    virtual ~ConstTransformer() = default;

    /**
     * @brief calculates const layers, combines const subgraph into a single const layers
@@ -41,6 +37,8 @@ public:
    void fullTrim();

protected:
    ConstTransformer(std::vector<DataPtr> &_inputs, std::vector<DataPtr> &_outputs);

    /**
     * @brief collect all const layers with marking if it defines shape (1 - for shape, 0 - otherwise)
     */

@@ -22,6 +22,14 @@
#include "network_serializer.h"
#include "details/ie_cnn_network_tools.h"

#include "generic_ie.hpp"
#include "cnn_network_ngraph_impl.hpp"
#include <transformations/common_optimizations/common_optimizations.hpp>
#include <transformations/convert_opset1_to_legacy/convert_opset1_to_legacy.hpp>
#include <transformations/convert_opset2_to_opset1/convert_opset2_to_opset1.hpp>
#include <transformations/convert_opset3_to_opset2/convert_opset3_to_opset2.hpp>
#include "convert_function_to_cnn_network.hpp"

using namespace std;
using namespace InferenceEngine;
using namespace InferenceEngine::details;
@@ -78,6 +86,21 @@ ICNNNetwork::~ICNNNetwork() {}

CNNNetworkImpl::CNNNetworkImpl() {}

CNNNetworkImpl::CNNNetworkImpl(const ICNNNetwork & ngraphImpl) {
    auto ngraphImplPtr = dynamic_cast<const details::CNNNetworkNGraphImpl*>(&ngraphImpl);
    IE_ASSERT(ngraphImplPtr != nullptr);
    IE_ASSERT(ngraphImplPtr->getFunction() != nullptr);
    auto graph = ngraphImplPtr->cloneFunction();
    // Disable shape inference (WA for generic operations)
    ::ngraph::op::GenericIE::DisableReshape noReshape(graph);

    ::ngraph::pass::CommonOptimizations().run_on_function(graph);
    ::ngraph::pass::ConvertOpSet3ToOpSet2().run_on_function(graph);
    ::ngraph::pass::ConvertOpSet2ToOpSet1().run_on_function(graph);
    ::ngraph::pass::ConvertOpSet1ToLegacy().run_on_function(graph);
    InferenceEngine::details::convertFunctionToICNNNetwork(graph, ngraphImpl, this, false);
}

CNNNetworkImpl::~CNNNetworkImpl() {
    // In case of cycles, memory leaks occur: Layer holds shared_ptr<Data>, and vice versa.
    // Added additional check on cycles.

@@ -42,7 +42,6 @@

#include <debug.h>
#include <ngraph/opsets/opset1.hpp>
#include "transformations/convert_opset1_to_legacy/convert_opset1_to_legacy.hpp"
#include "transformations/utils/utils.hpp"
#include "transformations/rt_info/fused_names_attribute.hpp"
#include "transformations/rt_info/primitives_priority_attribute.hpp"
@@ -508,9 +507,10 @@ CNNLayerPtr InferenceEngine::details::CNNLayerCreator::create() {
    return res;
}

std::shared_ptr<CNNNetworkImpl> convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function> &graph,
                                                             const ICNNNetwork &network,
                                                             bool keep_constant_inputs) {
void convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function> &graph,
                                  const ICNNNetwork &network,
                                  CNNNetworkImpl* cnnNetworkImpl,
                                  bool keep_constant_inputs) {
    IE_PROFILING_AUTO_SCOPE(convertFunctionToICNNNetwork)
    const auto createCNNLayer = [](const std::shared_ptr<::ngraph::Node> &node) -> CNNLayerPtr {
        class NGraphCNNLayer: public CNNLayer {
@@ -698,7 +698,7 @@ std::shared_ptr<CNNNetworkImpl> convertFunctionToICNNNetwork(const std::shared_p
        return ::ngraph::as_type_ptr<::ngraph::op::Result>(node) != nullptr;
    };

    const auto keep_input_info = [](std::shared_ptr<details::CNNNetworkImpl> &network, const DataPtr &inData) {
    const auto keep_input_info = [](CNNNetworkImpl *network, const DataPtr &inData) {
        InputInfo::Ptr info(new InputInfo());
        info->setInputData(inData);
        network->setInputInfo(info);
@@ -709,8 +709,7 @@ std::shared_ptr<CNNNetworkImpl> convertFunctionToICNNNetwork(const std::shared_p
    InputsDataMap thisInputDataMap;
    network.getInputsInfo(thisInputDataMap);

    // Create network
    auto cnnNetworkImpl = std::make_shared<details::CNNNetworkImpl>();
    // Construct network
    cnnNetworkImpl->setName(graph->get_friendly_name());

    // Collect all names from current graph
@@ -913,7 +912,15 @@ std::shared_ptr<CNNNetworkImpl> convertFunctionToICNNNetwork(const std::shared_p
    for (const auto &ext : ::ngraph::op::GenericIE::getExtensions(graph)) {
        cnnNetworkImpl->AddExtension(ext, nullptr);
    }
}

std::shared_ptr<CNNNetworkImpl> convertFunctionToICNNNetwork(const std::shared_ptr<const ::ngraph::Function> &graph,
                                                             const ICNNNetwork &network,
                                                             bool keep_constant_inputs) {
    auto cnnNetworkImpl = std::make_shared<details::CNNNetworkImpl>();
    convertFunctionToICNNNetwork(graph, network, cnnNetworkImpl.get(), keep_constant_inputs);
    return cnnNetworkImpl;
}

} // namespace details
} // namespace InferenceEngine

@@ -17,7 +17,6 @@
#include <mutex>
#include <algorithm>

#include <cnn_network_ngraph_impl.hpp>
#include "blob_factory.hpp"
#include "cnn_network_impl.hpp"
#include "graph_tools.hpp"
@@ -71,19 +70,6 @@ ConstTransformer::ConstTransformer(details::CNNNetworkImpl* _network)
        THROW_IE_EXCEPTION << "[ERROR]: Failed to init ConstTransformer with null pointer of network";
}

ConstTransformer::ConstTransformer(ICNNNetwork* _network) {
    if (auto cnnNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(_network)) {
        network = cnnNet;
    } else if (auto nGraphNet = dynamic_cast<InferenceEngine::details::CNNNetworkNGraphImpl *>(_network)) {
        if (auto cnnNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(nGraphNet->getCNNNetwork().get()))
            network = cnnNet;
    }
    if (!network)
        THROW_IE_EXCEPTION << "[ERROR]: Failed to init ConstTransformer with unsupported network type";
    inputs = get_inputs(network);
    outputs = get_outputs(network);
}

ConstTransformer::ConstTransformer(std::vector<DataPtr> &_inputs, std::vector<DataPtr> &_outputs)
        : network(nullptr), inputs(_inputs), outputs(_outputs) {
    if (inputs.empty() || outputs.empty())

@@ -14,7 +14,6 @@
#include <utility>
#include <vector>

#include "cnn_network_ngraph_impl.hpp"
#include "details/os/os_filesystem.hpp"
#include "ie_format_parser.h"
#include "ie_profiling.hpp"

@@ -18,7 +18,10 @@ void FrontEnd::removeConstLayers(ie::ICNNNetwork& network) {
    env.log->trace("Remove const layers");
    VPU_LOGGER_SECTION(env.log);

    ie::ConstTransformer(&network).fullTrim();
    auto implNetwork = dynamic_cast<ie::details::CNNNetworkImpl *>(&network);
    VPU_THROW_UNLESS(implNetwork != nullptr, "FrontEnd::removeConstLayers expects CNNNetworkImpl");

    ie::ConstTransformer(implNetwork).fullTrim();
}

} // namespace vpu

@@ -39,28 +39,6 @@ using namespace InferenceEngine;

IE_SUPPRESS_DEPRECATED_START

TEST(CNNNGraphImplTests, TestConvertNetwork) {
    std::shared_ptr<ngraph::Function> ngraph;
    {
        ngraph::PartialShape shape({1, 3, 22, 22});
        ngraph::element::Type type(ngraph::element::Type_t::f32);
        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
        auto relu = std::make_shared<ngraph::op::Relu>(param);
        auto result = std::make_shared<ngraph::op::Result>(relu);

        ngraph::ParameterVector params = {param};
        ngraph::ResultVector results = {result};

        ngraph = std::make_shared<ngraph::Function>(results, params);
    }

    InferenceEngine::details::CNNNetworkNGraphImpl cnnNet(ngraph);
    auto cnnRefNet = cnnNet.getCNNNetwork();
    cnnNet.convertToCNNNetworkImpl();

    ASSERT_EQ(cnnRefNet, cnnNet.getCNNNetwork());
}

TEST(CNNNGraphImplTests, TestConvertWithRemoveLastLayerNetwork) {
    std::shared_ptr<ngraph::Function> ngraph;
    {
@@ -81,10 +59,10 @@ TEST(CNNNGraphImplTests, TestConvertWithRemoveLastLayerNetwork) {
    }

    InferenceEngine::details::CNNNetworkNGraphImpl cnnNet(ngraph);
    InferenceEngine::ICNNNetwork& cnnRefNet = *cnnNet.getCNNNetwork();
    auto convertedNet = std::make_shared<details::CNNNetworkImpl>(cnnNet);
    // Remove convert layer
    InferenceEngine::NetPass::ConvertPrecision(cnnRefNet, Precision::I64, Precision::I32);
    ASSERT_NO_THROW(cloneNet(cnnRefNet));
    InferenceEngine::NetPass::ConvertPrecision(*convertedNet, Precision::I64, Precision::I32);
    ASSERT_NO_THROW(cloneNet(*convertedNet));
}

TEST(CNNNGraphImplTests, TestResultWithNotEqualName) {
@@ -105,7 +83,7 @@ TEST(CNNNGraphImplTests, TestResultWithNotEqualName) {
    }

    InferenceEngine::details::CNNNetworkNGraphImpl cnnNet(ngraph);
    ASSERT_NO_THROW(cnnNet.getCNNNetwork());
    ASSERT_NO_THROW(auto convertedNet = std::make_shared<details::CNNNetworkImpl>(cnnNet));
}

TEST(CNNNGraphImplTests, TestGetOutputAfterConvertNetwork) {
@@ -175,15 +153,9 @@ TEST(CNNNGraphImplTests, TestSetBatch) {

    InferenceEngine::details::CNNNetworkNGraphImpl cnnNet(ngraph);
    ASSERT_EQ(1, cnnNet.getBatchSize());
    ASSERT_EQ(OK, cnnNet.setBatchSize(2, nullptr));
    ASSERT_EQ(OK, cnnNet.setBatchSize(2, nullptr)); // triggers conversion
    ASSERT_EQ(2, cnnNet.getBatchSize());
    ASSERT_EQ(nullptr, cnnNet.getFunction());
    auto cnnRefNet = cnnNet.getCNNNetwork();

    cnnNet.convertToCNNNetworkImpl();

    ASSERT_EQ(2, cnnNet.getBatchSize());
    ASSERT_EQ(2, cnnNet.getCNNNetwork()->getBatchSize());
}

TEST(CNNNGraphImplTests, TestSaveAffinity) {
@@ -320,50 +292,16 @@ TEST(CNNNGraphImplTests, SaveInputInfoAfterConversion) {
    }

    InferenceEngine::details::CNNNetworkNGraphImpl cnnNet(ngraph);
    cnnNet.convertToCNNNetworkImpl();
    auto inputInfo = cnnNet.getInput(name);
    ASSERT_EQ(inputInfo->getPreProcess().getResizeAlgorithm(), ResizeAlgorithm::NO_RESIZE);
    inputInfo->getPreProcess().setResizeAlgorithm(ResizeAlgorithm::RESIZE_AREA);
    ASSERT_EQ(inputInfo->getPreProcess().getResizeAlgorithm(), ResizeAlgorithm::RESIZE_AREA);

    cnnNet.convertToCNNNetworkImpl();
    inputInfo = cnnNet.getInput(name);
    auto cnnNetImpl = std::make_shared<details::CNNNetworkImpl>(cnnNet);
    inputInfo = cnnNetImpl->getInput(name);
    ASSERT_EQ(inputInfo->getPreProcess().getResizeAlgorithm(), ResizeAlgorithm::RESIZE_AREA);
}

TEST(CNNNGraphImplTests, SaveAttributesAfterConversion) {
    std::string name = "prelu";
    std::shared_ptr<ngraph::Function> ngraph;
    {
        ngraph::PartialShape shape({1, 3, 22, 22});
        ngraph::element::Type type(ngraph::element::Type_t::f32);
        auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
        auto constant = ngraph::op::Constant::create(ngraph::element::Type_t::f32, {1}, {2});
        auto prelu = std::make_shared<ngraph::op::PRelu>(param, constant);
        prelu->set_friendly_name(name);
        auto add = std::make_shared<ngraph::op::v1::Maximum>(prelu, constant);
        auto result = std::make_shared<ngraph::op::Result>(add);

        ngraph::ParameterVector params = {param};
        ngraph::ResultVector results = {result};

        ngraph = std::make_shared<ngraph::Function>(results, params);
    }

    InferenceEngine::details::CNNNetworkNGraphImpl cnnNet(ngraph);
    auto * icnnnetwork = static_cast<InferenceEngine::ICNNNetwork*>(&cnnNet);
    CNNLayerPtr layer = CommonTestUtils::getLayerByName(icnnnetwork, name);
    layer->params["test"] = "2";
    layer = CommonTestUtils::getLayerByName(icnnnetwork, name);
    ASSERT_TRUE(layer->params.find("test") != layer->params.end());
    ASSERT_EQ(layer->params["test"], "2");

    cnnNet.convertToCNNNetworkImpl();
    layer = CommonTestUtils::getLayerByName(icnnnetwork, name);
    ASSERT_TRUE(layer->params.find("test") != layer->params.end());
    ASSERT_EQ(layer->params["test"], "2");
}

TEST(CNNNGraphImplTests, SavePrimitivesPriority) {
    std::string model = R"V0G0N(
<net name="Activation" version="10">
@@ -704,7 +642,9 @@ TEST(CNNNGraphImplTests, CanSetBatchReadValue) {
    }

    InferenceEngine::details::CNNNetworkNGraphImpl cnnNet(ngraph);
    auto status = cnnNet.getCNNNetwork()->setBatchSize(4, nullptr);
    auto convertedNet = std::make_shared<details::CNNNetworkImpl>(cnnNet);
    auto status = convertedNet->setBatchSize(4, nullptr);
    EXPECT_EQ(status, StatusCode::OK);
}

IE_SUPPRESS_DEPRECATED_END

@@ -4,8 +4,8 @@

#include <gtest/gtest.h>

#include <convert_function_to_cnn_network.hpp>
#include <cpp/ie_cnn_network.h>
#include <cnn_network_impl.hpp> // deprecated API

#include <ngraph/function.hpp>
#include <ngraph/opsets/opset1.hpp>
@@ -30,9 +30,9 @@ TEST(ConvertFunctionToCNNNetworkTests, ConvertPReLUNetwork) {
    }

    InferenceEngine::CNNNetwork nGraphImpl(f);
    ASSERT_ANY_THROW(InferenceEngine::details::convertFunctionToICNNNetwork(f, nGraphImpl));
    try {
        auto net = InferenceEngine::details::convertFunctionToICNNNetwork(f, nGraphImpl);
        auto net = std::make_shared<InferenceEngine::details::CNNNetworkImpl>(
            static_cast<const InferenceEngine::ICNNNetwork &>(nGraphImpl));
    } catch (InferenceEngine::details::InferenceEngineException &err) {
        const std::string ref_msg = "Error of validate layer: prelu with type: PReLU. Number of inputs (2) is not equal to expected ones: 1";
        const std::string resp_msg = err.what();
@@ -60,7 +60,8 @@ TEST(ConvertFunctionToCNNNetworkTests, ConvertConvolutionNetwork) {

    InferenceEngine::CNNNetwork nGraphImpl(f);
    try {
        auto net = InferenceEngine::details::convertFunctionToICNNNetwork(f, nGraphImpl);
        auto net = std::make_shared<InferenceEngine::details::CNNNetworkImpl>(
            static_cast<const InferenceEngine::ICNNNetwork &>(nGraphImpl));
    } catch (InferenceEngine::details::InferenceEngineException &err) {
        FAIL();
    }

@@ -15,13 +15,10 @@
#include <ngraph/function.hpp>
#include <ngraph/opsets/opset1.hpp>
#include <ngraph/variant.hpp>
#include <transformations/common_optimizations/common_optimizations.hpp>
#include <transformations/convert_opset1_to_legacy/convert_opset1_to_legacy.hpp>
#include <transformations/convert_opset2_to_opset1/convert_opset2_to_opset1.hpp>
#include <transformations/utils/utils.hpp>
#include <convert_function_to_cnn_network.hpp>
#include <generic_ie.hpp>
#include <cpp/ie_cnn_network.h>
#include <cnn_network_impl.hpp> // deprecated API
#include <ie_layers.h> // deprecated API

#include "common_test_utils/ngraph_test_utils.hpp"

@@ -47,7 +44,7 @@ TEST(TransformationTests, ConvBiasFusion) {

    // Set PrimitivesPriority to all Convolutions
    auto nGraph = network.getFunction();
    ASSERT_TRUE(nGraph);
    ASSERT_NE(nullptr, nGraph);
    for (auto & op : nGraph->get_ops()) {
        if (auto conv = std::dynamic_pointer_cast<ngraph::opset1::Convolution>(op)) {
            auto & rtInfo = conv->get_rt_info();
@@ -55,16 +52,7 @@ TEST(TransformationTests, ConvBiasFusion) {
        }
    }


    // Force conversion from nGraph to CNNNetwork
    ngraph::pass::CommonOptimizations().run_on_function(nGraph);
    ngraph::op::GenericIE::DisableReshape noReshape(f);

    // Note: instead of running all Conversion Transformations you can make up your own transformation pipeline
    ngraph::pass::CommonOptimizations().run_on_function(nGraph);
    ngraph::pass::ConvertOpSet2ToOpSet1().run_on_function(nGraph);
    ngraph::pass::ConvertOpSet1ToLegacy().run_on_function(nGraph);
    auto clonedNetwork = InferenceEngine::details::convertFunctionToICNNNetwork(nGraph, network);
    auto clonedNetwork = std::make_shared<InferenceEngine::details::CNNNetworkImpl>(network);

    IE_SUPPRESS_DEPRECATED_START
    InferenceEngine::CNNLayerPtr conv;

@@ -350,8 +350,8 @@ protected:
    ICNNNetwork& icnnnetwork = network;
    auto networkNGraph = dynamic_cast<CNNNetworkNGraphImpl*>(&icnnnetwork);
    if (networkNGraph) {
        std::shared_ptr<ICNNNetwork> networkPtr = networkNGraph->getCNNNetwork();
        network = CNNNetwork(networkPtr);
        auto netPtr = std::make_shared<details::CNNNetworkImpl>(*networkNGraph);
        network = CNNNetwork(netPtr);
    }

    auto originalLayersInfo = LowPrecisionTransformationValidation::getLayers(network);