diff --git a/inference-engine/src/inference_engine/src/cpp_interfaces/interface/ie_iexecutable_network_internal.cpp b/inference-engine/src/inference_engine/src/cpp_interfaces/interface/ie_iexecutable_network_internal.cpp index d084f6ab797..abd12226ce4 100644 --- a/inference-engine/src/inference_engine/src/cpp_interfaces/interface/ie_iexecutable_network_internal.cpp +++ b/inference-engine/src/inference_engine/src/cpp_interfaces/interface/ie_iexecutable_network_internal.cpp @@ -26,6 +26,10 @@ void IExecutableNetworkInternal::setNetworkOutputs(const OutputsDataMap& network _networkOutputs = networkOutputs; } +void IExecutableNetworkInternal::setRuntimeFunction(std::shared_ptr function) { + _runtime_function = std::move(function); +} + ConstOutputsDataMap IExecutableNetworkInternal::GetOutputsInfo() const { ConstOutputsDataMap outputMap; for (const auto& output : _networkOutputs) { @@ -63,7 +67,7 @@ void IExecutableNetworkInternal::Export(std::ostream& networkModel) { } std::shared_ptr IExecutableNetworkInternal::GetExecGraphInfo() { - IE_THROW(NotImplemented); + return _runtime_function; } std::vector> IExecutableNetworkInternal::QueryState() { diff --git a/inference-engine/src/inference_engine/src/cpp_interfaces/interface/ie_iplugin_internal.cpp b/inference-engine/src/inference_engine/src/cpp_interfaces/interface/ie_iplugin_internal.cpp index bda5694322d..585679dc3f8 100644 --- a/inference-engine/src/inference_engine/src/cpp_interfaces/interface/ie_iplugin_internal.cpp +++ b/inference-engine/src/inference_engine/src/cpp_interfaces/interface/ie_iplugin_internal.cpp @@ -16,9 +16,11 @@ #include #include "blob_factory.hpp" +#include "exec_graph_info.hpp" #include "ie_icore.hpp" #include "ie_iextension.h" #include "ie_input_info.hpp" +#include "ie_ngraph_utils.hpp" #include "ie_parameter.hpp" namespace InferenceEngine { @@ -125,6 +127,10 @@ std::shared_ptr IInferencePlugin::LoadNetwork( } SetExeNetworkInfo(impl, const_map_cast(network.getInputsInfo()), 
const_map_cast(network.getOutputsInfo())); + auto function = network.getFunction(); + if (function) { + SetExeNetworkInfo(impl, std::const_pointer_cast(function)); + } return impl; } @@ -219,6 +225,85 @@ void IInferencePlugin::SetExeNetworkInfo(const std::shared_ptrsetNetworkInputs(copyInfo(constMapCast(inputs))); exeNetwork->setNetworkOutputs(copyInfo(constMapCast(outputs))); + + ngraph::ParameterVector parameters; + ngraph::ResultVector results; + std::vector> node_outputs; + + for (auto&& input : inputs) { + auto tensor_desc = input.second->getTensorDesc(); + auto dims = tensor_desc.getDims(); + parameters.push_back( + std::make_shared(details::convertPrecision(tensor_desc.getPrecision()), + std::vector{dims.begin(), dims.end()})); + parameters.back()->set_friendly_name(input.first); + node_outputs.push_back(parameters.back()->output(0)); + } + + auto node = std::make_shared(node_outputs, outputs.size()); + + int i = 0; + for (auto&& output : outputs) { + auto tensor_desc = output.second->getTensorDesc(); + auto dims = tensor_desc.getDims(); + node->set_output_type(i, + details::convertPrecision(tensor_desc.getPrecision()), + std::vector{dims.begin(), dims.end()}); + results.push_back(std::make_shared(node->output(i))); + ++i; + } + exeNetwork->setRuntimeFunction(std::make_shared(results, parameters, "execution_info")); + + exeNetwork->SetPointerToPlugin(shared_from_this()); +} + +void IInferencePlugin::SetExeNetworkInfo(const std::shared_ptr& exeNetwork, + const std::shared_ptr& function) { + IE_ASSERT(exeNetwork != nullptr); + IE_ASSERT(function != nullptr); + ngraph::ParameterVector parameters; + ngraph::ResultVector results; + ngraph::NodeVector nodes; + + std::map, ngraph::Output> output_map; + + for (auto&& node : function->get_ordered_ops()) { + ngraph::Node* new_node = nullptr; + if (ngraph::is_type(node)) { + parameters.push_back(std::static_pointer_cast(node->clone_with_new_inputs({}))); + for (std::size_t i = 0; i < node->outputs().size(); ++i) { + 
output_map.emplace(node->output(i), parameters.back()->output(i)); + } + new_node = parameters.back().get(); + } else { + std::vector> outputs; + for (auto&& input : node->inputs()) { + outputs.emplace_back(output_map.at(input.get_source_output())); + } + if (ngraph::is_type(node)) { + results.push_back( + std::static_pointer_cast(node->clone_with_new_inputs(outputs))); + new_node = results.back().get(); + } else { + nodes.push_back( + std::make_shared(outputs, node->outputs().size())); + new_node = nodes.back().get(); + for (std::size_t i = 0; i < node->outputs().size(); ++i) { + auto output = node->output(i); + output_map.emplace(output, nodes.back()->output(i)); + new_node->set_output_type(i, output.get_element_type(), output.get_partial_shape()); + } + } + } + IE_ASSERT(new_node != nullptr); + new_node->set_friendly_name(node->get_friendly_name()); + new_node->get_rt_info()[ExecGraphInfoSerialization::PERF_COUNTER] = + std::make_shared<::ngraph::VariantWrapper>("not_executed"); + new_node->get_rt_info()[ExecGraphInfoSerialization::ORIGINAL_NAMES] = + std::make_shared<::ngraph::VariantWrapper>(node->get_friendly_name()); + } + exeNetwork->setRuntimeFunction( + std::make_shared(results, parameters, function->get_friendly_name() + "_execution_info")); exeNetwork->SetPointerToPlugin(shared_from_this()); } diff --git a/inference-engine/src/multi_device/multi_device_plugin.cpp b/inference-engine/src/multi_device/multi_device_plugin.cpp index b0bda135224..11bc89bf783 100644 --- a/inference-engine/src/multi_device/multi_device_plugin.cpp +++ b/inference-engine/src/multi_device/multi_device_plugin.cpp @@ -293,6 +293,7 @@ IExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadNetworkImpl(cons SetExeNetworkInfo(impl, executableNetworkPerDevice.begin()->second->GetInputsInfo(), executableNetworkPerDevice.begin()->second->GetOutputsInfo()); + SetExeNetworkInfo(impl, executableNetworkPerDevice.begin()->second->GetExecGraphInfo()); } return impl; } diff --git 
a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iexecutable_network_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iexecutable_network_internal.hpp index cee656e2f4e..adf48cc8f9b 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iexecutable_network_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iexecutable_network_internal.hpp @@ -15,6 +15,9 @@ #include "ie_parameter.hpp" #include "ie_remote_context.hpp" +namespace ov { +class Function; +} namespace InferenceEngine { class IInferencePlugin; @@ -47,6 +50,12 @@ public: */ virtual void setNetworkOutputs(const OutputsDataMap& networkOutputs); + /** + * @brief Sets function with network inputs and outputs info + * @param[in] function The function with network inputs and outputs info + */ + virtual void setRuntimeFunction(std::shared_ptr function); + /** + * @brief Gets the Executable network output Data node information. The received info is stored in the given Data + * node. 
@@ -141,6 +150,7 @@ protected: virtual std::shared_ptr CreateInferRequestImpl(InputsDataMap networkInputs, OutputsDataMap networkOutputs); + std::shared_ptr _runtime_function; //!< Holds information about network inputs and outputs InferenceEngine::InputsDataMap _networkInputs; //!< Holds information about network inputs info InferenceEngine::OutputsDataMap _networkOutputs; //!< Holds information about network outputs data diff --git a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp index 94d719b9c6b..43d98f49a37 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp @@ -20,6 +20,9 @@ #include "ie_input_info.hpp" #include "ie_parameter.hpp" +namespace ov { +class Function; +} // namespace ov namespace InferenceEngine { class ICore; @@ -302,6 +305,14 @@ protected: const ConstInputsDataMap& inputs, const ConstOutputsDataMap& outputs); + /** + * @brief Set input and output information to executable network. This method is used to + * set additional information to InferenceEngine::IExecutableNetworkInternal created by device plugin. 
+ * @param function Function with initial execution info + */ + void SetExeNetworkInfo(const std::shared_ptr& exeNetwork, + const std::shared_ptr& function); + std::string _pluginName; //!< A device name that plugins enables std::map _config; //!< A map config keys -> values std::weak_ptr _core; //!< A pointer to ICore interface diff --git a/inference-engine/tests/functional/inference_engine/caching_test.cpp b/inference-engine/tests/functional/inference_engine/caching_test.cpp index 10d952e6e0d..a1714c6e51a 100644 --- a/inference-engine/tests/functional/inference_engine/caching_test.cpp +++ b/inference-engine/tests/functional/inference_engine/caching_test.cpp @@ -131,6 +131,7 @@ public: MOCK_METHOD2(CreateInferRequestImpl, IInferRequestInternal::Ptr(InputsDataMap, OutputsDataMap)); MOCK_METHOD1(setNetworkInputs, void(const InputsDataMap& networkInputs)); MOCK_METHOD1(setNetworkOutputs, void(const OutputsDataMap& networkOutputs)); + MOCK_METHOD0(GetExecGraphInfo, std::shared_ptr()); // void Export(std::ostream& networkModel) override { // std::lock_guard guard(m_pluginMutex); @@ -217,10 +218,31 @@ public: m_dirCreator = std::unique_ptr(new MkDirGuard(m_cacheDir)); } + std::shared_ptr createMockIExecutableNet() { + auto mock = std::make_shared(); + EXPECT_CALL(*mock, GetInputsInfo()).Times(AnyNumber()).WillRepeatedly(Return(ConstInputsDataMap{})); + EXPECT_CALL(*mock, GetOutputsInfo()).Times(AnyNumber()).WillRepeatedly(Return(ConstOutputsDataMap{})); + EXPECT_CALL(*mock, GetConfig(PluginConfigParams::KEY_PERF_COUNT)).Times(AnyNumber()).WillRepeatedly(Return(Parameter{PluginConfigParams::NO})); + EXPECT_CALL(*mock, GetMetric(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS))).Times(AnyNumber()).WillRepeatedly(Return(Parameter{1u})); + EXPECT_CALL(*mock, GetExecGraphInfo()).Times(AnyNumber()).WillRepeatedly(Return([] { + ngraph::ParameterVector parameters; + parameters.push_back(std::make_shared( + ov::element::f32, ov::Shape{1, 3, 8, 8})); + auto notOp = 
std::make_shared(parameters.back()); + ngraph::ResultVector results; + results.push_back(std::make_shared(notOp)); + return std::make_shared(results, parameters, "empty_function"); + } ())); + auto ptr = std::make_shared(); + EXPECT_CALL(*ptr, SetCallback(_)).Times(AnyNumber()); + EXPECT_CALL(*mock, CreateInferRequest()).Times(AnyNumber()).WillRepeatedly(Return(ptr)); + return mock; + } + void SetUp() override { initParamTest(); mockPlugin = std::make_shared(); - net = std::make_shared(); + net = createMockIExecutableNet(); setupMock(*mockPlugin); std::string libraryName = get_mock_engine_name(); sharedObjectLoader.reset(new SharedObjectLoader(libraryName.c_str())); @@ -285,18 +307,6 @@ public: return ie.LoadNetwork(cnnNetwork, context, config); } - std::shared_ptr createMockIExecutableNet() { - auto mock = std::make_shared(); - EXPECT_CALL(*mock, GetInputsInfo()).Times(AnyNumber()).WillRepeatedly(Return(ConstInputsDataMap{})); - EXPECT_CALL(*mock, GetOutputsInfo()).Times(AnyNumber()).WillRepeatedly(Return(ConstOutputsDataMap{})); - EXPECT_CALL(*mock, GetConfig(PluginConfigParams::KEY_PERF_COUNT)).Times(AnyNumber()).WillRepeatedly(Return(Parameter{PluginConfigParams::NO})); - EXPECT_CALL(*mock, GetMetric(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS))).Times(AnyNumber()).WillRepeatedly(Return(Parameter{1u})); - auto ptr = std::make_shared(); - EXPECT_CALL(*ptr, SetCallback(_)).Times(AnyNumber()); - EXPECT_CALL(*mock, CreateInferRequest()).Times(AnyNumber()).WillRepeatedly(Return(ptr)); - return mock; - } - private: template std::function make_std_function(const std::string& functionName) { @@ -1453,7 +1463,8 @@ TEST_P(CachingTest, LoadMulti_Archs) { EXPECT_CALL(*net, Export(_)).Times(2); testLoad([&](Core &ie) { ie.SetConfig({{CONFIG_KEY(CACHE_DIR), m_cacheDir}}); - ASSERT_NO_THROW(m_testFunction(ie)); + // ASSERT_NO_THROW(m_testFunction(ie)); + m_testFunction(ie); }); } } @@ -1464,7 +1475,7 @@ TEST_P(CachingTest, LoadMulti_NoCachingOnDevice) { const auto 
TEST_DEVICE_MAX_COUNT = 100; // Looks enough to catch potential race conditions EXPECT_CALL(*mockPlugin, GetMetric(_, _)).Times(AnyNumber()); EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _)) - .Times(AnyNumber()).WillRepeatedly(Return(false)); + .Times(AnyNumber()).WillRepeatedly(Return(Parameter{false})); EXPECT_CALL(*mockPlugin, QueryNetwork(_, _)).Times(AnyNumber()); EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber()); DataPtr inData = std::make_shared("in", Precision::FP32); diff --git a/inference-engine/tests/functional/plugin/myriad/shared_tests_instances/skip_tests_config.cpp b/inference-engine/tests/functional/plugin/myriad/shared_tests_instances/skip_tests_config.cpp index 68d72bc5329..c75651220bb 100644 --- a/inference-engine/tests/functional/plugin/myriad/shared_tests_instances/skip_tests_config.cpp +++ b/inference-engine/tests/functional/plugin/myriad/shared_tests_instances/skip_tests_config.cpp @@ -50,5 +50,7 @@ std::vector disabledTestPatterns() { R"(.*IEClassNetworkTestP\.LoadNetworkActualHeteroDeviceNoThrow.*)", // CVS-58963: Not implemented yet R"(.*Behavior.*InferRequest.*OutOfFirstOutIsInputForSecondNetwork.*)", + // TODO: CVS-65013 + R"(.*LoadNetworkCreateDefaultExecGraphResult.*)", }; } diff --git a/inference-engine/tests/functional/plugin/shared/include/behavior/core_integration.hpp b/inference-engine/tests/functional/plugin/shared/include/behavior/core_integration.hpp index 1064edaa570..639681f5451 100644 --- a/inference-engine/tests/functional/plugin/shared/include/behavior/core_integration.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/behavior/core_integration.hpp @@ -489,6 +489,36 @@ TEST_P(IEClassNetworkTestP, LoadNetworkActualHeteroDevice2NoThrow) { ASSERT_NO_THROW(ie.LoadNetwork(actualNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", deviceName}})); } +TEST_P(IEClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) { + 
SKIP_IF_CURRENT_TEST_IS_DISABLED() + Core ie = createCoreWithTemplate(); + auto net = ie.LoadNetwork(actualNetwork, deviceName); + auto exec_function = net.GetExecGraphInfo().getFunction(); + ASSERT_NE(nullptr, exec_function); + auto actual_parameters = exec_function->get_parameters(); + auto actual_results = exec_function->get_results(); + auto expected_parameters = actualNetwork.getFunction()->get_parameters(); + auto expected_results = actualNetwork.getFunction()->get_results(); + ASSERT_EQ(expected_parameters.size(), actual_parameters.size()); + for (std::size_t i = 0; i < expected_parameters.size(); ++i) { + auto expected_element_type = expected_parameters[i]->get_output_element_type(0); + auto actual_element_type = actual_parameters[i]->get_output_element_type(0); + ASSERT_EQ(expected_element_type, actual_element_type) << "For index: " << i; + auto expected_shape = expected_parameters[i]->get_output_shape(0); + auto actual_shape = actual_parameters[i]->get_output_shape(0); + ASSERT_EQ(expected_shape, actual_shape) << "For index: " << i; + } + ASSERT_EQ(expected_results.size(), actual_results.size()); + for (std::size_t i = 0; i < expected_results.size(); ++i) { + auto expected_element_type = expected_results[i]->get_input_element_type(0); + auto actual_element_type = actual_results[i]->get_input_element_type(0); + ASSERT_EQ(expected_element_type, actual_element_type) << "For index: " << i; + auto expected_shape = expected_results[i]->get_input_shape(0); + auto actual_shape = actual_results[i]->get_input_shape(0); + ASSERT_EQ(expected_shape, actual_shape) << "For index: " << i; + } +} + // // ImportExportNetwork // diff --git a/inference-engine/tests/functional/plugin/shared/include/behavior/exec_graph_info.hpp b/inference-engine/tests/functional/plugin/shared/include/behavior/exec_graph_info.hpp index e9b4d0fa520..ba9c7926708 100644 --- a/inference-engine/tests/functional/plugin/shared/include/behavior/exec_graph_info.hpp +++ 
b/inference-engine/tests/functional/plugin/shared/include/behavior/exec_graph_info.hpp @@ -63,66 +63,57 @@ TEST_P(ExecGraphTests, CheckExecGraphInfoBeforeExecution) { // Create CNNNetwork from ngrpah::Function InferenceEngine::CNNNetwork cnnNet(function); InferenceEngine::CNNNetwork execGraph; - if (targetDevice != CommonTestUtils::DEVICE_AUTO && - targetDevice != CommonTestUtils::DEVICE_MULTI && - targetDevice != CommonTestUtils::DEVICE_TEMPLATE && - targetDevice != CommonTestUtils::DEVICE_GNA) { - // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); - ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo()); - // Create InferRequest - InferenceEngine::InferRequest req; - ASSERT_NO_THROW(req = execNet.CreateInferRequest()); - // Store all the original layers from the network - const auto originalLayers = function->get_ops(); - std::map originalLayersMap; - for (const auto &layer : originalLayers) { - originalLayersMap[layer->get_friendly_name()] = 0; + // Load CNNNetwork to target plugins + auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo()); + // Create InferRequest + InferenceEngine::InferRequest req; + ASSERT_NO_THROW(req = execNet.CreateInferRequest()); + // Store all the original layers from the network + const auto originalLayers = function->get_ops(); + std::map originalLayersMap; + for (const auto &layer : originalLayers) { + originalLayersMap[layer->get_friendly_name()] = 0; + } + int IteratorForLayersConstant = 0; + + auto function = execGraph.getFunction(); + ASSERT_NE(function, nullptr); + + for (const auto & op : function->get_ops()) { + const auto & rtInfo = op->get_rt_info(); + + auto getExecValue = [&rtInfo](const std::string & paramName) -> std::string { + auto it = rtInfo.find(paramName); + IE_ASSERT(rtInfo.end() != it) << " paramName: " << paramName; + auto value = std::dynamic_pointer_cast>(it->second); + 
IE_ASSERT(nullptr != value); + + return value->get(); + }; + + // Each layer from the execGraphInfo network must have PM data option set + ASSERT_EQ("not_executed", getExecValue(ExecGraphInfoSerialization::PERF_COUNTER)); + // Parse origin layer names (fused/merged layers) from the executable graph + // and compare with layers from the original model + auto origFromExecLayer = getExecValue(ExecGraphInfoSerialization::ORIGINAL_NAMES); + if (origFromExecLayer == "") + IteratorForLayersConstant++; + std::vector origFromExecLayerSep = separateStrToVec(origFromExecLayer, ','); + std::for_each(origFromExecLayerSep.begin(), origFromExecLayerSep.end(), [&](const std::string &layer) { + auto origLayer = originalLayersMap.find(layer); + ASSERT_NE(originalLayersMap.end(), origLayer) << layer; + origLayer->second++; + }); + } + + // All layers from the original IR must be present with in ExecGraphInfo + for (auto &layer : originalLayersMap) { + if ((layer.second == 0) && (IteratorForLayersConstant > 0)) { + IteratorForLayersConstant--; + continue; } - int IteratorForLayersConstant = 0; - - auto function = execGraph.getFunction(); - ASSERT_NE(function, nullptr); - - for (const auto & op : function->get_ops()) { - const auto & rtInfo = op->get_rt_info(); - - auto getExecValue = [&rtInfo](const std::string & paramName) -> std::string { - auto it = rtInfo.find(paramName); - IE_ASSERT(rtInfo.end() != it); - auto value = std::dynamic_pointer_cast>(it->second); - IE_ASSERT(nullptr != value); - - return value->get(); - }; - - // Each layer from the execGraphInfo network must have PM data option set - ASSERT_EQ("not_executed", getExecValue(ExecGraphInfoSerialization::PERF_COUNTER)); - // Parse origin layer names (fused/merged layers) from the executable graph - // and compare with layers from the original model - auto origFromExecLayer = getExecValue(ExecGraphInfoSerialization::ORIGINAL_NAMES); - if (origFromExecLayer == "") - IteratorForLayersConstant++; - std::vector 
origFromExecLayerSep = separateStrToVec(origFromExecLayer, ','); - std::for_each(origFromExecLayerSep.begin(), origFromExecLayerSep.end(), [&](const std::string &layer) { - auto origLayer = originalLayersMap.find(layer); - ASSERT_NE(originalLayersMap.end(), origLayer) << layer; - origLayer->second++; - }); - } - - // All layers from the original IR must be present with in ExecGraphInfo - for (auto &layer : originalLayersMap) { - if ((layer.second == 0) && (IteratorForLayersConstant > 0)) { - IteratorForLayersConstant--; - continue; - } - ASSERT_GE(layer.second, 0); - } - } else { - InferenceEngine::ExecutableNetwork network; - ASSERT_NO_THROW(network = ie->LoadNetwork(cnnNet, targetDevice, configuration)); - ASSERT_THROW(network.GetExecGraphInfo(), InferenceEngine::NotImplemented); + ASSERT_GE(layer.second, 0); } } @@ -132,74 +123,66 @@ TEST_P(ExecGraphTests, CheckExecGraphInfoAfterExecution) { // Create CNNNetwork from ngrpah::Function InferenceEngine::CNNNetwork cnnNet(function); InferenceEngine::CNNNetwork execGraph; - if (targetDevice != CommonTestUtils::DEVICE_AUTO && - targetDevice != CommonTestUtils::DEVICE_MULTI && - targetDevice != CommonTestUtils::DEVICE_TEMPLATE && - targetDevice != CommonTestUtils::DEVICE_GNA) { - // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); - ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo()); - // Create InferRequest - InferenceEngine::InferRequest req; - ASSERT_NO_THROW(req = execNet.CreateInferRequest()); - // Store all the original layers from the network - const auto originalLayers = function->get_ops(); - std::map originalLayersMap; - for (const auto &layer : originalLayers) { - originalLayersMap[layer->get_friendly_name()] = 0; + // Load CNNNetwork to target plugins + auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo()); + // Create InferRequest + InferenceEngine::InferRequest req; + 
ASSERT_NO_THROW(req = execNet.CreateInferRequest()); + // Store all the original layers from the network + const auto originalLayers = function->get_ops(); + std::map originalLayersMap; + for (const auto &layer : originalLayers) { + originalLayersMap[layer->get_friendly_name()] = 0; + } + int IteratorForLayersConstant = 0; + // Store all the layers from the executable graph information represented as CNNNetwork + bool has_layer_with_valid_time = false; + auto function = execGraph.getFunction(); + ASSERT_NE(nullptr, function); + + for (const auto & op : function->get_ops()) { + const auto & rtInfo = op->get_rt_info(); + + auto getExecValue = [&rtInfo](const std::string & paramName) -> std::string { + auto it = rtInfo.find(paramName); + IE_ASSERT(rtInfo.end() != it); + auto value = std::dynamic_pointer_cast>(it->second); + IE_ASSERT(nullptr != value); + + return value->get(); + }; + + // At least one layer in the topology should be executed and have valid perf counter value + try { + float x = static_cast(std::atof( + getExecValue(ExecGraphInfoSerialization::PERF_COUNTER).c_str())); + ASSERT_GE(x, 0.0f); + has_layer_with_valid_time = true; + } catch (std::exception &) {} + + // Parse origin layer names (fused/merged layers) from the executable graph + // and compare with layers from the original model + auto origFromExecLayer = getExecValue(ExecGraphInfoSerialization::ORIGINAL_NAMES); + std::vector origFromExecLayerSep = separateStrToVec(origFromExecLayer, ','); + if (origFromExecLayer == "") + IteratorForLayersConstant++; + std::for_each(origFromExecLayerSep.begin(), origFromExecLayerSep.end(), [&](const std::string &layer) { + auto origLayer = originalLayersMap.find(layer); + ASSERT_NE(originalLayersMap.end(), origLayer) << layer; + origLayer->second++; + }); + } + + ASSERT_TRUE(has_layer_with_valid_time); + + // All layers from the original IR must be present within ExecGraphInfo + for (auto &layer : originalLayersMap) { + if ((layer.second == 0) && 
(IteratorForLayersConstant > 0)) { + IteratorForLayersConstant--; + continue; } - int IteratorForLayersConstant = 0; - // Store all the layers from the executable graph information represented as CNNNetwork - bool has_layer_with_valid_time = false; - auto function = execGraph.getFunction(); - ASSERT_NE(nullptr, function); - - for (const auto & op : function->get_ops()) { - const auto & rtInfo = op->get_rt_info(); - - auto getExecValue = [&rtInfo](const std::string & paramName) -> std::string { - auto it = rtInfo.find(paramName); - IE_ASSERT(rtInfo.end() != it); - auto value = std::dynamic_pointer_cast>(it->second); - IE_ASSERT(nullptr != value); - - return value->get(); - }; - - // At least one layer in the topology should be executed and have valid perf counter value - try { - float x = static_cast(std::atof( - getExecValue(ExecGraphInfoSerialization::PERF_COUNTER).c_str())); - ASSERT_GE(x, 0.0f); - has_layer_with_valid_time = true; - } catch (std::exception &) {} - - // Parse origin layer names (fused/merged layers) from the executable graph - // and compare with layers from the original model - auto origFromExecLayer = getExecValue(ExecGraphInfoSerialization::ORIGINAL_NAMES); - std::vector origFromExecLayerSep = separateStrToVec(origFromExecLayer, ','); - if (origFromExecLayer == "") - IteratorForLayersConstant++; - std::for_each(origFromExecLayerSep.begin(), origFromExecLayerSep.end(), [&](const std::string &layer) { - auto origLayer = originalLayersMap.find(layer); - ASSERT_NE(originalLayersMap.end(), origLayer) << layer; - origLayer->second++; - }); - } - - ASSERT_TRUE(has_layer_with_valid_time); - - // All layers from the original IR must be present within ExecGraphInfo - for (auto &layer : originalLayersMap) { - if ((layer.second == 0) && (IteratorForLayersConstant > 0)) { - IteratorForLayersConstant--; - continue; - } - ASSERT_GE(layer.second, 0); - } - } else { - ASSERT_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration).GetExecGraphInfo(), - 
InferenceEngine::NotImplemented); + ASSERT_GE(layer.second, 0); } } @@ -214,22 +197,14 @@ TEST_P(ExecGraphTests, CheckExecGraphInfoSerialization) { // Create CNNNetwork from ngrpah::Function InferenceEngine::CNNNetwork cnnNet(function); InferenceEngine::CNNNetwork execGraph; - if (targetDevice != CommonTestUtils::DEVICE_AUTO && - targetDevice != CommonTestUtils::DEVICE_MULTI && - targetDevice != CommonTestUtils::DEVICE_TEMPLATE && - targetDevice != CommonTestUtils::DEVICE_GNA) { - // Load CNNNetwork to target plugins - auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); - ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo()); - // Create InferRequest - InferenceEngine::InferRequest req; - ASSERT_NO_THROW(req = execNet.CreateInferRequest()); - execGraph.serialize(out_xml_path, out_bin_path); - ASSERT_EQ(0, std::remove(out_xml_path.c_str())); - ASSERT_EQ(0, std::remove(out_bin_path.c_str())); - } else { - ASSERT_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration).GetExecGraphInfo(), - InferenceEngine::NotImplemented); - } + // Load CNNNetwork to target plugins + auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration); + ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo()); + // Create InferRequest + InferenceEngine::InferRequest req; + ASSERT_NO_THROW(req = execNet.CreateInferRequest()); + execGraph.serialize(out_xml_path, out_bin_path); + ASSERT_EQ(0, std::remove(out_xml_path.c_str())); + ASSERT_EQ(0, std::remove(out_bin_path.c_str())); } } // namespace BehaviorTestsDefinitions \ No newline at end of file diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mock.cpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mock.cpp index 9cc7309c1c9..515a51e5244 100644 --- a/inference-engine/tests/ie_test_utils/unit_test_utils/mock.cpp +++ b/inference-engine/tests/ie_test_utils/unit_test_utils/mock.cpp @@ -45,12 +45,12 @@ void MockNotEmptyICNNNetwork::getInputsInfo(InputsDataMap &inputs) const noexcep "Input", 
Precision::FP32 }); getInputTo(inData)[MockNotEmptyICNNNetwork::OUTPUT_BLOB_NAME] = inputLayer; - inData->setDims(MockNotEmptyICNNNetwork::INPUT_DIMENTIONS); + inData->setDims(MockNotEmptyICNNNetwork::INPUT_DIMENSIONS); inData->setLayout(Layout::NCHW); inputInfo->setInputData(inData); auto outData = std::make_shared(MockNotEmptyICNNNetwork::OUTPUT_BLOB_NAME, Precision::UNSPECIFIED); - outData->setDims(MockNotEmptyICNNNetwork::OUTPUT_DIMENTIONS); + outData->setDims(MockNotEmptyICNNNetwork::OUTPUT_DIMENSIONS); outData->setLayout(Layout::NCHW); getInputTo(outData)[""] = std::make_shared(LayerParams{ MockNotEmptyICNNNetwork::OUTPUT_BLOB_NAME, diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_not_empty_icnn_network.hpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_not_empty_icnn_network.hpp index d861ded519a..98cf3509c8e 100644 --- a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_not_empty_icnn_network.hpp +++ b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_not_empty_icnn_network.hpp @@ -19,9 +19,9 @@ IE_SUPPRESS_DEPRECATED_START class MockNotEmptyICNNNetwork final : public ICNNNetwork { public: static constexpr const char* INPUT_BLOB_NAME = "first_input"; - const SizeVector INPUT_DIMENTIONS = { 1, 3, 299, 299 }; + const SizeVector INPUT_DIMENSIONS = { 1, 3, 299, 299 }; static constexpr const char* OUTPUT_BLOB_NAME = "first_output"; - const SizeVector OUTPUT_DIMENTIONS = { 1, 3, 299, 299 }; + const SizeVector OUTPUT_DIMENSIONS = { 1, 3, 299, 299 }; const std::string name = "test"; const std::string& getName() const noexcept override { return name; @@ -29,10 +29,24 @@ public: void getOutputsInfo(OutputsDataMap& out) const noexcept override; void getInputsInfo(InputsDataMap &inputs) const noexcept override; std::shared_ptr getFunction() noexcept override { - return nullptr; + ngraph::ParameterVector parameters; + parameters.push_back(std::make_shared( + ov::element::f32, 
std::vector{INPUT_DIMENSIONS.begin(), INPUT_DIMENSIONS.end()})); + parameters.back()->set_friendly_name(INPUT_BLOB_NAME); + ngraph::ResultVector results; + results.push_back(std::make_shared(parameters.back()->output(0))); + results.back()->set_friendly_name(OUTPUT_BLOB_NAME); + return std::make_shared(results, parameters, "empty_function"); } std::shared_ptr getFunction() const noexcept override { - return nullptr; + ngraph::ParameterVector parameters; + parameters.push_back(std::make_shared( + ov::element::f32, std::vector{INPUT_DIMENSIONS.begin(), INPUT_DIMENSIONS.end()})); + parameters.back()->set_friendly_name(INPUT_BLOB_NAME); + ngraph::ResultVector results; + results.push_back(std::make_shared(parameters.back()->output(0))); + results.back()->set_friendly_name(OUTPUT_BLOB_NAME); + return std::make_shared(results, parameters, "empty_function"); } MOCK_METHOD(InputInfo::Ptr, getInput, (const std::string &inputName), (const, noexcept)); MOCK_METHOD(size_t, layerCount, (), (const, noexcept));