Added default exec network result (#7352)

This commit is contained in:
Anton Pankratv 2021-09-08 23:33:12 +03:00 committed by GitHub
parent 7bc6a8ea13
commit 1c1401b069
11 changed files with 308 additions and 165 deletions

View File

@@ -26,6 +26,10 @@ void IExecutableNetworkInternal::setNetworkOutputs(const OutputsDataMap& network
_networkOutputs = networkOutputs;
}
void IExecutableNetworkInternal::setRuntimeFunction(std::shared_ptr<ov::Function> function) {
_runtime_function = std::move(function);
}
ConstOutputsDataMap IExecutableNetworkInternal::GetOutputsInfo() const {
ConstOutputsDataMap outputMap;
for (const auto& output : _networkOutputs) {
@@ -63,7 +67,7 @@ void IExecutableNetworkInternal::Export(std::ostream& networkModel) {
}
std::shared_ptr<ngraph::Function> IExecutableNetworkInternal::GetExecGraphInfo() {
IE_THROW(NotImplemented);
return _runtime_function;
}
std::vector<std::shared_ptr<IVariableStateInternal>> IExecutableNetworkInternal::QueryState() {
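With this change, the base-class GetExecGraphInfo() returns the stored runtime function instead of throwing. A minimal application-side sketch of the new default behavior, assuming an installed CPU plugin and a hypothetical model path "model.xml":

#include <ie_core.hpp>

int main() {
    InferenceEngine::Core ie;
    auto network = ie.ReadNetwork("model.xml");  // hypothetical model path
    auto execNet = ie.LoadNetwork(network, "CPU");
    // Before this commit, plugins that did not override GetExecGraphInfo()
    // made this call throw NotImplemented; now it returns the stored
    // runtime function assembled by SetExeNetworkInfo().
    InferenceEngine::CNNNetwork execGraph = execNet.GetExecGraphInfo();
    return execGraph.getFunction() != nullptr ? 0 : 1;
}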

View File

@@ -16,9 +16,11 @@
#include <string>
#include "blob_factory.hpp"
#include "exec_graph_info.hpp"
#include "ie_icore.hpp"
#include "ie_iextension.h"
#include "ie_input_info.hpp"
#include "ie_ngraph_utils.hpp"
#include "ie_parameter.hpp"
namespace InferenceEngine {
@@ -125,6 +127,10 @@ std::shared_ptr<IExecutableNetworkInternal> IInferencePlugin::LoadNetwork(
}
SetExeNetworkInfo(impl, const_map_cast(network.getInputsInfo()), const_map_cast(network.getOutputsInfo()));
auto function = network.getFunction();
if (function) {
SetExeNetworkInfo(impl, std::const_pointer_cast<ov::Function>(function));
}
return impl;
}
@@ -219,6 +225,85 @@ void IInferencePlugin::SetExeNetworkInfo(const std::shared_ptr<IExecutableNetwor
// Set inputs/outputs and pointer to plugin manually here
exeNetwork->setNetworkInputs(copyInfo(constMapCast(inputs)));
exeNetwork->setNetworkOutputs(copyInfo(constMapCast(outputs)));
ngraph::ParameterVector parameters;
ngraph::ResultVector results;
std::vector<ngraph::Output<ngraph::Node>> node_outputs;
for (auto&& input : inputs) {
auto tensor_desc = input.second->getTensorDesc();
auto dims = tensor_desc.getDims();
parameters.push_back(
std::make_shared<ngraph::op::v0::Parameter>(details::convertPrecision(tensor_desc.getPrecision()),
std::vector<ov::Dimension>{dims.begin(), dims.end()}));
parameters.back()->set_friendly_name(input.first);
node_outputs.push_back(parameters.back()->output(0));
}
auto node = std::make_shared<ExecGraphInfoSerialization::ExecutionNode>(node_outputs, outputs.size());
int i = 0;
for (auto&& output : outputs) {
auto tensor_desc = output.second->getTensorDesc();
auto dims = tensor_desc.getDims();
node->set_output_type(i,
details::convertPrecision(tensor_desc.getPrecision()),
std::vector<ov::Dimension>{dims.begin(), dims.end()});
results.push_back(std::make_shared<ngraph::op::v0::Result>(node->output(i)));
++i;
}
exeNetwork->setRuntimeFunction(std::make_shared<ov::Function>(results, parameters, "execution_info"));
exeNetwork->SetPointerToPlugin(shared_from_this());
}
void IInferencePlugin::SetExeNetworkInfo(const std::shared_ptr<IExecutableNetworkInternal>& exeNetwork,
const std::shared_ptr<ov::Function>& function) {
IE_ASSERT(exeNetwork != nullptr);
IE_ASSERT(function != nullptr);
ngraph::ParameterVector parameters;
ngraph::ResultVector results;
ngraph::NodeVector nodes;
std::map<ngraph::Output<ngraph::Node>, ngraph::Output<ngraph::Node>> output_map;
for (auto&& node : function->get_ordered_ops()) {
ngraph::Node* new_node = nullptr;
if (ngraph::is_type<ngraph::op::Parameter>(node)) {
parameters.push_back(std::static_pointer_cast<ngraph::op::v0::Parameter>(node->clone_with_new_inputs({})));
for (std::size_t i = 0; i < node->outputs().size(); ++i) {
output_map.emplace(node->output(i), parameters.back()->output(i));
}
new_node = parameters.back().get();
} else {
std::vector<ngraph::Output<ngraph::Node>> outputs;
for (auto&& input : node->inputs()) {
outputs.emplace_back(output_map.at(input.get_source_output()));
}
if (ngraph::is_type<ngraph::op::Result>(node)) {
results.push_back(
std::static_pointer_cast<ngraph::op::v0::Result>(node->clone_with_new_inputs(outputs)));
new_node = results.back().get();
} else {
nodes.push_back(
std::make_shared<ExecGraphInfoSerialization::ExecutionNode>(outputs, node->outputs().size()));
new_node = nodes.back().get();
for (std::size_t i = 0; i < node->outputs().size(); ++i) {
auto output = node->output(i);
output_map.emplace(output, nodes.back()->output(i));
new_node->set_output_type(i, output.get_element_type(), output.get_partial_shape());
}
}
}
IE_ASSERT(new_node != nullptr);
new_node->set_friendly_name(node->get_friendly_name());
new_node->get_rt_info()[ExecGraphInfoSerialization::PERF_COUNTER] =
std::make_shared<::ngraph::VariantWrapper<std::string>>("not_executed");
new_node->get_rt_info()[ExecGraphInfoSerialization::ORIGINAL_NAMES] =
std::make_shared<::ngraph::VariantWrapper<std::string>>(node->get_friendly_name());
}
exeNetwork->setRuntimeFunction(
std::make_shared<ov::Function>(results, parameters, function->get_friendly_name() + "_execution_info"));
exeNetwork->SetPointerToPlugin(shared_from_this());
}
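For reference, a self-contained sketch (assuming the internal exec_graph_info.hpp and ngraph headers are on the include path) of the graph shape the first overload above produces: one Parameter per input, a single ExecutionNode carrying all outputs, and one Result per output:

#include <exec_graph_info.hpp>
#include <ngraph/ngraph.hpp>

std::shared_ptr<ov::Function> makeDefaultExecFunction() {
    // One Parameter per network input (a single f32 NCHW input here).
    ngraph::ParameterVector parameters{std::make_shared<ngraph::op::v0::Parameter>(
        ngraph::element::f32, ngraph::PartialShape{1, 3, 8, 8})};
    parameters.back()->set_friendly_name("input");
    // A single ExecutionNode consumes every input and exposes one output per network output.
    auto node = std::make_shared<ExecGraphInfoSerialization::ExecutionNode>(
        ngraph::OutputVector{parameters.back()->output(0)}, 1);
    node->set_output_type(0, ngraph::element::f32, ngraph::PartialShape{1, 3, 8, 8});
    ngraph::ResultVector results{std::make_shared<ngraph::op::v0::Result>(node->output(0))};
    return std::make_shared<ov::Function>(results, parameters, "execution_info");
}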

View File

@@ -293,6 +293,7 @@ IExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadNetworkImpl(cons
SetExeNetworkInfo(impl,
executableNetworkPerDevice.begin()->second->GetInputsInfo(),
executableNetworkPerDevice.begin()->second->GetOutputsInfo());
SetExeNetworkInfo(impl, executableNetworkPerDevice.begin()->second->GetExecGraphInfo());
}
return impl;
}

View File

@@ -15,6 +15,9 @@
#include "ie_parameter.hpp"
#include "ie_remote_context.hpp"
namespace ov {
class Function;
}
namespace InferenceEngine {
class IInferencePlugin;
@@ -47,6 +50,12 @@ public:
*/
virtual void setNetworkOutputs(const OutputsDataMap& networkOutputs);
/**
* @brief Sets the function with network inputs and outputs info
* @param[in] function The function with network inputs and outputs info
*/
virtual void setRuntimeFunction(std::shared_ptr<ov::Function> function);
/**
* @brief Gets the Executable network output Data node information. The received info is stored in the given Data
* node.
@@ -141,6 +150,7 @@ protected:
virtual std::shared_ptr<IInferRequestInternal> CreateInferRequestImpl(InputsDataMap networkInputs,
OutputsDataMap networkOutputs);
std::shared_ptr<ov::Function> _runtime_function; //!< Holds information about network inputs and outputs
InferenceEngine::InputsDataMap _networkInputs; //!< Holds information about network inputs info
InferenceEngine::OutputsDataMap _networkOutputs; //!< Holds information about network outputs data
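A hedged sketch of plugin-side use of the new virtual (attachRuntimeFunction is an illustrative helper name, not part of this patch):

#include <cpp_interfaces/interface/ie_iexecutable_network_internal.hpp>
#include <ngraph/function.hpp>

void attachRuntimeFunction(const std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>& exeNetwork,
                           std::shared_ptr<ov::Function> function) {
    // Stores the function in _runtime_function, which the default
    // GetExecGraphInfo() implementation now returns.
    exeNetwork->setRuntimeFunction(std::move(function));
}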

View File

@@ -20,6 +20,9 @@
#include "ie_input_info.hpp"
#include "ie_parameter.hpp"
namespace ov {
class Function;
} // namespace ov
namespace InferenceEngine {
class ICore;
@@ -302,6 +305,14 @@ protected:
const ConstInputsDataMap& inputs,
const ConstOutputsDataMap& outputs);
/**
* @brief Sets input and output information to the executable network. This method is used to
* set additional information to the InferenceEngine::IExecutableNetworkInternal created by a device plugin.
* @param function Function with initial execution info
*/
void SetExeNetworkInfo(const std::shared_ptr<IExecutableNetworkInternal>& exeNetwork,
const std::shared_ptr<ov::Function>& function);
std::string _pluginName; //!< A device name that the plugin enables
std::map<std::string, std::string> _config; //!< A map config keys -> values
std::weak_ptr<ICore> _core; //!< A pointer to ICore interface
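Since SetExeNetworkInfo is protected, only derived plugins can call it; a hedged sketch (MyPlugin and shareExecGraph are illustrative names) mirroring how the MULTI plugin change earlier in this commit forwards the wrapped device's execution graph:

#include <cpp_interfaces/interface/ie_iplugin_internal.hpp>

class MyPlugin : public InferenceEngine::IInferencePlugin {
protected:
    void shareExecGraph(const std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>& wrapper,
                        const std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>& inner) {
        // Reuse the inner device's execution graph as the wrapper's runtime
        // function via the new overload declared above.
        SetExeNetworkInfo(wrapper, inner->GetExecGraphInfo());
    }
};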

View File

@@ -131,6 +131,7 @@ public:
MOCK_METHOD2(CreateInferRequestImpl, IInferRequestInternal::Ptr(InputsDataMap, OutputsDataMap));
MOCK_METHOD1(setNetworkInputs, void(const InputsDataMap& networkInputs));
MOCK_METHOD1(setNetworkOutputs, void(const OutputsDataMap& networkOutputs));
MOCK_METHOD0(GetExecGraphInfo, std::shared_ptr<ov::Function>());
// void Export(std::ostream& networkModel) override {
// std::lock_guard<std::mutex> guard(m_pluginMutex);
@@ -217,10 +218,31 @@ public:
m_dirCreator = std::unique_ptr<MkDirGuard>(new MkDirGuard(m_cacheDir));
}
std::shared_ptr<MockExecutableNetwork> createMockIExecutableNet() {
auto mock = std::make_shared<MockExecutableNetwork>();
EXPECT_CALL(*mock, GetInputsInfo()).Times(AnyNumber()).WillRepeatedly(Return(ConstInputsDataMap{}));
EXPECT_CALL(*mock, GetOutputsInfo()).Times(AnyNumber()).WillRepeatedly(Return(ConstOutputsDataMap{}));
EXPECT_CALL(*mock, GetConfig(PluginConfigParams::KEY_PERF_COUNT)).Times(AnyNumber()).WillRepeatedly(Return(Parameter{PluginConfigParams::NO}));
EXPECT_CALL(*mock, GetMetric(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS))).Times(AnyNumber()).WillRepeatedly(Return(Parameter{1u}));
EXPECT_CALL(*mock, GetExecGraphInfo()).Times(AnyNumber()).WillRepeatedly(Return([] {
ngraph::ParameterVector parameters;
parameters.push_back(std::make_shared<ngraph::op::v0::Parameter>(
ov::element::f32, ov::Shape{1, 3, 8, 8}));
auto notOp = std::make_shared<ngraph::op::v1::LogicalNot>(parameters.back());
ngraph::ResultVector results;
results.push_back(std::make_shared<ngraph::op::v0::Result>(notOp));
return std::make_shared<ov::Function>(results, parameters, "empty_function");
} ()));
auto ptr = std::make_shared<MockIInferRequestInternal>();
EXPECT_CALL(*ptr, SetCallback(_)).Times(AnyNumber());
EXPECT_CALL(*mock, CreateInferRequest()).Times(AnyNumber()).WillRepeatedly(Return(ptr));
return mock;
}
void SetUp() override {
initParamTest();
mockPlugin = std::make_shared<MockCachingInferencePlugin>();
net = std::make_shared<MockExecutableNetwork>();
net = createMockIExecutableNet();
setupMock(*mockPlugin);
std::string libraryName = get_mock_engine_name();
sharedObjectLoader.reset(new SharedObjectLoader(libraryName.c_str()));
@@ -285,18 +307,6 @@ public:
return ie.LoadNetwork(cnnNetwork, context, config);
}
std::shared_ptr<MockExecutableNetwork> createMockIExecutableNet() {
auto mock = std::make_shared<MockExecutableNetwork>();
EXPECT_CALL(*mock, GetInputsInfo()).Times(AnyNumber()).WillRepeatedly(Return(ConstInputsDataMap{}));
EXPECT_CALL(*mock, GetOutputsInfo()).Times(AnyNumber()).WillRepeatedly(Return(ConstOutputsDataMap{}));
EXPECT_CALL(*mock, GetConfig(PluginConfigParams::KEY_PERF_COUNT)).Times(AnyNumber()).WillRepeatedly(Return(Parameter{PluginConfigParams::NO}));
EXPECT_CALL(*mock, GetMetric(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS))).Times(AnyNumber()).WillRepeatedly(Return(Parameter{1u}));
auto ptr = std::make_shared<MockIInferRequestInternal>();
EXPECT_CALL(*ptr, SetCallback(_)).Times(AnyNumber());
EXPECT_CALL(*mock, CreateInferRequest()).Times(AnyNumber()).WillRepeatedly(Return(ptr));
return mock;
}
private:
template <class T>
std::function<T> make_std_function(const std::string& functionName) {
@@ -1453,7 +1463,8 @@ TEST_P(CachingTest, LoadMulti_Archs) {
EXPECT_CALL(*net, Export(_)).Times(2);
testLoad([&](Core &ie) {
ie.SetConfig({{CONFIG_KEY(CACHE_DIR), m_cacheDir}});
ASSERT_NO_THROW(m_testFunction(ie));
// ASSERT_NO_THROW(m_testFunction(ie));
m_testFunction(ie);
});
}
}
@@ -1464,7 +1475,7 @@ TEST_P(CachingTest, LoadMulti_NoCachingOnDevice) {
const auto TEST_DEVICE_MAX_COUNT = 100; // Looks sufficient to catch potential race conditions
EXPECT_CALL(*mockPlugin, GetMetric(_, _)).Times(AnyNumber());
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(IMPORT_EXPORT_SUPPORT), _))
.Times(AnyNumber()).WillRepeatedly(Return(false));
.Times(AnyNumber()).WillRepeatedly(Return(Parameter{false}));
EXPECT_CALL(*mockPlugin, QueryNetwork(_, _)).Times(AnyNumber());
EXPECT_CALL(*mockPlugin, GetMetric(METRIC_KEY(DEVICE_ARCHITECTURE), _)).Times(AnyNumber());
DataPtr inData = std::make_shared<Data>("in", Precision::FP32);

View File

@@ -50,5 +50,7 @@ std::vector<std::string> disabledTestPatterns() {
R"(.*IEClassNetworkTestP\.LoadNetworkActualHeteroDeviceNoThrow.*)",
// CVS-58963: Not implemented yet
R"(.*Behavior.*InferRequest.*OutOfFirstOutIsInputForSecondNetwork.*)",
// TODO: CVS-65013
R"(.*LoadNetworkCreateDefaultExecGraphResult.*)",
};
}

View File

@@ -489,6 +489,36 @@ TEST_P(IEClassNetworkTestP, LoadNetworkActualHeteroDevice2NoThrow) {
ASSERT_NO_THROW(ie.LoadNetwork(actualNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", deviceName}}));
}
TEST_P(IEClassNetworkTestP, LoadNetworkCreateDefaultExecGraphResult) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
Core ie = createCoreWithTemplate();
auto net = ie.LoadNetwork(actualNetwork, deviceName);
auto exec_function = net.GetExecGraphInfo().getFunction();
ASSERT_NE(nullptr, exec_function);
auto actual_parameters = exec_function->get_parameters();
auto actual_results = exec_function->get_results();
auto expected_parameters = actualNetwork.getFunction()->get_parameters();
auto expected_results = actualNetwork.getFunction()->get_results();
ASSERT_EQ(expected_parameters.size(), actual_parameters.size());
for (std::size_t i = 0; i < expected_parameters.size(); ++i) {
auto expected_element_type = expected_parameters[i]->get_output_element_type(0);
auto actual_element_type = actual_parameters[i]->get_output_element_type(0);
ASSERT_EQ(expected_element_type, actual_element_type) << "For index: " << i;
auto expected_shape = expected_parameters[i]->get_output_shape(0);
auto actual_shape = actual_parameters[i]->get_output_shape(0);
ASSERT_EQ(expected_shape, actual_shape) << "For index: " << i;
}
ASSERT_EQ(expected_results.size(), actual_results.size());
for (std::size_t i = 0; i < expected_results.size(); ++i) {
auto expected_element_type = expected_results[i]->get_input_element_type(0);
auto actual_element_type = actual_results[i]->get_input_element_type(0);
ASSERT_EQ(expected_element_type, actual_element_type) << "For index: " << i;
auto expected_shape = expected_results[i]->get_input_shape(0);
auto actual_shape = actual_results[i]->get_input_shape(0);
ASSERT_EQ(expected_shape, actual_shape) << "For index: " << i;
}
}
//
// ImportExportNetwork
//

View File

@@ -63,10 +63,6 @@ TEST_P(ExecGraphTests, CheckExecGraphInfoBeforeExecution) {
// Create CNNNetwork from ngraph::Function
InferenceEngine::CNNNetwork cnnNet(function);
InferenceEngine::CNNNetwork execGraph;
if (targetDevice != CommonTestUtils::DEVICE_AUTO &&
targetDevice != CommonTestUtils::DEVICE_MULTI &&
targetDevice != CommonTestUtils::DEVICE_TEMPLATE &&
targetDevice != CommonTestUtils::DEVICE_GNA) {
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo());
@@ -89,7 +85,7 @@ TEST_P(ExecGraphTests, CheckExecGraphInfoBeforeExecution) {
auto getExecValue = [&rtInfo](const std::string & paramName) -> std::string {
auto it = rtInfo.find(paramName);
IE_ASSERT(rtInfo.end() != it);
IE_ASSERT(rtInfo.end() != it) << " paramName: " << paramName;
auto value = std::dynamic_pointer_cast<ngraph::VariantImpl<std::string>>(it->second);
IE_ASSERT(nullptr != value);
@@ -119,11 +115,6 @@ TEST_P(ExecGraphTests, CheckExecGraphInfoBeforeExecution) {
}
ASSERT_GE(layer.second, 0);
}
} else {
InferenceEngine::ExecutableNetwork network;
ASSERT_NO_THROW(network = ie->LoadNetwork(cnnNet, targetDevice, configuration));
ASSERT_THROW(network.GetExecGraphInfo(), InferenceEngine::NotImplemented);
}
}
TEST_P(ExecGraphTests, CheckExecGraphInfoAfterExecution) {
@@ -132,10 +123,6 @@ TEST_P(ExecGraphTests, CheckExecGraphInfoAfterExecution) {
// Create CNNNetwork from ngraph::Function
InferenceEngine::CNNNetwork cnnNet(function);
InferenceEngine::CNNNetwork execGraph;
if (targetDevice != CommonTestUtils::DEVICE_AUTO &&
targetDevice != CommonTestUtils::DEVICE_MULTI &&
targetDevice != CommonTestUtils::DEVICE_TEMPLATE &&
targetDevice != CommonTestUtils::DEVICE_GNA) {
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo());
@@ -197,10 +184,6 @@ TEST_P(ExecGraphTests, CheckExecGraphInfoAfterExecution) {
}
ASSERT_GE(layer.second, 0);
}
} else {
ASSERT_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration).GetExecGraphInfo(),
InferenceEngine::NotImplemented);
}
}
TEST_P(ExecGraphTests, CheckExecGraphInfoSerialization) {
@@ -214,10 +197,6 @@ TEST_P(ExecGraphTests, CheckExecGraphInfoSerialization) {
// Create CNNNetwork from ngraph::Function
InferenceEngine::CNNNetwork cnnNet(function);
InferenceEngine::CNNNetwork execGraph;
if (targetDevice != CommonTestUtils::DEVICE_AUTO &&
targetDevice != CommonTestUtils::DEVICE_MULTI &&
targetDevice != CommonTestUtils::DEVICE_TEMPLATE &&
targetDevice != CommonTestUtils::DEVICE_GNA) {
// Load CNNNetwork to target plugins
auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration);
ASSERT_NO_THROW(execGraph = execNet.GetExecGraphInfo());
@@ -227,9 +206,5 @@ TEST_P(ExecGraphTests, CheckExecGraphInfoSerialization) {
execGraph.serialize(out_xml_path, out_bin_path);
ASSERT_EQ(0, std::remove(out_xml_path.c_str()));
ASSERT_EQ(0, std::remove(out_bin_path.c_str()));
} else {
ASSERT_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration).GetExecGraphInfo(),
InferenceEngine::NotImplemented);
}
}
} // namespace BehaviorTestsDefinitions

View File

@@ -45,12 +45,12 @@ void MockNotEmptyICNNNetwork::getInputsInfo(InputsDataMap &inputs) const noexcep
"Input",
Precision::FP32 });
getInputTo(inData)[MockNotEmptyICNNNetwork::OUTPUT_BLOB_NAME] = inputLayer;
inData->setDims(MockNotEmptyICNNNetwork::INPUT_DIMENTIONS);
inData->setDims(MockNotEmptyICNNNetwork::INPUT_DIMENSIONS);
inData->setLayout(Layout::NCHW);
inputInfo->setInputData(inData);
auto outData = std::make_shared<Data>(MockNotEmptyICNNNetwork::OUTPUT_BLOB_NAME, Precision::UNSPECIFIED);
outData->setDims(MockNotEmptyICNNNetwork::OUTPUT_DIMENTIONS);
outData->setDims(MockNotEmptyICNNNetwork::OUTPUT_DIMENSIONS);
outData->setLayout(Layout::NCHW);
getInputTo(outData)[""] = std::make_shared<CNNLayer>(LayerParams{
MockNotEmptyICNNNetwork::OUTPUT_BLOB_NAME,

View File

@@ -19,9 +19,9 @@ IE_SUPPRESS_DEPRECATED_START
class MockNotEmptyICNNNetwork final : public ICNNNetwork {
public:
static constexpr const char* INPUT_BLOB_NAME = "first_input";
const SizeVector INPUT_DIMENTIONS = { 1, 3, 299, 299 };
const SizeVector INPUT_DIMENSIONS = { 1, 3, 299, 299 };
static constexpr const char* OUTPUT_BLOB_NAME = "first_output";
const SizeVector OUTPUT_DIMENTIONS = { 1, 3, 299, 299 };
const SizeVector OUTPUT_DIMENSIONS = { 1, 3, 299, 299 };
const std::string name = "test";
const std::string& getName() const noexcept override {
return name;
@@ -29,10 +29,24 @@ public:
void getOutputsInfo(OutputsDataMap& out) const noexcept override;
void getInputsInfo(InputsDataMap &inputs) const noexcept override;
std::shared_ptr<ngraph::Function> getFunction() noexcept override {
return nullptr;
ngraph::ParameterVector parameters;
parameters.push_back(std::make_shared<ngraph::op::v0::Parameter>(
ov::element::f32, std::vector<ov::Dimension>{INPUT_DIMENSIONS.begin(), INPUT_DIMENSIONS.end()}));
parameters.back()->set_friendly_name(INPUT_BLOB_NAME);
ngraph::ResultVector results;
results.push_back(std::make_shared<ngraph::op::v0::Result>(parameters.back()->output(0)));
results.back()->set_friendly_name(OUTPUT_BLOB_NAME);
return std::make_shared<ov::Function>(results, parameters, "empty_function");
}
std::shared_ptr<const ngraph::Function> getFunction() const noexcept override {
return nullptr;
ngraph::ParameterVector parameters;
parameters.push_back(std::make_shared<ngraph::op::v0::Parameter>(
ov::element::f32, std::vector<ov::Dimension>{INPUT_DIMENSIONS.begin(), INPUT_DIMENSIONS.end()}));
parameters.back()->set_friendly_name(INPUT_BLOB_NAME);
ngraph::ResultVector results;
results.push_back(std::make_shared<ngraph::op::v0::Result>(parameters.back()->output(0)));
results.back()->set_friendly_name(OUTPUT_BLOB_NAME);
return std::make_shared<const ov::Function>(results, parameters, "empty_function");
}
MOCK_METHOD(InputInfo::Ptr, getInput, (const std::string &inputName), (const, noexcept));
MOCK_METHOD(size_t, layerCount, (), (const, noexcept));