Query network should return all supported layers (#1845)

* Query network should return all supported layers

* Added other plugins. Fixed hetero and multi

* IRv7 test use legacy impl
This commit is contained in:
Anton Pankratv 2020-08-26 15:33:37 +03:00 committed by GitHub
parent 99136f0940
commit 7c1b87c7aa
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 110 additions and 79 deletions

View File

@@ -160,17 +160,15 @@ void Plugin::QueryNetwork(const ICNNNetwork &network, const ConfigMap& config, Q
std::unordered_set<std::string> unsupported;
auto opset = ngraph::get_opset4();
for (auto&& node : transformedFunction->get_ops()) {
if (!ngraph::op::is_constant(node) && !ngraph::op::is_parameter(node) && !ngraph::op::is_output(node)) {
// Extract transformation history from transformed node as list of nodes
for (auto&& fusedLayerName : ngraph::getFusedNamesVector(node)) {
// Filter just nodes from original operation set
// TODO: fill with actual decision rules based on whether kernel is supported by backend
if (contains(originalOps, fusedLayerName)) {
if (opset.contains_type_insensitive(fusedLayerName)) {
supported.emplace(fusedLayerName);
} else {
unsupported.emplace(fusedLayerName);
}
// Extract transformation history from transformed node as list of nodes
for (auto&& fusedLayerName : ngraph::getFusedNamesVector(node)) {
// Filter just nodes from original operation set
// TODO: fill with actual decision rules based on whether kernel is supported by backend
if (contains(originalOps, fusedLayerName)) {
if (opset.contains_type_insensitive(fusedLayerName)) {
supported.emplace(fusedLayerName);
} else {
unsupported.emplace(fusedLayerName);
}
}
}

View File

@@ -291,10 +291,7 @@ void clDNNEngine::QueryNetwork(const ICNNNetwork& network,
if (function != nullptr) {
std::unordered_set<std::string> originalOps;
for (auto&& node : function->get_ops()) {
if (!ngraph::op::is_parameter(node) &&
!ngraph::op::is_output(node)) {
originalOps.emplace(node->get_friendly_name());
}
originalOps.emplace(node->get_friendly_name());
}
auto clonedNetwork = CloneAndTransformNetwork(network);
std::unordered_set<std::string> supported;
@@ -422,6 +419,23 @@ void clDNNEngine::QueryNetwork(const ICNNNetwork& network,
}
}
for (auto&& node : function->get_ops()) {
if (contains(supported, node->get_friendly_name())) {
for (auto&& inputNodeOutput : node->input_values()) {
if (ngraph::op::is_constant(inputNodeOutput.get_node()) || ngraph::op::is_parameter(inputNodeOutput.get_node())) {
supported.emplace(inputNodeOutput.get_node()->get_friendly_name());
}
}
for (auto&& outputs : node->outputs()) {
for (auto&& outputNodeInput : outputs.get_target_inputs()) {
if (ngraph::op::is_output(outputNodeInput.get_node())) {
supported.emplace(outputNodeInput.get_node()->get_friendly_name());
}
}
}
}
}
for (auto&& layerName : supported) {
res.supportedLayersMap.emplace(layerName, GetName());
}

View File

@@ -364,11 +364,6 @@ void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::ICNNNetwork& net
QueryNetworkResult queryNetworkResult;
auto orderedOps = function->get_ordered_ops();
orderedOps.erase(
std::remove_if(std::begin(orderedOps), std::end(orderedOps), [] (const std::shared_ptr<ngraph::Node>& node) {
return ngraph::op::is_constant(node);
}),
std::end(orderedOps));
bool allEmpty = true;
// Get user defined affinity
for (auto&& node : orderedOps) {
@@ -400,40 +395,21 @@ void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::ICNNNetwork& net
return input.get_source_output().get_node();
};
auto NoConstants = [] (std::vector<ngraph::Input<ngraph::Node>>&& inputs) {
std::vector<ngraph::Input<ngraph::Node>> result;
for (auto&& input : inputs) {
if (!(ngraph::op::is_constant(input.get_source_output().get_node()))) {
result.emplace_back(std::move(input));
// Set results, constants and parameters affinity
for (auto&& node : function->get_ops()) {
if (ngraph::op::is_constant(node) || ngraph::op::is_output(node) || ngraph::op::is_parameter(node)) {
if (!contains(queryNetworkResult.supportedLayersMap, node->get_friendly_name())) {
auto& nodeWithAffinityName = ngraph::op::is_output(node)
? node->input_value(0).get_node()->get_friendly_name()
: node->output(0).get_target_inputs().begin()->get_node()->get_friendly_name();
auto itAffinity = queryNetworkResult.supportedLayersMap.find(nodeWithAffinityName);
if (itAffinity == queryNetworkResult.supportedLayersMap.end()) {
THROW_IE_EXCEPTION << "Node " << nodeWithAffinityName <<
" was not assigned on any pointed device.";
}
queryNetworkResult.supportedLayersMap.emplace(node->get_friendly_name(), itAffinity->second);
}
}
return result;
};
// Set parameters affinity
for (auto&& node : function->get_parameters()) {
if (!contains(queryNetworkResult.supportedLayersMap, node->get_friendly_name())) {
auto& outputNodeName = node->output(0).get_target_inputs().begin()->get_node()->get_friendly_name();
auto itOutputAffinity = queryNetworkResult.supportedLayersMap.find(outputNodeName);
if (itOutputAffinity == queryNetworkResult.supportedLayersMap.end()) {
THROW_IE_EXCEPTION << "Layer " << outputNodeName <<
" was not assigned on any pointed device.";
}
queryNetworkResult.supportedLayersMap[node->get_friendly_name()] = itOutputAffinity->second;
}
}
// Set results affinity
for (auto&& node : function->get_results()) {
if (!contains(queryNetworkResult.supportedLayersMap, node->get_friendly_name())) {
auto& inputNodeName = node->input_value(0).get_node()->get_friendly_name();
auto itInputAffinity = queryNetworkResult.supportedLayersMap.find(inputNodeName);
if (itInputAffinity == queryNetworkResult.supportedLayersMap.end()) {
THROW_IE_EXCEPTION << "Layer " << inputNodeName <<
" was not assigned on any pointed device.";
}
queryNetworkResult.supportedLayersMap[node->get_friendly_name()] = itInputAffinity->second;
}
}
std::unordered_set<std::string> devices;
@@ -480,12 +456,12 @@ void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::ICNNNetwork& net
InputSet subgraphInputs;
// Get all subgraph inputs using just node affinities. Also collect transitive closure
for (auto&& node : orderedOps) {
if (ngraph::op::is_parameter(node)) {
if (ngraph::op::is_parameter(node) || ngraph::op::is_constant(node)) {
graphInputNodes.insert(node.get());
subgraphInputs.insert(Input{node.get(), 0});
nodeInputDependencies[node.get()].insert(Input{node.get(), 0});
} else {
auto inputs = NoConstants(node->inputs());
auto inputs = node->inputs();
auto& nodeInputDependency = nodeInputDependencies[node.get()];
for (auto&& input : inputs) {
nodeInputDependency.insert(input);
@@ -503,7 +479,7 @@ void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::ICNNNetwork& net
std::deque<int> subgraphIds;
NodeMap<int*> subgraphIdPtrs;
for (auto&& node : orderedOps) {
auto allNodeInputs = NoConstants(node->inputs());
auto allNodeInputs = node->inputs();
std::vector<Input> inputs;
for (auto&& input : allNodeInputs) {
if (!contains(subgraphInputs, input)) {
@@ -553,7 +529,8 @@ void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::ICNNNetwork& net
auto& nodeSubgraphCyclicInputDependency = nodeSubgraphCyclicInputDependencies[node.get()];
for (auto&& subgraphInput : allNodeSubgraphInputs) {
if (!ngraph::op::is_parameter(subgraphInput.get_node()) &&
subgraphIds[node.get()] == subgraphIds[InputNode(subgraphInput)]) {
!ngraph::op::is_constant(subgraphInput.get_node()) &&
subgraphIds[node.get()] == subgraphIds[InputNode(subgraphInput)]) {
nodeSubgraphCyclicInputDependency.emplace(subgraphInput);
}
}
@@ -570,7 +547,7 @@ void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::ICNNNetwork& net
cyclicInputsDependencies.emplace(input);
}
}
for (auto&& input : NoConstants(node->inputs())) {
for (auto&& input : node->inputs()) {
auto& inputNodeSubgraphCyclicInputDependency = nodeSubgraphCyclicInputDependencies[InputNode(input)];
auto& inputNodeSubgraphInputDependency = nodeSubgraphInputDependencies[InputNode(input)];
if (!Intersects(nodeSubgraphCyclicInputDependency,
@@ -588,7 +565,7 @@ void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::ICNNNetwork& net
NodeMap<ngraph::Node*> subgraphParameterToPrevResult;
std::vector<std::shared_ptr<ngraph::op::Result>> results;
for (auto&& input : subgraphInputs) {
if (!ngraph::op::is_parameter(input.get_node())) {
if (!ngraph::op::is_parameter(input.get_node()) && !ngraph::op::is_constant(input.get_node())) {
auto output = input.get_source_output();
output.remove_target_input(input);
auto result = std::make_shared<ngraph::op::Result>(output);

View File

@@ -14,6 +14,7 @@
#include "ie_plugin_config.hpp"
#include "hetero/hetero_plugin_config.hpp"
#include "hetero_executable_network.hpp"
#include <cpp_interfaces/interface/ie_internal_plugin_config.hpp>
using namespace InferenceEngine;
using namespace InferenceEngine::PluginConfigParams;
@@ -125,6 +126,11 @@ Engine::DeviceMetaInformationMap Engine::GetDevicePlugins(const std::string& tar
if (metaDevices.end() == itPlugin) {
metaDevices[deviceName] = getDeviceConfig(deviceName);
}
std::vector<std::string> supportedConfigKeys = GetCore()->GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS));
if (std::find(std::begin(supportedConfigKeys), std::end(supportedConfigKeys), CONFIG_KEY_INTERNAL(AGGREGATED_PLUGIN))
!= std::end(supportedConfigKeys)) {
metaDevices[deviceName].emplace(CONFIG_KEY_INTERNAL(AGGREGATED_PLUGIN), "");
}
}
return metaDevices;
}
@@ -227,8 +233,12 @@ void Engine::QueryNetwork(const ICNNNetwork &network, const Configs& config, Que
return true;
});
if (!allSupportsNgraph) {
auto cnnNetworkImpl = std::make_shared<details::CNNNetworkImpl>(network);
queryNetwork(*cnnNetworkImpl);
if (contains(tconfig, CONFIG_KEY_INTERNAL(AGGREGATED_PLUGIN))) {
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
} else {
auto cnnNetworkImpl = std::make_shared<details::CNNNetworkImpl>(network);
queryNetwork(*cnnNetworkImpl);
}
} else {
queryNetwork(network);
}
@@ -259,7 +269,8 @@ Parameter Engine::GetMetric(const std::string& name, const std::map<std::string,
IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, std::vector<std::string>{
HETERO_CONFIG_KEY(DUMP_GRAPH_DOT),
"TARGET_FALLBACK",
CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS)});
CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS),
CONFIG_KEY_INTERNAL(AGGREGATED_PLUGIN)});
} else if (METRIC_KEY(FULL_DEVICE_NAME) == name) {
IE_SET_METRIC_RETURN(FULL_DEVICE_NAME, std::string{"HETERO"});
} else {

View File

@@ -285,9 +285,7 @@ void Engine::QueryNetwork(const ICNNNetwork& network, const std::map<std::string
if (function != nullptr) {
std::unordered_set<std::string> originalOps;
for (auto&& node : function->get_ops()) {
if (!ngraph::op::is_constant(node) && !ngraph::op::is_parameter(node) && !ngraph::op::is_output(node)) {
originalOps.emplace(node->get_friendly_name());
}
originalOps.emplace(node->get_friendly_name());
}
auto clonedNetwork = cloneNetwork(network);
Transformation(clonedNetwork);
@@ -313,6 +311,24 @@ void Engine::QueryNetwork(const ICNNNetwork& network, const std::map<std::string
}
}
}
for (auto&& node : function->get_ops()) {
if (!contains(unsupported, node->get_friendly_name())) {
for (auto&& inputNodeOutput : node->input_values()) {
if (ngraph::op::is_constant(inputNodeOutput.get_node())) {
supported.emplace(inputNodeOutput.get_node()->get_friendly_name());
}
}
for (auto&& outputs : node->outputs()) {
for (auto&& outputNodeInput : outputs.get_target_inputs()) {
if (ngraph::op::is_output(outputNodeInput.get_node())) {
supported.emplace(outputNodeInput.get_node()->get_friendly_name());
}
}
}
}
}
for (auto&& layerName : supported) {
if (!contains(unsupported, layerName)) {
res.supportedLayersMap.emplace(layerName, GetName());

View File

@@ -15,6 +15,7 @@
#include "ie_metric_helpers.hpp"
#include <legacy/ie_util_internal.hpp>
#include <cpp_interfaces/base/ie_infer_async_request_base.hpp>
#include <cpp_interfaces/interface/ie_internal_plugin_config.hpp>
#include <multi-device/multi_device_config.hpp>
#include <ie_plugin_config.hpp>
#include "multi_device.hpp"
@@ -385,20 +386,25 @@ DeviceMap<DeviceInformation> MultiDeviceInferencePlugin::ParseMetaDevices(const
for (auto && d : devicesWithRequests) {
auto openingBracket = d.find_first_of('(');
auto closingBracket = d.find_first_of(')', openingBracket);
auto device_name = d.substr(0, openingBracket);
auto deviceName = d.substr(0, openingBracket);
int numRequests = -1;
if (closingBracket != std::string::npos && openingBracket < closingBracket) {
numRequests = std::stol(d.substr(openingBracket + 1, closingBracket - 1));
if (numRequests <= 0) {
THROW_IE_EXCEPTION << "Priority value for '" << device_name << "' must be > 0, while " << numRequests
THROW_IE_EXCEPTION << "Priority value for '" << deviceName << "' must be > 0, while " << numRequests
<< "is passed";
}
}
// create meta device
metaDevices[device_name] = { getDeviceConfig(device_name), numRequests };
metaDevices[deviceName] = { getDeviceConfig(deviceName), numRequests };
std::vector<std::string> supportedConfigKeys = GetCore()->GetMetric(deviceName, METRIC_KEY(SUPPORTED_CONFIG_KEYS));
if (std::find(std::begin(supportedConfigKeys), std::end(supportedConfigKeys), CONFIG_KEY_INTERNAL(AGGREGATED_PLUGIN))
!= std::end(supportedConfigKeys)) {
metaDevices[deviceName].config.emplace(CONFIG_KEY_INTERNAL(AGGREGATED_PLUGIN), "");
}
}
return metaDevices;
@@ -443,7 +449,9 @@ InferenceEngine::Parameter MultiDeviceInferencePlugin::GetMetric(const std::stri
std::string name = { "MULTI" };
IE_SET_METRIC_RETURN(FULL_DEVICE_NAME, name);
} else if (name == METRIC_KEY(SUPPORTED_CONFIG_KEYS)) {
std::vector<std::string> configKeys = { MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES };
std::vector<std::string> configKeys = {
MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES,
CONFIG_KEY_INTERNAL(AGGREGATED_PLUGIN)};
IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, configKeys);
} else {
THROW_IE_EXCEPTION << "Unsupported metric key " << name;
@@ -541,8 +549,12 @@ void MultiDeviceInferencePlugin::QueryNetwork(const ICNNNetwork&
if (network.getFunction()) {
if (!allSupportsNgraph) {
auto cnnNetworkImpl = std::make_shared<details::CNNNetworkImpl>(network);
queryNetwork(*cnnNetworkImpl);
if (contains(fullConfig, CONFIG_KEY_INTERNAL(AGGREGATED_PLUGIN))) {
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
} else {
auto cnnNetworkImpl = std::make_shared<details::CNNNetworkImpl>(network);
queryNetwork(*cnnNetworkImpl);
}
} else {
queryNetwork(network);
}

View File

@@ -44,6 +44,13 @@ DECLARE_CONFIG_KEY(SUBNETWORK_WITH_NETWORK_INPUTS);
*/
DECLARE_CONFIG_KEY(CPU_THREADS_PER_STREAM);
/**
* @brief This key should be used to notify aggregating plugin
* that it is used inside other aggregating plugin
* @ingroup ie_dev_api_plugin_api
*/
DECLARE_CONFIG_KEY(AGGREGATED_PLUGIN);
} // namespace PluginConfigInternalParams
} // namespace InferenceEngine

View File

@@ -1316,13 +1316,13 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkHETEROwithMULTINoThrow_v7) {
}
}
auto convertedActualNetwork = std::make_shared<details::CNNNetworkImpl>(actualNetwork);
QueryNetworkResult result;
std::string targetFallback(std::string(CommonTestUtils::DEVICE_MULTI) + "," + CommonTestUtils::DEVICE_CPU);
ASSERT_NO_THROW(result = ie.QueryNetwork(actualNetwork, CommonTestUtils::DEVICE_HETERO, {
ASSERT_NO_THROW(result = ie.QueryNetwork(InferenceEngine::CNNNetwork{convertedActualNetwork}, CommonTestUtils::DEVICE_HETERO, {
{MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices},
{"TARGET_FALLBACK", targetFallback}}));
auto convertedActualNetwork = std::make_shared<details::CNNNetworkImpl>(actualNetwork);
for (auto &&layer : result.supportedLayersMap) {
EXPECT_NO_THROW(CommonTestUtils::getLayerByName(convertedActualNetwork.get(), layer.first));
}
@@ -1346,11 +1346,11 @@ TEST_P(IEClassLoadNetworkTest, QueryNetworkMULTIwithHETERONoThrowv7) {
}
QueryNetworkResult result;
ASSERT_NO_THROW(result = ie.QueryNetwork(actualNetwork, CommonTestUtils::DEVICE_MULTI, {
auto convertedActualNetwork = std::make_shared<details::CNNNetworkImpl>(actualNetwork);
ASSERT_NO_THROW(result = ie.QueryNetwork(InferenceEngine::CNNNetwork{convertedActualNetwork}, CommonTestUtils::DEVICE_MULTI, {
{MULTI_CONFIG_KEY(DEVICE_PRIORITIES), devices},
{"TARGET_FALLBACK", deviceName + "," + CommonTestUtils::DEVICE_CPU}}));
auto convertedActualNetwork = std::make_shared<details::CNNNetworkImpl>(actualNetwork);
for (auto &&layer : result.supportedLayersMap) {
EXPECT_NO_THROW(CommonTestUtils::getLayerByName(convertedActualNetwork.get(), layer.first));
}

View File

@@ -28,11 +28,7 @@ TEST_P(QueryNetworkTest, queryNetworkResultContainAllAndOnlyInputLayers) {
ASSERT_NE(nullptr, cnnNetwork.getFunction());
std::set<std::string> expectedLayers;
for (auto&& node : function->get_ops()) {
if (!ngraph::op::is_parameter(node) &&
!ngraph::op::is_constant(node) &&
!ngraph::op::is_output(node)) {
expectedLayers.insert(node->get_friendly_name());
}
expectedLayers.insert(node->get_friendly_name());
}
std::set<std::string> actualLayers;
for (auto&& res : queryNetworkResult.supportedLayersMap) {