From fb1b03752db06f3bb2be7035eba145629062b242 Mon Sep 17 00:00:00 2001
From: Ilya Lavrenov
Date: Mon, 30 Nov 2020 07:08:31 +0300
Subject: [PATCH] Removed legacy from hetero plugin (#3398)

Co-authored-by: apankratovantonp
Co-authored-by: Alexander Zhogov
---
 .../src/hetero_plugin/CMakeLists.txt          |   4 +-
 .../src/hetero_plugin/hetero_ade_util.cpp     |  68 ----
 .../src/hetero_plugin/hetero_ade_util.hpp     |  24 --
 .../hetero_executable_network.cpp             | 350 ++----------------
 .../hetero_executable_network.hpp             |   1 -
 .../hetero_plugin/hetero_graph_splitter.cpp   | 236 ------------
 .../hetero_plugin/hetero_graph_splitter.hpp   |  39 --
 .../hetero_plugin/hetero_infer_request.cpp    |   1 -
 .../src/hetero_plugin/hetero_plugin.cpp       | 121 +-----
 .../src/hetero_plugin/hetero_plugin.hpp       |  15 -
 10 files changed, 42 insertions(+), 817 deletions(-)
 delete mode 100644 inference-engine/src/hetero_plugin/hetero_ade_util.cpp
 delete mode 100644 inference-engine/src/hetero_plugin/hetero_ade_util.hpp
 delete mode 100644 inference-engine/src/hetero_plugin/hetero_graph_splitter.cpp
 delete mode 100644 inference-engine/src/hetero_plugin/hetero_graph_splitter.hpp

diff --git a/inference-engine/src/hetero_plugin/CMakeLists.txt b/inference-engine/src/hetero_plugin/CMakeLists.txt
index cd8d15a4ece..41eb7688e6a 100644
--- a/inference-engine/src/hetero_plugin/CMakeLists.txt
+++ b/inference-engine/src/hetero_plugin/CMakeLists.txt
@@ -16,8 +16,8 @@ ie_faster_build(${TARGET_NAME}
     UNITY
 )
 
-target_link_libraries(${TARGET_NAME} PRIVATE ade pugixml inference_engine
-                      inference_engine_legacy ${NGRAPH_LIBRARIES} inference_engine_transformations)
+target_link_libraries(${TARGET_NAME} PRIVATE pugixml inference_engine
+                      ${NGRAPH_LIBRARIES} inference_engine_transformations)
 
 ie_add_api_validator_post_build_step(TARGET ${TARGET_NAME})
 
diff --git a/inference-engine/src/hetero_plugin/hetero_ade_util.cpp b/inference-engine/src/hetero_plugin/hetero_ade_util.cpp
deleted file mode 100644
index 75fbccbfb11..00000000000
--- a/inference-engine/src/hetero_plugin/hetero_ade_util.cpp
+++ /dev/null
@@ -1,68 +0,0 @@
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include "hetero_ade_util.hpp"
-
-#include 
-#include 
-
-#include 
-#include 
-#include 
-
-#include 
-#include 
-#include 
-
-namespace InferenceEngine {
-namespace {
-using VisitedLayersMap = std::unordered_map;
-using TGraph = ade::TypedGraph;
-
-void translateVisitLayer(VisitedLayersMap& visited,
-                         TGraph& gr,
-                         const ade::NodeHandle& prevNode,
-                         const CNNLayer::Ptr& layer) {
-    assert(nullptr != layer);;
-    assert(!ade::util::contains(visited, layer));
-    auto node = gr.createNode();
-    gr.metadata(node).set(CNNLayerMetadata{layer});
-    if (nullptr != prevNode) {
-        gr.link(prevNode, node);
-    }
-    visited.insert({layer, node});
-    for (auto&& data : layer->outData) {
-        for (auto&& layerIt : getInputTo(data)) {
-            auto nextLayer = layerIt.second;
-            auto it = visited.find(nextLayer);
-            if (visited.end() == it) {
-                translateVisitLayer(visited, gr, node, nextLayer);
-            } else {
-                gr.link(node, it->second);
-            }
-        }
-    }
-}
-} // namespace
-
-void translateNetworkToAde(ade::Graph& gr, ICNNNetwork& network) {
-    TGraph tgr(gr);
-    VisitedLayersMap visited;
-    for (auto& data : getRootDataObjects(network)) {
-        assert(nullptr != data);
-        for (auto& layerIt : getInputTo(data)) {
-            auto layer = layerIt.second;
-            assert(nullptr != layer);
-            if (!ade::util::contains(visited, layer)) {
-                translateVisitLayer(visited, tgr, nullptr, layer);
-            }
-        }
-    }
-}
-
-const char* CNNLayerMetadata::name() {
-    return
"CNNLayerMetadata"; -} - -} // namespace InferenceEngine diff --git a/inference-engine/src/hetero_plugin/hetero_ade_util.hpp b/inference-engine/src/hetero_plugin/hetero_ade_util.hpp deleted file mode 100644 index 7d10bc1bca3..00000000000 --- a/inference-engine/src/hetero_plugin/hetero_ade_util.hpp +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (C) 2018-2020 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#pragma once - -#include - -namespace ade { -class Graph; -} // namespace ade - -namespace InferenceEngine { - -struct CNNLayerMetadata { - CNNLayerPtr layer; - - static const char* name(); -}; - -class ICNNNetwork; -void translateNetworkToAde(ade::Graph& gr, ICNNNetwork& network); -} // namespace InferenceEngine - diff --git a/inference-engine/src/hetero_plugin/hetero_executable_network.cpp b/inference-engine/src/hetero_plugin/hetero_executable_network.cpp index d9f2b0a04a2..6ea6efd5cc4 100644 --- a/inference-engine/src/hetero_plugin/hetero_executable_network.cpp +++ b/inference-engine/src/hetero_plugin/hetero_executable_network.cpp @@ -5,8 +5,6 @@ #include "ie_metric_helpers.hpp" #include "hetero_executable_network.hpp" #include "hetero_async_infer_request.hpp" -#include -#include "hetero_graph_splitter.hpp" #include "hetero_itt.hpp" #include "xml_parse_utils.h" #include @@ -44,283 +42,19 @@ using namespace HeteroPlugin; using namespace InferenceEngine::PluginConfigParams; using namespace InferenceEngine::HeteroConfigParams; -namespace { - -void forward(const CNNLayerPtr& layer, std::deque& layers) { - for (const auto& out : layer->outData) { - for (const auto& out_link : getInputTo(out)) { - const auto& nextLayer = out_link.second; - if (nullptr != nextLayer) { - layers.emplace_back(nextLayer); - } - } - } -} - -template -void traverse(T& inputs, - std::function apply, - std::function& layers)> expand = forward) { - std::unordered_set visitedObjects; - std::deque layersToCheck; - - layersToCheck.insert(layersToCheck.end(), inputs.begin(), inputs.end()); - - while (!layersToCheck.empty()) { - auto& layer = layersToCheck.front(); - if (visitedObjects.insert(layer).second) { - apply(layer); - expand(layer, layersToCheck); - } - layersToCheck.pop_front(); - } -} - -void traverse(InferenceEngine::ICNNNetwork& network, - std::function apply, - std::function& layers)> expand = forward) { - std::vector layers; - - InferenceEngine::InputsDataMap inputs; - network.getInputsInfo(inputs); - for (const auto& input : inputs) { - const auto data = input.second->getInputData(); - for (const auto& to : getInputTo(data)) { - const auto nextLayer = to.second; - assert(nullptr != nextLayer); - layers.emplace_back(nextLayer); - } - } - - traverse(layers, apply, expand); -} - -std::vector getAffinities(InferenceEngine::ICNNNetwork &network) { - std::vector ret; - std::unordered_set affinities; - traverse(network, - [&](const InferenceEngine::CNNLayerPtr &layer) { - assert(nullptr != layer); - if (!contains(affinities, layer->affinity)) { - affinities.insert(layer->affinity); - ret.push_back(layer->affinity); - } - }); - return ret; -} - -void dumpGraph(InferenceEngine::ICNNNetwork &network, - const std::vector &subgraphs, - std::ostream &stream) { - static const std::array colors{{"#FFC405", - "#20F608", - "#F1F290", - "#C405FF", - "#BCFF05", - "#05FFC4", - "#FFC405", - "#5A5DF0", - "#FF2E05"}}; - auto split_color = [subgraphs](const CNNLayerPtr layer, - ordered_properties &printed_properties, - ordered_properties &node_properties) { - for (size_t i = 0; i < subgraphs.size(); i++) { - for 
(auto s : subgraphs[i]) { - if (s->name == layer->name) { - node_properties.emplace_back( - "fillcolor", - colors[std::min(i, colors.size() - 1)]); - printed_properties.insert(printed_properties.begin(), - std::pair("subgraph#", std::to_string(i))); - printed_properties.insert(printed_properties.begin(), - std::pair("device", layer->affinity)); - return; - } - } - } - }; - - saveGraphToDot(network, stream, split_color); -} - -} // namespace - -void HeteroExecutableNetwork::InitCNNImpl(const InferenceEngine::CNNNetwork& network_) { - auto networkPtr = cloneNet(network_); - auto& network = *networkPtr; - - // going over all network, if all layers are not assigned to devices, apply the default fallback policy - details::CNNNetworkIterator i(&network); - bool allEmpty = true; - while (i != details::CNNNetworkIterator()) { - CNNLayer::Ptr layer = *i; - if (!layer->affinity.empty()) { - allEmpty = false; - break; - } - i++; - } - - auto itDumpDotFile = _config.find(HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)); - bool dumpDotFile = itDumpDotFile != _config.end() ? itDumpDotFile->second == YES : false; -#ifndef NDEBUG - dumpDotFile = true; -#endif - - if (allEmpty) { - auto it = _config.find("TARGET_FALLBACK"); - if (it != _config.end()) { - _heteroPlugin->SetAffinity(InferenceEngine::CNNNetwork(networkPtr), _config); - } else { - THROW_IE_EXCEPTION << "The 'TARGET_FALLBACK' option was not defined for heterogeneous plugin"; - } - } else { - if (dumpDotFile) { - std::unordered_set devicesSet; - details::CNNNetworkIterator i(&network); - while (i != details::CNNNetworkIterator()) { - CNNLayer::Ptr layer = *i; - if (!layer->affinity.empty()) { - devicesSet.insert(layer->affinity); - } - i++; - } - std::vector devices{std::begin(devicesSet), std::end(devicesSet)}; - std::stringstream stream(std::stringstream::out); - stream << "hetero_affinity_" << network.getName() << ".dot"; - std::ofstream file(stream.str().c_str()); - saveGraphToDot(network, file, HeteroLayerColorer{devices}); - } - } - - details::CNNNetworkIterator el(&network); - bool someEmptyAffinity = false; - CNNLayer::Ptr layerEmptyAffinity = nullptr; - while (el != details::CNNNetworkIterator()) { - CNNLayer::Ptr layer = *el; - if (!CaselessEq()(layer->type, "input") && - layer->affinity.empty()) { - someEmptyAffinity = true; - layerEmptyAffinity = layer; - break; - } - el++; - } - - if (allEmpty && someEmptyAffinity) { - THROW_IE_EXCEPTION << "Hetero plugin used default fallback policy, but some layers eg: \n(Name:" << - layerEmptyAffinity->name << ", Type: " << layerEmptyAffinity->type << - ") were not able to be assigned on any pointed device.\n" << - "It happened because these layers are not supported in plugins by default.\n" << - "You need to implement custom layers to support them."; - } else if (someEmptyAffinity) { - THROW_IE_EXCEPTION << "Network passed to LoadNetwork has affinity assigned, but some layers eg: \n(Name:" << - layerEmptyAffinity->name << ", Type: " << layerEmptyAffinity->type << - ") were not assigned to any device.\n" << - "It might happen if you assigned layers manually and missed some layers or\n" << - "if you used some automatic assigning mode which decided that these layers are not\n" << - "supported by any plugin"; - } - - InputsDataMap externalInputsData; - network.getInputsInfo(externalInputsData); - - OutputsDataMap externalOutputsData; - network.getOutputsInfo(externalOutputsData); - - auto subgraphs = splitGraph(network, getAffinities(network)); - sortSubgraphs(subgraphs); - - if (dumpDotFile) { - 
std::stringstream stream(std::stringstream::out); - stream << "hetero_subgraphs_" << network.getName() << ".dot"; - - std::ofstream file(stream.str().c_str()); - dumpGraph(network, subgraphs, file); - } - - std::vector descs; - std::vector tempLayers; - for (auto &&subgraph : subgraphs) { - auto affinity = (*subgraph.begin())->affinity; - tempLayers.assign(subgraph.begin(), subgraph.end()); - auto tempNetwork = cloneNet(tempLayers); - auto name = network.getName() + "_" + std::to_string(std::distance(subgraphs.data(), &subgraph)); - tempNetwork->setName(name); - // restoring some outputs from original net if they are not marked as output automatically - // this might happen if output was set manually for origin network and - // it doesn't go to next subgraph - for (auto il : tempLayers) { - if (externalOutputsData.find(il->name) != externalOutputsData.end()) { - tempNetwork->addOutput(il->name); - } - } - - // update of pre-processing info - InputsDataMap clonedInputs; - tempNetwork->getInputsInfo(clonedInputs); - for (auto &&it : externalInputsData) { - auto inp = clonedInputs.find(it.first); - if (inp != clonedInputs.end() && nullptr != inp->second) { - inp->second->setPrecision(it.second->getPrecision()); - inp->second->getPreProcess() = it.second->getPreProcess(); - } - } - - // go over all inputs/outputs and right now - // set precision for intermediate data (not for external) to FP32 - for (auto &&it : clonedInputs) { - if (externalInputsData.find(it.first) == externalInputsData.end()) { - it.second->setPrecision(Precision::FP32); - } - } - - OutputsDataMap tmpOutputs; - tempNetwork->getOutputsInfo(tmpOutputs); - for (auto &&o : tmpOutputs) { - if (externalOutputsData.find(o.first) == externalOutputsData.end()) { - o.second->setPrecision(Precision::FP32); - } - } - - NetworkDesc desc; - desc._device = affinity; - desc._clonedNetwork = CNNNetwork{tempNetwork}; - - descs.emplace_back(std::move(desc)); - } - - for (auto &&d : descs) { - IExecutableNetwork::Ptr ret; - - auto subnetworkInputs = d._clonedNetwork.getInputsInfo(); - bool isInputSubnetwork = (subnetworkInputs.end() != std::find_first_of( - subnetworkInputs.begin(), subnetworkInputs.end(), - externalInputsData.begin(), externalInputsData.end(), - [] (const InputsDataMap::value_type& lhs, const InputsDataMap::value_type& rhs) { - return lhs.first == rhs.first; - })); - - auto cfg = _config; - cfg[PluginConfigInternalParams::KEY_SUBNETWORK_WITH_NETWORK_INPUTS] = - isInputSubnetwork ? 
CONFIG_VALUE(YES) : CONFIG_VALUE(NO); - - auto deviceName = d._device; - auto metaDevices = _heteroPlugin->GetDevicePlugins(deviceName, cfg); - assert(metaDevices.size() == 1); - auto loadConfig = metaDevices[deviceName]; - d._network = _heteroPlugin->GetCore()->LoadNetwork(d._clonedNetwork, deviceName, loadConfig); - } - - networks = std::move(descs); -} - template using NodeMap = std::unordered_map; -void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::CNNNetwork& network_) { - auto function = network_.getFunction(); +HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwork& network, + const Engine::Configs& config, + Engine* plugin): + InferenceEngine::ExecutableNetworkThreadSafeDefault( + nullptr, std::make_shared()), + _heteroPlugin{plugin}, + _name{network.getName()}, + _config{config} { + auto function = network.getFunction(); + IE_ASSERT(function != nullptr); auto clonedFunction = ngraph::clone_function(*function); auto itDumpDotFile = _config.find(HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)); bool dumpDotFile = itDumpDotFile != _config.end() ? (itDumpDotFile->second == YES) : false; @@ -346,7 +80,7 @@ void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::CNNNetwork& netw if (queryNetworkResult.supportedLayersMap.empty()) { auto it = _config.find("TARGET_FALLBACK"); if (it != _config.end()) { - queryNetworkResult = _heteroPlugin->QueryNetwork(network_, _config); + queryNetworkResult = _heteroPlugin->QueryNetwork(network, _config); } else { THROW_IE_EXCEPTION << "The 'TARGET_FALLBACK' option was not defined for heterogeneous plugin"; } @@ -380,7 +114,6 @@ void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::CNNNetwork& netw std::unordered_set devices; NodeMap affinities; // Check that all nodes has user or plugin defined affinities - std::shared_ptr convertedNetwork; for (auto&& node : orderedOps) { auto itAffinity = queryNetworkResult.supportedLayersMap.find(node->get_friendly_name()); if (itAffinity != queryNetworkResult.supportedLayersMap.end()) { @@ -631,8 +364,8 @@ void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::CNNNetwork& netw std::move(std::begin(nextSubgraphs), std::end(nextSubgraphs), std::back_inserter(orderedSubgraphs)); } while (!allSubgraphs.empty()); - InputsDataMap externalInputsData = network_.getInputsInfo(); - OutputsDataMap externalOutputsData = network_.getOutputsInfo(); + InputsDataMap externalInputsData = network.getInputsInfo(); + OutputsDataMap externalOutputsData = network.getOutputsInfo(); networks.resize(orderedSubgraphs.size()); std::vector> subFunctions(orderedSubgraphs.size()); std::vector isInputSubnetwork(orderedSubgraphs.size()); @@ -689,21 +422,6 @@ void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::CNNNetwork& netw } } -HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwork& network, - const Engine::Configs& config, - Engine* plugin): - InferenceEngine::ExecutableNetworkThreadSafeDefault( - nullptr, std::make_shared()), - _heteroPlugin{plugin}, - _name{network.getName()}, - _config{config} { - if (network.getFunction() == nullptr) { - InitCNNImpl(network); - } else { - InitNgraph(network); - } -} - HeteroExecutableNetwork::HeteroExecutableNetwork(std::istream& heteroModel, const std::map& configs, Engine* heteroPlugin) : @@ -818,7 +536,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(std::istream& descs.emplace_back(NetworkDesc{ deviceName, - loaded ? CNNNetwork{cloneNet(static_cast(cnnnetwork))} : CNNNetwork{}, + loaded ? 
cnnnetwork : CNNNetwork{}, executableNetwork, }); } @@ -842,38 +560,27 @@ void HeteroExecutableNetwork::ExportImpl(std::ostream& heteroModel) { } auto subnetworksNode = heteroNode.append_child("subnetworks"); - std::map, ::CNNNetwork> convertedNetworks; for (auto&& subnetwork : networks) { - auto subnet = subnetwork._clonedNetwork; - if (subnet.getFunction()) { - subnet = convertedNetworks[subnet.getFunction()] = - InferenceEngine::CNNNetwork( - std::make_shared(subnetwork._clonedNetwork)); - } + auto subnetFunction = subnetwork._clonedNetwork.getFunction(); + IE_ASSERT(subnetFunction != nullptr); auto subnetworkNode = subnetworksNode.append_child("subnetwork"); subnetworkNode.append_attribute("device").set_value(subnetwork._device.c_str()); auto subnetworkInputsNode = subnetworkNode.append_child("inputs"); - auto inputInfo = subnet.getInputsInfo(); - for (auto&& input : inputInfo) { + for (auto&& parameter : subnetFunction->get_parameters()) { auto inputNode = subnetworkInputsNode.append_child("input"); - inputNode.append_attribute("name").set_value(input.first.c_str()); - inputNode.append_attribute("precision").set_value(input.second->getPrecision().name()); + inputNode.append_attribute("name").set_value(parameter->get_friendly_name().c_str()); + inputNode.append_attribute("precision").set_value(parameter->get_output_element_type(0).get_type_name().c_str()); } auto subnetworkOutputsNode = subnetworkNode.append_child("outputs"); - auto outputInfo = subnet.getOutputsInfo(); - for (auto&& output : outputInfo) { + for (auto&& result : subnetFunction->get_results()) { auto outputNode = subnetworkOutputsNode.append_child("output"); - auto creator = getCreatorLayer(output.second).lock(); - outputNode.append_attribute("creatorName").set_value(creator->name.c_str()); - outputNode.append_attribute("name").set_value(output.first.c_str()); - outputNode.append_attribute("precision").set_value(output.second->getPrecision().name()); - auto& outDatas = creator->outData; - auto itData = std::find_if(std::begin(outDatas), std::end(outDatas), [&] (const DataPtr& data) { - return output.first == data->getName(); - }); - IE_ASSERT(outDatas.end() != itData); - std::uint64_t index = std::distance(std::begin(outDatas), itData); - outputNode.append_attribute("index").set_value(std::to_string(index).c_str()); + auto sourceOutput = result->input_value(0); + outputNode.append_attribute("creatorName").set_value(sourceOutput.get_node()->get_friendly_name().c_str()); + outputNode.append_attribute("name").set_value( + (sourceOutput.get_node()->get_friendly_name() + + ((sourceOutput.get_node()->get_output_size() == 0) ? 
"" : std::to_string(sourceOutput.get_index()))).c_str()); + outputNode.append_attribute("precision").set_value(result->get_input_element_type(0).get_type_name().c_str()); + outputNode.append_attribute("index").set_value(std::to_string(sourceOutput.get_index()).c_str()); } } @@ -899,9 +606,6 @@ void HeteroExecutableNetwork::ExportImpl(std::ostream& heteroModel) { #else pugi::xml_document doc; auto subnet = subnetwork._clonedNetwork; - if (subnet.getFunction()) { - subnet = convertedNetworks[subnet.getFunction()]; - } auto dataSize = static_cast(InferenceEngine::Serialization::FillXmlDoc(subnet, doc)); doc.save(heteroModel, nullptr, pugi::format_raw); heteroModel << std::endl; diff --git a/inference-engine/src/hetero_plugin/hetero_executable_network.hpp b/inference-engine/src/hetero_plugin/hetero_executable_network.hpp index 11f8ea818df..7e5ce5eda2a 100644 --- a/inference-engine/src/hetero_plugin/hetero_executable_network.hpp +++ b/inference-engine/src/hetero_plugin/hetero_executable_network.hpp @@ -20,7 +20,6 @@ #include "hetero_infer_request.hpp" #include "ie_icore.hpp" -#include #include "hetero_async_infer_request.hpp" namespace HeteroPlugin { diff --git a/inference-engine/src/hetero_plugin/hetero_graph_splitter.cpp b/inference-engine/src/hetero_plugin/hetero_graph_splitter.cpp deleted file mode 100644 index cce3d30fb41..00000000000 --- a/inference-engine/src/hetero_plugin/hetero_graph_splitter.cpp +++ /dev/null @@ -1,236 +0,0 @@ -// Copyright (C) 2018-2020 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#include "hetero_graph_splitter.hpp" -#include "hetero_ade_util.hpp" - -#include -#include -#include -#include -#include -#include - -#include -#include - -#include -#include - -namespace InferenceEngine { - -namespace { -class ISplitChecker { -public: - struct GraphSelectionResult final { - static const constexpr std::size_t NoGraph - = static_cast(-1); - - std::size_t selectedGraph = NoGraph; - bool continueSelect = false; - }; - - virtual ~ISplitChecker() = default; - virtual GraphSelectionResult selectSubgraph( - const std::vector& subgraphs) = 0; -}; - -class DefaultSplitChecker : public ISplitChecker { -public: - // ISplitChecker interface - GraphSelectionResult selectSubgraph(const std::vector& subgraphs) override; -}; -} // namespace - -std::vector splitGraph(ICNNNetwork& network, - const std::vector& plugins) { - assert(!plugins.empty()); - ade::Graph gr; - ade::TypedGraph tgr(gr); - - std::vector tempSubgraphs; - LayersSet tempSet1; - LayersSet tempSet2; - - translateNetworkToAde(gr, network); - std::size_t currentChecker = 0; - - DefaultSplitChecker checker; - - auto getChecker = [&]() { - assert(currentChecker < plugins.size()); - return &checker; - }; - - auto getAffinity = [&]()->const std::string& { - assert(currentChecker < plugins.size()); - return plugins[currentChecker]; - }; - - auto nodes = gr.nodes(); - ade::subgraphs::NodesSet availableNodes(nodes.begin(), nodes.end()); - std::vector finalSubgraphs; - ade::SubgraphSelfReferenceChecker cycleChecker(nodes); - while (!availableNodes.empty()) { - auto subgraphs = ade::selectSubgraphs( - ade::util::filter(ade::util::toRange(availableNodes), - [&](const ade::NodeHandle& node) { - assert(nullptr != node); - auto layer = tgr.metadata(node).get().layer; - assert(nullptr != layer); - return layer->affinity == getAffinity(); - }), - [&]( - const ade::EdgeHandle& edge, - ade::SubgraphMergeDirection dir) { - assert(nullptr != edge); - auto dstNode = ade::getDstMergeNode(edge, dir); - assert(nullptr != dstNode); 
- if (!ade::util::contains(availableNodes, dstNode)) { - return false; - } - auto srcNode = ade::getSrcMergeNode(edge, dir); - assert(nullptr != srcNode); - auto srcLayer = tgr.metadata(srcNode).get().layer; - auto dstLayer = tgr.metadata(dstNode).get().layer; - assert(nullptr != srcLayer); - assert(nullptr != dstLayer); - return srcLayer->affinity == dstLayer->affinity; - }, - [&]( - const ade::subgraphs::NodesSet& acceptedNodes, - const ade::subgraphs::NodesSet& rejectedNodes) { - if (cycleChecker(acceptedNodes, rejectedNodes)) { - return false; - } - return true; - }); - - if (!subgraphs.empty()) { - if (plugins.size() == currentChecker) { - THROW_IE_EXCEPTION << "Some nodes weren't assigned to plugin"; - } - - tempSubgraphs.clear(); - for (auto&& subgraph : subgraphs) { - assert(!subgraph.empty()); - tempSet1.clear(); - for (auto&& node : subgraph) { - assert(nullptr != node); - auto layer = tgr.metadata(node).get().layer; - assert(nullptr != layer); - tempSet1.insert(layer); - } - tempSubgraphs.emplace_back(std::move(tempSet1)); - } - auto result = getChecker()->selectSubgraph(tempSubgraphs); - const auto selected = result.selectedGraph; - if (ISplitChecker::GraphSelectionResult::NoGraph != - selected) { - assert(selected < subgraphs.size()); - finalSubgraphs.emplace_back(std::move(tempSubgraphs[selected])); - - for (auto&& node : subgraphs[selected]) { - availableNodes.erase(node); - } - - if (result.continueSelect) { - continue; - } - } - } - ++currentChecker; - } - - return finalSubgraphs; -} - -ISplitChecker::GraphSelectionResult DefaultSplitChecker::selectSubgraph( - const std::vector& subgraphs) { - assert(!subgraphs.empty()); - std::size_t index = 0; - auto maxSize = subgraphs[0].size(); - for (auto i : ade::util::iota(std::size_t(1), subgraphs.size())) { - auto size = subgraphs[i].size(); - if (size > maxSize) { - index = 1; - maxSize = size; - } - } - GraphSelectionResult ret; - ret.selectedGraph = index; - ret.continueSelect = true; - return ret; -} - -namespace { -struct SubgraphDesc { - std::size_t topoIndex = static_cast(-1); - std::unordered_set dependsOn; -}; - -void topoVisitSubgraph(std::vector& subgraphs, - SubgraphDesc& subgraph, - std::size_t& topoIndex) { - if (subgraph.topoIndex != static_cast(-1)) { - assert(subgraph.topoIndex < topoIndex); - return; - } - - for (auto&& dep : subgraph.dependsOn) { - topoVisitSubgraph(subgraphs, subgraphs[dep], topoIndex); - } - subgraph.topoIndex = topoIndex; - ++topoIndex; -} -} // namespace - -void sortSubgraphs(std::vector& subgraphs) { - std::vector descs(subgraphs.size()); - - for (auto i : ade::util::iota(subgraphs.size())) { - auto& subgraph = subgraphs[i]; - assert(!subgraph.empty()); - for (auto&& layer : subgraph) { - assert(nullptr != layer); - for (auto&& dataIt : layer->insData) { - auto data = dataIt.lock(); - assert(nullptr != data); - auto prevLayer = getCreatorLayer(data).lock(); - if (nullptr != prevLayer) { - for (auto j : ade::util::iota(subgraphs.size())) { - if (i != j) { - if (ade::util::contains(subgraphs[j], prevLayer)) { - descs[i].dependsOn.insert(j); - break; - } - } - } - } - } - } - } - - { - std::size_t topoIndex = 0; - for (auto&& desc : descs) { - topoVisitSubgraph(descs, desc, topoIndex); - } - assert(subgraphs.size() == topoIndex); - } - - std::vector ret(subgraphs.size()); - for (auto i : ade::util::iota(subgraphs.size())) { - assert(i < descs.size()); - auto& desc = descs[i]; - auto topoIndex = desc.topoIndex; - assert(topoIndex != static_cast(-1)); - assert(topoIndex < ret.size()); - 
assert(!subgraphs[i].empty()); - ret[topoIndex] = std::move(subgraphs[i]); - } - subgraphs = std::move(ret); -} - -} // namespace InferenceEngine diff --git a/inference-engine/src/hetero_plugin/hetero_graph_splitter.hpp b/inference-engine/src/hetero_plugin/hetero_graph_splitter.hpp deleted file mode 100644 index fc4d2bce0c6..00000000000 --- a/inference-engine/src/hetero_plugin/hetero_graph_splitter.hpp +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (C) 2018-2020 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#pragma once - -#include -#include - -#include -#include -#include -#include -#include - -namespace InferenceEngine { -class ICNNNetwork; - -using LayersSet = std::unordered_set; - -/// Split network on subgraphs based on layer affinity -/// -/// @param network - source network -/// @param checkers - list of supported plugins -/// -/// @return list of subgraphs -std::vector -splitGraph(ICNNNetwork& network, - const std::vector& plugins); - -/// Sort sugraphs topologically, behaviour is undefined if there are circular -/// refences between subgraps -/// -/// @param subgraphs - list of subgraphs -void -sortSubgraphs(std::vector& subgraphs); - -} // namespace InferenceEngine - diff --git a/inference-engine/src/hetero_plugin/hetero_infer_request.cpp b/inference-engine/src/hetero_plugin/hetero_infer_request.cpp index 6690815540b..b4b60690816 100644 --- a/inference-engine/src/hetero_plugin/hetero_infer_request.cpp +++ b/inference-engine/src/hetero_plugin/hetero_infer_request.cpp @@ -5,7 +5,6 @@ #include "hetero_infer_request.hpp" #include "hetero_itt.hpp" #include -#include #include #include #include diff --git a/inference-engine/src/hetero_plugin/hetero_plugin.cpp b/inference-engine/src/hetero_plugin/hetero_plugin.cpp index 6726fc95f92..9c7af172eb3 100644 --- a/inference-engine/src/hetero_plugin/hetero_plugin.cpp +++ b/inference-engine/src/hetero_plugin/hetero_plugin.cpp @@ -57,29 +57,12 @@ InferenceEngine::ExecutableNetworkInternal::Ptr Engine::LoadExeNetworkImpl(const } DeviceMetaInformationMap metaDevices = GetDevicePlugins(it->second, tconfig); - if (network.getFunction()) { - auto allSupportsNgraph = - std::all_of(std::begin(metaDevices), std::end(metaDevices), - [&] (const DeviceMetaInformationMap::value_type& metaDevice) -> bool { - auto& deviceName = metaDevice.first; - try { GetCore()->QueryNetwork(network, deviceName, metaDevice.second); } - catch (const InferenceEngine::details::InferenceEngineException & ex) { - std::string message = ex.what(); - return message.find(NOT_IMPLEMENTED_str) == std::string::npos; - } - return true; - }); - if (!allSupportsNgraph) { - auto cnnNetworkImpl = std::make_shared(network); - IE_ASSERT(cnnNetworkImpl != nullptr); - return std::make_shared( - InferenceEngine::CNNNetwork(cnnNetworkImpl), mergeConfigs(_config, config), this); - } else { - return std::make_shared(network, mergeConfigs(_config, config), this); - } - } else { - return std::make_shared(network, mergeConfigs(_config, config), this); + auto function = network.getFunction(); + if (function == nullptr) { + THROW_IE_EXCEPTION << "HETERO plugin supports just ngraph network representation"; } + + return std::make_shared(network, mergeConfigs(_config, config), this); } ExecutableNetwork Engine::ImportNetworkImpl(std::istream& heteroModel, const Configs& config) { @@ -141,59 +124,6 @@ void Engine::SetConfig(const Configs &configs) { } } -HeteroLayerColorer::HeteroLayerColorer(const std::vector& devices) { - static const std::vector colors = {"#5A5DF0", "#20F608", "#F1F290", 
"#11F110"}; - for (auto&& device : devices) { - deviceColorMap[device] = colors[std::distance(&device, devices.data()) % colors.size()]; - } -} - -void HeteroLayerColorer::operator()(const CNNLayerPtr layer, - ordered_properties &printed_properties, - ordered_properties &node_properties) { - auto device = layer->affinity; - printed_properties.insert(printed_properties.begin(), std::make_pair("device", device)); - node_properties.emplace_back("fillcolor", deviceColorMap[device]); -} - -void Engine::SetAffinity(const InferenceEngine::CNNNetwork &network, const Configs &config) { - QueryNetworkResult qr = QueryNetwork(network, config); - - details::CNNNetworkIterator i(network); - while (i != details::CNNNetworkIterator()) { - CNNLayer::Ptr layer = *i; - auto it = qr.supportedLayersMap.find(layer->name); - if (it != qr.supportedLayersMap.end()) { - layer->affinity = it->second; - } - i++; - } - - auto dumpDot = [](const Configs & config) { - auto it = config.find(HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)); - return it != config.end() ? it->second == YES : false; - }; - - if (dumpDot(config) || dumpDot(_config)) { - std::unordered_set devicesSet; - details::CNNNetworkIterator i(network); - while (i != details::CNNNetworkIterator()) { - CNNLayer::Ptr layer = *i; - if (!layer->affinity.empty()) { - devicesSet.insert(layer->affinity); - } - i++; - } - std::vector devices{std::begin(devicesSet), std::end(devicesSet)}; - std::stringstream stream(std::stringstream::out); - stream << "hetero_affinity_" << network.getName() << ".dot"; - - std::ofstream file(stream.str()); - saveGraphToDot(static_cast(network), - file, HeteroLayerColorer{devices}); - } -} - QueryNetworkResult Engine::QueryNetwork(const CNNNetwork &network, const Configs& config) const { QueryNetworkResult qr; @@ -210,40 +140,15 @@ QueryNetworkResult Engine::QueryNetwork(const CNNNetwork &network, const Configs std::string fallbackDevicesStr = it->second; DeviceMetaInformationMap metaDevices = GetDevicePlugins(fallbackDevicesStr, tconfig); - std::map queryResults; - auto queryNetwork = [&] (const InferenceEngine::CNNNetwork & networkObject) { - // go over devices and call query network - for (auto&& metaDevice : metaDevices) { - auto& deviceName = metaDevice.first; - queryResults[deviceName] = GetCore()->QueryNetwork(networkObject, deviceName, metaDevice.second); - } - return queryResults; - }; + auto function = network.getFunction(); + if (function == nullptr) { + THROW_IE_EXCEPTION << "HETERO plugin supports just ngraph network representation"; + } - if (network.getFunction()) { - auto allSupportsNgraph = - std::all_of(std::begin(metaDevices), std::end(metaDevices), - [&] (const DeviceMetaInformationMap::value_type& metaDevice) -> bool { - auto& deviceName = metaDevice.first; - try { GetCore()->QueryNetwork(network, deviceName, metaDevice.second); } - catch (const InferenceEngine::details::InferenceEngineException & ex) { - std::string message = ex.what(); - return message.find(NOT_IMPLEMENTED_str) == std::string::npos; - } - return true; - }); - if (!allSupportsNgraph) { - if (contains(tconfig, CONFIG_KEY_INTERNAL(AGGREGATED_PLUGIN))) { - THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str; - } else { - auto cnnNetworkImpl = std::make_shared(network); - queryNetwork(InferenceEngine::CNNNetwork(cnnNetworkImpl)); - } - } else { - queryNetwork(network); - } - } else { - queryNetwork(network); + std::map queryResults; + for (auto&& metaDevice : metaDevices) { + auto& deviceName = metaDevice.first; + queryResults[deviceName] = 
GetCore()->QueryNetwork(network, deviceName, metaDevice.second);
+    }
 
     // WARNING: Here is devices with user set priority
diff --git a/inference-engine/src/hetero_plugin/hetero_plugin.hpp b/inference-engine/src/hetero_plugin/hetero_plugin.hpp
index 5fb6c7ffe88..c44b0e7e953 100644
--- a/inference-engine/src/hetero_plugin/hetero_plugin.hpp
+++ b/inference-engine/src/hetero_plugin/hetero_plugin.hpp
@@ -13,7 +13,6 @@
 #include 
 #include 
 #include 
-#include 
 
 namespace HeteroPlugin {
 
@@ -40,24 +39,10 @@ public:
     ExecutableNetwork ImportNetworkImpl(std::istream& heteroModel,
                                         const Configs& config) override;
-
-    void SetAffinity(const InferenceEngine::CNNNetwork& network, const Configs &config);
-
     DeviceMetaInformationMap GetDevicePlugins(const std::string& targetFallback,
                                               const Configs & localConfig) const;
 
 private:
     Configs GetSupportedConfig(const Configs& config, const std::string & deviceName) const;
 };
-
-struct HeteroLayerColorer {
-    explicit HeteroLayerColorer(const std::vector& devices);
-
-    void operator() (const CNNLayerPtr layer,
-                     ordered_properties &printed_properties,
-                     ordered_properties &node_properties);
-
-    std::unordered_map deviceColorMap;
-};
-
 
 } // namespace HeteroPlugin
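
Usage note (illustrative, not part of the patch): after this change the HETERO plugin accepts only networks that carry an ngraph function (IR v10 models); anything else is rejected with "HETERO plugin supports just ngraph network representation". A minimal sketch of driving the simplified plugin through the public Inference Engine API, assuming a hypothetical model.xml in IR v10 form and that GPU and CPU plugins are available:

    // Sketch only: "model.xml" and the GPU,CPU fallback list are assumptions,
    // not something this patch prescribes.
    #include <ie_core.hpp>
    #include <ie_plugin_config.hpp>
    #include <hetero/hetero_plugin_config.hpp>

    int main() {
        InferenceEngine::Core core;
        // Must be an IR v10 model so that CNNNetwork::getFunction() is non-null.
        InferenceEngine::CNNNetwork network = core.ReadNetwork("model.xml");
        // Device priorities can also be supplied via the "TARGET_FALLBACK" key;
        // DUMP_GRAPH_DOT enables the .dot graph dumps referenced in the sources above.
        auto executableNetwork = core.LoadNetwork(network, "HETERO:GPU,CPU",
            {{HETERO_CONFIG_KEY(DUMP_GRAPH_DOT), CONFIG_VALUE(YES)}});
        auto request = executableNetwork.CreateInferRequest();
        request.Infer();  // input blobs left at their defaults for brevity
        return 0;
    }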