Revert "Add constant folding to hetero to avoid dynamism on GPU (#10572)" (#11370)

This reverts commit 5b18677f1b.
Author: Ilya Churaev (committed via GitHub)
Date: 2022-04-01 10:16:14 +03:00
Parent commit: 070f27a089
Commit: 8ab5dbade0

File diff follows.
@@ -39,7 +39,6 @@
#include "plugin.hpp"
#include <ie_algorithm.hpp>
#include <ngraph/pass/manager.hpp>
#include <ngraph/function.hpp>
#include <ngraph/variant.hpp>
#include <ngraph/graph_util.hpp>
@@ -48,7 +47,6 @@
#include <ngraph/op/util/op_types.hpp>
#include <ngraph/rt_info.hpp>
#include <ngraph/pass/visualize_tree.hpp>
#include <ngraph/pass/constant_folding.hpp>
// clang-format on
using namespace InferenceEngine;
@@ -60,22 +58,17 @@ using namespace InferenceEngine::HeteroConfigParams;
template <typename T>
using NodeMap = std::unordered_map<ngraph::Node*, T>;
HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwork& originalNetwork,
HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwork& network,
const Engine::Configs& config,
Engine* plugin)
: InferenceEngine::ExecutableNetworkThreadSafeDefault(nullptr,
std::make_shared<InferenceEngine::ImmediateExecutor>()),
_heteroPlugin{plugin},
_name{originalNetwork.getName()},
_name{network.getName()},
_config{config} {
auto clonned_network = InferenceEngine::details::cloneNetwork(originalNetwork);
auto clonned_function = clonned_network.getFunction();
IE_ASSERT(clonned_function != nullptr);
ngraph::pass::Manager manager;
manager.register_pass<ngraph::pass::ConstantFolding>();
manager.run_passes(clonned_function);
auto function = network.getFunction();
IE_ASSERT(function != nullptr);
auto clonedFunction = ngraph::clone_function(*function);
bool dumpDotFile = false;
if (std::getenv("OPENVINO_HETERO_VISUALIZE")) {
dumpDotFile = true;
@@ -85,7 +78,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwo
}
QueryNetworkResult queryNetworkResult;
auto orderedOps = clonned_function->get_ordered_ops();
auto orderedOps = clonedFunction->get_ordered_ops();
bool allEmpty = true;
// Get user defined affinity
for (auto&& node : orderedOps) {
@@ -104,7 +97,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwo
it = _config.find(ov::device::priorities.name());
}
if (it != _config.end()) {
queryNetworkResult = _heteroPlugin->QueryNetwork(clonned_network, _config);
queryNetworkResult = _heteroPlugin->QueryNetwork(network, _config);
} else {
IE_THROW() << "The '" << ov::device::priorities.name()
<< "' option was not defined for heterogeneous plugin";
@@ -120,7 +113,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwo
};
// Set results, constants and parameters affinity
for (auto&& node : clonned_function->get_ops()) {
for (auto&& node : clonedFunction->get_ops()) {
if (ngraph::op::is_constant(node) || ngraph::op::is_output(node) || ngraph::op::is_parameter(node)) {
if (!contains(queryNetworkResult.supportedLayersMap, node->get_friendly_name())) {
auto& nodeWithAffinityName =
@@ -201,7 +194,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwo
colorIndex++;
}
}}
.run_on_model(ngraph::clone_function(*clonned_function));
.run_on_model(ngraph::clone_function(*function));
}
NodeMap<InputSet> nodeInputDependencies;
@@ -334,7 +327,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwo
itLabel->pop_back();
(*itLabel) += label;
}}
.run_on_model(std::const_pointer_cast<ov::Model>(clonned_function));
.run_on_model(std::const_pointer_cast<ov::Model>(function));
}
// Break graph using insertion of result parameter split
@@ -451,8 +444,8 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(const InferenceEngine::CNNNetwo
std::move(std::begin(newOrderedSubgraphs), std::end(newOrderedSubgraphs), std::back_inserter(orderedSubgraphs));
} while (!allSubgraphs.empty());
InputsDataMap externalInputsData = clonned_network.getInputsInfo();
OutputsDataMap externalOutputsData = clonned_network.getOutputsInfo();
InputsDataMap externalInputsData = network.getInputsInfo();
OutputsDataMap externalOutputsData = network.getOutputsInfo();
_networks.resize(orderedSubgraphs.size());
std::vector<std::shared_ptr<ngraph::Function>> subFunctions(orderedSubgraphs.size());
int id = 0;