Removed unused files (#12872)

This commit is contained in:
Ilya Lavrenov 2022-09-02 20:38:59 +04:00 committed by GitHub
parent 142cfd173a
commit 0ad91f040c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 0 additions and 73 deletions

View File

@ -1,14 +0,0 @@
#include <ie_core.hpp>
#include <ngraph/function.hpp>
#include <ngraph/variant.hpp>
// Reads a model and pins every operation to the CPU device by writing
// an "affinity" entry into each node's runtime-info map.
int main() {
    InferenceEngine::Core core;
    auto network = core.ReadNetwork("sample.xml");
    auto function = network.getFunction();
    //! [part0]
    for (const auto& op : function->get_ops()) {
        op->get_rt_info()["affinity"] = "CPU";
    }
    //! [part0]
    return 0;
}

View File

@ -1,34 +0,0 @@
#include <ie_core.hpp>
#include <ngraph/function.hpp>
#include <ngraph/variant.hpp>
// Demonstrates manual correction of HETERO affinities: query the plugin
// for its default layer->device mapping, adjust it, record the result in
// each node's runtime info, then compile the network on the HETERO device.
int main() {
    using namespace InferenceEngine;
    using namespace ngraph;
    //! [part1]
    InferenceEngine::Core core;
    auto network = core.ReadNetwork("sample.xml");
    auto function = network.getFunction();
    // Start from the default affinity assignment computed by the plugins,
    // then override it for selected layers.
    const std::string device = "HETERO:GPU,CPU";
    // QueryNetwork fills a layer-name -> device-name map
    InferenceEngine::QueryNetworkResult res = core.QueryNetwork(network, device, { });
    // Override the default assignment for a specific layer
    res.supportedLayersMap["layerName"] = "CPU";
    // Propagate the (possibly edited) mapping into node runtime information,
    // which is where the HETERO plugin reads affinities from.
    for (const auto& op : function->get_ops()) {
        op->get_rt_info()["affinity"] = res.supportedLayersMap[op->get_friendly_name()];
    }
    // Compile the network; the affinities stored above take effect here.
    auto executable_network = core.LoadNetwork(network, device);
    //! [part1]
    return 0;
}

View File

@ -1,11 +0,0 @@
#include <ie_core.hpp>
// Compiles a model on the HETERO virtual device, with GPU as the primary
// target and CPU as the fallback.
int main() {
    using namespace InferenceEngine;
    //! [part2]
    InferenceEngine::Core core;
    auto network = core.ReadNetwork("sample.xml");
    auto exec_net = core.LoadNetwork(network, "HETERO:GPU,CPU");
    //! [part2]
    return 0;
}

View File

@ -1,14 +0,0 @@
#include <ie_core.hpp>
int main() {
using namespace InferenceEngine;
//! [part3]
using namespace InferenceEngine::PluginConfigParams;
using namespace InferenceEngine::HeteroConfigParams;
// ...
InferenceEngine::Core core;
core.SetConfig({ { KEY_HETERO_DUMP_GRAPH_DOT, YES } }, "HETERO");
//! [part3]
return 0;
}