#include <string>

#include <ie_core.hpp>

#include <ngraph/function.hpp>
#include <ngraph/variant.hpp>
int main() {
|
|
|
|
|
using namespace InferenceEngine;
|
|
|
|
|
using namespace ngraph;
|
|
|
|
|
//! [part1]
|
|
|
|
|
InferenceEngine::Core core;
|
|
|
|
|
auto network = core.ReadNetwork("sample.xml");
|
|
|
|
|
auto function = network.getFunction();
|
|
|
|
|
|
|
|
|
|
// This example demonstrates how to perform default affinity initialization and then
|
|
|
|
|
// correct affinity manually for some layers
|
2021-09-13 14:01:49 +03:00
|
|
|
const std::string device = "HETERO:GPU,CPU";
|
2020-09-22 18:01:48 +03:00
|
|
|
|
|
|
|
|
// QueryNetworkResult object contains map layer -> device
|
|
|
|
|
InferenceEngine::QueryNetworkResult res = core.QueryNetwork(network, device, { });
|
|
|
|
|
|
|
|
|
|
// update default affinities
|
|
|
|
|
res.supportedLayersMap["layerName"] = "CPU";
|
|
|
|
|
|
|
|
|
|
// set affinities to network
|
|
|
|
|
for (auto&& node : function->get_ops()) {
|
|
|
|
|
auto& affinity = res.supportedLayersMap[node->get_friendly_name()];
|
|
|
|
|
// Store affinity mapping using node runtime information
|
2021-12-09 17:19:18 +03:00
|
|
|
node->get_rt_info()["affinity"] = affinity;
|
2020-09-22 18:01:48 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// load network with affinities set before
|
|
|
|
|
auto executable_network = core.LoadNetwork(network, device);
|
|
|
|
|
//! [part1]
|
|
|
|
|
return 0;
|
|
|
|
|
}
|