openvino/docs/snippets/HETERO1.cpp

#include <inference_engine.hpp>
#include <ngraph/ngraph.hpp>
#include <ngraph/function.hpp>
#include "hetero/hetero_plugin_config.hpp"
int main() {
using namespace InferenceEngine;
using namespace ngraph;
//! [part1]
InferenceEngine::Core core;
auto network = core.ReadNetwork("sample.xml");
auto function = network.getFunction();
// This example demonstrates how to perform default affinity initialization and then
// correct affinity manually for some layers
const std::string device = "HETERO:FPGA,CPU";
// QueryNetworkResult object contains map layer -> device
InferenceEngine::QueryNetworkResult res = core.QueryNetwork(network, device, { });
// update default affinities
res.supportedLayersMap["layerName"] = "CPU";
// set affinities to network
for (auto&& node : function->get_ops()) {
auto& affinity = res.supportedLayersMap[node->get_friendly_name()];
// Store affinity mapping using node runtime information
node->get_rt_info()["affinity"] = std::make_shared<ngraph::VariantWrapper<std::string>>(affinity);
}
// load network with affinities set before
auto executable_network = core.LoadNetwork(network, device);
//! [part1]
return 0;
}
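
For reference, a minimal standalone sketch (assuming the same hypothetical sample.xml model and the "HETERO:FPGA,CPU" device string) that only inspects the default layer-to-device assignment returned by QueryNetwork, without overriding any affinity. supportedLayersMap is iterated with the standard std::map interface; this is not part of the snippet above, just an illustration.

#include <iostream>
#include <inference_engine.hpp>

int main() {
    InferenceEngine::Core core;
    // "sample.xml" is a placeholder model path, as in the snippet above
    auto network = core.ReadNetwork("sample.xml");
    // supportedLayersMap maps layer name -> device chosen by the HETERO plugin
    InferenceEngine::QueryNetworkResult res = core.QueryNetwork(network, "HETERO:FPGA,CPU", { });
    for (auto&& item : res.supportedLayersMap) {
        std::cout << item.first << " -> " << item.second << std::endl;
    }
    return 0;
}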