#include <ie_core.hpp>

int main() {
|
2020-10-10 11:19:16 +03:00
|
|
|
|
InferenceEngine::Core core;
|
2020-09-22 18:01:48 +03:00
|
|
|
|
int numRequests = 42;
|
|
|
|
|
|
int i = 1;
|
|
|
|
|
|
auto network = core.ReadNetwork("sample.xml");
|
|
|
|
|
|
auto executable_network = core.LoadNetwork(network, "CPU");
|
|
|
|
|
|
//! [part0]
|
|
|
|
|
|
struct Request {
|
2021-04-21 16:05:30 +03:00
|
|
|
|
InferenceEngine::InferRequest inferRequest;
|
2020-09-22 18:01:48 +03:00
|
|
|
|
int frameidx;
|
|
|
|
|
|
};
|
|
|
|
|
|
//! [part0]
|
|
|
|
|
|
|
|
|
|
|
|
//! [part1]
|
|
|
|
|
|
// numRequests is the number of frames (max size, equal to the number of VPUs in use)
|
|
|
|
|
|
std::vector<Request> request(numRequests);
|
|
|
|
|
|
//! [part1]
|
|
|
|
|
|
|
|
|
|
|
|
//! [part2]
|
|
|
|
|
|
// initialize infer request pointer – Consult IE API for more detail.
|
2021-04-21 16:05:30 +03:00
|
|
|
|
request[i].inferRequest = executable_network.CreateInferRequest();
|
2020-09-22 18:01:48 +03:00
|
|
|
|
//! [part2]
|
|
|
|
|
|
|
|
|
|
|
|
//! [part3]
|
|
|
|
|
|
// Run inference
|
2021-04-21 16:05:30 +03:00
|
|
|
|
request[i].inferRequest.StartAsync();
|
2020-09-22 18:01:48 +03:00
|
|
|
|
//! [part3]
|
|
|
|
|
|
|
|
|
|
|
|
//! [part4]
|
2021-04-21 16:05:30 +03:00
|
|
|
|
request[i].inferRequest.SetCompletionCallback([] () {});
|
2020-09-22 18:01:48 +03:00
|
|
|
|
//! [part4]
|
|
|
|
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
|
|
}
|