#include <openvino/runtime/core.hpp>

int main() {
    //! [part1]
    ov::Core core;
    // Read the model from its IR file
    auto network = core.read_model("sample.xml");
    // Compile the model for the CPU device
    auto exec_network = core.compile_model(network, "CPU");
    // Query which inference precision the CPU plugin actually selected
    auto inference_precision = exec_network.get_property(ov::hint::inference_precision);
    //! [part1]
    return 0;
}
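The snippet above only reads back the precision the CPU plugin chose on its own. As a companion, here is a minimal sketch of the inverse direction, assuming the standard OpenVINO 2.0 property API: passing the same ov::hint::inference_precision property at compile time to request bf16 explicitly rather than relying on the plugin default.

#include <openvino/runtime/core.hpp>

int main() {
    ov::Core core;
    auto network = core.read_model("sample.xml");
    // Sketch (assumption): explicitly request bf16 inference precision
    // via the same ov::hint::inference_precision property used above.
    auto exec_network = core.compile_model(
        network, "CPU", ov::hint::inference_precision(ov::element::bf16));
    return 0;
}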