Files
openvino/docs/snippets/cpu/Bfloat16Inference1.cpp
2023-03-29 18:59:33 +04:00

14 lines
323 B
C++

#include <openvino/runtime/core.hpp>
// Doc snippet: query the effective inference precision of a model compiled
// for the CPU device, using the OpenVINO 2.0 (ov::) API only.
// NOTE: the legacy `using namespace InferenceEngine;` was removed — the
// snippet uses no Inference Engine symbols, and that namespace is not
// declared by <openvino/runtime/core.hpp>.
int main() {
//! [part1]
ov::Core core;
// Read the model from disk (IR .xml; the companion .bin is found implicitly).
auto model = core.read_model("sample.xml");
// Compile the model for the CPU device.
auto compiled_model = core.compile_model(model, "CPU");
// Query the precision actually selected for inference (e.g. bf16 vs f32).
auto inference_precision = compiled_model.get_property(ov::hint::inference_precision);
//! [part1]
return 0;
}