#include <openvino/runtime/core.hpp>

int main() {
{
    //! [compile_model_default]
    ov::Core core;
    // Read the model from IR and compile it for the CPU device
    auto model = core.read_model("model.xml");
    auto compiled_model = core.compile_model(model, "CPU");
    //! [compile_model_default]
}

{
    //! [compile_model_multi]
    ov::Core core;
    auto model = core.read_model("model.xml");
    // MULTI distributes inference requests across the CPU and the first GPU device
    auto compiled_model = core.compile_model(model, "MULTI:CPU,GPU.0");
    //! [compile_model_multi]
}
    return 0;
}