// Changelog notes (from commit history):
// * CPU device documentation refresh
// * Bfloat16 inference page aligned with the new API
// * Bfloat16 inference section moved to CPU main
// * First review comments applied
// * Second review step comments applied
// * OneDNN reference changed to the GitHub page
// * AvgPool added to the oneDNN ops list
#include <openvino/runtime/core.hpp>
int main() {
|
|
{
|
|
//! [defined_upper_bound]
|
|
ov::Core core;
|
|
auto model = core.read_model("model.xml");
|
|
|
|
model->reshape({{ov::Dimension(1, 10), ov::Dimension(1, 20), ov::Dimension(1, 30), ov::Dimension(1, 40)}});
|
|
//! [defined_upper_bound]
|
|
}
|
|
|
|
{
|
|
//! [static_shape]
|
|
ov::Core core;
|
|
auto model = core.read_model("model.xml");
|
|
ov::Shape static_shape = {10, 20, 30, 40};
|
|
|
|
model->reshape(static_shape);
|
|
//! [static_shape]
|
|
}
|
|
|
|
return 0;
|
|
}