Renamed ov::Function to ov::Model (#9051)
* Renamed ov::Function to ov::Model
* Fixed all for macos
* Fixed build
* Fixed build
* Revert changes in GPU plugin
* Fixed ngraphFunctions
* Fixed all for mac
* Fixed new test
* Fixed if for Windows
* Fixed unit tests and renamed Function in python API
* Fixed code style
* Fixed import
* Fixed conflict
* Fixed merge issues
This commit is contained in:
@@ -94,7 +94,7 @@ int main(int argc, char* argv[]) {
     // -------- Step 2. Read a model --------
     slog::info << "Loading model files:" << slog::endl << FLAGS_m << slog::endl;
-    std::shared_ptr<ov::Function> model = core.read_model(FLAGS_m);
+    std::shared_ptr<ov::Model> model = core.read_model(FLAGS_m);

     OPENVINO_ASSERT(model->get_parameters().size() == 1, "Sample supports models with 1 input only");
     OPENVINO_ASSERT(model->get_results().size() == 1, "Sample supports models with 1 output only");

||||
@@ -38,4 +38,4 @@ void processLayout(InferenceEngine::CNNNetwork& network,
                    const std::string& iol);

 void printInputAndOutputsInfo(const InferenceEngine::CNNNetwork& network);
-void printInputAndOutputsInfo(const ov::Function& network);
+void printInputAndOutputsInfo(const ov::Model& network);
@@ -327,7 +327,7 @@ void printInputAndOutputsInfo(const InferenceEngine::CNNNetwork& network) {
     }
 }

-void printInputAndOutputsInfo(const ov::Function& network) {
+void printInputAndOutputsInfo(const ov::Model& network) {
     slog::info << "model name: " << network.get_friendly_name() << slog::endl;

     const std::vector<ov::Output<const ov::Node>> inputs = network.inputs();
||||
@@ -104,7 +104,7 @@ ov::runtime::Tensor ReadWeights(const std::string& filepath) {
  * @brief Create ngraph function
  * @return Ptr to ngraph function
  */
-std::shared_ptr<ov::Function> createNgraphFunction() {
+std::shared_ptr<ov::Model> createNgraphFunction() {
     auto weights = ReadWeights(FLAGS_m);
     const std::uint8_t* data = weights.data<std::uint8_t>();

||||
@@ -234,14 +234,14 @@ std::shared_ptr<ov::Function> createNgraphFunction() {
     // ------- OpenVINO function--
     auto result_full = std::make_shared<opset8::Result>(softMaxNode->output(0));

-    std::shared_ptr<ov::Function> fnPtr =
-        std::make_shared<ov::Function>(result_full, ov::ParameterVector{paramNode}, "lenet");
+    std::shared_ptr<ov::Model> fnPtr =
+        std::make_shared<ov::Model>(result_full, ov::ParameterVector{paramNode}, "lenet");

     return fnPtr;
 }

 /**
- * @brief The entry point for inference engine automatic ov::Function
+ * @brief The entry point for inference engine automatic ov::Model
  * creation sample
  * @file ngraph_function_creation_sample/main.cpp
  * @example ngraph_function_creation_sample/main.cpp
||||
@@ -268,7 +268,7 @@ int main(int argc, char* argv[]) {
     slog::info << "Device info: " << slog::endl;
     slog::info << core.get_versions(FLAGS_d) << slog::endl;

-    // -------- Step 2. Create network using ov::Function --------
+    // -------- Step 2. Create network using ov::Model --------

     auto model = createNgraphFunction();

||||
Reference in New Issue
Block a user