MO: progress bar implementation for new frontends (#9381)

This commit is contained in:
Tomasz Dołbniak 2022-01-03 14:04:45 +01:00 committed by GitHub
parent c26a904c6e
commit 326289265b
21 changed files with 312 additions and 93 deletions

View File

@ -43,6 +43,7 @@ from openvino.pyopenvino import Place
from openvino.pyopenvino import TelemetryExtension
from openvino.pyopenvino import DecoderTransformationExtension
from openvino.pyopenvino import JsonConfigExtension
from openvino.pyopenvino import ProgressReporterExtension
# exceptions
from openvino.pyopenvino import NotImplementedFailure

View File

@ -11,6 +11,7 @@
#include "manager.hpp"
#include "openvino/frontend/exception.hpp"
#include "openvino/frontend/extension/decoder_transformation.hpp"
#include "openvino/frontend/extension/progress_reporter_extension.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
#include "pyopenvino/graph/model.hpp"
@ -54,3 +55,26 @@ void regclass_frontend_JsonConfigExtension(py::module m) {
return std::make_shared<ov::frontend::JsonConfigExtension>(path);
}));
}
void regclass_frontend_ProgressReporterExtension(py::module m) {
py::class_<ProgressReporterExtension, std::shared_ptr<ProgressReporterExtension>, ov::Extension> ext{
m,
"ProgressReporterExtension",
py::dynamic_attr()};
ext.doc() = "An extension class intended to be used as a progress reporting utility";
ext.def(py::init([]() {
return std::make_shared<ProgressReporterExtension>();
}));
ext.def(py::init([](const ProgressReporterExtension::progress_notifier_callback& callback) {
return std::make_shared<ProgressReporterExtension>(callback);
}));
ext.def(py::init([](ProgressReporterExtension::progress_notifier_callback&& callback) {
return std::make_shared<ProgressReporterExtension>(std::move(callback));
}));
ext.def("report_progress", &ProgressReporterExtension::report_progress);
}
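The binding above exposes the same constructors and report_progress method as the C++ extension. As a usage illustration only (not part of this change-set), a minimal Python sketch of driving a console progress report during an ONNX conversion could look like the following; the load_by_framework/load/convert flow and the model path are assumptions for the example, while add_extension mirrors how the MO changes further down hook the extension up.

# Sketch only: not part of this commit. Assumes the standard FrontEndManager flow
# and a hypothetical "model.onnx" path.
from openvino.frontend import FrontEndManager, ProgressReporterExtension

def print_progress(progress: float, total_steps: int, completed_steps: int):
    # The callback receives the overall progress in [0.0, 1.0] plus step counters.
    print("{}/{} steps done ({:.1f}%)".format(completed_steps, total_steps, progress * 100))

fem = FrontEndManager()
fe = fem.load_by_framework("onnx")
fe.add_extension(ProgressReporterExtension(print_progress))
input_model = fe.load("model.onnx")   # hypothetical model path
ov_model = fe.convert(input_model)    # progress is reported while the graph is converted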

View File

@ -11,3 +11,4 @@ namespace py = pybind11;
void regclass_frontend_TelemetryExtension(py::module m);
void regclass_frontend_DecoderTransformationExtension(py::module m);
void regclass_frontend_JsonConfigExtension(py::module m);
void regclass_frontend_ProgressReporterExtension(py::module m);

View File

@ -140,6 +140,7 @@ PYBIND11_MODULE(pyopenvino, m) {
regclass_frontend_TelemetryExtension(m);
regclass_frontend_DecoderTransformationExtension(m);
regclass_frontend_JsonConfigExtension(m);
regclass_frontend_ProgressReporterExtension(m);
regmodule_offline_transformations(m);
}

View File

@ -467,7 +467,7 @@ set(MULTI_TEST_SRC
backend/sqrt.in.cpp
)
set(OP_EVAL_TEST_SRC
# It should be a part of template plugin
op_eval/binary_convolution.cpp
op_eval/bucketize.cpp
@ -518,7 +518,8 @@ endif()
# SOURCE FOR FRONTEND TESTING
file(GLOB FRONTEND_TESTS_SRC ${CMAKE_CURRENT_SOURCE_DIR}/frontend/frontend_manager.cpp
${CMAKE_CURRENT_SOURCE_DIR}/frontend/decoder_transformation_extension.cpp)
${CMAKE_CURRENT_SOURCE_DIR}/frontend/decoder_transformation_extension.cpp
${CMAKE_CURRENT_SOURCE_DIR}/frontend/progress_reporter.cpp)
list(APPEND SRC ${FRONTEND_TESTS_SRC})
foreach(src IN LISTS SRC MULTI_TEST_SRC OP_EVAL_TEST_SRC)

View File

@ -0,0 +1,75 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "gtest/gtest.h"
#include "openvino/frontend/exception.hpp"
#include "openvino/frontend/extension/progress_reporter_extension.hpp"
using namespace ov::frontend;
TEST(ProgressReporter_Callables, LambdaReporter) {
const auto lambda = [](float progress, unsigned int total, unsigned int completed) {
EXPECT_NEAR(progress, 0.5, 0.0001);
EXPECT_EQ(total, 100);
EXPECT_EQ(completed, 50);
};
ProgressReporterExtension ext{lambda};
ext.report_progress(0.5, 100, 50);
}
TEST(ProgressReporter_Callables, RvalueLambdaReporter) {
ProgressReporterExtension ext{[](float progress, unsigned int total, unsigned int completed) {
EXPECT_NEAR(progress, 0.5, 0.0001);
EXPECT_EQ(total, 100);
EXPECT_EQ(completed, 50);
}};
ext.report_progress(0.5, 100, 50);
}
TEST(ProgressReporter_Callables, StructReporter) {
struct ProgressConsumer {
void operator()(float progress, unsigned int total, unsigned int completed) {
EXPECT_NEAR(progress, 0.5675, 0.0001);
EXPECT_EQ(total, 37);
EXPECT_EQ(completed, 21);
}
};
ProgressConsumer consumer;
ProgressReporterExtension ext{consumer};
ext.report_progress(0.5675, 37, 21);
}
namespace {
void function_reporter(float progress, unsigned int total, unsigned int completed) {
EXPECT_NEAR(progress, 0.2574, 0.0001);
EXPECT_EQ(total, 101);
EXPECT_EQ(completed, 26);
}
void reporter_stub(float, unsigned int, unsigned int) {}
} // namespace
TEST(ProgressReporter_Callables, FunctionReporter) {
ProgressReporterExtension ext{function_reporter};
ext.report_progress(0.2574, 101, 26);
}
TEST(ProgressReporter, ReportMoreStepsThanTotal) {
ProgressReporterExtension ext{reporter_stub};
EXPECT_THROW(ext.report_progress(0.0, 100, 101), ov::frontend::GeneralFailure);
}
TEST(ProgressReporter, ReportMoreThan100Percent) {
ProgressReporterExtension ext{reporter_stub};
EXPECT_THROW(ext.report_progress(1.00001, 100, 50), ov::frontend::GeneralFailure);
}
TEST(ProgressReporter, ReportLessThanZeroPercent) {
ProgressReporterExtension ext{reporter_stub};
EXPECT_THROW(ext.report_progress(-100.0, 100, 50), ov::frontend::GeneralFailure);
}

View File

@ -0,0 +1,18 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "openvino/frontend/extension/progress_reporter_extension.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
namespace ov {
namespace frontend {
struct ExtensionHolder {
ExtensionHolder() : progress_reporter{std::make_shared<ProgressReporterExtension>()} {}
std::shared_ptr<ProgressReporterExtension> progress_reporter;
std::shared_ptr<TelemetryExtension> telemetry;
};
} // namespace frontend
} // namespace ov

View File

@ -0,0 +1,39 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "openvino/core/extension.hpp"
#include "openvino/frontend/visibility.hpp"
namespace ov {
namespace frontend {
class FRONTEND_API ProgressReporterExtension : public ov::Extension {
public:
/// \brief A progress reporting callback signature. A function object matching this signature should be passed
/// to the constructor of this extension. The extension will then invoke it as a callback each time
/// progress needs to be reported. The callback itself is responsible for consuming the reported values.
///
/// \param progress A float value in the range [0.0, 1.0] indicating the total progress of an operation.
/// \param total_steps The total number of steps that a given instance of this extension is tracking
/// \param completed_steps The current number of completed steps (out of the total number of steps to take)
using progress_notifier_callback = std::function<void(float, unsigned int, unsigned int)>;
/// \brief The default constructor which creates a reporter that doesn't report progress
ProgressReporterExtension() : m_callback{[](float, unsigned int, unsigned int) {}} {}
ProgressReporterExtension(const progress_notifier_callback& callback) : m_callback{callback} {}
ProgressReporterExtension(progress_notifier_callback&& callback) : m_callback{std::move(callback)} {}
/// \brief The main method of this extension used to report the progress.
/// This method forwards its arguments to the callback stored in this class.
/// \param progress A float value in the range [0.0, 1.0] indicating the total progress of an operation.
/// \param total_steps The total number of steps that a given instance of this extension is tracking
/// \param completed_steps The current number of completed steps (out of the total number of steps to take)
void report_progress(float progress, unsigned int total_steps, unsigned int completed_steps) const;
private:
progress_notifier_callback m_callback;
};
} // namespace frontend
} // namespace ov
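To make the documented contract concrete, here is a small hedged sketch exercising it through the Python binding registered earlier in this commit. How the C++ GeneralFailure raised by the range checks surfaces on the Python side is an assumption, so the invalid call is caught broadly.

# Sketch only: exercising the documented callback contract from Python.
from openvino.frontend import ProgressReporterExtension

received = []
ext = ProgressReporterExtension(lambda progress, total, done: received.append((progress, total, done)))

ext.report_progress(0.5, 100, 50)        # forwarded verbatim to the callback
assert received == [(0.5, 100, 50)]

# Values outside [0.0, 1.0] (or completed > total) are rejected by report_progress;
# the exact Python exception type is an assumption here, hence the broad catch.
try:
    ext.report_progress(1.5, 100, 50)
except Exception:
    print("out-of-range progress rejected")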

View File

@ -0,0 +1,22 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "openvino/frontend/extension/progress_reporter_extension.hpp"
#include "openvino/frontend/exception.hpp"
namespace ov {
namespace frontend {
void ProgressReporterExtension::report_progress(float progress,
unsigned int total_steps,
unsigned int completed_steps) const {
FRONT_END_GENERAL_CHECK(completed_steps <= total_steps,
"When reporting the progress, the number of completed steps can be at most equal to the "
"number of total steps.");
FRONT_END_GENERAL_CHECK(progress >= 0.0f && progress <= 1.0f,
"The reported progress needs to be a value between 0.0 and 1.0");
m_callback(progress, total_steps, completed_steps);
}
} // namespace frontend
} // namespace ov

View File

@ -4,10 +4,9 @@
#pragma once
#include <common/extension_holder.hpp>
#include <openvino/frontend/frontend.hpp>
#include "openvino/frontend/extension/telemetry.hpp"
#ifdef OPENVINO_STATIC_LIBRARY
# define ONNX_FRONTEND_API
# define ONNX_FRONTEND_C_API
@ -38,7 +37,7 @@ protected:
InputModel::Ptr load_impl(const std::vector<ov::Any>& params) const override;
private:
std::shared_ptr<TelemetryExtension> m_telemetry;
ExtensionHolder m_extensions;
};
} // namespace onnx

View File

@ -63,17 +63,17 @@ bool common_node_for_all_outputs(const OutputVector& outputs) {
};
} // namespace detail
Graph::Graph(const std::shared_ptr<ONNX_NAMESPACE::ModelProto>& model_proto,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry)
: Graph(model_proto, common::make_unique<GraphCache>(), telemetry) {}
Graph::Graph(const std::shared_ptr<ONNX_NAMESPACE::ModelProto>& model_proto, ov::frontend::ExtensionHolder extensions)
: Graph(model_proto, common::make_unique<GraphCache>(), std::move(extensions)) {}
Graph::Graph(const std::shared_ptr<ONNX_NAMESPACE::ModelProto>& model_proto,
std::unique_ptr<GraphCache>&& cache,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry)
ov::frontend::ExtensionHolder extensions)
: m_model{common::make_unique<Model>(model_proto)},
m_cache{std::move(cache)},
m_telemetry(telemetry) {
m_extensions{std::move(extensions)} {
std::map<std::string, Tensor> initializers;
// Process all initializers in the graph
for (const auto& initializer_tensor : m_model->get_graph().initializer()) {
if (initializer_tensor.has_name()) {
@ -116,7 +116,7 @@ Graph::Graph(const std::shared_ptr<ONNX_NAMESPACE::ModelProto>& model_proto,
std::map<std::string, std::reference_wrapper<const ONNX_NAMESPACE::NodeProto>> unknown_operators;
std::map<std::string, uint64_t> op_statistics;
for (const auto& node_proto : m_model->get_graph().node()) {
if (telemetry) {
if (m_extensions.telemetry) {
op_statistics[node_proto.op_type()]++;
}
if (!m_model->is_operator_available(node_proto)) {
@ -127,9 +127,9 @@ Graph::Graph(const std::shared_ptr<ONNX_NAMESPACE::ModelProto>& model_proto,
}
}
if (telemetry) {
if (m_extensions.telemetry) {
for (const auto& op : op_statistics) {
telemetry->send_event("op_count", "onnx_" + op.first, op.second);
m_extensions.telemetry->send_event("op_count", "onnx_" + op.first, op.second);
}
}
@ -149,6 +149,8 @@ Graph::Graph(const std::shared_ptr<ONNX_NAMESPACE::ModelProto>& model_proto,
}
void Graph::convert_to_ngraph_nodes() {
const float total = static_cast<float>(m_model->get_graph().node().size());
unsigned int completed = 0u;
// Process ONNX graph nodes, convert to nGraph nodes
for (const auto& node_proto : m_model->get_graph().node()) {
const Node node{node_proto, *this};
@ -160,6 +162,8 @@ void Graph::convert_to_ngraph_nodes() {
}
}
OutputVector ng_nodes{make_ng_nodes(node)};
++completed;
m_extensions.progress_reporter->report_progress(completed / total, total, completed);
}
}
@ -198,6 +202,8 @@ std::shared_ptr<Function> Graph::convert() {
}
void Graph::decode_to_framework_nodes() {
const float total = static_cast<float>(m_model->get_graph().node().size());
unsigned int completed = 0u;
// Process ONNX graph nodes, convert to nGraph nodes
for (const auto& node_proto : m_model->get_graph().node()) {
const Node node{node_proto, *this};
@ -230,6 +236,8 @@ void Graph::decode_to_framework_nodes() {
for (std::size_t i{0}; i < node.get_outputs_size(); ++i) {
m_cache->emplace_node(node.output(i), std::move(ng_nodes.at(i)));
}
++completed;
m_extensions.progress_reporter->report_progress(completed / total, total, completed);
}
}
@ -350,8 +358,12 @@ const OpsetImports& Graph::get_opset_imports() const {
}
Subgraph::Subgraph(std::shared_ptr<ONNX_NAMESPACE::ModelProto> model_proto, const Graph* parent_graph)
: Graph(model_proto, common::make_unique<GraphCache>(), parent_graph->get_telemetry()),
m_parent_graph(parent_graph) {}
: Graph(model_proto, common::make_unique<GraphCache>()),
m_parent_graph(parent_graph) {
// do not copy a pre-configured progress reporter extension to the subgraph, copy just the telemetry
// (do not report subgraph conversion progress)
m_extensions.telemetry = parent_graph->get_extensions().telemetry;
}
bool Subgraph::is_ng_node_in_cache(const std::string& name) const {
if (m_cache->contains(name)) {

View File

@ -10,19 +10,19 @@
#include <string>
#include <vector>
#include "common/extension_holder.hpp"
#include "core/graph_cache.hpp"
#include "core/model.hpp"
#include "ngraph/function.hpp"
#include "ngraph/op/parameter.hpp"
#include "onnx_import/core/operator_set.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
namespace ngraph {
namespace onnx_import {
class Graph : public std::enable_shared_from_this<Graph> {
public:
Graph(const std::shared_ptr<ONNX_NAMESPACE::ModelProto>& model_proto,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry = {});
Graph(const std::shared_ptr<ONNX_NAMESPACE::ModelProto>& model_proto,
ov::frontend::ExtensionHolder extensions = {});
Graph() = delete;
Graph(const Graph&) = delete;
@ -45,14 +45,14 @@ public:
const OpsetImports& get_opset_imports() const;
virtual ~Graph() = default;
const std::shared_ptr<ov::frontend::TelemetryExtension>& get_telemetry() const {
return m_telemetry;
const ov::frontend::ExtensionHolder& get_extensions() const {
return m_extensions;
}
protected:
Graph(const std::shared_ptr<ONNX_NAMESPACE::ModelProto>& model,
std::unique_ptr<GraphCache>&& cache,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry = {});
ov::frontend::ExtensionHolder extensions = {});
void set_friendly_names(const Node& onnx_node, const OutputVector& ng_subgraph_outputs) const;
@ -65,10 +65,10 @@ protected:
ParameterVector m_parameters;
std::unique_ptr<Model> m_model;
std::unique_ptr<GraphCache> m_cache;
ov::frontend::ExtensionHolder m_extensions = {};
private:
std::vector<Node> m_nodes;
std::shared_ptr<ov::frontend::TelemetryExtension> m_telemetry;
};
/// \brief Representation of ONNX subgraph. It is used for example by ONNX Loop op.

View File

@ -221,19 +221,17 @@ struct onnx_editor::ONNXModelEditor::Impl {
#endif
};
onnx_editor::ONNXModelEditor::ONNXModelEditor(const std::string& model_path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry)
onnx_editor::ONNXModelEditor::ONNXModelEditor(const std::string& model_path, frontend::ExtensionHolder extensions)
: m_model_path{model_path},
m_telemetry(telemetry),
m_extensions{std::move(extensions)},
m_pimpl{new ONNXModelEditor::Impl{model_path}, [](Impl* impl) {
delete impl;
}} {}
#if defined(OPENVINO_ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32)
onnx_editor::ONNXModelEditor::ONNXModelEditor(const std::wstring& model_path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry)
onnx_editor::ONNXModelEditor::ONNXModelEditor(const std::wstring& model_path, frontend::ExtensionHolder extensions)
: m_model_path{ngraph::file_util::wstring_to_string(model_path)},
m_telemetry(telemetry),
m_extensions{std::move(extensions)},
m_pimpl{new ONNXModelEditor::Impl{model_path}, [](Impl* impl) {
delete impl;
}} {}
@ -241,9 +239,9 @@ onnx_editor::ONNXModelEditor::ONNXModelEditor(const std::wstring& model_path,
onnx_editor::ONNXModelEditor::ONNXModelEditor(std::istream& model_stream,
const std::string& model_path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry)
frontend::ExtensionHolder extensions)
: m_model_path{model_path},
m_telemetry(telemetry),
m_extensions{std::move(extensions)},
m_pimpl{new ONNXModelEditor::Impl{model_stream}, [](Impl* impl) {
delete impl;
}} {}
@ -436,7 +434,7 @@ std::string onnx_editor::ONNXModelEditor::model_string() const {
}
std::shared_ptr<Model> onnx_editor::ONNXModelEditor::get_function() const {
return ngraph::onnx_import::detail::import_onnx_model(m_pimpl->m_model_proto, m_model_path, m_telemetry);
return ngraph::onnx_import::detail::import_onnx_model(m_pimpl->m_model_proto, m_model_path, m_extensions);
}
void onnx_editor::ONNXModelEditor::set_input_values(
@ -625,7 +623,7 @@ std::vector<std::string> onnx_editor::ONNXModelEditor::get_output_ports(const Ed
}
std::shared_ptr<Model> onnx_editor::ONNXModelEditor::decode() {
return ngraph::onnx_import::detail::decode_to_framework_nodes(m_pimpl->m_model_proto, m_model_path, m_telemetry);
return ngraph::onnx_import::detail::decode_to_framework_nodes(m_pimpl->m_model_proto, m_model_path, m_extensions);
}
void onnx_editor::ONNXModelEditor::add_output(const OutputEdge& output_edge) const {

View File

@ -8,12 +8,14 @@
#include <map>
#include <memory>
#include "common/extension_holder.hpp"
#include "editor_types.hpp"
#include "ngraph/function.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/partial_shape.hpp"
#include "ngraph/type/element_type.hpp"
#include "onnx_import/onnx_importer_visibility.hpp"
#include "openvino/frontend/extension/progress_reporter_extension.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
namespace ov {
@ -29,11 +31,9 @@ public:
/// is parsed and loaded into the m_model_proto member variable.
///
/// \param model_path Path to the file containing the model.
ONNXModelEditor(const std::string& model_path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry = {});
ONNXModelEditor(const std::string& model_path, frontend::ExtensionHolder extensions = {});
#if defined(OPENVINO_ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32)
ONNXModelEditor(const std::wstring& model_path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry = {});
ONNXModelEditor(const std::wstring& model_path, frontend::ExtensionHolder extensions = {});
#endif
/// \brief Creates an editor from a model stream. The stream is parsed and loaded
@ -44,7 +44,7 @@ public:
/// for ONNX external weights feature support.
ONNXModelEditor(std::istream& model_stream,
const std::string& path = "",
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry = {});
frontend::ExtensionHolder extensions = {});
/// \brief Modifies the in-memory representation of the model by setting
/// custom input types for all inputs specified in the provided map.
@ -296,7 +296,7 @@ public:
private:
void update_mapper_if_needed() const;
std::shared_ptr<ov::frontend::TelemetryExtension> m_telemetry;
frontend::ExtensionHolder m_extensions;
const std::string m_model_path;
struct Impl;

View File

@ -12,7 +12,6 @@
#include <utils/onnx_internal.hpp>
#include "onnx_common/onnx_model_validator.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
using namespace ov;
using namespace ov::frontend::onnx;
@ -36,27 +35,27 @@ InputModel::Ptr FrontEnd::load_impl(const std::vector<ov::Any>& variants) const
}
if (variants[0].is<std::string>()) {
const auto path = variants[0].as<std::string>();
return std::make_shared<InputModel>(path, m_telemetry);
return std::make_shared<InputModel>(path, m_extensions);
}
#if defined(OPENVINO_ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32)
if (variants[0].is<std::wstring>()) {
const auto path = variants[0].as<std::wstring>();
return std::make_shared<InputModel>(path, m_telemetry);
return std::make_shared<InputModel>(path, m_extensions);
}
#endif
if (variants[0].is<std::istream*>()) {
const auto stream = variants[0].as<std::istream*>();
if (variants.size() > 1 && variants[1].is<std::string>()) {
const auto path = variants[0].as<std::string>();
return std::make_shared<InputModel>(*stream, path, m_telemetry);
return std::make_shared<InputModel>(*stream, path, m_extensions);
}
#if defined(OPENVINO_ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32)
if (variants.size() > 1 && variants[1].is<std::wstring>()) {
const auto path = variants[1].as<std::wstring>();
return std::make_shared<InputModel>(*stream, path, m_telemetry);
return std::make_shared<InputModel>(*stream, path, m_extensions);
}
#endif
return std::make_shared<InputModel>(*stream, m_telemetry);
return std::make_shared<InputModel>(*stream, m_extensions);
}
return nullptr;
}
@ -135,6 +134,8 @@ bool FrontEnd::supported_impl(const std::vector<ov::Any>& variants) const {
void FrontEnd::add_extension(const std::shared_ptr<ov::Extension>& extension) {
if (auto telemetry = std::dynamic_pointer_cast<TelemetryExtension>(extension)) {
m_telemetry = telemetry;
m_extensions.telemetry = telemetry;
} else if (auto progress_reporter = std::dynamic_pointer_cast<ProgressReporterExtension>(extension)) {
m_extensions.progress_reporter = progress_reporter;
}
}

View File

@ -14,27 +14,23 @@ using namespace ov::frontend::onnx;
NGRAPH_SUPPRESS_DEPRECATED_START
InputModel::InputModel(const std::string& path, const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry)
: m_editor{std::make_shared<onnx_editor::ONNXModelEditor>(path, telemetry)} {}
InputModel::InputModel(const std::string& path, frontend::ExtensionHolder extensions)
: m_editor{std::make_shared<onnx_editor::ONNXModelEditor>(path, std::move(extensions))} {}
#if defined(OPENVINO_ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32)
InputModel::InputModel(const std::wstring& path, const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry)
: m_editor{std::make_shared<onnx_editor::ONNXModelEditor>(path, telemetry)} {}
InputModel::InputModel(const std::wstring& path, frontend::ExtensionHolder extensions)
: m_editor{std::make_shared<onnx_editor::ONNXModelEditor>(path, std::move(extensions))} {}
#endif
InputModel::InputModel(std::istream& model_stream, const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry)
: m_editor{std::make_shared<onnx_editor::ONNXModelEditor>(model_stream, "", telemetry)} {}
InputModel::InputModel(std::istream& model_stream, frontend::ExtensionHolder extensions)
: m_editor{std::make_shared<onnx_editor::ONNXModelEditor>(model_stream, "", std::move(extensions))} {}
InputModel::InputModel(std::istream& model_stream,
const std::string& path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry)
: m_editor{std::make_shared<onnx_editor::ONNXModelEditor>(model_stream, path, telemetry)} {}
InputModel::InputModel(std::istream& model_stream, const std::string& path, frontend::ExtensionHolder extensions)
: m_editor{std::make_shared<onnx_editor::ONNXModelEditor>(model_stream, path, std::move(extensions))} {}
#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
InputModel::InputModel(std::istream& model_stream,
const std::wstring& path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry)
: InputModel(model_stream, ov::util::wstring_to_string(path), telemetry) {}
InputModel::InputModel(std::istream& model_stream, const std::wstring& path, frontend::ExtensionHolder extensions)
: InputModel(model_stream, ov::util::wstring_to_string(path), std::move(extensions)) {}
#endif
std::vector<ov::frontend::Place::Ptr> InputModel::get_inputs() const {

View File

@ -8,26 +8,24 @@
#include <fstream>
#include <openvino/frontend/input_model.hpp>
#include "common/extension_holder.hpp"
namespace ov {
namespace frontend {
namespace onnx {
class InputModel : public ov::frontend::InputModel {
public:
InputModel(const std::string& path, const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry = {});
InputModel(const std::string& path, ExtensionHolder extensions = {});
#if defined(OPENVINO_ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32)
InputModel(const std::wstring& path, const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry = {});
InputModel(const std::wstring& path, ExtensionHolder extensions = {});
#endif
InputModel(std::istream& model_stream, const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry = {});
InputModel(std::istream& model_stream, ExtensionHolder extensions = {});
// The path can be required even if the model is passed as a stream because it is necessary
// for ONNX external data feature
InputModel(std::istream& model_stream,
const std::string& path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry = {});
InputModel(std::istream& model_stream, const std::string& path, ExtensionHolder extensions = {});
#ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT
InputModel(std::istream& model_stream,
const std::wstring& path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry = {});
InputModel(std::istream& model_stream, const std::wstring& path, ExtensionHolder extensions = {});
#endif
std::vector<ov::frontend::Place::Ptr> get_inputs() const override;

View File

@ -89,18 +89,17 @@ void convert_decoded_function(std::shared_ptr<Function> function) {
std::shared_ptr<Function> import_onnx_model(std::shared_ptr<ONNX_NAMESPACE::ModelProto> model_proto,
const std::string& model_path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry) {
ov::frontend::ExtensionHolder extensions) {
apply_transformations(*model_proto, model_path);
Graph graph{model_proto, telemetry};
Graph graph{model_proto, extensions};
return graph.convert();
}
std::shared_ptr<Function> decode_to_framework_nodes(
std::shared_ptr<ONNX_NAMESPACE::ModelProto> model_proto,
const std::string& model_path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry) {
std::shared_ptr<Function> decode_to_framework_nodes(std::shared_ptr<ONNX_NAMESPACE::ModelProto> model_proto,
const std::string& model_path,
ov::frontend::ExtensionHolder extensions) {
apply_transformations(*model_proto, model_path);
auto graph = std::make_shared<Graph>(model_proto, telemetry);
auto graph = std::make_shared<Graph>(model_proto, extensions);
return graph->decode();
}
} // namespace detail

View File

@ -7,8 +7,8 @@
#include <memory>
#include <string>
#include "common/extension_holder.hpp"
#include "ngraph/function.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
namespace ONNX_NAMESPACE {
class ModelProto;
@ -25,32 +25,30 @@ namespace detail {
/// library can cause segfaults. If stream parsing fails or the ONNX model
/// contains unsupported ops, the function throws an ngraph_error exception.
///
/// \param[in] model_proto Reference to a GraphProto object.
/// \param[in] model_path The path to the imported onnx model.
/// It is required if the imported model uses data saved in
/// external files.
/// \param model_proto Reference to a GraphProto object.
/// \param model_path The path to the imported onnx model.
/// It is required if the imported model uses data saved in external files.
/// \param extensions An object containing a collection of frontend extensions to use during the import process
///
/// \return An nGraph function that represents a single output from the created
/// graph.
std::shared_ptr<Function> import_onnx_model(std::shared_ptr<ONNX_NAMESPACE::ModelProto> model_proto,
const std::string& model_path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry = {});
ov::frontend::ExtensionHolder extensions = {});
/// \brief Decode ONNX model to nGraph function with ONNXFrameworkNode(s)
///
/// \param[in] model_proto Reference to a GraphProto object.
/// \param[in] model_path The path to the imported onnx model.
/// It is required if the imported model uses data saved in
/// external files.
/// \param model_proto Reference to a GraphProto object.
/// \param model_path The path to the imported onnx model.
/// It is required if the imported model uses data saved in external files.
/// \param extensions An object containing a collection of frontend extensions to use during the import process
///
/// \return A nGraph function with ONNXFrameworkNodes
std::shared_ptr<Function> decode_to_framework_nodes(
std::shared_ptr<ONNX_NAMESPACE::ModelProto> model_proto,
const std::string& model_path,
const std::shared_ptr<ov::frontend::TelemetryExtension>& telemetry = {});
std::shared_ptr<Function> decode_to_framework_nodes(std::shared_ptr<ONNX_NAMESPACE::ModelProto> model_proto,
const std::string& model_path,
ov::frontend::ExtensionHolder extensions = {});
/// \brief Converts a nGraph function (onnx model decoded to function with
/// ONNXFrameworkNode(s))
/// \brief Converts a nGraph function (onnx model decoded to function with ONNXFrameworkNode(s))
/// to a complete function with actual compute operations
///
/// \return A nGraph function.

View File

@ -33,7 +33,7 @@ from openvino.tools.mo.utils.error import Error, FrameworkError
from openvino.tools.mo.utils.find_ie_version import find_ie_version
from openvino.tools.mo.utils.get_ov_update_message import get_ov_update_message
from openvino.tools.mo.utils.guess_framework import deduce_framework_by_namespace
from openvino.tools.mo.utils.logger import init_logger
from openvino.tools.mo.utils.logger import init_logger, progress_printer
from openvino.tools.mo.utils.model_analysis import AnalysisResults
from openvino.tools.mo.utils.utils import refer_to_faq_msg
from openvino.tools.mo.utils.telemetry_utils import send_params_info, send_framework_info
@ -43,7 +43,7 @@ from openvino.tools.mo.utils.telemetry_utils import get_tid
from openvino.tools.mo.front.common.partial_infer.utils import mo_array
# pylint: disable=no-name-in-module,import-error
from openvino.frontend import FrontEndManager, TelemetryExtension
from openvino.frontend import FrontEndManager, ProgressReporterExtension, TelemetryExtension
def replace_ext(name: str, old: str, new: str):
@ -322,6 +322,7 @@ def prepare_ir(argv):
if moc_front_end:
t.send_event("mo", "conversion_method", moc_front_end.get_name() + "_frontend")
moc_front_end.add_extension(TelemetryExtension("mo", t.send_event, t.send_error, t.send_stack_trace))
moc_front_end.add_extension(ProgressReporterExtension(progress_printer(argv)))
ngraph_function = moc_pipeline(argv, moc_front_end)
else:
t.send_event("mo", "conversion_method", "mo_legacy")

View File

@ -6,6 +6,7 @@ import logging as log
import os
import re
import sys
from argparse import Namespace
# WA for abseil bug that affects logging while importing TF starting 1.14 version
# Link to original issue: https://github.com/abseil/abseil-py/issues/99
@ -111,3 +112,37 @@ def progress_bar(function: callable):
function(*args, **kwargs)
return wrapper
def progress_printer(argv: Namespace):
"""
A higher-order factory function returning a configurable callback that displays a progress bar.
Depending on the configuration stored in 'argv', the progress bar can be single-line, multi-line, or silent.
"""
def _progress_bar(progress, total, completed, endline):
bar_len = 20
def dots():
return '.' * int(progress * bar_len)
print('\rProgress: [{:{}}]{:>7.2f}% done'.format(dots(), bar_len, progress*100), end=endline)
sys.stdout.flush()
def no_progress_bar(progress, total, completed):
""" A 'dummy' progressbar which doesn't print anything """
pass
def oneline_progress_bar(progress, total, completed):
""" A callback that always prints the progress in the same line (mimics real GUI progress bar)"""
_progress_bar(progress, total, completed, '')
def newline_progress_bar(progress, total, completed):
""" A callback that prints an updated progress bar in separate lines """
_progress_bar(progress, total, completed, '\n')
if "progress" in argv and argv.progress:
if "stream_output" in argv and argv.stream_output:
return newline_progress_bar
else:
return oneline_progress_bar
else:
return no_progress_bar
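As a quick illustration of the factory above (assuming an environment where openvino.tools.mo is importable), the sketch below builds the callbacks from an argparse Namespace whose fields mirror MO's --progress and --stream_output options.

# Sketch only: exercising progress_printer outside of MO's normal flow.
from argparse import Namespace

from openvino.tools.mo.utils.logger import progress_printer

bar = progress_printer(Namespace(progress=True, stream_output=False))
bar(0.5, 100, 50)     # re-draws one line: 'Progress: [..........          ]  50.00% done'

silent = progress_printer(Namespace(progress=False, stream_output=False))
silent(0.5, 100, 50)  # the 'dummy' callback prints nothing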