FrontEnd ConversionExtensions (#9070)

* Squash commit: implement Conversion extensions

* Refactor PaddlePaddle FrontEnd

* Codestyle

* FrontEnd, InputModel, Place base classes -> abstract; renamed model file

* Fix unit tests

* fix unit tests

* ngraph:: to ov::

* Rename frontends dir to frontend

* fix merge conflicts

* Fix ConversionExtension

* get rid of NamedInputs/Outputs in TF FE

* Rename paddlepaddle to paddle; pdpd to paddle

* add missing file

* codestyle

* Remove local change

* paddlepaddle -> paddle for azure configs and .md files

* fix package name, fix config files

* Fix win build

* Revert Broadcast/AutoBroadcast changes

* codestyle

* fix FrontEnd class

* fix ngraph_cpp_api.config

* fix incorrect merge, codestyle

* fix conversion extension

* conversion extension

* codestyle

* merge master

* fix build

* refactoring; revert broadcast/autobroadcast changes

* codestyle

* fix MacOS config

* resolve merge conflicts

* refactor includes

* register ConversionExtension in FrontEnds

* move get_op_type to base NodeContext class

* set op_translator map in ctor of Paddle FE; fix unit tests

* update unit tests; codestyle

* codestyle

* preliminary version of conversion extension in pybind

* conversion extension

* get_attribute_as_any method for NodeContext

* move get_attribute methods to NodeContext base class, rename get_ng_input to get_input

* add missed file

* Implement ov::Any getter in ONNX NodeContext

* fix py bindings

* Add/update ConversionExtension unit tests, add SO unit tests, fix TF FE

* fix segfault on destructor

* fix NodeContext interface, fix unit tests

* set different names for ConversionExtensions in unit tests

* fix PaddleFuzzy tests

* fix Paddle Fuzzy tests

* revert changes in generate_slice.py

* fix codestyle

* fix pybindings

* revert local changes in generate_slice.py

* delete duplicate exceptions.hpp

* Refactoring: fix names according to convention

* pybinding for NodeContext, FrontEnd, ConversionExtension; fix unit tests; implement new unit tests

* Refactoring

* fix the case when a new converter overwrites an existing one; delete unnecessary NodeContext from pybindings; use CreatorFunctions from the base class in ConversionExtension; update unit tests

* Revert local change

* PythonAPI: fix get_attribute method; fix get_input method; implement support of dtype and default attributes

* Fix py unit tests: add support for vector<ov::element::Type> as attribute

* resolve review comments

* fix unit tests

* move extension_holder to openvino/frontend/extension folder

* fix build on macOS

* temporarily disable cast from vector<bool> to investigate an issue on macOS

* Resolve review comments

* Resolve review comments

* Use dev API for .so extension

* Link frontends to pyopenvino as separate targets

* Temporarily enable TF FE installation

* ignore PEP8 E402 for init files, set correct directory for py modules

* revert local changes

* Fix deadlock in pybind GIL; fix Win build; fix PEP8

* fix PEP8

* Add a return type annotation

* fix builds; fix ON/OFF switcher for ENABLE_OV_xxx_FRONTEND cmake options

* Fix the issue with ifdefs on Windows; fix the issue related to pybindings and a static C++ object

* fix python unit tests

* fix static build on Windows

* Retrigger CI builds

* Fix static build on Windows

* fix static build on Windows again

* Retrigger CI

* delete unused includes; add a comment about the issue on macOS

* fix misprint

* resolve review comments

* fix misprint

* resolve review remarks

* Resolve review comments

* fix Windows wheels build

* resolve review comments
Ivan Tikhonov 2022-01-20 20:44:31 +03:00 committed by GitHub
parent ff6a9a1179
commit 29d73ce3c8
180 changed files with 2619 additions and 574 deletions
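Taken together, the changes below expose frontend conversion extensions to the Python API. As a quick orientation, this is a minimal sketch of the resulting workflow, assembled from the unit tests added in this commit; the model path is a placeholder.

# Sketch: registering a Python conversion extension and converting a model.
import openvino.runtime.opset8 as ops
from openvino.frontend import FrontEndManager, ConversionExtension, NodeContext

def custom_add(node: NodeContext):
    # Rebuild the framework "Add" operation from OpenVINO opset ops.
    a = node.get_input(0)
    b = node.get_input(1)
    return [ops.add(a, b).output(0)]

fem = FrontEndManager()
fe = fem.load_by_model("model.onnx")                      # placeholder model path
fe.add_extension(ConversionExtension("Add", custom_add))  # called for every "Add" node
input_model = fe.load("model.onnx")
model = fe.convert(input_model)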


@ -9,7 +9,7 @@ enable-extensions = G
per-file-ignores =
*.pyx: E225, E226, E251, E999, E800, E265, E203, E266, E227, E211
tests/*: S101, T001
*__init__.py: F403, F405, F405
*__init__.py: E402, F403, F405, F405
[pydocstyle]
convention = google


@ -8,42 +8,23 @@ Low level wrappers for the FrontEnd c++ api.
# flake8: noqa
import os
import sys
if sys.platform == "win32":
# Installer, yum, pip installs openvino dlls to the different directories
# and those paths need to be visible to the openvino modules
#
# If you're using a custom installation of openvino,
# add the location of openvino dlls to your system PATH.
#
# looking for the libs in the pip installation path by default.
openvino_libs = [os.path.join(os.path.dirname(__file__), "..", "..", ".."),
os.path.join(os.path.dirname(__file__), "..", "..", "openvino", "libs")]
# setupvars.bat script set all libs paths to OPENVINO_LIB_PATHS environment variable.
openvino_libs_installer = os.getenv("OPENVINO_LIB_PATHS")
if openvino_libs_installer:
openvino_libs.extend(openvino_libs_installer.split(";"))
for lib in openvino_libs:
lib_path = os.path.join(os.path.dirname(__file__), lib)
if os.path.isdir(lib_path):
# On Windows, with Python >= 3.8, DLLs are no longer imported from the PATH.
if (3, 8) <= sys.version_info:
os.add_dll_directory(os.path.abspath(lib_path))
else:
os.environ["PATH"] = os.path.abspath(lib_path) + ";" + os.environ["PATH"]
from openvino.utils import add_openvino_libs_to_path
add_openvino_libs_to_path()
# main classes
from openvino.pyopenvino import FrontEndManager
from openvino.pyopenvino import FrontEnd
from openvino.pyopenvino import InputModel
from openvino.pyopenvino import NodeContext
from openvino.pyopenvino import Place
from openvino.pyopenvino import TelemetryExtension
# extensions
from openvino.pyopenvino import DecoderTransformationExtension
from openvino.pyopenvino import JsonConfigExtension
from openvino.pyopenvino import ConversionExtension
from openvino.pyopenvino import ProgressReporterExtension
from openvino.pyopenvino import TelemetryExtension
# exceptions
from openvino.pyopenvino import NotImplementedFailure
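For reference, a minimal sketch of what user code now imports from this module; which frontends are actually available depends on the build configuration.

# Sketch: public classes re-exported by openvino.frontend.
from openvino.frontend import FrontEndManager, FrontEnd, InputModel, NodeContext, Place
from openvino.frontend import ConversionExtension, TelemetryExtension, NotImplementedFailure

fem = FrontEndManager()
print(fem.get_available_front_ends())  # e.g. ["onnx", "paddle", "tf"], depending on the build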


@ -0,0 +1,19 @@
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
"""
Package: openvino
Low level wrappers for the FrontEnd c++ api.
"""
# flake8: noqa
from openvino.utils import add_openvino_libs_to_path
add_openvino_libs_to_path()
try:
from openvino.frontend.onnx.py_onnx_frontend import ConversionExtensionONNX as ConversionExtension
except ImportError as err:
raise ImportError("OpenVINO ONNX frontend is not available, please make sure the frontend is built. "
"{}".format(err))


@ -0,0 +1,20 @@
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
"""
Package: openvino
Low level wrappers for the FrontEnd c++ api.
"""
# flake8: noqa
from openvino.utils import add_openvino_libs_to_path
add_openvino_libs_to_path()
try:
from openvino.frontend.paddle.py_paddle_frontend import ConversionExtensionPaddle as ConversionExtension
except ImportError as err:
raise ImportError("OpenVINO Paddle frontend is not available, please make sure the frontend is built."
"{}".format(err))


@ -0,0 +1,19 @@
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
"""
Package: openvino
Low level wrappers for the FrontEnd c++ api.
"""
# flake8: noqa
from openvino.utils import add_openvino_libs_to_path
add_openvino_libs_to_path()
try:
from openvino.frontend.tensorflow.py_tensorflow_frontend import ConversionExtensionTensorflow as ConversionExtension
except ImportError as err:
raise ImportError("OpenVINO Tensorflow frontend is not available, please make sure the frontend is built. "
"{}".format(err))


@ -2,32 +2,11 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import os
import sys
# flake8: noqa
if sys.platform == "win32":
# Installer, yum, pip installs openvino dlls to the different directories
# and those paths need to be visible to the openvino modules
#
# If you're using a custom installation of openvino,
# add the location of openvino dlls to your system PATH.
#
# looking for the libs in the pip installation path by default.
openvino_libs = [os.path.join(os.path.dirname(__file__), "..", "..", ".."),
os.path.join(os.path.dirname(__file__), "..", "..", "openvino", "libs")]
# setupvars.bat script set all libs paths to OPENVINO_LIB_PATHS environment variable.
openvino_libs_installer = os.getenv("OPENVINO_LIB_PATHS")
if openvino_libs_installer:
openvino_libs.extend(openvino_libs_installer.split(";"))
for lib in openvino_libs:
lib_path = os.path.join(os.path.dirname(__file__), lib)
if os.path.isdir(lib_path):
# On Windows, with Python >= 3.8, DLLs are no longer imported from the PATH.
if (3, 8) <= sys.version_info:
os.add_dll_directory(os.path.abspath(lib_path))
else:
os.environ["PATH"] = os.path.abspath(lib_path) + ";" + os.environ["PATH"]
from openvino.utils import add_openvino_libs_to_path
add_openvino_libs_to_path()
from openvino.pyopenvino.offline_transformations import apply_moc_transformations
from openvino.pyopenvino.offline_transformations import apply_moc_legacy_transformations


@ -8,32 +8,9 @@ Low level wrappers for the PrePostProcessing c++ api.
# flake8: noqa
import os
import sys
if sys.platform == "win32":
# Installer, yum, pip installs openvino dlls to the different directories
# and those paths need to be visible to the openvino modules
#
# If you're using a custom installation of openvino,
# add the location of openvino dlls to your system PATH.
#
# looking for the libs in the pip installation path by default.
openvino_libs = [os.path.join(os.path.dirname(__file__), "..", "..", ".."),
os.path.join(os.path.dirname(__file__), "..", "..", "openvino", "libs")]
# setupvars.bat script set all libs paths to OPENVINO_LIB_PATHS environment variable.
openvino_libs_installer = os.getenv("OPENVINO_LIB_PATHS")
if openvino_libs_installer:
openvino_libs.extend(openvino_libs_installer.split(";"))
for lib in openvino_libs:
lib_path = os.path.join(os.path.dirname(__file__), lib)
if os.path.isdir(lib_path):
# On Windows, with Python >= 3.8, DLLs are no longer imported from the PATH.
if (3, 8) <= sys.version_info:
os.add_dll_directory(os.path.abspath(lib_path))
else:
os.environ["PATH"] = os.path.abspath(lib_path) + ";" + os.environ["PATH"]
from openvino.utils import add_openvino_libs_to_path
add_openvino_libs_to_path()
# main classes
from openvino.pyopenvino.preprocess import InputInfo


@ -4,9 +4,7 @@
"""openvino module namespace, exposing factory functions for all ops and other classes."""
# noqa: F401
import os
import sys
from openvino.utils import add_openvino_libs_to_path
from pkg_resources import get_distribution, DistributionNotFound
try:
@ -14,33 +12,7 @@ try:
except DistributionNotFound:
__version__ = "0.0.0.dev0"
if sys.platform == "win32":
# Installer, yum, pip installs openvino dlls to the different directories
# and those paths need to be visible to the openvino modules
#
# If you're using a custom installation of openvino,
# add the location of openvino dlls to your system PATH.
#
# looking for the libs in the pip installation path by default.
openvino_libs = [
os.path.join(os.path.dirname(__file__), "..", "..", ".."),
os.path.join(os.path.dirname(__file__), "..", "..", "openvino", "libs"),
]
# setupvars.bat script set all libs paths to OPENVINO_LIB_PATHS environment variable.
openvino_libs_installer = os.getenv("OPENVINO_LIB_PATHS")
if openvino_libs_installer:
openvino_libs.extend(openvino_libs_installer.split(";"))
for lib in openvino_libs:
lib_path = os.path.join(os.path.dirname(__file__), lib)
if os.path.isdir(lib_path):
# On Windows, with Python >= 3.8, DLLs are no longer imported from the PATH.
if (3, 8) <= sys.version_info:
os.add_dll_directory(os.path.abspath(lib_path))
else:
os.environ["PATH"] = (
os.path.abspath(lib_path) + ";" + os.environ["PATH"]
)
add_openvino_libs_to_path()
# Openvino pybind bindings and python extended classes
from openvino.pyopenvino import Dimension


@ -0,0 +1,31 @@
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import os
import sys
def add_openvino_libs_to_path() -> None:
"""Adds OpenVINO libs to path on Win OS."""
if sys.platform == "win32":
# Installer, yum, pip installs openvino dlls to the different directories
# and those paths need to be visible to the openvino modules
#
# If you're using a custom installation of openvino,
# add the location of openvino dlls to your system PATH.
#
# looking for the libs in the pip installation path by default.
openvino_libs = [os.path.join(os.path.dirname(__file__), "..", ".."),
os.path.join(os.path.dirname(__file__), "..", "openvino", "libs")]
# setupvars.bat script set all libs paths to OPENVINO_LIB_PATHS environment variable.
openvino_libs_installer = os.getenv("OPENVINO_LIB_PATHS")
if openvino_libs_installer:
openvino_libs.extend(openvino_libs_installer.split(";"))
for lib in openvino_libs:
lib_path = os.path.join(os.path.dirname(__file__), lib)
if os.path.isdir(lib_path):
# On Windows, with Python >= 3.8, DLLs are no longer imported from the PATH.
if (3, 8) <= sys.version_info:
os.add_dll_directory(os.path.abspath(lib_path))
else:
os.environ["PATH"] = os.path.abspath(lib_path) + ";" + os.environ["PATH"]


@ -48,9 +48,22 @@ if(ENABLE_TESTS)
add_subdirectory(test_utils)
endif()
if(TARGET openvino::frontend::onnx)
add_subdirectory(frontend/onnx)
endif()
if(TARGET openvino::frontend::tensorflow)
add_subdirectory(frontend/tensorflow)
endif()
if(TARGET openvino::frontend::paddle)
add_subdirectory(frontend/paddle)
endif()
# create target
file(GLOB_RECURSE SOURCES core/*.cpp graph/*.cpp frontend/*.cpp pyopenvino.cpp)
list(FILTER SOURCES EXCLUDE REGEX frontend/onnx|tensorflow|paddle/* )
pybind11_add_module(${PROJECT_NAME} MODULE ${SOURCES})


@ -10,6 +10,7 @@
#include "extension/json_config.hpp"
#include "manager.hpp"
#include "openvino/frontend/exception.hpp"
#include "openvino/frontend/extension/conversion.hpp"
#include "openvino/frontend/extension/decoder_transformation.hpp"
#include "openvino/frontend/extension/progress_reporter_extension.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
@ -56,6 +57,45 @@ void regclass_frontend_JsonConfigExtension(py::module m) {
}));
}
void regclass_frontend_ConversionExtensionBase(py::module m) {
py::class_<ConversionExtensionBase, ConversionExtensionBase::Ptr, ov::Extension> ext(m,
"ConversionExtensionBase",
py::dynamic_attr());
}
void regclass_frontend_ConversionExtension(py::module m) {
py::class_<ConversionExtension, ConversionExtension::Ptr, ConversionExtensionBase> _ext(m,
"_ConversionExtension",
py::dynamic_attr(),
py::module_local());
class PyConversionExtension : public ConversionExtension {
public:
using Ptr = std::shared_ptr<PyConversionExtension>;
using PyCreatorFunction = std::function<ov::OutputVector(const NodeContext*)>;
using PyCreatorFunctionNamed = std::function<std::map<std::string, ov::OutputVector>(const NodeContext*)>;
PyConversionExtension(const std::string& op_type, const PyCreatorFunction& f)
: ConversionExtension(op_type, [f](const NodeContext& node) -> ov::OutputVector {
return f(static_cast<const NodeContext*>(&node));
}) {}
PyConversionExtension(const std::string& op_type, const PyCreatorFunctionNamed& f)
: ConversionExtension(op_type, [f](const NodeContext& node) -> std::map<std::string, ov::OutputVector> {
return f(static_cast<const NodeContext*>(&node));
}) {}
};
py::class_<PyConversionExtension, PyConversionExtension::Ptr, ConversionExtension> ext(m,
"ConversionExtension",
py::dynamic_attr());
ext.def(py::init([](const std::string& op_type, const PyConversionExtension::PyCreatorFunction& f) {
return std::make_shared<PyConversionExtension>(op_type, f);
}));
ext.def(py::init([](const std::string& op_type, const PyConversionExtension::PyCreatorFunctionNamed& f) {
return std::make_shared<PyConversionExtension>(op_type, f);
}));
}
void regclass_frontend_ProgressReporterExtension(py::module m) {
py::class_<ProgressReporterExtension, std::shared_ptr<ProgressReporterExtension>, ov::Extension> ext{
m,


@ -11,4 +11,6 @@ namespace py = pybind11;
void regclass_frontend_TelemetryExtension(py::module m);
void regclass_frontend_DecoderTransformationExtension(py::module m);
void regclass_frontend_JsonConfigExtension(py::module m);
void regclass_frontend_ConversionExtension(py::module m);
void regclass_frontend_ConversionExtensionBase(py::module m);
void regclass_frontend_ProgressReporterExtension(py::module m);


@ -18,7 +18,7 @@ using namespace ov::frontend;
void regclass_frontend_FrontEnd(py::module m) {
py::class_<FrontEnd, std::shared_ptr<FrontEnd>> fem(m, "FrontEnd", py::dynamic_attr(), py::module_local());
fem.doc() = "ngraph.impl.FrontEnd wraps ngraph::frontend::FrontEnd";
fem.doc() = "openvino.frontend.FrontEnd wraps ov::frontend::FrontEnd";
fem.def(
"load",


@ -0,0 +1,39 @@
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
function(frontend_module TARGET FRAMEWORK INSTALL_COMPONENT)
set(TARGET_NAME ${TARGET})
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PYTHON_BRIDGE_OUTPUT_DIRECTORY}/frontend/${FRAMEWORK})
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PYTHON_BRIDGE_OUTPUT_DIRECTORY}/frontend/${FRAMEWORK})
set(CMAKE_COMPILE_PDB_OUTPUT_DIRECTORY ${PYTHON_BRIDGE_OUTPUT_DIRECTORY}/frontend/${FRAMEWORK})
set(CMAKE_PDB_OUTPUT_DIRECTORY ${PYTHON_BRIDGE_OUTPUT_DIRECTORY}/frontend/${FRAMEWORK})
set(PYTHON_BRIDGE_CPACK_PATH "python")
file(GLOB SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
# create target
pybind11_add_module(${TARGET_NAME} MODULE ${SOURCES})
add_dependencies(${TARGET_NAME} pyopenvino)
target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}")
target_link_libraries(${TARGET_NAME} PRIVATE openvino::runtime openvino::frontend::${FRAMEWORK})
# Compatibility with python 2.7 which has deprecated "register" specifier
if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
target_compile_options(${TARGET_NAME} PRIVATE "-Wno-error=register")
endif()
# perform copy
add_custom_command(TARGET ${TARGET_NAME}
POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy ${PYTHON_SOURCE_DIR}/openvino/frontend/${FRAMEWORK}/__init__.py ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/__init__.py
)
install(TARGETS ${TARGET_NAME}
DESTINATION python/${PYTHON_VERSION}/openvino/frontend/${FRAMEWORK}
COMPONENT ${INSTALL_COMPONENT})
endfunction()


@ -0,0 +1,127 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "node_context.hpp"
#include <pybind11/functional.h>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include <pybind11/stl_bind.h>
#include "openvino/frontend/node_context.hpp"
namespace py = pybind11;
using namespace ov::frontend;
template <typename>
struct is_std_vector : std::false_type {};
template <typename T, typename A>
struct is_std_vector<std::vector<T, A>> : std::true_type {};
#define CAST_VEC_TO_PY(any, py_type, c_type) \
{ \
static_assert(is_std_vector<c_type>(), "The type should be std::vector."); \
if ((any).is<c_type>()) { \
auto casted = (any).as<c_type>(); \
if (!(py_type).is_none()) { \
py::list py_list; \
for (auto el : casted) { \
py_list.append(py_type(el)); \
} \
return py_list; \
} \
return py::cast(casted); \
} \
}
#define CAST_TO_PY(any, py_type, c_type) \
{ \
if ((any).is<c_type>()) { \
auto casted = (any).as<c_type>(); \
if (!(py_type).is_none()) { \
return py_type(casted); \
} \
return py::cast(casted); \
} \
}
void regclass_frontend_NodeContext(py::module m) {
py::class_<ov::frontend::NodeContext, std::shared_ptr<ov::frontend::NodeContext>> ext(m,
"NodeContext",
py::dynamic_attr());
ext.def(
"get_attribute",
[=](NodeContext& self, const std::string& name, const py::object& default_value, const py::object& dtype)
-> py::object {
auto any = self.get_attribute_as_any(name);
auto module = py::module_::import("openvino.runtime");
auto type = m.attr("Type");
if (dtype == type) {
if (any.is<int32_t>() || any.is<int64_t>()) {
return py::cast(self.get_attribute<ov::element::Type>(name));
} else if (any.is<std::vector<int32_t>>() || any.is<std::vector<int64_t>>()) {
return py::cast(self.get_attribute<std::vector<ov::element::Type>>(name));
}
}
CAST_TO_PY(any, dtype, int32_t);
CAST_TO_PY(any, dtype, int64_t);
CAST_TO_PY(any, dtype, bool);
CAST_TO_PY(any, dtype, std::string);
CAST_TO_PY(any, dtype, float);
CAST_TO_PY(any, dtype, ov::element::Type);
CAST_TO_PY(any, dtype, ov::PartialShape);
CAST_VEC_TO_PY(any, dtype, std::vector<int32_t>);
CAST_VEC_TO_PY(any, dtype, std::vector<int64_t>);
#ifndef __APPLE__
// TODO: investigate the issue in pybind11 on MacOS
CAST_VEC_TO_PY(any, dtype, std::vector<bool>);
#endif
CAST_VEC_TO_PY(any, dtype, std::vector<std::string>);
CAST_VEC_TO_PY(any, dtype, std::vector<float>);
CAST_VEC_TO_PY(any, dtype, std::vector<ov::element::Type>);
CAST_VEC_TO_PY(any, dtype, std::vector<ov::PartialShape>);
if (default_value.is_none())
FRONT_END_GENERAL_CHECK(false, "Attribute ", name, " can't be converted to defined types.");
else
return default_value;
},
py::arg("name"),
py::arg("default_value") = py::none(),
py::arg("dtype") = py::none());
ext.def("get_input", [](NodeContext& self, int idx) {
return self.get_input(idx);
});
ext.def("get_input", [](NodeContext& self, const std::string& name) {
return self.get_input(name);
});
ext.def("get_input", [](NodeContext& self, const std::string& name, int idx) {
return self.get_input(name, idx);
});
ext.def("get_input_size", [](NodeContext& self) {
return self.get_input_size();
});
ext.def("get_input_size", [](NodeContext& self, std::string& name) {
return self.get_input_size(name);
});
ext.def("get_op_type", [](NodeContext& self, std::string& name) {
return self.get_op_type();
});
ext.def("has_attribute", [](NodeContext& self, std::string& name) {
return self.has_attribute(name);
});
}
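A hedged sketch of how the NodeContext binding above behaves inside a Python converter; the attribute names ("alpha", "axes", "dtype") are illustrative and not tied to any particular frontend.

# Sketch: NodeContext usage inside a custom converter (attribute names are illustrative).
from openvino.frontend import NodeContext
from openvino.runtime import Type

def converter(node: NodeContext):
    if node.has_attribute("alpha"):
        alpha = node.get_attribute("alpha")                   # returned with its native Python type
    axes = node.get_attribute("axes", default_value=[0])      # default returned when the attribute is absent
    dtype = node.get_attribute("dtype", dtype=Type)           # integer type codes are cast to openvino Type
    first_input = node.get_input(0)
    n_inputs = node.get_input_size()
    ...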


@ -0,0 +1,11 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <pybind11/pybind11.h>
namespace py = pybind11;
void regclass_frontend_NodeContext(py::module m);


@ -0,0 +1,6 @@
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
include(${PYTHON_SOURCE_DIR}/pyopenvino/frontend/frontend_module.cmake)
frontend_module(py_onnx_frontend onnx pyopenvino_${PYTHON_VERSION})


@ -0,0 +1,43 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "extension.hpp"
#include <pybind11/functional.h>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include <pybind11/stl_bind.h>
#include "openvino/frontend/onnx/extension/conversion.hpp"
#include "openvino/frontend/onnx/frontend.hpp"
#include "openvino/frontend/onnx/node_context.hpp"
namespace py = pybind11;
using namespace ov::frontend::onnx;
void regclass_frontend_onnx_ConversionExtension(py::module m) {
py::class_<ConversionExtension, ConversionExtension::Ptr, ov::frontend::ConversionExtensionBase> _ext(
m,
"_ConversionExtensionONNX",
py::dynamic_attr());
class PyConversionExtension : public ConversionExtension {
public:
using Ptr = std::shared_ptr<PyConversionExtension>;
using PyCreatorFunction = std::function<ov::OutputVector(const ov::frontend::NodeContext*)>;
PyConversionExtension(const std::string& op_type, const PyCreatorFunction& f)
: ConversionExtension(op_type, [f](const ov::frontend::NodeContext& node) -> ov::OutputVector {
return f(static_cast<const ov::frontend::NodeContext*>(&node));
}) {}
};
py::class_<PyConversionExtension, PyConversionExtension::Ptr, ConversionExtension> ext(m,
"ConversionExtensionONNX",
py::dynamic_attr());
ext.def(py::init([](const std::string& op_type, const PyConversionExtension::PyCreatorFunction& f) {
return std::make_shared<PyConversionExtension>(op_type, f);
}));
ext.def_property_readonly_static("m_converter", &ConversionExtension::get_converter);
}
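The class registered here is what openvino.frontend.onnx re-exports as ConversionExtension (see the __init__.py above); a short usage sketch, with fuller variants appearing in the Python tests later in this diff:

# Sketch: ONNX-specific conversion extension with a single-list creator function.
import openvino.runtime.opset8 as ops
from openvino.frontend.onnx import ConversionExtension  # ConversionExtensionONNX
from openvino.frontend import NodeContext

def convert_add(node: NodeContext):
    return [ops.add(node.get_input(0), node.get_input(1)).output(0)]

extension = ConversionExtension("Add", convert_add)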


@ -0,0 +1,11 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <pybind11/pybind11.h>
namespace py = pybind11;
void regclass_frontend_onnx_ConversionExtension(py::module m);


@ -0,0 +1,15 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <pybind11/pybind11.h>
#include <string>
#include "extension.hpp"
namespace py = pybind11;
PYBIND11_MODULE(py_onnx_frontend, m) {
regclass_frontend_onnx_ConversionExtension(m);
}


@ -0,0 +1,6 @@
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
include(${PYTHON_SOURCE_DIR}/pyopenvino/frontend/frontend_module.cmake)
frontend_module(py_paddle_frontend paddle pyopenvino_${PYTHON_VERSION})


@ -0,0 +1,43 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "extension.hpp"
#include <pybind11/functional.h>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include <pybind11/stl_bind.h>
#include "openvino/frontend/paddle/extension/conversion.hpp"
namespace py = pybind11;
using namespace ov::frontend::paddle;
void regclass_frontend_paddle_ConversionExtension(py::module m) {
py::class_<ConversionExtension, ConversionExtension::Ptr, ov::frontend::ConversionExtensionBase> _ext(
m,
"_ConversionExtensionPaddle",
py::dynamic_attr());
class PyConversionExtension : public ConversionExtension {
public:
using Ptr = std::shared_ptr<PyConversionExtension>;
using PyCreatorFunctionNamed =
std::function<std::map<std::string, ov::OutputVector>(const ov::frontend::NodeContext*)>;
PyConversionExtension(const std::string& op_type, const PyCreatorFunctionNamed& f)
: ConversionExtension(
op_type,
[f](const ov::frontend::NodeContext& node) -> std::map<std::string, ov::OutputVector> {
return f(static_cast<const ov::frontend::NodeContext*>(&node));
}) {}
};
py::class_<PyConversionExtension, PyConversionExtension::Ptr, ConversionExtension> ext(m,
"ConversionExtensionPaddle",
py::dynamic_attr());
ext.def(py::init([](const std::string& op_type, const PyConversionExtension::PyCreatorFunctionNamed& f) {
return std::make_shared<PyConversionExtension>(op_type, f);
}));
}
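Unlike the ONNX and TensorFlow bindings, the Paddle creator function returns a map of named output ports. A hedged sketch; the port names "X" and "Out" follow the usual Paddle convention and are assumptions here, not something this diff defines:

# Sketch: Paddle converters return named output ports.
# The port names "X" and "Out" are assumptions for illustration.
import openvino.runtime.opset8 as ops
from openvino.frontend.paddle import ConversionExtension  # ConversionExtensionPaddle
from openvino.frontend import NodeContext

def convert_relu(node: NodeContext):
    out = ops.relu(node.get_input("X"))
    return {"Out": [out.output(0)]}

extension = ConversionExtension("relu", convert_relu)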


@ -0,0 +1,11 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <pybind11/pybind11.h>
namespace py = pybind11;
void regclass_frontend_paddle_ConversionExtension(py::module m);


@ -0,0 +1,15 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <pybind11/pybind11.h>
#include <string>
#include "extension.hpp"
namespace py = pybind11;
PYBIND11_MODULE(py_paddle_frontend, m) {
regclass_frontend_paddle_ConversionExtension(m);
}


@ -0,0 +1,6 @@
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
include(${PYTHON_SOURCE_DIR}/pyopenvino/frontend/frontend_module.cmake)
frontend_module(py_tensorflow_frontend tensorflow tests)


@ -0,0 +1,41 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "extension.hpp"
#include <pybind11/functional.h>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include <pybind11/stl_bind.h>
#include "openvino/frontend/extension/conversion.hpp"
#include "openvino/frontend/tensorflow/extension/conversion.hpp"
namespace py = pybind11;
using namespace ov::frontend::tensorflow;
void regclass_frontend_tensorflow_ConversionExtension(py::module m) {
py::class_<ConversionExtension, ConversionExtension::Ptr, ov::frontend::ConversionExtensionBase> _ext(
m,
"_ConversionExtensionTensorflow",
py::dynamic_attr());
class PyConversionExtension : public ConversionExtension {
public:
using Ptr = std::shared_ptr<PyConversionExtension>;
using PyCreatorFunction = std::function<ov::OutputVector(const ov::frontend::NodeContext*)>;
PyConversionExtension(const std::string& op_type, const PyCreatorFunction& f)
: ConversionExtension(op_type, [f](const ov::frontend::NodeContext& node) -> ov::OutputVector {
return f(static_cast<const ov::frontend::NodeContext*>(&node));
}) {}
};
py::class_<PyConversionExtension, PyConversionExtension::Ptr, ConversionExtension> ext(
m,
"ConversionExtensionTensorflow",
py::dynamic_attr());
ext.def(py::init([](const std::string& op_type, const PyConversionExtension::PyCreatorFunction& f) {
return std::make_shared<PyConversionExtension>(op_type, f);
}));
}
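A brief sketch of wiring the TensorFlow-specific class into a frontend obtained from the manager; the frontend name "tf" matches the one used by the Python tests in this commit, and "CustomOp" is a placeholder operation type:

# Sketch: register a TensorFlow conversion extension on a frontend loaded by name.
from openvino.frontend import FrontEndManager
from openvino.frontend.tensorflow import ConversionExtension  # ConversionExtensionTensorflow

fem = FrontEndManager()
fe = fem.load_by_framework("tf")  # requires the TF frontend to be built
fe.add_extension(ConversionExtension("CustomOp", lambda node: [node.get_input(0)]))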


@ -0,0 +1,11 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <pybind11/pybind11.h>
namespace py = pybind11;
void regclass_frontend_tensorflow_ConversionExtension(py::module m);


@ -0,0 +1,15 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <pybind11/pybind11.h>
#include <string>
#include "extension.hpp"
namespace py = pybind11;
PYBIND11_MODULE(py_tensorflow_frontend, m) {
regclass_frontend_tensorflow_ConversionExtension(m);
}


@ -32,10 +32,11 @@
#include "pyopenvino/core/tensor.hpp"
#include "pyopenvino/core/variable_state.hpp"
#include "pyopenvino/core/version.hpp"
#include "pyopenvino/frontend/extensions.hpp"
#include "pyopenvino/frontend/extension.hpp"
#include "pyopenvino/frontend/frontend.hpp"
#include "pyopenvino/frontend/inputmodel.hpp"
#include "pyopenvino/frontend/input_model.hpp"
#include "pyopenvino/frontend/manager.hpp"
#include "pyopenvino/frontend/node_context.hpp"
#include "pyopenvino/frontend/place.hpp"
#include "pyopenvino/graph/any.hpp"
#include "pyopenvino/graph/descriptors/tensor.hpp"
@ -133,6 +134,7 @@ PYBIND11_MODULE(pyopenvino, m) {
regclass_ProfilingInfo(m);
regclass_Extension(m);
// frontend
regclass_frontend_Place(m);
regclass_frontend_InitializationFailureFrontEnd(m);
regclass_frontend_GeneralFailureFrontEnd(m);
@ -142,10 +144,16 @@ PYBIND11_MODULE(pyopenvino, m) {
regclass_frontend_FrontEndManager(m);
regclass_frontend_FrontEnd(m);
regclass_frontend_InputModel(m);
regclass_frontend_NodeContext(m);
// frontend extensions
regclass_frontend_TelemetryExtension(m);
regclass_frontend_DecoderTransformationExtension(m);
regclass_frontend_JsonConfigExtension(m);
regclass_frontend_ConversionExtensionBase(m);
regclass_frontend_ConversionExtension(m);
regclass_frontend_ProgressReporterExtension(m);
// transformations
regmodule_offline_transformations(m);
}


@ -4,18 +4,38 @@
set(TARGET_FE_NAME "ov_mock_py_frontend")
file(GLOB_RECURSE LIBRARY_SRC ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
file(GLOB_RECURSE LIBRARY_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)
file(GLOB_RECURSE LIBRARY_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/*.cpp)
file(GLOB_RECURSE LIBRARY_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/include/*.hpp)
source_group("src" FILES ${LIBRARY_SRC})
source_group("include" FILES ${LIBRARY_HEADERS})
# Create shared library
add_library(${TARGET_FE_NAME} SHARED ${LIBRARY_SRC} ${LIBRARY_HEADERS})
add_library(${TARGET_FE_NAME} ${LIBRARY_SRC} ${LIBRARY_HEADERS})
target_include_directories(${TARGET_FE_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR})
target_include_directories(${TARGET_FE_NAME}
PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include
PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src)
target_link_libraries(${TARGET_FE_NAME} PRIVATE frontend_common::static)
set(DEPENDENCIES frontend_common::static)
set(DEFINITIONS)
if (ENABLE_OV_ONNX_FRONTEND)
list(APPEND DEPENDENCIES openvino::frontend::onnx)
list(APPEND DEFINITIONS ENABLE_OV_ONNX_FRONTEND)
endif()
if (ENABLE_OV_TF_FRONTEND)
list(APPEND DEPENDENCIES openvino::frontend::tensorflow)
list(APPEND DEFINITIONS ENABLE_OV_TF_FRONTEND)
endif()
if (ENABLE_OV_PADDLE_FRONTEND)
list(APPEND DEPENDENCIES openvino::frontend::paddle)
list(APPEND DEFINITIONS ENABLE_OV_PADDLE_FRONTEND)
endif()
target_compile_definitions(${TARGET_FE_NAME} PRIVATE ${DEFINITIONS})
target_link_libraries(${TARGET_FE_NAME} PRIVATE ${DEPENDENCIES})
add_clang_format_target(${TARGET_FE_NAME}_clang FOR_TARGETS ${TARGET_FE_NAME})


@ -0,0 +1,61 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#ifdef ENABLE_OV_ONNX_FRONTEND
# include "openvino/frontend/onnx/frontend.hpp"
#endif
#ifdef ENABLE_OV_PADDLE_FRONTEND
# include "openvino/frontend/paddle/frontend.hpp"
#endif
#ifdef ENABLE_OV_TF_FRONTEND
# include "openvino/frontend/tensorflow/frontend.hpp"
#endif
#include "visibility.hpp"
#ifdef ENABLE_OV_ONNX_FRONTEND
// TODO: create Wrapper for ONNX. How to check that converter is actually registered?
// m_op_translators is some internal entity for ONNX FrontEnd
/*class MOCK_API FrontEndWrapperONNX : public ov::frontend::onnx::FrontEnd {
public:
void add_extension(const std::shared_ptr<ov::Extension>& extension) override {
FrontEnd::add_extension(extension);
}
bool check_conversion_extension_registered(const std::string& name) {
return m_op_translators.find(name) != m_op_translators.end();
}
};*/
#endif
#ifdef ENABLE_OV_TF_FRONTEND
class MOCK_API FrontEndWrapperTensorflow : public ov::frontend::tensorflow::FrontEnd {
public:
FrontEndWrapperTensorflow() = default;
void add_extension(const std::shared_ptr<ov::Extension>& extension) override {
FrontEnd::add_extension(extension);
}
bool check_conversion_extension_registered(const std::string& name) {
return m_op_translators.find(name) != m_op_translators.end();
}
};
#endif
#ifdef ENABLE_OV_PADDLE_FRONTEND
class MOCK_API FrontEndWrapperPaddle : public ov::frontend::paddle::FrontEnd {
public:
FrontEndWrapperPaddle() = default;
void add_extension(const std::shared_ptr<ov::Extension>& extension) override {
FrontEnd::add_extension(extension);
}
bool check_conversion_extension_registered(const std::string& name) {
return m_op_translators.find(name) != m_op_translators.end();
}
};
#endif


@ -4,20 +4,11 @@
#pragma once
#include "ngraph/visibility.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
#include "openvino/frontend/manager.hpp"
#include "openvino/frontend/visibility.hpp"
// Defined if we are building the plugin DLL (instead of using it)
#ifdef ov_mock_py_frontend_EXPORTS
# define MOCK_API OPENVINO_CORE_EXPORTS
#else
# define MOCK_API OPENVINO_CORE_IMPORTS
#endif // ov_mock_py_frontend_EXPORTS
#include "visibility.hpp"
// OK to have 'using' in mock header
using namespace ngraph;
using namespace ov::frontend;


@ -0,0 +1,17 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "openvino/core/visibility.hpp"
#ifdef OPENVINO_STATIC_LIBRARY
# define MOCK_API
#else
# ifdef IMPLEMENT_OPENVINO_API
# define MOCK_API OPENVINO_CORE_EXPORTS
# else
# define MOCK_API OPENVINO_CORE_IMPORTS
# endif // IMPLEMENT_OPENVINO_API
#endif // OPENVINO_STATIC_LIBRARY


@ -2,7 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "mock_py_frontend.hpp"
#include "ov_mock_py_frontend/mock_py_frontend.hpp"
#include "openvino/frontend/manager.hpp"
#include "openvino/frontend/visibility.hpp"


@ -2,7 +2,6 @@
# SPDX-License-Identifier: Apache-2.0
#
set(TARGET_FE_NAME "ov_mock_py_frontend")
set(PYBIND_FE_NAME "pybind_mock_frontend")
set(PYBIND_FE_SRC ${CMAKE_CURRENT_SOURCE_DIR}/pyngraph_mock_frontend_api.cpp)
@ -11,7 +10,26 @@ source_group("src" FILES ${PYBIND_FE_SRC})
pybind11_add_module(${PYBIND_FE_NAME} MODULE ${PYBIND_FE_SRC})
target_link_libraries(${PYBIND_FE_NAME} PRIVATE ${TARGET_FE_NAME} frontend_common::static)
set(DEPENDENCIES ov_mock_py_frontend openvino::runtime)
set(DEFINITIONS)
if (ENABLE_OV_ONNX_FRONTEND)
list(APPEND DEPENDENCIES openvino::frontend::onnx)
list(APPEND DEFINITIONS ENABLE_OV_ONNX_FRONTEND)
endif()
if (ENABLE_OV_TF_FRONTEND)
list(APPEND DEPENDENCIES openvino::frontend::tensorflow)
list(APPEND DEFINITIONS ENABLE_OV_TF_FRONTEND)
endif()
if (ENABLE_OV_PADDLE_FRONTEND)
list(APPEND DEPENDENCIES openvino::frontend::paddle)
list(APPEND DEFINITIONS ENABLE_OV_PADDLE_FRONTEND)
endif()
target_compile_definitions(${PYBIND_FE_NAME} PRIVATE ${DEFINITIONS})
target_link_libraries(${PYBIND_FE_NAME} PRIVATE ${DEPENDENCIES})
add_clang_format_target(${PYBIND_FE_NAME}_clang FOR_TARGETS ${PYBIND_FE_NAME})


@ -5,12 +5,17 @@
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include "../ov_mock_py_frontend/mock_py_frontend.hpp"
#include "ov_mock_py_frontend/frontend_wrappers.hpp"
#include "ov_mock_py_frontend/mock_py_frontend.hpp"
namespace py = pybind11;
using namespace ngraph;
using namespace ov::frontend;
FeStat FrontEndMockPy::m_stat = {};
ModelStat InputModelMockPy::m_stat = {};
PlaceStat PlaceMockPy::m_stat = {};
static void register_mock_frontend_stat(py::module m) {
m.def("get_fe_stat", &FrontEndMockPy::get_stat);
m.def("clear_fe_stat", &FrontEndMockPy::clear_stat);
@ -92,9 +97,44 @@ static void register_mock_place_stat(py::module m) {
placeStat.def_property_readonly("get_source_tensor", &PlaceStat::get_source_tensor);
}
static void register_frontend_wrappers(py::module m) {
#ifdef ENABLE_OV_PADDLE_FRONTEND
py::class_<FrontEndWrapperPaddle, std::shared_ptr<FrontEndWrapperPaddle>> fe_paddle(m,
"FrontEndWrapperPaddle",
py::dynamic_attr());
fe_paddle.def(py::init([]() {
return std::make_shared<FrontEndWrapperPaddle>();
}));
fe_paddle.def(
"add_extension",
static_cast<void (FrontEnd::*)(const std::shared_ptr<ov::Extension>& extension)>(&FrontEnd::add_extension));
fe_paddle.def("check_conversion_extension_registered", [](FrontEndWrapperPaddle& self, const std::string& name) {
return self.check_conversion_extension_registered(name);
});
#endif
#ifdef ENABLE_OV_TF_FRONTEND
py::class_<FrontEndWrapperTensorflow, std::shared_ptr<FrontEndWrapperTensorflow>> fe_tensorflow(
m,
"FrontEndWrapperTensorflow",
py::dynamic_attr());
fe_tensorflow.def(py::init([]() {
return std::make_shared<FrontEndWrapperTensorflow>();
}));
fe_tensorflow.def(
"add_extension",
static_cast<void (FrontEnd::*)(const std::shared_ptr<ov::Extension>& extension)>(&FrontEnd::add_extension));
fe_tensorflow.def("check_conversion_extension_registered",
[](FrontEndWrapperTensorflow& self, const std::string& name) {
return self.check_conversion_extension_registered(name);
});
#endif
}
PYBIND11_MODULE(pybind_mock_frontend, m) {
m.doc() = "Mock frontend call counters for testing Pyngraph frontend bindings";
register_mock_frontend_stat(m);
register_mock_model_stat(m);
register_mock_place_stat(m);
register_frontend_wrappers(m);
}


@ -0,0 +1,2 @@
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0


@ -0,0 +1,60 @@
# Copyright (C) 2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import pytest
from openvino.frontend import FrontEndManager
mock_available = True
try:
from openvino.pybind_mock_frontend import FrontEndWrapperPaddle, FrontEndWrapperTensorflow
except Exception:
print("No mock frontend available")
mock_available = False
# FrontEndManager shall be initialized and destroyed after all tests finished
# This is because destroy of FrontEndManager will unload all plugins, no objects shall exist after this
fem = FrontEndManager()
TENSORFLOW_FRONTEND_NAME = "tf"
PADDLE_FRONTEND_NAME = "paddle"
mock_needed = pytest.mark.skipif(not mock_available, reason="mock fe is not available")
def skip_if_frontend_is_disabled(frontend_name):
front_ends = fem.get_available_front_ends()
if frontend_name not in front_ends:
pytest.skip()
@mock_needed
def test_tensorflow_conversion_extension_fe_wrapper():
skip_if_frontend_is_disabled(TENSORFLOW_FRONTEND_NAME)
from openvino.frontend.tensorflow import ConversionExtension
from openvino.frontend import NodeContext
fe = FrontEndWrapperTensorflow()
def custom_converter(node: NodeContext):
node.get_input(0)
node.get_attribute("alpha")
fe.add_extension(ConversionExtension("CustomConverter", custom_converter))
assert fe.check_conversion_extension_registered("CustomConverter")
@mock_needed
def test_paddle_conversion_extension_fe_wrapper():
skip_if_frontend_is_disabled(PADDLE_FRONTEND_NAME)
from openvino.frontend.paddle import ConversionExtension
from openvino.frontend import NodeContext
fe = FrontEndWrapperPaddle()
def custom_converter(node: NodeContext):
node.get_input(0)
node.get_attribute("alpha")
fe.add_extension(ConversionExtension("CustomConverter", custom_converter))
assert fe.check_conversion_extension_registered("CustomConverter")


@ -13,7 +13,10 @@ from tests.runtime import get_runtime
def create_onnx_model():
add = onnx.helper.make_node("Add", inputs=["x", "y"], outputs=["z"])
const_tensor = onnx.helper.make_tensor("const_tensor", onnx.TensorProto.FLOAT, (2, 2), [0.5, 1, 1.5, 2.0])
const_tensor = onnx.helper.make_tensor("const_tensor",
onnx.TensorProto.FLOAT,
(2, 2),
[0.5, 1, 1.5, 2.0])
const_node = onnx.helper.make_node("Constant", [], outputs=["const_node"],
value=const_tensor, name="const_node")
mul = onnx.helper.make_node("Mul", inputs=["z", "const_node"], outputs=["out"])
@ -52,6 +55,42 @@ def create_onnx_model_with_subgraphs():
return make_model(graph, producer_name="ngraph ONNX Importer")
def create_onnx_model_with_custom_attributes():
add = onnx.helper.make_node("Add", inputs=["x", "y"], outputs=["z"],
attribute_i32=np.int32(10),
attribute_i64=np.int64(10),
attribute_str="string",
attribute_f32=np.float(10),
attribute_f64=np.float64(10),
attribute_bool=np.bool(True),
attribute_type=onnx.TensorProto.INT32,
attribute_list_i32=np.array([1, 2, 3], dtype=np.int32),
attribute_list_i64=np.array([1, 2, 3], dtype=np.int64),
attribute_list_str=np.array(["a", "b", "c"], dtype=np.str),
attribute_list_f32=np.array([1, 2, 3], dtype=np.float),
attribute_list_f64=np.array([1, 2, 3], dtype=np.float64),
attribute_list_bool=[True, False, True],
attribute_list_type=np.array([onnx.TensorProto.INT32,
onnx.TensorProto.FLOAT]),
)
const_tensor = onnx.helper.make_tensor("const_tensor",
onnx.TensorProto.FLOAT,
(2, 2),
[0.5, 1, 1.5, 2.0])
const_node = onnx.helper.make_node("Constant", [], outputs=["const_node"],
value=const_tensor, name="const_node")
mul = onnx.helper.make_node("Mul", inputs=["z", "const_node"], outputs=["out"])
input_tensors = [
make_tensor_value_info("x", onnx.TensorProto.FLOAT, (2, 2)),
make_tensor_value_info("y", onnx.TensorProto.FLOAT, (2, 2)),
]
output_tensors = [make_tensor_value_info("out", onnx.TensorProto.FLOAT, (2, 2))]
graph = make_graph([add, const_node, mul], "graph", input_tensors, output_tensors)
return make_model(graph, producer_name="ngraph ONNX Importer")
def run_function(function, *inputs, expected):
runtime = get_runtime()
computation = runtime.computation(function)
@ -61,19 +100,25 @@ def run_function(function, *inputs, expected):
np.testing.assert_allclose(expected[i], actual[i], rtol=1e-3, atol=1e-6)
# FrontEndManager shall be initialized and destroyed after all tests finished
# This is because destroy of FrontEndManager will unload all plugins, no objects shall exist after this
fem = FrontEndManager()
onnx_model_filename = "model.onnx"
onnx_model_with_custom_attributes_filename = "model_custom_attributes.onnx"
onnx_model_with_subgraphs_filename = "model_subgraphs.onnx"
ONNX_FRONTEND_NAME = "onnx"
def setup_module():
onnx.save_model(create_onnx_model(), onnx_model_filename)
onnx.save_model(create_onnx_model_with_custom_attributes(),
onnx_model_with_custom_attributes_filename)
onnx.save_model(create_onnx_model_with_subgraphs(), onnx_model_with_subgraphs_filename)
def teardown_module():
os.remove(onnx_model_filename)
os.remove(onnx_model_with_custom_attributes_filename)
os.remove(onnx_model_with_subgraphs_filename)
@ -149,3 +194,232 @@ def test_load_by_model():
assert not fem.load_by_model("test.xx")
assert not fem.load_by_model("onnx.yy")
def test_onnx_conversion_extension_check_attributes():
skip_if_onnx_frontend_is_disabled()
# use specific (openvino.frontend.onnx) import here
from openvino.frontend.onnx import ConversionExtension
from openvino.frontend import NodeContext
import openvino.runtime.opset8 as ops
# use the model with attributes
fe = fem.load_by_model(onnx_model_with_custom_attributes_filename)
assert fe
assert fe.get_name() == "onnx"
invoked = False
def custom_converter(node: NodeContext):
nonlocal invoked
invoked = True
def check_attribute(context, name, expected_type, expected_value):
assert context.has_attribute(name)
attribute = context.get_attribute(name)
assert type(attribute) == expected_type
assert attribute == expected_value
check_attribute(node, "attribute_i32", int, 10)
check_attribute(node, "attribute_i64", int, 10)
check_attribute(node, "attribute_str", str, "string")
check_attribute(node, "attribute_f32", float, 10.)
check_attribute(node, "attribute_f64", float, 10.)
check_attribute(node, "attribute_bool", int, 1)
check_attribute(node, "attribute_type", int, 6)
check_attribute(node, "attribute_list_i32", list, [1, 2, 3])
check_attribute(node, "attribute_list_i64", list, [1, 2, 3])
check_attribute(node, "attribute_list_str", list, ["a", "b", "c"])
check_attribute(node, "attribute_list_f32", list, [1., 2., 3.])
check_attribute(node, "attribute_list_f64", list, [1., 2., 3.])
check_attribute(node, "attribute_list_bool", list, [1, 0, 1])
check_attribute(node, "attribute_list_type", list, [6, 1])
a = node.get_input(0)
b = node.get_input(1)
add = ops.add(a, b)
return [add.output(0)]
fe.add_extension(ConversionExtension("Add", custom_converter))
input_model = fe.load(onnx_model_with_custom_attributes_filename)
assert input_model
model = fe.convert(input_model)
assert model
assert invoked
def test_onnx_conversion_extension_attribute_with_default_value():
skip_if_onnx_frontend_is_disabled()
# use specific (openvino.frontend.onnx) import here
from openvino.frontend.onnx import ConversionExtension
from openvino.frontend import NodeContext
import openvino.runtime.opset8 as ops
# use the model without attributes
fe = fem.load_by_model(onnx_model_filename)
assert fe
assert fe.get_name() == "onnx"
invoked = False
def custom_converter(node: NodeContext):
nonlocal invoked
invoked = True
def check_attribute(context, name, default_value):
assert not context.has_attribute(name)
attribute = context.get_attribute(name, default_value)
assert type(attribute) == type(default_value)
if isinstance(attribute, np.ndarray):
assert np.all(attribute == default_value)
else:
assert attribute == default_value
check_attribute(node, "attribute_i32", np.int32(5))
check_attribute(node, "attribute_i64", np.int64(5))
check_attribute(node, "attribute_str", "abc")
check_attribute(node, "attribute_f32", np.float32(5))
check_attribute(node, "attribute_f64", np.float64(5))
check_attribute(node, "attribute_bool", np.bool(False))
check_attribute(node, "attribute_type", onnx.TensorProto.FLOAT)
check_attribute(node, "attribute_list_i32", np.array([4, 5, 6], dtype=np.int32))
check_attribute(node, "attribute_list_i64", np.array([4, 5, 6], dtype=np.int64))
check_attribute(node, "attribute_list_str", np.array(["d", "e", "f"], dtype=np.str))
check_attribute(node, "attribute_list_f32", np.array([4, 5, 6], dtype=np.float))
check_attribute(node, "attribute_list_f64", np.array([4, 5, 6], dtype=np.float64))
check_attribute(node, "attribute_list_bool", np.array([True, False, True], dtype=np.bool))
check_attribute(node, "attribute_list_type", np.array([onnx.TensorProto.INT32,
onnx.TensorProto.FLOAT]))
a = node.get_input(0)
b = node.get_input(1)
add = ops.add(a, b)
return [add.output(0)]
fe.add_extension(ConversionExtension("Add", custom_converter))
input_model = fe.load(onnx_model_filename)
assert input_model
model = fe.convert(input_model)
assert model
assert invoked
def test_onnx_conversion_extension_cast_attributes():
skip_if_onnx_frontend_is_disabled()
# use specific (openvino.frontend.onnx) import here
from openvino.frontend.onnx import ConversionExtension
from openvino.frontend import NodeContext
from openvino.runtime import Type
import openvino.runtime.opset8 as ops
# use the model without attributes
fe = fem.load_by_model(onnx_model_with_custom_attributes_filename)
assert fe
assert fe.get_name() == "onnx"
invoked = False
def custom_converter(node: NodeContext):
nonlocal invoked
invoked = True
def check_attribute(context, name, expected_value, dtype):
attribute = context.get_attribute(name, dtype=dtype)
if isinstance(attribute, list):
assert type(attribute[0]) == dtype
else:
assert type(attribute) == dtype
assert attribute == expected_value
check_attribute(node, "attribute_i32", 10, float)
check_attribute(node, "attribute_i64", 10, float)
check_attribute(node, "attribute_str", "string", np.str)
check_attribute(node, "attribute_f32", 10, int)
check_attribute(node, "attribute_f64", 10, int)
check_attribute(node, "attribute_bool", True, bool)
check_attribute(node, "attribute_type", Type.i32, Type)
check_attribute(node, "attribute_list_i32", [1., 2., 3.], float)
check_attribute(node, "attribute_list_i64", [1., 2., 3.], float)
check_attribute(node, "attribute_list_str", ["a", "b", "c"], np.str)
check_attribute(node, "attribute_list_f32", [1, 2, 3], int)
check_attribute(node, "attribute_list_f64", [1, 2, 3], int)
check_attribute(node, "attribute_list_bool", [True, False, True], bool)
check_attribute(node, "attribute_list_type", [Type.i32, Type.f32], Type)
a = node.get_input(0)
b = node.get_input(1)
add = ops.add(a, b)
return [add.output(0)]
fe.add_extension(ConversionExtension("Add", custom_converter))
input_model = fe.load(onnx_model_with_custom_attributes_filename)
assert input_model
model = fe.convert(input_model)
assert model
assert invoked
def test_onnx_conversion_extension_common():
skip_if_onnx_frontend_is_disabled()
# use common (openvino.frontend) import here
from openvino.frontend import ConversionExtension
from openvino.frontend import NodeContext
import openvino.runtime.opset8 as ops
fe = fem.load_by_model(onnx_model_filename)
assert fe
assert fe.get_name() == "onnx"
invoked = False
def custom_converter(node: NodeContext):
nonlocal invoked
invoked = True
a = node.get_input(0)
b = node.get_input(1)
add = ops.add(a, b)
return [add.output(0)]
fe.add_extension(ConversionExtension("Add", custom_converter))
input_model = fe.load(onnx_model_filename)
assert input_model
model = fe.convert(input_model)
assert model
assert invoked
def test_onnx_conversion_extension():
skip_if_onnx_frontend_is_disabled()
# use specific (openvino.frontend.onnx) import here
from openvino.frontend.onnx import ConversionExtension
from openvino.frontend import NodeContext
import openvino.runtime.opset8 as ops
fe = fem.load_by_model(onnx_model_filename)
assert fe
assert fe.get_name() == "onnx"
invoked = False
def custom_converter(node: NodeContext):
nonlocal invoked
invoked = True
a = node.get_input(0)
b = node.get_input(1)
add = ops.add(a, b)
return [add.output(0)]
fe.add_extension(ConversionExtension("Add", custom_converter))
input_model = fe.load(onnx_model_filename)
assert input_model
model = fe.convert(input_model)
assert model
assert invoked


@ -1367,12 +1367,14 @@ def test_set_tensor_value():
def test_not_supported_methods():
skip_if_onnx_frontend_is_disabled()
from openvino.frontend import GeneralFailure
fe = fem.load_by_framework(framework=ONNX_FRONTEND_NAME)
model = fe.load("test_place_names.onnx")
with pytest.raises(Exception) as e:
with pytest.raises(GeneralFailure) as e:
model.free_name_for_tensor("add_out")
assert "not applicable for ONNX model" in str(e)
assert "not applicable for ONNX model" in str(e.value)
def test_set_name_for_tensor():
@ -1389,18 +1391,18 @@ def test_set_name_for_tensor():
with pytest.raises(Exception) as e:
model.set_name_for_tensor(tensor=tensor, new_name="")
assert "name must not be empty" in str(e)
assert "name must not be empty" in str(e.value)
# ONNX model stores tensor info separately for inputs, outputs and between nodes tensors
with pytest.raises(Exception) as e:
model.set_name_for_tensor(tensor=tensor, new_name="in1")
assert "already used by another tensor" in str(e)
assert "already used by another tensor" in str(e.value)
with pytest.raises(Exception) as e:
model.set_name_for_tensor(tensor=tensor, new_name="out1")
assert "already used by another tensor" in str(e)
assert "already used by another tensor" in str(e.value)
with pytest.raises(Exception) as e:
model.set_name_for_tensor(tensor=tensor, new_name="sub_out")
assert "already used by another tensor" in str(e)
assert "already used by another tensor" in str(e.value)
# actual rename
model.set_name_for_tensor(tensor=tensor, new_name=new_name)
@ -1497,12 +1499,12 @@ def test_set_name_for_dimension():
with pytest.raises(Exception) as e:
model.set_name_for_dimension(input1, 0, "")
assert "name must not be empty" in str(e)
assert "name must not be empty" in str(e.value)
one_const = model.get_place_by_tensor_name(tensor_name="one_const")
with pytest.raises(Exception) as e:
model.set_name_for_dimension(one_const, 0, dim_name)
assert "ONNX initializer shape dimension cannot be dynamic." in str(e)
assert "ONNX initializer shape dimension cannot be dynamic." in str(e.value)
def test_set_input_partial_shape_using_input_edge():
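The str(e) -> str(e.value) changes above deserve a note: pytest.raises yields an ExceptionInfo wrapper, whose string form is a wrapper repr (or a file/line entry in older pytest) rather than the exception message, so substring checks against it are unreliable; e.value is the raised exception itself. A small sketch:

# Sketch: assert against the exception instance, not the ExceptionInfo wrapper.
import pytest

def test_message_is_checked_on_the_exception_instance():
    with pytest.raises(ValueError) as e:
        raise ValueError("name must not be empty")
    assert e.type is ValueError
    assert "name must not be empty" in str(e.value)  # message of the raised exception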


@ -22,7 +22,7 @@ public:
protected:
std::vector<Extension::Ptr> m_loaded_extensions;
std::vector<std::pair<std::shared_ptr<DecoderTransformationExtension>, std::string>> m_target_extensions;
std::vector<std::pair<DecoderTransformationExtension::Ptr, std::string>> m_target_extensions;
};
} // namespace frontend
} // namespace ov


@ -8,7 +8,7 @@ file(GLOB_RECURSE SRC ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
add_executable(${TARGET_NAME} ${SRC})
target_link_libraries(${TARGET_NAME} PRIVATE frontend_shared_test_classes)
target_link_libraries(${TARGET_NAME} PRIVATE frontend_shared_test_classes ov_onnx_frontend frontend_common)
add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})


@ -0,0 +1,52 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "conversion_extension.hpp"
#include "onnx_utils.hpp"
#include "openvino/frontend/onnx/frontend.hpp"
#include "so_extension.hpp"
using namespace ov::frontend;
using ONNXConversionExtensionTest = FrontEndConversionExtensionTest;
static const std::string translator_name = "Add";
class ONNXFrontendWrapper : public ov::frontend::onnx::FrontEnd {
void add_extension(const std::shared_ptr<ov::Extension>& extension) override {
ov::frontend::onnx::FrontEnd::add_extension(extension);
if (auto conv_ext = std::dynamic_pointer_cast<ConversionExtension>(extension)) {
EXPECT_NE(std::find(m_conversion_extensions.begin(), m_conversion_extensions.end(), conv_ext),
m_conversion_extensions.end())
<< "ConversionExtension is not registered.";
// TODO: check that operator is actually registered in ONNX FE
// EXPECT_NE(m_op_translators.find(conv_ext->get_op_type()), m_op_translators.end())
// << conv_ext->get_op_type() << " translator is not registered.";
} else if (auto telemetry = std::dynamic_pointer_cast<TelemetryExtension>(extension)) {
EXPECT_EQ(m_extensions.telemetry, telemetry) << "TelemetryExtension is not registered.";
} else if (auto transformation = std::dynamic_pointer_cast<DecoderTransformationExtension>(extension)) {
EXPECT_NE(std::find(m_transformation_extensions.begin(), m_transformation_extensions.end(), transformation),
m_transformation_extensions.end())
<< "DecoderTransformationExtension is not registered.";
} else if (auto so_ext = std::dynamic_pointer_cast<ov::detail::SOExtension>(extension)) {
EXPECT_NE(std::find(m_other_extensions.begin(), m_other_extensions.end(), so_ext), m_other_extensions.end())
<< "SOExtension is not registered.";
}
}
};
static ConversionExtensionFEParam getTestData() {
ConversionExtensionFEParam res;
res.m_frontEndName = ONNX_FE;
res.m_modelsPath = std::string(TEST_ONNX_MODELS_DIRNAME);
res.m_modelName = "controlflow/loop_2d_add.onnx";
res.m_translatorName = translator_name;
res.m_frontend = std::make_shared<ONNXFrontendWrapper>();
return res;
}
INSTANTIATE_TEST_SUITE_P(ONNXConversionExtensionTest,
FrontEndConversionExtensionTest,
::testing::Values(getTestData()),
FrontEndConversionExtensionTest::getTestCaseName);


@ -8,7 +8,7 @@ file(GLOB_RECURSE SRC ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
add_executable(${TARGET_NAME} ${SRC})
target_link_libraries(${TARGET_NAME} PRIVATE frontend_shared_test_classes)
target_link_libraries(${TARGET_NAME} PRIVATE frontend_shared_test_classes ov_paddle_frontend openvino::runtime)
add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})


@ -0,0 +1,53 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "conversion_extension.hpp"
#include "openvino/frontend/exception.hpp"
#include "openvino/frontend/paddle/frontend.hpp"
#include "paddle_utils.hpp"
#include "so_extension.hpp"
using namespace ov::frontend;
using PDPDConversionExtensionTest = FrontEndConversionExtensionTest;
static const std::string translator_name = "relu";
class PaddleFrontendWrapper : public ov::frontend::paddle::FrontEnd {
void add_extension(const std::shared_ptr<ov::Extension>& extension) override {
ov::frontend::paddle::FrontEnd::add_extension(extension);
if (auto conv_ext = std::dynamic_pointer_cast<ConversionExtension>(extension)) {
EXPECT_NE(std::find(m_conversion_extensions.begin(), m_conversion_extensions.end(), conv_ext),
m_conversion_extensions.end())
<< "ConversionExtension is not registered.";
EXPECT_NE(m_op_translators.find(conv_ext->get_op_type()), m_op_translators.end())
<< conv_ext->get_op_type() << " translator is not registered.";
} else if (auto telemetry = std::dynamic_pointer_cast<TelemetryExtension>(extension)) {
EXPECT_EQ(m_telemetry, telemetry) << "TelemetryExtension is not registered.";
} else if (auto transformation = std::dynamic_pointer_cast<DecoderTransformationExtension>(extension)) {
EXPECT_NE(std::find(m_transformation_extensions.begin(), m_transformation_extensions.end(), transformation),
m_transformation_extensions.end())
<< "DecoderTransformationExtension is not registered.";
} else if (auto so_ext = std::dynamic_pointer_cast<ov::detail::SOExtension>(extension)) {
EXPECT_NE(std::find(m_extensions.begin(), m_extensions.end(), so_ext), m_extensions.end())
<< "SOExtension is not registered.";
}
}
};
static ConversionExtensionFEParam getTestData() {
ConversionExtensionFEParam res;
res.m_frontEndName = PADDLE_FE;
res.m_modelsPath = std::string(TEST_PADDLE_MODELS_DIRNAME);
res.m_modelName = "relu/relu.pdmodel";
res.m_translatorName = translator_name;
res.m_frontend = std::make_shared<PaddleFrontendWrapper>();
return res;
}
INSTANTIATE_TEST_SUITE_P(PDPDConversionExtensionTest,
FrontEndConversionExtensionTest,
::testing::Values(getTestData()),
FrontEndConversionExtensionTest::getTestCaseName);

View File

@ -0,0 +1,31 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <gtest/gtest.h>
#include <openvino/frontend/extension/conversion.hpp>
#include <openvino/frontend/manager.hpp>
struct ConversionExtensionFEParam {
std::string m_frontEndName;
std::string m_modelsPath;
std::string m_modelName;
std::string m_translatorName;
std::shared_ptr<ov::frontend::FrontEnd> m_frontend;
};
class FrontEndConversionExtensionTest : public ::testing::TestWithParam<ConversionExtensionFEParam> {
public:
ConversionExtensionFEParam m_param;
ov::frontend::FrontEndManager m_fem;
static std::string getTestCaseName(const testing::TestParamInfo<ConversionExtensionFEParam>& obj);
void SetUp() override;
protected:
void initParamTest();
};

View File

@ -19,7 +19,7 @@ public:
return m_loaded_extensions;
};
std::vector<std::pair<std::shared_ptr<DecoderTransformationExtension>, std::string>> get_target_extensions() {
std::vector<std::pair<DecoderTransformationExtension::Ptr, std::string>> get_target_extensions() {
return m_target_extensions;
}
};

View File

@ -0,0 +1,87 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <openvino/frontend/extension/conversion.hpp>
#include <openvino/frontend/extension/decoder_transformation.hpp>
#include <openvino/op/util/framework_node.hpp>
#include <openvino/opsets/opset8.hpp>
#include "conversion_extension.hpp"
#include "utils.hpp"
using namespace ov::frontend;
std::string FrontEndConversionExtensionTest::getTestCaseName(
const testing::TestParamInfo<ConversionExtensionFEParam>& obj) {
std::string res = obj.param.m_frontEndName + "_" + obj.param.m_modelName;
return FrontEndTestUtils::fileToTestName(res);
}
void FrontEndConversionExtensionTest::SetUp() {
FrontEndTestUtils::setupTestEnv();
initParamTest();
}
void FrontEndConversionExtensionTest::initParamTest() {
m_param = GetParam();
m_param.m_modelName = FrontEndTestUtils::make_model_path(m_param.m_modelsPath + m_param.m_modelName);
}
inline std::string get_lib_path(const std::string& lib_name) {
return ov::util::make_plugin_library_name<char>(ov::util::get_ov_lib_path(), lib_name + IE_BUILD_POSTFIX);
}
///////////////////////////////////////////////////////////////////
TEST_P(FrontEndConversionExtensionTest, TestConversionExtension) {
auto frontend = m_param.m_frontend;
bool invoked = false;
if (m_param.m_frontEndName == "paddle") {
frontend->add_extension(std::make_shared<ConversionExtension>(
m_param.m_translatorName,
[&](const NodeContext& node) -> std::map<std::string, ov::OutputVector> {
auto relu = std::make_shared<ov::opset8::Relu>(node.get_input("X"));
invoked = true;
return {{"Out", {relu}}};
}));
} else if (m_param.m_frontEndName == "tf") {
frontend->add_extension(
std::make_shared<ConversionExtension>(m_param.m_translatorName,
[&](const ov::frontend::NodeContext& node) -> ov::OutputVector {
invoked = true;
auto ng_input = node.get_input(0);
auto res = std::make_shared<ov::opset8::Relu>(ng_input);
return {res};
}));
} else if (m_param.m_frontEndName == "onnx") {
frontend->add_extension(
std::make_shared<ConversionExtension>(m_param.m_translatorName,
[&](const ov::frontend::NodeContext& node) -> ov::OutputVector {
invoked = true;
auto a = node.get_input(0);
auto b = node.get_input(1);
auto res = std::make_shared<ov::opset8::Add>(a, b);
return {res};
}));
}
std::shared_ptr<InputModel> input_model;
ASSERT_NO_THROW(input_model = frontend->load(m_param.m_modelName));
ASSERT_NE(input_model, nullptr);
std::shared_ptr<ov::Model> model;
ASSERT_NO_THROW(model = frontend->convert(input_model));
ASSERT_NE(model, nullptr);
EXPECT_EQ(invoked, true);
}
TEST_P(FrontEndConversionExtensionTest, TestConversionExtensionViaSO) {
auto frontend = m_param.m_frontend;
const auto& lib_path = get_lib_path("test_builtin_extensions_1");
frontend->add_extension(lib_path);
std::shared_ptr<InputModel> input_model;
ASSERT_NO_THROW(input_model = frontend->load(m_param.m_modelName));
ASSERT_NE(input_model, nullptr);
std::shared_ptr<ov::Model> model;
ASSERT_NO_THROW(model = frontend->convert(input_model));
ASSERT_NE(model, nullptr);
}

View File

@ -142,7 +142,8 @@ TEST_P(FrontEndJsonConfigTest, testAddJsonConfigExtension) {
auto loaded_ext = json_config_ext->get_loaded_extensions();
auto target_ext = json_config_ext->get_target_extensions();
EXPECT_EQ(loaded_ext.size(), 3);
// The number of loaded extensions can be greater than the number of actually used ones.
EXPECT_EQ(loaded_ext.size(), 8);
EXPECT_EQ(target_ext.size(), 3);
for (const auto& target : target_ext) {

View File

@ -4,14 +4,31 @@
set(TARGET_NAME "test_builtin_extensions_1")
file(GLOB_RECURSE LIBRARY_SRC ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
file(GLOB_RECURSE LIBRARY_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)
file(GLOB LIBRARY_SRC ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
file(GLOB LIBRARY_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)
set(DEPENDENCIES nlohmann_json openvino::runtime::dev offline_transformations)
set(DEFINITIONS)
if (ENABLE_OV_ONNX_FRONTEND)
list(APPEND DEPENDENCIES openvino::frontend::onnx)
list(APPEND DEFINITIONS ENABLE_OV_ONNX_FRONTEND)
endif()
if (ENABLE_OV_TF_FRONTEND)
list(APPEND DEPENDENCIES openvino::frontend::tensorflow)
list(APPEND DEFINITIONS ENABLE_OV_TF_FRONTEND)
endif()
if (ENABLE_OV_PADDLE_FRONTEND)
list(APPEND DEPENDENCIES openvino::frontend::paddle)
list(APPEND DEFINITIONS ENABLE_OV_PADDLE_FRONTEND)
endif()
# Create library
add_library(${TARGET_NAME} SHARED ${LIBRARY_SRC} ${LIBRARY_HEADERS})
target_link_libraries(${TARGET_NAME} PUBLIC nlohmann_json inference_engine_transformations frontend_common
offline_transformations)
target_compile_definitions(${TARGET_NAME} PRIVATE ${DEFINITIONS})
target_link_libraries(${TARGET_NAME} PRIVATE ${DEPENDENCIES})
add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})

View File

@ -3,7 +3,35 @@
//
#include <openvino/core/extension.hpp>
#include <openvino/frontend/extension/conversion.hpp>
#ifdef ENABLE_OV_ONNX_FRONTEND
# include <openvino/frontend/onnx/extension/conversion.hpp>
# define ONNX_EXT std::make_shared<ov::frontend::onnx::ConversionExtension>("NewCustomOp_3", CustomTranslatorONNX),
#else
# define ONNX_EXT
#endif
#ifdef ENABLE_OV_PADDLE_FRONTEND
# include <openvino/frontend/paddle/extension/conversion.hpp>
# define PADDLE_EXT \
std::make_shared<ov::frontend::paddle::ConversionExtension>("NewCustomOp_4", CustomTranslatorPaddle),
#else
# define PADDLE_EXT
#endif
#ifdef ENABLE_OV_TF_FRONTEND
# include <openvino/frontend/tensorflow/extension/conversion.hpp>
# define TF_EXT \
std::make_shared<ov::frontend::tensorflow::ConversionExtension>("NewCustomOp_5", CustomTranslatorTensorflow)
#else
# define TF_EXT
#endif
#include "test_extension.hpp"
OPENVINO_CREATE_EXTENSIONS(std::vector<ov::Extension::Ptr>({std::make_shared<TestExtension1>()}));
OPENVINO_CREATE_EXTENSIONS(std::vector<ov::Extension::Ptr>(
{std::make_shared<TestExtension1>(),
std::make_shared<ov::frontend::ConversionExtension>("NewCustomOp_1", CustomTranslatorCommon_1),
std::make_shared<ov::frontend::ConversionExtension>("NewCustomOp_2", CustomTranslatorCommon_2),
ONNX_EXT PADDLE_EXT TF_EXT}));

View File

@ -12,3 +12,23 @@ bool TestExtension1::transform(const std::shared_ptr<ov::Model>& function, const
}
TestExtension1::TestExtension1() : ov::frontend::JsonTransformationExtension("buildin_extensions_1::TestExtension1") {}
ov::OutputVector CustomTranslatorCommon_1(const ov::frontend::NodeContext& node) {
return ov::OutputVector();
}
std::map<std::string, ov::OutputVector> CustomTranslatorCommon_2(const ov::frontend::NodeContext& node) {
return std::map<std::string, ov::OutputVector>();
}
ov::OutputVector CustomTranslatorTensorflow(const ov::frontend::NodeContext& node) {
return ov::OutputVector();
}
ov::OutputVector CustomTranslatorONNX(const ov::frontend::NodeContext& node) {
return ov::OutputVector();
}
std::map<std::string, ov::OutputVector> CustomTranslatorPaddle(const ov::frontend::NodeContext& node) {
return std::map<std::string, ov::OutputVector>();
}

View File

@ -6,6 +6,7 @@
#include <extension/json_transformation.hpp>
#include <nlohmann/json.hpp>
#include <openvino/frontend/node_context.hpp>
class TestExtension1 : public ov::frontend::JsonTransformationExtension {
public:
@ -13,3 +14,13 @@ public:
bool transform(const std::shared_ptr<ov::Model>& function, const std::string& config) const override;
};
ov::OutputVector CustomTranslatorCommon_1(const ov::frontend::NodeContext& node);
std::map<std::string, ov::OutputVector> CustomTranslatorCommon_2(const ov::frontend::NodeContext& node);
ov::OutputVector CustomTranslatorTensorflow(const ov::frontend::NodeContext& node);
ov::OutputVector CustomTranslatorONNX(const ov::frontend::NodeContext& node);
std::map<std::string, ov::OutputVector> CustomTranslatorPaddle(const ov::frontend::NodeContext& node);

View File

@ -10,7 +10,7 @@ file(GLOB_RECURSE LIBRARY_HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)
# Create library
add_library(${TARGET_NAME} SHARED ${LIBRARY_SRC} ${LIBRARY_HEADERS})
target_link_libraries(${TARGET_NAME} PRIVATE nlohmann_json inference_engine_transformations frontend_common
target_link_libraries(${TARGET_NAME} PRIVATE nlohmann_json openvino::runtime::dev
offline_transformations)
add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})

View File

@ -8,7 +8,7 @@ file(GLOB_RECURSE SRC ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
add_executable(${TARGET_NAME} ${SRC})
target_link_libraries(${TARGET_NAME} PRIVATE frontend_shared_test_classes ov_tensorflow_frontend)
target_link_libraries(${TARGET_NAME} PRIVATE frontend_shared_test_classes ov_tensorflow_frontend openvino::runtime)
add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})

View File

@ -0,0 +1,53 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "conversion_extension.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
#include "openvino/frontend/tensorflow/frontend.hpp"
#include "so_extension.hpp"
#include "tf_utils.hpp"
using namespace ov::frontend;
using TFConversionExtensionTest = FrontEndConversionExtensionTest;
static const std::string translator_name = "Relu";
class TensorflowFrontendWrapper : public ov::frontend::tensorflow::FrontEnd {
void add_extension(const std::shared_ptr<ov::Extension>& extension) override {
ov::frontend::tensorflow::FrontEnd::add_extension(extension);
if (auto conv_ext = std::dynamic_pointer_cast<ConversionExtension>(extension)) {
EXPECT_NE(std::find(m_conversion_extensions.begin(), m_conversion_extensions.end(), conv_ext),
m_conversion_extensions.end())
<< "ConversionExtension is not registered.";
EXPECT_NE(m_op_translators.find(conv_ext->get_op_type()), m_op_translators.end())
<< conv_ext->get_op_type() << " translator is not registered.";
} else if (auto telemetry = std::dynamic_pointer_cast<TelemetryExtension>(extension)) {
EXPECT_EQ(m_telemetry, telemetry) << "TelemetryExtension is not registered.";
} else if (auto transformation = std::dynamic_pointer_cast<DecoderTransformationExtension>(extension)) {
EXPECT_NE(std::find(m_transformation_extensions.begin(), m_transformation_extensions.end(), transformation),
m_transformation_extensions.end())
<< "DecoderTransformationExtension is not registered.";
} else if (auto so_ext = std::dynamic_pointer_cast<ov::detail::SOExtension>(extension)) {
EXPECT_NE(std::find(m_extensions.begin(), m_extensions.end(), so_ext), m_extensions.end())
<< "SOExtension is not registered.";
}
}
};
static ConversionExtensionFEParam getTestData() {
ConversionExtensionFEParam res;
res.m_frontEndName = TF_FE;
res.m_modelsPath = std::string(TEST_TENSORFLOW_MODELS_DIRNAME);
res.m_modelName = "2in_2out/2in_2out.pb";
res.m_translatorName = translator_name;
res.m_frontend = std::make_shared<TensorflowFrontendWrapper>();
return res;
}
INSTANTIATE_TEST_SUITE_P(TFConversionExtensionTest,
FrontEndConversionExtensionTest,
::testing::Values(getTestData()),
FrontEndConversionExtensionTest::getTestCaseName);

View File

@ -0,0 +1,56 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "openvino/core/extension.hpp"
#include "openvino/frontend/node_context.hpp"
#include "openvino/frontend/visibility.hpp"
namespace ov {
namespace frontend {
class FRONTEND_API ConversionExtensionBase : public ov::Extension {
public:
using Ptr = std::shared_ptr<ConversionExtensionBase>;
explicit ConversionExtensionBase(const std::string& op_type) : m_op_type(op_type) {}
const std::string& get_op_type() const {
return m_op_type;
}
~ConversionExtensionBase() override = 0;
private:
std::string m_op_type;
};
class FRONTEND_API ConversionExtension : public ConversionExtensionBase {
public:
using Ptr = std::shared_ptr<ConversionExtension>;
ConversionExtension(const std::string& op_type, const CreatorFunction& converter)
: ConversionExtensionBase(op_type),
m_converter(converter) {}
ConversionExtension(const std::string& op_type, const CreatorFunctionNamed& converter)
: ConversionExtensionBase(op_type),
m_converter_named(converter) {}
const CreatorFunction& get_converter() const {
return m_converter;
};
const CreatorFunctionNamed& get_converter_named() const {
return m_converter_named;
};
~ConversionExtension() override = default;
private:
CreatorFunction m_converter;
CreatorFunctionNamed m_converter_named;
};
} // namespace frontend
} // namespace ov
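
For reference, a minimal sketch (not part of this diff) of how the two converter flavours map onto the two ConversionExtension constructors above; the front-end handle, op type names and lambdas below are illustrative only.

#include <map>
#include <memory>

#include <openvino/frontend/extension/conversion.hpp>
#include <openvino/frontend/frontend.hpp>
#include <openvino/opsets/opset8.hpp>

void add_custom_converters(const ov::frontend::FrontEnd::Ptr& frontend) {
    // CreatorFunction flavour: index-based inputs, plain OutputVector result
    // (the style used by the ONNX and TensorFlow front ends in the tests above).
    frontend->add_extension(std::make_shared<ov::frontend::ConversionExtension>(
        "CustomOp_A",
        [](const ov::frontend::NodeContext& node) -> ov::OutputVector {
            return {std::make_shared<ov::opset8::Relu>(node.get_input(0))};
        }));

    // CreatorFunctionNamed flavour: named input/output ports
    // (the style used by the Paddle front end).
    frontend->add_extension(std::make_shared<ov::frontend::ConversionExtension>(
        "CustomOp_B",
        [](const ov::frontend::NodeContext& node) -> std::map<std::string, ov::OutputVector> {
            return {{"Out", {std::make_shared<ov::opset8::Relu>(node.get_input("X"))}}};
        }));
}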

View File

@ -24,6 +24,7 @@ namespace frontend {
/// calling corresponding ctor.
class FRONTEND_API DecoderTransformationExtension : public ov::Extension {
public:
using Ptr = std::shared_ptr<DecoderTransformationExtension>;
DecoderTransformationExtension() = default;
/// \brief Create a custom functional pass where code of the pass is implemented as a function.

View File

@ -20,6 +20,7 @@ namespace frontend {
/// \brief Provides callback to report telemetry information back to Python code
class FRONTEND_API TelemetryExtension : public ov::Extension {
public:
using Ptr = std::shared_ptr<TelemetryExtension>;
using error_callback = std::function<void(const std::string& category, const std::string& error_message)>;
using event_callback = std::function<
void(const std::string& category, const std::string& action, const std::string& label, int value)>;

View File

@ -22,7 +22,8 @@ using FrontEndFactory = std::function<FrontEnd::Ptr()>;
/// frontends This is a main frontend entry point for client applications
class FRONTEND_API FrontEndManager final {
public:
/*/ \brief Default constructor. Searches and loads of available frontends*/ FrontEndManager();
/// \brief Default constructor. Searches and loads of available frontends
FrontEndManager();
/// \brief Default move constructor
FrontEndManager(FrontEndManager&&) noexcept;

View File

@ -0,0 +1,101 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "openvino/core/extension.hpp"
#include "openvino/frontend/exception.hpp"
#include "openvino/frontend/visibility.hpp"
#include "openvino/pass/graph_rewrite.hpp"
#include "openvino/pass/manager.hpp"
#include "openvino/pass/pass.hpp"
namespace ov {
namespace frontend {
class FRONTEND_API NodeContext {
public:
explicit NodeContext(const std::string& op_type) : m_op_type(op_type) {}
virtual ~NodeContext() = default;
/// \brief Returns the number of inputs
virtual size_t get_input_size() const {
FRONT_END_NOT_IMPLEMENTED(get_input_size);
};
/// \brief Returns the number of inputs with a given port name
virtual size_t get_input_size(const std::string& port_name) const {
FRONT_END_NOT_IMPLEMENTED(get_input_size);
}
/// \brief Returns exactly one input with a given idx; throws if there is no input or
/// there is more than one input
virtual Output<Node> get_input(int idx) const {
FRONT_END_NOT_IMPLEMENTED(get_input);
}
/// \brief Returns exactly one input with a given name and idx; throws if there is no input or
/// there is more than one input
virtual Output<Node> get_input(const std::string& name, int idx) const {
FRONT_END_NOT_IMPLEMENTED(get_input);
}
/// \brief Returns exactly one input with a given name; throws if there is no input or
/// there is more than one input
virtual Output<Node> get_input(const std::string& name) const {
FRONT_END_NOT_IMPLEMENTED(get_input);
}
virtual const std::string& get_op_type() const {
return m_op_type;
}
/// \brief Returns node attribute by name.
template <class T>
T get_attribute(const std::string& name) const {
auto any = get_attribute_as_any(name);
FRONT_END_GENERAL_CHECK(!any.empty(), "Attribute with name '", name, "' does not exist");
// Sometimes we can't unambiguously recognize types coming from protobuf, e.g.
// an int may be interpreted as a plain int or as an enum inherited from int, so
// we have to apply additional conversion rules based on the type (T) requested by the user.
auto res = apply_additional_conversion_rules(any, typeid(T));
return res.as<T>();
}
/// \brief Returns a node attribute by name; returns the 'def' value if the attribute does not exist
template <class T>
T get_attribute(const std::string& name, const T& def) const {
auto any = get_attribute_as_any(name);
// Sometimes we can't unambiguously recognize types coming from protobuf, e.g.
// an int may be interpreted as a plain int or as an enum inherited from int, so
// we have to apply additional conversion rules based on the type (T) requested by the user.
auto res = apply_additional_conversion_rules(any, typeid(T));
if (!res.empty()) {
return res.as<T>();
}
return def;
}
/// \brief Checks if an attribute with a given name exists
bool has_attribute(const std::string& name) const {
return !get_attribute_as_any(name).empty();
}
/// \brief Returns node attribute by name as ov::Any.
virtual ov::Any get_attribute_as_any(const std::string& name) const = 0;
private:
virtual ov::Any apply_additional_conversion_rules(const ov::Any& data, const std::type_info& type_info) const {
return data;
}
std::string m_op_type;
};
using CreatorFunction = std::function<OutputVector(const NodeContext&)>;
using CreatorFunctionNamed = std::function<std::map<std::string, OutputVector>(const NodeContext&)>;
} // namespace frontend
} // namespace ov
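
A minimal sketch of a converter body exercising the NodeContext attribute API above; the op and attribute names are hypothetical and the snippet is illustrative only.

#include <memory>

#include <openvino/frontend/exception.hpp>
#include <openvino/frontend/node_context.hpp>
#include <openvino/opsets/opset8.hpp>

ov::OutputVector convert_my_leaky_relu(const ov::frontend::NodeContext& node) {
    const auto data = node.get_input(0);
    // The two-argument overload falls back to the default when "alpha" is absent;
    // the single-argument overload would throw instead.
    const float alpha = node.get_attribute<float>("alpha", 0.01f);
    // has_attribute() lets a converter reject configurations it does not handle.
    FRONT_END_GENERAL_CHECK(!node.has_attribute("approximate"),
                            "'approximate' mode is not covered by this sketch");
    const auto slope = ov::opset8::Constant::create(ov::element::f32, {}, {alpha});
    return {std::make_shared<ov::opset8::PRelu>(data, slope)};
}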

View File

@ -0,0 +1,8 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "openvino/frontend/extension/conversion.hpp"
using namespace ov::frontend;
ConversionExtensionBase::~ConversionExtensionBase() = default;

View File

@ -43,5 +43,7 @@ DecoderTransformationExtension::DecoderTransformationExtension(
}) {}
void DecoderTransformationExtension::register_pass(ov::pass::Manager& manager) const {
m_registration(manager);
if (m_registration) {
m_registration(manager);
}
}

View File

@ -191,4 +191,4 @@ inline std::ostream& operator<<(std::ostream& outs, const Node& node) {
} // namespace onnx_import
} // namespace ngraph
} // namespace ngraph

View File

@ -0,0 +1,32 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "openvino/frontend/extension/conversion.hpp"
#include "openvino/frontend/node_context.hpp"
#include "openvino/frontend/onnx/node_context.hpp"
#include "openvino/frontend/onnx/visibility.hpp"
namespace ov {
namespace frontend {
namespace onnx {
class ONNX_FRONTEND_API ConversionExtension : public ConversionExtensionBase {
public:
using Ptr = std::shared_ptr<ConversionExtension>;
ConversionExtension() = delete;
ConversionExtension(const std::string& op_type, const ov::frontend::CreatorFunction& converter)
: ConversionExtensionBase(op_type),
m_converter(converter) {}
const ov::frontend::CreatorFunction& get_converter() const {
return m_converter;
}
private:
ov::frontend::CreatorFunction m_converter;
};
} // namespace onnx
} // namespace frontend
} // namespace ov
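
Illustrative usage of the ONNX-specific extension above, which accepts only the single-output CreatorFunction; the op type and helper below are hypothetical.

#include <memory>

#include <openvino/frontend/frontend.hpp>
#include <openvino/frontend/onnx/extension/conversion.hpp>
#include <openvino/opsets/opset8.hpp>

void add_custom_onnx_converter(const ov::frontend::FrontEnd::Ptr& onnx_frontend) {
    onnx_frontend->add_extension(std::make_shared<ov::frontend::onnx::ConversionExtension>(
        "CustomAbs",
        [](const ov::frontend::NodeContext& node) -> ov::OutputVector {
            return {std::make_shared<ov::opset8::Abs>(node.get_input(0))};
        }));
}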

View File

@ -4,21 +4,12 @@
#pragma once
#include <common/extension_holder.hpp>
#include <openvino/frontend/frontend.hpp>
#ifdef OPENVINO_STATIC_LIBRARY
# define ONNX_FRONTEND_API
# define ONNX_FRONTEND_C_API
#else
# ifdef ov_onnx_frontend_EXPORTS
# define ONNX_FRONTEND_API OPENVINO_CORE_EXPORTS
# define ONNX_FRONTEND_C_API OPENVINO_EXTERN_C OPENVINO_CORE_EXPORTS
# else
# define ONNX_FRONTEND_API OPENVINO_CORE_IMPORTS
# define ONNX_FRONTEND_C_API OPENVINO_EXTERN_C OPENVINO_CORE_IMPORTS
# endif // ov_onnx_frontend_EXPORTS
#endif // OPENVINO_STATIC_LIBRARY
#include "openvino/frontend/extension/conversion.hpp"
#include "openvino/frontend/extension/decoder_transformation.hpp"
#include "openvino/frontend/extension/extension_holder.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
#include "openvino/frontend/frontend.hpp"
#include "openvino/frontend/onnx/visibility.hpp"
namespace ov {
namespace frontend {
@ -26,6 +17,8 @@ namespace onnx {
class ONNX_FRONTEND_API FrontEnd : public ov::frontend::FrontEnd {
public:
~FrontEnd() override;
using Ptr = std::shared_ptr<FrontEnd>;
std::shared_ptr<ov::Model> convert(const InputModel::Ptr& model) const override;
void convert(const std::shared_ptr<ov::Model>& partially_converted) const override;
std::shared_ptr<ov::Model> decode(const InputModel::Ptr& model) const override;
@ -36,7 +29,12 @@ public:
protected:
InputModel::Ptr load_impl(const std::vector<ov::Any>& params) const override;
private:
// m_other_extensions should be the first member here:
// it can contain SOExtensions (holders for other extensions),
// so it should be released last.
std::vector<Extension::Ptr> m_other_extensions;
std::vector<DecoderTransformationExtension::Ptr> m_transformation_extensions;
std::vector<ConversionExtensionBase::Ptr> m_conversion_extensions;
ExtensionHolder m_extensions;
};

View File

@ -0,0 +1,41 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "openvino/frontend/extension/conversion.hpp"
#include "openvino/frontend/node_context.hpp"
#include "openvino/frontend/onnx/visibility.hpp"
namespace ngraph {
namespace onnx_import {
class Node;
}
} // namespace ngraph
namespace ov {
namespace frontend {
namespace onnx {
class ONNX_FRONTEND_API NodeContext : public ov::frontend::NodeContext {
public:
using Ptr = std::shared_ptr<NodeContext>;
explicit NodeContext(const ngraph::onnx_import::Node& context);
size_t get_input_size() const override;
Output<ov::Node> get_input(int port_idx) const override;
ov::Any get_attribute_as_any(const std::string& name) const override;
protected:
const ngraph::onnx_import::Node& m_context;
OutputVector m_inputs;
private:
ov::Any apply_additional_conversion_rules(const ov::Any& data, const std::type_info& type_info) const override;
};
using CreatorFunction = std::function<OutputVector(const ngraph::onnx_import::Node&)>;
} // namespace onnx
} // namespace frontend
} // namespace ov

View File

@ -0,0 +1,20 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "openvino/frontend/exception.hpp"
#ifdef OPENVINO_STATIC_LIBRARY
# define ONNX_FRONTEND_API
# define ONNX_FRONTEND_C_API
#else
# ifdef ov_onnx_frontend_EXPORTS
# define ONNX_FRONTEND_API OPENVINO_CORE_EXPORTS
# define ONNX_FRONTEND_C_API OPENVINO_EXTERN_C OPENVINO_CORE_EXPORTS
# else
# define ONNX_FRONTEND_API OPENVINO_CORE_IMPORTS
# define ONNX_FRONTEND_C_API OPENVINO_EXTERN_C OPENVINO_CORE_IMPORTS
# endif // ov_onnx_frontend_EXPORTS
#endif // OPENVINO_STATIC_LIBRARY

View File

@ -25,6 +25,33 @@ Subgraph Attribute::get_subgraph(const Graph* parent_graph) const {
return Subgraph{model_proto, parent_graph};
}
ov::Any Attribute::get_any() const {
switch (get_type()) {
case Type::float_point:
return get_float();
case Type::integer:
return get_integer();
case Type::string:
return get_string();
case Type::float_point_array:
return get_float_array();
case Type::integer_array:
return get_integer_array();
case Type::string_array:
return get_string_array();
// TODO: support the remaining attribute types.
case Type::sparse_tensor_array:
case Type::graph_array:
case Type::tensor_array:
case Type::tensor:
case Type::graph:
case Type::sparse_tensor:
throw ov::Exception(get_name() + " attribute is not supported.");
default:
throw ov::Exception("Unknown type of attribute " + get_name());
}
}
} // namespace onnx_import
} // namespace ngraph

View File

@ -327,6 +327,8 @@ public:
return detail::attribute::get_value<T>(*m_attribute_proto);
}
ov::Any get_any() const;
private:
const ONNX_NAMESPACE::AttributeProto* m_attribute_proto;
};

View File

@ -10,12 +10,12 @@
#include <string>
#include <vector>
#include "common/extension_holder.hpp"
#include "core/graph_cache.hpp"
#include "core/model.hpp"
#include "ngraph/function.hpp"
#include "ngraph/op/parameter.hpp"
#include "onnx_import/core/operator_set.hpp"
#include "openvino/frontend/extension/extension_holder.hpp"
namespace ngraph {
namespace onnx_import {

View File

@ -162,6 +162,11 @@ Subgraph Node::Impl::get_attribute_value(const std::string& name) const {
return get_subgraph_from_attribute(name);
}
template <>
ov::Any Node::get_attribute_value(const std::string& name) const {
return get_attribute(name).get_any();
}
OutputVector Node::Impl::get_ng_inputs() const {
OutputVector result;
for (const auto& name : m_node_proto->input()) {

View File

@ -8,13 +8,13 @@
#include <map>
#include <memory>
#include "common/extension_holder.hpp"
#include "editor_types.hpp"
#include "ngraph/function.hpp"
#include "ngraph/op/constant.hpp"
#include "ngraph/partial_shape.hpp"
#include "ngraph/type/element_type.hpp"
#include "onnx_import/onnx_importer_visibility.hpp"
#include "openvino/frontend/extension/extension_holder.hpp"
#include "openvino/frontend/extension/progress_reporter_extension.hpp"
#include "openvino/frontend/extension/telemetry.hpp"

View File

@ -5,13 +5,19 @@
#include <fstream>
#include <input_model.hpp>
#include <onnx_import/onnx.hpp>
#include <onnx_import/onnx_utils.hpp>
#include <openvino/frontend/exception.hpp>
#include <openvino/frontend/manager.hpp>
#include <openvino/frontend/onnx/extension/conversion.hpp>
#include <openvino/frontend/onnx/frontend.hpp>
#include <openvino/frontend/onnx/visibility.hpp>
#include <sstream>
#include <utils/onnx_internal.hpp>
#include "onnx_common/onnx_model_validator.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
#include "ops_bridge.hpp"
#include "so_extension.hpp"
using namespace ov;
using namespace ov::frontend::onnx;
@ -63,6 +69,19 @@ InputModel::Ptr FrontEnd::load_impl(const std::vector<ov::Any>& variants) const
std::shared_ptr<ngraph::Function> FrontEnd::convert(const InputModel::Ptr& model) const {
auto model_onnx = std::dynamic_pointer_cast<InputModel>(model);
NGRAPH_CHECK(model_onnx != nullptr, "Invalid input model");
if (!m_transformation_extensions.empty()) {
auto function = decode(model);
ov::pass::Manager manager;
for (const auto& transformation : m_transformation_extensions) {
transformation->register_pass(manager);
}
manager.run_passes(function);
convert(function);
return function;
}
return model_onnx->convert();
}
@ -135,7 +154,42 @@ bool FrontEnd::supported_impl(const std::vector<ov::Any>& variants) const {
void FrontEnd::add_extension(const std::shared_ptr<ov::Extension>& extension) {
if (auto telemetry = std::dynamic_pointer_cast<TelemetryExtension>(extension)) {
m_extensions.telemetry = telemetry;
} else if (auto transformation = std::dynamic_pointer_cast<DecoderTransformationExtension>(extension)) {
m_transformation_extensions.push_back(transformation);
} else if (const auto& so_ext = std::dynamic_pointer_cast<ov::detail::SOExtension>(extension)) {
add_extension(so_ext->extension());
m_other_extensions.push_back(so_ext);
} else if (auto common_conv_ext = std::dynamic_pointer_cast<ov::frontend::ConversionExtension>(extension)) {
m_conversion_extensions.push_back(common_conv_ext);
for (int i = 1; i < ngraph::onnx_import::OperatorsBridge::LATEST_SUPPORTED_ONNX_OPSET_VERSION; ++i)
ngraph::onnx_import::register_operator(common_conv_ext->get_op_type(),
i,
"",
[=](const ngraph::onnx_import::Node& context) -> OutputVector {
return common_conv_ext->get_converter()(NodeContext(context));
});
} else if (const auto onnx_conv_ext = std::dynamic_pointer_cast<ConversionExtension>(extension)) {
m_conversion_extensions.push_back(onnx_conv_ext);
for (int i = 1; i < ngraph::onnx_import::OperatorsBridge::LATEST_SUPPORTED_ONNX_OPSET_VERSION; ++i)
ngraph::onnx_import::register_operator(onnx_conv_ext->get_op_type(),
i,
"",
[=](const ngraph::onnx_import::Node& context) -> OutputVector {
return onnx_conv_ext->get_converter()(NodeContext(context));
});
} else if (auto progress_reporter = std::dynamic_pointer_cast<ProgressReporterExtension>(extension)) {
m_extensions.progress_reporter = progress_reporter;
}
}
FrontEnd::~FrontEnd() {
// We should remove the newly added operations manually due to a deadlock in the Python GIL (pybind11/gil.h).
// The issue seems to occur when static C++ objects are used to store wrapped objects;
// in our case, OperatorsBridge is a static singleton and it stores the ConversionExtension.
for (const auto& conv_ext : m_conversion_extensions) {
for (int i = 1; i < ngraph::onnx_import::OperatorsBridge::LATEST_SUPPORTED_ONNX_OPSET_VERSION; ++i) {
ngraph::onnx_import::unregister_operator(conv_ext->get_op_type(), i, "");
}
}
ngraph::onnx_import::OperatorsBridge::load_initial_state();
}

View File

@ -8,7 +8,7 @@
#include <fstream>
#include <openvino/frontend/input_model.hpp>
#include "common/extension_holder.hpp"
#include "openvino/frontend/extension/extension_holder.hpp"
namespace ov {
namespace frontend {

View File

@ -0,0 +1,44 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <onnx_import/core/node.hpp>
#include <openvino/frontend/onnx/node_context.hpp>
#include <utils/common.hpp>
ov::frontend::onnx::NodeContext::NodeContext(const ngraph::onnx_import::Node& context)
: ov::frontend::NodeContext(context.op_type()),
m_context(context),
m_inputs(context.get_ng_inputs()) {}
ov::Output<ov::Node> ov::frontend::onnx::NodeContext::get_input(int port_idx) const {
return m_inputs.at(port_idx);
}
ov::Any ov::frontend::onnx::NodeContext::get_attribute_as_any(const std::string& name) const {
try {
return m_context.get_attribute_value<ov::Any>(name);
} catch (ngraph::onnx_import::error::node::UnknownAttribute& e) {
return ov::Any();
}
}
size_t ov::frontend::onnx::NodeContext::get_input_size() const {
return m_inputs.size();
}
ov::Any ov::frontend::onnx::NodeContext::apply_additional_conversion_rules(const ov::Any& data,
const std::type_info& type_info) const {
if (data.is<int64_t>() && type_info == typeid(ov::element::Type)) {
return ngraph::onnx_import::common::get_ngraph_element_type(data.as<int64_t>());
} else if (data.is<std::vector<int64_t>>() && type_info == typeid(std::vector<ov::element::Type>)) {
const auto& casted = data.as<std::vector<int64_t>>();
std::vector<ov::element::Type> types(casted.size());
for (size_t i = 0; i < casted.size(); ++i) {
types[i] = ngraph::onnx_import::common::get_ngraph_element_type(casted[i]);
}
return types;
}
// no conversion rules found
return data;
}
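
A short illustration of the conversion rules above: requesting ov::element::Type for an integer ONNX attribute routes the raw int64 value through get_ngraph_element_type(); the op and attribute names below are hypothetical.

#include <memory>

#include <openvino/frontend/node_context.hpp>
#include <openvino/opsets/opset8.hpp>

ov::OutputVector convert_custom_cast(const ov::frontend::NodeContext& node) {
    // "to" is stored as an int64 attribute in the ONNX model; requesting
    // ov::element::Type triggers the additional conversion rule implemented above.
    const auto dst_type = node.get_attribute<ov::element::Type>("to");
    return {std::make_shared<ov::opset8::Convert>(node.get_input(0), dst_type)};
}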

View File

@ -281,6 +281,10 @@ static const char* const MICROSOFT_DOMAIN = "com.microsoft";
m_map[domain_][name_].emplace(ver_, std::bind(op::set_##ver_::fn_, std::placeholders::_1))
OperatorsBridge::OperatorsBridge() {
_load_initial_state();
}
void OperatorsBridge::_load_initial_state() {
REGISTER_OPERATOR("Abs", 1, abs);
REGISTER_OPERATOR("Acos", 1, acos);
REGISTER_OPERATOR("Acosh", 1, acosh);

View File

@ -61,6 +61,10 @@ public:
return instance()._is_operator_registered(name, version, domain);
}
static void load_initial_state() {
return instance()._load_initial_state();
}
private:
// Registered operators structure
// {
@ -91,6 +95,7 @@ private:
bool _is_operator_registered(const std::string& name, std::int64_t version, const std::string& domain);
void _load_initial_state();
std::mutex lock;
};

View File

@ -7,8 +7,8 @@
#include <memory>
#include <string>
#include "common/extension_holder.hpp"
#include "ngraph/function.hpp"
#include "openvino/frontend/extension/extension_holder.hpp"
namespace ONNX_NAMESPACE {
class ModelProto;

View File

@ -7,4 +7,4 @@ ov_add_frontend(NAME paddle
SHUTDOWN_PROTOBUF
PROTOBUF_LITE
FILEDESCRIPTION "FrontEnd to load and convert PaddlePaddle file format"
LINK_LIBRARIES openvino::runtime::dev)
LINK_LIBRARIES openvino::util openvino::runtime::dev)

View File

@ -0,0 +1,56 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "openvino/core/any.hpp"
#include "openvino/frontend/node_context.hpp"
#include "openvino/frontend/paddle/visibility.hpp"
namespace ov {
namespace frontend {
namespace paddle {
using InPortName = std::string;
using OutPortName = std::string;
using TensorName = std::string;
using NamedOutputs = std::map<OutPortName, OutputVector>;
using NamedInputs = std::map<InPortName, OutputVector>;
class DecoderBase {
public:
/// \brief Get attribute value by name
///
/// \param name Attribute name
/// \return The attribute value wrapped in ov::Any if it exists, an empty ov::Any otherwise
virtual ov::Any get_attribute(const std::string& name) const = 0;
/// \brief Applies additional conversion rules to the data based on type_info
///
/// \param data Source attribute value
/// \param type_info Requested (destination) attribute type information
/// \return The converted value if a conversion rule applies, the original data otherwise
virtual ov::Any convert_attribute(const ov::Any& data, const std::type_info& type_info) const = 0;
/// \brief Get the output names
virtual std::vector<OutPortName> get_output_names() const = 0;
/// \brief Get the output size
virtual size_t get_output_size() const = 0;
/// \brief Get output port type
///
/// The current API assumes that an output port has only one output type.
/// If the decoder supports multiple types for the specified port, it shall throw a
/// general exception.
///
/// \param port_name Port name for the node
///
/// \return Type of specified output port
virtual ov::element::Type get_out_port_type(const std::string& port_name) const = 0;
/// \brief Get the type of the operation
virtual std::string get_op_type() const = 0;
};
} // namespace paddle
} // namespace frontend
} // namespace ov
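
A minimal sketch of a DecoderBase implementation, assuming a plain in-memory attribute map; it is illustrative only and not part of the change, e.g. for exercising a translator without a real .pdmodel file.

#include <map>
#include <string>
#include <vector>

#include "openvino/core/type/element_type.hpp"
#include "openvino/frontend/paddle/decoder.hpp"

// A stand-in decoder that serves attributes from a simple map.
class FakeDecoder : public ov::frontend::paddle::DecoderBase {
public:
    FakeDecoder(std::string op_type, std::map<std::string, ov::Any> attrs)
        : m_op_type(std::move(op_type)),
          m_attrs(std::move(attrs)) {}
    ov::Any get_attribute(const std::string& name) const override {
        const auto it = m_attrs.find(name);
        return it == m_attrs.end() ? ov::Any() : it->second;
    }
    ov::Any convert_attribute(const ov::Any& data, const std::type_info&) const override {
        return data;  // this fake has no extra conversion rules
    }
    std::vector<ov::frontend::paddle::OutPortName> get_output_names() const override {
        return {"Out"};
    }
    size_t get_output_size() const override {
        return 1;
    }
    ov::element::Type get_out_port_type(const std::string&) const override {
        return ov::element::f32;
    }
    std::string get_op_type() const override {
        return m_op_type;
    }

private:
    std::string m_op_type;
    std::map<std::string, ov::Any> m_attrs;
};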

View File

@ -4,7 +4,7 @@
#pragma once
#include <openvino/frontend/exception.hpp>
#include "openvino/frontend/exception.hpp"
namespace ov {
namespace frontend {
@ -14,13 +14,11 @@ class NodeContext;
class OpValidationFailure : public ov::frontend::OpValidationFailure {
public:
OpValidationFailure(const CheckLocInfo& check_loc_info,
const paddle::NodeContext& node,
const std::string& explanation)
OpValidationFailure(const CheckLocInfo& check_loc_info, const NodeContext& node, const std::string& explanation)
: ov::frontend::OpValidationFailure(check_loc_info, get_error_msg_prefix_paddle(node), explanation) {}
private:
static std::string get_error_msg_prefix_paddle(const paddle::NodeContext& node);
static std::string get_error_msg_prefix_paddle(const NodeContext& node);
};
} // namespace paddle
} // namespace frontend

View File

@ -0,0 +1,36 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "openvino/frontend/extension/conversion.hpp"
#include "openvino/frontend/frontend.hpp"
#include "openvino/frontend/paddle/node_context.hpp"
#include "openvino/frontend/paddle/visibility.hpp"
namespace ov {
namespace frontend {
namespace paddle {
class PADDLE_API ConversionExtension : public ConversionExtensionBase {
public:
using Ptr = std::shared_ptr<ConversionExtension>;
ConversionExtension() = delete;
ConversionExtension(const std::string& op_type, const ov::frontend::CreatorFunctionNamed& converter)
: ConversionExtensionBase(op_type),
m_converter(converter) {}
const ov::frontend::CreatorFunctionNamed& get_converter() const {
return m_converter;
}
private:
ov::frontend::CreatorFunctionNamed m_converter;
};
} // namespace paddle
} // namespace frontend
} // namespace ov

View File

@ -4,12 +4,14 @@
#pragma once
#include <openvino/frontend/extension/decoder_transformation.hpp>
#include <openvino/frontend/extension/telemetry.hpp>
#include <openvino/frontend/frontend.hpp>
#include <openvino/frontend/input_model.hpp>
#include "exceptions.hpp"
#include "openvino/core/extension.hpp"
#include "openvino/frontend/extension/decoder_transformation.hpp"
#include "openvino/frontend/extension/telemetry.hpp"
#include "openvino/frontend/frontend.hpp"
#include "openvino/frontend/input_model.hpp"
#include "openvino/frontend/paddle/exception.hpp"
#include "openvino/frontend/paddle/extension/conversion.hpp"
#include "openvino/frontend/paddle/node_context.hpp"
#include "openvino/frontend/paddle/visibility.hpp"
namespace ov {
@ -20,7 +22,8 @@ class OpPlace;
class PADDLE_API FrontEnd : public ov::frontend::FrontEnd {
public:
FrontEnd() = default;
using Ptr = std::shared_ptr<FrontEnd>;
FrontEnd();
/// \brief Completely convert the remaining, not converted part of a function.
/// \param partiallyConverted partially converted OV Model
@ -68,13 +71,21 @@ protected:
/// \return InputModel::Ptr
InputModel::Ptr load_impl(const std::vector<ov::Any>& params) const override;
private:
protected:
static std::shared_ptr<Model> convert_each_node(
const std::shared_ptr<InputModel>& frontend_model,
std::function<std::map<std::string, OutputVector>(const std::map<std::string, Output<Node>>&,
const std::shared_ptr<OpPlace>&)> func);
std::shared_ptr<TelemetryExtension> m_telemetry;
std::vector<std::shared_ptr<DecoderTransformationExtension>> m_transformation_extensions;
// m_extensions should be the first member here:
// it can contain SOExtensions (holders for other extensions),
// so it should be released last.
std::vector<Extension::Ptr> m_extensions;
TelemetryExtension::Ptr m_telemetry;
std::vector<DecoderTransformationExtension::Ptr> m_transformation_extensions;
std::vector<ConversionExtensionBase::Ptr> m_conversion_extensions;
TranslatorDictionaryType m_op_translators;
};
} // namespace paddle

View File

@ -0,0 +1,104 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "ngraph/compatibility.hpp"
#include "openvino/core/any.hpp"
#include "openvino/frontend/paddle/decoder.hpp"
#include "openvino/frontend/paddle/exception.hpp"
#include "openvino/frontend/paddle/visibility.hpp"
namespace ov {
namespace frontend {
namespace paddle {
using InPortName = std::string;
using OutPortName = std::string;
using TensorName = std::string;
using NamedOutputs = std::map<OutPortName, OutputVector>;
using NamedInputs = std::map<InPortName, OutputVector>;
/// Keeps the necessary data for a single node of the original FW graph to facilitate
/// the conversion process in the rules code.
class NodeContext : public ov::frontend::NodeContext {
public:
using Ptr = std::shared_ptr<NodeContext>;
NodeContext(const DecoderBase& _decoder, const NamedInputs& _name_map)
: ov::frontend::NodeContext(_decoder.get_op_type()),
decoder(_decoder),
name_map(_name_map) {}
/// Detects if there is at least one input with a given name
bool has_input(const std::string& name) const {
auto found = name_map.find(name);
if (found != name_map.end())
return !found->second.empty();
return false;
}
/// Returns exactly one input with a given name; throws if there is no input with
/// this name or there is more than one input
Output<Node> get_input(const std::string& name) const override {
FRONT_END_GENERAL_CHECK(name_map.at(name).size() == 1);
return name_map.at(name).at(0);
}
/// Returns all inputs with a given name
OutputVector get_ng_inputs(const std::string& name) const {
return name_map.at(name);
}
Output<Node> get_input(const std::string& name, int idx) const override {
return name_map.at(name).at(idx);
}
size_t get_input_size(const std::string& name) const override {
return name_map.at(name).size();
}
std::vector<OutPortName> get_output_names() const {
return decoder.get_output_names();
}
ov::element::Type get_out_port_type(const std::string& port_name) const {
return decoder.get_out_port_type(port_name);
}
NamedOutputs default_single_output_mapping(const std::shared_ptr<Node>& node,
const std::vector<OutPortName>& required_pdpd_out_names) const;
ov::Any get_attribute_as_any(const std::string& name) const override {
auto res = decoder.get_attribute(name);
return res;
}
private:
ov::Any apply_additional_conversion_rules(const ov::Any& any, const std::type_info& type_info) const override {
auto res = decoder.convert_attribute(any, type_info);
return res;
}
const DecoderBase& decoder;
const NamedInputs& name_map;
};
inline NamedOutputs NodeContext::default_single_output_mapping(
const std::shared_ptr<Node>& node,
const std::vector<OutPortName>& required_pdpd_out_names) const {
NamedOutputs named_outputs;
const auto& outputs = node->outputs();
const auto& pdpd_op_output_names = this->get_output_names();
FRONT_END_GENERAL_CHECK(outputs.size() == 1, "OV node must have exactly one output");
for (const auto& pdpd_name : pdpd_op_output_names) {
if (std::find(required_pdpd_out_names.begin(), required_pdpd_out_names.end(), pdpd_name) !=
required_pdpd_out_names.end())
named_outputs[pdpd_name] = {outputs[0]};
}
return named_outputs;
}
using CreatorFunction = std::function<NamedOutputs(const NodeContext&)>;
using TranslatorDictionaryType = std::map<std::string, CreatorFunction>;
} // namespace paddle
} // namespace frontend
} // namespace ov
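
A minimal sketch of a translator written against this NodeContext, mirroring the existing Paddle translators further down in the diff; the op type, ports and attributes are hypothetical.

#include <memory>

#include "openvino/frontend/paddle/node_context.hpp"
#include "openvino/opsets/opset6.hpp"

namespace ov {
namespace frontend {
namespace paddle {
namespace op {
NamedOutputs my_clip(const NodeContext& node) {
    auto data = node.get_input("X");
    // Optional attributes with defaults; the one-argument overload would throw if missing.
    auto min = node.get_attribute<float>("min", 0.0f);
    auto max = node.get_attribute<float>("max", 6.0f);
    PADDLE_OP_CHECK(node, max >= min, "my_clip: max must not be less than min");
    return node.default_single_output_mapping({std::make_shared<ov::opset6::Clamp>(data, min, max)}, {"Out"});
}
}  // namespace op
}  // namespace paddle
}  // namespace frontend
}  // namespace ov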

View File

@ -2,7 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "decoder.hpp"
#include "decoder_proto.hpp"
#include <algorithm>
#include <chrono>
@ -21,7 +21,7 @@ namespace paddle {
using namespace ::paddle::framework;
std::map<::paddle::framework::proto::VarType_Type, ov::element::Type> TYPE_MAP{
std::map<proto::VarType_Type, ov::element::Type> TYPE_MAP{
{proto::VarType_Type::VarType_Type_BOOL, ov::element::boolean},
{proto::VarType_Type::VarType_Type_INT16, ov::element::i16},
{proto::VarType_Type::VarType_Type_INT32, ov::element::i32},
@ -33,34 +33,55 @@ std::map<::paddle::framework::proto::VarType_Type, ov::element::Type> TYPE_MAP{
{proto::VarType_Type::VarType_Type_INT8, ov::element::i8},
{proto::VarType_Type::VarType_Type_BF16, ov::element::bf16}};
ov::Any DecoderProto::get_attribute(const std::string& name, const std::type_info& type_info) const {
ov::Any DecoderProto::get_attribute(const std::string& name) const {
auto attrs = decode_attribute_helper(name);
if (attrs.empty()) {
return {};
}
if (type_info == typeid(std::string)) {
return attrs[0].s();
} else if (type_info == typeid(int64_t)) {
return attrs[0].l();
} else if (type_info == typeid(std::vector<int64_t>)) {
return std::vector<int64_t>(attrs[0].longs().begin(), attrs[0].longs().end());
} else if (type_info == typeid(int32_t)) {
switch (attrs[0].type()) {
case proto::AttrType::INT:
return attrs[0].i();
} else if (type_info == typeid(std::vector<int32_t>)) {
case proto::AttrType::INTS:
return std::vector<int32_t>(attrs[0].ints().begin(), attrs[0].ints().end());
} else if (type_info == typeid(float)) {
case proto::AttrType::FLOAT:
return attrs[0].f();
} else if (type_info == typeid(std::vector<float>)) {
case proto::AttrType::FLOATS:
return std::vector<float>(attrs[0].floats().begin(), attrs[0].floats().end());
} else if (type_info == typeid(ov::element::Type)) {
return TYPE_MAP[static_cast<::paddle::framework::proto::VarType_Type>(attrs[0].i())];
} else if (type_info == typeid(bool)) {
case proto::AttrType::STRING:
return attrs[0].s();
case proto::AttrType::STRINGS:
return std::vector<std::string>(attrs[0].strings().begin(), attrs[0].strings().end());
case proto::AttrType::LONG:
return attrs[0].l();
case proto::AttrType::LONGS:
return std::vector<int64_t>(attrs[0].longs().begin(), attrs[0].longs().end());
case proto::AttrType::BOOLEAN:
return attrs[0].b();
case proto::AttrType::BOOLEANS:
return std::vector<bool>(attrs[0].bools().begin(), attrs[0].bools().end());
case proto::AttrType::BLOCK:
return attrs[0].block_idx();
case proto::AttrType::BLOCKS:
return std::vector<std::int32_t>(attrs[0].blocks_idx().begin(), attrs[0].blocks_idx().end());
default:
FRONT_END_GENERAL_CHECK(false, "Conversion from PaddlePaddle to OpenVINO data type is not supported.");
}
}
// Type is not supported by decoder
return {};
ov::Any DecoderProto::convert_attribute(const Any& data, const std::type_info& type_info) const {
if (data.is<int32_t>() && type_info == typeid(ov::element::Type)) {
return TYPE_MAP.at(static_cast<proto::VarType_Type>(data.as<int32_t>()));
} else if (data.is<std::vector<int32_t>>() && type_info == typeid(std::vector<ov::element::Type>)) {
const auto& casted = data.as<std::vector<int32_t>>();
std::vector<ov::element::Type> types(casted.size());
for (size_t i = 0; i < casted.size(); ++i) {
types[i] = TYPE_MAP.at(static_cast<proto::VarType_Type>(casted[i]));
}
return types;
}
// no conversion rules found.
return data;
}
std::vector<paddle::OutPortName> DecoderProto::get_output_names() const {
@ -94,7 +115,7 @@ ov::element::Type DecoderProto::get_out_port_type(const std::string& port_name)
for (const auto& out_port : op_place->get_output_ports().at(port_name)) {
output_types.push_back(out_port->get_target_tensor_paddle()->get_element_type());
}
FRONT_END_GENERAL_CHECK(output_types.size() > 0, "Port has no tensors connected.");
FRONT_END_GENERAL_CHECK(!output_types.empty(), "Port has no tensors connected.");
FRONT_END_GENERAL_CHECK(std::equal(output_types.begin() + 1, output_types.end(), output_types.begin()),
"Port has tensors with different types connected.");
return output_types[0];

View File

@ -9,14 +9,14 @@
#include <fstream>
#include <map>
#include <memory>
#include <openvino/core/any.hpp>
#include <string>
#include <utility>
#include <vector>
#include "framework.pb.h"
#include "node_context.hpp"
#include "openvino/core/any.hpp"
#include "openvino/frontend/paddle/frontend.hpp"
#include "openvino/frontend/paddle/node_context.hpp"
#include "place.hpp"
namespace ov {
@ -28,7 +28,9 @@ class DecoderProto : public paddle::DecoderBase {
public:
explicit DecoderProto(const std::shared_ptr<OpPlace>& op) : op_place(op) {}
ov::Any get_attribute(const std::string& name, const std::type_info& type_info) const override;
ov::Any get_attribute(const std::string& name) const override;
ov::Any convert_attribute(const ov::Any& data, const std::type_info& type_info) const override;
std::vector<paddle::OutPortName> get_output_names() const override;

View File

@ -2,9 +2,9 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "exceptions.hpp"
#include "openvino/frontend/paddle/exception.hpp"
#include "node_context.hpp"
#include "openvino/frontend/paddle/node_context.hpp"
namespace ov {
namespace frontend {

View File

@ -9,15 +9,17 @@
#include <string>
#include <vector>
#include "decoder.hpp"
#include "decoder_proto.hpp"
#include "framework.pb.h"
#include "input_model.hpp"
#include "node_context.hpp"
#include "op_table.hpp"
#include "openvino/frontend/extension/conversion.hpp"
#include "openvino/frontend/paddle/node_context.hpp"
#include "openvino/opsets/opset7.hpp"
#include "paddle_fw_node.hpp"
#include "paddle_utils.hpp"
#include "place.hpp"
#include "so_extension.hpp"
using namespace ov::opset7;
using namespace ov;
@ -132,6 +134,8 @@ std::istream* variant_to_stream_ptr(const ov::Any& variant, std::ifstream& ext_s
}
} // namespace
FrontEnd::FrontEnd() : m_op_translators(paddle::get_supported_ops()) {}
std::shared_ptr<ov::Model> FrontEnd::convert_each_node(
const std::shared_ptr<ov::frontend::InputModel>& frontend_model,
std::function<std::map<std::string, OutputVector>(const std::map<std::string, Output<Node>>&,
@ -292,11 +296,10 @@ std::shared_ptr<ov::Model> FrontEnd::convert(const InputModel::Ptr& model) const
return function;
}
std::map<std::string, paddle::CreatorFunction> CREATORS_MAP = paddle::get_supported_ops();
auto f = convert_each_node(
paddle_model,
[&](const std::map<std::string, Output<Node>>& nodes_dict, const std::shared_ptr<OpPlace>& op_place) {
return paddle::make_ng_node(nodes_dict, op_place, CREATORS_MAP);
return paddle::make_ng_node(nodes_dict, op_place, m_op_translators);
});
return f;
}
@ -304,8 +307,7 @@ std::shared_ptr<ov::Model> FrontEnd::convert(const InputModel::Ptr& model) const
void FrontEnd::convert(const std::shared_ptr<ov::Model>& partiallyConverted) const {
for (const auto& node : partiallyConverted->get_ordered_ops()) {
if (ov::is_type<FrameworkNode>(node)) {
paddle::normalize_framework_node(std::dynamic_pointer_cast<FrameworkNode>(node),
paddle::get_supported_ops());
paddle::normalize_framework_node(std::dynamic_pointer_cast<FrameworkNode>(node), m_op_translators);
}
}
for (const auto& result : partiallyConverted->get_results()) {
@ -329,13 +331,12 @@ std::shared_ptr<ov::Model> FrontEnd::convert_partially(const InputModel::Ptr& mo
return function;
}
std::map<std::string, paddle::CreatorFunction> CREATORS_MAP = paddle::get_supported_ops();
auto f = convert_each_node(
paddle_model,
[&](const std::map<std::string, Output<Node>>& nodes_dict, const std::shared_ptr<OpPlace>& op_place) {
paddle::NamedOutputs named_outputs;
try {
named_outputs = paddle::make_ng_node(nodes_dict, op_place, CREATORS_MAP);
named_outputs = paddle::make_ng_node(nodes_dict, op_place, m_op_translators);
} catch (const OpConversionFailure&) {
named_outputs = paddle::make_framework_node(nodes_dict, op_place);
}
@ -348,7 +349,6 @@ std::shared_ptr<ov::Model> FrontEnd::decode(const InputModel::Ptr& model) const
auto paddle_model = std::dynamic_pointer_cast<InputModel>(model);
FRONT_END_GENERAL_CHECK(paddle_model != nullptr, "Invalid input model");
std::map<std::string, paddle::CreatorFunction> CREATORS_MAP = paddle::get_supported_ops();
auto f = convert_each_node(paddle_model, paddle::make_framework_node);
return f;
}
@ -362,6 +362,19 @@ void FrontEnd::add_extension(const std::shared_ptr<ov::Extension>& extension) {
m_telemetry = telemetry;
} else if (auto transformation = std::dynamic_pointer_cast<DecoderTransformationExtension>(extension)) {
m_transformation_extensions.push_back(transformation);
} else if (const auto& so_ext = std::dynamic_pointer_cast<ov::detail::SOExtension>(extension)) {
add_extension(so_ext->extension());
m_extensions.push_back(so_ext);
} else if (auto common_conv_ext = std::dynamic_pointer_cast<ov::frontend::ConversionExtension>(extension)) {
m_conversion_extensions.push_back(common_conv_ext);
m_op_translators[common_conv_ext->get_op_type()] = [=](const NodeContext& context) {
return common_conv_ext->get_converter_named()(context);
};
} else if (const auto& paddle_conv_ext = std::dynamic_pointer_cast<ConversionExtension>(extension)) {
m_conversion_extensions.push_back(paddle_conv_ext);
m_op_translators[paddle_conv_ext->get_op_type()] = [=](const NodeContext& context) {
return paddle_conv_ext->get_converter()(context);
};
}
}

View File

@ -7,10 +7,10 @@
#include <fstream>
#include <queue>
#include "decoder.hpp"
#include "decoder_proto.hpp"
#include "framework.pb.h"
#include "input_model.hpp"
#include "node_context.hpp"
#include "openvino/frontend/paddle/node_context.hpp"
#include "openvino/opsets/opset7.hpp"
#include "paddle_utils.hpp"
#include "place.hpp"

View File

@ -4,8 +4,8 @@
#pragma once
#include <openvino/frontend/extension/telemetry.hpp>
#include <openvino/frontend/paddle/frontend.hpp>
#include "openvino/frontend/extension/telemetry.hpp"
#include "openvino/frontend/paddle/frontend.hpp"
namespace ov {
namespace frontend {

View File

@ -2,8 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
//
#include <node_context.hpp>
#include "openvino/frontend/paddle/node_context.hpp"
#include "openvino/opsets/opset6.hpp"
namespace ov {
@ -11,7 +10,7 @@ namespace frontend {
namespace paddle {
namespace op {
NamedOutputs argmax(const NodeContext& node) {
auto data = node.get_ng_input("X");
auto data = node.get_input("X");
bool flatten = node.get_attribute<bool>("flatten");
const element::Type& index_element_type = element::i64;
const Output<ov::Node> k = ov::opset6::Constant::create(ov::element::i64, {}, {1});

View File

@ -2,8 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
//
#include <node_context.hpp>
#include "openvino/frontend/paddle/node_context.hpp"
#include "openvino/opsets/opset6.hpp"
namespace ov {
namespace frontend {

View File

@ -2,8 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
//
#include <node_context.hpp>
#include "openvino/frontend/paddle/node_context.hpp"
#include "openvino/opsets/opset6.hpp"
namespace ov {
@ -11,11 +10,11 @@ namespace frontend {
namespace paddle {
namespace op {
NamedOutputs batch_norm(const NodeContext& node) {
auto data = node.get_ng_input("X");
auto gamma = node.get_ng_input("Scale");
auto beta = node.get_ng_input("Bias");
auto mean = node.get_ng_input("Mean");
auto variance = node.get_ng_input("Variance");
auto data = node.get_input("X");
auto gamma = node.get_input("Scale");
auto beta = node.get_input("Bias");
auto mean = node.get_input("Mean");
auto variance = node.get_input("Variance");
auto data_layout = node.get_attribute<std::string>("data_layout");
PADDLE_OP_CHECK(node, (data_layout == "NCHW" || data_layout == "NHWC"), "Not supported input data layout!");

View File

@ -2,8 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
//
#include <node_context.hpp>
#include "openvino/frontend/paddle/node_context.hpp"
#include "openvino/opsets/opset6.hpp"
namespace ov {
@ -11,7 +10,7 @@ namespace frontend {
namespace paddle {
namespace op {
NamedOutputs cast(const NodeContext& node) {
auto data = node.get_ng_input("X");
auto data = node.get_input("X");
auto out_dtype = node.get_attribute<ov::element::Type>("out_dtype");
return node.default_single_output_mapping({std::make_shared<ov::opset6::Convert>(data, out_dtype)}, {"Out"});

View File

@ -2,8 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
//
#include <node_context.hpp>
#include "openvino/frontend/paddle/node_context.hpp"
#include "openvino/opsets/opset6.hpp"
namespace ov {
@ -11,7 +10,7 @@ namespace frontend {
namespace paddle {
namespace op {
NamedOutputs clip(const NodeContext& node) {
auto data = node.get_ng_input("X");
auto data = node.get_input("X");
auto min = node.get_attribute<float>("min");
auto max = node.get_attribute<float>("max");
PADDLE_OP_CHECK(node, max >= min, "clip: max value must greater than min value!");

View File

@ -2,8 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
//
#include <node_context.hpp>
#include "openvino/frontend/paddle/node_context.hpp"
#include "openvino/opsets/opset6.hpp"
namespace ov {

Some files were not shown because too many files have changed in this diff.