Finalize deprecation of public IE API (#17962)

* Remove NV12 and I420 blobs and deprecate some legacy API

* Fixed some errors

* Remove NV12 blobs

* Remove NV12 conversion

* Fixed other warnings

* Suppress version

* Fix some warnings

* Fixed version

* Try to fix some warnings

* Suppress warnings in C header

* Suppress warnings in C

* Fixed Windows exceptions

* Try to fix warnings

* Try to fix C bindings build

* Suppress InferRequest

* Fixed some build issues

* Fixed some errors

* Fixed build all for macOS

* Suppress some warnings

* Fixed merge conflict
Ilya Churaev 2023-06-13 07:12:17 +04:00 committed by GitHub
parent 7f1d26ddca
commit c8f3ed814b
7 changed files with 58 additions and 19 deletions
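
For context on what this deprecation points users toward: the 1.0 InferenceEngine blob API is superseded by the OpenVINO 2.0 ov::Tensor API. The following is a minimal, illustrative sketch contrasting the two paths; it is not taken from this change, and the shape and values are made up.

// Contrasts the deprecated IE 1.0 blob API with the ov::Tensor API that replaces it.
#include <ie_blob.h>                    // legacy API, now emits deprecation diagnostics
#include <openvino/runtime/tensor.hpp>  // replacement API

int main() {
    // Legacy path: still compiles, but every symbol used here is now marked
    // INFERENCE_ENGINE_1_0_DEPRECATED, so warnings must be suppressed explicitly.
    IE_SUPPRESS_DEPRECATED_START
    InferenceEngine::TensorDesc desc(InferenceEngine::Precision::FP32,
                                     {1, 3, 224, 224},
                                     InferenceEngine::Layout::NCHW);
    auto blob = InferenceEngine::make_shared_blob<float>(desc);
    blob->allocate();
    blob->buffer().as<float*>()[0] = 1.0f;
    IE_SUPPRESS_DEPRECATED_END

    // Replacement path: no deprecation warnings.
    ov::Tensor tensor(ov::element::f32, ov::Shape{1, 3, 224, 224});
    tensor.data<float>()[0] = 1.0f;
    return 0;
}
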

View File

@@ -12,6 +12,7 @@
 #include "ie_api.h"
 #include "ie_blob.h"
+IE_SUPPRESS_DEPRECATED_START
 namespace InferenceEngine {
 /**
@@ -24,3 +25,4 @@ namespace InferenceEngine {
 INFERENCE_ENGINE_API_CPP(void) blob_copy(Blob::Ptr src, Blob::Ptr dst);
 } // namespace InferenceEngine
+IE_SUPPRESS_DEPRECATED_END
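
For reference, IE_SUPPRESS_DEPRECATED_START / IE_SUPPRESS_DEPRECATED_END (defined in ie_api.h) follow the usual push/ignore/pop pattern for deprecation diagnostics. A rough sketch of how such a macro pair is commonly defined, not the literal OpenVINO definition, looks like this:

// Sketch of a push/ignore/pop deprecation-suppression macro pair. The names
// SUPPRESS_DEPRECATED_START/END are placeholders; the real definitions live
// in ie_api.h and differ in detail per compiler.
#if defined(_MSC_VER)
#    define SUPPRESS_DEPRECATED_START __pragma(warning(push)) __pragma(warning(disable : 4996))
#    define SUPPRESS_DEPRECATED_END   __pragma(warning(pop))
#else
#    define SUPPRESS_DEPRECATED_START \
        _Pragma("GCC diagnostic push") _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
#    define SUPPRESS_DEPRECATED_END _Pragma("GCC diagnostic pop")
#endif
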

View File

@@ -77,6 +77,7 @@ using SoIVariableStateInternal = ov::SoPtr<IVariableStateInternal>;
  * @brief For compatibility reasons.
  */
 using MemoryStateInternal = IVariableStateInternal;
 IE_SUPPRESS_DEPRECATED_END
 } // namespace InferenceEngine

View File

@@ -9,6 +9,16 @@
  */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+#    define IE_LEGACY_HEADER_INCLUDED
+#    ifdef _MSC_VER
+#        pragma message( \
+            "The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    else
+#        warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    endif
+#endif
 #include <cstring>
 #include <functional>
 #include <map>
@@ -35,7 +45,7 @@ IE_SUPPRESS_DEPRECATED_START
  *
  * @note Each Blob implementation must be derived from this Blob class directly or indirectly
  */
-class INFERENCE_ENGINE_API_CLASS(Blob) {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(Blob) {
 public:
     /**
      * @brief A smart pointer containing Blob object
@@ -287,7 +297,7 @@ protected:
 template <typename T,
           typename std::enable_if<!std::is_pointer<T>::value && !std::is_reference<T>::value, int>::type = 0,
          typename std::enable_if<std::is_base_of<Blob, T>::value, int>::type = 0>
-std::shared_ptr<T> as(const Blob::Ptr& blob) noexcept {
+INFERENCE_ENGINE_1_0_DEPRECATED std::shared_ptr<T> as(const Blob::Ptr& blob) noexcept {
     return std::dynamic_pointer_cast<T>(blob);
 }
@@ -300,7 +310,7 @@ std::shared_ptr<T> as(const Blob::Ptr& blob) noexcept {
 template <typename T,
           typename std::enable_if<!std::is_pointer<T>::value && !std::is_reference<T>::value, int>::type = 0,
          typename std::enable_if<std::is_base_of<Blob, T>::value, int>::type = 0>
-std::shared_ptr<const T> as(const Blob::CPtr& blob) noexcept {
+INFERENCE_ENGINE_1_0_DEPRECATED std::shared_ptr<const T> as(const Blob::CPtr& blob) noexcept {
     return std::dynamic_pointer_cast<const T>(blob);
 }
@@ -311,7 +321,7 @@ std::shared_ptr<const T> as(const Blob::CPtr& blob) noexcept {
  * @note Any Blob implementation that represents a concept of a tensor in memory (for example,
  * TBlob) must be a subclass of MemoryBlob instead of Blob
  */
-class INFERENCE_ENGINE_API_CLASS(MemoryBlob) : public Blob {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(MemoryBlob) : public Blob {
 public:
     /**
      * @brief A smart pointer to the MemoryBlob object
@@ -510,7 +520,7 @@ using BlobMap = std::map<std::string, Blob::Ptr>;
  * @brief Represents real host memory allocated for a Tensor/Blob per C type.
  */
 template <typename T, typename = std::enable_if<std::is_standard_layout<T>::value && std::is_trivial<T>::value>>
-class TBlob : public MemoryBlob {
+class INFERENCE_ENGINE_1_0_DEPRECATED TBlob : public MemoryBlob {
     template <typename, typename>
     friend class TBlob;
@@ -835,7 +845,8 @@ extern template class INFERENCE_ENGINE_API_CLASS(InferenceEngine::TBlob<char>);
 * @return A shared pointer to the newly created blob of the given type
 */
 template <typename Type>
-inline typename InferenceEngine::TBlob<Type>::Ptr make_shared_blob(const TensorDesc& tensorDesc) {
+inline INFERENCE_ENGINE_1_0_DEPRECATED typename InferenceEngine::TBlob<Type>::Ptr make_shared_blob(
+    const TensorDesc& tensorDesc) {
     if (!tensorDesc.getPrecision().hasStorageType<Type>())
         IE_THROW() << "Cannot make shared blob! "
                    << "The blob type cannot be used to store objects of current precision";
@@ -852,9 +863,8 @@ inline typename InferenceEngine::TBlob<Type>::Ptr make_shared_blob(const TensorD
 * @return A shared pointer to the newly created blob of the given type
 */
 template <typename Type>
-inline typename InferenceEngine::TBlob<Type>::Ptr make_shared_blob(const TensorDesc& tensorDesc,
-                                                                   Type* ptr,
-                                                                   size_t size = 0) {
+inline INFERENCE_ENGINE_1_0_DEPRECATED typename InferenceEngine::TBlob<Type>::Ptr
+make_shared_blob(const TensorDesc& tensorDesc, Type* ptr, size_t size = 0) {
     if (!tensorDesc.getPrecision().hasStorageType<Type>())
         IE_THROW() << "Cannot make shared blob! "
                    << "The blob type cannot be used to store objects of current precision";
@@ -870,7 +880,7 @@ inline typename InferenceEngine::TBlob<Type>::Ptr make_shared_blob(const TensorD
 * @return A shared pointer to the newly created blob of the given type
 */
 template <typename Type>
-inline typename InferenceEngine::TBlob<Type>::Ptr make_shared_blob(
+inline INFERENCE_ENGINE_1_0_DEPRECATED typename InferenceEngine::TBlob<Type>::Ptr make_shared_blob(
     const TensorDesc& tensorDesc,
     const std::shared_ptr<InferenceEngine::IAllocator>& alloc) {
     if (!tensorDesc.getPrecision().hasStorageType<Type>())
@@ -887,7 +897,8 @@ inline typename InferenceEngine::TBlob<Type>::Ptr make_shared_blob(
 * @return A shared pointer to the newly created blob of the given type
 */
 template <typename TypeTo>
-inline typename InferenceEngine::TBlob<TypeTo>::Ptr make_shared_blob(const TBlob<TypeTo>& arg) {
+inline INFERENCE_ENGINE_1_0_DEPRECATED typename InferenceEngine::TBlob<TypeTo>::Ptr make_shared_blob(
+    const TBlob<TypeTo>& arg) {
     return std::make_shared<InferenceEngine::TBlob<TypeTo>>(arg);
 }
@@ -898,7 +909,7 @@ inline typename InferenceEngine::TBlob<TypeTo>::Ptr make_shared_blob(const TBlob
 * @return A shared pointer to the newly created Blob object
 */
 template <typename T, typename... Args, typename std::enable_if<std::is_base_of<Blob, T>::value, int>::type = 0>
-std::shared_ptr<T> make_shared_blob(Args&&... args) {
+INFERENCE_ENGINE_1_0_DEPRECATED std::shared_ptr<T> make_shared_blob(Args&&... args) {
     return std::make_shared<T>(std::forward<Args>(args)...);
 }
@@ -909,7 +920,8 @@ std::shared_ptr<T> make_shared_blob(Args&&... args) {
 * @param roi A ROI object inside of the original blob.
 * @return A shared pointer to the newly created blob.
 */
-INFERENCE_ENGINE_API_CPP(Blob::Ptr) make_shared_blob(const Blob::Ptr& inputBlob, const ROI& roi);
+INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CPP(Blob::Ptr)
+make_shared_blob(const Blob::Ptr& inputBlob, const ROI& roi);
 /**
  * @brief Creates a blob describing given ROI object based on the given blob with pre-allocated memory.
@@ -919,8 +931,8 @@ INFERENCE_ENGINE_API_CPP(Blob::Ptr) make_shared_blob(const Blob::Ptr& inputBlob,
 * @param end A ROI object end coordinate inside of the original blob.
 * @return A shared pointer to the newly created blob.
 */
-INFERENCE_ENGINE_API_CPP(Blob::Ptr)
+INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CPP(Blob::Ptr)
 make_shared_blob(const Blob::Ptr& inputBlob, const std::vector<size_t>& begin, const std::vector<size_t>& end);
 IE_SUPPRESS_DEPRECATED_END
 } // namespace InferenceEngine
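
The header-level message added at the top of this file is guarded by IN_OV_COMPONENT and IE_LEGACY_HEADER_INCLUDED, so a consumer that knowingly stays on the 1.0 API for now can acknowledge it by defining one of those macros before the first legacy include. Illustrative sketch only (it assumes the legacy include directory is on the include path); per-symbol warnings from INFERENCE_ENGINE_1_0_DEPRECATED still apply unless suppressed:

// Skips the one-time "Inference Engine API is deprecated" message, per the
// #if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED) guard
// added above. Define it before the first legacy header is included.
#define IE_LEGACY_HEADER_INCLUDED
#include <ie_blob.h>
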

View File

@@ -9,6 +9,16 @@
  */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+#    define IE_LEGACY_HEADER_INCLUDED
+#    ifdef _MSC_VER
+#        pragma message( \
+            "The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    else
+#        warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    endif
+#endif
 #include <map>
 #include <memory>
 #include <string>
@@ -24,7 +34,7 @@ class RemoteContext;
  * @brief This class represents an Inference Engine abstraction to the memory allocated
  * on the remote (non-CPU) accelerator device
  */
-class RemoteBlob : public MemoryBlob {
+class INFERENCE_ENGINE_1_0_DEPRECATED RemoteBlob : public MemoryBlob {
 public:
     /**
      * @brief A smart pointer to the RemoteBlob object

View File

@@ -9,6 +9,16 @@
  */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+#    define IE_LEGACY_HEADER_INCLUDED
+#    ifdef _MSC_VER
+#        pragma message( \
+            "The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    else
+#        warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    endif
+#endif
 #include <map>
 #include <memory>
 #include <string>
@@ -25,7 +35,8 @@ IE_SUPPRESS_DEPRECATED_START
  * Such context represents a scope on the device within which executable
  * networks and remote memory blobs can exist, function and exchange data.
  */
-class INFERENCE_ENGINE_API_CLASS(RemoteContext) : public std::enable_shared_from_this<RemoteContext> {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(RemoteContext)
+    : public std::enable_shared_from_this<RemoteContext> {
 public:
     /**
      * @brief A smart pointer to the RemoteContext object
@@ -139,7 +150,8 @@ public:
 * @param ctx Pointer to the plugin object derived from RemoteContext.
 * @return A pointer to plugin object that implements RemoteBlob interface.
 */
-inline RemoteBlob::Ptr make_shared_blob(const TensorDesc& desc, RemoteContext::Ptr ctx) {
+inline INFERENCE_ENGINE_1_0_DEPRECATED RemoteBlob::Ptr make_shared_blob(const TensorDesc& desc,
+                                                                        RemoteContext::Ptr ctx) {
     return ctx->CreateBlob(desc);
 }
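
For the remote-memory path deprecated in this hunk, the 2.0 counterpart to make_shared_blob(desc, ctx) is creating a remote tensor through ov::RemoteContext. A hedged sketch follows; the device name "GPU" and the shape are placeholders and not part of this change:

// Creates device-side memory via the OpenVINO 2.0 remote-context API instead
// of the deprecated InferenceEngine::RemoteBlob helper.
#include <openvino/runtime/core.hpp>
#include <openvino/runtime/remote_context.hpp>

int main() {
    ov::Core core;
    ov::RemoteContext ctx = core.get_default_context("GPU");
    ov::RemoteTensor remote = ctx.create_tensor(ov::element::f32, ov::Shape{1, 3, 224, 224});
    (void)remote;
    return 0;
}
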

View File

@@ -43,10 +43,12 @@ std::shared_ptr<ITensor> make_tensor(const std::shared_ptr<ITensor>& other,
                                      const Coordinate& begin,
                                      const Coordinate& end);
+IE_SUPPRESS_DEPRECATED_START
 /** @cond INTERNAL */
 std::shared_ptr<ITensor> make_tensor(const std::shared_ptr<InferenceEngine::Blob>& tensor);
 std::shared_ptr<InferenceEngine::Blob> tensor_to_blob(const std::shared_ptr<ITensor>& tensor);
 /** @endcond */
+IE_SUPPRESS_DEPRECATED_END
 } // namespace ov

View File

@@ -85,6 +85,7 @@ inline bool strDoesnotContain(const std::string & str, const std::string & subst
         FAIL() << "Unknown exception"; \
     }
+IE_SUPPRESS_DEPRECATED_START
 inline void compare_blob(InferenceEngine::Blob::Ptr lhs, InferenceEngine::Blob::Ptr rhs) {
     ASSERT_EQ(lhs.get(), rhs.get());
     //TODO: add blob specific comparison for general case
@@ -97,7 +98,6 @@ inline void compare_dims(const InferenceEngine::SizeVector & lhs, const Inferenc
     }
 }
-IE_SUPPRESS_DEPRECATED_START
 inline void compare_data(const InferenceEngine::Data & lhs, const InferenceEngine::Data & rhs) {
     ASSERT_DIMS_EQ(lhs.getDims(), rhs.getDims());
     ASSERT_STREQ(lhs.getName().c_str(), rhs.getName().c_str());