Deprecate legacy Core and Allocator (#17646)

* Deprecate legacy Core and Allocator

* Suppress blob warnings

* Suppress some warnings

* Suppress more warnings

* Suppress blob allocator

* Suppress more warnings

* Suppress more warnings

* Fixed compilation issues for Template plugin

* Fixed some warnings

* Fixed tests

* Add workaround for benchmark_app

* Suppress #warning for developer package

* Rename define

* Disable warnings for compile_tool and benchmark_app

* Suppress Windows warnings

* Suppress more warnings for Windows

* Fixed compile_tool install

* Added message for VS

* Fixed snippets; throw only the first error
Ilya Churaev 2023-05-26 07:06:03 +04:00 committed by GitHub
parent ef041565a8
commit dd0060a582
54 changed files with 294 additions and 277 deletions

View File

@@ -54,6 +54,8 @@ macro(ov_deprecated_no_errors)
 endif()
 elseif(OV_COMPILER_IS_CLANG OR CMAKE_COMPILER_IS_GNUCXX)
 set(ie_c_cxx_deprecated_no_errors "-Wno-error=deprecated-declarations")
+# Suppress #warning messages
+set(ie_c_cxx_deprecated_no_errors "${ie_c_cxx_deprecated_no_errors} -Wno-cpp")
 else()
 message(WARNING "Unsupported CXX compiler ${CMAKE_CXX_COMPILER_ID}")
 endif()
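Note: on GCC and Clang the #warning directive is diagnosed under -Wcpp, so the extra -Wno-cpp flag keeps the new deprecation banners from becoming hard errors in -Werror builds. A minimal illustration (hypothetical file, not part of this change):

// demo.cpp: with -Werror this fails to compile unless -Wno-cpp is also passed
#warning "This header is deprecated"

int main() {
    return 0;
}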

View File

@@ -1,30 +0,0 @@
-#include <ie_core.hpp>
-int main() {
-{
-//! [part1]
-// Inference Engine API
-InferenceEngine::Core ie;
-// Read a network in IR, PaddlePaddle, or ONNX format:
-InferenceEngine::CNNNetwork network = ie.ReadNetwork("sample.xml");
-// Load a network to AUTO using the default list of device candidates.
-// The following lines are equivalent:
-InferenceEngine::ExecutableNetwork exec0 = ie.LoadNetwork(network);
-InferenceEngine::ExecutableNetwork exec1 = ie.LoadNetwork(network, "AUTO");
-InferenceEngine::ExecutableNetwork exec2 = ie.LoadNetwork(network, "AUTO", {});
-// Optional
-// You can also specify the devices to be used by AUTO in its selection process.
-// The following lines are equivalent:
-InferenceEngine::ExecutableNetwork exec3 = ie.LoadNetwork(network, "AUTO:GPU,CPU");
-InferenceEngine::ExecutableNetwork exec4 = ie.LoadNetwork(network, "AUTO", {{"MULTI_DEVICE_PRIORITIES", "GPU,CPU"}});
-// Optional
-// the AUTO plugin is pre-configured (globally) with the explicit option:
-ie.SetConfig({{"MULTI_DEVICE_PRIORITIES", "GPU,CPU"}}, "AUTO");
-//! [part1]
-}
-return 0;
-}
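For reference, the removed snippet maps onto the OpenVINO 2.0 API roughly as follows. This is a sketch following the 2.0 transition guide, not code from this commit:

#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    // read_model() replaces ReadNetwork()
    std::shared_ptr<ov::Model> model = core.read_model("sample.xml");
    // compile_model() replaces LoadNetwork(); the next two lines are equivalent:
    ov::CompiledModel compiled0 = core.compile_model(model);
    ov::CompiledModel compiled1 = core.compile_model(model, "AUTO");
    // Optionally limit the device candidates AUTO selects from:
    ov::CompiledModel compiled2 = core.compile_model(model, "AUTO", ov::device::priorities("GPU,CPU"));
    // Or pre-configure the AUTO plugin globally, replacing SetConfig():
    core.set_property("AUTO", ov::device::priorities("GPU,CPU"));
    return 0;
}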

View File

@@ -1,12 +0,0 @@
-#include <ie_core.hpp>
-int main() {
-{
-//! [part2]
-InferenceEngine::Core ie;
-InferenceEngine::CNNNetwork network = ie.ReadNetwork("sample.xml");
-InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(network, "AUTO");
-//! [part2]
-}
-return 0;
-}

View File

@@ -1,14 +0,0 @@
-#include <ie_core.hpp>
-int main() {
-using namespace InferenceEngine;
-//! [part0]
-InferenceEngine::Core core;
-// Load CPU extension as a shared library
-auto extension_ptr = std::make_shared<InferenceEngine::Extension>(std::string{"<shared lib path>"});
-// Add extension to the CPU device
-core.AddExtension(extension_ptr, "CPU");
-//! [part0]
-return 0;
-}
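In the 2.0 API the same step collapses to a single call on ov::Core, and the extension is registered core-wide rather than per device. A sketch reusing the placeholder path from the removed snippet:

#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    // add_extension() loads the shared library and registers its extensions
    core.add_extension("<shared lib path>");
    return 0;
}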

View File

@@ -1,27 +0,0 @@
-#include <openvino/openvino.hpp>
-int main() {
-//! [part1]
-ov::Core core;
-std::shared_ptr<ov::Model> model = core.read_model("sample.xml");
-ov::CompiledModel compileModel = core.compile_model(model, "MULTI:CPU,GPU");
-// Once the priority list is set, you can alter it on the fly:
-// reverse the order of priorities
-compileModel.set_property(ov::device::priorities("GPU,CPU"));
-// exclude some devices (in this case, CPU)
-compileModel.set_property(ov::device::priorities("GPU"));
-// bring back the excluded devices
-compileModel.set_property(ov::device::priorities("GPU,CPU"));
-// You cannot add new devices on the fly!
-// Attempting to do so will trigger the following exception:
-// [ ERROR ] [NOT_FOUND] You can only change device
-// priorities but not add new devices with the model's
-// ov::device::priorities. CPU device was not in the original device list!
-//! [part1]
-return 0;
-}

View File

@@ -1,7 +1,6 @@
 #include <openvino/runtime/core.hpp>
 int main() {
-using namespace InferenceEngine;
 //! [part1]
 ov::Core core;
 auto network = core.read_model("sample.xml");

View File

@@ -1,7 +1,6 @@
 #include <openvino/runtime/core.hpp>
 int main() {
-using namespace InferenceEngine;
 //! [part2]
 ov::Core core;
 core.set_property("CPU", ov::hint::inference_precision(ov::element::f32));

View File

@@ -1,5 +1,3 @@
-#include <ie_core.hpp>
 int main() {
 //! [part8]
 while(true) {

View File

@@ -1,5 +1,3 @@
-#include <ie_core.hpp>
 int main() {
 //! [part9]
 while(true) {

View File

@@ -2,10 +2,20 @@
 // SPDX-License-Identifier: Apache-2.0
 //
+#ifndef IN_OV_COMPONENT
+# define IN_OV_COMPONENT
+# define WAS_OV_LIBRARY_DEFINED
+#endif
 #include <threading/ie_itask_executor.hpp>
 #include <cpp_interfaces/impl/ie_infer_async_request_thread_safe_default.hpp>
 #include <memory>
+#ifdef WAS_OV_LIBRARY_DEFINED
+# undef IN_OV_COMPONENT
+# undef WAS_OV_LIBRARY_DEFINED
+#endif
 using namespace InferenceEngine;
 class AcceleratorSyncRequest : public IInferRequestInternal {
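The define/undef pair added above is the pattern this change applies wherever snippet code must include a legacy header without tripping the new deprecation banner: pose as an OpenVINO component for the duration of the include, then restore the previous state. Condensed, with a hypothetical header name:

#ifndef IN_OV_COMPONENT
# define IN_OV_COMPONENT        // the legacy header's warning block checks this
# define WAS_OV_LIBRARY_DEFINED // remember that we defined it ourselves
#endif
#include "some_legacy_header.hpp" // placeholder for any Inference Engine header
#ifdef WAS_OV_LIBRARY_DEFINED
# undef IN_OV_COMPONENT          // restore the state user code expects
# undef WAS_OV_LIBRARY_DEFINED
#endif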

View File

@@ -2,8 +2,17 @@
 // SPDX-License-Identifier: Apache-2.0
 //
+#ifndef IN_OV_COMPONENT
+# define IN_OV_COMPONENT
+# define WAS_OV_LIBRARY_DEFINED
+#endif
 #include <threading/ie_cpu_streams_executor.hpp>
+#ifdef WAS_OV_LIBRARY_DEFINED
+# undef IN_OV_COMPONENT
+# undef WAS_OV_LIBRARY_DEFINED
+#endif
 #include <memory>
 #include <future>
 #include <iostream>

View File

@@ -1,155 +0,0 @@
-// Copyright (C) 2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-#include <memory>
-#include <ngraph/rt_info.hpp>
-#include <openvino/pass/pattern/op/wrap_type.hpp>
-#include <openvino/pass/manager.hpp>
-#include <openvino/pass/visualize_tree.hpp>
-#include <openvino/pass/serialize.hpp>
-#include <transformations/common_optimizations/common_optimizations.hpp>
-#include <transformations/op_conversions/convert_gelu.hpp>
-#include <transformations/op_conversions/convert_space_to_depth.hpp>
-#include <transformations/op_conversions/convert_depth_to_space.hpp>
-#include <transformations/op_conversions/convert_pad_to_group_conv.hpp>
-// ! [ov:include]
-#include <openvino/core/model.hpp>
-#include <openvino/opsets/opset8.hpp>
-// ! [ov:include]
-bool ngraph_api_examples(std::shared_ptr<ov::Node> node) {
-{
-// ! [ngraph:ports_example]
-// Let's suppose that node is opset8::Convolution operation
-// as we know opset8::Convolution has two input ports (data, weights) and one output port
-ov::Input<ov::Node> data = node->input(0);
-ov::Input<ov::Node> weights = node->input(1);
-ov::Output<ov::Node> output = node->output(0);
-// Getting shape and type
-auto pshape = data.get_partial_shape();
-auto el_type = data.get_element_type();
-// Getting parent for input port
-ov::Output<ov::Node> parent_output;
-parent_output = data.get_source_output();
-// Another short way to get partent for output port
-parent_output = node->input_value(0);
-// Getting all consumers for output port
-auto consumers = output.get_target_inputs();
-// ! [ngraph:ports_example]
-(void)el_type;
-(void)pshape;
-}
-{
-// ! [ngraph:shape_check]
-auto partial_shape = node->input(0).get_partial_shape(); // get zero input partial shape
-// Check that input shape rank is static
-if (!partial_shape.rank().is_static()) {
-return false;
-}
-auto rank_size = partial_shape.rank().get_length();
-// Check that second dimension is not dynamic
-if (rank_size < 2 || partial_shape[1].is_dynamic()) {
-return false;
-}
-auto dim = partial_shape[1].get_length();
-// ! [ngraph:shape_check]
-}
-return true;
-}
-// ! [ov:serialize]
-void serialize_example(std::shared_ptr<ov::Model> f) {
-ov::pass::Manager manager;
-// Serialize ov::Function to before.svg file before transformation
-manager.register_pass<ov::pass::VisualizeTree>("/path/to/file/before.svg");
-// Run your transformation
-// manager.register_pass<ov::pass::MyTransformation>();
-// Serialize ov::Function to after.svg file after transformation
-manager.register_pass<ov::pass::VisualizeTree>("/path/to/file/after.svg");
-manager.run_passes(f);
-}
-// ! [ov:serialize]
-// ! [ov:visualize]
-void visualization_example(std::shared_ptr<ov::Model> f) {
-ov::pass::Manager manager;
-// Serialize ov::Function to IR
-manager.register_pass<ov::pass::Serialize>("/path/to/file/model.xml", "/path/to/file/model.bin");
-manager.run_passes(f);
-}
-// ! [ov:visualize]
-void pass_manager_example1(std::shared_ptr<ov::Model> f) {
-// ! [ngraph:disable_gelu]
-ov::pass::Manager manager;
-manager.register_pass<ov::pass::CommonOptimizations>();
-auto pass_config = manager.get_pass_config();
-pass_config->disable<ov::pass::ConvertGELU>();
-manager.run_passes(f);
-// ! [ngraph:disable_gelu]
-}
-void pass_manager_example2(std::shared_ptr<ov::Model> f) {
-ov::pass::Manager manager;
-std::function<bool(const std::shared_ptr<const ov::Node>)> transformation_callback;
-// ! [ngraph:disable_callback]
-// Set callback to particular transformation with specific condition
-auto pass_config = manager.get_pass_config();
-pass_config->set_callback<ov::pass::ConvertSpaceToDepth,
-ov::pass::ConvertDepthToSpace>(
-[](const std::shared_ptr<const ov::Node> &node) -> bool {
-return node->input_value(0).get_shape().size() <= 5lu &&
-node->input_value(0).get_shape().size() == node->get_output_shape(0).size();
-});
-// Update transformation to call callback
-ov::matcher_pass_callback callback = [=](ov::pass::pattern::Matcher &m) {
-auto node = m.get_match_root();
-if (transformation_callback(node)) {
-return false;
-}
-// transformation code
-return false;
-};
-// ! [ngraph:disable_callback]
-}
-void pass_manager_example3(std::shared_ptr<ov::Model> f) {
-std::function<bool(const std::shared_ptr<const ov::Node>)> transformation_callback;
-// ! [ngraph:disabled_by_default]
-// Example of disabled by default transformation
-{
-ov::pass::Manager manager;
-manager.register_pass<ov::pass::ConvertPadToGroupConvolution, false>();
-manager.run_passes(f);
-}
-// Enable disabled by default transformation inside plugin
-{
-ov::pass::Manager manager;
-manager.register_pass<ov::pass::CommonOptimizations>();
-auto pass_config = manager.get_pass_config();
-pass_config->enable<ov::pass::ConvertPadToGroupConvolution>();
-manager.run_passes(f);
-}
-// ! [ngraph:disabled_by_default]
-}

View File

@@ -2,8 +2,17 @@
 // SPDX-License-Identifier: Apache-2.0
 //
+#ifndef IN_OV_COMPONENT
+# define IN_OV_COMPONENT
+# define WAS_OV_LIBRARY_DEFINED
+#endif
 #include <ie_core.hpp>
+#ifdef WAS_OV_LIBRARY_DEFINED
+# undef IN_OV_COMPONENT
+# undef WAS_OV_LIBRARY_DEFINED
+#endif
 int main() {
 //! [ie:create_core]
 InferenceEngine::Core core;

View File

@@ -1,5 +1,3 @@
-#include <ie_core.hpp>
 #include <transformations/low_precision/mark_dequantization_subgraph.hpp>
 #include <low_precision/common/quantization_granularity_restriction.hpp>

View File

@@ -1,8 +1,18 @@
 // Copyright (C) 2018-2021 Intel Corporation
 // SPDX-License-Identifier: Apache-2.0
 //
+#ifndef IN_OV_COMPONENT
+# define IN_OV_COMPONENT
+# define WAS_OV_LIBRARY_DEFINED
+#endif
 #include <ie_extension.h>
+#ifdef WAS_OV_LIBRARY_DEFINED
+# undef IN_OV_COMPONENT
+# undef WAS_OV_LIBRARY_DEFINED
+#endif
 #include <openvino/core/core.hpp>
 #include <openvino/runtime/runtime.hpp>

View File

@@ -5,8 +5,18 @@
 #include <openvino/opsets/opset8.hpp>
 #include <openvino/core/preprocess/pre_post_process.hpp>
+#ifndef IN_OV_COMPONENT
+# define IN_OV_COMPONENT
+# define WAS_OV_LIBRARY_DEFINED
+#endif
 #include "inference_engine.hpp"
+#ifdef WAS_OV_LIBRARY_DEFINED
+# undef IN_OV_COMPONENT
+# undef WAS_OV_LIBRARY_DEFINED
+#endif
 int main_new() {
 std::string model_path;
 std::string tensor_name;

View File

@@ -1,6 +1,16 @@
 #include <openvino/runtime/core.hpp>
+#ifndef IN_OV_COMPONENT
+# define IN_OV_COMPONENT
+# define WAS_OV_LIBRARY_DEFINED
+#endif
 #include <inference_engine.hpp>
+#ifdef WAS_OV_LIBRARY_DEFINED
+# undef IN_OV_COMPONENT
+# undef WAS_OV_LIBRARY_DEFINED
+#endif
 int main_new() {
 ov::Core core;

View File

@@ -2,6 +2,7 @@
 # SPDX-License-Identifier: Apache-2.0
 #
+add_definitions(-DIN_OV_COMPONENT)
 add_definitions(-DPROJECT_ROOT_DIR="${OpenVINO_SOURCE_DIR}")
 include(cmake/install_tbb.cmake)

View File

@@ -10,6 +10,7 @@
 #include "openvino/runtime/common.hpp"
 #include "system_allocator.hpp" // IE private header
+IE_SUPPRESS_DEPRECATED_START
 namespace InferenceEngine {
 struct BlobAllocator : public IAllocator {
 BlobAllocator(const ov::Allocator& impl) : _impl{impl} {}
@@ -79,3 +80,4 @@ struct BlobAllocator {
 std::shared_ptr<ie::IAllocator> _impl;
 };
 } // namespace ov
+IE_SUPPRESS_DEPRECATED_END
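The IE_SUPPRESS_DEPRECATED_START/END pair is only used by this diff, not defined in it (it lives in ie_api.h). A typical shape for such macros, assuming the usual pragma-based implementation:

#if defined(_MSC_VER)
# define IE_SUPPRESS_DEPRECATED_START __pragma(warning(push)) __pragma(warning(disable : 4996))
# define IE_SUPPRESS_DEPRECATED_END   __pragma(warning(pop))
#else
# define IE_SUPPRESS_DEPRECATED_START \
    _Pragma("GCC diagnostic push") _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
# define IE_SUPPRESS_DEPRECATED_END _Pragma("GCC diagnostic pop")
#endif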

View File

@@ -18,6 +18,7 @@
 #include "ie_memcpy.h"
 #include "ie_preprocess.hpp"
+IE_SUPPRESS_DEPRECATED_START
 /**
 * @private
 */
@@ -147,3 +148,4 @@ void CopyVectorToBlob(const InferenceEngine::Blob::Ptr outputBlob, const std::ve
 IE_THROW() << "Element size mismatch between blob and vector";
 ie_memcpy(outputBlob->buffer().as<T*>(), outputBlob->byteSize(), &inputVector[0], inputVector.size() * sizeof(T));
 }
+IE_SUPPRESS_DEPRECATED_END

View File

@@ -18,6 +18,7 @@
 namespace InferenceEngine {
+IE_SUPPRESS_DEPRECATED_START
 class IExecutableNetworkInternal;
 class IVariableStateInternal;
@@ -368,4 +369,6 @@ private:
 */
 using SoIInferRequestInternal = ov::SoPtr<IInferRequestInternal>;
+IE_SUPPRESS_DEPRECATED_END
 } // namespace InferenceEngine

View File

@@ -9,6 +9,16 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <map>
 #include <memory>
 #include <string>
@@ -28,7 +38,7 @@ class IExtension;
 /**
 * @brief This class contains all the information about the Neural Network and the related binary information
 */
-class INFERENCE_ENGINE_API_CLASS(CNNNetwork) {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(CNNNetwork) {
 public:
 /**
 * @brief A default constructor
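The guard fires at most once per translation unit (IE_LEGACY_HEADER_INCLUDED) and never inside OpenVINO's own components (IN_OV_COMPONENT); MSVC has no #warning directive, hence the #pragma message branch. From a consumer's point of view (hypothetical file):

// app.cpp: the first legacy header emits the 2024.0 deprecation message once;
// the second stays silent because IE_LEGACY_HEADER_INCLUDED is already defined
#include <ie_core.hpp>
#include <ie_blob.h>

int main() {
    return 0;
}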

View File

@@ -10,17 +10,29 @@
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <utility>
+#include "ie_api.h"
 #include "ie_locked_memory.hpp"
+IE_SUPPRESS_DEPRECATED_START
 namespace InferenceEngine {
 namespace details {
 /**
 * @brief This class provides range loops support for TBlob objects
 */
 template <class T>
-class BlobIterator {
+class INFERENCE_ENGINE_1_0_DEPRECATED BlobIterator {
 LockedMemory<T> _mem;
 size_t _offset;
@@ -85,3 +97,4 @@ public:
 };
 } // namespace details
 } // namespace InferenceEngine
+IE_SUPPRESS_DEPRECATED_END

View File

@@ -4,4 +4,14 @@
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include "ie_common.h"

View File

@@ -9,16 +9,27 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <memory>
 #include "ie_allocator.hpp"
+IE_SUPPRESS_DEPRECATED_START
 namespace InferenceEngine {
 namespace details {
 /*
 * @brief This is a helper class to wrap external memory
 */
-class PreAllocator final : public IAllocator {
+class INFERENCE_ENGINE_1_0_DEPRECATED PreAllocator final : public IAllocator {
 void* _actualData;
 size_t _sizeInBytes;
@@ -67,9 +78,10 @@ public:
 * @return A new allocator
 */
 template <class T>
-std::shared_ptr<IAllocator> make_pre_allocator(T* ptr, size_t size) {
+std::shared_ptr<IAllocator> INFERENCE_ENGINE_1_0_DEPRECATED make_pre_allocator(T* ptr, size_t size) {
 return std::make_shared<PreAllocator>(ptr, size * sizeof(T));
 }
 } // namespace details
 } // namespace InferenceEngine
+IE_SUPPRESS_DEPRECATED_END

View File

@@ -9,6 +9,16 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <memory>
 #include "ie_api.h"
@@ -20,8 +30,7 @@ namespace details {
 * @deprecated This is internal stuff. Use Inference Engine Plugin API
 * @brief This class provides an OS shared module abstraction
 */
-class INFERENCE_ENGINE_DEPRECATED("This is internal stuff. Use Inference Engine Plugin API")
-INFERENCE_ENGINE_API_CLASS(SharedObjectLoader) {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(SharedObjectLoader) {
 std::shared_ptr<void> _so;
 public:

View File

@@ -8,6 +8,16 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <cassert>
 #include <functional>
 #include <memory>
@@ -24,7 +34,7 @@ namespace details {
 * parameter
 */
 template <class T>
-class SOCreatorTrait {};
+class INFERENCE_ENGINE_1_0_DEPRECATED SOCreatorTrait {};
 /**
 * @brief Enables only `char` or `wchar_t` template specializations
@@ -40,7 +50,7 @@ using enableIfSupportedChar =
 * @tparam T An type of object SOPointer can hold
 */
 template <class T>
-class INFERENCE_ENGINE_DEPRECATED("This is internal stuff. Use Inference Engine Plugin API") SOPointer {
+class INFERENCE_ENGINE_1_0_DEPRECATED SOPointer {
 template <class U>
 friend class SOPointer;

View File

@@ -10,6 +10,12 @@
 */
 #pragma once
+// TODO: Remove after migration to new API in the benchmark app
+#ifndef IN_OV_COMPONENT
+# define IN_OV_COMPONENT
+# define WAS_OV_LIBRARY_DEFINED
+#endif
 #include <ie_remote_context.hpp>
 #include <memory>
 #include <string>
@@ -362,3 +368,8 @@ static inline Blob::Ptr make_shared_blob(const TensorDesc& desc, RemoteContext::
 } // namespace gpu
 } // namespace InferenceEngine
+#ifdef WAS_OV_LIBRARY_DEFINED
+# undef IN_OV_COMPONENT
+# undef WAS_OV_LIBRARY_DEFINED
+#endif

View File

@@ -9,16 +9,27 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <memory>
 #include "ie_api.h"
+IE_SUPPRESS_DEPRECATED_START
 namespace InferenceEngine {
 /**
 * @brief Allocator handle mapping type
 */
-enum LockOp {
+enum INFERENCE_ENGINE_1_0_DEPRECATED LockOp {
 LOCK_FOR_READ = 0, //!< A flag to lock data for read
 LOCK_FOR_WRITE //!< A flag to lock data for write
 };
@@ -27,7 +38,7 @@ enum LockOp {
 * @interface IAllocator
 * @brief Allocator concept to be used for memory management and is used as part of the Blob.
 */
-class IAllocator : public std::enable_shared_from_this<IAllocator> {
+class INFERENCE_ENGINE_1_0_DEPRECATED IAllocator : public std::enable_shared_from_this<IAllocator> {
 public:
 /**
 * @brief Maps handle to heap memory accessible by any memory manipulation routines.
@@ -69,6 +80,8 @@ protected:
 *
 * @return The Inference Engine IAllocator* instance
 */
-INFERENCE_ENGINE_API_CPP(std::shared_ptr<InferenceEngine::IAllocator>) CreateDefaultAllocator() noexcept;
+INFERENCE_ENGINE_API_CPP(std::shared_ptr<InferenceEngine::IAllocator>)
+INFERENCE_ENGINE_1_0_DEPRECATED CreateDefaultAllocator() noexcept;
 } // namespace InferenceEngine
+IE_SUPPRESS_DEPRECATED_END

View File

@@ -28,6 +28,7 @@
 #include "ie_precision.hpp"
 namespace InferenceEngine {
+IE_SUPPRESS_DEPRECATED_START
 /**
 * @brief This class represents a universal container in the Inference Engine
@@ -921,4 +922,5 @@ INFERENCE_ENGINE_API_CPP(Blob::Ptr) make_shared_blob(const Blob::Ptr& inputBlob,
 INFERENCE_ENGINE_API_CPP(Blob::Ptr)
 make_shared_blob(const Blob::Ptr& inputBlob, const std::vector<size_t>& begin, const std::vector<size_t>& end);
+IE_SUPPRESS_DEPRECATED_END
 } // namespace InferenceEngine

View File

@@ -15,6 +15,7 @@
 #include "ie_blob.h"
+IE_SUPPRESS_DEPRECATED_START
 namespace InferenceEngine {
 /**
 * @brief This class represents a blob that contains other blobs
@@ -315,3 +316,4 @@ public:
 explicit BatchedBlob(std::vector<Blob::Ptr>&& blobs);
 };
 } // namespace InferenceEngine
+IE_SUPPRESS_DEPRECATED_END

View File

@@ -9,6 +9,16 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <istream>
 #include <map>
 #include <memory>
@@ -29,7 +39,7 @@ namespace InferenceEngine {
 *
 * It can throw exceptions safely for the application, where it is properly handled.
 */
-class INFERENCE_ENGINE_API_CLASS(Core) {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(Core) {
 class Impl;
 std::shared_ptr<Impl> _impl;
@@ -365,5 +375,5 @@ public:
 * You might want to use this function if you are developing a dynamically-loaded library which should clean up all
 * resources after itself when the library is unloaded.
 */
-INFERENCE_ENGINE_API_CPP(void) shutdown();
+INFERENCE_ENGINE_API_CPP(void) INFERENCE_ENGINE_1_0_DEPRECATED shutdown();
 } // namespace InferenceEngine
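How INFERENCE_ENGINE_1_0_DEPRECATED itself expands is not shown in this diff. A plausible sketch, assuming it wraps the standard [[deprecated]] attribute (the macro name below is illustrative only):

// Hypothetical stand-in, not the actual definition:
#define IE_1_0_DEPRECATED_SKETCH \
    [[deprecated("The Inference Engine API is deprecated and will be removed in the 2024.0 release")]]

class IE_1_0_DEPRECATED_SKETCH LegacyCore {};

int main() {
    LegacyCore core; // emits a deprecated-declarations warning at the use site
    (void)core;
    return 0;
}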

View File

@@ -9,6 +9,16 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <map>
 #include <memory>
 #include <string>

View File

@@ -9,6 +9,16 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <map>
 #include <memory>
 #include <string>

View File

@@ -9,6 +9,16 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <map>
 #include <memory>
 #include <string>
@@ -21,22 +31,21 @@
 namespace InferenceEngine {
-_IE_SUPPRESS_DEPRECATED_START_GCC
+IE_SUPPRESS_DEPRECATED_START
 /**
 * @deprecated Use InferenceEngine::CNNNetwork wrapper instead
 * @interface ICNNNetwork
 * @brief This is the main interface to describe the NN topology
 */
-class INFERENCE_ENGINE_API_CLASS(ICNNNetwork) : public std::enable_shared_from_this<ICNNNetwork> {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(ICNNNetwork)
+: public std::enable_shared_from_this<ICNNNetwork> {
 public:
-IE_SUPPRESS_DEPRECATED_START
 /**
 * @deprecated Use InferenceEngine::CNNNetwork wrapper instead
 * @brief A shared pointer to a ICNNNetwork interface
 */
 using Ptr = std::shared_ptr<ICNNNetwork>;
-IE_SUPPRESS_DEPRECATED_END
 /**
 * @deprecated Use InferenceEngine::CNNNetwork wrapper instead
@@ -257,14 +266,12 @@ public:
 }
 protected:
-IE_SUPPRESS_DEPRECATED_START
 /**
 * @brief Default destructor.
 */
 ~ICNNNetwork() = default;
-IE_SUPPRESS_DEPRECATED_END
 };
-_IE_SUPPRESS_DEPRECATED_END_GCC
+IE_SUPPRESS_DEPRECATED_END
 } // namespace InferenceEngine

View File

@@ -9,6 +9,16 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <map>
 #include <memory>
 #include <string>

View File

@@ -9,6 +9,16 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <map>
 #include <memory>
 #include <string>
@@ -26,7 +36,7 @@ IE_SUPPRESS_DEPRECATED_START
 /**
 * @brief This class contains information about each input of the network
 */
-class InputInfo {
+class INFERENCE_ENGINE_1_0_DEPRECATED InputInfo {
 public:
 /** @brief A smart pointer to the InputInfo instance */
 using Ptr = std::shared_ptr<InputInfo>;
@@ -130,11 +140,12 @@ public:
 }
 /**
-* @brief Initializes the pointer to the input data that stores the main input parameters like dims, etc
+* @brief Initializes the pointer to the input data that stores the main input parameters like dims,
+* etc
 *
 * This method initializes the precision with the information from the inputPtr if it was not set
-* explicitly through InputInfo::setPrecision. If InputInfo::setPrecision is called, this method does not overwrite
-* the precision.
+* explicitly through InputInfo::setPrecision. If InputInfo::setPrecision is called, this method does
+* not overwrite the precision.
 * @param inputPtr Pointer to the input data to set
 */
 void setInputData(DataPtr inputPtr) {

View File

@@ -9,17 +9,28 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <utility>
 #include "ie_allocator.hpp"
 namespace InferenceEngine {
+IE_SUPPRESS_DEPRECATED_START
 namespace details {
 /**
 * @brief This class is a LockedMemory concept for hardware memory
 */
 template <class T>
-class LockedMemoryBase {
+class INFERENCE_ENGINE_1_0_DEPRECATED LockedMemoryBase {
 IAllocator* _allocator = nullptr;
 void* _handle = nullptr;
 mutable T* _locked = nullptr;
@@ -114,7 +125,7 @@ protected:
 * @brief This class represents locked memory for read/write memory
 */
 template <class T>
-class LockedMemory : public details::LockedMemoryBase<T> {
+class INFERENCE_ENGINE_1_0_DEPRECATED LockedMemory : public details::LockedMemoryBase<T> {
 using base = details::LockedMemoryBase<T>;
 public:
@@ -222,7 +233,7 @@ public:
 * @brief This class is for <void*> data and allows casting to any pointers
 */
 template <>
-class LockedMemory<void> : public details::LockedMemoryBase<void> {
+class INFERENCE_ENGINE_1_0_DEPRECATED LockedMemory<void> : public details::LockedMemoryBase<void> {
 using base = details::LockedMemoryBase<void>;
 public:
@@ -291,6 +302,7 @@ public:
 return base::isEqualTo(lm.as<void*>());
 }
+IE_SUPPRESS_DEPRECATED_START
 /**
 * @brief Compares the object with the one stored in the memory
 * @param pointer A pointer to compare with
@@ -300,6 +312,7 @@
 friend bool operator==(const void* pointer, const LockedMemory<void>& lm) {
 return lm.operator==(pointer);
 }
+IE_SUPPRESS_DEPRECATED_END
 /**
 * @brief Casts stored object to any given type
@@ -332,7 +345,7 @@ public:
 * @brief This class is for read-only segments
 */
 template <class T>
-class LockedMemory<const T> : public details::LockedMemoryBase<T> {
+class INFERENCE_ENGINE_1_0_DEPRECATED LockedMemory<const T> : public details::LockedMemoryBase<T> {
 using base = details::LockedMemoryBase<T>;
 public:
@@ -411,4 +424,5 @@ public:
 return reinterpret_cast<S>(base::dereference());
 }
 };
+IE_SUPPRESS_DEPRECATED_END
 } // namespace InferenceEngine

View File

@@ -8,6 +8,16 @@
 */
 #pragma once
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+# define IE_LEGACY_HEADER_INCLUDED
+# ifdef _MSC_VER
+# pragma message( \
+"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# else
+# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+# endif
+#endif
 #include <algorithm>
 #include <cctype>
 #include <iterator>

View File

@@ -6,6 +6,8 @@
 #include <memory>
+IE_SUPPRESS_DEPRECATED_START
 InferenceEngine::Blob::Ptr make_blob_with_precision(const InferenceEngine::TensorDesc& desc) {
 return make_blob_with_precision(desc.getPrecision(), desc);
 }

View File

@@ -14,6 +14,8 @@
 //----------------------------------------------------------------------
+IE_SUPPRESS_DEPRECATED_START
 namespace InferenceEngine {
 template <InferenceEngine::Precision::ePrecision PRC>

View File

@@ -10,6 +10,7 @@
 #include "system_allocator.hpp"
 namespace InferenceEngine {
+IE_SUPPRESS_DEPRECATED_START
 void Blob::setShape(const SizeVector& dims) {
 // we don't want to allow setShape for:

View File

@@ -5,6 +5,7 @@
 #include "system_allocator.hpp"
 namespace InferenceEngine {
+IE_SUPPRESS_DEPRECATED_START
 INFERENCE_ENGINE_API_CPP(std::shared_ptr<IAllocator>) CreateDefaultAllocator() noexcept {
 try {

View File

@@ -6,6 +6,7 @@
 #include "ie_allocator.hpp"
+IE_SUPPRESS_DEPRECATED_START
 namespace InferenceEngine {
 class SystemMemoryAllocator : public InferenceEngine::IAllocator {
 public:
@@ -32,5 +33,6 @@ public:
 return true;
 }
 };
+IE_SUPPRESS_DEPRECATED_END
 } // namespace InferenceEngine

View File

@@ -12,6 +12,8 @@
 using namespace ::testing;
 using namespace InferenceEngine;
+IE_SUPPRESS_DEPRECATED_START
 using ChannelNum = size_t;
 using BatchNum = size_t;
 using PrecisionType = InferenceEngine::Precision::ePrecision;

View File

@@ -13,6 +13,8 @@ using namespace ::testing;
 using namespace std;
 using namespace InferenceEngine;
+IE_SUPPRESS_DEPRECATED_START
 class PreallocatorTests : public ::testing::Test {
 protected:
 std::vector<float> mybuf;

View File

@@ -8,6 +8,8 @@
 #include "unit_test_utils/mocks/mock_allocator.hpp"
+IE_SUPPRESS_DEPRECATED_START
 class BlobTests : public ::testing::Test {
 protected:
 std::shared_ptr<MockAllocator> createMockAllocator() {

View File

@@ -12,6 +12,8 @@ using namespace ::testing;
 using namespace std;
 using namespace InferenceEngine;
+IE_SUPPRESS_DEPRECATED_START
 class CompoundBlobTests : public ::testing::Test {
 protected:
 Blob::Ptr _test_blob;

View File

@@ -9,6 +9,8 @@
 using namespace InferenceEngine;
 using namespace ::testing;
+IE_SUPPRESS_DEPRECATED_START
 TEST(LockedMemoryTest, canUnlockMemoryAfterUsage) {
 std::unique_ptr<MockAllocator> allocator(new MockAllocator());
 char array[] = {1, 2, 3};

View File

@@ -7,7 +7,6 @@
 #include <memory>
 #include "async_infer_request.hpp"
-#include "ie_ngraph_utils.hpp"
 #include "ie_plugin_config.hpp"
 #include "itt.hpp"
 #include "openvino/op/util/op_types.hpp"

View File

@@ -6,7 +6,6 @@
 #include <memory>
-#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
 #include "ie_plugin_config.hpp"
 #include "itt.hpp"
 #include "openvino/pass/manager.hpp"

View File

@@ -12,10 +12,12 @@
 #include "ie_allocator.hpp"
+IE_SUPPRESS_DEPRECATED_START
 class MockAllocator : public InferenceEngine::IAllocator {
 public:
 MOCK_METHOD(void*, lock, (void*, InferenceEngine::LockOp), (noexcept));
-MOCK_METHOD(void, unlock, (void *), (noexcept));
+MOCK_METHOD(void, unlock, (void*), (noexcept));
 MOCK_METHOD(void*, alloc, (size_t), (noexcept));
 MOCK_METHOD(bool, free, (void*), (noexcept)); // NOLINT(readability/casting)
 };
+IE_SUPPRESS_DEPRECATED_END

View File

@@ -3,6 +3,7 @@
 #
 set(TARGET_NAME compile_tool)
+add_definitions(-DIN_OV_COMPONENT)
 file(GLOB SRCS
 ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp

View File

@@ -3,6 +3,7 @@
 #
 set(TARGET_NAME benchmark_app_legacy)
+add_definitions(-DIN_OV_COMPONENT)
 file (GLOB SRC ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
 file (GLOB HDR ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)