From 2fcf92be4295fe411d102c0e18bb81d1ecd91ba4 Mon Sep 17 00:00:00 2001 From: Anton Pankratv Date: Fri, 5 Mar 2021 12:08:01 +0300 Subject: [PATCH] Removed IRelease Interface (#4032) --- docs/HOWTO/Custom_Layers_Guide.md | 2 +- docs/snippets/CPU_Kernel.cpp | 2 +- docs/template_extension/extension.cpp | 6 +- docs/template_extension/extension.hpp | 1 - .../src/template_executable_network.cpp | 3 +- docs/template_plugin/src/template_plugin.hpp | 2 +- .../ie_bridges/c/src/ie_c_api.cpp | 4 +- .../openvino/inference_engine/ie_api_impl.cpp | 2 +- .../include/cpp/ie_infer_request.hpp | 1 + .../include/details/ie_irelease.hpp | 47 ----- .../include/details/ie_no_release.hpp | 25 --- .../include/details/ie_pre_allocator.hpp | 15 +- .../include/details/ie_so_pointer.hpp | 160 +++++++++--------- inference-engine/include/ie_allocator.hpp | 11 +- inference-engine/include/ie_blob.h | 2 +- inference-engine/include/ie_extension.h | 29 ++-- inference-engine/include/ie_icnn_network.hpp | 8 +- .../include/ie_iexecutable_network.hpp | 5 +- inference-engine/include/ie_iextension.h | 42 ++++- .../include/ie_iinfer_request.hpp | 7 +- .../samples/benchmark_app/main.cpp | 2 +- .../classification_sample_async/main.cpp | 2 +- .../samples/common/format_reader/MnistUbyte.h | 4 - .../samples/common/format_reader/bmp.h | 4 - .../common/format_reader/format_reader.cpp | 2 +- .../common/format_reader/format_reader.h | 4 +- .../common/format_reader/format_reader_ptr.h | 7 +- .../common/format_reader/opencv_wraper.h | 4 - .../reshape_ssd_extension.hpp | 2 - .../object_detection_sample_ssd/main.cpp | 2 +- .../samples/style_transfer_sample/main.cpp | 2 +- .../src/cldnn_engine/cldnn_remote_context.cpp | 2 +- .../src/cldnn_engine/cldnn_remote_context.h | 2 - .../cnn_network_ngraph_impl.hpp | 18 +- .../inference_engine/cpp/ie_cnn_network.cpp | 2 - .../src/inference_engine/ie_core.cpp | 5 +- .../inference_engine/ie_network_reader.cpp | 4 - .../os/lin/lin_shared_object_loader.cpp | 6 +- .../os/win/win_shared_object_loader.cpp | 3 +- .../src/inference_engine/system_allocator.cpp | 4 +- .../src/inference_engine/system_allocator.hpp | 9 +- .../include/legacy/cnn_network_impl.hpp | 9 +- .../convert_function_to_cnn_network.hpp | 4 - .../src/mkldnn_plugin/mkldnn_edge.h | 3 +- .../src/mkldnn_plugin/mkldnn_plugin.h | 2 +- .../src/mkldnn_plugin/nodes/list.hpp | 4 - .../multi_device_exec_network.cpp | 2 +- .../src/multi_device/multi_device_plugin.hpp | 2 +- .../base/ie_executable_network_base.hpp | 11 +- .../base/ie_infer_async_request_base.hpp | 7 - ...cutable_network_thread_safe_async_only.hpp | 4 +- ...executable_network_thread_safe_default.hpp | 5 +- .../impl/ie_plugin_internal.hpp | 6 - .../ie_iinfer_async_request_internal.hpp | 5 - .../interface/ie_iplugin_internal.hpp | 40 ++--- .../src/preprocessing/ie_preprocess_data.cpp | 11 +- .../src/preprocessing/ie_preprocess_data.hpp | 7 +- .../src/readers/ir_reader/ie_ir_reader.cpp | 10 +- .../src/readers/ir_reader/ie_ir_reader.hpp | 3 - .../readers/onnx_reader/ie_onnx_reader.cpp | 10 +- .../readers/onnx_reader/ie_onnx_reader.hpp | 3 - .../src/readers/reader_api/ie_reader.hpp | 13 +- .../myriad_plugin/myriad_executable_network.h | 3 +- .../src/vpu/myriad_plugin/myriad_plugin.h | 2 +- .../inference_engine/core_threading_tests.cpp | 2 +- .../inference_engine/ie_irelease_test.cpp | 22 --- .../ir_serialization/custom_ops.cpp | 6 +- .../ngraph_reader/abs_tests.cpp | 1 - .../ngraph_reader/custom_op_tests.cpp | 2 - .../inference_engine/ngraph_reshape_tests.cpp | 4 - .../shared_object_loader_test.cpp | 
20 +-- .../inference_engine/so_pointer_tests.cpp | 28 +-- .../plugin/cpu/extension/extension.cpp | 6 +- .../include/behavior/core_threading_tests.hpp | 2 +- .../unit_test_utils/mocks/mock_allocator.hpp | 1 - .../mocks/mock_engine/mock_plugin.cpp | 14 +- .../mocks/mock_icnn_network.hpp | 25 +-- .../mocks/mock_iexecutable_network.hpp | 1 - .../mocks/mock_iinfer_request.hpp | 1 - .../mocks/mock_not_empty_icnn_network.hpp | 3 +- .../ie_executable_network_base_test.cpp | 6 +- .../ie_infer_async_request_base_test.cpp | 5 +- ...async_request_thread_safe_default_test.cpp | 6 +- .../ie_memory_state_internal_test.cpp | 4 +- .../ie_executable_network_test.cpp | 2 +- .../inference_engine/ie_extension_test.cpp | 10 +- .../unit/inference_engine/ie_plugin_ptr.cpp | 13 +- .../system_allocator_test.cpp | 6 +- .../extensions_tests/extensions_test.cpp | 1 - .../unit/engines/gna/gna_matcher.cpp | 3 - .../mkldnn/constant_propagation_test.cpp | 3 - .../graph/layers/extensions/fake_layer.cpp | 4 - .../layers/extensions/graph_generic_test.cpp | 3 - .../layers/internal/graph_activation_test.cpp | 6 +- .../graph_batchnorm_scaleshift_test.cpp | 4 +- .../layers/internal/graph_batchnorm_test.cpp | 6 +- .../layers/internal/graph_concat_test.cpp | 4 +- .../graph/layers/internal/graph_conv_test.cpp | 4 +- .../layers/internal/graph_deconv_test.cpp | 6 +- .../layers/internal/graph_depthwise_test.cpp | 4 +- .../internal/graph_fullyconnected_test.cpp | 4 +- .../graph/layers/internal/graph_gemm_test.cpp | 5 +- .../layers/internal/graph_leaks_test.cpp | 1 - .../graph/layers/internal/graph_lrn_test.cpp | 4 +- .../layers/internal/graph_permute_test.cpp | 4 +- .../layers/internal/graph_pooling_test.cpp | 4 +- .../layers/internal/graph_power_test.cpp | 4 +- .../layers/internal/graph_softmax_test.cpp | 4 +- .../layers/internal/graph_split_test.cpp | 6 +- .../graph/layers/internal/graph_tile_test.cpp | 4 +- .../structure/graph_optimization_test.cpp | 3 - .../unit/engines/mkldnn/graph/test_graph.hpp | 10 +- .../unit/graph_tools/graph_tools_test.cpp | 1 + 113 files changed, 317 insertions(+), 605 deletions(-) delete mode 100644 inference-engine/include/details/ie_irelease.hpp delete mode 100644 inference-engine/include/details/ie_no_release.hpp delete mode 100644 inference-engine/tests/functional/inference_engine/ie_irelease_test.cpp diff --git a/docs/HOWTO/Custom_Layers_Guide.md b/docs/HOWTO/Custom_Layers_Guide.md index 8037e6e95a2..a2bd065a9b9 100644 --- a/docs/HOWTO/Custom_Layers_Guide.md +++ b/docs/HOWTO/Custom_Layers_Guide.md @@ -344,7 +344,7 @@ make --jobs=$(nproc) The result of this command is a compiled shared library (`.so`, `.dylib` or `.dll`). It should be loaded in the application using `Core` class instance method `AddExtension` like this -`core.AddExtension(make_so_pointer(compiled_library_file_name), "CPU");`. +`core.AddExtension(std::make_shared(compiled_library_file_name), "CPU");`. To test that the extension is implemented correctly we can run the "mri_reconstruction_demo.py" with the following content: diff --git a/docs/snippets/CPU_Kernel.cpp b/docs/snippets/CPU_Kernel.cpp index 920005ffcd3..2bf99784fe1 100644 --- a/docs/snippets/CPU_Kernel.cpp +++ b/docs/snippets/CPU_Kernel.cpp @@ -5,7 +5,7 @@ using namespace InferenceEngine; //! [part0] InferenceEngine::Core core; // Load CPU extension as a shared library -auto extension_ptr = make_so_pointer(""); +auto extension_ptr = std::make_shared(std::string{""}); // Add extension to the CPU device core.AddExtension(extension_ptr, "CPU"); //! 
[part0] diff --git a/docs/template_extension/extension.cpp b/docs/template_extension/extension.cpp index a66ddf462bc..390102bb33d 100644 --- a/docs/template_extension/extension.cpp +++ b/docs/template_extension/extension.cpp @@ -109,7 +109,10 @@ InferenceEngine::ILayerImpl::Ptr Extension::getImplementation(const std::shared_ //! [extension:getImplementation] //! [extension:CreateExtension] -// Exported function +//Generate exported function +IE_DEFINE_EXTENSION_CREATE_FUNCTION(Extension) +//! [extension:CreateExtension] + INFERENCE_EXTENSION_API(InferenceEngine::StatusCode) InferenceEngine::CreateExtension(InferenceEngine::IExtension *&ext, InferenceEngine::ResponseDesc *resp) noexcept { try { @@ -123,4 +126,3 @@ INFERENCE_EXTENSION_API(InferenceEngine::StatusCode) InferenceEngine::CreateExte return InferenceEngine::GENERAL_ERROR; } } -//! [extension:CreateExtension] diff --git a/docs/template_extension/extension.hpp b/docs/template_extension/extension.hpp index fa7463b7cf1..7da07157d03 100644 --- a/docs/template_extension/extension.hpp +++ b/docs/template_extension/extension.hpp @@ -21,7 +21,6 @@ public: ~Extension(); void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override; void Unload() noexcept override {} - void Release() noexcept override { delete this; } std::map getOpSets() override; std::vector getImplTypes(const std::shared_ptr& node) override; diff --git a/docs/template_plugin/src/template_executable_network.cpp b/docs/template_plugin/src/template_executable_network.cpp index 0a2193342d8..e848ced12e4 100644 --- a/docs/template_plugin/src/template_executable_network.cpp +++ b/docs/template_plugin/src/template_executable_network.cpp @@ -143,8 +143,7 @@ InferenceEngine::IInferRequest::Ptr TemplatePlugin::ExecutableNetwork::CreateInf auto internalRequest = CreateInferRequestImpl(_networkInputs, _networkOutputs); auto asyncThreadSafeImpl = std::make_shared(std::static_pointer_cast(internalRequest), _taskExecutor, _plugin->_waitExecutor, _callbackExecutor); - asyncRequest.reset(new InferenceEngine::InferRequestBase(asyncThreadSafeImpl), - [](InferenceEngine::IInferRequest *p) { p->Release(); }); + asyncRequest.reset(new InferenceEngine::InferRequestBase(asyncThreadSafeImpl)); asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest); return asyncRequest; } diff --git a/docs/template_plugin/src/template_plugin.hpp b/docs/template_plugin/src/template_plugin.hpp index fe099ff734b..06fe5d6ba3d 100644 --- a/docs/template_plugin/src/template_plugin.hpp +++ b/docs/template_plugin/src/template_plugin.hpp @@ -18,7 +18,7 @@ public: using Ptr = std::shared_ptr; Plugin(); - ~Plugin() override; + ~Plugin(); void SetConfig(const std::map &config) override; InferenceEngine::QueryNetworkResult diff --git a/inference-engine/ie_bridges/c/src/ie_c_api.cpp b/inference-engine/ie_bridges/c/src/ie_c_api.cpp index 1cbed78a359..b49264ddbf1 100644 --- a/inference-engine/ie_bridges/c/src/ie_c_api.cpp +++ b/inference-engine/ie_bridges/c/src/ie_c_api.cpp @@ -451,7 +451,7 @@ IEStatusCode ie_core_add_extension(ie_core_t *core, const char *extension_path, } try { - auto extension_ptr = InferenceEngine::make_so_pointer(extension_path); + auto extension_ptr = std::make_shared(std::string{extension_path}); auto extension = std::dynamic_pointer_cast(extension_ptr); core->object.AddExtension(extension, device_name); } catch (const IE::details::InferenceEngineException& e) { @@ -524,6 +524,8 @@ IEStatusCode ie_core_get_available_devices(const ie_core_t *core, ie_available_d 
avai_devices->devices = dev_ptrs.release(); } catch (const IE::details::InferenceEngineException& e) { return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; + } catch (const std::exception& e) { + return IEStatusCode::UNEXPECTED; } catch (...) { return IEStatusCode::UNEXPECTED; } diff --git a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp index 9d6067da9ee..1078685a2d6 100644 --- a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp +++ b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp @@ -599,7 +599,7 @@ void InferenceEnginePython::IECore::registerPlugins(const std::string &xmlConfig } void InferenceEnginePython::IECore::addExtension(const std::string &ext_lib_path, const std::string &deviceName) { - auto extension_ptr = InferenceEngine::make_so_pointer(ext_lib_path); + auto extension_ptr = std::make_shared(ext_lib_path); auto extension = std::dynamic_pointer_cast(extension_ptr); actual.AddExtension(extension, deviceName); } diff --git a/inference-engine/include/cpp/ie_infer_request.hpp b/inference-engine/include/cpp/ie_infer_request.hpp index d68c313152e..ab4e0d75536 100644 --- a/inference-engine/include/cpp/ie_infer_request.hpp +++ b/inference-engine/include/cpp/ie_infer_request.hpp @@ -18,6 +18,7 @@ #include "ie_iinfer_request.hpp" #include "details/ie_exception_conversion.hpp" #include "details/ie_so_loader.h" +#include "ie_blob.h" namespace InferenceEngine { diff --git a/inference-engine/include/details/ie_irelease.hpp b/inference-engine/include/details/ie_irelease.hpp deleted file mode 100644 index 2bcdb02b62f..00000000000 --- a/inference-engine/include/details/ie_irelease.hpp +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (C) 2018-2020 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -/** - * @brief A header file for the Inference Engine plugins destruction mechanism - * - * @file ie_irelease.hpp - */ -#pragma once - -#include - -#include "ie_api.h" -#include "ie_no_copy.hpp" - -namespace InferenceEngine { -namespace details { -/** - * @brief This class is used for objects allocated by a shared module (in *.so) - */ -class IRelease : public no_copy { -public: - /** - * @brief Releases current allocated object and all related resources. - * Once this method is called, the pointer to this interface is no longer valid - */ - virtual void Release() noexcept = 0; - -protected: - /** - * @brief Default destructor - */ - ~IRelease() override = default; -}; - -template -inline std::shared_ptr shared_from_irelease(T* ptr) { - std::shared_ptr pointer(ptr, [](IRelease* p) { - if (p) - p->Release(); - }); - return pointer; -} - -} // namespace details -} // namespace InferenceEngine diff --git a/inference-engine/include/details/ie_no_release.hpp b/inference-engine/include/details/ie_no_release.hpp deleted file mode 100644 index 1a036cf1cff..00000000000 --- a/inference-engine/include/details/ie_no_release.hpp +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (C) 2018-2020 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -/** - * @brief Utility header file. 
Provides no release base class - * - * @file ie_no_release.hpp - */ -#pragma once - -namespace InferenceEngine { -namespace details { - -/** - * @brief prevent Release method from being called on specific objects - */ -template -class NoReleaseOn : public T { -private: - void Release() noexcept = 0; -}; - -} // namespace details -} // namespace InferenceEngine \ No newline at end of file diff --git a/inference-engine/include/details/ie_pre_allocator.hpp b/inference-engine/include/details/ie_pre_allocator.hpp index 70cce33b258..5c1cd777f50 100644 --- a/inference-engine/include/details/ie_pre_allocator.hpp +++ b/inference-engine/include/details/ie_pre_allocator.hpp @@ -19,7 +19,7 @@ namespace details { /* * @brief This is a helper class to wrap external memory */ -class PreAllocator : public IAllocator { +class PreAllocator final : public IAllocator { void* _actualData; size_t _sizeInBytes; @@ -59,17 +59,6 @@ public: bool free(void*) noexcept override { // NOLINT return false; } - - /** - * @brief Deletes current allocator. - * Can be used if a shared_from_irelease pointer is used - */ - void Release() noexcept override { - delete this; - } - -protected: - virtual ~PreAllocator() = default; }; /** @@ -80,7 +69,7 @@ protected: */ template std::shared_ptr make_pre_allocator(T* ptr, size_t size) { - return shared_from_irelease(new PreAllocator(ptr, size * sizeof(T))); + return std::make_shared(ptr, size * sizeof(T)); } } // namespace details diff --git a/inference-engine/include/details/ie_so_pointer.hpp b/inference-engine/include/details/ie_so_pointer.hpp index bb7ad2d46ba..de3f3838cc7 100644 --- a/inference-engine/include/details/ie_so_pointer.hpp +++ b/inference-engine/include/details/ie_so_pointer.hpp @@ -16,60 +16,9 @@ #include "ie_common.h" #include "ie_so_loader.h" #include "details/ie_exception.hpp" -#include "details/ie_no_release.hpp" -#include "details/ie_irelease.hpp" namespace InferenceEngine { namespace details { - -/** - * @brief This class is a C++ helper to load a symbol from a library and create its instance - */ -template -class SymbolLoader { -private: - std::shared_ptr _so_loader; - -public: - /** - * @brief The main constructor - * @param loader Library to load from - */ - explicit SymbolLoader(std::shared_ptr loader): _so_loader(loader) { - if (_so_loader == nullptr) { - THROW_IE_EXCEPTION << "SymbolLoader cannot be created with nullptr"; - } - } - - /** - * @brief Calls a function from the library that creates an object and returns StatusCode - * @param name Name of function to load object with - * @return If StatusCode provided by function is OK then returns the loaded object. 
Throws an exception otherwise - */ - template - T* instantiateSymbol(const std::string& name) const { - T* instance = nullptr; - ResponseDesc desc; - StatusCode sts = bind_function(name)(instance, &desc); - if (sts != OK) { - THROW_IE_EXCEPTION << desc.msg; - } - return instance; - } - -private: - /** - * @brief Loads function from the library and returns a pointer to it - * @param functionName Name of function to load - * @return The loaded function - */ - template - std::function bind_function(const std::string& functionName) const { - std::function ptr(reinterpret_cast(_so_loader->get_symbol(functionName.c_str()))); - return ptr; - } -}; - /** * @brief This class is a trait class that provides a creator with a function name corresponding to the templated class * parameter @@ -93,6 +42,13 @@ template class SOPointer { template friend class SOPointer; +IE_SUPPRESS_DEPRECATED_START + struct HasRelease { + template static char test(decltype(&C::Release)); + template static long test(...); + constexpr static const bool value = sizeof(test(nullptr)) == sizeof(char); + }; +IE_SUPPRESS_DEPRECATED_END public: /** @@ -107,22 +63,22 @@ public: template > explicit SOPointer(const std::basic_string & name) - : _so_loader(new Loader(name.c_str())), - _pointedObj(details::shared_from_irelease( - SymbolLoader(_so_loader).template instantiateSymbol(SOCreatorTrait::name))) {} + : _so_loader(new Loader(name.c_str())) { + Load(std::integral_constant{}); + } /** * @brief The main constructor * @param name Name of a shared library file */ explicit SOPointer(const char * name) - : _so_loader(new Loader(name)), - _pointedObj(details::shared_from_irelease( - SymbolLoader(_so_loader).template instantiateSymbol(SOCreatorTrait::name))) {} + : _so_loader(new Loader(name)) { + Load(std::integral_constant{}); + } /** * @brief Constructs an object with existing reference - * @param pointedObj Existing reference to wrap + * @param pointedObj Existing reference to wrap */ explicit SOPointer(T* pointedObj): _so_loader(), _pointedObj(pointedObj) { if (_pointedObj == nullptr) { @@ -134,10 +90,10 @@ public: * @brief Constructs an object with existing loader * @param so_loader Existing pointer to a library loader */ - explicit SOPointer(std::shared_ptr so_loader) - : _so_loader(so_loader), - _pointedObj(details::shared_from_irelease( - SymbolLoader(_so_loader).template instantiateSymbol(SOCreatorTrait::name))) {} + explicit SOPointer(const std::shared_ptr& so_loader) + : _so_loader(so_loader) { + Load(std::integral_constant{}); + } /** * @brief The copy-like constructor, can create So Pointer that dereferenced into child type if T is derived of U @@ -147,24 +103,22 @@ public: SOPointer(const SOPointer& that) : _so_loader(std::dynamic_pointer_cast(that._so_loader)), _pointedObj(std::dynamic_pointer_cast(that._pointedObj)) { - if (_pointedObj == nullptr) { - THROW_IE_EXCEPTION << "Cannot create object from SOPointer reference"; - } + IE_ASSERT(_pointedObj != nullptr); } /** * @brief Standard pointer operator * @return underlined interface with disabled Release method */ - details::NoReleaseOn* operator->() const noexcept { - return reinterpret_cast*>(_pointedObj.get()); + T* operator->() const noexcept { + return _pointedObj.get(); } /** * @brief Standard dereference operator * @return underlined interface with disabled Release method */ - details::NoReleaseOn* operator*() const noexcept { + const T* operator*() const noexcept { return this->operator->(); } @@ -196,6 +150,62 @@ public: } protected: + /** + * @brief Implements 
load of object from library if Release method is presented + */ + void Load(std::true_type) { + try { + void* create = nullptr; + try { + create = _so_loader->get_symbol((SOCreatorTrait::name + std::string("Shared")).c_str()); + } catch (const details::InferenceEngineException& ex) { + if ((ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) == NOT_FOUND) { + create = nullptr; + } else { + throw; + } + } + if (create == nullptr) { + create = _so_loader->get_symbol(SOCreatorTrait::name); + using CreateF = StatusCode(T*&, ResponseDesc*); + T* object = nullptr; + ResponseDesc desc; + StatusCode sts = reinterpret_cast(create)(object, &desc); + if (sts != OK) { + THROW_IE_EXCEPTION << as_status << sts << desc.msg; + } + IE_SUPPRESS_DEPRECATED_START + _pointedObj = std::shared_ptr(object, [] (T* ptr){ptr->Release();}); + IE_SUPPRESS_DEPRECATED_END + } else { + using CreateF = void(std::shared_ptr&); + reinterpret_cast(create)(_pointedObj); + } + } catch (const InferenceEngineException& ex) { + THROW_IE_EXCEPTION << as_status << (ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) << ex.what(); + } catch (const std::exception& ex) { + THROW_IE_EXCEPTION << as_status << GENERAL_ERROR << ex.what(); + } catch(...) { + THROW_IE_EXCEPTION << as_status << UNEXPECTED << "[ UNEXPECTED ] "; + } + } + + /** + * @brief Implements load of object from library + */ + void Load(std::false_type) { + try { + using CreateF = void(std::shared_ptr&); + reinterpret_cast(_so_loader->get_symbol(SOCreatorTrait::name))(_pointedObj); + } catch (const InferenceEngineException& ex) { + THROW_IE_EXCEPTION << as_status << (ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) << ex.what(); + } catch (const std::exception& ex) { + THROW_IE_EXCEPTION << as_status << GENERAL_ERROR << ex.what(); + } catch(...) { + THROW_IE_EXCEPTION << as_status << UNEXPECTED << "[ UNEXPECTED ] "; + } + } + /** * @brief Gets a smart pointer to the DLL */ @@ -206,19 +216,5 @@ protected: */ std::shared_ptr _pointedObj; }; - } // namespace details - -/** - * @brief Creates a special shared_pointer wrapper for the given type from a specific shared module - * @tparam T An type of object SOPointer can hold - * @param name Name of the shared library file - * @return A created object - */ -template -inline std::shared_ptr make_so_pointer(const std::string & name) = delete; - -template -inline std::shared_ptr make_so_pointer(const std::wstring & name) = delete; - } // namespace InferenceEngine diff --git a/inference-engine/include/ie_allocator.hpp b/inference-engine/include/ie_allocator.hpp index edfec358c4b..9bd5932a469 100644 --- a/inference-engine/include/ie_allocator.hpp +++ b/inference-engine/include/ie_allocator.hpp @@ -10,7 +10,7 @@ #pragma once #include "ie_api.h" -#include "details/ie_irelease.hpp" +#include namespace InferenceEngine { @@ -26,7 +26,7 @@ enum LockOp { * @interface IAllocator * @brief Allocator concept to be used for memory management and is used as part of the Blob. */ -class IAllocator : public details::IRelease { +class IAllocator : public std::enable_shared_from_this { public: /** * @brief Maps handle to heap memory accessible by any memory manipulation routines. @@ -60,10 +60,7 @@ public: virtual bool free(void* handle) noexcept = 0; protected: - /** - * @brief Disables the ability of deleting the object without release. 
- */ - ~IAllocator() override = default; + ~IAllocator() = default; }; /** @@ -71,6 +68,6 @@ protected: * * @return The Inference Engine IAllocator* instance */ -INFERENCE_ENGINE_API(InferenceEngine::IAllocator*) CreateDefaultAllocator() noexcept; +INFERENCE_ENGINE_API_CPP(std::shared_ptr) CreateDefaultAllocator() noexcept; } // namespace InferenceEngine diff --git a/inference-engine/include/ie_blob.h b/inference-engine/include/ie_blob.h index 30b9b6b978a..69316340832 100644 --- a/inference-engine/include/ie_blob.h +++ b/inference-engine/include/ie_blob.h @@ -779,7 +779,7 @@ protected: const std::shared_ptr& getAllocator() const noexcept override { // in case when constructor without allocator was used if (!_allocator) { - _allocator = shared_from_irelease(CreateDefaultAllocator()); + _allocator = CreateDefaultAllocator(); } return _allocator; diff --git a/inference-engine/include/ie_extension.h b/inference-engine/include/ie_extension.h index 61262093185..547ccd4954b 100644 --- a/inference-engine/include/ie_extension.h +++ b/inference-engine/include/ie_extension.h @@ -38,7 +38,7 @@ public: /** * @brief This class is a C++ helper to work with objects created using extensions. */ -class INFERENCE_ENGINE_API_CLASS(Extension) : public IExtension { +class INFERENCE_ENGINE_API_CLASS(Extension) final : public IExtension { public: /** * @brief Loads extension from a shared library @@ -65,11 +65,6 @@ public: actual->Unload(); } - /** - * @brief Does nothing since destruction is done via the regular mechanism - */ - void Release() noexcept override {} - /** * @brief Returns operation sets * This method throws an exception if it was not implemented @@ -106,23 +101,29 @@ protected: }; /** - * @brief Creates a special shared_pointer wrapper for the given type from a specific shared module - * - * @param name A std::string name of the shared library file - * @return shared_pointer A wrapper for the given type from a specific shared module + * @brief Creates extension using deprecated API + * @tparam T extension type + * @param name extension library name + * @return shared pointer to extension */ -template <> -inline std::shared_ptr make_so_pointer(const std::string& name) { +template +INFERENCE_ENGINE_DEPRECATED("Use std::make_shared") +inline std::shared_ptr make_so_pointer(const std::string& name) { return std::make_shared(name); } #ifdef ENABLE_UNICODE_PATH_SUPPORT -template <> +/** + * @brief Creates extension using deprecated API + * @param name extension library name + * @return shared pointer to extension + */ +template +INFERENCE_ENGINE_DEPRECATED("Use std::make_shared") inline std::shared_ptr make_so_pointer(const std::wstring& name) { return std::make_shared(name); } #endif - } // namespace InferenceEngine diff --git a/inference-engine/include/ie_icnn_network.hpp b/inference-engine/include/ie_icnn_network.hpp index 10068db3fe4..25ee2715167 100644 --- a/inference-engine/include/ie_icnn_network.hpp +++ b/inference-engine/include/ie_icnn_network.hpp @@ -17,7 +17,6 @@ #include "ie_common.h" #include "ie_data.h" #include "ie_input_info.hpp" -#include "details/ie_irelease.hpp" #if defined IMPLEMENT_INFERENCE_ENGINE_API || defined IMPLEMENT_INFERENCE_ENGINE_PLUGIN || 1 # define INFERENCE_ENGINE_ICNNNETWORK_CLASS(...) 
INFERENCE_ENGINE_API_CLASS(__VA_ARGS__) @@ -45,7 +44,7 @@ using OutputsDataMap = std::map; * @interface ICNNNetwork * @brief This is the main interface to describe the NN topology */ -class INFERENCE_ENGINE_ICNNNETWORK_CLASS(ICNNNetwork) : public details::IRelease { +class INFERENCE_ENGINE_ICNNNETWORK_CLASS(ICNNNetwork): public std::enable_shared_from_this { public: /** * @brief A shared pointer to a ICNNNetwork interface @@ -200,9 +199,10 @@ public: return NOT_IMPLEMENTED; } +protected: /** - * @brief A virtual destructor. + * @brief Default destructor. */ - virtual ~ICNNNetwork(); + ~ICNNNetwork() = default; }; } // namespace InferenceEngine diff --git a/inference-engine/include/ie_iexecutable_network.hpp b/inference-engine/include/ie_iexecutable_network.hpp index 741727d4698..85f64d5d018 100644 --- a/inference-engine/include/ie_iexecutable_network.hpp +++ b/inference-engine/include/ie_iexecutable_network.hpp @@ -32,7 +32,7 @@ using ConstOutputsDataMap = std::map; /** * @brief This is an interface of an executable network */ -class IExecutableNetwork : public details::IRelease { +class IExecutableNetwork : public std::enable_shared_from_this { public: /** * @brief A smart pointer to the current IExecutableNetwork object @@ -173,6 +173,9 @@ public: * @return code of the operation. InferenceEngine::OK if succeeded */ virtual StatusCode GetContext(RemoteContext::Ptr& pContext, ResponseDesc* resp) const noexcept = 0; + +protected: + ~IExecutableNetwork() = default; }; } // namespace InferenceEngine diff --git a/inference-engine/include/ie_iextension.h b/inference-engine/include/ie_iextension.h index 964d84865ed..5f5ae2b3d5a 100644 --- a/inference-engine/include/ie_iextension.h +++ b/inference-engine/include/ie_iextension.h @@ -25,7 +25,6 @@ * @def INFERENCE_EXTENSION_API(TYPE) * @brief Defines Inference Engine Extension API method */ - #if defined(_WIN32) && defined(IMPLEMENT_INFERENCE_EXTENSION_API) #define INFERENCE_EXTENSION_API(TYPE) extern "C" __declspec(dllexport) TYPE #else @@ -146,7 +145,7 @@ public: /** * @brief This class is the main extension interface */ -class INFERENCE_ENGINE_API_CLASS(IExtension) : public InferenceEngine::details::IRelease { +class INFERENCE_ENGINE_API_CLASS(IExtension) : public std::enable_shared_from_this { public: /** * @brief Returns operation sets @@ -187,6 +186,17 @@ public: * @param versionInfo Pointer to version info, will be set by plugin */ virtual void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept = 0; + + /** + * @brief Implements deprecated API + */ + INFERENCE_ENGINE_DEPRECATED("Do not override or use this method. 
Use IE_DEFINE_EXTENSION_CREATE_FUNCTION to export extension") + virtual void Release() noexcept { + delete this; + } + +protected: + virtual ~IExtension() = default; }; /** @@ -198,9 +208,31 @@ using IExtensionPtr = std::shared_ptr; * @brief Creates the default instance of the extension * * @param ext Extension interface - * @param resp Response description - * @return Status code */ -INFERENCE_EXTENSION_API(StatusCode) CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept; +INFERENCE_EXTENSION_API(void) CreateExtensionShared(IExtensionPtr& ext); +/** + * @note: Deprecated API + * @brief Creates the default instance of the extension + * @param ext Extension interface + * @param resp Responce + * @return InferenceEngine::OK if extension is constructed and InferenceEngine::GENERAL_ERROR otherwise + */ +#if defined(_WIN32) +INFERENCE_ENGINE_DEPRECATED("Use IE_DEFINE_EXTENSION_CREATE_FUNCTION macro") +INFERENCE_EXTENSION_API(StatusCode) +CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept; +#else +INFERENCE_EXTENSION_API(StatusCode) +CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept INFERENCE_ENGINE_DEPRECATED("Use IE_DEFINE_EXTENSION_CREATE_FUNCTION macro"); +#endif + +/** + * @def IE_DEFINE_EXTENSION_CREATE_FUNCTION + * @brief Generates extension creation function + */ +#define IE_DEFINE_EXTENSION_CREATE_FUNCTION(ExtensionType) \ +INFERENCE_EXTENSION_API(void) InferenceEngine::CreateExtensionShared(std::shared_ptr& ext) { \ + ext = std::make_shared(); \ +} } // namespace InferenceEngine diff --git a/inference-engine/include/ie_iinfer_request.hpp b/inference-engine/include/ie_iinfer_request.hpp index faa17f1f71e..43a5bc94800 100644 --- a/inference-engine/include/ie_iinfer_request.hpp +++ b/inference-engine/include/ie_iinfer_request.hpp @@ -18,7 +18,6 @@ #include "ie_common.h" #include "ie_preprocess.hpp" #include "ie_imemory_state.hpp" -#include "details/ie_irelease.hpp" namespace InferenceEngine { @@ -26,7 +25,7 @@ namespace InferenceEngine { * @brief This is an interface of asynchronous infer request * */ -class IInferRequest : public details::IRelease { +class IInferRequest : public std::enable_shared_from_this { public: /** * @enum WaitMode @@ -198,7 +197,9 @@ public: * given index */ virtual StatusCode QueryState(IVariableState::Ptr& pState, size_t idx, ResponseDesc* resp) noexcept = 0; - IE_SUPPRESS_DEPRECATED_END + +protected: + ~IInferRequest() = default; }; } // namespace InferenceEngine \ No newline at end of file diff --git a/inference-engine/samples/benchmark_app/main.cpp b/inference-engine/samples/benchmark_app/main.cpp index a786eef5053..96dc8abd5a0 100644 --- a/inference-engine/samples/benchmark_app/main.cpp +++ b/inference-engine/samples/benchmark_app/main.cpp @@ -165,7 +165,7 @@ int main(int argc, char *argv[]) { Core ie; if (FLAGS_d.find("CPU") != std::string::npos && !FLAGS_l.empty()) { // CPU (MKLDNN) extensions is loaded as a shared library and passed as a pointer to base extension - const auto extension_ptr = InferenceEngine::make_so_pointer(FLAGS_l); + const auto extension_ptr = std::make_shared(FLAGS_l); ie.AddExtension(extension_ptr); slog::info << "CPU (MKLDNN) extensions is loaded " << FLAGS_l << slog::endl; } diff --git a/inference-engine/samples/classification_sample_async/main.cpp b/inference-engine/samples/classification_sample_async/main.cpp index 264d4ba155f..9796fbc8e4c 100644 --- a/inference-engine/samples/classification_sample_async/main.cpp +++ b/inference-engine/samples/classification_sample_async/main.cpp @@ -78,7 +78,7 
@@ int main(int argc, char *argv[]) { if (!FLAGS_l.empty()) { // CPU(MKLDNN) extensions are loaded as a shared library and passed as a pointer to base extension - IExtensionPtr extension_ptr = make_so_pointer(FLAGS_l); + IExtensionPtr extension_ptr = std::make_shared(FLAGS_l); ie.AddExtension(extension_ptr); slog::info << "CPU Extension loaded: " << FLAGS_l << slog::endl; } diff --git a/inference-engine/samples/common/format_reader/MnistUbyte.h b/inference-engine/samples/common/format_reader/MnistUbyte.h index 211f95adddb..c159e230829 100644 --- a/inference-engine/samples/common/format_reader/MnistUbyte.h +++ b/inference-engine/samples/common/format_reader/MnistUbyte.h @@ -43,10 +43,6 @@ public: return _width * _height * 1; } - void Release() noexcept override { - delete this; - } - std::shared_ptr getData(size_t width, size_t height) override { if ((width * height != 0) && (_width * _height != width * height)) { std::cout << "[ WARNING ] Image won't be resized! Please use OpenCV.\n"; diff --git a/inference-engine/samples/common/format_reader/bmp.h b/inference-engine/samples/common/format_reader/bmp.h index a3187f3f7fa..c351b085239 100644 --- a/inference-engine/samples/common/format_reader/bmp.h +++ b/inference-engine/samples/common/format_reader/bmp.h @@ -60,10 +60,6 @@ public: return _width * _height * 3; } - void Release() noexcept override { - delete this; - } - std::shared_ptr getData(size_t width, size_t height) override { if ((width * height != 0) && (_width * _height != width * height)) { std::cout << "[ WARNING ] Image won't be resized! Please use OpenCV.\n"; diff --git a/inference-engine/samples/common/format_reader/format_reader.cpp b/inference-engine/samples/common/format_reader/format_reader.cpp index 02d78eb8d00..63521508891 100644 --- a/inference-engine/samples/common/format_reader/format_reader.cpp +++ b/inference-engine/samples/common/format_reader/format_reader.cpp @@ -23,7 +23,7 @@ Reader *Registry::CreateReader(const char *filename) { for (auto maker : _data) { Reader *ol = maker(filename); if (ol != nullptr && ol->size() != 0) return ol; - if (ol != nullptr) ol->Release(); + if (ol != nullptr) delete ol; } return nullptr; } diff --git a/inference-engine/samples/common/format_reader/format_reader.h b/inference-engine/samples/common/format_reader/format_reader.h index fc5b1cbbbc9..e7384d02acd 100644 --- a/inference-engine/samples/common/format_reader/format_reader.h +++ b/inference-engine/samples/common/format_reader/format_reader.h @@ -45,6 +45,8 @@ protected: std::shared_ptr _data; public: + virtual ~Reader() = default; + /** * \brief Get width * @return width @@ -69,8 +71,6 @@ public: * @return size */ virtual size_t size() const = 0; - - virtual void Release() noexcept = 0; }; } // namespace FormatReader diff --git a/inference-engine/samples/common/format_reader/format_reader_ptr.h b/inference-engine/samples/common/format_reader/format_reader_ptr.h index b69bbf539a8..1d0f25986f4 100644 --- a/inference-engine/samples/common/format_reader/format_reader_ptr.h +++ b/inference-engine/samples/common/format_reader/format_reader_ptr.h @@ -15,10 +15,7 @@ namespace FormatReader { class ReaderPtr { public: - explicit ReaderPtr(const char *imageName) : reader(CreateFormatReader(imageName), - [](Reader *p) { - p->Release(); - }) {} + explicit ReaderPtr(const char *imageName) : reader(CreateFormatReader(imageName)) {} /** * @brief dereference operator overload * @return Reader @@ -40,6 +37,6 @@ public: } protected: - std::unique_ptr> reader; + std::unique_ptr reader; }; } // 
namespace FormatReader diff --git a/inference-engine/samples/common/format_reader/opencv_wraper.h b/inference-engine/samples/common/format_reader/opencv_wraper.h index c642b87dfdc..6329bcfa92e 100644 --- a/inference-engine/samples/common/format_reader/opencv_wraper.h +++ b/inference-engine/samples/common/format_reader/opencv_wraper.h @@ -46,10 +46,6 @@ public: return _size; } - void Release() noexcept override { - delete this; - } - std::shared_ptr getData(size_t width, size_t height) override; }; } // namespace FormatReader diff --git a/inference-engine/samples/hello_reshape_ssd/reshape_ssd_extension.hpp b/inference-engine/samples/hello_reshape_ssd/reshape_ssd_extension.hpp index 699824d6112..94543b22012 100644 --- a/inference-engine/samples/hello_reshape_ssd/reshape_ssd_extension.hpp +++ b/inference-engine/samples/hello_reshape_ssd/reshape_ssd_extension.hpp @@ -131,8 +131,6 @@ public: void Unload() noexcept override {} - void Release() noexcept override {} - std::vector getImplTypes(const std::shared_ptr& node) override { if (impls.find(node->description()) == impls.end()) return {}; diff --git a/inference-engine/samples/object_detection_sample_ssd/main.cpp b/inference-engine/samples/object_detection_sample_ssd/main.cpp index 233caa7f881..dcb3ddaa382 100644 --- a/inference-engine/samples/object_detection_sample_ssd/main.cpp +++ b/inference-engine/samples/object_detection_sample_ssd/main.cpp @@ -87,7 +87,7 @@ int main(int argc, char *argv[]) { if (!FLAGS_l.empty()) { // CPU(MKLDNN) extensions are loaded as a shared library and passed as a pointer to base extension - IExtensionPtr extension_ptr = make_so_pointer(FLAGS_l); + IExtensionPtr extension_ptr = std::make_shared(FLAGS_l); ie.AddExtension(extension_ptr); slog::info << "CPU Extension loaded: " << FLAGS_l << slog::endl; } diff --git a/inference-engine/samples/style_transfer_sample/main.cpp b/inference-engine/samples/style_transfer_sample/main.cpp index 6a036a9191a..0da8075d7b7 100644 --- a/inference-engine/samples/style_transfer_sample/main.cpp +++ b/inference-engine/samples/style_transfer_sample/main.cpp @@ -70,7 +70,7 @@ int main(int argc, char *argv[]) { if (!FLAGS_l.empty()) { // CPU(MKLDNN) extensions are loaded as a shared library and passed as a pointer to base extension - IExtensionPtr extension_ptr = make_so_pointer(FLAGS_l); + IExtensionPtr extension_ptr = std::make_shared(FLAGS_l); ie.AddExtension(extension_ptr); slog::info << "CPU Extension loaded: " << FLAGS_l << slog::endl; } diff --git a/inference-engine/src/cldnn_engine/cldnn_remote_context.cpp b/inference-engine/src/cldnn_engine/cldnn_remote_context.cpp index 50dd852b3b2..5d8c0bf1916 100644 --- a/inference-engine/src/cldnn_engine/cldnn_remote_context.cpp +++ b/inference-engine/src/cldnn_engine/cldnn_remote_context.cpp @@ -151,7 +151,7 @@ void CLDNNRemoteBlobImpl::allocate() noexcept { const std::shared_ptr& CLDNNRemoteBlobImpl::getAllocator() const noexcept { if (!_allocator) { - _allocator = shared_from_irelease(reinterpret_cast(&m_allocator)); + _allocator = std::shared_ptr(&m_allocator, [] (IAllocator*) {}); } return _allocator; }; diff --git a/inference-engine/src/cldnn_engine/cldnn_remote_context.h b/inference-engine/src/cldnn_engine/cldnn_remote_context.h index a539e2c6b50..02f6e7fcc03 100644 --- a/inference-engine/src/cldnn_engine/cldnn_remote_context.h +++ b/inference-engine/src/cldnn_engine/cldnn_remote_context.h @@ -198,8 +198,6 @@ public: * @return false if handle cannot be released, otherwise - true. 
*/ bool free(void* handle) noexcept override { return true; } - - void Release() noexcept override {} }; class CLDNNExecutionContextImpl : public InferenceEngine::gpu::details::param_map_obj_getter { diff --git a/inference-engine/src/inference_engine/cnn_network_ngraph_impl.hpp b/inference-engine/src/inference_engine/cnn_network_ngraph_impl.hpp index 82d19bf61fd..9d0bc076dd7 100644 --- a/inference-engine/src/inference_engine/cnn_network_ngraph_impl.hpp +++ b/inference-engine/src/inference_engine/cnn_network_ngraph_impl.hpp @@ -36,12 +36,11 @@ namespace details { /** * @brief Ngraph-based implementation of the ICNNNetwork interface. */ -class INFERENCE_ENGINE_API_CLASS(CNNNetworkNGraphImpl): public ICNNNetwork { +class INFERENCE_ENGINE_API_CLASS(CNNNetworkNGraphImpl) final : public ICNNNetwork { public: CNNNetworkNGraphImpl(const std::shared_ptr<::ngraph::Function>& nGraph, const std::vector& exts = {}); CNNNetworkNGraphImpl(const CNNNetwork& nGraph); - ~CNNNetworkNGraphImpl() override = default; void getOutputsInfo(std::map& out) const noexcept override; @@ -63,10 +62,6 @@ public: void addOutput(const ::ngraph::Output<::ngraph::Node> & dataName); - void Release() noexcept override { - delete this; - } - std::shared_ptr getFunction() const noexcept override { return _ngraph_function; } @@ -111,16 +106,5 @@ private: void reshape(); void reshape(const std::map>& inputShapes); }; - -class TINGraphBody : public CNNNetworkNGraphImpl { -public: - explicit TINGraphBody(const std::shared_ptr<::ngraph::Function>& func): CNNNetworkNGraphImpl(func) {} - -protected: - std::shared_ptr<::ngraph::Function> cloneFunction(bool constFolding) const override { - return _ngraph_function; - } -}; - } // namespace details } // namespace InferenceEngine diff --git a/inference-engine/src/inference_engine/cpp/ie_cnn_network.cpp b/inference-engine/src/inference_engine/cpp/ie_cnn_network.cpp index 1c2ac1d3700..1bc0f29f7d5 100644 --- a/inference-engine/src/inference_engine/cpp/ie_cnn_network.cpp +++ b/inference-engine/src/inference_engine/cpp/ie_cnn_network.cpp @@ -10,8 +10,6 @@ namespace InferenceEngine { -ICNNNetwork::~ICNNNetwork() {} - CNNNetwork::CNNNetwork() : network(), actual(), output() { } diff --git a/inference-engine/src/inference_engine/ie_core.cpp b/inference-engine/src/inference_engine/ie_core.cpp index b5bb82116cb..0f0d6da189b 100644 --- a/inference-engine/src/inference_engine/ie_core.cpp +++ b/inference-engine/src/inference_engine/ie_core.cpp @@ -364,7 +364,7 @@ public: allowNotImplemented([&]() { for (auto&& extensionLocation : desc.listOfExtentions) { - plugin.AddExtension(make_so_pointer(extensionLocation)); + plugin.AddExtension(std::make_shared(extensionLocation)); } }); } @@ -738,11 +738,10 @@ std::vector Core::GetAvailableDevices() const { for (auto&& deviceName : _impl->GetListOfDevicesInRegistry()) { std::vector devicesIDs; - try { Parameter p = GetMetric(deviceName, propertyName); devicesIDs = p.as>(); - } catch (details::InferenceEngineException&) { + } catch (details::InferenceEngineException& e) { // plugin is not created by e.g. 
invalid env } catch (const std::exception& ex) { THROW_IE_EXCEPTION << "An exception is thrown while trying to create the " << deviceName diff --git a/inference-engine/src/inference_engine/ie_network_reader.cpp b/inference-engine/src/inference_engine/ie_network_reader.cpp index 4f7c4e78b9b..3573cb3dfbe 100644 --- a/inference-engine/src/inference_engine/ie_network_reader.cpp +++ b/inference-engine/src/inference_engine/ie_network_reader.cpp @@ -62,10 +62,6 @@ class Reader: public IReader { return const_cast(this)->getReaderPtr(); } - void Release() noexcept override { - delete this; - } - public: using Ptr = std::shared_ptr; Reader(const std::string& name, const std::string location): name(name), location(location) {} diff --git a/inference-engine/src/inference_engine/os/lin/lin_shared_object_loader.cpp b/inference-engine/src/inference_engine/os/lin/lin_shared_object_loader.cpp index 3252adce7d5..62726738b10 100644 --- a/inference-engine/src/inference_engine/os/lin/lin_shared_object_loader.cpp +++ b/inference-engine/src/inference_engine/os/lin/lin_shared_object_loader.cpp @@ -45,7 +45,8 @@ public: procAddr = dlsym(shared_object, symbolName); if (procAddr == nullptr) - THROW_IE_EXCEPTION << "dlSym cannot locate method '" << symbolName << "': " << dlerror(); + THROW_IE_EXCEPTION << details::as_status << NOT_FOUND + << "dlSym cannot locate method '" << symbolName << "': " << dlerror(); return procAddr; } }; @@ -60,8 +61,7 @@ SharedObjectLoader::SharedObjectLoader(const char * pluginName) { _impl.reset(new Impl(pluginName)); } -SharedObjectLoader::~SharedObjectLoader() noexcept(false) { -} +SharedObjectLoader::~SharedObjectLoader() noexcept(false) {} void* SharedObjectLoader::get_symbol(const char* symbolName) const { return _impl->get_symbol(symbolName); diff --git a/inference-engine/src/inference_engine/os/win/win_shared_object_loader.cpp b/inference-engine/src/inference_engine/os/win/win_shared_object_loader.cpp index b0dcad15e5b..93fdd3e1916 100644 --- a/inference-engine/src/inference_engine/os/win/win_shared_object_loader.cpp +++ b/inference-engine/src/inference_engine/os/win/win_shared_object_loader.cpp @@ -247,7 +247,8 @@ class SharedObjectLoader::Impl { } auto procAddr = reinterpret_cast(GetProcAddress(shared_object, symbolName)); if (procAddr == nullptr) - THROW_IE_EXCEPTION << "GetProcAddress cannot locate method '" << symbolName << "': " << GetLastError(); + THROW_IE_EXCEPTION << details::as_status << NOT_FOUND + << "GetProcAddress cannot locate method '" << symbolName << "': " << GetLastError(); return procAddr; } diff --git a/inference-engine/src/inference_engine/system_allocator.cpp b/inference-engine/src/inference_engine/system_allocator.cpp index 43072edc4f7..819b99e7718 100644 --- a/inference-engine/src/inference_engine/system_allocator.cpp +++ b/inference-engine/src/inference_engine/system_allocator.cpp @@ -6,9 +6,9 @@ namespace InferenceEngine { -IAllocator* CreateDefaultAllocator() noexcept { +INFERENCE_ENGINE_API_CPP(std::shared_ptr) CreateDefaultAllocator() noexcept { try { - return new SystemMemoryAllocator(); + return std::make_shared(); } catch (...) 
{ return nullptr; } diff --git a/inference-engine/src/inference_engine/system_allocator.hpp b/inference-engine/src/inference_engine/system_allocator.hpp index 078cee3fd35..8d418f78e40 100644 --- a/inference-engine/src/inference_engine/system_allocator.hpp +++ b/inference-engine/src/inference_engine/system_allocator.hpp @@ -8,12 +8,9 @@ #include "ie_allocator.hpp" +namespace InferenceEngine { class SystemMemoryAllocator : public InferenceEngine::IAllocator { public: - void Release() noexcept override { - delete this; - } - void* lock(void* handle, InferenceEngine::LockOp = InferenceEngine::LOCK_FOR_WRITE) noexcept override { return handle; } @@ -36,4 +33,6 @@ public: } return true; } -}; \ No newline at end of file +}; + +} // namespace InferenceEngine \ No newline at end of file diff --git a/inference-engine/src/legacy_api/include/legacy/cnn_network_impl.hpp b/inference-engine/src/legacy_api/include/legacy/cnn_network_impl.hpp index 9ff94d99e00..08a11b55e0b 100644 --- a/inference-engine/src/legacy_api/include/legacy/cnn_network_impl.hpp +++ b/inference-engine/src/legacy_api/include/legacy/cnn_network_impl.hpp @@ -25,13 +25,12 @@ namespace details { IE_SUPPRESS_DEPRECATED_START -class INFERENCE_ENGINE_API_CLASS(CNNNetworkImpl): public ICNNNetwork, - public std::enable_shared_from_this { +class INFERENCE_ENGINE_API_CLASS(CNNNetworkImpl) final : public ICNNNetwork { public: CNNNetworkImpl(); explicit CNNNetworkImpl(const ICNNNetwork & ngraphImpl); explicit CNNNetworkImpl(const CNNNetwork & ngraphImpl); - ~CNNNetworkImpl() override; + ~CNNNetworkImpl(); std::shared_ptr<::ngraph::Function> getFunction() noexcept override { return nullptr; @@ -116,10 +115,6 @@ public: void removeOutput(const std::string& dataName); - void Release() noexcept override { - delete this; - } - virtual void validate(int = 2); StatusCode reshape(const std::map>& inputShapes, diff --git a/inference-engine/src/legacy_api/include/legacy/convert_function_to_cnn_network.hpp b/inference-engine/src/legacy_api/include/legacy/convert_function_to_cnn_network.hpp index 2434fe0faaa..0688e0e2dd5 100644 --- a/inference-engine/src/legacy_api/include/legacy/convert_function_to_cnn_network.hpp +++ b/inference-engine/src/legacy_api/include/legacy/convert_function_to_cnn_network.hpp @@ -33,10 +33,6 @@ class ConstAllocatorWrapper : public IAllocator { public: explicit ConstAllocatorWrapper(std::shared_ptr constOp): _constOp(std::move(constOp)) {} - void Release() noexcept override { - delete this; - } - void* lock(void* handle, LockOp) noexcept override { return handle; } diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_edge.h b/inference-engine/src/mkldnn_plugin/mkldnn_edge.h index 8e12d76bd9e..0954bca7079 100644 --- a/inference-engine/src/mkldnn_plugin/mkldnn_edge.h +++ b/inference-engine/src/mkldnn_plugin/mkldnn_edge.h @@ -5,7 +5,8 @@ #pragma once #include - +#include +#include
#include "mkldnn_memory.h" #include "mkldnn_dims.h" #include "mkldnn_weights_cache.hpp" diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_plugin.h b/inference-engine/src/mkldnn_plugin/mkldnn_plugin.h index 028d5238be2..114424d5f2f 100644 --- a/inference-engine/src/mkldnn_plugin/mkldnn_plugin.h +++ b/inference-engine/src/mkldnn_plugin/mkldnn_plugin.h @@ -19,7 +19,7 @@ namespace MKLDNNPlugin { class Engine : public InferenceEngine::InferencePluginInternal { public: Engine(); - ~Engine() override; + ~Engine(); InferenceEngine::ExecutableNetworkInternal::Ptr LoadExeNetworkImpl(const InferenceEngine::CNNNetwork &network, diff --git a/inference-engine/src/mkldnn_plugin/nodes/list.hpp b/inference-engine/src/mkldnn_plugin/nodes/list.hpp index 7ab38631347..b64f832cd04 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/list.hpp +++ b/inference-engine/src/mkldnn_plugin/nodes/list.hpp @@ -83,10 +83,6 @@ public: void Unload() noexcept override {} - void Release() noexcept override { - delete this; - } - using LayersFactory = openvino::cc::Factory< std::string, InferenceEngine::ILayerImplFactory*(const InferenceEngine::CNNLayer*)>; diff --git a/inference-engine/src/multi_device/multi_device_exec_network.cpp b/inference-engine/src/multi_device/multi_device_exec_network.cpp index 13114e5faa1..510ad420b21 100644 --- a/inference-engine/src/multi_device/multi_device_exec_network.cpp +++ b/inference-engine/src/multi_device/multi_device_exec_network.cpp @@ -201,7 +201,7 @@ IInferRequest::Ptr MultiDeviceExecutableNetwork::CreateInferRequest() { _needPerfCounters, std::static_pointer_cast(shared_from_this()), _callbackExecutor); - asyncRequest.reset(new InferRequestBase(asyncTreadSafeImpl), [](IInferRequest *p) { p->Release(); }); + asyncRequest.reset(new InferRequestBase(asyncTreadSafeImpl)); asyncTreadSafeImpl->SetPointerToPublicInterface(asyncRequest); return asyncRequest; } diff --git a/inference-engine/src/multi_device/multi_device_plugin.hpp b/inference-engine/src/multi_device/multi_device_plugin.hpp index 0e2d9a43711..76ae11e020b 100644 --- a/inference-engine/src/multi_device/multi_device_plugin.hpp +++ b/inference-engine/src/multi_device/multi_device_plugin.hpp @@ -18,7 +18,7 @@ namespace MultiDevicePlugin { class MultiDeviceInferencePlugin : public InferenceEngine::InferencePluginInternal { public: MultiDeviceInferencePlugin(); - ~MultiDeviceInferencePlugin() override = default; + ~MultiDeviceInferencePlugin() = default; InferenceEngine::ExecutableNetworkInternal::Ptr LoadExeNetworkImpl(const InferenceEngine::CNNNetwork& network, const std::map& config) override; diff --git a/inference-engine/src/plugin_api/cpp_interfaces/base/ie_executable_network_base.hpp b/inference-engine/src/plugin_api/cpp_interfaces/base/ie_executable_network_base.hpp index 8d7dd27e21f..be641c93895 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/base/ie_executable_network_base.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/base/ie_executable_network_base.hpp @@ -88,10 +88,6 @@ public: } IE_SUPPRESS_DEPRECATED_END - void Release() noexcept override { - delete this; - } - StatusCode SetConfig(const std::map& config, ResponseDesc* resp) noexcept override { TO_STATUS(_impl->SetConfig(config)); } @@ -107,9 +103,6 @@ public: StatusCode GetContext(RemoteContext::Ptr& pContext, ResponseDesc* resp) const noexcept override { TO_STATUS(pContext = _impl->GetContext()); } - -protected: - ~ExecutableNetworkBase() override = default; }; IE_SUPPRESS_DEPRECATED_END_WIN @@ -124,9 +117,7 @@ template inline typename 
InferenceEngine::ExecutableNetwork make_executable_network(std::shared_ptr impl) { // to suppress warning about deprecated QueryState IE_SUPPRESS_DEPRECATED_START - typename ExecutableNetworkBase::Ptr net(new ExecutableNetworkBase(impl), [](IExecutableNetwork* p) { - p->Release(); - }); + typename ExecutableNetworkBase::Ptr net(new ExecutableNetworkBase(impl)); IE_SUPPRESS_DEPRECATED_END return InferenceEngine::ExecutableNetwork(net); } diff --git a/inference-engine/src/plugin_api/cpp_interfaces/base/ie_infer_async_request_base.hpp b/inference-engine/src/plugin_api/cpp_interfaces/base/ie_infer_async_request_base.hpp index 8078431fff7..e88c2c3e46e 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/base/ie_infer_async_request_base.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/base/ie_infer_async_request_base.hpp @@ -84,10 +84,6 @@ public: TO_STATUS(_impl->SetUserData(data)); } - void Release() noexcept override { - delete this; - } - StatusCode SetBatch(int batch_size, ResponseDesc* resp) noexcept override { TO_STATUS(_impl->SetBatch(batch_size)); } @@ -108,9 +104,6 @@ public: } } IE_SUPPRESS_DEPRECATED_END - -private: - ~InferRequestBase() = default; }; } // namespace InferenceEngine diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_thread_safe_async_only.hpp b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_thread_safe_async_only.hpp index 3cfd1ddad5a..42e0e0e81a0 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_thread_safe_async_only.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_thread_safe_async_only.hpp @@ -39,9 +39,7 @@ public: auto asyncRequestImpl = this->CreateAsyncInferRequestImpl(_networkInputs, _networkOutputs); asyncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this()); - asyncRequest.reset(new InferRequestBase(asyncRequestImpl), [](IInferRequest* p) { - p->Release(); - }); + asyncRequest.reset(new InferRequestBase(asyncRequestImpl)); asyncRequestImpl->SetPointerToPublicInterface(asyncRequest); return asyncRequest; } diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp index 6b41edf84db..49714e13faf 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp @@ -62,15 +62,12 @@ protected: */ template IInferRequest::Ptr CreateAsyncInferRequestFromSync() { - IInferRequest::Ptr asyncRequest; - auto syncRequestImpl = this->CreateInferRequestImpl(_networkInputs, _networkOutputs); syncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this()); auto asyncThreadSafeImpl = std::make_shared( syncRequestImpl, _taskExecutor, _callbackExecutor); - asyncRequest.reset(new InferRequestBase(asyncThreadSafeImpl), - [](IInferRequest *p) { p->Release(); }); + IInferRequest::Ptr asyncRequest = std::make_shared(asyncThreadSafeImpl); asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest); return asyncRequest; diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp index 6862bf78622..73064fa723d 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp +++ 
b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp @@ -47,12 +47,6 @@ static inline void parsePluginName(std::istream& networkModel) { * @ingroup ie_dev_api_plugin_api */ class InferencePluginInternal : public IInferencePlugin { -protected: - /** - * @brief Destroys the object. - */ - ~InferencePluginInternal() override = default; - public: ExecutableNetwork LoadNetwork(const CNNNetwork& network, const std::map& config) override { diff --git a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iinfer_async_request_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iinfer_async_request_internal.hpp index 75cb9a1e484..14c5c7e1644 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iinfer_async_request_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iinfer_async_request_internal.hpp @@ -26,11 +26,6 @@ public: */ typedef std::shared_ptr Ptr; - /** - * @brief A virtual destructor - */ - virtual ~IAsyncInferRequestInternal() = default; - /** * @brief Start inference of specified input(s) in asynchronous mode * @note The method returns immediately. Inference starts also immediately. diff --git a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp index dbce52204f0..8e748df3e5c 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp @@ -83,8 +83,7 @@ inline void copyInputOutputInfo(const InputsDataMap & networkInputs, const Outpu * @brief An API of plugin to be implemented by a plugin * @ingroup ie_dev_api_plugin_api */ -class IInferencePlugin : public details::IRelease, - public std::enable_shared_from_this { +class IInferencePlugin : public std::enable_shared_from_this { class VersionStore : public Version { std::string _dsc; std::string _buildNumber; @@ -112,12 +111,6 @@ class IInferencePlugin : public details::IRelease, } } _version; -protected: - /** - * @brief Destroys the object. - */ - ~IInferencePlugin() override = default; - public: /** * @brief A shared pointer to IInferencePlugin interface @@ -140,10 +133,6 @@ public: return _version; } - void Release() noexcept override { - delete this; - } - /** * @brief Provides a name of a plugin * @return The name. @@ -271,6 +260,9 @@ public: * @return The result of query operator containing supported layers map */ virtual QueryNetworkResult QueryNetwork(const CNNNetwork& network, const std::map& config) const = 0; + +protected: + ~IInferencePlugin() = default; }; } // namespace InferenceEngine @@ -280,16 +272,16 @@ public: * @brief Defines the exported `CreatePluginEngine` function which is used to create a plugin instance * @ingroup ie_dev_api_plugin_api */ -#define IE_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) \ - INFERENCE_PLUGIN_API(InferenceEngine::StatusCode) CreatePluginEngine( \ - InferenceEngine::IInferencePlugin *&plugin, \ - InferenceEngine::ResponseDesc *resp) noexcept { \ - try { \ - plugin = new PluginType(__VA_ARGS__); \ - plugin->SetVersion(version); \ - return InferenceEngine::OK; \ - } \ - catch (std::exception &ex) { \ - return InferenceEngine::DescriptionBuffer(InferenceEngine::GENERAL_ERROR, resp) << ex.what(); \ - } \ +#define IE_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) 
\ + INFERENCE_PLUGIN_API(void) CreatePluginEngine(::std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) { \ + try { \ + plugin = ::std::make_shared(__VA_ARGS__); \ + } catch (const InferenceEngine::details::InferenceEngineException& e) { \ + throw; \ + } catch (const std::exception& ex) { \ + THROW_IE_EXCEPTION << ex.what(); \ + } catch (...) { \ + THROW_IE_EXCEPTION_WITH_STATUS(UNEXPECTED); \ + } \ + plugin->SetVersion(version); \ } diff --git a/inference-engine/src/preprocessing/ie_preprocess_data.cpp b/inference-engine/src/preprocessing/ie_preprocess_data.cpp index e513da80f72..a672be24ab0 100644 --- a/inference-engine/src/preprocessing/ie_preprocess_data.cpp +++ b/inference-engine/src/preprocessing/ie_preprocess_data.cpp @@ -36,18 +36,13 @@ public: void execute(Blob::Ptr &preprocessedBlob, const PreProcessInfo &info, bool serial, int batchSize = -1) override; - void Release() noexcept override; - void isApplicable(const Blob::Ptr &src, const Blob::Ptr &dst) override; }; -StatusCode CreatePreProcessData(IPreProcessData *& data, ResponseDesc * /*resp*/) noexcept { - data = new PreProcessData(); - return StatusCode::OK; -} +INFERENCE_PRERPOC_PLUGIN_API(void) CreatePreProcessData(std::shared_ptr& data); -void PreProcessData::Release() noexcept { - delete this; +INFERENCE_PRERPOC_PLUGIN_API(void) CreatePreProcessData(std::shared_ptr& data) { + data = std::make_shared(); } void PreProcessData::setRoiBlob(const Blob::Ptr &blob) { diff --git a/inference-engine/src/preprocessing/ie_preprocess_data.hpp b/inference-engine/src/preprocessing/ie_preprocess_data.hpp index f0e351f09bf..b305c6b05f9 100644 --- a/inference-engine/src/preprocessing/ie_preprocess_data.hpp +++ b/inference-engine/src/preprocessing/ie_preprocess_data.hpp @@ -31,7 +31,7 @@ namespace InferenceEngine { /** * @brief This class stores pre-process information for exact input */ -class IPreProcessData : public details::IRelease { +class IPreProcessData : public std::enable_shared_from_this { public: /** * @brief Sets ROI blob to be resized and placed to the default input blob during pre-processing. 
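// Note (not part of the patch): the hunks around this point replace the old
// Release()-based factories (StatusCode + raw-pointer out-parameter +
// ResponseDesc) with functions that fill a std::shared_ptr and report failures
// through exceptions, while the interfaces themselves get protected
// destructors so they can only be destroyed through the shared_ptr control
// block. The following is a minimal, self-contained sketch of that ownership
// pattern; IWidget, WidgetImpl and CreateWidget are hypothetical names, not
// part of the Inference Engine API.
#include <iostream>
#include <memory>

// Interface that is only ever held through std::shared_ptr. The protected,
// non-virtual destructor forbids `delete` through the interface pointer;
// the deleter captured by std::make_shared destroys the concrete type.
class IWidget : public std::enable_shared_from_this<IWidget> {
public:
    virtual void run() = 0;
protected:
    ~IWidget() = default;
};

class WidgetImpl : public IWidget {
public:
    void run() override { std::cout << "running\n"; }
};

// Factory in the new style: no StatusCode or ResponseDesc, the result is
// returned via a shared_ptr out-parameter and errors propagate as exceptions
// (std::make_shared already throws std::bad_alloc on allocation failure).
void CreateWidget(std::shared_ptr<IWidget>& widget) {
    widget = std::make_shared<WidgetImpl>();
}

int main() {
    std::shared_ptr<IWidget> w;
    CreateWidget(w);
    w->run();  // WidgetImpl is destroyed automatically with the last shared_ptr
}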
@@ -58,9 +58,12 @@ public: //FIXME: rename to verifyAplicable virtual void isApplicable(const Blob::Ptr &src, const Blob::Ptr &dst) = 0; + +protected: + ~IPreProcessData() = default; }; -INFERENCE_PRERPOC_PLUGIN_API(StatusCode) CreatePreProcessData(IPreProcessData *& data, ResponseDesc *resp) noexcept; +INFERENCE_PRERPOC_PLUGIN_API(void) CreatePreProcessData(std::shared_ptr& data); namespace details { diff --git a/inference-engine/src/readers/ir_reader/ie_ir_reader.cpp b/inference-engine/src/readers/ir_reader/ie_ir_reader.cpp index 4a6a2eeee28..766b51196f1 100644 --- a/inference-engine/src/readers/ir_reader/ie_ir_reader.cpp +++ b/inference-engine/src/readers/ir_reader/ie_ir_reader.cpp @@ -48,12 +48,6 @@ CNNNetwork IRReader::read(std::istream& model, const Blob::CPtr& weights, const return CNNNetwork(parser.parse(root, weights)); } -INFERENCE_PLUGIN_API(StatusCode) InferenceEngine::CreateReader(IReader*& reader, ResponseDesc *resp) noexcept { - try { - reader = new IRReader(); - return OK; - } - catch (std::exception &) { - return GENERAL_ERROR; - } +INFERENCE_PLUGIN_API(void) InferenceEngine::CreateReader(std::shared_ptr& reader) { + reader = std::make_shared(); } diff --git a/inference-engine/src/readers/ir_reader/ie_ir_reader.hpp b/inference-engine/src/readers/ir_reader/ie_ir_reader.hpp index 88f6eb2fd96..f3923521603 100644 --- a/inference-engine/src/readers/ir_reader/ie_ir_reader.hpp +++ b/inference-engine/src/readers/ir_reader/ie_ir_reader.hpp @@ -32,9 +32,6 @@ namespace InferenceEngine { */ class IRReader: public IReader { public: - void Release() noexcept override { - delete this; - } /** * @brief Checks that reader supports format of the model * @param model stream with model diff --git a/inference-engine/src/readers/onnx_reader/ie_onnx_reader.cpp b/inference-engine/src/readers/onnx_reader/ie_onnx_reader.cpp index 6c3f5bb0933..8c5bcc9009e 100644 --- a/inference-engine/src/readers/onnx_reader/ie_onnx_reader.cpp +++ b/inference-engine/src/readers/onnx_reader/ie_onnx_reader.cpp @@ -65,12 +65,6 @@ CNNNetwork ONNXReader::read(std::istream& model, const std::vector& reader) { + reader = std::make_shared(); } diff --git a/inference-engine/src/readers/onnx_reader/ie_onnx_reader.hpp b/inference-engine/src/readers/onnx_reader/ie_onnx_reader.hpp index 740eeefbd80..802d18563f1 100644 --- a/inference-engine/src/readers/onnx_reader/ie_onnx_reader.hpp +++ b/inference-engine/src/readers/onnx_reader/ie_onnx_reader.hpp @@ -10,9 +10,6 @@ namespace InferenceEngine { class ONNXReader: public IReader { public: - void Release() noexcept override { - delete this; - } /** * @brief Checks that reader supports format of the model * @param model stream with model diff --git a/inference-engine/src/readers/reader_api/ie_reader.hpp b/inference-engine/src/readers/reader_api/ie_reader.hpp index 1d52e0cdbad..c7ddae6c0d9 100644 --- a/inference-engine/src/readers/reader_api/ie_reader.hpp +++ b/inference-engine/src/readers/reader_api/ie_reader.hpp @@ -4,7 +4,6 @@ #pragma once -#include
#include #include #include @@ -17,7 +16,7 @@ namespace InferenceEngine { /** * @brief IReader an abstract interface for Inference Engine readers */ -class IReader: public details::IRelease { +class IReader: public std::enable_shared_from_this { public: /** * @brief Checks that reader supports format of the model @@ -49,15 +48,15 @@ public: * @return vector of file extensions, for example the reader for OpenVINO IR returns {"bin"} */ virtual std::vector getDataFileExtensions() const = 0; + +protected: + ~IReader() = default; }; /** * @brief Creates the default instance of the reader - * - * @param reader Reader interface - * @param resp Response description - * @return Status code + * @return Reader interface */ -INFERENCE_PLUGIN_API(StatusCode) CreateReader(IReader*& reader, ResponseDesc* resp) noexcept; +INFERENCE_PLUGIN_API(void) CreateReader(std::shared_ptr& reader); } // namespace InferenceEngine diff --git a/inference-engine/src/vpu/myriad_plugin/myriad_executable_network.h b/inference-engine/src/vpu/myriad_plugin/myriad_executable_network.h index 5319e70a9de..eb24b3a9a88 100644 --- a/inference-engine/src/vpu/myriad_plugin/myriad_executable_network.h +++ b/inference-engine/src/vpu/myriad_plugin/myriad_executable_network.h @@ -88,8 +88,7 @@ public: auto taskExecutorGetResult = getNextTaskExecutor(); auto asyncThreadSafeImpl = std::make_shared( syncRequestImpl, _taskExecutor, _callbackExecutor, taskExecutorGetResult); - asyncRequest.reset(new ie::InferRequestBase(asyncThreadSafeImpl), - [](ie::IInferRequest *p) { p->Release(); }); + asyncRequest.reset(new ie::InferRequestBase(asyncThreadSafeImpl)); asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest); return asyncRequest; } diff --git a/inference-engine/src/vpu/myriad_plugin/myriad_plugin.h b/inference-engine/src/vpu/myriad_plugin/myriad_plugin.h index 7e7cb7f96b6..7eada56f035 100644 --- a/inference-engine/src/vpu/myriad_plugin/myriad_plugin.h +++ b/inference-engine/src/vpu/myriad_plugin/myriad_plugin.h @@ -21,7 +21,7 @@ class Engine : public ie::InferencePluginInternal { public: explicit Engine(std::shared_ptr mvnc); - ~Engine() override { + ~Engine() { MyriadExecutor::closeDevices(_devicePool, _mvnc); } diff --git a/inference-engine/tests/functional/inference_engine/core_threading_tests.cpp b/inference-engine/tests/functional/inference_engine/core_threading_tests.cpp index 8299dd39d60..5dfa5f091c4 100644 --- a/inference-engine/tests/functional/inference_engine/core_threading_tests.cpp +++ b/inference-engine/tests/functional/inference_engine/core_threading_tests.cpp @@ -46,7 +46,7 @@ public: void safeAddExtension(InferenceEngine::Core & ie) { try { - auto extension = InferenceEngine::make_so_pointer( + auto extension = std::make_shared( FileUtils::makePluginLibraryName({}, std::string("template_extension") + IE_BUILD_POSTFIX)); ie.AddExtension(extension); diff --git a/inference-engine/tests/functional/inference_engine/ie_irelease_test.cpp b/inference-engine/tests/functional/inference_engine/ie_irelease_test.cpp deleted file mode 100644 index ad69466c769..00000000000 --- a/inference-engine/tests/functional/inference_engine/ie_irelease_test.cpp +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (C) 2019 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#include - -#include -#include
- -#include "common_test_utils/test_common.hpp" - -using IReleaseTests = CommonTestUtils::TestsCommon; - -/** - * @brief Testing that callback with Release() from shared_from_irelease(...) - * won't be applied for nullptr. - */ -TEST_F(IReleaseTests, sharedFromIReleaseWithNull) { - InferenceEngine::details::IRelease *irelease = nullptr; - std::shared_ptr ptr = InferenceEngine::details::shared_from_irelease(irelease); - ptr.reset(); -} \ No newline at end of file diff --git a/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp b/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp index e2e633e857c..5feffdc581e 100644 --- a/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp +++ b/inference-engine/tests/functional/inference_engine/ir_serialization/custom_ops.cpp @@ -43,7 +43,7 @@ TEST_F(CustomOpsSerializationTest, CustomOpUser_MO) { InferenceEngine::Core ie; ie.AddExtension( - InferenceEngine::make_so_pointer( + std::make_shared( get_extension_path())); auto expected = ie.ReadNetwork(model); @@ -65,7 +65,7 @@ TEST_F(CustomOpsSerializationTest, CustomOpUser_ONNXImporter) { InferenceEngine::Core ie; ie.AddExtension( - InferenceEngine::make_so_pointer( + std::make_shared( get_extension_path())); auto expected = ie.ReadNetwork(model); @@ -87,7 +87,7 @@ TEST_F(CustomOpsSerializationTest, CustomOpTransformation) { InferenceEngine::Core ie; auto extension = - InferenceEngine::make_so_pointer( + std::make_shared( get_extension_path()); ie.AddExtension(extension); auto expected = ie.ReadNetwork(model); diff --git a/inference-engine/tests/functional/inference_engine/ngraph_reader/abs_tests.cpp b/inference-engine/tests/functional/inference_engine/ngraph_reader/abs_tests.cpp index 6460907f9c5..99c00d14cc6 100644 --- a/inference-engine/tests/functional/inference_engine/ngraph_reader/abs_tests.cpp +++ b/inference-engine/tests/functional/inference_engine/ngraph_reader/abs_tests.cpp @@ -32,7 +32,6 @@ constexpr ngraph::NodeTypeInfo FakeAbs::type_info; class AbsFakeExtension: public InferenceEngine::IExtension { public: void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {} - void Release() noexcept override { delete this; } void Unload() noexcept override {} std::map getOpSets() override{ diff --git a/inference-engine/tests/functional/inference_engine/ngraph_reader/custom_op_tests.cpp b/inference-engine/tests/functional/inference_engine/ngraph_reader/custom_op_tests.cpp index 35f0875fcc1..94a46247c94 100644 --- a/inference-engine/tests/functional/inference_engine/ngraph_reader/custom_op_tests.cpp +++ b/inference-engine/tests/functional/inference_engine/ngraph_reader/custom_op_tests.cpp @@ -55,8 +55,6 @@ class CustomAddConstExtension : public InferenceEngine::IExtension { void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {} - void Release() noexcept override { delete this; } - void Unload() noexcept override {} std::map getOpSets() override { diff --git a/inference-engine/tests/functional/inference_engine/ngraph_reshape_tests.cpp b/inference-engine/tests/functional/inference_engine/ngraph_reshape_tests.cpp index 2c4a0b5c006..7eab10ad247 100644 --- a/inference-engine/tests/functional/inference_engine/ngraph_reshape_tests.cpp +++ b/inference-engine/tests/functional/inference_engine/ngraph_reshape_tests.cpp @@ -259,8 +259,6 @@ public: void Unload() noexcept override {} - void Release() noexcept override {} - std::map getOpSets() override { static 
std::map opsets; if (opsets.empty()) { @@ -426,8 +424,6 @@ public: void Unload() noexcept override {}; - void Release() noexcept override {} - std::map getOpSets() override { static std::map opsets; if (opsets.empty()) { diff --git a/inference-engine/tests/functional/inference_engine/shared_object_loader_test.cpp b/inference-engine/tests/functional/inference_engine/shared_object_loader_test.cpp index 8da411cc405..e67e1dfc551 100644 --- a/inference-engine/tests/functional/inference_engine/shared_object_loader_test.cpp +++ b/inference-engine/tests/functional/inference_engine/shared_object_loader_test.cpp @@ -26,9 +26,10 @@ protected: } unique_ptr sharedObjectLoader; - template - std::function make_std_function(const std::string& functionName) { - std::function ptr(reinterpret_cast(sharedObjectLoader->get_symbol(functionName.c_str()))); + using CreateF = void(std::shared_ptr&); + + std::function make_std_function(const std::string& functionName) { + std::function ptr(reinterpret_cast(sharedObjectLoader->get_symbol(functionName.c_str()))); return ptr; } }; @@ -48,22 +49,21 @@ TEST_F(SharedObjectLoaderTests, loaderThrowsIfNoPlugin) { TEST_F(SharedObjectLoaderTests, canFindExistedMethod) { loadDll(get_mock_engine_name()); - auto factory = make_std_function("CreatePluginEngine"); + auto factory = make_std_function("CreatePluginEngine"); EXPECT_NE(nullptr, factory); } TEST_F(SharedObjectLoaderTests, throwIfMethodNofFoundInLibrary) { loadDll(get_mock_engine_name()); - EXPECT_THROW(make_std_function("wrong_function"), InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(make_std_function("wrong_function"), + InferenceEngine::details::InferenceEngineException); } TEST_F(SharedObjectLoaderTests, canCallExistedMethod) { loadDll(get_mock_engine_name()); - auto factory = make_std_function("CreatePluginEngine"); - IInferencePlugin* ptr = nullptr; - ResponseDesc resp; - EXPECT_NO_THROW(factory(ptr, &resp)); - ptr->Release(); + auto factory = make_std_function("CreatePluginEngine"); + std::shared_ptr ptr; + EXPECT_NO_THROW(factory(ptr)); } diff --git a/inference-engine/tests/functional/inference_engine/so_pointer_tests.cpp b/inference-engine/tests/functional/inference_engine/so_pointer_tests.cpp index 7ca63f51206..82f985ada99 100644 --- a/inference-engine/tests/functional/inference_engine/so_pointer_tests.cpp +++ b/inference-engine/tests/functional/inference_engine/so_pointer_tests.cpp @@ -11,7 +11,6 @@ #include #include #include
-#include
#include #include @@ -81,10 +80,12 @@ namespace InferenceEngine { namespace details { +struct UnknownPlugin : std::enable_shared_from_this {}; + template<> -class SOCreatorTrait { +class SOCreatorTrait { public: - static constexpr auto name = "CreateIRelease"; + static constexpr auto name = "CreateUnknownPlugin"; }; } // namespace details @@ -92,12 +93,12 @@ public: } // namespace InferenceEngine TEST_F(SoPointerTests, UnknownPlugin) { - ASSERT_THROW(SOPointer("UnknownPlugin"), InferenceEngineException); + ASSERT_THROW(SOPointer("UnknownPlugin"), InferenceEngineException); } TEST_F(SoPointerTests, UnknownPluginExceptionStr) { try { - SOPointer("UnknownPlugin"); + SOPointer("UnknownPlugin"); } catch (InferenceEngineException &e) { ASSERT_STR_CONTAINS(e.what(), "Cannot load library 'UnknownPlugin':"); @@ -105,20 +106,3 @@ TEST_F(SoPointerTests, UnknownPluginExceptionStr) { ASSERT_STR_DOES_NOT_CONTAIN(e.what(), "from CWD:"); } } - -using SymbolLoaderTests = ::testing::Test; - -TEST_F(SymbolLoaderTests, throwCreateNullPtr) { - ASSERT_THROW(SymbolLoader(nullptr), InferenceEngineException); -} - -TEST_F(SymbolLoaderTests, instantiateSymbol) { - std::string name = FileUtils::makePluginLibraryName(getIELibraryPath(), - std::string("mock_engine") + IE_BUILD_POSTFIX); - std::shared_ptr sharedLoader(new SharedObjectLoader(name.c_str())); - SymbolLoader loader(sharedLoader); - IInferencePlugin * value = nullptr; - ASSERT_NE(nullptr, value = loader.instantiateSymbol( - SOCreatorTrait::name)); - value->Release(); -} diff --git a/inference-engine/tests/functional/plugin/cpu/extension/extension.cpp b/inference-engine/tests/functional/plugin/cpu/extension/extension.cpp index ac48ca9d7f7..591ab7da37b 100644 --- a/inference-engine/tests/functional/plugin/cpu/extension/extension.cpp +++ b/inference-engine/tests/functional/plugin/cpu/extension/extension.cpp @@ -106,8 +106,6 @@ class CustomAbsExtension : public InferenceEngine::IExtension { void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {} - void Release() noexcept override { delete this; } - void Unload() noexcept override {} std::map getOpSets() override { @@ -329,7 +327,7 @@ TEST(Extension, XmlModelWithExtensionFromDSO) { std::vector input_values{1, 2, 3, 4, 5, 6, 7, 8}; std::vector expected{12, 13, 14, 15, 16, 17, 18, 19}; InferenceEngine::Core ie; - ie.AddExtension(InferenceEngine::make_so_pointer(get_extension_path())); + ie.AddExtension(std::make_shared(get_extension_path())); infer_model(ie, model, input_values, expected); } @@ -406,7 +404,7 @@ opset_import { std::vector input_values{1, 2, 3, 4, 5, 6, 7, 8}; std::vector expected{12, 13, 14, 15, 16, 17, 18, 19}; InferenceEngine::Core ie; - ie.AddExtension(InferenceEngine::make_so_pointer(get_extension_path())); + ie.AddExtension(std::make_shared(get_extension_path())); infer_model(ie, model, input_values, expected); } diff --git a/inference-engine/tests/functional/plugin/shared/include/behavior/core_threading_tests.hpp b/inference-engine/tests/functional/plugin/shared/include/behavior/core_threading_tests.hpp index 25148eac87c..6a9275f5b15 100644 --- a/inference-engine/tests/functional/plugin/shared/include/behavior/core_threading_tests.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/behavior/core_threading_tests.hpp @@ -65,7 +65,7 @@ public: void safeAddExtension(InferenceEngine::Core & ie) { try { - auto extension = InferenceEngine::make_so_pointer( + auto extension = std::make_shared( FileUtils::makePluginLibraryName({}, "template_extension")); 
ie.AddExtension(extension); } catch (const InferenceEngine::details::InferenceEngineException & ex) { diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_allocator.hpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_allocator.hpp index a75eeb49e04..eb7e946c92b 100644 --- a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_allocator.hpp +++ b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_allocator.hpp @@ -14,7 +14,6 @@ class MockAllocator : public InferenceEngine::IAllocator { public: - MOCK_QUALIFIED_METHOD0(Release, noexcept, void()); MOCK_QUALIFIED_METHOD2(lock, noexcept, void*(void*, InferenceEngine::LockOp)); MOCK_QUALIFIED_METHOD1(unlock, noexcept, void(void *)); MOCK_QUALIFIED_METHOD1(alloc, noexcept, void*(size_t)); diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp index 0c809800105..8b5acd2a8f9 100644 --- a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp +++ b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp @@ -40,16 +40,10 @@ MockPlugin::LoadExeNetworkImpl(const InferenceEngine::CNNNetwork& network, InferenceEngine::IInferencePlugin *__target = nullptr; -INFERENCE_PLUGIN_API(StatusCode) CreatePluginEngine(IInferencePlugin *&plugin, ResponseDesc *resp) noexcept { - try { - IInferencePlugin *p = nullptr; - std::swap(__target, p); - plugin = new MockPlugin(p); - return OK; - } - catch (std::exception &ex) { - return DescriptionBuffer(GENERAL_ERROR, resp) << ex.what(); - } +INFERENCE_PLUGIN_API(void) CreatePluginEngine(std::shared_ptr& plugin) { + IInferencePlugin *p = nullptr; + std::swap(__target, p); + plugin = std::make_shared(p); } INFERENCE_PLUGIN_API(InferenceEngine::IInferencePlugin*) diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_icnn_network.hpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_icnn_network.hpp index 5756a304927..7e825623ea7 100644 --- a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_icnn_network.hpp +++ b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_icnn_network.hpp @@ -22,7 +22,7 @@ IE_SUPPRESS_DEPRECATED_START * @class MockICNNNetwork * @brief Main interface to describe the NN topology */ -class MockICNNNetwork : public InferenceEngine::ICNNNetwork { +class MockICNNNetwork final : public InferenceEngine::ICNNNetwork { public: MOCK_QUALIFIED_METHOD0(getFunction, const noexcept, std::shared_ptr ()); MOCK_QUALIFIED_METHOD0(getFunction, noexcept, std::shared_ptr()); @@ -37,7 +37,6 @@ class MockICNNNetwork : public InferenceEngine::ICNNNetwork { InferenceEngine::ResponseDesc*)); MOCK_QUALIFIED_METHOD2(setBatchSize, noexcept, InferenceEngine::StatusCode(const size_t size, InferenceEngine::ResponseDesc*)); MOCK_QUALIFIED_METHOD0(getBatchSize, const noexcept, size_t()); - MOCK_QUALIFIED_METHOD0(Release, noexcept, void()); MOCK_QUALIFIED_METHOD1(getInputShapes, const noexcept, void(InferenceEngine::ICNNNetwork::InputShapes&)); MOCK_QUALIFIED_METHOD2(reshape, noexcept, InferenceEngine::StatusCode(const InferenceEngine::ICNNNetwork::InputShapes &, InferenceEngine::ResponseDesc *)); MOCK_QUALIFIED_METHOD3(serialize, const noexcept, InferenceEngine::StatusCode( @@ -45,25 +44,3 @@ class MockICNNNetwork : public InferenceEngine::ICNNNetwork { const std::string &, InferenceEngine::ResponseDesc*)); }; - 
-/** - * @class MockCNNNetworkImpl - * @brief Main interface to describe the NN topology - */ -class MockCNNNetworkImpl: public InferenceEngine::details::CNNNetworkImpl { -public: - MOCK_QUALIFIED_METHOD1(getOutputsInfo, const noexcept, void(InferenceEngine::OutputsDataMap& out)); - MOCK_QUALIFIED_METHOD1(getInputsInfo, const noexcept, void(InferenceEngine::InputsDataMap &inputs)); - MOCK_QUALIFIED_METHOD1(getInput, const noexcept, InferenceEngine::InputInfo::Ptr(const std::string &inputName)); - MOCK_QUALIFIED_METHOD0(getName, const noexcept, const std::string&()); - MOCK_QUALIFIED_METHOD0(layerCount, const noexcept, size_t()); - MOCK_QUALIFIED_METHOD3(addOutput, noexcept, InferenceEngine::StatusCode(const std::string &, size_t , InferenceEngine::ResponseDesc*)); - MOCK_QUALIFIED_METHOD2(setBatchSize, noexcept, InferenceEngine::StatusCode(const size_t size, InferenceEngine::ResponseDesc*)); - MOCK_QUALIFIED_METHOD0(getBatchSize, const noexcept, size_t()); - MOCK_QUALIFIED_METHOD0(Release, noexcept, void()); - MOCK_METHOD1(validate, void(int)); - - void validateNetwork() { - InferenceEngine::details::CNNNetworkImpl::validate(); - } -}; diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_iexecutable_network.hpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_iexecutable_network.hpp index 7aea137622b..63d629ea40b 100644 --- a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_iexecutable_network.hpp +++ b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_iexecutable_network.hpp @@ -33,7 +33,6 @@ public: MOCK_QUALIFIED_METHOD3(GetMetric, const noexcept, StatusCode(const std::string &name, Parameter &result, ResponseDesc *resp)); MOCK_QUALIFIED_METHOD2(GetContext, const noexcept, StatusCode(RemoteContext::Ptr &pContext, ResponseDesc *resp)); MOCK_QUALIFIED_METHOD3(QueryState, noexcept, StatusCode(IVariableState::Ptr &, size_t, ResponseDesc *)); - MOCK_QUALIFIED_METHOD0(Release, noexcept, void()); }; IE_SUPPRESS_DEPRECATED_END diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_iinfer_request.hpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_iinfer_request.hpp index cbc3fdac32d..df70be531df 100644 --- a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_iinfer_request.hpp +++ b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_iinfer_request.hpp @@ -27,7 +27,6 @@ public: MOCK_QUALIFIED_METHOD2(GetUserData, noexcept, StatusCode(void**, ResponseDesc*)); MOCK_QUALIFIED_METHOD2(SetUserData, noexcept, StatusCode(void*, ResponseDesc*)); MOCK_QUALIFIED_METHOD1(SetCompletionCallback, noexcept, StatusCode(IInferRequest::CompletionCallback)); - MOCK_QUALIFIED_METHOD0(Release, noexcept, void()); MOCK_QUALIFIED_METHOD1(Infer, noexcept, StatusCode(ResponseDesc*)); MOCK_QUALIFIED_METHOD2(GetPerformanceCounts, const noexcept, StatusCode(std::map &perfMap, ResponseDesc*)); diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_not_empty_icnn_network.hpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_not_empty_icnn_network.hpp index 971b2e7131a..acb3ab74bac 100644 --- a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_not_empty_icnn_network.hpp +++ b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_not_empty_icnn_network.hpp @@ -15,7 +15,7 @@ namespace InferenceEngine { -class MockNotEmptyICNNNetwork : public ICNNNetwork { +class MockNotEmptyICNNNetwork final : public ICNNNetwork { public: static 
constexpr const char* INPUT_BLOB_NAME = "first_input"; const SizeVector INPUT_DIMENTIONS = { 1, 3, 299, 299 }; @@ -73,7 +73,6 @@ public: MOCK_QUALIFIED_METHOD3(addOutput, noexcept, StatusCode(const std::string &, size_t , ResponseDesc*)); MOCK_QUALIFIED_METHOD2(setBatchSize, noexcept, StatusCode(const size_t size, ResponseDesc*)); MOCK_QUALIFIED_METHOD0(getBatchSize, const noexcept, size_t()); - MOCK_QUALIFIED_METHOD0(Release, noexcept, void()); MOCK_QUALIFIED_METHOD1(getInputShapes, const noexcept, void(ICNNNetwork::InputShapes &)); MOCK_QUALIFIED_METHOD2(reshape, noexcept, StatusCode(const ICNNNetwork::InputShapes &, ResponseDesc *)); MOCK_QUALIFIED_METHOD3(serialize, const noexcept, StatusCode(const std::string &, const std::string &, InferenceEngine::ResponseDesc*)); diff --git a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_executable_network_base_test.cpp b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_executable_network_base_test.cpp index 20125117abe..b57f4ab8a89 100644 --- a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_executable_network_base_test.cpp +++ b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_executable_network_base_test.cpp @@ -33,8 +33,7 @@ protected: virtual void SetUp() { mockExeNetwork = make_shared(); - exeNetwork = details::shared_from_irelease( - new ExecutableNetworkBase(mockExeNetwork)); + exeNetwork = std::make_shared(mockExeNetwork); InputsDataMap networkInputs; OutputsDataMap networkOutputs; mockAsyncInferRequestInternal = make_shared(networkInputs, networkOutputs); @@ -108,8 +107,7 @@ protected: virtual void SetUp() { mockExeNetwork = make_shared(); - exeNetwork = details::shared_from_irelease( - new ExecutableNetworkBase(mockExeNetwork)); + exeNetwork = std::make_shared(mockExeNetwork); InputsDataMap networkInputs; OutputsDataMap networkOutputs; mockInferRequestInternal = make_shared(networkInputs, networkOutputs); diff --git a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_base_test.cpp b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_base_test.cpp index cd116340e52..64522c7330a 100644 --- a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_base_test.cpp +++ b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_base_test.cpp @@ -34,7 +34,7 @@ protected: virtual void SetUp() { mock_impl.reset(new MockIAsyncInferRequestInternal()); - request = details::shared_from_irelease(new InferRequestBase(mock_impl)); + request = std::make_shared(mock_impl); } }; @@ -242,8 +242,7 @@ protected: OutputsDataMap outputsInfo; mockNotEmptyNet.getOutputsInfo(outputsInfo); mockInferRequestInternal = make_shared(inputsInfo, outputsInfo); - inferRequest = shared_from_irelease( - new InferRequestBase(mockInferRequestInternal)); + inferRequest = std::make_shared(mockInferRequestInternal); return make_shared(inferRequest); } diff --git a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_thread_safe_default_test.cpp b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_thread_safe_default_test.cpp index 8ac217e6cda..7ae41e057da 100644 --- a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_thread_safe_default_test.cpp +++ b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_thread_safe_default_test.cpp @@ -198,7 +198,7 @@ 
TEST_F(InferRequestThreadSafeDefaultTests, callbackTakesOKIfAsyncRequestWasOK) { testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); IInferRequest::Ptr asyncRequest; - asyncRequest.reset(new InferRequestBase(testRequest), [](IInferRequest *p) { p->Release(); }); + asyncRequest.reset(new InferRequestBase(testRequest)); testRequest->SetPointerToPublicInterface(asyncRequest); testRequest->SetCompletionCallback([](InferenceEngine::IInferRequest::Ptr request, StatusCode status) { @@ -214,7 +214,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, callbackIsCalledIfAsyncRequestFailed) auto taskExecutor = std::make_shared(); testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); IInferRequest::Ptr asyncRequest; - asyncRequest.reset(new InferRequestBase(testRequest), [](IInferRequest *p) { p->Release(); }); + asyncRequest.reset(new InferRequestBase(testRequest)); testRequest->SetPointerToPublicInterface(asyncRequest); bool wasCalled = false; @@ -236,7 +236,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, canCatchExceptionIfAsyncRequestFailed auto taskExecutor = std::make_shared(); testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); IInferRequest::Ptr asyncRequest; - asyncRequest.reset(new InferRequestBase(testRequest), [](IInferRequest *p) { p->Release(); }); + asyncRequest.reset(new InferRequestBase(testRequest)); testRequest->SetPointerToPublicInterface(asyncRequest); EXPECT_CALL(*mockInferRequestInternal.get(), InferImpl()).WillOnce(Throw(std::exception())); diff --git a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_memory_state_internal_test.cpp b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_memory_state_internal_test.cpp index d8cffe2e4de..bfd1538f9aa 100644 --- a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_memory_state_internal_test.cpp +++ b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_memory_state_internal_test.cpp @@ -20,9 +20,7 @@ using namespace InferenceEngine::details; template inline typename InferenceEngine::InferRequest make_infer_request(std::shared_ptr impl) { - typename InferRequestBase::Ptr req(new InferRequestBase(impl), [](IInferRequest* p) { - p->Release(); - }); + typename InferRequestBase::Ptr req(new InferRequestBase(impl)); return InferenceEngine::InferRequest(req); } diff --git a/inference-engine/tests/unit/inference_engine/ie_executable_network_test.cpp b/inference-engine/tests/unit/inference_engine/ie_executable_network_test.cpp index 21ea155807d..f5c85dac8fc 100644 --- a/inference-engine/tests/unit/inference_engine/ie_executable_network_test.cpp +++ b/inference-engine/tests/unit/inference_engine/ie_executable_network_test.cpp @@ -223,7 +223,7 @@ protected: virtual void SetUp() { mock_impl.reset(new MockIExecutableNetworkInternal()); - exeNetwork = shared_from_irelease(new ExecutableNetworkBase(mock_impl)); + exeNetwork = std::make_shared(mock_impl); } }; diff --git a/inference-engine/tests/unit/inference_engine/ie_extension_test.cpp b/inference-engine/tests/unit/inference_engine/ie_extension_test.cpp index 89f48bee6e2..9f001d3770c 100644 --- a/inference-engine/tests/unit/inference_engine/ie_extension_test.cpp +++ b/inference-engine/tests/unit/inference_engine/ie_extension_test.cpp @@ -25,34 +25,34 @@ std::string getExtensionPath() { } TEST(ExtensionTests, testGetOpSets) { - IExtensionPtr extension = make_so_pointer(getExtensionPath()); + IExtensionPtr extension = std::make_shared(getExtensionPath()); auto opsets = 
extension->getOpSets(); ASSERT_FALSE(opsets.empty()); opsets.clear(); } TEST(ExtensionTests, testGetImplTypes) { - IExtensionPtr extension = make_so_pointer(getExtensionPath()); + IExtensionPtr extension = std::make_shared(getExtensionPath()); auto opset = extension->getOpSets().begin()->second; std::shared_ptr op(opset.create(opset.get_types_info().begin()->name)); ASSERT_FALSE(extension->getImplTypes(op).empty()); } TEST(ExtensionTests, testGetImplTypesThrowsIfNgraphNodeIsNullPtr) { - IExtensionPtr extension = make_so_pointer(getExtensionPath()); + IExtensionPtr extension = std::make_shared(getExtensionPath()); ASSERT_THROW(extension->getImplTypes(std::shared_ptr ()), InferenceEngine::details::InferenceEngineException); } TEST(ExtensionTests, testGetImplementation) { - IExtensionPtr extension = make_so_pointer(getExtensionPath()); + IExtensionPtr extension = std::make_shared(getExtensionPath()); auto opset = extension->getOpSets().begin()->second; std::shared_ptr op(opset.create("Template")); ASSERT_NE(nullptr, extension->getImplementation(op, extension->getImplTypes(op)[0])); } TEST(ExtensionTests, testGetImplementationThrowsIfNgraphNodeIsNullPtr) { - IExtensionPtr extension = make_so_pointer(getExtensionPath()); + IExtensionPtr extension = std::make_shared(getExtensionPath()); ASSERT_THROW(extension->getImplementation(std::shared_ptr (), ""), InferenceEngine::details::InferenceEngineException); } diff --git a/inference-engine/tests/unit/inference_engine/ie_plugin_ptr.cpp b/inference-engine/tests/unit/inference_engine/ie_plugin_ptr.cpp index fe612fb38c3..a6a5da2dd33 100644 --- a/inference-engine/tests/unit/inference_engine/ie_plugin_ptr.cpp +++ b/inference-engine/tests/unit/inference_engine/ie_plugin_ptr.cpp @@ -40,15 +40,6 @@ protected: MockInferencePluginInternal2 engine; }; -TEST_F(PluginTest, canCreatePlugin) { - auto ptr = make_std_function("CreatePluginEngineProxy"); - - unique_ptr> smart_ptr(ptr(nullptr), [](IInferencePlugin *p) { - p->Release(); - }); -} - TEST_F(PluginTest, canCreatePluginUsingSmartPtr) { ASSERT_NO_THROW(InferenceEnginePluginPtr ptr(get_mock_engine_name())); } @@ -66,11 +57,11 @@ TEST_F(PluginTest, canSetConfiguration) { InferenceEnginePluginPtr ptr = getPtr(); // TODO: dynamic->reinterpret because of clang/gcc cannot // dynamically cast this MOCK object - ASSERT_TRUE(reinterpret_cast(*ptr)->config.empty()); + ASSERT_TRUE(dynamic_cast(ptr.operator->())->config.empty()); std::map config = { { "key", "value" } }; ASSERT_NO_THROW(ptr->SetConfig(config)); config.clear(); - ASSERT_STREQ(reinterpret_cast(*ptr)->config["key"].c_str(), "value"); + ASSERT_STREQ(dynamic_cast(ptr.operator->())->config["key"].c_str(), "value"); } diff --git a/inference-engine/tests/unit/inference_engine/system_allocator_test.cpp b/inference-engine/tests/unit/inference_engine/system_allocator_test.cpp index c0b03800005..f51de261129 100644 --- a/inference-engine/tests/unit/inference_engine/system_allocator_test.cpp +++ b/inference-engine/tests/unit/inference_engine/system_allocator_test.cpp @@ -9,14 +9,10 @@ #include "system_allocator.hpp" +using namespace InferenceEngine; class SystemAllocatorReleaseTests : public CommonTestUtils::TestsCommon { }; -TEST_F(SystemAllocatorReleaseTests, canRelease) { - SystemMemoryAllocator *allocator_ = new SystemMemoryAllocator(); - allocator_->Release(); -} - class SystemAllocatorTests : public CommonTestUtils::TestsCommon { protected: void SetUp() override { diff --git 
a/inference-engine/tests_deprecated/functional/mkldnn/extensions_tests/extensions_test.cpp b/inference-engine/tests_deprecated/functional/mkldnn/extensions_tests/extensions_test.cpp index 0adcc944d7a..9f7ba002b03 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/extensions_tests/extensions_test.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/extensions_tests/extensions_test.cpp @@ -104,7 +104,6 @@ public: return std::make_shared(node); }; } - void Release() noexcept override { delete this; } void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override { static const InferenceEngine::Version VERSION{{}, "", ""}; diff --git a/inference-engine/tests_deprecated/unit/engines/gna/gna_matcher.cpp b/inference-engine/tests_deprecated/unit/engines/gna/gna_matcher.cpp index 3e0b509576e..d8f97e191f5 100644 --- a/inference-engine/tests_deprecated/unit/engines/gna/gna_matcher.cpp +++ b/inference-engine/tests_deprecated/unit/engines/gna/gna_matcher.cpp @@ -63,9 +63,6 @@ public: bool free(void* handle) noexcept override { return true; } - void Release() noexcept override { - delete this; - } }; #if GNA_LIB_VER == 2 void expect_enqueue_calls(GNACppApi &mockApi, bool enableHardwareConsistency = true){ diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/constant_propagation_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/constant_propagation_test.cpp index a3586d7104c..01fcefa3f2e 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/constant_propagation_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/constant_propagation_test.cpp @@ -91,9 +91,6 @@ public: void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override {} void Unload() noexcept override {} - void Release() noexcept override { - delete this; - } InferenceEngine::StatusCode getPrimitiveTypes(char**& types, unsigned int& size, InferenceEngine::ResponseDesc* resp) noexcept override { types = new char *[factories.size()]; size_t count = 0; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/fake_layer.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/fake_layer.cpp index 799cec8ccce..5391545cf99 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/fake_layer.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/fake_layer.cpp @@ -21,10 +21,6 @@ class FakeExtensions : public Cpu::MKLDNNExtensions { public: void Unload() noexcept override {}; - void Release() noexcept override { - delete this; - }; - static std::shared_ptr GetExtensionsHolder() { static std::shared_ptr localHolder; if (localHolder == nullptr) { diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/graph_generic_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/graph_generic_test.cpp index 337abdf7440..d186d95967a 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/graph_generic_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/graph_generic_test.cpp @@ -432,9 +432,6 @@ public: void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override {} void Unload() noexcept override {} - void Release() noexcept override { - delete this; - } InferenceEngine::StatusCode getPrimitiveTypes(char**& types, unsigned int& size, 
InferenceEngine::ResponseDesc* resp) noexcept override { types = new char *[factories.size()]; size_t count = 0; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_activation_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_activation_test.cpp index c4587a736be..9092bb4ce52 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_activation_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_activation_test.cpp @@ -327,9 +327,9 @@ protected: InferenceEngine::Core core; InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr())); - - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_scaleshift_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_scaleshift_test.cpp index 18c75ae9908..169e5c97ceb 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_scaleshift_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_scaleshift_test.cpp @@ -285,8 +285,8 @@ protected: InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr)); - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_test.cpp index 57137a1e962..aa86f98ce2d 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_test.cpp @@ -254,9 +254,9 @@ protected: InferenceEngine::Core core; InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr)); - - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_concat_test.cpp 
b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_concat_test.cpp index 10ca519e3a4..f88a74bd77d 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_concat_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_concat_test.cpp @@ -321,8 +321,8 @@ protected: InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr())); - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_conv_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_conv_test.cpp index e762fe114ad..020f4999688 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_conv_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_conv_test.cpp @@ -433,8 +433,8 @@ protected: InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr)); - auto implNet = dynamic_cast(&((ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); ResponseDesc resp; StatusCode sts = implNet->setBatchSizeReshape(dims[0], &resp); ASSERT_EQ((int)StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_deconv_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_deconv_test.cpp index 035101b036b..96a9ebf322f 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_deconv_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_deconv_test.cpp @@ -481,13 +481,13 @@ protected: memcpy(model_blob_ptr, blb->buffer().as(), blb->byteSize()); model_blob_ptr += blb->byteSize(); } - + InferenceEngine::Core core; InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, model_blob)); - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_depthwise_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_depthwise_test.cpp index c76122c0e16..938017c24db 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_depthwise_test.cpp +++ 
b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_depthwise_test.cpp @@ -368,8 +368,8 @@ protected: InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr)); - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_fullyconnected_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_fullyconnected_test.cpp index c7dc6f9e092..00d65d60124 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_fullyconnected_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_fullyconnected_test.cpp @@ -267,8 +267,8 @@ class MKLDNNGraphDynBatchFullyConnectedTests: public MKLDNNGraphFullyConnectedTe InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr)); - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_gemm_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_gemm_test.cpp index 1f7eb5edf5d..9d2a5492e38 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_gemm_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_gemm_test.cpp @@ -396,9 +396,8 @@ protected: InferenceEngine::Core core; InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr())); - - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_leaks_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_leaks_test.cpp index 18a36d56e4c..a45d7e8751e 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_leaks_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_leaks_test.cpp @@ -22,7 +22,6 @@ public: struct TestExecutableNetworkBase : public InferenceEngine::ExecutableNetworkBase { using InferenceEngine::ExecutableNetworkBase::_impl; - ~TestExecutableNetworkBase() 
override = default; }; static MKLDNNPlugin::MKLDNNGraph& getGraph(InferenceEngine::IExecutableNetwork::Ptr execNetwork) { diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_lrn_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_lrn_test.cpp index d880b746afc..f946eb67b07 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_lrn_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_lrn_test.cpp @@ -245,8 +245,8 @@ protected: InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr())); - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_permute_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_permute_test.cpp index aaddef2714d..9e08bc0c8ba 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_permute_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_permute_test.cpp @@ -554,8 +554,8 @@ protected: InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr())); - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_pooling_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_pooling_test.cpp index e0078445b35..e94592e6c63 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_pooling_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_pooling_test.cpp @@ -437,8 +437,8 @@ protected: InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr())); - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_power_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_power_test.cpp index 84b5a08dd24..8c27ca1a3b8 100644 --- 
a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_power_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_power_test.cpp @@ -260,8 +260,8 @@ protected: InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr())); - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_softmax_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_softmax_test.cpp index 47b2cecb9b5..8d6a6105247 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_softmax_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_softmax_test.cpp @@ -335,8 +335,8 @@ protected: InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr())); - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_split_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_split_test.cpp index f2fe38b6eeb..2be31841e27 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_split_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_split_test.cpp @@ -313,9 +313,9 @@ protected: InferenceEngine::Core core; InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr())); - - auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_tile_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_tile_test.cpp index 7e9c2549961..a8dc36d1555 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_tile_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_tile_test.cpp @@ -215,8 +215,8 @@ protected: InferenceEngine::CNNNetwork network; ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr())); - 
auto implNet = dynamic_cast(&((InferenceEngine::ICNNNetwork&)network)); - ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl"; + ASSERT_EQ(nullptr, network.getFunction()); + auto implNet = static_cast(&((InferenceEngine::ICNNNetwork&)network)); InferenceEngine::ResponseDesc resp; InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp); ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_optimization_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_optimization_test.cpp index e9a2c650734..af759fd7e77 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_optimization_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_optimization_test.cpp @@ -303,9 +303,6 @@ public: void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override {} void Unload() noexcept override {} - void Release() noexcept override { - delete this; - } InferenceEngine::StatusCode getPrimitiveTypes(char**& types, unsigned int& size, InferenceEngine::ResponseDesc* resp) noexcept override { types = new char *[factories.size()]; size_t count = 0; diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/test_graph.hpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/test_graph.hpp index fd032cca535..7c4f2c31168 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/test_graph.hpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/test_graph.hpp @@ -278,10 +278,7 @@ public: MKLDNNGraph::CreateGraph(InferenceEngine::CNNNetwork(convertedNetwork), extMgr, cache); } else { auto & icnnnet = static_cast(network); - InferenceEngine::details::CNNNetworkImpl* netImpl = dynamic_cast(&icnnnet); - if (netImpl == nullptr) { - THROW_IE_EXCEPTION << "unexpected network type"; - } + InferenceEngine::details::CNNNetworkImpl* netImpl = static_cast(&icnnnet); MoveInternalBlobsToConstLayers(netImpl); MKLDNNGraph::CreateGraph(network, extMgr, cache); } @@ -295,10 +292,7 @@ public: MKLDNNGraph::CreateGraph(InferenceEngine::CNNNetwork(convertedNetwork), extensionManager, cache); } else { auto & icnnnet = static_cast(network); - InferenceEngine::details::CNNNetworkImpl* netImpl = dynamic_cast(&icnnnet); - if (netImpl == nullptr) { - THROW_IE_EXCEPTION << "unexpected network type"; - } + InferenceEngine::details::CNNNetworkImpl* netImpl = static_cast(&icnnnet); MoveInternalBlobsToConstLayers(netImpl); MKLDNNGraph::CreateGraph(network, extensionManager, cache); } diff --git a/inference-engine/tests_deprecated/unit/graph_tools/graph_tools_test.cpp b/inference-engine/tests_deprecated/unit/graph_tools/graph_tools_test.cpp index 2747827a2ea..bd17ce97d8b 100644 --- a/inference-engine/tests_deprecated/unit/graph_tools/graph_tools_test.cpp +++ b/inference-engine/tests_deprecated/unit/graph_tools/graph_tools_test.cpp @@ -14,6 +14,7 @@ #include #include #include +#include using namespace testing; using namespace InferenceEngine;
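// Note (not part of the patch): the deprecated-test hunks above also swap
// `dynamic_cast` plus a null check for an assertion on a known invariant (the
// network has no ngraph function) followed by a plain `static_cast`. Below is
// a minimal, generic sketch of that refactoring under assumed names: Shape,
// Circle and is_circle() merely stand in for ICNNNetwork, CNNNetworkImpl and
// the getFunction() == nullptr check; they are not Inference Engine types.
#include <cassert>
#include <iostream>

struct Shape {
    virtual ~Shape() = default;
    virtual bool is_circle() const { return false; }
};

struct Circle : Shape {
    bool is_circle() const override { return true; }
    double radius = 1.0;
};

double radius_of(Shape& s) {
    // Old style: dynamic_cast and fail at run time if the downcast is invalid.
    //   auto* c = dynamic_cast<Circle*>(&s);
    //   if (c == nullptr) { /* report "unexpected type" */ }
    // New style: assert the invariant that determines the dynamic type,
    // then perform the cheaper static_cast.
    assert(s.is_circle());
    auto& c = static_cast<Circle&>(s);
    return c.radius;
}

int main() {
    Circle c;
    std::cout << radius_of(c) << "\n";  // prints 1
}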