Removed IRelease Interface (#4032)

Anton Pankratov 2021-03-05 12:08:01 +03:00 committed by GitHub
parent 699c63bdaf
commit 2fcf92be42
113 changed files with 317 additions and 605 deletions

View File

@ -344,7 +344,7 @@ make --jobs=$(nproc)
The result of this command is a compiled shared library (`.so`, `.dylib`, or `.dll`). It should be loaded into the
application using the `AddExtension` method of the `Core` class instance, like this:
`core.AddExtension(make_so_pointer<IExtension>(compiled_library_file_name), "CPU");`.
`core.AddExtension(std::make_shared<Extension>(compiled_library_file_name), "CPU");`.
To test that the extension is implemented correctly, we can run the "mri_reconstruction_demo.py" script with the following content:
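Expanded into a compilable snippet, the updated call site looks like this. A minimal sketch: the include paths are assumed and the library file name is a placeholder.

```cpp
#include <inference_engine.hpp>  // umbrella header; ie_extension.h assumed reachable
#include <memory>
#include <string>

int main() {
    InferenceEngine::Core core;
    // Extensions are now plain shared_ptr-managed objects; the old
    // make_so_pointer<IExtension>(...) / IRelease machinery is gone.
    auto extension = std::make_shared<InferenceEngine::Extension>(
        std::string{"libtemplate_extension.so"});  // placeholder library name
    core.AddExtension(extension, "CPU");
    return 0;
}
```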

View File

@ -5,7 +5,7 @@ using namespace InferenceEngine;
//! [part0]
InferenceEngine::Core core;
// Load CPU extension as a shared library
auto extension_ptr = make_so_pointer<InferenceEngine::IExtension>("<shared lib path>");
auto extension_ptr = std::make_shared<InferenceEngine::Extension>(std::string{"<shared lib path>"});
// Add extension to the CPU device
core.AddExtension(extension_ptr, "CPU");
//! [part0]

View File

@ -109,7 +109,10 @@ InferenceEngine::ILayerImpl::Ptr Extension::getImplementation(const std::shared_
//! [extension:getImplementation]
//! [extension:CreateExtension]
// Exported function
// Generate exported function
IE_DEFINE_EXTENSION_CREATE_FUNCTION(Extension)
//! [extension:CreateExtension]
INFERENCE_EXTENSION_API(InferenceEngine::StatusCode) InferenceEngine::CreateExtension(InferenceEngine::IExtension *&ext,
InferenceEngine::ResponseDesc *resp) noexcept {
try {
@ -123,4 +126,3 @@ INFERENCE_EXTENSION_API(InferenceEngine::StatusCode) InferenceEngine::CreateExte
return InferenceEngine::GENERAL_ERROR;
}
}
//! [extension:CreateExtension]

View File

@ -21,7 +21,6 @@ public:
~Extension();
void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override;
void Unload() noexcept override {}
void Release() noexcept override { delete this; }
std::map<std::string, ngraph::OpSet> getOpSets() override;
std::vector<std::string> getImplTypes(const std::shared_ptr<ngraph::Node>& node) override;

View File

@ -143,8 +143,7 @@ InferenceEngine::IInferRequest::Ptr TemplatePlugin::ExecutableNetwork::CreateInf
auto internalRequest = CreateInferRequestImpl(_networkInputs, _networkOutputs);
auto asyncThreadSafeImpl = std::make_shared<TemplateAsyncInferRequest>(std::static_pointer_cast<TemplateInferRequest>(internalRequest),
_taskExecutor, _plugin->_waitExecutor, _callbackExecutor);
asyncRequest.reset(new InferenceEngine::InferRequestBase(asyncThreadSafeImpl),
[](InferenceEngine::IInferRequest *p) { p->Release(); });
asyncRequest.reset(new InferenceEngine::InferRequestBase(asyncThreadSafeImpl));
asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
return asyncRequest;
}

View File

@ -18,7 +18,7 @@ public:
using Ptr = std::shared_ptr<Plugin>;
Plugin();
~Plugin() override;
~Plugin();
void SetConfig(const std::map<std::string, std::string> &config) override;
InferenceEngine::QueryNetworkResult

View File

@ -451,7 +451,7 @@ IEStatusCode ie_core_add_extension(ie_core_t *core, const char *extension_path,
}
try {
auto extension_ptr = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(extension_path);
auto extension_ptr = std::make_shared<InferenceEngine::Extension>(std::string{extension_path});
auto extension = std::dynamic_pointer_cast<InferenceEngine::IExtension>(extension_ptr);
core->object.AddExtension(extension, device_name);
} catch (const IE::details::InferenceEngineException& e) {
@ -524,6 +524,8 @@ IEStatusCode ie_core_get_available_devices(const ie_core_t *core, ie_available_d
avai_devices->devices = dev_ptrs.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (const std::exception& e) {
return IEStatusCode::UNEXPECTED;
} catch (...) {
return IEStatusCode::UNEXPECTED;
}

View File

@ -599,7 +599,7 @@ void InferenceEnginePython::IECore::registerPlugins(const std::string &xmlConfig
}
void InferenceEnginePython::IECore::addExtension(const std::string &ext_lib_path, const std::string &deviceName) {
auto extension_ptr = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(ext_lib_path);
auto extension_ptr = std::make_shared<InferenceEngine::Extension>(ext_lib_path);
auto extension = std::dynamic_pointer_cast<InferenceEngine::IExtension>(extension_ptr);
actual.AddExtension(extension, deviceName);
}

View File

@ -18,6 +18,7 @@
#include "ie_iinfer_request.hpp"
#include "details/ie_exception_conversion.hpp"
#include "details/ie_so_loader.h"
#include "ie_blob.h"
namespace InferenceEngine {

View File

@ -1,47 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
/**
* @brief A header file for the Inference Engine plugins destruction mechanism
*
* @file ie_irelease.hpp
*/
#pragma once
#include <memory>
#include "ie_api.h"
#include "ie_no_copy.hpp"
namespace InferenceEngine {
namespace details {
/**
* @brief This class is used for objects allocated by a shared module (in *.so)
*/
class IRelease : public no_copy {
public:
/**
* @brief Releases current allocated object and all related resources.
* Once this method is called, the pointer to this interface is no longer valid
*/
virtual void Release() noexcept = 0;
protected:
/**
* @brief Default destructor
*/
~IRelease() override = default;
};
template <class T>
inline std::shared_ptr<T> shared_from_irelease(T* ptr) {
std::shared_ptr<T> pointer(ptr, [](IRelease* p) {
if (p)
p->Release();
});
return pointer;
}
} // namespace details
} // namespace InferenceEngine
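With this header gone, the ownership pattern it enabled is expressed directly with `std::shared_ptr`. A minimal before/after sketch, using a hypothetical `Impl` type:

```cpp
#include <memory>

struct Impl {
    void doWork() {}
};

int main() {
    // Before: details::shared_from_irelease(new Impl()) installed a custom
    // deleter that called Release() on the raw pointer.
    // After: plain shared_ptr ownership; the destructor runs through the
    // default deleter when the last reference goes away.
    std::shared_ptr<Impl> obj = std::make_shared<Impl>();
    obj->doWork();
    return 0;
}
```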

View File

@ -1,25 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
/**
* @brief Utility header file. Provides no release base class
*
* @file ie_no_release.hpp
*/
#pragma once
namespace InferenceEngine {
namespace details {
/**
* @brief prevent Release method from being called on specific objects
*/
template <class T>
class NoReleaseOn : public T {
private:
void Release() noexcept = 0;
};
} // namespace details
} // namespace InferenceEngine

View File

@ -19,7 +19,7 @@ namespace details {
/*
* @brief This is a helper class to wrap external memory
*/
class PreAllocator : public IAllocator {
class PreAllocator final : public IAllocator {
void* _actualData;
size_t _sizeInBytes;
@ -59,17 +59,6 @@ public:
bool free(void*) noexcept override { // NOLINT
return false;
}
/**
* @brief Deletes current allocator.
* Can be used if a shared_from_irelease pointer is used
*/
void Release() noexcept override {
delete this;
}
protected:
virtual ~PreAllocator() = default;
};
/**
@ -80,7 +69,7 @@ protected:
*/
template <class T>
std::shared_ptr<IAllocator> make_pre_allocator(T* ptr, size_t size) {
return shared_from_irelease(new PreAllocator(ptr, size * sizeof(T)));
return std::make_shared<PreAllocator>(ptr, size * sizeof(T));
}
} // namespace details
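A sketch of how the simplified helper is used; the include path is assumed, and a plain float buffer stands in for real external memory:

```cpp
#include <details/ie_pre_allocator.hpp>  // include path assumed
#include <memory>
#include <vector>

int main() {
    std::vector<float> buffer(1024);
    // make_pre_allocator now returns std::make_shared<PreAllocator>(...),
    // so the wrapper is destroyed by the default deleter; no Release()
    // round-trip through shared_from_irelease is involved.
    std::shared_ptr<InferenceEngine::IAllocator> allocator =
        InferenceEngine::details::make_pre_allocator(buffer.data(), buffer.size());
    return 0;
}
```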

View File

@ -16,60 +16,9 @@
#include "ie_common.h"
#include "ie_so_loader.h"
#include "details/ie_exception.hpp"
#include "details/ie_no_release.hpp"
#include "details/ie_irelease.hpp"
namespace InferenceEngine {
namespace details {
/**
* @brief This class is a C++ helper to load a symbol from a library and create its instance
*/
template <class Loader>
class SymbolLoader {
private:
std::shared_ptr<Loader> _so_loader;
public:
/**
* @brief The main constructor
* @param loader Library to load from
*/
explicit SymbolLoader(std::shared_ptr<Loader> loader): _so_loader(loader) {
if (_so_loader == nullptr) {
THROW_IE_EXCEPTION << "SymbolLoader cannot be created with nullptr";
}
}
/**
* @brief Calls a function from the library that creates an object and returns StatusCode
* @param name Name of function to load object with
* @return If StatusCode provided by function is OK then returns the loaded object. Throws an exception otherwise
*/
template <class T>
T* instantiateSymbol(const std::string& name) const {
T* instance = nullptr;
ResponseDesc desc;
StatusCode sts = bind_function<StatusCode(T*&, ResponseDesc*)>(name)(instance, &desc);
if (sts != OK) {
THROW_IE_EXCEPTION << desc.msg;
}
return instance;
}
private:
/**
* @brief Loads function from the library and returns a pointer to it
* @param functionName Name of function to load
* @return The loaded function
*/
template <class T>
std::function<T> bind_function(const std::string& functionName) const {
std::function<T> ptr(reinterpret_cast<T*>(_so_loader->get_symbol(functionName.c_str())));
return ptr;
}
};
/**
* @brief This class is a trait class that provides a creator with a function name corresponding to the templated class
* parameter
@ -93,6 +42,13 @@ template <class T, class Loader = SharedObjectLoader>
class SOPointer {
template <class U, class W>
friend class SOPointer;
IE_SUPPRESS_DEPRECATED_START
struct HasRelease {
template <typename C> static char test(decltype(&C::Release));
template <typename C> static long test(...);
constexpr static const bool value = sizeof(test<T>(nullptr)) == sizeof(char);
};
IE_SUPPRESS_DEPRECATED_END
public:
/**
@ -107,22 +63,22 @@ public:
template <typename C,
typename = enableIfSupportedChar<C>>
explicit SOPointer(const std::basic_string<C> & name)
: _so_loader(new Loader(name.c_str())),
_pointedObj(details::shared_from_irelease(
SymbolLoader<Loader>(_so_loader).template instantiateSymbol<T>(SOCreatorTrait<T>::name))) {}
: _so_loader(new Loader(name.c_str())) {
Load(std::integral_constant<bool, HasRelease::value>{});
}
/**
* @brief The main constructor
* @param name Name of a shared library file
*/
explicit SOPointer(const char * name)
: _so_loader(new Loader(name)),
_pointedObj(details::shared_from_irelease(
SymbolLoader<Loader>(_so_loader).template instantiateSymbol<T>(SOCreatorTrait<T>::name))) {}
: _so_loader(new Loader(name)) {
Load(std::integral_constant<bool, HasRelease::value>{});
}
/**
* @brief Constructs an object with existing reference
* @param pointedObj Existing reference to wrap
* @param pointedObj Existing reference to wrap
*/
explicit SOPointer(T* pointedObj): _so_loader(), _pointedObj(pointedObj) {
if (_pointedObj == nullptr) {
@ -134,10 +90,10 @@ public:
* @brief Constructs an object with existing loader
* @param so_loader Existing pointer to a library loader
*/
explicit SOPointer(std::shared_ptr<Loader> so_loader)
: _so_loader(so_loader),
_pointedObj(details::shared_from_irelease(
SymbolLoader<Loader>(_so_loader).template instantiateSymbol<T>(SOCreatorTrait<T>::name))) {}
explicit SOPointer(const std::shared_ptr<Loader>& so_loader)
: _so_loader(so_loader) {
Load(std::integral_constant<bool, HasRelease::value>{});
}
/**
* @brief The copy-like constructor, can create So Pointer that dereferenced into child type if T is derived of U
@ -147,24 +103,22 @@ public:
SOPointer(const SOPointer<U, W>& that)
: _so_loader(std::dynamic_pointer_cast<Loader>(that._so_loader)),
_pointedObj(std::dynamic_pointer_cast<T>(that._pointedObj)) {
if (_pointedObj == nullptr) {
THROW_IE_EXCEPTION << "Cannot create object from SOPointer<U, W> reference";
}
IE_ASSERT(_pointedObj != nullptr);
}
/**
* @brief Standard pointer operator
* @return underlying interface
*/
details::NoReleaseOn<T>* operator->() const noexcept {
return reinterpret_cast<details::NoReleaseOn<T>*>(_pointedObj.get());
T* operator->() const noexcept {
return _pointedObj.get();
}
/**
* @brief Standard dereference operator
* @return underlying interface
*/
details::NoReleaseOn<T>* operator*() const noexcept {
const T* operator*() const noexcept {
return this->operator->();
}
@ -196,6 +150,62 @@ public:
}
protected:
/**
* @brief Implements load of object from library if Release method is presented
*/
void Load(std::true_type) {
try {
void* create = nullptr;
try {
create = _so_loader->get_symbol((SOCreatorTrait<T>::name + std::string("Shared")).c_str());
} catch (const details::InferenceEngineException& ex) {
if ((ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) == NOT_FOUND) {
create = nullptr;
} else {
throw;
}
}
if (create == nullptr) {
create = _so_loader->get_symbol(SOCreatorTrait<T>::name);
using CreateF = StatusCode(T*&, ResponseDesc*);
T* object = nullptr;
ResponseDesc desc;
StatusCode sts = reinterpret_cast<CreateF*>(create)(object, &desc);
if (sts != OK) {
THROW_IE_EXCEPTION << as_status << sts << desc.msg;
}
IE_SUPPRESS_DEPRECATED_START
_pointedObj = std::shared_ptr<T>(object, [] (T* ptr){ptr->Release();});
IE_SUPPRESS_DEPRECATED_END
} else {
using CreateF = void(std::shared_ptr<T>&);
reinterpret_cast<CreateF*>(create)(_pointedObj);
}
} catch (const InferenceEngineException& ex) {
THROW_IE_EXCEPTION << as_status << (ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) << ex.what();
} catch (const std::exception& ex) {
THROW_IE_EXCEPTION << as_status << GENERAL_ERROR << ex.what();
} catch(...) {
THROW_IE_EXCEPTION << as_status << UNEXPECTED << "[ UNEXPECTED ] ";
}
}
/**
* @brief Implements load of object from library
*/
void Load(std::false_type) {
try {
using CreateF = void(std::shared_ptr<T>&);
reinterpret_cast<CreateF*>(_so_loader->get_symbol(SOCreatorTrait<T>::name))(_pointedObj);
} catch (const InferenceEngineException& ex) {
THROW_IE_EXCEPTION << as_status << (ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) << ex.what();
} catch (const std::exception& ex) {
THROW_IE_EXCEPTION << as_status << GENERAL_ERROR << ex.what();
} catch(...) {
THROW_IE_EXCEPTION << as_status << UNEXPECTED << "[ UNEXPECTED ] ";
}
}
/**
* @brief Gets a smart pointer to the DLL
*/
@ -206,19 +216,5 @@ protected:
*/
std::shared_ptr<T> _pointedObj;
};
} // namespace details
/**
* @brief Creates a special shared_pointer wrapper for the given type from a specific shared module
* @tparam T An type of object SOPointer can hold
* @param name Name of the shared library file
* @return A created object
*/
template <class T>
inline std::shared_ptr<T> make_so_pointer(const std::string & name) = delete;
template <class T>
inline std::shared_ptr<T> make_so_pointer(const std::wstring & name) = delete;
} // namespace InferenceEngine
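The `HasRelease` trait added above is a classic member-detection idiom driving tag dispatch between the two `Load` overloads. A self-contained sketch of its behavior:

```cpp
// Overload resolution prefers the char-returning test() when &C::Release is
// well-formed, so value is true only for types that still declare the
// deprecated Release() method; SOPointer then picks Load(std::true_type).
template <class T>
struct HasRelease {
    template <typename C> static char test(decltype(&C::Release));
    template <typename C> static long test(...);
    constexpr static const bool value = sizeof(test<T>(nullptr)) == sizeof(char);
};

struct Legacy { void Release() noexcept {} };
struct Modern {};

static_assert(HasRelease<Legacy>::value, "Legacy exposes Release()");
static_assert(!HasRelease<Modern>::value, "Modern does not");

int main() { return 0; }
```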

View File

@ -10,7 +10,7 @@
#pragma once
#include "ie_api.h"
#include "details/ie_irelease.hpp"
#include <memory>
namespace InferenceEngine {
@ -26,7 +26,7 @@ enum LockOp {
* @interface IAllocator
* @brief Allocator concept to be used for memory management and is used as part of the Blob.
*/
class IAllocator : public details::IRelease {
class IAllocator : public std::enable_shared_from_this<IAllocator> {
public:
/**
* @brief Maps handle to heap memory accessible by any memory manipulation routines.
@ -60,10 +60,7 @@ public:
virtual bool free(void* handle) noexcept = 0;
protected:
/**
* @brief Disables the ability of deleting the object without release.
*/
~IAllocator() override = default;
~IAllocator() = default;
};
/**
@ -71,6 +68,6 @@ protected:
*
* @return The Inference Engine IAllocator* instance
*/
INFERENCE_ENGINE_API(InferenceEngine::IAllocator*) CreateDefaultAllocator() noexcept;
INFERENCE_ENGINE_API_CPP(std::shared_ptr<InferenceEngine::IAllocator>) CreateDefaultAllocator() noexcept;
} // namespace InferenceEngine

View File

@ -779,7 +779,7 @@ protected:
const std::shared_ptr<IAllocator>& getAllocator() const noexcept override {
// in case when constructor without allocator was used
if (!_allocator) {
_allocator = shared_from_irelease(CreateDefaultAllocator());
_allocator = CreateDefaultAllocator();
}
return _allocator;

View File

@ -38,7 +38,7 @@ public:
/**
* @brief This class is a C++ helper to work with objects created using extensions.
*/
class INFERENCE_ENGINE_API_CLASS(Extension) : public IExtension {
class INFERENCE_ENGINE_API_CLASS(Extension) final : public IExtension {
public:
/**
* @brief Loads extension from a shared library
@ -65,11 +65,6 @@ public:
actual->Unload();
}
/**
* @brief Does nothing since destruction is done via the regular mechanism
*/
void Release() noexcept override {}
/**
* @brief Returns operation sets
* This method throws an exception if it was not implemented
@ -106,23 +101,29 @@ protected:
};
/**
* @brief Creates a special shared_pointer wrapper for the given type from a specific shared module
*
* @param name A std::string name of the shared library file
* @return shared_pointer A wrapper for the given type from a specific shared module
* @brief Creates extension using deprecated API
* @tparam T extension type
* @param name extension library name
* @return shared pointer to extension
*/
template <>
inline std::shared_ptr<IExtension> make_so_pointer(const std::string& name) {
template<typename T = IExtension>
INFERENCE_ENGINE_DEPRECATED("Use std::make_shared<Extension>")
inline std::shared_ptr<T> make_so_pointer(const std::string& name) {
return std::make_shared<Extension>(name);
}
#ifdef ENABLE_UNICODE_PATH_SUPPORT
template <>
/**
* @brief Creates extension using deprecated API
* @param name extension library name
* @return shared pointer to extension
*/
template<typename T = IExtension>
INFERENCE_ENGINE_DEPRECATED("Use std::make_shared<Extension>")
inline std::shared_ptr<T> make_so_pointer(const std::wstring& name) {
return std::make_shared<Extension>(name);
}
#endif
} // namespace InferenceEngine

View File

@ -17,7 +17,6 @@
#include "ie_common.h"
#include "ie_data.h"
#include "ie_input_info.hpp"
#include "details/ie_irelease.hpp"
#if defined IMPLEMENT_INFERENCE_ENGINE_API || defined IMPLEMENT_INFERENCE_ENGINE_PLUGIN || 1
# define INFERENCE_ENGINE_ICNNNETWORK_CLASS(...) INFERENCE_ENGINE_API_CLASS(__VA_ARGS__)
@ -45,7 +44,7 @@ using OutputsDataMap = std::map<std::string, DataPtr>;
* @interface ICNNNetwork
* @brief This is the main interface to describe the NN topology
*/
class INFERENCE_ENGINE_ICNNNETWORK_CLASS(ICNNNetwork) : public details::IRelease {
class INFERENCE_ENGINE_ICNNNETWORK_CLASS(ICNNNetwork): public std::enable_shared_from_this<ICNNNetwork> {
public:
/**
* @brief A shared pointer to a ICNNNetwork interface
@ -200,9 +199,10 @@ public:
return NOT_IMPLEMENTED;
}
protected:
/**
* @brief A virtual destructor.
* @brief Default destructor.
*/
virtual ~ICNNNetwork();
~ICNNNetwork() = default;
};
} // namespace InferenceEngine

View File

@ -32,7 +32,7 @@ using ConstOutputsDataMap = std::map<std::string, CDataPtr>;
/**
* @brief This is an interface of an executable network
*/
class IExecutableNetwork : public details::IRelease {
class IExecutableNetwork : public std::enable_shared_from_this<IExecutableNetwork> {
public:
/**
* @brief A smart pointer to the current IExecutableNetwork object
@ -173,6 +173,9 @@ public:
* @return code of the operation. InferenceEngine::OK if succeeded
*/
virtual StatusCode GetContext(RemoteContext::Ptr& pContext, ResponseDesc* resp) const noexcept = 0;
protected:
~IExecutableNetwork() = default;
};
} // namespace InferenceEngine
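The pattern here, and in the other IRelease-derived interfaces this commit touches, relies on `std::shared_ptr` capturing the concrete deleter at construction time; that is why the virtual destructors can become protected non-virtual ones. A minimal demonstration, independent of Inference Engine types:

```cpp
#include <memory>

struct Base {
    virtual void work() noexcept = 0;
protected:
    ~Base() = default;  // non-virtual and non-public, yet still safe below
};

struct Impl : Base {
    void work() noexcept override {}
    ~Impl() = default;  // invoked via the control block, not via ~Base()
};

int main() {
    // make_shared records "destroy the Impl" in the control block, so the
    // derived destructor runs even through a shared_ptr<Base>.
    std::shared_ptr<Base> obj = std::make_shared<Impl>();
    obj->work();
    return 0;
}   // ~Impl() runs here
```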

View File

@ -25,7 +25,6 @@
* @def INFERENCE_EXTENSION_API(TYPE)
* @brief Defines Inference Engine Extension API method
*/
#if defined(_WIN32) && defined(IMPLEMENT_INFERENCE_EXTENSION_API)
#define INFERENCE_EXTENSION_API(TYPE) extern "C" __declspec(dllexport) TYPE
#else
@ -146,7 +145,7 @@ public:
/**
* @brief This class is the main extension interface
*/
class INFERENCE_ENGINE_API_CLASS(IExtension) : public InferenceEngine::details::IRelease {
class INFERENCE_ENGINE_API_CLASS(IExtension) : public std::enable_shared_from_this<IExtension> {
public:
/**
* @brief Returns operation sets
@ -187,6 +186,17 @@ public:
* @param versionInfo Pointer to version info, will be set by plugin
*/
virtual void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept = 0;
/**
* @brief Implements deprecated API
*/
INFERENCE_ENGINE_DEPRECATED("Do not override or use this method. Use IE_DEFINE_EXTENSION_CREATE_FUNCTION to export extension")
virtual void Release() noexcept {
delete this;
}
protected:
virtual ~IExtension() = default;
};
/**
@ -198,9 +208,31 @@ using IExtensionPtr = std::shared_ptr<IExtension>;
* @brief Creates the default instance of the extension
*
* @param ext Extension interface
* @param resp Response description
* @return Status code
*/
INFERENCE_EXTENSION_API(StatusCode) CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept;
INFERENCE_EXTENSION_API(void) CreateExtensionShared(IExtensionPtr& ext);
/**
* @note: Deprecated API
* @brief Creates the default instance of the extension
* @param ext Extension interface
* @param resp Response description
* @return InferenceEngine::OK if extension is constructed and InferenceEngine::GENERAL_ERROR otherwise
*/
#if defined(_WIN32)
INFERENCE_ENGINE_DEPRECATED("Use IE_DEFINE_EXTENSION_CREATE_FUNCTION macro")
INFERENCE_EXTENSION_API(StatusCode)
CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept;
#else
INFERENCE_EXTENSION_API(StatusCode)
CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept INFERENCE_ENGINE_DEPRECATED("Use IE_DEFINE_EXTENSION_CREATE_FUNCTION macro");
#endif
/**
* @def IE_DEFINE_EXTENSION_CREATE_FUNCTION
* @brief Generates extension creation function
*/
#define IE_DEFINE_EXTENSION_CREATE_FUNCTION(ExtensionType) \
INFERENCE_EXTENSION_API(void) InferenceEngine::CreateExtensionShared(std::shared_ptr<InferenceEngine::IExtension>& ext) { \
ext = std::make_shared<ExtensionType>(); \
}
} // namespace InferenceEngine
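For extension authors, this macro is the whole migration story. A hedged sketch of an extension source file, with `MyExtension` as a hypothetical implementation (only the pure-virtual methods are shown):

```cpp
#include <ie_iextension.h>

// Hypothetical extension; real ones also override getOpSets() and friends.
class MyExtension : public InferenceEngine::IExtension {
public:
    void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {
        versionInfo = nullptr;
    }
    void Unload() noexcept override {}
};

// Expands to CreateExtensionShared(std::shared_ptr<IExtension>&), the new
// entry point SOPointer resolves before falling back to the deprecated
// StatusCode-based CreateExtension.
IE_DEFINE_EXTENSION_CREATE_FUNCTION(MyExtension)
```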

View File

@ -18,7 +18,6 @@
#include "ie_common.h"
#include "ie_preprocess.hpp"
#include "ie_imemory_state.hpp"
#include "details/ie_irelease.hpp"
namespace InferenceEngine {
@ -26,7 +25,7 @@ namespace InferenceEngine {
* @brief This is an interface of asynchronous infer request
*
*/
class IInferRequest : public details::IRelease {
class IInferRequest : public std::enable_shared_from_this<IInferRequest> {
public:
/**
* @enum WaitMode
@ -198,7 +197,9 @@ public:
* given index
*/
virtual StatusCode QueryState(IVariableState::Ptr& pState, size_t idx, ResponseDesc* resp) noexcept = 0;
IE_SUPPRESS_DEPRECATED_END
protected:
~IInferRequest() = default;
};
} // namespace InferenceEngine

View File

@ -165,7 +165,7 @@ int main(int argc, char *argv[]) {
Core ie;
if (FLAGS_d.find("CPU") != std::string::npos && !FLAGS_l.empty()) {
// CPU (MKLDNN) extension is loaded as a shared library and passed as a pointer to the base extension
const auto extension_ptr = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(FLAGS_l);
const auto extension_ptr = std::make_shared<InferenceEngine::Extension>(FLAGS_l);
ie.AddExtension(extension_ptr);
slog::info << "CPU (MKLDNN) extension is loaded " << FLAGS_l << slog::endl;
}

View File

@ -78,7 +78,7 @@ int main(int argc, char *argv[]) {
if (!FLAGS_l.empty()) {
// CPU (MKLDNN) extension is loaded as a shared library and passed as a pointer to the base extension
IExtensionPtr extension_ptr = make_so_pointer<IExtension>(FLAGS_l);
IExtensionPtr extension_ptr = std::make_shared<Extension>(FLAGS_l);
ie.AddExtension(extension_ptr);
slog::info << "CPU Extension loaded: " << FLAGS_l << slog::endl;
}

View File

@ -43,10 +43,6 @@ public:
return _width * _height * 1;
}
void Release() noexcept override {
delete this;
}
std::shared_ptr<unsigned char> getData(size_t width, size_t height) override {
if ((width * height != 0) && (_width * _height != width * height)) {
std::cout << "[ WARNING ] Image won't be resized! Please use OpenCV.\n";

View File

@ -60,10 +60,6 @@ public:
return _width * _height * 3;
}
void Release() noexcept override {
delete this;
}
std::shared_ptr<unsigned char> getData(size_t width, size_t height) override {
if ((width * height != 0) && (_width * _height != width * height)) {
std::cout << "[ WARNING ] Image won't be resized! Please use OpenCV.\n";

View File

@ -23,7 +23,7 @@ Reader *Registry::CreateReader(const char *filename) {
for (auto maker : _data) {
Reader *ol = maker(filename);
if (ol != nullptr && ol->size() != 0) return ol;
if (ol != nullptr) ol->Release();
if (ol != nullptr) delete ol;
}
return nullptr;
}

View File

@ -45,6 +45,8 @@ protected:
std::shared_ptr<unsigned char> _data;
public:
virtual ~Reader() = default;
/**
* \brief Get width
* @return width
@ -69,8 +71,6 @@ public:
* @return size
*/
virtual size_t size() const = 0;
virtual void Release() noexcept = 0;
};
} // namespace FormatReader

View File

@ -15,10 +15,7 @@
namespace FormatReader {
class ReaderPtr {
public:
explicit ReaderPtr(const char *imageName) : reader(CreateFormatReader(imageName),
[](Reader *p) {
p->Release();
}) {}
explicit ReaderPtr(const char *imageName) : reader(CreateFormatReader(imageName)) {}
/**
* @brief dereference operator overload
* @return Reader
@ -40,6 +37,6 @@ public:
}
protected:
std::unique_ptr<Reader, std::function<void(Reader *)>> reader;
std::unique_ptr<Reader> reader;
};
} // namespace FormatReader
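The same simplification in miniature: once `Release()` is gone, `std::unique_ptr`'s default deleter suffices. A standalone sketch with a stand-in factory:

```cpp
#include <memory>

struct Reader {
    // image accessors elided
};

// Stand-in for FormatReader::CreateFormatReader.
Reader* CreateFormatReader(const char*) { return new Reader(); }

int main() {
    // Before: std::unique_ptr<Reader, std::function<void(Reader*)>> with a
    // deleter calling p->Release(). After: plain unique_ptr, default delete.
    std::unique_ptr<Reader> reader{CreateFormatReader("image.bmp")};
    return 0;
}
```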

View File

@ -46,10 +46,6 @@ public:
return _size;
}
void Release() noexcept override {
delete this;
}
std::shared_ptr<unsigned char> getData(size_t width, size_t height) override;
};
} // namespace FormatReader

View File

@ -131,8 +131,6 @@ public:
void Unload() noexcept override {}
void Release() noexcept override {}
std::vector<std::string> getImplTypes(const std::shared_ptr<ngraph::Node>& node) override {
if (impls.find(node->description()) == impls.end())
return {};

View File

@ -87,7 +87,7 @@ int main(int argc, char *argv[]) {
if (!FLAGS_l.empty()) {
// CPU (MKLDNN) extension is loaded as a shared library and passed as a pointer to the base extension
IExtensionPtr extension_ptr = make_so_pointer<IExtension>(FLAGS_l);
IExtensionPtr extension_ptr = std::make_shared<Extension>(FLAGS_l);
ie.AddExtension(extension_ptr);
slog::info << "CPU Extension loaded: " << FLAGS_l << slog::endl;
}

View File

@ -70,7 +70,7 @@ int main(int argc, char *argv[]) {
if (!FLAGS_l.empty()) {
// CPU (MKLDNN) extension is loaded as a shared library and passed as a pointer to the base extension
IExtensionPtr extension_ptr = make_so_pointer<IExtension>(FLAGS_l);
IExtensionPtr extension_ptr = std::make_shared<Extension>(FLAGS_l);
ie.AddExtension(extension_ptr);
slog::info << "CPU Extension loaded: " << FLAGS_l << slog::endl;
}

View File

@ -151,7 +151,7 @@ void CLDNNRemoteBlobImpl::allocate() noexcept {
const std::shared_ptr<IAllocator>& CLDNNRemoteBlobImpl::getAllocator() const noexcept {
if (!_allocator) {
_allocator = shared_from_irelease(reinterpret_cast<IAllocator*>(&m_allocator));
_allocator = std::shared_ptr<IAllocator>(&m_allocator, [] (IAllocator*) {});
}
return _allocator;
};
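Unlike the owning cases elsewhere in this commit, this call site needs a non-owning `shared_ptr`: the deleter is a no-op because `m_allocator` is a member whose lifetime the blob already controls. A reduced illustration with stand-in types:

```cpp
#include <memory>

struct Allocator {};

struct RemoteBlob {
    mutable Allocator m_allocator;                 // owned by the blob itself
    mutable std::shared_ptr<Allocator> _allocator;

    const std::shared_ptr<Allocator>& getAllocator() const {
        if (!_allocator) {
            // No-op deleter: the shared_ptr observes m_allocator but never
            // frees it, replacing what shared_from_irelease used to fake.
            _allocator = std::shared_ptr<Allocator>(&m_allocator, [](Allocator*) {});
        }
        return _allocator;
    }
};

int main() {
    RemoteBlob blob;
    auto alloc = blob.getAllocator();  // valid only while blob outlives alloc
    return 0;
}
```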

View File

@ -198,8 +198,6 @@ public:
* @return false if handle cannot be released, otherwise - true.
*/
bool free(void* handle) noexcept override { return true; }
void Release() noexcept override {}
};
class CLDNNExecutionContextImpl : public InferenceEngine::gpu::details::param_map_obj_getter {

View File

@ -36,12 +36,11 @@ namespace details {
/**
* @brief Ngraph-based implementation of the ICNNNetwork interface.
*/
class INFERENCE_ENGINE_API_CLASS(CNNNetworkNGraphImpl): public ICNNNetwork {
class INFERENCE_ENGINE_API_CLASS(CNNNetworkNGraphImpl) final : public ICNNNetwork {
public:
CNNNetworkNGraphImpl(const std::shared_ptr<::ngraph::Function>& nGraph,
const std::vector<IExtensionPtr>& exts = {});
CNNNetworkNGraphImpl(const CNNNetwork& nGraph);
~CNNNetworkNGraphImpl() override = default;
void getOutputsInfo(std::map<std::string, DataPtr>& out) const noexcept override;
@ -63,10 +62,6 @@ public:
void addOutput(const ::ngraph::Output<::ngraph::Node> & dataName);
void Release() noexcept override {
delete this;
}
std::shared_ptr<const ::ngraph::Function> getFunction() const noexcept override {
return _ngraph_function;
}
@ -111,16 +106,5 @@ private:
void reshape();
void reshape(const std::map<std::string, std::vector<size_t>>& inputShapes);
};
class TINGraphBody : public CNNNetworkNGraphImpl {
public:
explicit TINGraphBody(const std::shared_ptr<::ngraph::Function>& func): CNNNetworkNGraphImpl(func) {}
protected:
std::shared_ptr<::ngraph::Function> cloneFunction(bool constFolding) const override {
return _ngraph_function;
}
};
} // namespace details
} // namespace InferenceEngine

View File

@ -10,8 +10,6 @@
namespace InferenceEngine {
ICNNNetwork::~ICNNNetwork() {}
CNNNetwork::CNNNetwork() :
network(), actual(), output() {
}

View File

@ -364,7 +364,7 @@ public:
allowNotImplemented([&]() {
for (auto&& extensionLocation : desc.listOfExtentions) {
plugin.AddExtension(make_so_pointer<IExtension>(extensionLocation));
plugin.AddExtension(std::make_shared<Extension>(extensionLocation));
}
});
}
@ -738,11 +738,10 @@ std::vector<std::string> Core::GetAvailableDevices() const {
for (auto&& deviceName : _impl->GetListOfDevicesInRegistry()) {
std::vector<std::string> devicesIDs;
try {
Parameter p = GetMetric(deviceName, propertyName);
devicesIDs = p.as<std::vector<std::string>>();
} catch (details::InferenceEngineException&) {
} catch (details::InferenceEngineException& e) {
// plugin is not created by e.g. invalid env
} catch (const std::exception& ex) {
THROW_IE_EXCEPTION << "An exception is thrown while trying to create the " << deviceName

View File

@ -62,10 +62,6 @@ class Reader: public IReader {
return const_cast<Reader*>(this)->getReaderPtr();
}
void Release() noexcept override {
delete this;
}
public:
using Ptr = std::shared_ptr<Reader>;
Reader(const std::string& name, const std::string location): name(name), location(location) {}

View File

@ -45,7 +45,8 @@ public:
procAddr = dlsym(shared_object, symbolName);
if (procAddr == nullptr)
THROW_IE_EXCEPTION << "dlSym cannot locate method '" << symbolName << "': " << dlerror();
THROW_IE_EXCEPTION << details::as_status << NOT_FOUND
<< "dlSym cannot locate method '" << symbolName << "': " << dlerror();
return procAddr;
}
};
@ -60,8 +61,7 @@ SharedObjectLoader::SharedObjectLoader(const char * pluginName) {
_impl.reset(new Impl(pluginName));
}
SharedObjectLoader::~SharedObjectLoader() noexcept(false) {
}
SharedObjectLoader::~SharedObjectLoader() noexcept(false) {}
void* SharedObjectLoader::get_symbol(const char* symbolName) const {
return _impl->get_symbol(symbolName);

View File

@ -247,7 +247,8 @@ class SharedObjectLoader::Impl {
}
auto procAddr = reinterpret_cast<void*>(GetProcAddress(shared_object, symbolName));
if (procAddr == nullptr)
THROW_IE_EXCEPTION << "GetProcAddress cannot locate method '" << symbolName << "': " << GetLastError();
THROW_IE_EXCEPTION << details::as_status << NOT_FOUND
<< "GetProcAddress cannot locate method '" << symbolName << "': " << GetLastError();
return procAddr;
}

View File

@ -6,9 +6,9 @@
namespace InferenceEngine {
IAllocator* CreateDefaultAllocator() noexcept {
INFERENCE_ENGINE_API_CPP(std::shared_ptr<IAllocator>) CreateDefaultAllocator() noexcept {
try {
return new SystemMemoryAllocator();
return std::make_shared<SystemMemoryAllocator>();
} catch (...) {
return nullptr;
}

View File

@ -8,12 +8,9 @@
#include "ie_allocator.hpp"
namespace InferenceEngine {
class SystemMemoryAllocator : public InferenceEngine::IAllocator {
public:
void Release() noexcept override {
delete this;
}
void* lock(void* handle, InferenceEngine::LockOp = InferenceEngine::LOCK_FOR_WRITE) noexcept override {
return handle;
}
@ -36,4 +33,6 @@ public:
}
return true;
}
};
};
} // namespace InferenceEngine

View File

@ -25,13 +25,12 @@ namespace details {
IE_SUPPRESS_DEPRECATED_START
class INFERENCE_ENGINE_API_CLASS(CNNNetworkImpl): public ICNNNetwork,
public std::enable_shared_from_this<ICNNNetwork> {
class INFERENCE_ENGINE_API_CLASS(CNNNetworkImpl) final : public ICNNNetwork {
public:
CNNNetworkImpl();
explicit CNNNetworkImpl(const ICNNNetwork & ngraphImpl);
explicit CNNNetworkImpl(const CNNNetwork & ngraphImpl);
~CNNNetworkImpl() override;
~CNNNetworkImpl();
std::shared_ptr<::ngraph::Function> getFunction() noexcept override {
return nullptr;
@ -116,10 +115,6 @@ public:
void removeOutput(const std::string& dataName);
void Release() noexcept override {
delete this;
}
virtual void validate(int = 2);
StatusCode reshape(const std::map<std::string, std::vector<size_t>>& inputShapes,

View File

@ -33,10 +33,6 @@ class ConstAllocatorWrapper : public IAllocator {
public:
explicit ConstAllocatorWrapper(std::shared_ptr<ngraph::op::Constant> constOp): _constOp(std::move(constOp)) {}
void Release() noexcept override {
delete this;
}
void* lock(void* handle, LockOp) noexcept override {
return handle;
}

View File

@ -5,7 +5,8 @@
#pragma once
#include <ie_blob.h>
#include <memory>
#include <details/ie_no_copy.hpp>
#include "mkldnn_memory.h"
#include "mkldnn_dims.h"
#include "mkldnn_weights_cache.hpp"

View File

@ -19,7 +19,7 @@ namespace MKLDNNPlugin {
class Engine : public InferenceEngine::InferencePluginInternal {
public:
Engine();
~Engine() override;
~Engine();
InferenceEngine::ExecutableNetworkInternal::Ptr
LoadExeNetworkImpl(const InferenceEngine::CNNNetwork &network,

View File

@ -83,10 +83,6 @@ public:
void Unload() noexcept override {}
void Release() noexcept override {
delete this;
}
using LayersFactory = openvino::cc::Factory<
std::string,
InferenceEngine::ILayerImplFactory*(const InferenceEngine::CNNLayer*)>;

View File

@ -201,7 +201,7 @@ IInferRequest::Ptr MultiDeviceExecutableNetwork::CreateInferRequest() {
_needPerfCounters,
std::static_pointer_cast<MultiDeviceExecutableNetwork>(shared_from_this()),
_callbackExecutor);
asyncRequest.reset(new InferRequestBase(asyncTreadSafeImpl), [](IInferRequest *p) { p->Release(); });
asyncRequest.reset(new InferRequestBase(asyncTreadSafeImpl));
asyncTreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
return asyncRequest;
}

View File

@ -18,7 +18,7 @@ namespace MultiDevicePlugin {
class MultiDeviceInferencePlugin : public InferenceEngine::InferencePluginInternal {
public:
MultiDeviceInferencePlugin();
~MultiDeviceInferencePlugin() override = default;
~MultiDeviceInferencePlugin() = default;
InferenceEngine::ExecutableNetworkInternal::Ptr LoadExeNetworkImpl(const InferenceEngine::CNNNetwork& network,
const std::map<std::string, std::string>& config) override;

View File

@ -88,10 +88,6 @@ public:
}
IE_SUPPRESS_DEPRECATED_END
void Release() noexcept override {
delete this;
}
StatusCode SetConfig(const std::map<std::string, Parameter>& config, ResponseDesc* resp) noexcept override {
TO_STATUS(_impl->SetConfig(config));
}
@ -107,9 +103,6 @@ public:
StatusCode GetContext(RemoteContext::Ptr& pContext, ResponseDesc* resp) const noexcept override {
TO_STATUS(pContext = _impl->GetContext());
}
protected:
~ExecutableNetworkBase() override = default;
};
IE_SUPPRESS_DEPRECATED_END_WIN
@ -124,9 +117,7 @@ template <class T>
inline typename InferenceEngine::ExecutableNetwork make_executable_network(std::shared_ptr<T> impl) {
// to suppress warning about deprecated QueryState
IE_SUPPRESS_DEPRECATED_START
typename ExecutableNetworkBase::Ptr net(new ExecutableNetworkBase(impl), [](IExecutableNetwork* p) {
p->Release();
});
typename ExecutableNetworkBase::Ptr net(new ExecutableNetworkBase(impl));
IE_SUPPRESS_DEPRECATED_END
return InferenceEngine::ExecutableNetwork(net);
}

View File

@ -84,10 +84,6 @@ public:
TO_STATUS(_impl->SetUserData(data));
}
void Release() noexcept override {
delete this;
}
StatusCode SetBatch(int batch_size, ResponseDesc* resp) noexcept override {
TO_STATUS(_impl->SetBatch(batch_size));
}
@ -108,9 +104,6 @@ public:
}
}
IE_SUPPRESS_DEPRECATED_END
private:
~InferRequestBase() = default;
};
} // namespace InferenceEngine

View File

@ -39,9 +39,7 @@ public:
auto asyncRequestImpl = this->CreateAsyncInferRequestImpl(_networkInputs, _networkOutputs);
asyncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this());
asyncRequest.reset(new InferRequestBase(asyncRequestImpl), [](IInferRequest* p) {
p->Release();
});
asyncRequest.reset(new InferRequestBase(asyncRequestImpl));
asyncRequestImpl->SetPointerToPublicInterface(asyncRequest);
return asyncRequest;
}

View File

@ -62,15 +62,12 @@ protected:
*/
template <typename AsyncInferRequestType = AsyncInferRequestThreadSafeDefault>
IInferRequest::Ptr CreateAsyncInferRequestFromSync() {
IInferRequest::Ptr asyncRequest;
auto syncRequestImpl = this->CreateInferRequestImpl(_networkInputs, _networkOutputs);
syncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this());
auto asyncThreadSafeImpl = std::make_shared<AsyncInferRequestType>(
syncRequestImpl, _taskExecutor, _callbackExecutor);
asyncRequest.reset(new InferRequestBase(asyncThreadSafeImpl),
[](IInferRequest *p) { p->Release(); });
IInferRequest::Ptr asyncRequest = std::make_shared<InferRequestBase>(asyncThreadSafeImpl);
asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
return asyncRequest;

View File

@ -47,12 +47,6 @@ static inline void parsePluginName(std::istream& networkModel) {
* @ingroup ie_dev_api_plugin_api
*/
class InferencePluginInternal : public IInferencePlugin {
protected:
/**
* @brief Destroys the object.
*/
~InferencePluginInternal() override = default;
public:
ExecutableNetwork LoadNetwork(const CNNNetwork& network,
const std::map<std::string, std::string>& config) override {

View File

@ -26,11 +26,6 @@ public:
*/
typedef std::shared_ptr<IAsyncInferRequestInternal> Ptr;
/**
* @brief A virtual destructor
*/
virtual ~IAsyncInferRequestInternal() = default;
/**
* @brief Start inference of specified input(s) in asynchronous mode
* @note The method returns immediately. Inference starts also immediately.

View File

@ -83,8 +83,7 @@ inline void copyInputOutputInfo(const InputsDataMap & networkInputs, const Outpu
* @brief An API of plugin to be implemented by a plugin
* @ingroup ie_dev_api_plugin_api
*/
class IInferencePlugin : public details::IRelease,
public std::enable_shared_from_this<IInferencePlugin> {
class IInferencePlugin : public std::enable_shared_from_this<IInferencePlugin> {
class VersionStore : public Version {
std::string _dsc;
std::string _buildNumber;
@ -112,12 +111,6 @@ class IInferencePlugin : public details::IRelease,
}
} _version;
protected:
/**
* @brief Destroys the object.
*/
~IInferencePlugin() override = default;
public:
/**
* @brief A shared pointer to IInferencePlugin interface
@ -140,10 +133,6 @@ public:
return _version;
}
void Release() noexcept override {
delete this;
}
/**
* @brief Provides a name of a plugin
* @return The name.
@ -271,6 +260,9 @@ public:
* @return The result of query operator containing supported layers map
*/
virtual QueryNetworkResult QueryNetwork(const CNNNetwork& network, const std::map<std::string, std::string>& config) const = 0;
protected:
~IInferencePlugin() = default;
};
} // namespace InferenceEngine
@ -280,16 +272,16 @@ public:
* @brief Defines the exported `CreatePluginEngine` function which is used to create a plugin instance
* @ingroup ie_dev_api_plugin_api
*/
#define IE_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) \
INFERENCE_PLUGIN_API(InferenceEngine::StatusCode) CreatePluginEngine( \
InferenceEngine::IInferencePlugin *&plugin, \
InferenceEngine::ResponseDesc *resp) noexcept { \
try { \
plugin = new PluginType(__VA_ARGS__); \
plugin->SetVersion(version); \
return InferenceEngine::OK; \
} \
catch (std::exception &ex) { \
return InferenceEngine::DescriptionBuffer(InferenceEngine::GENERAL_ERROR, resp) << ex.what(); \
} \
#define IE_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) \
INFERENCE_PLUGIN_API(void) CreatePluginEngine(::std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) { \
try { \
plugin = ::std::make_shared<PluginType>(__VA_ARGS__); \
} catch (const InferenceEngine::details::InferenceEngineException& e) { \
throw; \
} catch (const std::exception& ex) { \
THROW_IE_EXCEPTION << ex.what(); \
} catch (...) { \
THROW_IE_EXCEPTION_WITH_STATUS(UNEXPECTED); \
} \
plugin->SetVersion(version); \
}
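In shape, the reworked macro and its caller reduce to the following self-contained sketch; `IInferencePlugin` and `MockPlugin` here are simplified stand-ins for the real interfaces, and taking the function's address stands in for `get_symbol("CreatePluginEngine")`:

```cpp
#include <memory>
#include <string>

// Reduced stand-ins for the real interfaces.
struct IInferencePlugin {
    virtual std::string name() const = 0;
protected:
    ~IInferencePlugin() = default;
};
struct MockPlugin : IInferencePlugin {
    std::string name() const override { return "mock"; }
};

// What IE_DEFINE_PLUGIN_CREATE_FUNCTION(MockPlugin, ...) expands to, in shape:
// the exported function fills a shared_ptr and throws on failure, instead of
// returning a StatusCode through a ResponseDesc.
void CreatePluginEngine(std::shared_ptr<IInferencePlugin>& plugin) {
    plugin = std::make_shared<MockPlugin>();
}

int main() {
    using CreateF = void(std::shared_ptr<IInferencePlugin>&);
    CreateF* factory = &CreatePluginEngine;  // stands in for get_symbol()
    std::shared_ptr<IInferencePlugin> plugin;
    factory(plugin);
    return plugin->name() == "mock" ? 0 : 1;
}
```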

View File

@ -36,18 +36,13 @@ public:
void execute(Blob::Ptr &preprocessedBlob, const PreProcessInfo &info, bool serial, int batchSize = -1) override;
void Release() noexcept override;
void isApplicable(const Blob::Ptr &src, const Blob::Ptr &dst) override;
};
StatusCode CreatePreProcessData(IPreProcessData *& data, ResponseDesc * /*resp*/) noexcept {
data = new PreProcessData();
return StatusCode::OK;
}
INFERENCE_PRERPOC_PLUGIN_API(void) CreatePreProcessData(std::shared_ptr<IPreProcessData>& data);
void PreProcessData::Release() noexcept {
delete this;
INFERENCE_PRERPOC_PLUGIN_API(void) CreatePreProcessData(std::shared_ptr<IPreProcessData>& data) {
data = std::make_shared<PreProcessData>();
}
void PreProcessData::setRoiBlob(const Blob::Ptr &blob) {

View File

@ -31,7 +31,7 @@ namespace InferenceEngine {
/**
* @brief This class stores pre-process information for exact input
*/
class IPreProcessData : public details::IRelease {
class IPreProcessData : public std::enable_shared_from_this<IPreProcessData> {
public:
/**
* @brief Sets ROI blob to be resized and placed to the default input blob during pre-processing.
@ -58,9 +58,12 @@ public:
//FIXME: rename to verifyAplicable
virtual void isApplicable(const Blob::Ptr &src, const Blob::Ptr &dst) = 0;
protected:
~IPreProcessData() = default;
};
INFERENCE_PRERPOC_PLUGIN_API(StatusCode) CreatePreProcessData(IPreProcessData *& data, ResponseDesc *resp) noexcept;
INFERENCE_PRERPOC_PLUGIN_API(void) CreatePreProcessData(std::shared_ptr<IPreProcessData>& data);
namespace details {

View File

@ -48,12 +48,6 @@ CNNNetwork IRReader::read(std::istream& model, const Blob::CPtr& weights, const
return CNNNetwork(parser.parse(root, weights));
}
INFERENCE_PLUGIN_API(StatusCode) InferenceEngine::CreateReader(IReader*& reader, ResponseDesc *resp) noexcept {
try {
reader = new IRReader();
return OK;
}
catch (std::exception &) {
return GENERAL_ERROR;
}
INFERENCE_PLUGIN_API(void) InferenceEngine::CreateReader(std::shared_ptr<IReader>& reader) {
reader = std::make_shared<IRReader>();
}

View File

@ -32,9 +32,6 @@ namespace InferenceEngine {
*/
class IRReader: public IReader {
public:
void Release() noexcept override {
delete this;
}
/**
* @brief Checks that reader supports format of the model
* @param model stream with model

View File

@ -65,12 +65,6 @@ CNNNetwork ONNXReader::read(std::istream& model, const std::vector<IExtensionPtr
return CNNNetwork(ngraph::onnx_import::import_onnx_model(model, readPathFromStream(model)), exts);
}
INFERENCE_PLUGIN_API(StatusCode) InferenceEngine::CreateReader(IReader*& reader, ResponseDesc *resp) noexcept {
try {
reader = new ONNXReader();
return OK;
}
catch (std::exception &) {
return GENERAL_ERROR;
}
INFERENCE_PLUGIN_API(void) InferenceEngine::CreateReader(std::shared_ptr<IReader>& reader) {
reader = std::make_shared<ONNXReader>();
}

View File

@ -10,9 +10,6 @@ namespace InferenceEngine {
class ONNXReader: public IReader {
public:
void Release() noexcept override {
delete this;
}
/**
* @brief Checks that reader supports format of the model
* @param model stream with model

View File

@ -4,7 +4,6 @@
#pragma once
#include <details/ie_irelease.hpp>
#include <cpp/ie_cnn_network.h>
#include <ie_iextension.h>
#include <istream>
@ -17,7 +16,7 @@ namespace InferenceEngine {
/**
* @brief IReader an abstract interface for Inference Engine readers
*/
class IReader: public details::IRelease {
class IReader: public std::enable_shared_from_this<IReader> {
public:
/**
* @brief Checks that reader supports format of the model
@ -49,15 +48,15 @@ public:
* @return vector of file extensions, for example the reader for OpenVINO IR returns {"bin"}
*/
virtual std::vector<std::string> getDataFileExtensions() const = 0;
protected:
~IReader() = default;
};
/**
* @brief Creates the default instance of the reader
*
* @param reader Reader interface
* @param resp Response description
* @return Status code
* @return Reader interface
*/
INFERENCE_PLUGIN_API(StatusCode) CreateReader(IReader*& reader, ResponseDesc* resp) noexcept;
INFERENCE_PLUGIN_API(void) CreateReader(std::shared_ptr<IReader>& reader);
} // namespace InferenceEngine

View File

@ -88,8 +88,7 @@ public:
auto taskExecutorGetResult = getNextTaskExecutor();
auto asyncThreadSafeImpl = std::make_shared<MyriadAsyncInferRequest>(
syncRequestImpl, _taskExecutor, _callbackExecutor, taskExecutorGetResult);
asyncRequest.reset(new ie::InferRequestBase(asyncThreadSafeImpl),
[](ie::IInferRequest *p) { p->Release(); });
asyncRequest.reset(new ie::InferRequestBase(asyncThreadSafeImpl));
asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
return asyncRequest;
}

View File

@ -21,7 +21,7 @@ class Engine : public ie::InferencePluginInternal {
public:
explicit Engine(std::shared_ptr<IMvnc> mvnc);
~Engine() override {
~Engine() {
MyriadExecutor::closeDevices(_devicePool, _mvnc);
}

View File

@ -46,7 +46,7 @@ public:
void safeAddExtension(InferenceEngine::Core & ie) {
try {
auto extension = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(
auto extension = std::make_shared<InferenceEngine::Extension>(
FileUtils::makePluginLibraryName<char>({},
std::string("template_extension") + IE_BUILD_POSTFIX));
ie.AddExtension(extension);

View File

@ -1,22 +0,0 @@
// Copyright (C) 2019 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <memory>
#include <gtest/gtest.h>
#include <details/ie_irelease.hpp>
#include "common_test_utils/test_common.hpp"
using IReleaseTests = CommonTestUtils::TestsCommon;
/**
* @brief Testing that callback with Release() from shared_from_irelease(...)
* won't be applied for nullptr.
*/
TEST_F(IReleaseTests, sharedFromIReleaseWithNull) {
InferenceEngine::details::IRelease *irelease = nullptr;
std::shared_ptr<InferenceEngine::details::IRelease> ptr = InferenceEngine::details::shared_from_irelease(irelease);
ptr.reset();
}

View File

@ -43,7 +43,7 @@ TEST_F(CustomOpsSerializationTest, CustomOpUser_MO) {
InferenceEngine::Core ie;
ie.AddExtension(
InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(
std::make_shared<InferenceEngine::Extension>(
get_extension_path()));
auto expected = ie.ReadNetwork(model);
@ -65,7 +65,7 @@ TEST_F(CustomOpsSerializationTest, CustomOpUser_ONNXImporter) {
InferenceEngine::Core ie;
ie.AddExtension(
InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(
std::make_shared<InferenceEngine::Extension>(
get_extension_path()));
auto expected = ie.ReadNetwork(model);
@ -87,7 +87,7 @@ TEST_F(CustomOpsSerializationTest, CustomOpTransformation) {
InferenceEngine::Core ie;
auto extension =
InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(
std::make_shared<InferenceEngine::Extension>(
get_extension_path());
ie.AddExtension(extension);
auto expected = ie.ReadNetwork(model);

View File

@ -32,7 +32,6 @@ constexpr ngraph::NodeTypeInfo FakeAbs::type_info;
class AbsFakeExtension: public InferenceEngine::IExtension {
public:
void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {}
void Release() noexcept override { delete this; }
void Unload() noexcept override {}
std::map<std::string, ngraph::OpSet> getOpSets() override{

View File

@ -55,8 +55,6 @@ class CustomAddConstExtension : public InferenceEngine::IExtension {
void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {}
void Release() noexcept override { delete this; }
void Unload() noexcept override {}
std::map<std::string, ngraph::OpSet> getOpSets() override {

View File

@ -259,8 +259,6 @@ public:
void Unload() noexcept override {}
void Release() noexcept override {}
std::map<std::string, ngraph::OpSet> getOpSets() override {
static std::map<std::string, ngraph::OpSet> opsets;
if (opsets.empty()) {
@ -426,8 +424,6 @@ public:
void Unload() noexcept override {};
void Release() noexcept override {}
std::map<std::string, ngraph::OpSet> getOpSets() override {
static std::map<std::string, ngraph::OpSet> opsets;
if (opsets.empty()) {

View File

@ -26,9 +26,10 @@ protected:
}
unique_ptr<SharedObjectLoader> sharedObjectLoader;
template <class T>
std::function<T> make_std_function(const std::string& functionName) {
std::function<T> ptr(reinterpret_cast<T*>(sharedObjectLoader->get_symbol(functionName.c_str())));
using CreateF = void(std::shared_ptr<IInferencePlugin>&);
std::function<CreateF> make_std_function(const std::string& functionName) {
std::function<CreateF> ptr(reinterpret_cast<CreateF*>(sharedObjectLoader->get_symbol(functionName.c_str())));
return ptr;
}
};
@ -48,22 +49,21 @@ TEST_F(SharedObjectLoaderTests, loaderThrowsIfNoPlugin) {
TEST_F(SharedObjectLoaderTests, canFindExistedMethod) {
loadDll(get_mock_engine_name());
auto factory = make_std_function<StatusCode(IInferencePlugin*&, ResponseDesc*)>("CreatePluginEngine");
auto factory = make_std_function("CreatePluginEngine");
EXPECT_NE(nullptr, factory);
}
TEST_F(SharedObjectLoaderTests, throwIfMethodNofFoundInLibrary) {
loadDll(get_mock_engine_name());
EXPECT_THROW(make_std_function<IInferencePlugin*()>("wrong_function"), InferenceEngine::details::InferenceEngineException);
EXPECT_THROW(make_std_function("wrong_function"),
InferenceEngine::details::InferenceEngineException);
}
TEST_F(SharedObjectLoaderTests, canCallExistedMethod) {
loadDll(get_mock_engine_name());
auto factory = make_std_function<StatusCode(IInferencePlugin*&, ResponseDesc*)>("CreatePluginEngine");
IInferencePlugin* ptr = nullptr;
ResponseDesc resp;
EXPECT_NO_THROW(factory(ptr, &resp));
ptr->Release();
auto factory = make_std_function("CreatePluginEngine");
std::shared_ptr<IInferencePlugin> ptr;
EXPECT_NO_THROW(factory(ptr));
}

View File

@ -11,7 +11,6 @@
#include <memory>
#include <common_test_utils/test_assertions.hpp>
#include <details/ie_so_pointer.hpp>
#include <details/ie_irelease.hpp>
#include <cpp_interfaces/interface/ie_iplugin_internal.hpp>
#include <ie_plugin_ptr.hpp>
@ -81,10 +80,12 @@ namespace InferenceEngine {
namespace details {
struct UnknownPlugin : std::enable_shared_from_this<UnknownPlugin> {};
template<>
class SOCreatorTrait<InferenceEngine::details::IRelease> {
class SOCreatorTrait<InferenceEngine::details::UnknownPlugin> {
public:
static constexpr auto name = "CreateIRelease";
static constexpr auto name = "CreateUnknownPlugin";
};
} // namespace details
@ -92,12 +93,12 @@ public:
} // namespace InferenceEngine
TEST_F(SoPointerTests, UnknownPlugin) {
ASSERT_THROW(SOPointer<InferenceEngine::details::IRelease>("UnknownPlugin"), InferenceEngineException);
ASSERT_THROW(SOPointer<InferenceEngine::details::UnknownPlugin>("UnknownPlugin"), InferenceEngineException);
}
TEST_F(SoPointerTests, UnknownPluginExceptionStr) {
try {
SOPointer<InferenceEngine::details::IRelease>("UnknownPlugin");
SOPointer<InferenceEngine::details::UnknownPlugin>("UnknownPlugin");
}
catch (InferenceEngineException &e) {
ASSERT_STR_CONTAINS(e.what(), "Cannot load library 'UnknownPlugin':");
@ -105,20 +106,3 @@ TEST_F(SoPointerTests, UnknownPluginExceptionStr) {
ASSERT_STR_DOES_NOT_CONTAIN(e.what(), "from CWD:");
}
}
using SymbolLoaderTests = ::testing::Test;
TEST_F(SymbolLoaderTests, throwCreateNullPtr) {
ASSERT_THROW(SymbolLoader<SharedObjectLoader>(nullptr), InferenceEngineException);
}
TEST_F(SymbolLoaderTests, instantiateSymbol) {
std::string name = FileUtils::makePluginLibraryName<char>(getIELibraryPath(),
std::string("mock_engine") + IE_BUILD_POSTFIX);
std::shared_ptr<SharedObjectLoader> sharedLoader(new SharedObjectLoader(name.c_str()));
SymbolLoader<SharedObjectLoader> loader(sharedLoader);
IInferencePlugin * value = nullptr;
ASSERT_NE(nullptr, value = loader.instantiateSymbol<IInferencePlugin>(
SOCreatorTrait<IInferencePlugin>::name));
value->Release();
}

View File

@ -106,8 +106,6 @@ class CustomAbsExtension : public InferenceEngine::IExtension {
void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {}
void Release() noexcept override { delete this; }
void Unload() noexcept override {}
std::map<std::string, ngraph::OpSet> getOpSets() override {
@ -329,7 +327,7 @@ TEST(Extension, XmlModelWithExtensionFromDSO) {
std::vector<float> input_values{1, 2, 3, 4, 5, 6, 7, 8};
std::vector<float> expected{12, 13, 14, 15, 16, 17, 18, 19};
InferenceEngine::Core ie;
ie.AddExtension(InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(get_extension_path()));
ie.AddExtension(std::make_shared<InferenceEngine::Extension>(get_extension_path()));
infer_model(ie, model, input_values, expected);
}
@ -406,7 +404,7 @@ opset_import {
std::vector<float> input_values{1, 2, 3, 4, 5, 6, 7, 8};
std::vector<float> expected{12, 13, 14, 15, 16, 17, 18, 19};
InferenceEngine::Core ie;
ie.AddExtension(InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(get_extension_path()));
ie.AddExtension(std::make_shared<InferenceEngine::Extension>(get_extension_path()));
infer_model(ie, model, input_values, expected);
}

View File

@ -65,7 +65,7 @@ public:
void safeAddExtension(InferenceEngine::Core & ie) {
try {
auto extension = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(
auto extension = std::make_shared<InferenceEngine::Extension>(
FileUtils::makePluginLibraryName<char>({}, "template_extension"));
ie.AddExtension(extension);
} catch (const InferenceEngine::details::InferenceEngineException & ex) {

View File

@ -14,7 +14,6 @@
class MockAllocator : public InferenceEngine::IAllocator {
public:
MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
MOCK_QUALIFIED_METHOD2(lock, noexcept, void*(void*, InferenceEngine::LockOp));
MOCK_QUALIFIED_METHOD1(unlock, noexcept, void(void *));
MOCK_QUALIFIED_METHOD1(alloc, noexcept, void*(size_t));

View File

@ -40,16 +40,10 @@ MockPlugin::LoadExeNetworkImpl(const InferenceEngine::CNNNetwork& network,
InferenceEngine::IInferencePlugin *__target = nullptr;
INFERENCE_PLUGIN_API(StatusCode) CreatePluginEngine(IInferencePlugin *&plugin, ResponseDesc *resp) noexcept {
try {
IInferencePlugin *p = nullptr;
std::swap(__target, p);
plugin = new MockPlugin(p);
return OK;
}
catch (std::exception &ex) {
return DescriptionBuffer(GENERAL_ERROR, resp) << ex.what();
}
INFERENCE_PLUGIN_API(void) CreatePluginEngine(std::shared_ptr<InferenceEngine::IInferencePlugin>& plugin) {
IInferencePlugin *p = nullptr;
std::swap(__target, p);
plugin = std::make_shared<MockPlugin>(p);
}
INFERENCE_PLUGIN_API(InferenceEngine::IInferencePlugin*)

View File

@ -22,7 +22,7 @@ IE_SUPPRESS_DEPRECATED_START
* @class MockICNNNetwork
* @brief Main interface to describe the NN topology
*/
class MockICNNNetwork : public InferenceEngine::ICNNNetwork {
class MockICNNNetwork final : public InferenceEngine::ICNNNetwork {
public:
MOCK_QUALIFIED_METHOD0(getFunction, const noexcept, std::shared_ptr<const ngraph::Function> ());
MOCK_QUALIFIED_METHOD0(getFunction, noexcept, std::shared_ptr<ngraph::Function>());
@ -37,7 +37,6 @@ class MockICNNNetwork : public InferenceEngine::ICNNNetwork {
InferenceEngine::ResponseDesc*));
MOCK_QUALIFIED_METHOD2(setBatchSize, noexcept, InferenceEngine::StatusCode(const size_t size, InferenceEngine::ResponseDesc*));
MOCK_QUALIFIED_METHOD0(getBatchSize, const noexcept, size_t());
MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
MOCK_QUALIFIED_METHOD1(getInputShapes, const noexcept, void(InferenceEngine::ICNNNetwork::InputShapes&));
MOCK_QUALIFIED_METHOD2(reshape, noexcept, InferenceEngine::StatusCode(const InferenceEngine::ICNNNetwork::InputShapes &, InferenceEngine::ResponseDesc *));
MOCK_QUALIFIED_METHOD3(serialize, const noexcept, InferenceEngine::StatusCode(
@ -45,25 +44,3 @@ class MockICNNNetwork : public InferenceEngine::ICNNNetwork {
const std::string &,
InferenceEngine::ResponseDesc*));
};
/**
* @class MockCNNNetworkImpl
* @brief Main interface to describe the NN topology
*/
class MockCNNNetworkImpl: public InferenceEngine::details::CNNNetworkImpl {
public:
MOCK_QUALIFIED_METHOD1(getOutputsInfo, const noexcept, void(InferenceEngine::OutputsDataMap& out));
MOCK_QUALIFIED_METHOD1(getInputsInfo, const noexcept, void(InferenceEngine::InputsDataMap &inputs));
MOCK_QUALIFIED_METHOD1(getInput, const noexcept, InferenceEngine::InputInfo::Ptr(const std::string &inputName));
MOCK_QUALIFIED_METHOD0(getName, const noexcept, const std::string&());
MOCK_QUALIFIED_METHOD0(layerCount, const noexcept, size_t());
MOCK_QUALIFIED_METHOD3(addOutput, noexcept, InferenceEngine::StatusCode(const std::string &, size_t , InferenceEngine::ResponseDesc*));
MOCK_QUALIFIED_METHOD2(setBatchSize, noexcept, InferenceEngine::StatusCode(const size_t size, InferenceEngine::ResponseDesc*));
MOCK_QUALIFIED_METHOD0(getBatchSize, const noexcept, size_t());
MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
MOCK_METHOD1(validate, void(int));
void validateNetwork() {
InferenceEngine::details::CNNNetworkImpl::validate();
}
};

View File

@ -33,7 +33,6 @@ public:
MOCK_QUALIFIED_METHOD3(GetMetric, const noexcept, StatusCode(const std::string &name, Parameter &result, ResponseDesc *resp));
MOCK_QUALIFIED_METHOD2(GetContext, const noexcept, StatusCode(RemoteContext::Ptr &pContext, ResponseDesc *resp));
MOCK_QUALIFIED_METHOD3(QueryState, noexcept, StatusCode(IVariableState::Ptr &, size_t, ResponseDesc *));
MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
};
IE_SUPPRESS_DEPRECATED_END

View File

@ -27,7 +27,6 @@ public:
MOCK_QUALIFIED_METHOD2(GetUserData, noexcept, StatusCode(void**, ResponseDesc*));
MOCK_QUALIFIED_METHOD2(SetUserData, noexcept, StatusCode(void*, ResponseDesc*));
MOCK_QUALIFIED_METHOD1(SetCompletionCallback, noexcept, StatusCode(IInferRequest::CompletionCallback));
MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
MOCK_QUALIFIED_METHOD1(Infer, noexcept, StatusCode(ResponseDesc*));
MOCK_QUALIFIED_METHOD2(GetPerformanceCounts, const noexcept,
StatusCode(std::map<std::string, InferenceEngineProfileInfo> &perfMap, ResponseDesc*));

View File

@ -15,7 +15,7 @@
namespace InferenceEngine {
class MockNotEmptyICNNNetwork : public ICNNNetwork {
class MockNotEmptyICNNNetwork final : public ICNNNetwork {
public:
static constexpr const char* INPUT_BLOB_NAME = "first_input";
const SizeVector INPUT_DIMENTIONS = { 1, 3, 299, 299 };
@ -73,7 +73,6 @@ public:
MOCK_QUALIFIED_METHOD3(addOutput, noexcept, StatusCode(const std::string &, size_t , ResponseDesc*));
MOCK_QUALIFIED_METHOD2(setBatchSize, noexcept, StatusCode(const size_t size, ResponseDesc*));
MOCK_QUALIFIED_METHOD0(getBatchSize, const noexcept, size_t());
MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
MOCK_QUALIFIED_METHOD1(getInputShapes, const noexcept, void(ICNNNetwork::InputShapes &));
MOCK_QUALIFIED_METHOD2(reshape, noexcept, StatusCode(const ICNNNetwork::InputShapes &, ResponseDesc *));
MOCK_QUALIFIED_METHOD3(serialize, const noexcept, StatusCode(const std::string &, const std::string &, InferenceEngine::ResponseDesc*));

View File

@ -33,8 +33,7 @@ protected:
virtual void SetUp() {
mockExeNetwork = make_shared<MockExecutableNetworkThreadSafeAsyncOnly>();
exeNetwork = details::shared_from_irelease(
new ExecutableNetworkBase(mockExeNetwork));
exeNetwork = std::make_shared<ExecutableNetworkBase>(mockExeNetwork);
InputsDataMap networkInputs;
OutputsDataMap networkOutputs;
mockAsyncInferRequestInternal = make_shared<MockAsyncInferRequestInternal>(networkInputs, networkOutputs);
@ -108,8 +107,7 @@ protected:
virtual void SetUp() {
mockExeNetwork = make_shared<MockExecutableNetworkThreadSafe>();
exeNetwork = details::shared_from_irelease(
new ExecutableNetworkBase(mockExeNetwork));
exeNetwork = std::make_shared<ExecutableNetworkBase>(mockExeNetwork);
InputsDataMap networkInputs;
OutputsDataMap networkOutputs;
mockInferRequestInternal = make_shared<MockInferRequestInternal>(networkInputs, networkOutputs);
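
details::shared_from_irelease existed only to attach a deleter that forwarded to IRelease::Release(); once the wrapped object is destructible through a virtual destructor, a plain std::make_shared is sufficient. The before/after pattern repeated throughout these fixtures, as a sketch using the names from the tests above:

    // Old: shared_ptr whose custom deleter called Release()
    // exeNetwork = details::shared_from_irelease(new ExecutableNetworkBase(mockExeNetwork));

    // New: ordinary shared ownership; the default deleter destroys the object
    exeNetwork = std::make_shared<ExecutableNetworkBase>(mockExeNetwork);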

View File

@ -34,7 +34,7 @@ protected:
virtual void SetUp() {
mock_impl.reset(new MockIAsyncInferRequestInternal());
request = details::shared_from_irelease(new InferRequestBase(mock_impl));
request = std::make_shared<InferRequestBase>(mock_impl);
}
};
@ -242,8 +242,7 @@ protected:
OutputsDataMap outputsInfo;
mockNotEmptyNet.getOutputsInfo(outputsInfo);
mockInferRequestInternal = make_shared<MockAsyncInferRequestInternal>(inputsInfo, outputsInfo);
inferRequest = shared_from_irelease(
new InferRequestBase(mockInferRequestInternal));
inferRequest = std::make_shared<InferRequestBase>(mockInferRequestInternal);
return make_shared<InferRequest>(inferRequest);
}

View File

@ -198,7 +198,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, callbackTakesOKIfAsyncRequestWasOK) {
testRequest = make_shared<AsyncInferRequestThreadSafeDefault>(mockInferRequestInternal, taskExecutor, taskExecutor);
IInferRequest::Ptr asyncRequest;
asyncRequest.reset(new InferRequestBase(testRequest), [](IInferRequest *p) { p->Release(); });
asyncRequest.reset(new InferRequestBase(testRequest));
testRequest->SetPointerToPublicInterface(asyncRequest);
testRequest->SetCompletionCallback([](InferenceEngine::IInferRequest::Ptr request, StatusCode status) {
@ -214,7 +214,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, callbackIsCalledIfAsyncRequestFailed)
auto taskExecutor = std::make_shared<CPUStreamsExecutor>();
testRequest = make_shared<AsyncInferRequestThreadSafeDefault>(mockInferRequestInternal, taskExecutor, taskExecutor);
IInferRequest::Ptr asyncRequest;
asyncRequest.reset(new InferRequestBase(testRequest), [](IInferRequest *p) { p->Release(); });
asyncRequest.reset(new InferRequestBase(testRequest));
testRequest->SetPointerToPublicInterface(asyncRequest);
bool wasCalled = false;
@ -236,7 +236,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, canCatchExceptionIfAsyncRequestFailed
auto taskExecutor = std::make_shared<CPUStreamsExecutor>();
testRequest = make_shared<AsyncInferRequestThreadSafeDefault>(mockInferRequestInternal, taskExecutor, taskExecutor);
IInferRequest::Ptr asyncRequest;
asyncRequest.reset(new InferRequestBase(testRequest), [](IInferRequest *p) { p->Release(); });
asyncRequest.reset(new InferRequestBase(testRequest));
testRequest->SetPointerToPublicInterface(asyncRequest);
EXPECT_CALL(*mockInferRequestInternal.get(), InferImpl()).WillOnce(Throw(std::exception()));

View File

@ -20,9 +20,7 @@ using namespace InferenceEngine::details;
template <class T>
inline typename InferenceEngine::InferRequest make_infer_request(std::shared_ptr<T> impl) {
typename InferRequestBase::Ptr req(new InferRequestBase(impl), [](IInferRequest* p) {
p->Release();
});
typename InferRequestBase::Ptr req(new InferRequestBase(impl));
return InferenceEngine::InferRequest(req);
}
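
The same simplification applies wherever an IInferRequest::Ptr was reset with a custom deleter. Assuming impl is a shared_ptr to an internal request implementation, as in the helper above, the pattern reduces to:

    InferenceEngine::IInferRequest::Ptr req;
    // Old: req.reset(new InferRequestBase(impl), [](IInferRequest* p) { p->Release(); });
    req.reset(new InferRequestBase(impl));  // the default deleter now suffices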

View File

@ -223,7 +223,7 @@ protected:
virtual void SetUp() {
mock_impl.reset(new MockIExecutableNetworkInternal());
exeNetwork = shared_from_irelease(new ExecutableNetworkBase(mock_impl));
exeNetwork = std::make_shared<ExecutableNetworkBase>(mock_impl);
}
};

View File

@ -25,34 +25,34 @@ std::string getExtensionPath() {
}
TEST(ExtensionTests, testGetOpSets) {
IExtensionPtr extension = make_so_pointer<IExtension>(getExtensionPath());
IExtensionPtr extension = std::make_shared<Extension>(getExtensionPath());
auto opsets = extension->getOpSets();
ASSERT_FALSE(opsets.empty());
opsets.clear();
}
TEST(ExtensionTests, testGetImplTypes) {
IExtensionPtr extension = make_so_pointer<IExtension>(getExtensionPath());
IExtensionPtr extension = std::make_shared<Extension>(getExtensionPath());
auto opset = extension->getOpSets().begin()->second;
std::shared_ptr<ngraph::Node> op(opset.create(opset.get_types_info().begin()->name));
ASSERT_FALSE(extension->getImplTypes(op).empty());
}
TEST(ExtensionTests, testGetImplTypesThrowsIfNgraphNodeIsNullPtr) {
IExtensionPtr extension = make_so_pointer<IExtension>(getExtensionPath());
IExtensionPtr extension = std::make_shared<Extension>(getExtensionPath());
ASSERT_THROW(extension->getImplTypes(std::shared_ptr<ngraph::Node> ()),
InferenceEngine::details::InferenceEngineException);
}
TEST(ExtensionTests, testGetImplementation) {
IExtensionPtr extension = make_so_pointer<IExtension>(getExtensionPath());
IExtensionPtr extension = std::make_shared<Extension>(getExtensionPath());
auto opset = extension->getOpSets().begin()->second;
std::shared_ptr<ngraph::Node> op(opset.create("Template"));
ASSERT_NE(nullptr, extension->getImplementation(op, extension->getImplTypes(op)[0]));
}
TEST(ExtensionTests, testGetImplementationThrowsIfNgraphNodeIsNullPtr) {
IExtensionPtr extension = make_so_pointer<IExtension>(getExtensionPath());
IExtensionPtr extension = std::make_shared<Extension>(getExtensionPath());
ASSERT_THROW(extension->getImplementation(std::shared_ptr<ngraph::Node> (), ""),
InferenceEngine::details::InferenceEngineException);
}
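
Constructing the wrapper with std::make_shared<Extension>(path) loads the shared library and keeps it mapped for as long as any copy of the shared_ptr is alive, so there is no Release() to pair with the load. A sketch of the lifetime guarantee these tests now rely on (getExtensionPath() as defined above):

    {
        IExtensionPtr extension = std::make_shared<Extension>(getExtensionPath());
        auto opsets = extension->getOpSets();  // safe: the library stays loaded here
    }  // last shared_ptr copy destroyed => extension freed, library unloaded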

View File

@ -40,15 +40,6 @@ protected:
MockInferencePluginInternal2 engine;
};
TEST_F(PluginTest, canCreatePlugin) {
auto ptr = make_std_function<IInferencePlugin*
(IInferencePlugin*)>("CreatePluginEngineProxy");
unique_ptr<IInferencePlugin, std::function<void(IInferencePlugin*)>> smart_ptr(ptr(nullptr), [](IInferencePlugin *p) {
p->Release();
});
}
TEST_F(PluginTest, canCreatePluginUsingSmartPtr) {
ASSERT_NO_THROW(InferenceEnginePluginPtr ptr(get_mock_engine_name()));
}
@ -66,11 +57,11 @@ TEST_F(PluginTest, canSetConfiguration) {
InferenceEnginePluginPtr ptr = getPtr();
// TODO: dynamic->reinterpret because of clang/gcc cannot
// dynamically cast this MOCK object
ASSERT_TRUE(reinterpret_cast<MockPlugin*>(*ptr)->config.empty());
ASSERT_TRUE(dynamic_cast<MockPlugin*>(ptr.operator->())->config.empty());
std::map<std::string, std::string> config = { { "key", "value" } };
ASSERT_NO_THROW(ptr->SetConfig(config));
config.clear();
ASSERT_STREQ(reinterpret_cast<MockPlugin*>(*ptr)->config["key"].c_str(), "value");
ASSERT_STREQ(dynamic_cast<MockPlugin*>(ptr.operator->())->config["key"].c_str(), "value");
}

View File

@ -9,14 +9,10 @@
#include "system_allocator.hpp"
using namespace InferenceEngine;
class SystemAllocatorReleaseTests : public CommonTestUtils::TestsCommon {
};
TEST_F(SystemAllocatorReleaseTests, canRelease) {
SystemMemoryAllocator *allocator_ = new SystemMemoryAllocator();
allocator_->Release();
}
class SystemAllocatorTests : public CommonTestUtils::TestsCommon {
protected:
void SetUp() override {
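
The deleted canRelease test exercised nothing but the Release() method itself; with IRelease gone, SystemMemoryAllocator is an ordinary class whose lifetime is managed like any other object. A sketch, assuming the allocator is default-constructible as in the removed test:

    {
        auto allocator = std::make_shared<SystemMemoryAllocator>();
        // ... exercise the allocator ...
    }  // destroyed by the default deleter; no explicit Release()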

View File

@ -104,7 +104,6 @@ public:
return std::make_shared<NewFakePrimitiveImpl>(node);
};
}
void Release() noexcept override { delete this; }
void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override {
static const InferenceEngine::Version VERSION{{}, "", ""};
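
Every "void Release() noexcept override { delete this; }" body removed from the extensions and mocks in this commit follows the same rule: the base interface no longer declares Release(), and deletion happens through the virtual destructor invoked by the owning shared_ptr. A class sketch under that assumption (FakeExtension is illustrative, not part of the commit):

    class FakeExtension : public InferenceEngine::IExtension {
    public:
        void Unload() noexcept override {}
        void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {
            static const InferenceEngine::Version VERSION{{}, "", ""};
            versionInfo = &VERSION;
        }
        // No Release() override: a std::shared_ptr<IExtension> owner destroys the
        // object through the interface's virtual destructor.
    };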

View File

@ -63,9 +63,6 @@ public:
bool free(void* handle) noexcept override {
return true;
}
void Release() noexcept override {
delete this;
}
};
#if GNA_LIB_VER == 2
void expect_enqueue_calls(GNACppApi &mockApi, bool enableHardwareConsistency = true){

View File

@ -91,9 +91,6 @@ public:
void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override {}
void Unload() noexcept override {}
void Release() noexcept override {
delete this;
}
InferenceEngine::StatusCode getPrimitiveTypes(char**& types, unsigned int& size, InferenceEngine::ResponseDesc* resp) noexcept override {
types = new char *[factories.size()];
size_t count = 0;

View File

@ -21,10 +21,6 @@ class FakeExtensions : public Cpu::MKLDNNExtensions {
public:
void Unload() noexcept override {};
void Release() noexcept override {
delete this;
};
static std::shared_ptr<TestExtensionsHolder> GetExtensionsHolder() {
static std::shared_ptr<TestExtensionsHolder> localHolder;
if (localHolder == nullptr) {

View File

@ -432,9 +432,6 @@ public:
void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override {}
void Unload() noexcept override {}
void Release() noexcept override {
delete this;
}
InferenceEngine::StatusCode getPrimitiveTypes(char**& types, unsigned int& size, InferenceEngine::ResponseDesc* resp) noexcept override {
types = new char *[factories.size()];
size_t count = 0;

View File

@ -327,9 +327,9 @@ protected:
InferenceEngine::Core core;
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr()));
auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
ASSERT_EQ(nullptr, network.getFunction());
auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
InferenceEngine::ResponseDesc resp;
InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;
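
These batch-reshape fixtures (and the analogous ones below) replace dynamic_cast with static_cast when going from ICNNNetwork& down to CNNNetworkImpl*. The added ASSERT_EQ(nullptr, network.getFunction()) line documents the invariant that makes the unchecked cast acceptable here: the network is read from a legacy, non-ngraph model, so the implementation behind the reference is known in these tests to be CNNNetworkImpl. The guard, as a sketch:

    ASSERT_EQ(nullptr, network.getFunction());  // no ngraph function => legacy implementation
    auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl*>(
        &static_cast<InferenceEngine::ICNNNetwork&>(network));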

View File

@ -285,8 +285,8 @@ protected:
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr));
auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
ASSERT_EQ(nullptr, network.getFunction());
auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
InferenceEngine::ResponseDesc resp;
InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;

View File

@ -254,9 +254,9 @@ protected:
InferenceEngine::Core core;
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr));
auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
ASSERT_EQ(nullptr, network.getFunction());
auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
InferenceEngine::ResponseDesc resp;
InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;

View File

@ -321,8 +321,8 @@ protected:
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr()));
auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
ASSERT_EQ(nullptr, network.getFunction());
auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
InferenceEngine::ResponseDesc resp;
InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;

View File

@ -433,8 +433,8 @@ protected:
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr));
auto implNet = dynamic_cast<details::CNNNetworkImpl *>(&((ICNNNetwork&)network));
ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
ASSERT_EQ(nullptr, network.getFunction());
auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
ResponseDesc resp;
StatusCode sts = implNet->setBatchSizeReshape(dims[0], &resp);
ASSERT_EQ((int)StatusCode::OK, sts) << resp.msg;

View File

@ -481,13 +481,13 @@ protected:
memcpy(model_blob_ptr, blb->buffer().as<uint8_t*>(), blb->byteSize());
model_blob_ptr += blb->byteSize();
}
InferenceEngine::Core core;
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, model_blob));
auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
ASSERT_EQ(nullptr, network.getFunction());
auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
InferenceEngine::ResponseDesc resp;
InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;

View File

@ -368,8 +368,8 @@ protected:
InferenceEngine::CNNNetwork network;
ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr));
auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
ASSERT_EQ(nullptr, network.getFunction());
auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
InferenceEngine::ResponseDesc resp;
InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;

Some files were not shown because too many files have changed in this diff.