Removed IRelease Interface (#4032)
parent 699c63bdaf
commit 2fcf92be42
@@ -344,7 +344,7 @@ make --jobs=$(nproc)
 
 The result of this command is a compiled shared library (`.so`, `.dylib` or `.dll`). It should be loaded in the
 application using `Core` class instance method `AddExtension` like this
-`core.AddExtension(make_so_pointer<IExtension>(compiled_library_file_name), "CPU");`.
+`core.AddExtension(std::make_shared<Extension>(compiled_library_file_name), "CPU");`.
 
 To test that the extension is implemented correctly we can run the "mri_reconstruction_demo.py" with the following content:
 
@@ -5,7 +5,7 @@ using namespace InferenceEngine;
 //! [part0]
 InferenceEngine::Core core;
 // Load CPU extension as a shared library
-auto extension_ptr = make_so_pointer<InferenceEngine::IExtension>("<shared lib path>");
+auto extension_ptr = std::make_shared<InferenceEngine::Extension>(std::string{"<shared lib path>"});
 // Add extension to the CPU device
 core.AddExtension(extension_ptr, "CPU");
 //! [part0]
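For orientation, here is a minimal, self-contained sketch of the migration the two hunks above document: loading a custom CPU extension through the `Extension` class instead of the removed `make_so_pointer<IExtension>` helper. The library file name is a placeholder.

```cpp
#include <inference_engine.hpp>
#include <ie_extension.h>
#include <memory>
#include <string>

int main() {
    InferenceEngine::Core core;

    // Old style (now deprecated): returned an IExtension whose lifetime ended
    // through the removed IRelease::Release() mechanism.
    // auto ext = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>("libcustom_cpu_ext.so");

    // New style: Extension is a regular class owned by std::shared_ptr and
    // destroyed by its destructor once the Core drops its reference.
    auto ext = std::make_shared<InferenceEngine::Extension>(std::string{"libcustom_cpu_ext.so"});
    core.AddExtension(ext, "CPU");
    return 0;
}
```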
@@ -109,7 +109,10 @@ InferenceEngine::ILayerImpl::Ptr Extension::getImplementation(const std::shared_
 //! [extension:getImplementation]
 
 //! [extension:CreateExtension]
-// Exported function
+//Generate exported function
+IE_DEFINE_EXTENSION_CREATE_FUNCTION(Extension)
+//! [extension:CreateExtension]
 
 INFERENCE_EXTENSION_API(InferenceEngine::StatusCode) InferenceEngine::CreateExtension(InferenceEngine::IExtension *&ext,
 InferenceEngine::ResponseDesc *resp) noexcept {
 try {
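For comparison, a minimal sketch of what an extension source file needs to export after this change; the header name and namespace are hypothetical stand-ins for the template extension's `Extension` class used above.

```cpp
// custom_extension.cpp -- illustrative only; assumes custom_extension.hpp declares
// a class Extension that implements InferenceEngine::IExtension, as above.
#include <ie_iextension.h>
#include "custom_extension.hpp"

using namespace CustomExtension;   // assumption: the class lives in this namespace

// Expands to InferenceEngine::CreateExtensionShared(std::shared_ptr<IExtension>&),
// which hands back a std::make_shared'd instance; no Release() override and no
// legacy CreateExtension entry point are required any more.
IE_DEFINE_EXTENSION_CREATE_FUNCTION(Extension)
```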
@@ -123,4 +126,3 @@ INFERENCE_EXTENSION_API(InferenceEngine::StatusCode) InferenceEngine::CreateExte
 return InferenceEngine::GENERAL_ERROR;
 }
 }
-//! [extension:CreateExtension]

@@ -21,7 +21,6 @@ public:
 ~Extension();
 void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override;
 void Unload() noexcept override {}
-void Release() noexcept override { delete this; }
 
 std::map<std::string, ngraph::OpSet> getOpSets() override;
 std::vector<std::string> getImplTypes(const std::shared_ptr<ngraph::Node>& node) override;

@@ -143,8 +143,7 @@ InferenceEngine::IInferRequest::Ptr TemplatePlugin::ExecutableNetwork::CreateInf
 auto internalRequest = CreateInferRequestImpl(_networkInputs, _networkOutputs);
 auto asyncThreadSafeImpl = std::make_shared<TemplateAsyncInferRequest>(std::static_pointer_cast<TemplateInferRequest>(internalRequest),
 _taskExecutor, _plugin->_waitExecutor, _callbackExecutor);
-asyncRequest.reset(new InferenceEngine::InferRequestBase(asyncThreadSafeImpl),
-[](InferenceEngine::IInferRequest *p) { p->Release(); });
+asyncRequest.reset(new InferenceEngine::InferRequestBase(asyncThreadSafeImpl));
 asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
 return asyncRequest;
 }
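The hunk above shows the pattern that repeats throughout this commit: wrapper objects used to be handed to `shared_ptr::reset` together with a deleter that forwarded to `Release()`, and now the default deleter is sufficient. A small self-contained sketch of the difference (the `Widget` type is illustrative, not from the code base):

```cpp
#include <memory>

struct Widget {
    // Pre-change style: lifetime ended through an explicit virtual Release().
    virtual void Release() noexcept { delete this; }
    virtual ~Widget() = default;
};

int main() {
    std::shared_ptr<Widget> p;

    // Before: a custom deleter forwarded destruction to Release().
    p.reset(new Widget(), [](Widget* w) { w->Release(); });

    // After: ordinary ownership; ~Widget() runs when the last reference drops.
    p.reset(new Widget());
    return 0;
}
```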
@@ -18,7 +18,7 @@ public:
 using Ptr = std::shared_ptr<Plugin>;
 
 Plugin();
-~Plugin() override;
+~Plugin();
 
 void SetConfig(const std::map<std::string, std::string> &config) override;
 InferenceEngine::QueryNetworkResult

@@ -451,7 +451,7 @@ IEStatusCode ie_core_add_extension(ie_core_t *core, const char *extension_path,
 }
 
 try {
-auto extension_ptr = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(extension_path);
+auto extension_ptr = std::make_shared<InferenceEngine::Extension>(std::string{extension_path});
 auto extension = std::dynamic_pointer_cast<InferenceEngine::IExtension>(extension_ptr);
 core->object.AddExtension(extension, device_name);
 } catch (const IE::details::InferenceEngineException& e) {

@@ -524,6 +524,8 @@ IEStatusCode ie_core_get_available_devices(const ie_core_t *core, ie_available_d
 avai_devices->devices = dev_ptrs.release();
 } catch (const IE::details::InferenceEngineException& e) {
 return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
+} catch (const std::exception& e) {
+return IEStatusCode::UNEXPECTED;
 } catch (...) {
 return IEStatusCode::UNEXPECTED;
 }

@@ -599,7 +599,7 @@ void InferenceEnginePython::IECore::registerPlugins(const std::string &xmlConfig
 }
 
 void InferenceEnginePython::IECore::addExtension(const std::string &ext_lib_path, const std::string &deviceName) {
-auto extension_ptr = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(ext_lib_path);
+auto extension_ptr = std::make_shared<InferenceEngine::Extension>(ext_lib_path);
 auto extension = std::dynamic_pointer_cast<InferenceEngine::IExtension>(extension_ptr);
 actual.AddExtension(extension, deviceName);
 }

@@ -18,6 +18,7 @@
 #include "ie_iinfer_request.hpp"
 #include "details/ie_exception_conversion.hpp"
 #include "details/ie_so_loader.h"
+#include "ie_blob.h"
 
 namespace InferenceEngine {
 
@@ -1,47 +0,0 @@
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-/**
-* @brief A header file for the Inference Engine plugins destruction mechanism
-*
-* @file ie_irelease.hpp
-*/
-#pragma once
-
-#include <memory>
-
-#include "ie_api.h"
-#include "ie_no_copy.hpp"
-
-namespace InferenceEngine {
-namespace details {
-/**
-* @brief This class is used for objects allocated by a shared module (in *.so)
-*/
-class IRelease : public no_copy {
-public:
-/**
-* @brief Releases current allocated object and all related resources.
-* Once this method is called, the pointer to this interface is no longer valid
-*/
-virtual void Release() noexcept = 0;
-
-protected:
-/**
-* @brief Default destructor
-*/
-~IRelease() override = default;
-};
-
-template <class T>
-inline std::shared_ptr<T> shared_from_irelease(T* ptr) {
-std::shared_ptr<T> pointer(ptr, [](IRelease* p) {
-if (p)
-p->Release();
-});
-return pointer;
-}
-
-} // namespace details
-} // namespace InferenceEngine

@@ -1,25 +0,0 @@
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-/**
-* @brief Utility header file. Provides no release base class
-*
-* @file ie_no_release.hpp
-*/
-#pragma once
-
-namespace InferenceEngine {
-namespace details {
-
-/**
-* @brief prevent Release method from being called on specific objects
-*/
-template <class T>
-class NoReleaseOn : public T {
-private:
-void Release() noexcept = 0;
-};
-
-} // namespace details
-} // namespace InferenceEngine

@@ -19,7 +19,7 @@ namespace details {
 /*
 * @brief This is a helper class to wrap external memory
 */
-class PreAllocator : public IAllocator {
+class PreAllocator final : public IAllocator {
 void* _actualData;
 size_t _sizeInBytes;
 

@@ -59,17 +59,6 @@ public:
 bool free(void*) noexcept override { // NOLINT
 return false;
 }
-
-/**
-* @brief Deletes current allocator.
-* Can be used if a shared_from_irelease pointer is used
-*/
-void Release() noexcept override {
-delete this;
-}
-
-protected:
-virtual ~PreAllocator() = default;
 };
 
 /**

@@ -80,7 +69,7 @@ protected:
 */
 template <class T>
 std::shared_ptr<IAllocator> make_pre_allocator(T* ptr, size_t size) {
-return shared_from_irelease(new PreAllocator(ptr, size * sizeof(T)));
+return std::make_shared<PreAllocator>(ptr, size * sizeof(T));
 }
 
 } // namespace details
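A short usage sketch for `make_pre_allocator` after the change: the returned `std::shared_ptr<IAllocator>` owns the wrapper directly instead of going through `shared_from_irelease`. This assumes `details/ie_pre_allocator.hpp` is on the include path; the buffer and sizes are illustrative.

```cpp
#include <details/ie_pre_allocator.hpp>
#include <memory>
#include <vector>

int main() {
    std::vector<float> buffer(1024);   // memory owned by the application

    // Wraps the existing buffer; the allocator never frees it.
    std::shared_ptr<InferenceEngine::IAllocator> allocator =
        InferenceEngine::details::make_pre_allocator(buffer.data(), buffer.size());

    void* handle = allocator->alloc(buffer.size() * sizeof(float));   // returns the wrapped region
    auto* data = static_cast<float*>(allocator->lock(handle, InferenceEngine::LOCK_FOR_WRITE));
    data[0] = 42.f;
    allocator->unlock(handle);
    return 0;   // PreAllocator destroyed by the shared_ptr, not by Release()
}
```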
@@ -16,60 +16,9 @@
 #include "ie_common.h"
 #include "ie_so_loader.h"
 #include "details/ie_exception.hpp"
-#include "details/ie_no_release.hpp"
-#include "details/ie_irelease.hpp"
 
 namespace InferenceEngine {
 namespace details {
-
-/**
-* @brief This class is a C++ helper to load a symbol from a library and create its instance
-*/
-template <class Loader>
-class SymbolLoader {
-private:
-std::shared_ptr<Loader> _so_loader;
-
-public:
-/**
-* @brief The main constructor
-* @param loader Library to load from
-*/
-explicit SymbolLoader(std::shared_ptr<Loader> loader): _so_loader(loader) {
-if (_so_loader == nullptr) {
-THROW_IE_EXCEPTION << "SymbolLoader cannot be created with nullptr";
-}
-}
-
-/**
-* @brief Calls a function from the library that creates an object and returns StatusCode
-* @param name Name of function to load object with
-* @return If StatusCode provided by function is OK then returns the loaded object. Throws an exception otherwise
-*/
-template <class T>
-T* instantiateSymbol(const std::string& name) const {
-T* instance = nullptr;
-ResponseDesc desc;
-StatusCode sts = bind_function<StatusCode(T*&, ResponseDesc*)>(name)(instance, &desc);
-if (sts != OK) {
-THROW_IE_EXCEPTION << desc.msg;
-}
-return instance;
-}
-
-private:
-/**
-* @brief Loads function from the library and returns a pointer to it
-* @param functionName Name of function to load
-* @return The loaded function
-*/
-template <class T>
-std::function<T> bind_function(const std::string& functionName) const {
-std::function<T> ptr(reinterpret_cast<T*>(_so_loader->get_symbol(functionName.c_str())));
-return ptr;
-}
-};
-
 /**
 * @brief This class is a trait class that provides a creator with a function name corresponding to the templated class
 * parameter

@@ -93,6 +42,13 @@ template <class T, class Loader = SharedObjectLoader>
 class SOPointer {
 template <class U, class W>
 friend class SOPointer;
+IE_SUPPRESS_DEPRECATED_START
+struct HasRelease {
+template <typename C> static char test(decltype(&C::Release));
+template <typename C> static long test(...);
+constexpr static const bool value = sizeof(test<T>(nullptr)) == sizeof(char);
+};
+IE_SUPPRESS_DEPRECATED_END
 
 public:
 /**
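`HasRelease` above is the classic sizeof/SFINAE member-detection idiom; a standalone sketch of the same technique with illustrative types:

```cpp
#include <cstdio>

struct Legacy { void Release() noexcept {} };
struct Modern { };   // no Release() member

template <class T>
struct HasRelease {
    // The first overload participates only if &C::Release is well-formed.
    template <typename C> static char test(decltype(&C::Release));
    template <typename C> static long test(...);
    constexpr static const bool value = sizeof(test<T>(nullptr)) == sizeof(char);
};

int main() {
    std::printf("Legacy: %d, Modern: %d\n", HasRelease<Legacy>::value, HasRelease<Modern>::value);
    return 0;   // prints "Legacy: 1, Modern: 0"
}
```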
@ -107,18 +63,18 @@ public:
|
|||||||
template <typename C,
|
template <typename C,
|
||||||
typename = enableIfSupportedChar<C>>
|
typename = enableIfSupportedChar<C>>
|
||||||
explicit SOPointer(const std::basic_string<C> & name)
|
explicit SOPointer(const std::basic_string<C> & name)
|
||||||
: _so_loader(new Loader(name.c_str())),
|
: _so_loader(new Loader(name.c_str())) {
|
||||||
_pointedObj(details::shared_from_irelease(
|
Load(std::integral_constant<bool, HasRelease::value>{});
|
||||||
SymbolLoader<Loader>(_so_loader).template instantiateSymbol<T>(SOCreatorTrait<T>::name))) {}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief The main constructor
|
* @brief The main constructor
|
||||||
* @param name Name of a shared library file
|
* @param name Name of a shared library file
|
||||||
*/
|
*/
|
||||||
explicit SOPointer(const char * name)
|
explicit SOPointer(const char * name)
|
||||||
: _so_loader(new Loader(name)),
|
: _so_loader(new Loader(name)) {
|
||||||
_pointedObj(details::shared_from_irelease(
|
Load(std::integral_constant<bool, HasRelease::value>{});
|
||||||
SymbolLoader<Loader>(_so_loader).template instantiateSymbol<T>(SOCreatorTrait<T>::name))) {}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Constructs an object with existing reference
|
* @brief Constructs an object with existing reference
|
||||||
@ -134,10 +90,10 @@ public:
|
|||||||
* @brief Constructs an object with existing loader
|
* @brief Constructs an object with existing loader
|
||||||
* @param so_loader Existing pointer to a library loader
|
* @param so_loader Existing pointer to a library loader
|
||||||
*/
|
*/
|
||||||
explicit SOPointer(std::shared_ptr<Loader> so_loader)
|
explicit SOPointer(const std::shared_ptr<Loader>& so_loader)
|
||||||
: _so_loader(so_loader),
|
: _so_loader(so_loader) {
|
||||||
_pointedObj(details::shared_from_irelease(
|
Load(std::integral_constant<bool, HasRelease::value>{});
|
||||||
SymbolLoader<Loader>(_so_loader).template instantiateSymbol<T>(SOCreatorTrait<T>::name))) {}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief The copy-like constructor, can create So Pointer that dereferenced into child type if T is derived of U
|
* @brief The copy-like constructor, can create So Pointer that dereferenced into child type if T is derived of U
|
||||||
@ -147,24 +103,22 @@ public:
|
|||||||
SOPointer(const SOPointer<U, W>& that)
|
SOPointer(const SOPointer<U, W>& that)
|
||||||
: _so_loader(std::dynamic_pointer_cast<Loader>(that._so_loader)),
|
: _so_loader(std::dynamic_pointer_cast<Loader>(that._so_loader)),
|
||||||
_pointedObj(std::dynamic_pointer_cast<T>(that._pointedObj)) {
|
_pointedObj(std::dynamic_pointer_cast<T>(that._pointedObj)) {
|
||||||
if (_pointedObj == nullptr) {
|
IE_ASSERT(_pointedObj != nullptr);
|
||||||
THROW_IE_EXCEPTION << "Cannot create object from SOPointer<U, W> reference";
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Standard pointer operator
|
* @brief Standard pointer operator
|
||||||
* @return underlined interface with disabled Release method
|
* @return underlined interface with disabled Release method
|
||||||
*/
|
*/
|
||||||
details::NoReleaseOn<T>* operator->() const noexcept {
|
T* operator->() const noexcept {
|
||||||
return reinterpret_cast<details::NoReleaseOn<T>*>(_pointedObj.get());
|
return _pointedObj.get();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Standard dereference operator
|
* @brief Standard dereference operator
|
||||||
* @return underlined interface with disabled Release method
|
* @return underlined interface with disabled Release method
|
||||||
*/
|
*/
|
||||||
details::NoReleaseOn<T>* operator*() const noexcept {
|
const T* operator*() const noexcept {
|
||||||
return this->operator->();
|
return this->operator->();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -196,6 +150,62 @@ public:
|
|||||||
}
|
}
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
|
/**
|
||||||
|
* @brief Implements load of object from library if Release method is presented
|
||||||
|
*/
|
||||||
|
void Load(std::true_type) {
|
||||||
|
try {
|
||||||
|
void* create = nullptr;
|
||||||
|
try {
|
||||||
|
create = _so_loader->get_symbol((SOCreatorTrait<T>::name + std::string("Shared")).c_str());
|
||||||
|
} catch (const details::InferenceEngineException& ex) {
|
||||||
|
if ((ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) == NOT_FOUND) {
|
||||||
|
create = nullptr;
|
||||||
|
} else {
|
||||||
|
throw;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (create == nullptr) {
|
||||||
|
create = _so_loader->get_symbol(SOCreatorTrait<T>::name);
|
||||||
|
using CreateF = StatusCode(T*&, ResponseDesc*);
|
||||||
|
T* object = nullptr;
|
||||||
|
ResponseDesc desc;
|
||||||
|
StatusCode sts = reinterpret_cast<CreateF*>(create)(object, &desc);
|
||||||
|
if (sts != OK) {
|
||||||
|
THROW_IE_EXCEPTION << as_status << sts << desc.msg;
|
||||||
|
}
|
||||||
|
IE_SUPPRESS_DEPRECATED_START
|
||||||
|
_pointedObj = std::shared_ptr<T>(object, [] (T* ptr){ptr->Release();});
|
||||||
|
IE_SUPPRESS_DEPRECATED_END
|
||||||
|
} else {
|
||||||
|
using CreateF = void(std::shared_ptr<T>&);
|
||||||
|
reinterpret_cast<CreateF*>(create)(_pointedObj);
|
||||||
|
}
|
||||||
|
} catch (const InferenceEngineException& ex) {
|
||||||
|
THROW_IE_EXCEPTION << as_status << (ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) << ex.what();
|
||||||
|
} catch (const std::exception& ex) {
|
||||||
|
THROW_IE_EXCEPTION << as_status << GENERAL_ERROR << ex.what();
|
||||||
|
} catch(...) {
|
||||||
|
THROW_IE_EXCEPTION << as_status << UNEXPECTED << "[ UNEXPECTED ] ";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief Implements load of object from library
|
||||||
|
*/
|
||||||
|
void Load(std::false_type) {
|
||||||
|
try {
|
||||||
|
using CreateF = void(std::shared_ptr<T>&);
|
||||||
|
reinterpret_cast<CreateF*>(_so_loader->get_symbol(SOCreatorTrait<T>::name))(_pointedObj);
|
||||||
|
} catch (const InferenceEngineException& ex) {
|
||||||
|
THROW_IE_EXCEPTION << as_status << (ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) << ex.what();
|
||||||
|
} catch (const std::exception& ex) {
|
||||||
|
THROW_IE_EXCEPTION << as_status << GENERAL_ERROR << ex.what();
|
||||||
|
} catch(...) {
|
||||||
|
THROW_IE_EXCEPTION << as_status << UNEXPECTED << "[ UNEXPECTED ] ";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Gets a smart pointer to the DLL
|
* @brief Gets a smart pointer to the DLL
|
||||||
*/
|
*/
|
||||||
@ -206,19 +216,5 @@ protected:
|
|||||||
*/
|
*/
|
||||||
std::shared_ptr<T> _pointedObj;
|
std::shared_ptr<T> _pointedObj;
|
||||||
};
|
};
|
||||||
|
|
||||||
} // namespace details
|
} // namespace details
|
||||||
|
|
||||||
/**
|
|
||||||
* @brief Creates a special shared_pointer wrapper for the given type from a specific shared module
|
|
||||||
* @tparam T An type of object SOPointer can hold
|
|
||||||
* @param name Name of the shared library file
|
|
||||||
* @return A created object
|
|
||||||
*/
|
|
||||||
template <class T>
|
|
||||||
inline std::shared_ptr<T> make_so_pointer(const std::string & name) = delete;
|
|
||||||
|
|
||||||
template <class T>
|
|
||||||
inline std::shared_ptr<T> make_so_pointer(const std::wstring & name) = delete;
|
|
||||||
|
|
||||||
} // namespace InferenceEngine
|
} // namespace InferenceEngine
|
||||||
|
@ -10,7 +10,7 @@
|
|||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include "ie_api.h"
|
#include "ie_api.h"
|
||||||
#include "details/ie_irelease.hpp"
|
#include <memory>
|
||||||
|
|
||||||
namespace InferenceEngine {
|
namespace InferenceEngine {
|
||||||
|
|
||||||
@ -26,7 +26,7 @@ enum LockOp {
|
|||||||
* @interface IAllocator
|
* @interface IAllocator
|
||||||
* @brief Allocator concept to be used for memory management and is used as part of the Blob.
|
* @brief Allocator concept to be used for memory management and is used as part of the Blob.
|
||||||
*/
|
*/
|
||||||
class IAllocator : public details::IRelease {
|
class IAllocator : public std::enable_shared_from_this<IAllocator> {
|
||||||
public:
|
public:
|
||||||
/**
|
/**
|
||||||
* @brief Maps handle to heap memory accessible by any memory manipulation routines.
|
* @brief Maps handle to heap memory accessible by any memory manipulation routines.
|
||||||
@ -60,10 +60,7 @@ public:
|
|||||||
virtual bool free(void* handle) noexcept = 0;
|
virtual bool free(void* handle) noexcept = 0;
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
/**
|
~IAllocator() = default;
|
||||||
* @brief Disables the ability of deleting the object without release.
|
|
||||||
*/
|
|
||||||
~IAllocator() override = default;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -71,6 +68,6 @@ protected:
|
|||||||
*
|
*
|
||||||
* @return The Inference Engine IAllocator* instance
|
* @return The Inference Engine IAllocator* instance
|
||||||
*/
|
*/
|
||||||
INFERENCE_ENGINE_API(InferenceEngine::IAllocator*) CreateDefaultAllocator() noexcept;
|
INFERENCE_ENGINE_API_CPP(std::shared_ptr<InferenceEngine::IAllocator>) CreateDefaultAllocator() noexcept;
|
||||||
|
|
||||||
} // namespace InferenceEngine
|
} // namespace InferenceEngine
|
||||||
|
@ -779,7 +779,7 @@ protected:
|
|||||||
const std::shared_ptr<IAllocator>& getAllocator() const noexcept override {
|
const std::shared_ptr<IAllocator>& getAllocator() const noexcept override {
|
||||||
// in case when constructor without allocator was used
|
// in case when constructor without allocator was used
|
||||||
if (!_allocator) {
|
if (!_allocator) {
|
||||||
_allocator = shared_from_irelease(CreateDefaultAllocator());
|
_allocator = CreateDefaultAllocator();
|
||||||
}
|
}
|
||||||
|
|
||||||
return _allocator;
|
return _allocator;
|
||||||
|
@ -38,7 +38,7 @@ public:
|
|||||||
/**
|
/**
|
||||||
* @brief This class is a C++ helper to work with objects created using extensions.
|
* @brief This class is a C++ helper to work with objects created using extensions.
|
||||||
*/
|
*/
|
||||||
class INFERENCE_ENGINE_API_CLASS(Extension) : public IExtension {
|
class INFERENCE_ENGINE_API_CLASS(Extension) final : public IExtension {
|
||||||
public:
|
public:
|
||||||
/**
|
/**
|
||||||
* @brief Loads extension from a shared library
|
* @brief Loads extension from a shared library
|
||||||
@ -65,11 +65,6 @@ public:
|
|||||||
actual->Unload();
|
actual->Unload();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @brief Does nothing since destruction is done via the regular mechanism
|
|
||||||
*/
|
|
||||||
void Release() noexcept override {}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Returns operation sets
|
* @brief Returns operation sets
|
||||||
* This method throws an exception if it was not implemented
|
* This method throws an exception if it was not implemented
|
||||||
@ -106,23 +101,29 @@ protected:
|
|||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Creates a special shared_pointer wrapper for the given type from a specific shared module
|
* @brief Creates extension using deprecated API
|
||||||
*
|
* @tparam T extension type
|
||||||
* @param name A std::string name of the shared library file
|
* @param name extension library name
|
||||||
* @return shared_pointer A wrapper for the given type from a specific shared module
|
* @return shared pointer to extension
|
||||||
*/
|
*/
|
||||||
template <>
|
template<typename T = IExtension>
|
||||||
inline std::shared_ptr<IExtension> make_so_pointer(const std::string& name) {
|
INFERENCE_ENGINE_DEPRECATED("Use std::make_shared<Extension>")
|
||||||
|
inline std::shared_ptr<T> make_so_pointer(const std::string& name) {
|
||||||
return std::make_shared<Extension>(name);
|
return std::make_shared<Extension>(name);
|
||||||
}
|
}
|
||||||
|
|
||||||
#ifdef ENABLE_UNICODE_PATH_SUPPORT
|
#ifdef ENABLE_UNICODE_PATH_SUPPORT
|
||||||
|
|
||||||
template <>
|
/**
|
||||||
|
* @brief Creates extension using deprecated API
|
||||||
|
* @param name extension library name
|
||||||
|
* @return shared pointer to extension
|
||||||
|
*/
|
||||||
|
template<typename T = IExtension>
|
||||||
|
INFERENCE_ENGINE_DEPRECATED("Use std::make_shared<Extension>")
|
||||||
inline std::shared_ptr<IExtension> make_so_pointer(const std::wstring& name) {
|
inline std::shared_ptr<IExtension> make_so_pointer(const std::wstring& name) {
|
||||||
return std::make_shared<Extension>(name);
|
return std::make_shared<Extension>(name);
|
||||||
}
|
}
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
} // namespace InferenceEngine
|
} // namespace InferenceEngine
|
||||||
|
@ -17,7 +17,6 @@
|
|||||||
#include "ie_common.h"
|
#include "ie_common.h"
|
||||||
#include "ie_data.h"
|
#include "ie_data.h"
|
||||||
#include "ie_input_info.hpp"
|
#include "ie_input_info.hpp"
|
||||||
#include "details/ie_irelease.hpp"
|
|
||||||
|
|
||||||
#if defined IMPLEMENT_INFERENCE_ENGINE_API || defined IMPLEMENT_INFERENCE_ENGINE_PLUGIN || 1
|
#if defined IMPLEMENT_INFERENCE_ENGINE_API || defined IMPLEMENT_INFERENCE_ENGINE_PLUGIN || 1
|
||||||
# define INFERENCE_ENGINE_ICNNNETWORK_CLASS(...) INFERENCE_ENGINE_API_CLASS(__VA_ARGS__)
|
# define INFERENCE_ENGINE_ICNNNETWORK_CLASS(...) INFERENCE_ENGINE_API_CLASS(__VA_ARGS__)
|
||||||
@ -45,7 +44,7 @@ using OutputsDataMap = std::map<std::string, DataPtr>;
|
|||||||
* @interface ICNNNetwork
|
* @interface ICNNNetwork
|
||||||
* @brief This is the main interface to describe the NN topology
|
* @brief This is the main interface to describe the NN topology
|
||||||
*/
|
*/
|
||||||
class INFERENCE_ENGINE_ICNNNETWORK_CLASS(ICNNNetwork) : public details::IRelease {
|
class INFERENCE_ENGINE_ICNNNETWORK_CLASS(ICNNNetwork): public std::enable_shared_from_this<ICNNNetwork> {
|
||||||
public:
|
public:
|
||||||
/**
|
/**
|
||||||
* @brief A shared pointer to a ICNNNetwork interface
|
* @brief A shared pointer to a ICNNNetwork interface
|
||||||
@ -200,9 +199,10 @@ public:
|
|||||||
return NOT_IMPLEMENTED;
|
return NOT_IMPLEMENTED;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
protected:
|
||||||
/**
|
/**
|
||||||
* @brief A virtual destructor.
|
* @brief Default destructor.
|
||||||
*/
|
*/
|
||||||
virtual ~ICNNNetwork();
|
~ICNNNetwork() = default;
|
||||||
};
|
};
|
||||||
} // namespace InferenceEngine
|
} // namespace InferenceEngine
|
||||||
|
@ -32,7 +32,7 @@ using ConstOutputsDataMap = std::map<std::string, CDataPtr>;
|
|||||||
/**
|
/**
|
||||||
* @brief This is an interface of an executable network
|
* @brief This is an interface of an executable network
|
||||||
*/
|
*/
|
||||||
class IExecutableNetwork : public details::IRelease {
|
class IExecutableNetwork : public std::enable_shared_from_this<IExecutableNetwork> {
|
||||||
public:
|
public:
|
||||||
/**
|
/**
|
||||||
* @brief A smart pointer to the current IExecutableNetwork object
|
* @brief A smart pointer to the current IExecutableNetwork object
|
||||||
@ -173,6 +173,9 @@ public:
|
|||||||
* @return code of the operation. InferenceEngine::OK if succeeded
|
* @return code of the operation. InferenceEngine::OK if succeeded
|
||||||
*/
|
*/
|
||||||
virtual StatusCode GetContext(RemoteContext::Ptr& pContext, ResponseDesc* resp) const noexcept = 0;
|
virtual StatusCode GetContext(RemoteContext::Ptr& pContext, ResponseDesc* resp) const noexcept = 0;
|
||||||
|
|
||||||
|
protected:
|
||||||
|
~IExecutableNetwork() = default;
|
||||||
};
|
};
|
||||||
|
|
||||||
} // namespace InferenceEngine
|
} // namespace InferenceEngine
|
||||||
|
@ -25,7 +25,6 @@
|
|||||||
* @def INFERENCE_EXTENSION_API(TYPE)
|
* @def INFERENCE_EXTENSION_API(TYPE)
|
||||||
* @brief Defines Inference Engine Extension API method
|
* @brief Defines Inference Engine Extension API method
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#if defined(_WIN32) && defined(IMPLEMENT_INFERENCE_EXTENSION_API)
|
#if defined(_WIN32) && defined(IMPLEMENT_INFERENCE_EXTENSION_API)
|
||||||
#define INFERENCE_EXTENSION_API(TYPE) extern "C" __declspec(dllexport) TYPE
|
#define INFERENCE_EXTENSION_API(TYPE) extern "C" __declspec(dllexport) TYPE
|
||||||
#else
|
#else
|
||||||
@ -146,7 +145,7 @@ public:
|
|||||||
/**
|
/**
|
||||||
* @brief This class is the main extension interface
|
* @brief This class is the main extension interface
|
||||||
*/
|
*/
|
||||||
class INFERENCE_ENGINE_API_CLASS(IExtension) : public InferenceEngine::details::IRelease {
|
class INFERENCE_ENGINE_API_CLASS(IExtension) : public std::enable_shared_from_this<IExtension> {
|
||||||
public:
|
public:
|
||||||
/**
|
/**
|
||||||
* @brief Returns operation sets
|
* @brief Returns operation sets
|
||||||
@ -187,6 +186,17 @@ public:
|
|||||||
* @param versionInfo Pointer to version info, will be set by plugin
|
* @param versionInfo Pointer to version info, will be set by plugin
|
||||||
*/
|
*/
|
||||||
virtual void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept = 0;
|
virtual void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept = 0;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief Implements deprecated API
|
||||||
|
*/
|
||||||
|
INFERENCE_ENGINE_DEPRECATED("Do not override or use this method. Use IE_DEFINE_EXTENSION_CREATE_FUNCTION to export extension")
|
||||||
|
virtual void Release() noexcept {
|
||||||
|
delete this;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected:
|
||||||
|
virtual ~IExtension() = default;
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -198,9 +208,31 @@ using IExtensionPtr = std::shared_ptr<IExtension>;
|
|||||||
* @brief Creates the default instance of the extension
|
* @brief Creates the default instance of the extension
|
||||||
*
|
*
|
||||||
* @param ext Extension interface
|
* @param ext Extension interface
|
||||||
* @param resp Response description
|
|
||||||
* @return Status code
|
|
||||||
*/
|
*/
|
||||||
INFERENCE_EXTENSION_API(StatusCode) CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept;
|
INFERENCE_EXTENSION_API(void) CreateExtensionShared(IExtensionPtr& ext);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @note: Deprecated API
|
||||||
|
* @brief Creates the default instance of the extension
|
||||||
|
* @param ext Extension interface
|
||||||
|
* @param resp Responce
|
||||||
|
* @return InferenceEngine::OK if extension is constructed and InferenceEngine::GENERAL_ERROR otherwise
|
||||||
|
*/
|
||||||
|
#if defined(_WIN32)
|
||||||
|
INFERENCE_ENGINE_DEPRECATED("Use IE_DEFINE_EXTENSION_CREATE_FUNCTION macro")
|
||||||
|
INFERENCE_EXTENSION_API(StatusCode)
|
||||||
|
CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept;
|
||||||
|
#else
|
||||||
|
INFERENCE_EXTENSION_API(StatusCode)
|
||||||
|
CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept INFERENCE_ENGINE_DEPRECATED("Use IE_DEFINE_EXTENSION_CREATE_FUNCTION macro");
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @def IE_DEFINE_EXTENSION_CREATE_FUNCTION
|
||||||
|
* @brief Generates extension creation function
|
||||||
|
*/
|
||||||
|
#define IE_DEFINE_EXTENSION_CREATE_FUNCTION(ExtensionType) \
|
||||||
|
INFERENCE_EXTENSION_API(void) InferenceEngine::CreateExtensionShared(std::shared_ptr<InferenceEngine::IExtension>& ext) { \
|
||||||
|
ext = std::make_shared<Extension>(); \
|
||||||
|
}
|
||||||
} // namespace InferenceEngine
|
} // namespace InferenceEngine
|
||||||
|
@ -18,7 +18,6 @@
|
|||||||
#include "ie_common.h"
|
#include "ie_common.h"
|
||||||
#include "ie_preprocess.hpp"
|
#include "ie_preprocess.hpp"
|
||||||
#include "ie_imemory_state.hpp"
|
#include "ie_imemory_state.hpp"
|
||||||
#include "details/ie_irelease.hpp"
|
|
||||||
|
|
||||||
namespace InferenceEngine {
|
namespace InferenceEngine {
|
||||||
|
|
||||||
@ -26,7 +25,7 @@ namespace InferenceEngine {
|
|||||||
* @brief This is an interface of asynchronous infer request
|
* @brief This is an interface of asynchronous infer request
|
||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
class IInferRequest : public details::IRelease {
|
class IInferRequest : public std::enable_shared_from_this<IInferRequest> {
|
||||||
public:
|
public:
|
||||||
/**
|
/**
|
||||||
* @enum WaitMode
|
* @enum WaitMode
|
||||||
@ -198,7 +197,9 @@ public:
|
|||||||
* given index
|
* given index
|
||||||
*/
|
*/
|
||||||
virtual StatusCode QueryState(IVariableState::Ptr& pState, size_t idx, ResponseDesc* resp) noexcept = 0;
|
virtual StatusCode QueryState(IVariableState::Ptr& pState, size_t idx, ResponseDesc* resp) noexcept = 0;
|
||||||
IE_SUPPRESS_DEPRECATED_END
|
|
||||||
|
protected:
|
||||||
|
~IInferRequest() = default;
|
||||||
};
|
};
|
||||||
|
|
||||||
} // namespace InferenceEngine
|
} // namespace InferenceEngine
|
@ -165,7 +165,7 @@ int main(int argc, char *argv[]) {
|
|||||||
Core ie;
|
Core ie;
|
||||||
if (FLAGS_d.find("CPU") != std::string::npos && !FLAGS_l.empty()) {
|
if (FLAGS_d.find("CPU") != std::string::npos && !FLAGS_l.empty()) {
|
||||||
// CPU (MKLDNN) extensions is loaded as a shared library and passed as a pointer to base extension
|
// CPU (MKLDNN) extensions is loaded as a shared library and passed as a pointer to base extension
|
||||||
const auto extension_ptr = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(FLAGS_l);
|
const auto extension_ptr = std::make_shared<InferenceEngine::Extension>(FLAGS_l);
|
||||||
ie.AddExtension(extension_ptr);
|
ie.AddExtension(extension_ptr);
|
||||||
slog::info << "CPU (MKLDNN) extensions is loaded " << FLAGS_l << slog::endl;
|
slog::info << "CPU (MKLDNN) extensions is loaded " << FLAGS_l << slog::endl;
|
||||||
}
|
}
|
||||||
|
@ -78,7 +78,7 @@ int main(int argc, char *argv[]) {
|
|||||||
|
|
||||||
if (!FLAGS_l.empty()) {
|
if (!FLAGS_l.empty()) {
|
||||||
// CPU(MKLDNN) extensions are loaded as a shared library and passed as a pointer to base extension
|
// CPU(MKLDNN) extensions are loaded as a shared library and passed as a pointer to base extension
|
||||||
IExtensionPtr extension_ptr = make_so_pointer<IExtension>(FLAGS_l);
|
IExtensionPtr extension_ptr = std::make_shared<Extension>(FLAGS_l);
|
||||||
ie.AddExtension(extension_ptr);
|
ie.AddExtension(extension_ptr);
|
||||||
slog::info << "CPU Extension loaded: " << FLAGS_l << slog::endl;
|
slog::info << "CPU Extension loaded: " << FLAGS_l << slog::endl;
|
||||||
}
|
}
|
||||||
|
@ -43,10 +43,6 @@ public:
|
|||||||
return _width * _height * 1;
|
return _width * _height * 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::shared_ptr<unsigned char> getData(size_t width, size_t height) override {
|
std::shared_ptr<unsigned char> getData(size_t width, size_t height) override {
|
||||||
if ((width * height != 0) && (_width * _height != width * height)) {
|
if ((width * height != 0) && (_width * _height != width * height)) {
|
||||||
std::cout << "[ WARNING ] Image won't be resized! Please use OpenCV.\n";
|
std::cout << "[ WARNING ] Image won't be resized! Please use OpenCV.\n";
|
||||||
|
@ -60,10 +60,6 @@ public:
|
|||||||
return _width * _height * 3;
|
return _width * _height * 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::shared_ptr<unsigned char> getData(size_t width, size_t height) override {
|
std::shared_ptr<unsigned char> getData(size_t width, size_t height) override {
|
||||||
if ((width * height != 0) && (_width * _height != width * height)) {
|
if ((width * height != 0) && (_width * _height != width * height)) {
|
||||||
std::cout << "[ WARNING ] Image won't be resized! Please use OpenCV.\n";
|
std::cout << "[ WARNING ] Image won't be resized! Please use OpenCV.\n";
|
||||||
|
@ -23,7 +23,7 @@ Reader *Registry::CreateReader(const char *filename) {
|
|||||||
for (auto maker : _data) {
|
for (auto maker : _data) {
|
||||||
Reader *ol = maker(filename);
|
Reader *ol = maker(filename);
|
||||||
if (ol != nullptr && ol->size() != 0) return ol;
|
if (ol != nullptr && ol->size() != 0) return ol;
|
||||||
if (ol != nullptr) ol->Release();
|
if (ol != nullptr) delete ol;
|
||||||
}
|
}
|
||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
|
@ -45,6 +45,8 @@ protected:
|
|||||||
std::shared_ptr<unsigned char> _data;
|
std::shared_ptr<unsigned char> _data;
|
||||||
|
|
||||||
public:
|
public:
|
||||||
|
virtual ~Reader() = default;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* \brief Get width
|
* \brief Get width
|
||||||
* @return width
|
* @return width
|
||||||
@ -69,8 +71,6 @@ public:
|
|||||||
* @return size
|
* @return size
|
||||||
*/
|
*/
|
||||||
virtual size_t size() const = 0;
|
virtual size_t size() const = 0;
|
||||||
|
|
||||||
virtual void Release() noexcept = 0;
|
|
||||||
};
|
};
|
||||||
} // namespace FormatReader
|
} // namespace FormatReader
|
||||||
|
|
||||||
|
@ -15,10 +15,7 @@
|
|||||||
namespace FormatReader {
|
namespace FormatReader {
|
||||||
class ReaderPtr {
|
class ReaderPtr {
|
||||||
public:
|
public:
|
||||||
explicit ReaderPtr(const char *imageName) : reader(CreateFormatReader(imageName),
|
explicit ReaderPtr(const char *imageName) : reader(CreateFormatReader(imageName)) {}
|
||||||
[](Reader *p) {
|
|
||||||
p->Release();
|
|
||||||
}) {}
|
|
||||||
/**
|
/**
|
||||||
* @brief dereference operator overload
|
* @brief dereference operator overload
|
||||||
* @return Reader
|
* @return Reader
|
||||||
@ -40,6 +37,6 @@ public:
|
|||||||
}
|
}
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
std::unique_ptr<Reader, std::function<void(Reader *)>> reader;
|
std::unique_ptr<Reader> reader;
|
||||||
};
|
};
|
||||||
} // namespace FormatReader
|
} // namespace FormatReader
|
||||||
|
@ -46,10 +46,6 @@ public:
|
|||||||
return _size;
|
return _size;
|
||||||
}
|
}
|
||||||
|
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::shared_ptr<unsigned char> getData(size_t width, size_t height) override;
|
std::shared_ptr<unsigned char> getData(size_t width, size_t height) override;
|
||||||
};
|
};
|
||||||
} // namespace FormatReader
|
} // namespace FormatReader
|
||||||
|
@ -131,8 +131,6 @@ public:
|
|||||||
|
|
||||||
void Unload() noexcept override {}
|
void Unload() noexcept override {}
|
||||||
|
|
||||||
void Release() noexcept override {}
|
|
||||||
|
|
||||||
std::vector<std::string> getImplTypes(const std::shared_ptr<ngraph::Node>& node) override {
|
std::vector<std::string> getImplTypes(const std::shared_ptr<ngraph::Node>& node) override {
|
||||||
if (impls.find(node->description()) == impls.end())
|
if (impls.find(node->description()) == impls.end())
|
||||||
return {};
|
return {};
|
||||||
|
@ -87,7 +87,7 @@ int main(int argc, char *argv[]) {
|
|||||||
|
|
||||||
if (!FLAGS_l.empty()) {
|
if (!FLAGS_l.empty()) {
|
||||||
// CPU(MKLDNN) extensions are loaded as a shared library and passed as a pointer to base extension
|
// CPU(MKLDNN) extensions are loaded as a shared library and passed as a pointer to base extension
|
||||||
IExtensionPtr extension_ptr = make_so_pointer<IExtension>(FLAGS_l);
|
IExtensionPtr extension_ptr = std::make_shared<Extension>(FLAGS_l);
|
||||||
ie.AddExtension(extension_ptr);
|
ie.AddExtension(extension_ptr);
|
||||||
slog::info << "CPU Extension loaded: " << FLAGS_l << slog::endl;
|
slog::info << "CPU Extension loaded: " << FLAGS_l << slog::endl;
|
||||||
}
|
}
|
||||||
|
@ -70,7 +70,7 @@ int main(int argc, char *argv[]) {
|
|||||||
|
|
||||||
if (!FLAGS_l.empty()) {
|
if (!FLAGS_l.empty()) {
|
||||||
// CPU(MKLDNN) extensions are loaded as a shared library and passed as a pointer to base extension
|
// CPU(MKLDNN) extensions are loaded as a shared library and passed as a pointer to base extension
|
||||||
IExtensionPtr extension_ptr = make_so_pointer<IExtension>(FLAGS_l);
|
IExtensionPtr extension_ptr = std::make_shared<Extension>(FLAGS_l);
|
||||||
ie.AddExtension(extension_ptr);
|
ie.AddExtension(extension_ptr);
|
||||||
slog::info << "CPU Extension loaded: " << FLAGS_l << slog::endl;
|
slog::info << "CPU Extension loaded: " << FLAGS_l << slog::endl;
|
||||||
}
|
}
|
||||||
|
@ -151,7 +151,7 @@ void CLDNNRemoteBlobImpl::allocate() noexcept {
|
|||||||
|
|
||||||
const std::shared_ptr<IAllocator>& CLDNNRemoteBlobImpl::getAllocator() const noexcept {
|
const std::shared_ptr<IAllocator>& CLDNNRemoteBlobImpl::getAllocator() const noexcept {
|
||||||
if (!_allocator) {
|
if (!_allocator) {
|
||||||
_allocator = shared_from_irelease(reinterpret_cast<IAllocator*>(&m_allocator));
|
_allocator = std::shared_ptr<IAllocator>(&m_allocator, [] (IAllocator*) {});
|
||||||
}
|
}
|
||||||
return _allocator;
|
return _allocator;
|
||||||
};
|
};
|
||||||
|
@ -198,8 +198,6 @@ public:
|
|||||||
* @return false if handle cannot be released, otherwise - true.
|
* @return false if handle cannot be released, otherwise - true.
|
||||||
*/
|
*/
|
||||||
bool free(void* handle) noexcept override { return true; }
|
bool free(void* handle) noexcept override { return true; }
|
||||||
|
|
||||||
void Release() noexcept override {}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
class CLDNNExecutionContextImpl : public InferenceEngine::gpu::details::param_map_obj_getter {
|
class CLDNNExecutionContextImpl : public InferenceEngine::gpu::details::param_map_obj_getter {
|
||||||
|
@ -36,12 +36,11 @@ namespace details {
|
|||||||
/**
|
/**
|
||||||
* @brief Ngraph-based implementation of the ICNNNetwork interface.
|
* @brief Ngraph-based implementation of the ICNNNetwork interface.
|
||||||
*/
|
*/
|
||||||
class INFERENCE_ENGINE_API_CLASS(CNNNetworkNGraphImpl): public ICNNNetwork {
|
class INFERENCE_ENGINE_API_CLASS(CNNNetworkNGraphImpl) final : public ICNNNetwork {
|
||||||
public:
|
public:
|
||||||
CNNNetworkNGraphImpl(const std::shared_ptr<::ngraph::Function>& nGraph,
|
CNNNetworkNGraphImpl(const std::shared_ptr<::ngraph::Function>& nGraph,
|
||||||
const std::vector<IExtensionPtr>& exts = {});
|
const std::vector<IExtensionPtr>& exts = {});
|
||||||
CNNNetworkNGraphImpl(const CNNNetwork& nGraph);
|
CNNNetworkNGraphImpl(const CNNNetwork& nGraph);
|
||||||
~CNNNetworkNGraphImpl() override = default;
|
|
||||||
|
|
||||||
void getOutputsInfo(std::map<std::string, DataPtr>& out) const noexcept override;
|
void getOutputsInfo(std::map<std::string, DataPtr>& out) const noexcept override;
|
||||||
|
|
||||||
@ -63,10 +62,6 @@ public:
|
|||||||
|
|
||||||
void addOutput(const ::ngraph::Output<::ngraph::Node> & dataName);
|
void addOutput(const ::ngraph::Output<::ngraph::Node> & dataName);
|
||||||
|
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::shared_ptr<const ::ngraph::Function> getFunction() const noexcept override {
|
std::shared_ptr<const ::ngraph::Function> getFunction() const noexcept override {
|
||||||
return _ngraph_function;
|
return _ngraph_function;
|
||||||
}
|
}
|
||||||
@ -111,16 +106,5 @@ private:
|
|||||||
void reshape();
|
void reshape();
|
||||||
void reshape(const std::map<std::string, std::vector<size_t>>& inputShapes);
|
void reshape(const std::map<std::string, std::vector<size_t>>& inputShapes);
|
||||||
};
|
};
|
||||||
|
|
||||||
class TINGraphBody : public CNNNetworkNGraphImpl {
|
|
||||||
public:
|
|
||||||
explicit TINGraphBody(const std::shared_ptr<::ngraph::Function>& func): CNNNetworkNGraphImpl(func) {}
|
|
||||||
|
|
||||||
protected:
|
|
||||||
std::shared_ptr<::ngraph::Function> cloneFunction(bool constFolding) const override {
|
|
||||||
return _ngraph_function;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
} // namespace details
|
} // namespace details
|
||||||
} // namespace InferenceEngine
|
} // namespace InferenceEngine
|
||||||
|
@ -10,8 +10,6 @@
|
|||||||
|
|
||||||
namespace InferenceEngine {
|
namespace InferenceEngine {
|
||||||
|
|
||||||
ICNNNetwork::~ICNNNetwork() {}
|
|
||||||
|
|
||||||
CNNNetwork::CNNNetwork() :
|
CNNNetwork::CNNNetwork() :
|
||||||
network(), actual(), output() {
|
network(), actual(), output() {
|
||||||
}
|
}
|
||||||
|
@ -364,7 +364,7 @@ public:
|
|||||||
|
|
||||||
allowNotImplemented([&]() {
|
allowNotImplemented([&]() {
|
||||||
for (auto&& extensionLocation : desc.listOfExtentions) {
|
for (auto&& extensionLocation : desc.listOfExtentions) {
|
||||||
plugin.AddExtension(make_so_pointer<IExtension>(extensionLocation));
|
plugin.AddExtension(std::make_shared<Extension>(extensionLocation));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -738,11 +738,10 @@ std::vector<std::string> Core::GetAvailableDevices() const {
|
|||||||
|
|
||||||
for (auto&& deviceName : _impl->GetListOfDevicesInRegistry()) {
|
for (auto&& deviceName : _impl->GetListOfDevicesInRegistry()) {
|
||||||
std::vector<std::string> devicesIDs;
|
std::vector<std::string> devicesIDs;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
Parameter p = GetMetric(deviceName, propertyName);
|
Parameter p = GetMetric(deviceName, propertyName);
|
||||||
devicesIDs = p.as<std::vector<std::string>>();
|
devicesIDs = p.as<std::vector<std::string>>();
|
||||||
} catch (details::InferenceEngineException&) {
|
} catch (details::InferenceEngineException& e) {
|
||||||
// plugin is not created by e.g. invalid env
|
// plugin is not created by e.g. invalid env
|
||||||
} catch (const std::exception& ex) {
|
} catch (const std::exception& ex) {
|
||||||
THROW_IE_EXCEPTION << "An exception is thrown while trying to create the " << deviceName
|
THROW_IE_EXCEPTION << "An exception is thrown while trying to create the " << deviceName
|
||||||
|
@ -62,10 +62,6 @@ class Reader: public IReader {
|
|||||||
return const_cast<Reader*>(this)->getReaderPtr();
|
return const_cast<Reader*>(this)->getReaderPtr();
|
||||||
}
|
}
|
||||||
|
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
}
|
|
||||||
|
|
||||||
public:
|
public:
|
||||||
using Ptr = std::shared_ptr<Reader>;
|
using Ptr = std::shared_ptr<Reader>;
|
||||||
Reader(const std::string& name, const std::string location): name(name), location(location) {}
|
Reader(const std::string& name, const std::string location): name(name), location(location) {}
|
||||||
|
@ -45,7 +45,8 @@ public:
|
|||||||
|
|
||||||
procAddr = dlsym(shared_object, symbolName);
|
procAddr = dlsym(shared_object, symbolName);
|
||||||
if (procAddr == nullptr)
|
if (procAddr == nullptr)
|
||||||
THROW_IE_EXCEPTION << "dlSym cannot locate method '" << symbolName << "': " << dlerror();
|
THROW_IE_EXCEPTION << details::as_status << NOT_FOUND
|
||||||
|
<< "dlSym cannot locate method '" << symbolName << "': " << dlerror();
|
||||||
return procAddr;
|
return procAddr;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -60,8 +61,7 @@ SharedObjectLoader::SharedObjectLoader(const char * pluginName) {
|
|||||||
_impl.reset(new Impl(pluginName));
|
_impl.reset(new Impl(pluginName));
|
||||||
}
|
}
|
||||||
|
|
||||||
SharedObjectLoader::~SharedObjectLoader() noexcept(false) {
|
SharedObjectLoader::~SharedObjectLoader() noexcept(false) {}
|
||||||
}
|
|
||||||
|
|
||||||
void* SharedObjectLoader::get_symbol(const char* symbolName) const {
|
void* SharedObjectLoader::get_symbol(const char* symbolName) const {
|
||||||
return _impl->get_symbol(symbolName);
|
return _impl->get_symbol(symbolName);
|
||||||
|
@ -247,7 +247,8 @@ class SharedObjectLoader::Impl {
|
|||||||
}
|
}
|
||||||
auto procAddr = reinterpret_cast<void*>(GetProcAddress(shared_object, symbolName));
|
auto procAddr = reinterpret_cast<void*>(GetProcAddress(shared_object, symbolName));
|
||||||
if (procAddr == nullptr)
|
if (procAddr == nullptr)
|
||||||
THROW_IE_EXCEPTION << "GetProcAddress cannot locate method '" << symbolName << "': " << GetLastError();
|
THROW_IE_EXCEPTION << details::as_status << NOT_FOUND
|
||||||
|
<< "GetProcAddress cannot locate method '" << symbolName << "': " << GetLastError();
|
||||||
|
|
||||||
return procAddr;
|
return procAddr;
|
||||||
}
|
}
|
||||||
|
@ -6,9 +6,9 @@
|
|||||||
|
|
||||||
namespace InferenceEngine {
|
namespace InferenceEngine {
|
||||||
|
|
||||||
IAllocator* CreateDefaultAllocator() noexcept {
|
INFERENCE_ENGINE_API_CPP(std::shared_ptr<IAllocator>) CreateDefaultAllocator() noexcept {
|
||||||
try {
|
try {
|
||||||
return new SystemMemoryAllocator();
|
return std::make_shared<SystemMemoryAllocator>();
|
||||||
} catch (...) {
|
} catch (...) {
|
||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
|
@ -8,12 +8,9 @@
|
|||||||
|
|
||||||
#include "ie_allocator.hpp"
|
#include "ie_allocator.hpp"
|
||||||
|
|
||||||
|
namespace InferenceEngine {
|
||||||
class SystemMemoryAllocator : public InferenceEngine::IAllocator {
|
class SystemMemoryAllocator : public InferenceEngine::IAllocator {
|
||||||
public:
|
public:
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
}
|
|
||||||
|
|
||||||
void* lock(void* handle, InferenceEngine::LockOp = InferenceEngine::LOCK_FOR_WRITE) noexcept override {
|
void* lock(void* handle, InferenceEngine::LockOp = InferenceEngine::LOCK_FOR_WRITE) noexcept override {
|
||||||
return handle;
|
return handle;
|
||||||
}
|
}
|
||||||
@ -37,3 +34,5 @@ public:
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
} // namespace InferenceEngine
|
@ -25,13 +25,12 @@ namespace details {
|
|||||||
|
|
||||||
IE_SUPPRESS_DEPRECATED_START
|
IE_SUPPRESS_DEPRECATED_START
|
||||||
|
|
||||||
class INFERENCE_ENGINE_API_CLASS(CNNNetworkImpl): public ICNNNetwork,
|
class INFERENCE_ENGINE_API_CLASS(CNNNetworkImpl) final : public ICNNNetwork {
|
||||||
public std::enable_shared_from_this<ICNNNetwork> {
|
|
||||||
public:
|
public:
|
||||||
CNNNetworkImpl();
|
CNNNetworkImpl();
|
||||||
explicit CNNNetworkImpl(const ICNNNetwork & ngraphImpl);
|
explicit CNNNetworkImpl(const ICNNNetwork & ngraphImpl);
|
||||||
explicit CNNNetworkImpl(const CNNNetwork & ngraphImpl);
|
explicit CNNNetworkImpl(const CNNNetwork & ngraphImpl);
|
||||||
~CNNNetworkImpl() override;
|
~CNNNetworkImpl();
|
||||||
|
|
||||||
std::shared_ptr<::ngraph::Function> getFunction() noexcept override {
|
std::shared_ptr<::ngraph::Function> getFunction() noexcept override {
|
||||||
return nullptr;
|
return nullptr;
|
||||||
@ -116,10 +115,6 @@ public:
|
|||||||
|
|
||||||
void removeOutput(const std::string& dataName);
|
void removeOutput(const std::string& dataName);
|
||||||
|
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
}
|
|
||||||
|
|
||||||
virtual void validate(int = 2);
|
virtual void validate(int = 2);
|
||||||
|
|
||||||
StatusCode reshape(const std::map<std::string, std::vector<size_t>>& inputShapes,
|
StatusCode reshape(const std::map<std::string, std::vector<size_t>>& inputShapes,
|
||||||
|
@ -33,10 +33,6 @@ class ConstAllocatorWrapper : public IAllocator {
|
|||||||
public:
|
public:
|
||||||
explicit ConstAllocatorWrapper(std::shared_ptr<ngraph::op::Constant> constOp): _constOp(std::move(constOp)) {}
|
explicit ConstAllocatorWrapper(std::shared_ptr<ngraph::op::Constant> constOp): _constOp(std::move(constOp)) {}
|
||||||
|
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
}
|
|
||||||
|
|
||||||
void* lock(void* handle, LockOp) noexcept override {
|
void* lock(void* handle, LockOp) noexcept override {
|
||||||
return handle;
|
return handle;
|
||||||
}
|
}
|
||||||
|
@ -5,7 +5,8 @@
|
|||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include <ie_blob.h>
|
#include <ie_blob.h>
|
||||||
|
#include <memory>
|
||||||
|
#include <details/ie_no_copy.hpp>
|
||||||
#include "mkldnn_memory.h"
|
#include "mkldnn_memory.h"
|
||||||
#include "mkldnn_dims.h"
|
#include "mkldnn_dims.h"
|
||||||
#include "mkldnn_weights_cache.hpp"
|
#include "mkldnn_weights_cache.hpp"
|
||||||
|
@ -19,7 +19,7 @@ namespace MKLDNNPlugin {
|
|||||||
class Engine : public InferenceEngine::InferencePluginInternal {
|
class Engine : public InferenceEngine::InferencePluginInternal {
|
||||||
public:
|
public:
|
||||||
Engine();
|
Engine();
|
||||||
~Engine() override;
|
~Engine();
|
||||||
|
|
||||||
InferenceEngine::ExecutableNetworkInternal::Ptr
|
InferenceEngine::ExecutableNetworkInternal::Ptr
|
||||||
LoadExeNetworkImpl(const InferenceEngine::CNNNetwork &network,
|
LoadExeNetworkImpl(const InferenceEngine::CNNNetwork &network,
|
||||||
|
@ -83,10 +83,6 @@ public:
|
|||||||
|
|
||||||
void Unload() noexcept override {}
|
void Unload() noexcept override {}
|
||||||
|
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
}
|
|
||||||
|
|
||||||
using LayersFactory = openvino::cc::Factory<
|
using LayersFactory = openvino::cc::Factory<
|
||||||
std::string,
|
std::string,
|
||||||
InferenceEngine::ILayerImplFactory*(const InferenceEngine::CNNLayer*)>;
|
InferenceEngine::ILayerImplFactory*(const InferenceEngine::CNNLayer*)>;
|
||||||
|
@@ -201,7 +201,7 @@ IInferRequest::Ptr MultiDeviceExecutableNetwork::CreateInferRequest() {
         _needPerfCounters,
         std::static_pointer_cast<MultiDeviceExecutableNetwork>(shared_from_this()),
         _callbackExecutor);
-    asyncRequest.reset(new InferRequestBase(asyncTreadSafeImpl), [](IInferRequest *p) { p->Release(); });
+    asyncRequest.reset(new InferRequestBase(asyncTreadSafeImpl));
     asyncTreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
     return asyncRequest;
 }
@@ -18,7 +18,7 @@ namespace MultiDevicePlugin {
 class MultiDeviceInferencePlugin : public InferenceEngine::InferencePluginInternal {
 public:
     MultiDeviceInferencePlugin();
-    ~MultiDeviceInferencePlugin() override = default;
+    ~MultiDeviceInferencePlugin() = default;
 
     InferenceEngine::ExecutableNetworkInternal::Ptr LoadExeNetworkImpl(const InferenceEngine::CNNNetwork& network,
                                                                        const std::map<std::string, std::string>& config) override;
@@ -88,10 +88,6 @@ public:
     }
     IE_SUPPRESS_DEPRECATED_END
 
-    void Release() noexcept override {
-        delete this;
-    }
-
     StatusCode SetConfig(const std::map<std::string, Parameter>& config, ResponseDesc* resp) noexcept override {
         TO_STATUS(_impl->SetConfig(config));
     }
@@ -107,9 +103,6 @@ public:
     StatusCode GetContext(RemoteContext::Ptr& pContext, ResponseDesc* resp) const noexcept override {
         TO_STATUS(pContext = _impl->GetContext());
     }
-
-protected:
-    ~ExecutableNetworkBase() override = default;
 };
 IE_SUPPRESS_DEPRECATED_END_WIN
 
@@ -124,9 +117,7 @@ template <class T>
 inline typename InferenceEngine::ExecutableNetwork make_executable_network(std::shared_ptr<T> impl) {
     // to suppress warning about deprecated QueryState
     IE_SUPPRESS_DEPRECATED_START
-    typename ExecutableNetworkBase::Ptr net(new ExecutableNetworkBase(impl), [](IExecutableNetwork* p) {
-        p->Release();
-    });
+    typename ExecutableNetworkBase::Ptr net(new ExecutableNetworkBase(impl));
     IE_SUPPRESS_DEPRECATED_END
     return InferenceEngine::ExecutableNetwork(net);
 }
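With `Release()` gone from the wrapper interfaces, the code above no longer needs a `shared_ptr` deleter that forwards to it. A self-contained illustration of the general ownership change (plain placeholder types, not the Inference Engine classes):

```cpp
#include <memory>

// A COM-style object that must be destroyed through Release() needs a custom deleter;
// once Release() is removed and the destructor is public, the default deleter suffices.
struct Legacy { void Release() noexcept { delete this; } };
struct Modern {};

int main() {
    std::shared_ptr<Legacy> a(new Legacy(), [](Legacy* p) { p->Release(); });  // old style
    auto b = std::make_shared<Modern>();                                       // new style
    (void)a; (void)b;
    return 0;
}
```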
@@ -84,10 +84,6 @@ public:
         TO_STATUS(_impl->SetUserData(data));
     }
 
-    void Release() noexcept override {
-        delete this;
-    }
-
     StatusCode SetBatch(int batch_size, ResponseDesc* resp) noexcept override {
         TO_STATUS(_impl->SetBatch(batch_size));
     }
@@ -108,9 +104,6 @@ public:
         }
     }
     IE_SUPPRESS_DEPRECATED_END
-
-private:
-    ~InferRequestBase() = default;
 };
 
 } // namespace InferenceEngine
@@ -39,9 +39,7 @@ public:
         auto asyncRequestImpl = this->CreateAsyncInferRequestImpl(_networkInputs, _networkOutputs);
         asyncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this());
 
-        asyncRequest.reset(new InferRequestBase(asyncRequestImpl), [](IInferRequest* p) {
-            p->Release();
-        });
+        asyncRequest.reset(new InferRequestBase(asyncRequestImpl));
         asyncRequestImpl->SetPointerToPublicInterface(asyncRequest);
         return asyncRequest;
     }
@@ -62,15 +62,12 @@ protected:
      */
    template <typename AsyncInferRequestType = AsyncInferRequestThreadSafeDefault>
    IInferRequest::Ptr CreateAsyncInferRequestFromSync() {
-       IInferRequest::Ptr asyncRequest;
-
        auto syncRequestImpl = this->CreateInferRequestImpl(_networkInputs, _networkOutputs);
        syncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this());
 
        auto asyncThreadSafeImpl = std::make_shared<AsyncInferRequestType>(
            syncRequestImpl, _taskExecutor, _callbackExecutor);
-       asyncRequest.reset(new InferRequestBase(asyncThreadSafeImpl),
-                          [](IInferRequest *p) { p->Release(); });
+       IInferRequest::Ptr asyncRequest = std::make_shared<InferRequestBase>(asyncThreadSafeImpl);
        asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
 
        return asyncRequest;
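The same change is what makes `std::make_shared<InferRequestBase>(...)` possible here: `make_shared` and the default deleter call the destructor directly, so the previously private destructor had to become reachable. A generic sketch (hypothetical `Wrapper`, not the IE class):

```cpp
#include <memory>

class Wrapper {
public:
    explicit Wrapper(int impl) : impl_(impl) {}
    // public destructor: shared_ptr's default deleter can call it
private:
    int impl_;
};

int main() {
    auto request = std::make_shared<Wrapper>(42);  // no custom deleter required
    (void)request;
    return 0;
}
```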
@@ -47,12 +47,6 @@ static inline void parsePluginName(std::istream& networkModel) {
  * @ingroup ie_dev_api_plugin_api
  */
 class InferencePluginInternal : public IInferencePlugin {
-protected:
-    /**
-     * @brief Destroys the object.
-     */
-    ~InferencePluginInternal() override = default;
-
 public:
     ExecutableNetwork LoadNetwork(const CNNNetwork& network,
                                   const std::map<std::string, std::string>& config) override {
@@ -26,11 +26,6 @@ public:
      */
     typedef std::shared_ptr<IAsyncInferRequestInternal> Ptr;
 
-    /**
-     * @brief A virtual destructor
-     */
-    virtual ~IAsyncInferRequestInternal() = default;
-
     /**
      * @brief Start inference of specified input(s) in asynchronous mode
      * @note The method returns immediately. Inference starts also immediately.
@@ -83,8 +83,7 @@ inline void copyInputOutputInfo(const InputsDataMap & networkInputs, const Outpu
  * @brief An API of plugin to be implemented by a plugin
  * @ingroup ie_dev_api_plugin_api
  */
-class IInferencePlugin : public details::IRelease,
-                         public std::enable_shared_from_this<IInferencePlugin> {
+class IInferencePlugin : public std::enable_shared_from_this<IInferencePlugin> {
     class VersionStore : public Version {
         std::string _dsc;
         std::string _buildNumber;
@@ -112,12 +111,6 @@ class IInferencePlugin : public details::IRelease,
         }
     } _version;
 
-protected:
-    /**
-     * @brief Destroys the object.
-     */
-    ~IInferencePlugin() override = default;
-
 public:
     /**
      * @brief A shared pointer to IInferencePlugin interface
@@ -140,10 +133,6 @@ public:
         return _version;
     }
 
-    void Release() noexcept override {
-        delete this;
-    }
-
     /**
      * @brief Provides a name of a plugin
      * @return The name.
@@ -271,6 +260,9 @@ public:
      * @return The result of query operator containing supported layers map
      */
     virtual QueryNetworkResult QueryNetwork(const CNNNetwork& network, const std::map<std::string, std::string>& config) const = 0;
+
+protected:
+    ~IInferencePlugin() = default;
 };
 
 } // namespace InferenceEngine
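After this hunk a plugin's lifetime is expressed purely through `std::shared_ptr`: the interface inherits `enable_shared_from_this`, exposes a protected non-virtual destructor, and leaves destruction to the concrete type captured by the smart pointer at creation time. A minimal sketch of that shape (hypothetical `IPluginLike`/`MyPlugin`, not the real interface):

```cpp
#include <memory>

class IPluginLike : public std::enable_shared_from_this<IPluginLike> {
public:
    virtual void DoWork() = 0;
protected:
    ~IPluginLike() = default;  // clients cannot delete through the interface pointer
};

class MyPlugin : public IPluginLike {
public:
    void DoWork() override {}
};

int main() {
    std::shared_ptr<IPluginLike> plugin = std::make_shared<MyPlugin>();
    plugin->DoWork();
    return 0;
}   // destroyed via MyPlugin's destructor when the last shared_ptr drops
```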
@@ -281,15 +273,15 @@ public:
  * @ingroup ie_dev_api_plugin_api
  */
 #define IE_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) \
-INFERENCE_PLUGIN_API(InferenceEngine::StatusCode) CreatePluginEngine( \
-    InferenceEngine::IInferencePlugin *&plugin, \
-    InferenceEngine::ResponseDesc *resp) noexcept { \
+INFERENCE_PLUGIN_API(void) CreatePluginEngine(::std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) { \
     try { \
-        plugin = new PluginType(__VA_ARGS__); \
+        plugin = ::std::make_shared<PluginType>(__VA_ARGS__); \
+    } catch (const InferenceEngine::details::InferenceEngineException& e) { \
+        throw; \
+    } catch (const std::exception& ex) { \
+        THROW_IE_EXCEPTION << ex.what(); \
+    } catch (...) { \
+        THROW_IE_EXCEPTION_WITH_STATUS(UNEXPECTED); \
+    } \
     plugin->SetVersion(version); \
-        return InferenceEngine::OK; \
-    } \
-    catch (std::exception &ex) { \
-        return InferenceEngine::DescriptionBuffer(InferenceEngine::GENERAL_ERROR, resp) << ex.what(); \
-    } \
 }
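For a hypothetical `MyPluginType` and version constant `pluginVersion`, the rewritten macro above roughly expands to the following (formatting simplified; both names are illustrative, and errors now surface as exceptions instead of a `StatusCode`/`ResponseDesc` pair):

```cpp
INFERENCE_PLUGIN_API(void) CreatePluginEngine(::std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) {
    try {
        plugin = ::std::make_shared<MyPluginType>();
    } catch (const InferenceEngine::details::InferenceEngineException&) {
        throw;                                       // IE exceptions pass through unchanged
    } catch (const std::exception& ex) {
        THROW_IE_EXCEPTION << ex.what();             // wrap standard exceptions
    } catch (...) {
        THROW_IE_EXCEPTION_WITH_STATUS(UNEXPECTED);
    }
    plugin->SetVersion(pluginVersion);
}
```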
@@ -36,18 +36,13 @@ public:
 
     void execute(Blob::Ptr &preprocessedBlob, const PreProcessInfo &info, bool serial, int batchSize = -1) override;
 
-    void Release() noexcept override;
-
     void isApplicable(const Blob::Ptr &src, const Blob::Ptr &dst) override;
 };
 
-StatusCode CreatePreProcessData(IPreProcessData *& data, ResponseDesc * /*resp*/) noexcept {
-    data = new PreProcessData();
-    return StatusCode::OK;
-}
+INFERENCE_PRERPOC_PLUGIN_API(void) CreatePreProcessData(std::shared_ptr<IPreProcessData>& data);
 
-void PreProcessData::Release() noexcept {
-    delete this;
+INFERENCE_PRERPOC_PLUGIN_API(void) CreatePreProcessData(std::shared_ptr<IPreProcessData>& data) {
+    data = std::make_shared<PreProcessData>();
 }
 
 void PreProcessData::setRoiBlob(const Blob::Ptr &blob) {
@@ -31,7 +31,7 @@ namespace InferenceEngine {
 /**
  * @brief This class stores pre-process information for exact input
  */
-class IPreProcessData : public details::IRelease {
+class IPreProcessData : public std::enable_shared_from_this<IPreProcessData> {
 public:
     /**
      * @brief Sets ROI blob to be resized and placed to the default input blob during pre-processing.
@@ -58,9 +58,12 @@ public:
 
     //FIXME: rename to verifyAplicable
     virtual void isApplicable(const Blob::Ptr &src, const Blob::Ptr &dst) = 0;
+
+protected:
+    ~IPreProcessData() = default;
 };
 
-INFERENCE_PRERPOC_PLUGIN_API(StatusCode) CreatePreProcessData(IPreProcessData *& data, ResponseDesc *resp) noexcept;
+INFERENCE_PRERPOC_PLUGIN_API(void) CreatePreProcessData(std::shared_ptr<IPreProcessData>& data);
 
 namespace details {
 
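Callers of the reworked preprocessing factory now receive the object through a `shared_ptr` out-parameter and rely on exceptions rather than a returned `StatusCode`. A hedged usage sketch, assuming the header with the declarations from the hunks above is included and that `CreatePreProcessData` lives in the `InferenceEngine` namespace as the surrounding code suggests:

```cpp
#include <memory>

// Hypothetical helper; the IE preprocessing header providing IPreProcessData/CreatePreProcessData is assumed.
std::shared_ptr<InferenceEngine::IPreProcessData> makePreProcessData() {
    std::shared_ptr<InferenceEngine::IPreProcessData> data;
    InferenceEngine::CreatePreProcessData(data);  // fills the pointer; errors surface as exceptions
    return data;                                  // no Release() call when the last owner drops it
}
```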
@@ -48,12 +48,6 @@ CNNNetwork IRReader::read(std::istream& model, const Blob::CPtr& weights, const
     return CNNNetwork(parser.parse(root, weights));
 }
 
-INFERENCE_PLUGIN_API(StatusCode) InferenceEngine::CreateReader(IReader*& reader, ResponseDesc *resp) noexcept {
-    try {
-        reader = new IRReader();
-        return OK;
-    }
-    catch (std::exception &) {
-        return GENERAL_ERROR;
-    }
+INFERENCE_PLUGIN_API(void) InferenceEngine::CreateReader(std::shared_ptr<IReader>& reader) {
+    reader = std::make_shared<IRReader>();
 }
@@ -32,9 +32,6 @@ namespace InferenceEngine {
  */
 class IRReader: public IReader {
 public:
-    void Release() noexcept override {
-        delete this;
-    }
     /**
      * @brief Checks that reader supports format of the model
      * @param model stream with model
@@ -65,12 +65,6 @@ CNNNetwork ONNXReader::read(std::istream& model, const std::vector<IExtensionPtr
     return CNNNetwork(ngraph::onnx_import::import_onnx_model(model, readPathFromStream(model)), exts);
 }
 
-INFERENCE_PLUGIN_API(StatusCode) InferenceEngine::CreateReader(IReader*& reader, ResponseDesc *resp) noexcept {
-    try {
-        reader = new ONNXReader();
-        return OK;
-    }
-    catch (std::exception &) {
-        return GENERAL_ERROR;
-    }
+INFERENCE_PLUGIN_API(void) InferenceEngine::CreateReader(std::shared_ptr<IReader>& reader) {
+    reader = std::make_shared<ONNXReader>();
 }
@@ -10,9 +10,6 @@ namespace InferenceEngine {
 
 class ONNXReader: public IReader {
 public:
-    void Release() noexcept override {
-        delete this;
-    }
     /**
      * @brief Checks that reader supports format of the model
      * @param model stream with model
@@ -4,7 +4,6 @@
 
 #pragma once
 
-#include <details/ie_irelease.hpp>
 #include <cpp/ie_cnn_network.h>
 #include <ie_iextension.h>
 #include <istream>
@@ -17,7 +16,7 @@ namespace InferenceEngine {
 /**
  * @brief IReader an abstract interface for Inference Engine readers
  */
-class IReader: public details::IRelease {
+class IReader: public std::enable_shared_from_this<IReader> {
 public:
     /**
      * @brief Checks that reader supports format of the model
@@ -49,15 +48,15 @@ public:
      * @return vector of file extensions, for example the reader for OpenVINO IR returns {"bin"}
      */
     virtual std::vector<std::string> getDataFileExtensions() const = 0;
+
+protected:
+    ~IReader() = default;
 };
 
 /**
  * @brief Creates the default instance of the reader
- *
- * @param reader Reader interface
- * @param resp Response description
- * @return Status code
+ * @return Reader interface
  */
-INFERENCE_PLUGIN_API(StatusCode) CreateReader(IReader*& reader, ResponseDesc* resp) noexcept;
+INFERENCE_PLUGIN_API(void) CreateReader(std::shared_ptr<IReader>& reader);
 
 } // namespace InferenceEngine
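The reader factory follows the same pattern. A small sketch of consuming it, assuming the `IReader`/`CreateReader` declarations shown above are in scope:

```cpp
#include <memory>

// Minimal sketch: ownership now arrives through a shared_ptr out-parameter
// instead of a raw IReader* plus StatusCode.
std::shared_ptr<InferenceEngine::IReader> makeDefaultReader() {
    std::shared_ptr<InferenceEngine::IReader> reader;
    InferenceEngine::CreateReader(reader);
    return reader;  // owns the concrete IRReader/ONNXReader; no Release() call is needed
}
```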
@@ -88,8 +88,7 @@ public:
         auto taskExecutorGetResult = getNextTaskExecutor();
         auto asyncThreadSafeImpl = std::make_shared<MyriadAsyncInferRequest>(
                 syncRequestImpl, _taskExecutor, _callbackExecutor, taskExecutorGetResult);
-        asyncRequest.reset(new ie::InferRequestBase(asyncThreadSafeImpl),
-                           [](ie::IInferRequest *p) { p->Release(); });
+        asyncRequest.reset(new ie::InferRequestBase(asyncThreadSafeImpl));
         asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
         return asyncRequest;
     }
@@ -21,7 +21,7 @@ class Engine : public ie::InferencePluginInternal {
 public:
     explicit Engine(std::shared_ptr<IMvnc> mvnc);
 
-    ~Engine() override {
+    ~Engine() {
         MyriadExecutor::closeDevices(_devicePool, _mvnc);
     }
 
@@ -46,7 +46,7 @@ public:
 
 void safeAddExtension(InferenceEngine::Core & ie) {
     try {
-        auto extension = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(
+        auto extension = std::make_shared<InferenceEngine::Extension>(
             FileUtils::makePluginLibraryName<char>({},
                 std::string("template_extension") + IE_BUILD_POSTFIX));
         ie.AddExtension(extension);
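The test above also shows the new way to attach a CPU extension: `InferenceEngine::Extension` is a concrete `IExtension` that loads the shared library itself, so `make_so_pointer` is no longer involved. A hedged sketch (the library name below is illustrative, not a file from this patch):

```cpp
#include <ie_core.hpp>       // assumed IE public headers
#include <ie_extension.h>
#include <memory>
#include <string>

int main() {
    InferenceEngine::Core core;
    // "libmy_extension.so" is an illustrative path only.
    auto extension = std::make_shared<InferenceEngine::Extension>(std::string{"libmy_extension.so"});
    core.AddExtension(extension);
    return 0;
}
```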
@@ -1,22 +0,0 @@
-// Copyright (C) 2019 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include <memory>
-
-#include <gtest/gtest.h>
-#include <details/ie_irelease.hpp>
-
-#include "common_test_utils/test_common.hpp"
-
-using IReleaseTests = CommonTestUtils::TestsCommon;
-
-/**
- * @brief Testing that callback with Release() from shared_from_irelease(...)
- * won't be applied for nullptr.
- */
-TEST_F(IReleaseTests, sharedFromIReleaseWithNull) {
-    InferenceEngine::details::IRelease *irelease = nullptr;
-    std::shared_ptr<InferenceEngine::details::IRelease> ptr = InferenceEngine::details::shared_from_irelease(irelease);
-    ptr.reset();
-}
@@ -43,7 +43,7 @@ TEST_F(CustomOpsSerializationTest, CustomOpUser_MO) {
 
     InferenceEngine::Core ie;
     ie.AddExtension(
-        InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(
+        std::make_shared<InferenceEngine::Extension>(
             get_extension_path()));
 
     auto expected = ie.ReadNetwork(model);
@@ -65,7 +65,7 @@ TEST_F(CustomOpsSerializationTest, CustomOpUser_ONNXImporter) {
 
     InferenceEngine::Core ie;
     ie.AddExtension(
-        InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(
+        std::make_shared<InferenceEngine::Extension>(
            get_extension_path()));
 
     auto expected = ie.ReadNetwork(model);
@@ -87,7 +87,7 @@ TEST_F(CustomOpsSerializationTest, CustomOpTransformation) {
 
     InferenceEngine::Core ie;
     auto extension =
-        InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(
+        std::make_shared<InferenceEngine::Extension>(
            get_extension_path());
     ie.AddExtension(extension);
     auto expected = ie.ReadNetwork(model);
@@ -32,7 +32,6 @@ constexpr ngraph::NodeTypeInfo FakeAbs::type_info;
 class AbsFakeExtension: public InferenceEngine::IExtension {
 public:
     void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {}
-    void Release() noexcept override { delete this; }
     void Unload() noexcept override {}
 
     std::map<std::string, ngraph::OpSet> getOpSets() override{
@@ -55,8 +55,6 @@ class CustomAddConstExtension : public InferenceEngine::IExtension {
 
     void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {}
 
-    void Release() noexcept override { delete this; }
-
     void Unload() noexcept override {}
 
     std::map<std::string, ngraph::OpSet> getOpSets() override {
@@ -259,8 +259,6 @@ public:
 
     void Unload() noexcept override {}
 
-    void Release() noexcept override {}
-
     std::map<std::string, ngraph::OpSet> getOpSets() override {
         static std::map<std::string, ngraph::OpSet> opsets;
         if (opsets.empty()) {
@@ -426,8 +424,6 @@ public:
 
     void Unload() noexcept override {};
 
-    void Release() noexcept override {}
-
     std::map<std::string, ngraph::OpSet> getOpSets() override {
         static std::map<std::string, ngraph::OpSet> opsets;
         if (opsets.empty()) {
@@ -26,9 +26,10 @@ protected:
     }
     unique_ptr<SharedObjectLoader> sharedObjectLoader;
 
-    template <class T>
-    std::function<T> make_std_function(const std::string& functionName) {
-        std::function<T> ptr(reinterpret_cast<T*>(sharedObjectLoader->get_symbol(functionName.c_str())));
+    using CreateF = void(std::shared_ptr<IInferencePlugin>&);
+    std::function<CreateF> make_std_function(const std::string& functionName) {
+        std::function<CreateF> ptr(reinterpret_cast<CreateF*>(sharedObjectLoader->get_symbol(functionName.c_str())));
         return ptr;
     }
 };
@@ -48,22 +49,21 @@ TEST_F(SharedObjectLoaderTests, loaderThrowsIfNoPlugin) {
 TEST_F(SharedObjectLoaderTests, canFindExistedMethod) {
     loadDll(get_mock_engine_name());
 
-    auto factory = make_std_function<StatusCode(IInferencePlugin*&, ResponseDesc*)>("CreatePluginEngine");
+    auto factory = make_std_function("CreatePluginEngine");
     EXPECT_NE(nullptr, factory);
 }
 
 TEST_F(SharedObjectLoaderTests, throwIfMethodNofFoundInLibrary) {
     loadDll(get_mock_engine_name());
 
-    EXPECT_THROW(make_std_function<IInferencePlugin*()>("wrong_function"), InferenceEngine::details::InferenceEngineException);
+    EXPECT_THROW(make_std_function("wrong_function"),
+                 InferenceEngine::details::InferenceEngineException);
 }
 
 TEST_F(SharedObjectLoaderTests, canCallExistedMethod) {
     loadDll(get_mock_engine_name());
 
-    auto factory = make_std_function<StatusCode(IInferencePlugin*&, ResponseDesc*)>("CreatePluginEngine");
-    IInferencePlugin* ptr = nullptr;
-    ResponseDesc resp;
-    EXPECT_NO_THROW(factory(ptr, &resp));
-    ptr->Release();
+    auto factory = make_std_function("CreatePluginEngine");
+    std::shared_ptr<IInferencePlugin> ptr;
+    EXPECT_NO_THROW(factory(ptr));
 }
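The fixture above binds the exported `CreatePluginEngine` symbol to the new `void(std::shared_ptr<IInferencePlugin>&)` signature. A generic, self-contained illustration of that resolution step using POSIX `dlopen`/`dlsym` (placeholder `PluginBase` instead of the real interface; the library path is illustrative):

```cpp
#include <dlfcn.h>
#include <functional>
#include <memory>
#include <stdexcept>
#include <string>

struct PluginBase { virtual ~PluginBase() = default; };
using CreateF = void(std::shared_ptr<PluginBase>&);

std::shared_ptr<PluginBase> loadPlugin(const std::string& libraryPath) {
    void* handle = dlopen(libraryPath.c_str(), RTLD_NOW);
    if (!handle) throw std::runtime_error("cannot load library");
    auto* sym = reinterpret_cast<CreateF*>(dlsym(handle, "CreatePluginEngine"));
    if (!sym) throw std::runtime_error("symbol not found");
    std::function<CreateF> factory(sym);
    std::shared_ptr<PluginBase> plugin;
    factory(plugin);   // the factory fills the shared_ptr; no Release() afterwards
    return plugin;
}
```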
@@ -11,7 +11,6 @@
 #include <memory>
 #include <common_test_utils/test_assertions.hpp>
 #include <details/ie_so_pointer.hpp>
-#include <details/ie_irelease.hpp>
 #include <cpp_interfaces/interface/ie_iplugin_internal.hpp>
 #include <ie_plugin_ptr.hpp>
 
@@ -81,10 +80,12 @@ namespace InferenceEngine {
 
 namespace details {
 
+struct UnknownPlugin : std::enable_shared_from_this<UnknownPlugin> {};
+
 template<>
-class SOCreatorTrait<InferenceEngine::details::IRelease> {
+class SOCreatorTrait<InferenceEngine::details::UnknownPlugin> {
 public:
-    static constexpr auto name = "CreateIRelease";
+    static constexpr auto name = "CreateUnknownPlugin";
 };
 
 } // namespace details
@@ -92,12 +93,12 @@ public:
 } // namespace InferenceEngine
 
 TEST_F(SoPointerTests, UnknownPlugin) {
-    ASSERT_THROW(SOPointer<InferenceEngine::details::IRelease>("UnknownPlugin"), InferenceEngineException);
+    ASSERT_THROW(SOPointer<InferenceEngine::details::UnknownPlugin>("UnknownPlugin"), InferenceEngineException);
 }
 
 TEST_F(SoPointerTests, UnknownPluginExceptionStr) {
     try {
-        SOPointer<InferenceEngine::details::IRelease>("UnknownPlugin");
+        SOPointer<InferenceEngine::details::UnknownPlugin>("UnknownPlugin");
     }
     catch (InferenceEngineException &e) {
         ASSERT_STR_CONTAINS(e.what(), "Cannot load library 'UnknownPlugin':");
@@ -105,20 +106,3 @@ TEST_F(SoPointerTests, UnknownPluginExceptionStr) {
         ASSERT_STR_DOES_NOT_CONTAIN(e.what(), "from CWD:");
     }
 }
-
-using SymbolLoaderTests = ::testing::Test;
-
-TEST_F(SymbolLoaderTests, throwCreateNullPtr) {
-    ASSERT_THROW(SymbolLoader<SharedObjectLoader>(nullptr), InferenceEngineException);
-}
-
-TEST_F(SymbolLoaderTests, instantiateSymbol) {
-    std::string name = FileUtils::makePluginLibraryName<char>(getIELibraryPath(),
-        std::string("mock_engine") + IE_BUILD_POSTFIX);
-    std::shared_ptr<SharedObjectLoader> sharedLoader(new SharedObjectLoader(name.c_str()));
-    SymbolLoader<SharedObjectLoader> loader(sharedLoader);
-    IInferencePlugin * value = nullptr;
-    ASSERT_NE(nullptr, value = loader.instantiateSymbol<IInferencePlugin>(
-        SOCreatorTrait<IInferencePlugin>::name));
-    value->Release();
-}
@@ -106,8 +106,6 @@ class CustomAbsExtension : public InferenceEngine::IExtension {
 
     void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {}
 
-    void Release() noexcept override { delete this; }
-
     void Unload() noexcept override {}
 
     std::map<std::string, ngraph::OpSet> getOpSets() override {
@@ -329,7 +327,7 @@ TEST(Extension, XmlModelWithExtensionFromDSO) {
     std::vector<float> input_values{1, 2, 3, 4, 5, 6, 7, 8};
     std::vector<float> expected{12, 13, 14, 15, 16, 17, 18, 19};
     InferenceEngine::Core ie;
-    ie.AddExtension(InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(get_extension_path()));
+    ie.AddExtension(std::make_shared<InferenceEngine::Extension>(get_extension_path()));
     infer_model(ie, model, input_values, expected);
 }
 
@@ -406,7 +404,7 @@ opset_import {
     std::vector<float> input_values{1, 2, 3, 4, 5, 6, 7, 8};
     std::vector<float> expected{12, 13, 14, 15, 16, 17, 18, 19};
     InferenceEngine::Core ie;
-    ie.AddExtension(InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(get_extension_path()));
+    ie.AddExtension(std::make_shared<InferenceEngine::Extension>(get_extension_path()));
     infer_model(ie, model, input_values, expected);
 }
 
@@ -65,7 +65,7 @@ public:
 
 void safeAddExtension(InferenceEngine::Core & ie) {
     try {
-        auto extension = InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(
+        auto extension = std::make_shared<InferenceEngine::Extension>(
             FileUtils::makePluginLibraryName<char>({}, "template_extension"));
         ie.AddExtension(extension);
     } catch (const InferenceEngine::details::InferenceEngineException & ex) {
@@ -14,7 +14,6 @@
 
 class MockAllocator : public InferenceEngine::IAllocator {
 public:
-    MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
     MOCK_QUALIFIED_METHOD2(lock, noexcept, void*(void*, InferenceEngine::LockOp));
     MOCK_QUALIFIED_METHOD1(unlock, noexcept, void(void *));
     MOCK_QUALIFIED_METHOD1(alloc, noexcept, void*(size_t));
@@ -40,16 +40,10 @@ MockPlugin::LoadExeNetworkImpl(const InferenceEngine::CNNNetwork& network,
 
 InferenceEngine::IInferencePlugin *__target = nullptr;
 
-INFERENCE_PLUGIN_API(StatusCode) CreatePluginEngine(IInferencePlugin *&plugin, ResponseDesc *resp) noexcept {
-    try {
+INFERENCE_PLUGIN_API(void) CreatePluginEngine(std::shared_ptr<InferenceEngine::IInferencePlugin>& plugin) {
     IInferencePlugin *p = nullptr;
     std::swap(__target, p);
-    plugin = new MockPlugin(p);
-    return OK;
-    }
-    catch (std::exception &ex) {
-        return DescriptionBuffer(GENERAL_ERROR, resp) << ex.what();
-    }
+    plugin = std::make_shared<MockPlugin>(p);
 }
 
 INFERENCE_PLUGIN_API(InferenceEngine::IInferencePlugin*)
@@ -22,7 +22,7 @@ IE_SUPPRESS_DEPRECATED_START
  * @class MockICNNNetwork
  * @brief Main interface to describe the NN topology
  */
-class MockICNNNetwork : public InferenceEngine::ICNNNetwork {
+class MockICNNNetwork final : public InferenceEngine::ICNNNetwork {
 public:
     MOCK_QUALIFIED_METHOD0(getFunction, const noexcept, std::shared_ptr<const ngraph::Function> ());
     MOCK_QUALIFIED_METHOD0(getFunction, noexcept, std::shared_ptr<ngraph::Function>());
@@ -37,7 +37,6 @@ class MockICNNNetwork : public InferenceEngine::ICNNNetwork {
         InferenceEngine::ResponseDesc*));
     MOCK_QUALIFIED_METHOD2(setBatchSize, noexcept, InferenceEngine::StatusCode(const size_t size, InferenceEngine::ResponseDesc*));
     MOCK_QUALIFIED_METHOD0(getBatchSize, const noexcept, size_t());
-    MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
     MOCK_QUALIFIED_METHOD1(getInputShapes, const noexcept, void(InferenceEngine::ICNNNetwork::InputShapes&));
     MOCK_QUALIFIED_METHOD2(reshape, noexcept, InferenceEngine::StatusCode(const InferenceEngine::ICNNNetwork::InputShapes &, InferenceEngine::ResponseDesc *));
     MOCK_QUALIFIED_METHOD3(serialize, const noexcept, InferenceEngine::StatusCode(
@@ -45,25 +44,3 @@ class MockICNNNetwork : public InferenceEngine::ICNNNetwork {
         const std::string &,
         InferenceEngine::ResponseDesc*));
 };
-
-/**
- * @class MockCNNNetworkImpl
- * @brief Main interface to describe the NN topology
- */
-class MockCNNNetworkImpl: public InferenceEngine::details::CNNNetworkImpl {
-public:
-    MOCK_QUALIFIED_METHOD1(getOutputsInfo, const noexcept, void(InferenceEngine::OutputsDataMap& out));
-    MOCK_QUALIFIED_METHOD1(getInputsInfo, const noexcept, void(InferenceEngine::InputsDataMap &inputs));
-    MOCK_QUALIFIED_METHOD1(getInput, const noexcept, InferenceEngine::InputInfo::Ptr(const std::string &inputName));
-    MOCK_QUALIFIED_METHOD0(getName, const noexcept, const std::string&());
-    MOCK_QUALIFIED_METHOD0(layerCount, const noexcept, size_t());
-    MOCK_QUALIFIED_METHOD3(addOutput, noexcept, InferenceEngine::StatusCode(const std::string &, size_t , InferenceEngine::ResponseDesc*));
-    MOCK_QUALIFIED_METHOD2(setBatchSize, noexcept, InferenceEngine::StatusCode(const size_t size, InferenceEngine::ResponseDesc*));
-    MOCK_QUALIFIED_METHOD0(getBatchSize, const noexcept, size_t());
-    MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
-    MOCK_METHOD1(validate, void(int));
-
-    void validateNetwork() {
-        InferenceEngine::details::CNNNetworkImpl::validate();
-    }
-};
@@ -33,7 +33,6 @@ public:
     MOCK_QUALIFIED_METHOD3(GetMetric, const noexcept, StatusCode(const std::string &name, Parameter &result, ResponseDesc *resp));
     MOCK_QUALIFIED_METHOD2(GetContext, const noexcept, StatusCode(RemoteContext::Ptr &pContext, ResponseDesc *resp));
     MOCK_QUALIFIED_METHOD3(QueryState, noexcept, StatusCode(IVariableState::Ptr &, size_t, ResponseDesc *));
-    MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
 };
 
 IE_SUPPRESS_DEPRECATED_END
@@ -27,7 +27,6 @@ public:
     MOCK_QUALIFIED_METHOD2(GetUserData, noexcept, StatusCode(void**, ResponseDesc*));
     MOCK_QUALIFIED_METHOD2(SetUserData, noexcept, StatusCode(void*, ResponseDesc*));
     MOCK_QUALIFIED_METHOD1(SetCompletionCallback, noexcept, StatusCode(IInferRequest::CompletionCallback));
-    MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
     MOCK_QUALIFIED_METHOD1(Infer, noexcept, StatusCode(ResponseDesc*));
     MOCK_QUALIFIED_METHOD2(GetPerformanceCounts, const noexcept,
         StatusCode(std::map<std::string, InferenceEngineProfileInfo> &perfMap, ResponseDesc*));
@@ -15,7 +15,7 @@
 
 namespace InferenceEngine {
 
-class MockNotEmptyICNNNetwork : public ICNNNetwork {
+class MockNotEmptyICNNNetwork final : public ICNNNetwork {
 public:
     static constexpr const char* INPUT_BLOB_NAME = "first_input";
     const SizeVector INPUT_DIMENTIONS = { 1, 3, 299, 299 };
@@ -73,7 +73,6 @@ public:
     MOCK_QUALIFIED_METHOD3(addOutput, noexcept, StatusCode(const std::string &, size_t , ResponseDesc*));
     MOCK_QUALIFIED_METHOD2(setBatchSize, noexcept, StatusCode(const size_t size, ResponseDesc*));
    MOCK_QUALIFIED_METHOD0(getBatchSize, const noexcept, size_t());
-    MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
     MOCK_QUALIFIED_METHOD1(getInputShapes, const noexcept, void(ICNNNetwork::InputShapes &));
     MOCK_QUALIFIED_METHOD2(reshape, noexcept, StatusCode(const ICNNNetwork::InputShapes &, ResponseDesc *));
     MOCK_QUALIFIED_METHOD3(serialize, const noexcept, StatusCode(const std::string &, const std::string &, InferenceEngine::ResponseDesc*));
@@ -33,8 +33,7 @@ protected:
 
     virtual void SetUp() {
         mockExeNetwork = make_shared<MockExecutableNetworkThreadSafeAsyncOnly>();
-        exeNetwork = details::shared_from_irelease(
-            new ExecutableNetworkBase(mockExeNetwork));
+        exeNetwork = std::make_shared<ExecutableNetworkBase>(mockExeNetwork);
         InputsDataMap networkInputs;
         OutputsDataMap networkOutputs;
         mockAsyncInferRequestInternal = make_shared<MockAsyncInferRequestInternal>(networkInputs, networkOutputs);
@@ -108,8 +107,7 @@ protected:
 
     virtual void SetUp() {
         mockExeNetwork = make_shared<MockExecutableNetworkThreadSafe>();
-        exeNetwork = details::shared_from_irelease(
-            new ExecutableNetworkBase(mockExeNetwork));
+        exeNetwork = std::make_shared<ExecutableNetworkBase>(mockExeNetwork);
         InputsDataMap networkInputs;
         OutputsDataMap networkOutputs;
         mockInferRequestInternal = make_shared<MockInferRequestInternal>(networkInputs, networkOutputs);
@@ -34,7 +34,7 @@ protected:
 
     virtual void SetUp() {
         mock_impl.reset(new MockIAsyncInferRequestInternal());
-        request = details::shared_from_irelease(new InferRequestBase(mock_impl));
+        request = std::make_shared<InferRequestBase>(mock_impl);
     }
 };
 
@@ -242,8 +242,7 @@ protected:
         OutputsDataMap outputsInfo;
         mockNotEmptyNet.getOutputsInfo(outputsInfo);
         mockInferRequestInternal = make_shared<MockAsyncInferRequestInternal>(inputsInfo, outputsInfo);
-        inferRequest = shared_from_irelease(
-            new InferRequestBase(mockInferRequestInternal));
+        inferRequest = std::make_shared<InferRequestBase>(mockInferRequestInternal);
         return make_shared<InferRequest>(inferRequest);
     }
 
@@ -198,7 +198,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, callbackTakesOKIfAsyncRequestWasOK) {
     testRequest = make_shared<AsyncInferRequestThreadSafeDefault>(mockInferRequestInternal, taskExecutor, taskExecutor);
 
     IInferRequest::Ptr asyncRequest;
-    asyncRequest.reset(new InferRequestBase(testRequest), [](IInferRequest *p) { p->Release(); });
+    asyncRequest.reset(new InferRequestBase(testRequest));
     testRequest->SetPointerToPublicInterface(asyncRequest);
 
     testRequest->SetCompletionCallback([](InferenceEngine::IInferRequest::Ptr request, StatusCode status) {
@@ -214,7 +214,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, callbackIsCalledIfAsyncRequestFailed)
     auto taskExecutor = std::make_shared<CPUStreamsExecutor>();
     testRequest = make_shared<AsyncInferRequestThreadSafeDefault>(mockInferRequestInternal, taskExecutor, taskExecutor);
     IInferRequest::Ptr asyncRequest;
-    asyncRequest.reset(new InferRequestBase(testRequest), [](IInferRequest *p) { p->Release(); });
+    asyncRequest.reset(new InferRequestBase(testRequest));
     testRequest->SetPointerToPublicInterface(asyncRequest);
 
     bool wasCalled = false;
@@ -236,7 +236,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, canCatchExceptionIfAsyncRequestFailed
     auto taskExecutor = std::make_shared<CPUStreamsExecutor>();
     testRequest = make_shared<AsyncInferRequestThreadSafeDefault>(mockInferRequestInternal, taskExecutor, taskExecutor);
     IInferRequest::Ptr asyncRequest;
-    asyncRequest.reset(new InferRequestBase(testRequest), [](IInferRequest *p) { p->Release(); });
+    asyncRequest.reset(new InferRequestBase(testRequest));
     testRequest->SetPointerToPublicInterface(asyncRequest);
 
     EXPECT_CALL(*mockInferRequestInternal.get(), InferImpl()).WillOnce(Throw(std::exception()));
@@ -20,9 +20,7 @@ using namespace InferenceEngine::details;
 
 template <class T>
 inline typename InferenceEngine::InferRequest make_infer_request(std::shared_ptr<T> impl) {
-    typename InferRequestBase::Ptr req(new InferRequestBase(impl), [](IInferRequest* p) {
-        p->Release();
-    });
+    typename InferRequestBase::Ptr req(new InferRequestBase(impl));
     return InferenceEngine::InferRequest(req);
 }
 
@@ -223,7 +223,7 @@ protected:
 
     virtual void SetUp() {
        mock_impl.reset(new MockIExecutableNetworkInternal());
-        exeNetwork = shared_from_irelease(new ExecutableNetworkBase(mock_impl));
+        exeNetwork = std::make_shared<ExecutableNetworkBase>(mock_impl);
     }
 };
 
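All of the test changes above replace `shared_from_irelease(new T(...))` with `std::make_shared<T>(...)`. A generic before/after, using placeholder types rather than the IE classes:

```cpp
#include <memory>
#include <utility>

// shared_from_irelease wrapped a raw pointer in a shared_ptr whose deleter called Release();
// with Release() gone, std::make_shared expresses the same ownership directly.
struct Impl {};
struct Base {
    explicit Base(std::shared_ptr<Impl> impl) : impl_(std::move(impl)) {}
    std::shared_ptr<Impl> impl_;
};

int main() {
    auto impl = std::make_shared<Impl>();
    // old: base = details::shared_from_irelease(new Base(impl));
    auto base = std::make_shared<Base>(impl);  // new: single allocation, default deleter
    (void)base;
    return 0;
}
```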
@@ -25,34 +25,34 @@ std::string getExtensionPath() {
 }
 
 TEST(ExtensionTests, testGetOpSets) {
-    IExtensionPtr extension = make_so_pointer<IExtension>(getExtensionPath());
+    IExtensionPtr extension = std::make_shared<Extension>(getExtensionPath());
     auto opsets = extension->getOpSets();
     ASSERT_FALSE(opsets.empty());
     opsets.clear();
 }
 
 TEST(ExtensionTests, testGetImplTypes) {
-    IExtensionPtr extension = make_so_pointer<IExtension>(getExtensionPath());
+    IExtensionPtr extension = std::make_shared<Extension>(getExtensionPath());
     auto opset = extension->getOpSets().begin()->second;
     std::shared_ptr<ngraph::Node> op(opset.create(opset.get_types_info().begin()->name));
     ASSERT_FALSE(extension->getImplTypes(op).empty());
 }
 
 TEST(ExtensionTests, testGetImplTypesThrowsIfNgraphNodeIsNullPtr) {
-    IExtensionPtr extension = make_so_pointer<IExtension>(getExtensionPath());
+    IExtensionPtr extension = std::make_shared<Extension>(getExtensionPath());
     ASSERT_THROW(extension->getImplTypes(std::shared_ptr<ngraph::Node> ()),
                  InferenceEngine::details::InferenceEngineException);
 }
 
 TEST(ExtensionTests, testGetImplementation) {
-    IExtensionPtr extension = make_so_pointer<IExtension>(getExtensionPath());
+    IExtensionPtr extension = std::make_shared<Extension>(getExtensionPath());
     auto opset = extension->getOpSets().begin()->second;
     std::shared_ptr<ngraph::Node> op(opset.create("Template"));
     ASSERT_NE(nullptr, extension->getImplementation(op, extension->getImplTypes(op)[0]));
 }
 
 TEST(ExtensionTests, testGetImplementationThrowsIfNgraphNodeIsNullPtr) {
-    IExtensionPtr extension = make_so_pointer<IExtension>(getExtensionPath());
+    IExtensionPtr extension = std::make_shared<Extension>(getExtensionPath());
     ASSERT_THROW(extension->getImplementation(std::shared_ptr<ngraph::Node> (), ""),
                  InferenceEngine::details::InferenceEngineException);
 }
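These tests exercise the same idea from the user side: `IExtensionPtr` is simply `std::shared_ptr<IExtension>`, so it can hold an `Extension` constructed directly from a library path. A small sketch (the path comes from the caller; the tests above use `getExtensionPath()` from their fixture):

```cpp
#include <ie_extension.h>  // assumed header for InferenceEngine::Extension / IExtensionPtr
#include <memory>
#include <string>

InferenceEngine::IExtensionPtr loadExtension(const std::string& path) {
    // IExtensionPtr is std::shared_ptr<IExtension>, so a concrete Extension fits directly.
    return std::make_shared<InferenceEngine::Extension>(path);
}
```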
@ -40,15 +40,6 @@ protected:
|
|||||||
MockInferencePluginInternal2 engine;
|
MockInferencePluginInternal2 engine;
|
||||||
};
|
};
|
||||||
|
|
||||||
TEST_F(PluginTest, canCreatePlugin) {
|
|
||||||
auto ptr = make_std_function<IInferencePlugin*
|
|
||||||
(IInferencePlugin*)>("CreatePluginEngineProxy");
|
|
||||||
|
|
||||||
unique_ptr<IInferencePlugin, std::function<void(IInferencePlugin*)>> smart_ptr(ptr(nullptr), [](IInferencePlugin *p) {
|
|
||||||
p->Release();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
TEST_F(PluginTest, canCreatePluginUsingSmartPtr) {
|
TEST_F(PluginTest, canCreatePluginUsingSmartPtr) {
|
||||||
ASSERT_NO_THROW(InferenceEnginePluginPtr ptr(get_mock_engine_name()));
|
ASSERT_NO_THROW(InferenceEnginePluginPtr ptr(get_mock_engine_name()));
|
||||||
}
|
}
|
||||||
@ -66,11 +57,11 @@ TEST_F(PluginTest, canSetConfiguration) {
|
|||||||
InferenceEnginePluginPtr ptr = getPtr();
|
InferenceEnginePluginPtr ptr = getPtr();
|
||||||
// TODO: dynamic->reinterpret because of clang/gcc cannot
|
// TODO: dynamic->reinterpret because of clang/gcc cannot
|
||||||
// dynamically cast this MOCK object
|
// dynamically cast this MOCK object
|
||||||
ASSERT_TRUE(reinterpret_cast<MockPlugin*>(*ptr)->config.empty());
|
ASSERT_TRUE(dynamic_cast<MockPlugin*>(ptr.operator->())->config.empty());
|
||||||
|
|
||||||
std::map<std::string, std::string> config = { { "key", "value" } };
|
std::map<std::string, std::string> config = { { "key", "value" } };
|
||||||
ASSERT_NO_THROW(ptr->SetConfig(config));
|
ASSERT_NO_THROW(ptr->SetConfig(config));
|
||||||
config.clear();
|
config.clear();
|
||||||
|
|
||||||
ASSERT_STREQ(reinterpret_cast<MockPlugin*>(*ptr)->config["key"].c_str(), "value");
|
ASSERT_STREQ(dynamic_cast<MockPlugin*>(ptr.operator->())->config["key"].c_str(), "value");
|
||||||
}
|
}
|
||||||
|
@ -9,14 +9,10 @@
|
|||||||
|
|
||||||
#include "system_allocator.hpp"
|
#include "system_allocator.hpp"
|
||||||
|
|
||||||
|
using namespace InferenceEngine;
|
||||||
class SystemAllocatorReleaseTests : public CommonTestUtils::TestsCommon {
|
class SystemAllocatorReleaseTests : public CommonTestUtils::TestsCommon {
|
||||||
};
|
};
|
||||||
|
|
||||||
TEST_F(SystemAllocatorReleaseTests, canRelease) {
|
|
||||||
SystemMemoryAllocator *allocator_ = new SystemMemoryAllocator();
|
|
||||||
allocator_->Release();
|
|
||||||
}
|
|
||||||
|
|
||||||
class SystemAllocatorTests : public CommonTestUtils::TestsCommon {
|
class SystemAllocatorTests : public CommonTestUtils::TestsCommon {
|
||||||
protected:
|
protected:
|
||||||
void SetUp() override {
|
void SetUp() override {
|
||||||
|
@ -104,7 +104,6 @@ public:
|
|||||||
return std::make_shared<NewFakePrimitiveImpl>(node);
|
return std::make_shared<NewFakePrimitiveImpl>(node);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
void Release() noexcept override { delete this; }
|
|
||||||
|
|
||||||
void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override {
|
void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override {
|
||||||
static const InferenceEngine::Version VERSION{{}, "", ""};
|
static const InferenceEngine::Version VERSION{{}, "", ""};
|
||||||
|
@ -63,9 +63,6 @@ public:
|
|||||||
bool free(void* handle) noexcept override {
|
bool free(void* handle) noexcept override {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
#if GNA_LIB_VER == 2
|
#if GNA_LIB_VER == 2
|
||||||
void expect_enqueue_calls(GNACppApi &mockApi, bool enableHardwareConsistency = true){
|
void expect_enqueue_calls(GNACppApi &mockApi, bool enableHardwareConsistency = true){
|
||||||
|
@ -91,9 +91,6 @@ public:
|
|||||||
|
|
||||||
void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override {}
|
void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override {}
|
||||||
void Unload() noexcept override {}
|
void Unload() noexcept override {}
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
}
|
|
||||||
InferenceEngine::StatusCode getPrimitiveTypes(char**& types, unsigned int& size, InferenceEngine::ResponseDesc* resp) noexcept override {
|
InferenceEngine::StatusCode getPrimitiveTypes(char**& types, unsigned int& size, InferenceEngine::ResponseDesc* resp) noexcept override {
|
||||||
types = new char *[factories.size()];
|
types = new char *[factories.size()];
|
||||||
size_t count = 0;
|
size_t count = 0;
|
||||||
|
@ -21,10 +21,6 @@ class FakeExtensions : public Cpu::MKLDNNExtensions {
|
|||||||
public:
|
public:
|
||||||
void Unload() noexcept override {};
|
void Unload() noexcept override {};
|
||||||
|
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
};
|
|
||||||
|
|
||||||
static std::shared_ptr<TestExtensionsHolder> GetExtensionsHolder() {
|
static std::shared_ptr<TestExtensionsHolder> GetExtensionsHolder() {
|
||||||
static std::shared_ptr<TestExtensionsHolder> localHolder;
|
static std::shared_ptr<TestExtensionsHolder> localHolder;
|
||||||
if (localHolder == nullptr) {
|
if (localHolder == nullptr) {
|
||||||
|
@ -432,9 +432,6 @@ public:
|
|||||||
|
|
||||||
void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override {}
|
void GetVersion(const InferenceEngine::Version *&versionInfo) const noexcept override {}
|
||||||
void Unload() noexcept override {}
|
void Unload() noexcept override {}
|
||||||
void Release() noexcept override {
|
|
||||||
delete this;
|
|
||||||
}
|
|
||||||
InferenceEngine::StatusCode getPrimitiveTypes(char**& types, unsigned int& size, InferenceEngine::ResponseDesc* resp) noexcept override {
|
InferenceEngine::StatusCode getPrimitiveTypes(char**& types, unsigned int& size, InferenceEngine::ResponseDesc* resp) noexcept override {
|
||||||
types = new char *[factories.size()];
|
types = new char *[factories.size()];
|
||||||
size_t count = 0;
|
size_t count = 0;
|
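The hunks above all drop the same boilerplate: a Release() override whose only job was delete this. Once the mocked objects are handed out as std::shared_ptr and the interface has a virtual destructor, that manual call is redundant. A small self-contained sketch of the idiom; IMockExtension/MockExtension are made-up names that only mirror the shape of the mocks above, not the real IExtension hierarchy.

// Sketch only: a virtual destructor plus shared_ptr ownership replaces
// Release() { delete this; } in an interface/implementation pair.
#include <memory>

struct IMockExtension {
    virtual void GetVersion(const char*& version) const noexcept = 0;
    virtual void Unload() noexcept = 0;
    virtual ~IMockExtension() = default;  // replaces Release() { delete this; }
};

struct MockExtension : IMockExtension {
    void GetVersion(const char*& version) const noexcept override { version = "test"; }
    void Unload() noexcept override {}
};

int main() {
    std::shared_ptr<IMockExtension> ext = std::make_shared<MockExtension>();
    const char* version = nullptr;
    ext->GetVersion(version);
    ext->Unload();
    return version == nullptr ? 1 : 0;  // ext is destroyed via the virtual destructor
}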
@ -328,8 +328,8 @@ protected:
         InferenceEngine::CNNNetwork network;
         ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr()));
 
-        auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
-        ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
+        ASSERT_EQ(nullptr, network.getFunction());
+        auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
         InferenceEngine::ResponseDesc resp;
         InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
         ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;
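In this and the following reshape tests, the dynamic_cast plus ASSERT_NE null-check is replaced by asserting that the network has no ngraph function (which indicates it is backed by the legacy CNNNetworkImpl) and then downcasting with static_cast. A generic sketch of that guard-then-static_cast pattern with stand-in types; Base/LegacyImpl are not the real ICNNNetwork/CNNNetworkImpl.

// Sketch only: assert a cheap invariant that identifies the concrete type,
// then downcast with static_cast instead of dynamic_cast.
#include <cassert>

struct Base {
    virtual ~Base() = default;
    virtual const void* getFunction() const { return nullptr; }  // plays the role of CNNNetwork::getFunction()
};

struct LegacyImpl : Base {
    int setBatchSize(int batch) { return batch > 0 ? 0 : -1; }   // 0 stands in for StatusCode::OK
};

int main() {
    LegacyImpl impl;                                   // the concrete type is known by construction here
    Base& base = impl;
    assert(base.getFunction() == nullptr);             // mirrors ASSERT_EQ(nullptr, network.getFunction())
    auto* legacy = static_cast<LegacyImpl*>(&base);    // downcast without RTTI, as in the updated tests
    return legacy->setBatchSize(1);                    // returns 0 on success
}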
@ -285,8 +285,8 @@ protected:
         InferenceEngine::CNNNetwork network;
         ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr));
 
-        auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
-        ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
+        ASSERT_EQ(nullptr, network.getFunction());
+        auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
         InferenceEngine::ResponseDesc resp;
         InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
         ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;
@ -255,8 +255,8 @@ protected:
         InferenceEngine::CNNNetwork network;
         ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr));
 
-        auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
-        ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
+        ASSERT_EQ(nullptr, network.getFunction());
+        auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
         InferenceEngine::ResponseDesc resp;
         InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
         ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;
@ -321,8 +321,8 @@ protected:
         InferenceEngine::CNNNetwork network;
         ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr()));
 
-        auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
-        ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
+        ASSERT_EQ(nullptr, network.getFunction());
+        auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
         InferenceEngine::ResponseDesc resp;
         InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
         ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;
@ -433,8 +433,8 @@ protected:
         InferenceEngine::CNNNetwork network;
         ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr));
 
-        auto implNet = dynamic_cast<details::CNNNetworkImpl *>(&((ICNNNetwork&)network));
-        ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
+        ASSERT_EQ(nullptr, network.getFunction());
+        auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
         ResponseDesc resp;
         StatusCode sts = implNet->setBatchSizeReshape(dims[0], &resp);
         ASSERT_EQ((int)StatusCode::OK, sts) << resp.msg;
@ -486,8 +486,8 @@ protected:
         InferenceEngine::CNNNetwork network;
         ASSERT_NO_THROW(network = core.ReadNetwork(model, model_blob));
 
-        auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
-        ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
+        ASSERT_EQ(nullptr, network.getFunction());
+        auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
         InferenceEngine::ResponseDesc resp;
         InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
         ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;
@ -368,8 +368,8 @@ protected:
         InferenceEngine::CNNNetwork network;
         ASSERT_NO_THROW(network = core.ReadNetwork(model, weights_ptr));
 
-        auto implNet = dynamic_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
-        ASSERT_NE(nullptr, implNet) << "Failed to cast ICNNNetwork to CNNNetworkImpl";
+        ASSERT_EQ(nullptr, network.getFunction());
+        auto implNet = static_cast<InferenceEngine::details::CNNNetworkImpl *>(&((InferenceEngine::ICNNNetwork&)network));
         InferenceEngine::ResponseDesc resp;
         InferenceEngine::StatusCode sts = implNet->setBatchSizeReshape(MB, &resp);
         ASSERT_EQ((int)InferenceEngine::StatusCode::OK, sts) << resp.msg;