Throw ov::Exception from ov::runtime (#7536)

* Exceptions

* Throw ov::Exception from ov::runtime

* code style

* Fixed function name
Ilya Lavrenov, 2021-09-21 14:26:01 +03:00 (committed by GitHub)
parent d6ade04fc7
commit db385569c2
11 changed files with 287 additions and 256 deletions
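
For readers skimming the diff, a minimal caller-side sketch (not part of this commit) of the new behavior: errors raised inside the ov::runtime wrappers now surface as ov::Exception, so a single catch clause covers both OPENVINO_ASSERT failures and rethrown InferenceEngine errors. The default Core constructor and the missing-XML failure path are assumptions based on the tests further down in this diff.

#include <iostream>

#include "openvino/core/except.hpp"
#include "openvino/runtime/core.hpp"

int main() {
    ov::runtime::Core core;  // assumes the default plugins.xml lookup
    try {
        // A missing plugin registry now reports ov::Exception
        // (previously an InferenceEngine exception type).
        core.register_plugins("nonExistPlugins.xml");
    } catch (const ov::Exception& ex) {
        std::cerr << "ov::Exception caught: " << ex.what() << std::endl;
    }
    return 0;
}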


@ -9,6 +9,7 @@
#include "ie_common.h"
#include "ie_executable_network_base.hpp"
#include "ie_remote_context.hpp"
#include "openvino/core/except.hpp"
#include "openvino/runtime/executable_network.hpp"
namespace InferenceEngine {
@ -22,6 +23,16 @@ namespace InferenceEngine {
InferenceEngine::details::Rethrow(); \
}
#define OV_EXEC_NET_CALL_STATEMENT(...) \
OPENVINO_ASSERT(_impl != nullptr, "ExecutableNetwork was not initialized."); \
try { \
__VA_ARGS__; \
} catch (const std::exception& ex) { \
throw ov::Exception(ex.what()); \
} catch (...) { \
OPENVINO_ASSERT(false, "Unexpected exception"); \
}
ExecutableNetwork::ExecutableNetwork(const details::SharedObjectLoader& so, const IExecutableNetworkInternal::Ptr& impl)
: _so(so),
_impl(impl) {
@ -115,43 +126,43 @@ ExecutableNetwork::ExecutableNetwork(const std::shared_ptr<void>& so,
const std::shared_ptr<ie::IExecutableNetworkInternal>& impl)
: _so{so},
_impl{impl} {
IE_ASSERT(_impl != nullptr);
OPENVINO_ASSERT(_impl != nullptr, "ExecutableNetwork was not initialized.");
}
std::shared_ptr<const Function> ExecutableNetwork::get_runtime_function() const {
EXEC_NET_CALL_STATEMENT(return std::const_pointer_cast<const Function>(_impl->GetExecGraphInfo()));
OV_EXEC_NET_CALL_STATEMENT(return std::const_pointer_cast<const Function>(_impl->GetExecGraphInfo()));
}
ParameterVector ExecutableNetwork::get_parameters() const {
EXEC_NET_CALL_STATEMENT(return _impl->GetExecGraphInfo()->get_parameters());
OV_EXEC_NET_CALL_STATEMENT(return _impl->GetExecGraphInfo()->get_parameters());
}
ResultVector ExecutableNetwork::get_results() const {
EXEC_NET_CALL_STATEMENT(return _impl->GetExecGraphInfo()->get_results());
OV_EXEC_NET_CALL_STATEMENT(return _impl->GetExecGraphInfo()->get_results());
}
InferRequest ExecutableNetwork::create_infer_request() {
EXEC_NET_CALL_STATEMENT(return {_so, _impl->CreateInferRequest()});
OV_EXEC_NET_CALL_STATEMENT(return {_so, _impl->CreateInferRequest()});
}
void ExecutableNetwork::export_model(std::ostream& networkModel) {
EXEC_NET_CALL_STATEMENT(_impl->Export(networkModel));
OV_EXEC_NET_CALL_STATEMENT(_impl->Export(networkModel));
}
void ExecutableNetwork::set_config(const ie::ParamMap& config) {
EXEC_NET_CALL_STATEMENT(_impl->SetConfig(config));
OV_EXEC_NET_CALL_STATEMENT(_impl->SetConfig(config));
}
ie::Parameter ExecutableNetwork::get_config(const std::string& name) const {
EXEC_NET_CALL_STATEMENT(return _impl->GetConfig(name));
OV_EXEC_NET_CALL_STATEMENT(return _impl->GetConfig(name));
}
ie::Parameter ExecutableNetwork::get_metric(const std::string& name) const {
EXEC_NET_CALL_STATEMENT(return _impl->GetMetric(name));
OV_EXEC_NET_CALL_STATEMENT(return _impl->GetMetric(name));
}
std::shared_ptr<ie::RemoteContext> ExecutableNetwork::get_context() const {
EXEC_NET_CALL_STATEMENT(return _impl->GetContext());
OV_EXEC_NET_CALL_STATEMENT(return _impl->GetContext());
}
bool ExecutableNetwork::operator!() const noexcept {

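The OV_EXEC_NET_CALL_STATEMENT macro above captures the whole pattern: assert that _impl is set, forward the call, and translate any std::exception into ov::Exception. Below is a self-contained sketch of that translation pattern, with local stand-ins for ov::Exception and OPENVINO_ASSERT so it compiles without the OpenVINO headers; it is an illustration, not the project's actual macro.

#include <iostream>
#include <stdexcept>
#include <string>

// Stand-in for ov::Exception, only to keep the sketch self-contained.
struct Exception : std::runtime_error {
    explicit Exception(const std::string& what_arg) : std::runtime_error(what_arg) {}
};

// Stand-in for OPENVINO_ASSERT(cond, msg): throw when the condition fails.
#define ASSERT_OR_THROW(cond, msg) \
    if (!(cond)) throw Exception(msg)

// Same shape as OV_EXEC_NET_CALL_STATEMENT: null check, forward, translate.
#define CALL_STATEMENT(...)                                   \
    ASSERT_OR_THROW(impl != nullptr, "not initialized");      \
    try {                                                     \
        __VA_ARGS__;                                          \
    } catch (const std::exception& ex) {                      \
        throw Exception(ex.what());                           \
    } catch (...) {                                           \
        ASSERT_OR_THROW(false, "Unexpected exception");       \
    }

struct Impl {
    void run() { throw std::logic_error("backend failed"); }
};

int main() {
    Impl backend;
    Impl* impl = &backend;
    try {
        CALL_STATEMENT(impl->run());  // std::logic_error is rethrown as Exception
    } catch (const Exception& ex) {
        std::cout << "translated: " << ex.what() << std::endl;  // prints "translated: backend failed"
    }
    return 0;
}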

@ -13,6 +13,7 @@
#include "details/ie_so_loader.h"
#include "ie_infer_async_request_base.hpp"
#include "ie_remote_context.hpp"
#include "openvino/core/except.hpp"
#include "openvino/runtime/infer_request.hpp"
namespace InferenceEngine {
@ -26,6 +27,16 @@ namespace InferenceEngine {
::InferenceEngine::details::Rethrow(); \
}
#define OV_INFER_REQ_CALL_STATEMENT(...) \
OPENVINO_ASSERT(_impl != nullptr, "InferRequest was not initialized."); \
try { \
__VA_ARGS__; \
} catch (const std::exception& ex) { \
throw ov::Exception(ex.what()); \
} catch (...) { \
OPENVINO_ASSERT(false, "Unexpected exception"); \
}
InferRequest::InferRequest(const details::SharedObjectLoader& so, const IInferRequestInternal::Ptr& impl)
: _so(so),
_impl(impl) {
@ -199,16 +210,16 @@ namespace runtime {
InferRequest::InferRequest(const std::shared_ptr<void>& so, const ie::IInferRequestInternal::Ptr& impl)
: _so{so},
_impl{impl} {
IE_ASSERT(_impl != nullptr);
OPENVINO_ASSERT(_impl != nullptr, "InferRequest was not initialized.");
}
void InferRequest::set_blob(const std::string& name, const ie::Blob::Ptr& data) {
INFER_REQ_CALL_STATEMENT(_impl->SetBlob(name, data);)
OV_INFER_REQ_CALL_STATEMENT(_impl->SetBlob(name, data);)
}
ie::Blob::Ptr InferRequest::get_blob(const std::string& name) {
ie::Blob::Ptr blobPtr;
INFER_REQ_CALL_STATEMENT(blobPtr = _impl->GetBlob(name);)
OV_INFER_REQ_CALL_STATEMENT(blobPtr = _impl->GetBlob(name);)
std::string error = "Internal error: blob with name `" + name + "` is not allocated!";
const bool remoteBlobPassed = blobPtr->is<ie::RemoteBlob>();
if (blobPtr == nullptr)
@ -219,15 +230,15 @@ ie::Blob::Ptr InferRequest::get_blob(const std::string& name) {
}
void InferRequest::infer() {
INFER_REQ_CALL_STATEMENT(_impl->Infer();)
OV_INFER_REQ_CALL_STATEMENT(_impl->Infer();)
}
void InferRequest::cancel() {
INFER_REQ_CALL_STATEMENT(_impl->Cancel();)
OV_INFER_REQ_CALL_STATEMENT(_impl->Cancel();)
}
std::vector<ProfilingInfo> InferRequest::get_profiling_info() const {
INFER_REQ_CALL_STATEMENT({
OV_INFER_REQ_CALL_STATEMENT({
auto ieInfos = _impl->GetPerformanceCounts();
std::vector<ProfilingInfo> infos;
infos.reserve(ieInfos.size());
@ -265,36 +276,36 @@ std::vector<ProfilingInfo> InferRequest::get_profiling_info() const {
}
void InferRequest::set_input(const ie::BlobMap& inputs) {
INFER_REQ_CALL_STATEMENT(for (auto&& input : inputs) { _impl->SetBlob(input.first, input.second); })
OV_INFER_REQ_CALL_STATEMENT(for (auto&& input : inputs) { _impl->SetBlob(input.first, input.second); })
}
void InferRequest::set_output(const ie::BlobMap& results) {
INFER_REQ_CALL_STATEMENT(for (auto&& result : results) { _impl->SetBlob(result.first, result.second); })
OV_INFER_REQ_CALL_STATEMENT(for (auto&& result : results) { _impl->SetBlob(result.first, result.second); })
}
void InferRequest::set_batch(const int batch) {
INFER_REQ_CALL_STATEMENT(_impl->SetBatch(batch);)
OV_INFER_REQ_CALL_STATEMENT(_impl->SetBatch(batch);)
}
void InferRequest::start_async() {
INFER_REQ_CALL_STATEMENT(_impl->StartAsync();)
OV_INFER_REQ_CALL_STATEMENT(_impl->StartAsync();)
}
void InferRequest::wait() {
INFER_REQ_CALL_STATEMENT(_impl->Wait(ie::InferRequest::RESULT_READY);)
OV_INFER_REQ_CALL_STATEMENT(_impl->Wait(ie::InferRequest::RESULT_READY);)
}
bool InferRequest::wait_for(const std::chrono::milliseconds timeout) {
INFER_REQ_CALL_STATEMENT(return _impl->Wait(timeout.count()) == ie::OK;)
OV_INFER_REQ_CALL_STATEMENT(return _impl->Wait(timeout.count()) == ie::OK;)
}
void InferRequest::set_callback(std::function<void(std::exception_ptr)> callback) {
INFER_REQ_CALL_STATEMENT(_impl->SetCallback(std::move(callback));)
OV_INFER_REQ_CALL_STATEMENT(_impl->SetCallback(std::move(callback));)
}
std::vector<VariableState> InferRequest::query_state() {
std::vector<VariableState> variable_states;
INFER_REQ_CALL_STATEMENT({
OV_INFER_REQ_CALL_STATEMENT({
for (auto&& state : _impl->QueryState()) {
variable_states.emplace_back(VariableState{_so, state});
}


@ -126,6 +126,14 @@ public:
namespace ov {
namespace runtime {
#define OV_PLUGIN_CALL_STATEMENT(...) \
OPENVINO_ASSERT(_ptr != nullptr, "InferencePlugin was not initialized."); \
try { \
__VA_ARGS__; \
} catch (...) { \
::InferenceEngine::details::Rethrow(); \
}
/**
* @brief This class is a C++ API wrapper for IInferencePlugin.
*
@ -138,81 +146,81 @@ struct InferencePlugin {
InferencePlugin(const std::shared_ptr<void>& so, const std::shared_ptr<ie::IInferencePlugin>& impl) :
_so{so},
_ptr{impl} {
IE_ASSERT(_ptr != nullptr);
OPENVINO_ASSERT(_ptr != nullptr, "InferencePlugin was not initialized.");
}
void set_name(const std::string& deviceName) {
PLUGIN_CALL_STATEMENT(_ptr->SetName(deviceName));
OV_PLUGIN_CALL_STATEMENT(_ptr->SetName(deviceName));
}
void set_core(std::weak_ptr<ie::ICore> core) {
PLUGIN_CALL_STATEMENT(_ptr->SetCore(core));
OV_PLUGIN_CALL_STATEMENT(_ptr->SetCore(core));
}
const ie::Version get_version() const {
PLUGIN_CALL_STATEMENT(return _ptr->GetVersion());
OV_PLUGIN_CALL_STATEMENT(return _ptr->GetVersion());
}
void add_extension(const ie::IExtensionPtr& extension) {
PLUGIN_CALL_STATEMENT(_ptr->AddExtension(extension));
OV_PLUGIN_CALL_STATEMENT(_ptr->AddExtension(extension));
}
void set_config(const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(_ptr->SetConfig(config));
OV_PLUGIN_CALL_STATEMENT(_ptr->SetConfig(config));
}
SoPtr<ie::IExecutableNetworkInternal> load_model(const ie::CNNNetwork& network, const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->LoadNetwork(network, config)});
SoPtr<ie::IExecutableNetworkInternal> compile_model(const ie::CNNNetwork& network, const ConfigMap& config) {
OV_PLUGIN_CALL_STATEMENT(return {_so, _ptr->LoadNetwork(network, config)});
}
SoPtr<ie::IExecutableNetworkInternal> load_model(const ie::CNNNetwork& network,
const std::shared_ptr<ie::RemoteContext>& context,
const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->LoadNetwork(network, config, context)});
SoPtr<ie::IExecutableNetworkInternal> compile_model(const ie::CNNNetwork& network,
const std::shared_ptr<ie::RemoteContext>& context,
const ConfigMap& config) {
OV_PLUGIN_CALL_STATEMENT(return {_so, _ptr->LoadNetwork(network, config, context)});
}
SoPtr<ie::IExecutableNetworkInternal> load_model(const std::string& modelPath, const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->LoadNetwork(modelPath, config)});
SoPtr<ie::IExecutableNetworkInternal> compile_model(const std::string& modelPath, const ConfigMap& config) {
OV_PLUGIN_CALL_STATEMENT(return {_so, _ptr->LoadNetwork(modelPath, config)});
}
ie::QueryNetworkResult query_model(const ie::CNNNetwork& network,
const ConfigMap& config) const {
ie::QueryNetworkResult res;
PLUGIN_CALL_STATEMENT(res = _ptr->QueryNetwork(network, config));
if (res.rc != ie::OK) IE_THROW() << res.resp.msg;
OV_PLUGIN_CALL_STATEMENT(res = _ptr->QueryNetwork(network, config));
OPENVINO_ASSERT(res.rc == ie::OK, res.resp.msg);
return res;
}
SoPtr<ie::IExecutableNetworkInternal> import_model(const std::string& modelFileName,
const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->ImportNetwork(modelFileName, config)});
const ConfigMap& config) {
OV_PLUGIN_CALL_STATEMENT(return {_so, _ptr->ImportNetwork(modelFileName, config)});
}
SoPtr<ie::IExecutableNetworkInternal> import_model(std::istream& networkModel,
const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->ImportNetwork(networkModel, config)});
OV_PLUGIN_CALL_STATEMENT(return {_so, _ptr->ImportNetwork(networkModel, config)});
}
SoPtr<ie::IExecutableNetworkInternal> import_model(std::istream& networkModel,
const std::shared_ptr<ie::RemoteContext>& context,
const ConfigMap& config) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->ImportNetwork(networkModel, context, config)});
const std::shared_ptr<ie::RemoteContext>& context,
const ConfigMap& config) {
OV_PLUGIN_CALL_STATEMENT(return {_so, _ptr->ImportNetwork(networkModel, context, config)});
}
ie::Parameter get_metric(const std::string& name, const ie::ParamMap& options) const {
PLUGIN_CALL_STATEMENT(return _ptr->GetMetric(name, options));
OV_PLUGIN_CALL_STATEMENT(return _ptr->GetMetric(name, options));
}
SoPtr<ie::RemoteContext> create_context(const ie::ParamMap& params) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->CreateContext(params)});
OV_PLUGIN_CALL_STATEMENT(return {_so, _ptr->CreateContext(params)});
}
SoPtr<ie::RemoteContext> get_default_context(const ie::ParamMap& params) {
PLUGIN_CALL_STATEMENT(return {_so, _ptr->GetDefaultContext(params)});
OV_PLUGIN_CALL_STATEMENT(return {_so, _ptr->GetDefaultContext(params)});
}
ie::Parameter get_config(const std::string& name, const ie::ParamMap& options) const {
PLUGIN_CALL_STATEMENT(return _ptr->GetConfig(name, options));
OV_PLUGIN_CALL_STATEMENT(return _ptr->GetConfig(name, options));
}
};
@ -220,3 +228,4 @@ struct InferencePlugin {
} // namespace ov
#undef PLUGIN_CALL_STATEMENT
#undef OV_PLUGIN_CALL_STATEMENT


@ -2,18 +2,20 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "openvino/runtime/remote_context.hpp"
#include "ie_remote_blob.hpp"
#include "ie_remote_context.hpp"
#define REMOTE_CONTEXT_STATEMENT(...) \
if (_impl == nullptr) \
IE_THROW(NotAllocated) << "RemoteContext was not initialized."; \
try { \
__VA_ARGS__; \
} catch (...) { \
::InferenceEngine::details::Rethrow(); \
#include <exception>
#include "ie_remote_blob.hpp"
#include "openvino/core/except.hpp"
#include "openvino/runtime/remote_context.hpp"
#define OV_REMOTE_CONTEXT_STATEMENT(...) \
OPENVINO_ASSERT(_impl != nullptr, "RemoteContext was not initialized."); \
try { \
__VA_ARGS__; \
} catch (const std::exception& ex) { \
throw ov::Exception(ex.what()); \
}
namespace ov {
@ -22,21 +24,20 @@ namespace runtime {
RemoteContext::RemoteContext(const std::shared_ptr<void>& so, const ie::RemoteContext::Ptr& impl)
: _so(so),
_impl(impl) {
if (_impl == nullptr)
IE_THROW() << "RemoteContext was not initialized.";
OPENVINO_ASSERT(_impl != nullptr, "RemoteContext was not initialized.");
}
std::string RemoteContext::get_device_name() const {
REMOTE_CONTEXT_STATEMENT(return _impl->getDeviceName());
OV_REMOTE_CONTEXT_STATEMENT(return _impl->getDeviceName());
}
std::shared_ptr<ie::RemoteBlob> RemoteContext::create_blob(const ie::TensorDesc& tensorDesc,
const ie::ParamMap& params) {
REMOTE_CONTEXT_STATEMENT(return _impl->CreateBlob(tensorDesc, params));
OV_REMOTE_CONTEXT_STATEMENT(return _impl->CreateBlob(tensorDesc, params));
}
ie::ParamMap RemoteContext::get_params() const {
REMOTE_CONTEXT_STATEMENT(return _impl->getParams());
OV_REMOTE_CONTEXT_STATEMENT(return _impl->getParams());
}
} // namespace runtime


@ -4,8 +4,7 @@
#include "cpp/ie_memory_state.hpp"
#include "cpp_interfaces/interface/ie_ivariable_state_internal.hpp"
#include "details/ie_so_loader.h"
#include "exception2status.hpp"
#include "openvino/core/except.hpp"
#include "openvino/runtime/variable_state.hpp"
#define VARIABLE_CALL_STATEMENT(...) \
@ -17,6 +16,16 @@
::InferenceEngine::details::Rethrow(); \
}
#define OV_VARIABLE_CALL_STATEMENT(...) \
OPENVINO_ASSERT(_impl != nullptr, "VariableState was not initialized."); \
try { \
__VA_ARGS__; \
} catch (const std::exception& ex) { \
throw ov::Exception(ex.what()); \
} catch (...) { \
OPENVINO_ASSERT(false, "Unexpected exception"); \
}
namespace InferenceEngine {
VariableState::VariableState(const details::SharedObjectLoader& so, const IVariableStateInternal::Ptr& impl)
@ -52,23 +61,23 @@ namespace runtime {
VariableState::VariableState(const std::shared_ptr<void>& so, const ie::IVariableStateInternal::Ptr& impl)
: _so{so},
_impl{impl} {
IE_ASSERT(_impl != nullptr);
OPENVINO_ASSERT(_impl != nullptr, "VariableState was not initialized.");
}
void VariableState::reset() {
VARIABLE_CALL_STATEMENT(_impl->Reset());
OV_VARIABLE_CALL_STATEMENT(_impl->Reset());
}
std::string VariableState::get_name() const {
VARIABLE_CALL_STATEMENT(return _impl->GetName());
OV_VARIABLE_CALL_STATEMENT(return _impl->GetName());
}
ie::Blob::CPtr VariableState::get_state() const {
VARIABLE_CALL_STATEMENT(return _impl->GetState());
OV_VARIABLE_CALL_STATEMENT(return _impl->GetState());
}
void VariableState::set_state(const ie::Blob::Ptr& state) {
VARIABLE_CALL_STATEMENT(_impl->SetState(state));
OV_VARIABLE_CALL_STATEMENT(_impl->SetState(state));
}
} // namespace runtime

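Across all of these wrappers the null-_impl check is now an OPENVINO_ASSERT, so calling into a default-constructed object reports ov::Exception rather than an InferenceEngine error. A short usage sketch mirroring the VariableState unit tests further down:

#include <iostream>

#include "openvino/core/except.hpp"
#include "openvino/runtime/variable_state.hpp"

int main() {
    ov::runtime::VariableState state;  // default-constructed, _impl is null
    try {
        state.reset();  // OV_VARIABLE_CALL_STATEMENT fires OPENVINO_ASSERT
    } catch (const ov::Exception& ex) {
        // The message includes "VariableState was not initialized."
        std::cout << ex.what() << std::endl;
    }
    return 0;
}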

@ -28,6 +28,7 @@
#include "ngraph/ngraph.hpp"
#include "ngraph/opsets/opset.hpp"
#include "ngraph/pass/constant_folding.hpp"
#include "openvino/core/except.hpp"
#include "openvino/runtime/core.hpp"
#include "openvino/runtime/executable_network.hpp"
#include "openvino/util/file_util.hpp"
@ -210,7 +211,7 @@ class CoreImpl : public ie::ICore, public std::enable_shared_from_this<ie::ICore
return supported;
}
ov::runtime::SoPtr<ie::IExecutableNetworkInternal> load_model_impl(
ov::runtime::SoPtr<ie::IExecutableNetworkInternal> compile_model_impl(
const InferenceEngine::CNNNetwork& network,
InferencePlugin& plugin,
const std::map<std::string, std::string>& parsedConfig,
@ -218,10 +219,10 @@ class CoreImpl : public ie::ICore, public std::enable_shared_from_this<ie::ICore
const std::string& blobID,
const std::string& modelPath = std::string(),
bool forceDisableCache = false) {
OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "CoreImpl::load_model_impl");
OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "CoreImpl::compile_model_impl");
ov::runtime::SoPtr<ie::IExecutableNetworkInternal> execNetwork;
execNetwork =
context ? plugin.load_model(network, context, parsedConfig) : plugin.load_model(network, parsedConfig);
execNetwork = context ? plugin.compile_model(network, context, parsedConfig)
: plugin.compile_model(network, parsedConfig);
auto cacheManager = coreConfig.getCacheConfig()._cacheManager;
if (!forceDisableCache && cacheManager && DeviceSupportsImportExport(plugin)) {
try {
@ -252,7 +253,7 @@ class CoreImpl : public ie::ICore, public std::enable_shared_from_this<ie::ICore
ov::runtime::SoPtr<ie::IExecutableNetworkInternal> execNetwork;
struct HeaderException {};
IE_ASSERT(cacheManager != nullptr);
OPENVINO_ASSERT(cacheManager != nullptr);
try {
cacheManager->readCacheEntry(blobId, [&](std::istream& networkStream) {
OV_ITT_SCOPE(FIRST_INFERENCE,
@ -460,10 +461,10 @@ public:
auto lock = cacheGuard.getHashLock(hash);
res = LoadNetworkFromCache(cacheManager, hash, plugin, parsed._config, context, loadedFromCache);
if (!loadedFromCache) {
res = load_model_impl(network, plugin, parsed._config, context, hash);
res = compile_model_impl(network, plugin, parsed._config, context, hash);
}
} else {
res = load_model_impl(network, plugin, parsed._config, context, {});
res = compile_model_impl(network, plugin, parsed._config, context, {});
}
return res;
}
@ -487,10 +488,10 @@ public:
auto lock = cacheGuard.getHashLock(hash);
res = LoadNetworkFromCache(cacheManager, hash, plugin, parsed._config, nullptr, loadedFromCache);
if (!loadedFromCache) {
res = load_model_impl(network, plugin, parsed._config, nullptr, hash, {}, forceDisableCache);
res = compile_model_impl(network, plugin, parsed._config, nullptr, hash, {}, forceDisableCache);
}
} else {
res = load_model_impl(network, plugin, parsed._config, nullptr, {}, {}, forceDisableCache);
res = compile_model_impl(network, plugin, parsed._config, nullptr, {}, {}, forceDisableCache);
}
return {{res._so}, res._ptr};
}
@ -510,13 +511,13 @@ public:
res = LoadNetworkFromCache(cacheManager, hash, plugin, parsed._config, nullptr, loadedFromCache, modelPath);
if (!loadedFromCache) {
auto cnnNetwork = ReadNetwork(modelPath, std::string());
res = load_model_impl(cnnNetwork, plugin, parsed._config, nullptr, hash, modelPath);
res = compile_model_impl(cnnNetwork, plugin, parsed._config, nullptr, hash, modelPath);
}
} else if (cacheManager) {
res = plugin.load_model(modelPath, parsed._config);
res = plugin.compile_model(modelPath, parsed._config);
} else {
auto cnnNetwork = ReadNetwork(modelPath, std::string());
res = load_model_impl(cnnNetwork, plugin, parsed._config, nullptr, {}, modelPath);
res = compile_model_impl(cnnNetwork, plugin, parsed._config, nullptr, {}, modelPath);
}
return {{res._so}, res._ptr};
}
@ -610,6 +611,8 @@ public:
devicesIDs = p.as<std::vector<std::string>>();
} catch (ie::Exception&) {
// plugin is not created by e.g. invalid env
} catch (ov::Exception&) {
// plugin is not created by e.g. invalid env
} catch (const std::exception& ex) {
IE_THROW() << "An exception is thrown while trying to create the " << deviceName
<< " device and call GetMetric: " << ex.what();
@ -1221,65 +1224,82 @@ void Core::UnregisterPlugin(const std::string& deviceName_) {
namespace ov {
namespace runtime {
#define OV_CORE_CALL_STATEMENT(...) \
try { \
__VA_ARGS__; \
} catch (const std::exception& ex) { \
throw ov::Exception(ex.what()); \
} catch (...) { \
OPENVINO_ASSERT(false, "Unexpected exception"); \
}
class Core::Impl : public CoreImpl {};
Core::Core(const std::string& xmlConfigFile) {
_impl = std::make_shared<Impl>();
register_plugins(parseXmlConfig(xmlConfigFile));
OV_CORE_CALL_STATEMENT(register_plugins(parseXmlConfig(xmlConfigFile)));
}
std::map<std::string, ie::Version> Core::get_versions(const std::string& deviceName) const {
return _impl->GetVersions(deviceName);
OV_CORE_CALL_STATEMENT(return _impl->GetVersions(deviceName))
}
#ifdef ENABLE_UNICODE_PATH_SUPPORT
std::shared_ptr<ngraph::Function> Core::read_model(const std::wstring& modelPath, const std::wstring& binPath) const {
return _impl->ReadNetwork(ov::util::wstring_to_string(modelPath), ov::util::wstring_to_string(binPath))
.getFunction();
OV_CORE_CALL_STATEMENT(
return _impl->ReadNetwork(ov::util::wstring_to_string(modelPath), ov::util::wstring_to_string(binPath))
.getFunction(););
}
#endif
std::shared_ptr<ngraph::Function> Core::read_model(const std::string& modelPath, const std::string& binPath) const {
return _impl->ReadNetwork(modelPath, binPath).getFunction();
OV_CORE_CALL_STATEMENT(return _impl->ReadNetwork(modelPath, binPath).getFunction(););
}
std::shared_ptr<ngraph::Function> Core::read_model(const std::string& model, const ie::Blob::CPtr& weights) const {
return _impl->ReadNetwork(model, weights).getFunction();
OV_CORE_CALL_STATEMENT(return _impl->ReadNetwork(model, weights).getFunction(););
}
ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
const std::string& deviceName,
const ConfigMap& config) {
auto exec =
_impl->LoadNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)), deviceName, config);
return {exec.operator const InferenceEngine::details::SharedObjectLoader&().get(),
exec.operator std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>&()};
OV_CORE_CALL_STATEMENT(
auto exec =
_impl->LoadNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)), deviceName, config);
return {exec.operator const InferenceEngine::details::SharedObjectLoader&().get(),
exec.operator std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>&()};);
}
ExecutableNetwork Core::compile_model(const std::string& modelPath,
const std::string& deviceName,
const ConfigMap& config) {
auto exec = _impl->LoadNetwork(modelPath, deviceName, config);
return {exec.operator const InferenceEngine::details::SharedObjectLoader&().get(),
exec.operator std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>&()};
OV_CORE_CALL_STATEMENT(auto exec = _impl->LoadNetwork(modelPath, deviceName, config);
return {exec.operator const InferenceEngine::details::SharedObjectLoader&().get(),
exec.operator std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>&()};);
}
ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
const RemoteContext& context,
const ConfigMap& config) {
auto exec =
_impl->LoadNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)), context._impl, config);
return {exec._so, exec._ptr};
OV_CORE_CALL_STATEMENT(auto exec =
_impl->LoadNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)),
context._impl,
config);
return {exec._so, exec._ptr};);
}
void Core::add_extension(const ie::IExtensionPtr& extension) {
_impl->AddExtension(extension);
OV_CORE_CALL_STATEMENT(_impl->AddExtension(extension););
}
ExecutableNetwork Core::import_model(std::istream& networkModel,
const std::string& deviceName,
const ConfigMap& config) {
OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model");
auto exec = _impl->ImportNetwork(networkModel, deviceName, config);
return {exec.operator const InferenceEngine::details::SharedObjectLoader&().get(),
exec.operator std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>&()};
OV_CORE_CALL_STATEMENT(auto exec = _impl->ImportNetwork(networkModel, deviceName, config);
return {exec.operator const InferenceEngine::details::SharedObjectLoader&().get(),
exec.operator std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>&()};);
}
ExecutableNetwork Core::import_model(std::istream& networkModel,
@ -1297,143 +1317,109 @@ ExecutableNetwork Core::import_model(std::istream& networkModel,
if (exportMagic == magic) {
std::getline(networkModel, deviceName);
} else {
IE_THROW() << "Passed compiled stream does not contain device name. "
"Please, provide device name manually";
OPENVINO_ASSERT(false,
"Passed compiled stream does not contain device name. "
"Please, provide device name manually");
}
networkModel.seekg(currentPos, networkModel.beg);
auto exec = _impl->GetCPPPluginByName(deviceName).import_model(networkModel, {});
return {exec._so, exec._ptr};
OV_CORE_CALL_STATEMENT(auto exec = _impl->GetCPPPluginByName(deviceName).import_model(networkModel, {});
return {exec._so, exec._ptr};);
}
SupportedOpsMap Core::query_model(const std::shared_ptr<const ngraph::Function>& network,
const std::string& deviceName,
const ConfigMap& config) const {
auto cnnNet = ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network));
auto qnResult = _impl->QueryNetwork(cnnNet, deviceName, config);
return qnResult.supportedLayersMap;
OV_CORE_CALL_STATEMENT(auto cnnNet = ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network));
auto qnResult = _impl->QueryNetwork(cnnNet, deviceName, config);
return qnResult.supportedLayersMap;);
}
void Core::set_config(const ConfigMap& config, const std::string& deviceName) {
// HETERO case
if (deviceName.find("HETERO:") == 0) {
IE_THROW() << "SetConfig is supported only for HETERO itself (without devices). "
"You can configure the devices with SetConfig before creating the HETERO on top.";
}
// MULTI case
if (deviceName.find("MULTI:") == 0) {
IE_THROW() << "SetConfig is supported only for MULTI itself (without devices). "
"You can configure the devices with SetConfig before creating the MULTI on top.";
}
// AUTO case
if (deviceName.find("AUTO:") == 0) {
IE_THROW() << "SetConfig is supported only for AUTO itself (without devices). "
"You can configure the devices with SetConfig before creating the AUTO on top.";
}
OPENVINO_ASSERT(deviceName.find("HETERO:") != 0,
"set_config is supported only for HETERO itself (without devices). "
"You can configure the devices with set_config before creating the HETERO on top.");
OPENVINO_ASSERT(deviceName.find("MULTI:") != 0,
"set_config is supported only for MULTI itself (without devices). "
"You can configure the devices with set_config before creating the MULTI on top.");
OPENVINO_ASSERT(deviceName.find("AUTO:") != 0,
"set_config is supported only for AUTO itself (without devices). "
"You can configure the devices with set_config before creating the AUTO on top.");
// GPU.0, GPU.1 cases
if (deviceName.find(".") != std::string::npos) {
IE_THROW()
<< "SetConfig is supported only for device family itself (without particular device .#). "
"You can pass .# as a particular device instance to QueryNetwork, LoadNetwork, ImportNetwork only";
}
OPENVINO_ASSERT(deviceName.find(".") == std::string::npos,
"set_config is supported only for device family itself (without particular device .#). "
"You can pass .# as a particular device instance to query_model, compile_model, import_model only");
if (deviceName.empty()) {
_impl->SetConfigForPlugins(config, std::string());
} else {
auto parsed = parseDeviceNameIntoConfig(deviceName, config);
_impl->SetConfigForPlugins(parsed._config, parsed._deviceName);
}
OV_CORE_CALL_STATEMENT(
if (deviceName.empty()) { _impl->SetConfigForPlugins(config, std::string()); } else {
auto parsed = parseDeviceNameIntoConfig(deviceName, config);
_impl->SetConfigForPlugins(parsed._config, parsed._deviceName);
});
}
ie::Parameter Core::get_config(const std::string& deviceName, const std::string& name) const {
// HETERO case
{
if (deviceName.find("HETERO:") == 0) {
IE_THROW() << "You can only GetConfig of the HETERO itself (without devices). "
"GetConfig is also possible for the individual devices before creating the HETERO on top.";
}
}
// MULTI case
{
if (deviceName.find("MULTI:") == 0) {
IE_THROW() << "You can only GetConfig of the MULTI itself (without devices). "
"GetConfig is also possible for the individual devices before creating the MULTI on top.";
}
}
// AUTO case
{
if (deviceName.find("AUTO:") == 0) {
IE_THROW() << "You can only GetConfig of the AUTO itself (without devices). "
"GetConfig is also possible for the individual devices before creating the AUTO on top.";
}
}
OPENVINO_ASSERT(deviceName.find("HETERO:") != 0,
"You can only get_config of the HETERO itself (without devices). "
"get_config is also possible for the individual devices before creating the HETERO on top.");
OPENVINO_ASSERT(deviceName.find("MULTI:") != 0,
"You can only get_config of the MULTI itself (without devices). "
"get_config is also possible for the individual devices before creating the MULTI on top.");
OPENVINO_ASSERT(deviceName.find("AUTO:") != 0,
"You can only get_config of the AUTO itself (without devices). "
"get_config is also possible for the individual devices before creating the AUTO on top.");
auto parsed = parseDeviceNameIntoConfig(deviceName);
OV_CORE_CALL_STATEMENT(
auto parsed = parseDeviceNameIntoConfig(deviceName);
// we need to return a copy of Parameter object which is created on Core side,
// not in ie plugin side, which can be unloaded from Core in a parallel thread
// TODO: remove this WA after *-31417 is resolved
return copyParameterValue(_impl->GetCPPPluginByName(parsed._deviceName).get_config(name, parsed._config));
// we need to return a copy of Parameter object which is created on Core side,
// not in ie plugin side, which can be unloaded from Core in a parallel thread
// TODO: remove this WA after *-31417 is resolved
return copyParameterValue(_impl->GetCPPPluginByName(parsed._deviceName).get_config(name, parsed._config)););
}
ie::Parameter Core::get_metric(const std::string& deviceName, const std::string& name) const {
return _impl->GetMetric(deviceName, name);
OV_CORE_CALL_STATEMENT(return _impl->GetMetric(deviceName, name););
}
std::vector<std::string> Core::get_available_devices() const {
return _impl->GetAvailableDevices();
OV_CORE_CALL_STATEMENT(return _impl->GetAvailableDevices(););
}
void Core::register_plugin(const std::string& pluginName, const std::string& deviceName) {
_impl->RegisterPluginByName(pluginName, deviceName);
OV_CORE_CALL_STATEMENT(_impl->RegisterPluginByName(pluginName, deviceName););
}
void Core::unload_plugin(const std::string& deviceName) {
ie::DeviceIDParser parser(deviceName);
std::string devName = parser.getDeviceName();
OV_CORE_CALL_STATEMENT(ie::DeviceIDParser parser(deviceName); std::string devName = parser.getDeviceName();
_impl->UnloadPluginByName(devName);
_impl->UnloadPluginByName(devName););
}
void Core::register_plugins(const std::string& xmlConfigFile) {
_impl->RegisterPluginsInRegistry(xmlConfigFile);
OV_CORE_CALL_STATEMENT(_impl->RegisterPluginsInRegistry(xmlConfigFile););
}
RemoteContext Core::create_context(const std::string& deviceName, const ie::ParamMap& params) {
if (deviceName.find("HETERO") == 0) {
IE_THROW() << "HETERO device does not support remote context";
}
if (deviceName.find("MULTI") == 0) {
IE_THROW() << "MULTI device does not support remote context";
}
if (deviceName.find("AUTO") == 0) {
IE_THROW() << "AUTO device does not support remote context";
}
OPENVINO_ASSERT(deviceName.find("HETERO") != 0, "HETERO device does not support remote context");
OPENVINO_ASSERT(deviceName.find("MULTI") != 0, "MULTI device does not support remote context");
OPENVINO_ASSERT(deviceName.find("AUTO") != 0, "AUTO device does not support remote context");
auto parsed = parseDeviceNameIntoConfig(deviceName, params);
auto remoteContext = _impl->GetCPPPluginByName(parsed._deviceName).create_context(parsed._config);
return {remoteContext._so, remoteContext._ptr};
OV_CORE_CALL_STATEMENT(auto parsed = parseDeviceNameIntoConfig(deviceName, params);
auto remoteContext =
_impl->GetCPPPluginByName(parsed._deviceName).create_context(parsed._config);
return {remoteContext._so, remoteContext._ptr};);
}
RemoteContext Core::get_default_context(const std::string& deviceName) {
if (deviceName.find("HETERO") == 0) {
IE_THROW() << "HETERO device does not support remote context";
}
if (deviceName.find("MULTI") == 0) {
IE_THROW() << "MULTI device does not support remote context";
}
if (deviceName.find("AUTO") == 0) {
IE_THROW() << "AUTO device does not support remote context";
}
OPENVINO_ASSERT(deviceName.find("HETERO") != 0, "HETERO device does not support remote context");
OPENVINO_ASSERT(deviceName.find("MULTI") != 0, "MULTI device does not support remote context");
OPENVINO_ASSERT(deviceName.find("AUTO") != 0, "AUTO device does not support remote context");
auto parsed = parseDeviceNameIntoConfig(deviceName, ie::ParamMap());
auto remoteContext = _impl->GetCPPPluginByName(parsed._deviceName).get_default_context(parsed._config);
return {remoteContext._so, remoteContext._ptr};
OV_CORE_CALL_STATEMENT(auto parsed = parseDeviceNameIntoConfig(deviceName, ie::ParamMap());
auto remoteContext =
_impl->GetCPPPluginByName(parsed._deviceName).get_default_context(parsed._config);
return {remoteContext._so, remoteContext._ptr};);
}
} // namespace runtime

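The Core-level guards (HETERO/MULTI/AUTO prefixes, device IDs, remote-context restrictions) are also OPENVINO_ASSERT now, so misuse of the 2.0 API is reported as ov::Exception as well. A small sketch of one such guard in action, again assuming the default Core constructor; the config key is a placeholder:

#include <iostream>

#include "openvino/core/except.hpp"
#include "openvino/runtime/core.hpp"

int main() {
    ov::runtime::Core core;
    try {
        // Configuring "HETERO:CPU" directly is rejected; configure the underlying
        // devices before creating HETERO on top. The guard is an OPENVINO_ASSERT,
        // so the error arrives as ov::Exception.
        core.set_config({{"SOME_KEY", "SOME_VALUE"}}, "HETERO:CPU");
    } catch (const ov::Exception& ex) {
        std::cerr << ex.what() << std::endl;
    }
    return 0;
}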

@ -3,6 +3,7 @@
//
#include <gtest/gtest.h>
#include <openvino/core/except.hpp>
#include <openvino/runtime/executable_network.hpp>
using namespace ::testing;
@ -10,40 +11,40 @@ using namespace std;
TEST(ExecutableNetworkOVTests, throwsOnUninitializedExportStream) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.export_model(std::cout), InferenceEngine::NotAllocated);
ASSERT_THROW(exec.export_model(std::cout), ov::Exception);
}
TEST(ExecutableNetworkOVTests, throwsOnUninitializedGetFunction) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.get_runtime_function(), InferenceEngine::NotAllocated);
ASSERT_THROW(exec.get_runtime_function(), ov::Exception);
}
TEST(ExecutableNetworkOVTests, throwsOnUninitializedGetParameters) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.get_parameters(), InferenceEngine::NotAllocated);
ASSERT_THROW(exec.get_parameters(), ov::Exception);
}
TEST(ExecutableNetworkOVTests, throwsOnUninitializedGetResults) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.get_results(), InferenceEngine::NotAllocated);
ASSERT_THROW(exec.get_results(), ov::Exception);
}
TEST(ExecutableNetworkOVTests, throwsOnUninitializedSetConfig) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.set_config({{}}), InferenceEngine::NotAllocated);
ASSERT_THROW(exec.set_config({{}}), ov::Exception);
}
TEST(ExecutableNetworkOVTests, throwsOnUninitializedGetConfig) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.get_config({}), InferenceEngine::NotAllocated);
ASSERT_THROW(exec.get_config({}), ov::Exception);
}
TEST(ExecutableNetworkOVTests, throwsOnUninitializedGetMetric) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.get_metric({}), InferenceEngine::NotAllocated);
ASSERT_THROW(exec.get_metric({}), ov::Exception);
}
TEST(ExecutableNetworkOVTests, throwsOnUninitializedGetContext) {
ov::runtime::ExecutableNetwork exec;
ASSERT_THROW(exec.get_context(), InferenceEngine::NotAllocated);
ASSERT_THROW(exec.get_context(), ov::Exception);
}


@ -5,6 +5,7 @@
#include <gtest/gtest.h>
#include <cpp/ie_infer_request.hpp>
#include <openvino/core/except.hpp>
#include <openvino/runtime/infer_request.hpp>
using namespace ::testing;
@ -15,61 +16,61 @@ using namespace InferenceEngine::details;
TEST(InferRequestOVTests, throwsOnUninitializedSetBlob) {
ov::runtime::InferRequest req;
ASSERT_THROW(req.set_blob({}, {}), InferenceEngine::NotAllocated);
ASSERT_THROW(req.set_blob({}, {}), ov::Exception);
}
TEST(InferRequestOVTests, throwsOnUninitializedGetBlob) {
ov::runtime::InferRequest req;
ASSERT_THROW(req.get_blob({}), InferenceEngine::NotAllocated);
ASSERT_THROW(req.get_blob({}), ov::Exception);
}
TEST(InferRequestOVTests, throwsOnUninitializedInfer) {
ov::runtime::InferRequest req;
ASSERT_THROW(req.infer(), InferenceEngine::NotAllocated);
ASSERT_THROW(req.infer(), ov::Exception);
}
TEST(InferRequestOVTests, throwsOnUninitializedGetPerformanceCounts) {
ov::runtime::InferRequest req;
ASSERT_THROW(req.get_profiling_info(), InferenceEngine::NotAllocated);
ASSERT_THROW(req.get_profiling_info(), ov::Exception);
}
TEST(InferRequestOVTests, throwsOnUninitializedSetInput) {
ov::runtime::InferRequest req;
ASSERT_THROW(req.set_input({{}}), InferenceEngine::NotAllocated);
ASSERT_THROW(req.set_input({{}}), ov::Exception);
}
TEST(InferRequestOVTests, throwsOnUninitializedSetOutput) {
ov::runtime::InferRequest req;
ASSERT_THROW(req.set_output({{}}), InferenceEngine::NotAllocated);
ASSERT_THROW(req.set_output({{}}), ov::Exception);
}
TEST(InferRequestOVTests, throwsOnUninitializedSetBatch) {
ov::runtime::InferRequest req;
ASSERT_THROW(req.set_batch({}), InferenceEngine::NotAllocated);
ASSERT_THROW(req.set_batch({}), ov::Exception);
}
TEST(InferRequestOVTests, throwsOnUninitializedStartAsync) {
ov::runtime::InferRequest req;
ASSERT_THROW(req.start_async(), InferenceEngine::NotAllocated);
ASSERT_THROW(req.start_async(), ov::Exception);
}
TEST(InferRequestOVTests, throwsOnUninitializedWait) {
ov::runtime::InferRequest req;
ASSERT_THROW(req.wait(), InferenceEngine::NotAllocated);
ASSERT_THROW(req.wait(), ov::Exception);
}
TEST(InferRequestOVTests, throwsOnUninitializedWaitFor) {
ov::runtime::InferRequest req;
ASSERT_THROW(req.wait_for({}), InferenceEngine::NotAllocated);
ASSERT_THROW(req.wait_for({}), ov::Exception);
}
TEST(InferRequestOVTests, throwsOnUninitializedSetCompletionCallback) {
ov::runtime::InferRequest req;
std::function<void(std::exception_ptr)> f;
ASSERT_THROW(req.set_callback(f), InferenceEngine::NotAllocated);
ASSERT_THROW(req.set_callback(f), ov::Exception);
}
TEST(InferRequestOVTests, throwsOnUninitializedQueryState) {
ov::runtime::InferRequest req;
ASSERT_THROW(req.query_state(), InferenceEngine::NotAllocated);
ASSERT_THROW(req.query_state(), ov::Exception);
}


@ -4,6 +4,7 @@
#include <gtest/gtest.h>
#include <openvino/core/except.hpp>
#include <openvino/runtime/remote_context.hpp>
using namespace ::testing;
@ -11,15 +12,15 @@ using namespace std;
TEST(RemoteContextOVTests, throwsOnUninitializedReset) {
ov::runtime::RemoteContext ctx;
ASSERT_THROW(ctx.get_device_name(), InferenceEngine::NotAllocated);
ASSERT_THROW(ctx.get_device_name(), ov::Exception);
}
TEST(RemoteContextOVTests, throwsOnUninitializedGetname) {
ov::runtime::RemoteContext ctx;
ASSERT_THROW(ctx.create_blob({}, {}), InferenceEngine::NotAllocated);
ASSERT_THROW(ctx.create_blob({}, {}), ov::Exception);
}
TEST(RemoteContextOVTests, throwsOnUninitializedGetParams) {
ov::runtime::RemoteContext ctx;
ASSERT_THROW(ctx.get_params(), InferenceEngine::NotAllocated);
ASSERT_THROW(ctx.get_params(), ov::Exception);
}


@ -4,6 +4,7 @@
#include <gtest/gtest.h>
#include <openvino/core/except.hpp>
#include <openvino/runtime/variable_state.hpp>
using namespace ::testing;
@ -11,21 +12,21 @@ using namespace std;
TEST(VariableStateOVTests, throwsOnUninitializedReset) {
ov::runtime::VariableState state;
ASSERT_THROW(state.reset(), InferenceEngine::NotAllocated);
ASSERT_THROW(state.reset(), ov::Exception);
}
TEST(VariableStateOVTests, throwsOnUninitializedGetname) {
ov::runtime::VariableState state;
ASSERT_THROW(state.get_name(), InferenceEngine::NotAllocated);
ASSERT_THROW(state.get_name(), ov::Exception);
}
TEST(VariableStateOVTests, throwsOnUninitializedGetState) {
ov::runtime::VariableState state;
ASSERT_THROW(state.get_state(), InferenceEngine::NotAllocated);
ASSERT_THROW(state.get_state(), ov::Exception);
}
TEST(VariableStateOVTests, throwsOnUninitializedSetState) {
ov::runtime::VariableState state;
InferenceEngine::Blob::Ptr blob;
ASSERT_THROW(state.set_state(blob), InferenceEngine::NotAllocated);
ASSERT_THROW(state.set_state(blob), ov::Exception);
}


@ -215,7 +215,7 @@ TEST(OVClassBasicTest, smoke_createDefault) {
TEST_P(OVClassBasicTestP, registerExistingPluginThrows) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
ov::runtime::Core ie = createCoreWithTemplate();
ASSERT_THROW(ie.register_plugin(pluginName, deviceName), Exception);
ASSERT_THROW(ie.register_plugin(pluginName, deviceName), ov::Exception);
}
TEST_P(OVClassBasicTestP, registerNewPluginNoThrows) {
@ -228,12 +228,12 @@ TEST_P(OVClassBasicTestP, registerNewPluginNoThrows) {
TEST(OVClassBasicTest, smoke_registerExistingPluginFileThrows) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
ov::runtime::Core ie = createCoreWithTemplate();
ASSERT_THROW(ie.register_plugins("nonExistPlugins.xml"), Exception);
ASSERT_THROW(ie.register_plugins("nonExistPlugins.xml"), ov::Exception);
}
TEST(OVClassBasicTest, smoke_createNonExistingConfigThrows) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
ASSERT_THROW(ov::runtime::Core ie("nonExistPlugins.xml"), Exception);
ASSERT_THROW(ov::runtime::Core ie("nonExistPlugins.xml"), ov::Exception);
}
#ifdef __linux__
@ -252,7 +252,7 @@ TEST(OVClassBasicTest, smoke_createMockEngineConfigThrows) {
std::string filename{"mock_engine.xml"};
std::string content{"<ie><plugins><plugin location=\"libmock_engine.so\"></plugin></plugins></ie>"};
CommonTestUtils::createFile(filename, content);
ASSERT_THROW(ov::runtime::Core ie(filename), Exception);
ASSERT_THROW(ov::runtime::Core ie(filename), ov::Exception);
CommonTestUtils::removeFile(filename.c_str());
}
@ -296,7 +296,7 @@ TEST_P(OVClassBasicTestP, smoke_registerPluginsXMLUnicodePath) {
GTEST_COUT << "Plugin registered and created " << testIndex << std::endl;
GTEST_COUT << "OK" << std::endl;
} catch (const InferenceEngine::Exception& e_next) {
} catch (const ov::Exception& e_next) {
CommonTestUtils::removeFile(pluginsXmlW);
std::remove(pluginXML.c_str());
FAIL() << e_next.what();
@ -337,7 +337,7 @@ TEST_P(OVClassBasicTestP, unregisterExistingPluginNoThrow) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
ov::runtime::Core ie = createCoreWithTemplate();
// device instance is not created yet
ASSERT_THROW(ie.unload_plugin(deviceName), Exception);
ASSERT_THROW(ie.unload_plugin(deviceName), ov::Exception);
// make the first call to IE which created device instance
ie.get_versions(deviceName);
@ -348,7 +348,7 @@ TEST_P(OVClassBasicTestP, unregisterExistingPluginNoThrow) {
TEST_P(OVClassBasicTestP, accessToUnregisteredPluginThrows) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
ov::runtime::Core ie = createCoreWithTemplate();
ASSERT_THROW(ie.unload_plugin(deviceName), Exception);
ASSERT_THROW(ie.unload_plugin(deviceName), ov::Exception);
ASSERT_NO_THROW(ie.get_versions(deviceName));
ASSERT_NO_THROW(ie.unload_plugin(deviceName));
ASSERT_NO_THROW(ie.set_config({}, deviceName));
@ -359,7 +359,7 @@ TEST_P(OVClassBasicTestP, accessToUnregisteredPluginThrows) {
TEST(OVClassBasicTest, smoke_unregisterNonExistingPluginThrows) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
ov::runtime::Core ie = createCoreWithTemplate();
ASSERT_THROW(ie.unload_plugin("unkown_device"), Exception);
ASSERT_THROW(ie.unload_plugin("unkown_device"), ov::Exception);
}
//
@ -376,7 +376,7 @@ TEST_P(OVClassBasicTestP, SetConfigAllThrows) {
TEST_P(OVClassBasicTestP, SetConfigForUnRegisteredDeviceThrows) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
ov::runtime::Core ie = createCoreWithTemplate();
ASSERT_THROW(ie.set_config({{"unsupported_key", "4"}}, "unregistered_device"), Exception);
ASSERT_THROW(ie.set_config({{"unsupported_key", "4"}}, "unregistered_device"), ov::Exception);
}
TEST_P(OVClassBasicTestP, SetConfigNoThrow) {
@ -501,7 +501,7 @@ TEST_P(OVClassNetworkTestP, QueryNetworkActualNoThrow) {
try {
ie.query_model(actualNetwork, deviceName);
} catch (const InferenceEngine::Exception& ex) {
} catch (const ov::Exception& ex) {
std::string message = ex.what();
ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
}
@ -519,7 +519,7 @@ TEST_P(OVClassNetworkTestP, QueryNetworkWithKSO) {
FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName;
}
}
} catch (const InferenceEngine::Exception& ex) {
} catch (const ov::Exception& ex) {
std::string message = ex.what();
ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
}
@ -589,7 +589,7 @@ TEST_P(OVClassNetworkTestP, SetAffinityWithKSO) {
op->get_rt_info()["affinity"] = std::make_shared<ngraph::VariantWrapper<std::string>>(affinity);
}
auto exeNetwork = ie.compile_model(ksoNetwork, deviceName);
} catch (const InferenceEngine::Exception& ex) {
} catch (const ov::Exception& ex) {
std::string message = ex.what();
ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
}
@ -607,7 +607,7 @@ TEST_P(OVClassNetworkTestP, QueryNetworkHeteroActualNoThrow) {
TEST_P(OVClassNetworkTestP, QueryNetworkMultiThrows) {
SKIP_IF_CURRENT_TEST_IS_DISABLED()
ov::runtime::Core ie = createCoreWithTemplate();
ASSERT_THROW(ie.query_model(actualNetwork, CommonTestUtils::DEVICE_MULTI), Exception);
ASSERT_THROW(ie.query_model(actualNetwork, CommonTestUtils::DEVICE_MULTI), ov::Exception);
}
TEST(OVClassBasicTest, smoke_GetMetricSupportedMetricsHeteroNoThrow) {
@ -649,7 +649,7 @@ TEST(OVClassBasicTest, smoke_GetMetricSupportedConfigKeysHeteroThrows) {
ov::runtime::Core ie = createCoreWithTemplate();
// TODO: check
std::string targetDevice = CommonTestUtils::DEVICE_HETERO + std::string(":") + CommonTestUtils::DEVICE_CPU;
ASSERT_THROW(ie.get_metric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS)), Exception);
ASSERT_THROW(ie.get_metric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS)), ov::Exception);
}
TEST_P(OVClassGetMetricTest_SUPPORTED_METRICS, GetMetricAndPrintNoThrow) {
@ -831,7 +831,7 @@ TEST_P(OVClassGetMetricTest_ThrowUnsupported, GetMetricThrow) {
ov::runtime::Core ie = createCoreWithTemplate();
Parameter p;
ASSERT_THROW(p = ie.get_metric(deviceName, "unsupported_metric"), Exception);
ASSERT_THROW(p = ie.get_metric(deviceName, "unsupported_metric"), ov::Exception);
}
TEST_P(OVClassGetConfigTest, GetConfigNoThrow) {
@ -867,7 +867,7 @@ TEST_P(OVClassGetConfigTest_ThrowUnsupported, GetConfigHeteroThrow) {
ov::runtime::Core ie = createCoreWithTemplate();
Parameter p;
ASSERT_THROW(p = ie.get_config(CommonTestUtils::DEVICE_HETERO, "unsupported_config"), Exception);
ASSERT_THROW(p = ie.get_config(CommonTestUtils::DEVICE_HETERO, "unsupported_config"), ov::Exception);
}
TEST_P(OVClassGetConfigTest_ThrowUnsupported, GetConfigHeteroWithDeviceThrow) {
@ -877,7 +877,7 @@ TEST_P(OVClassGetConfigTest_ThrowUnsupported, GetConfigHeteroWithDeviceThrow) {
ASSERT_THROW(p = ie.get_config(CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName,
HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)),
Exception);
ov::Exception);
}
TEST_P(OVClassGetConfigTest_ThrowUnsupported, GetConfigThrow) {
@ -885,7 +885,7 @@ TEST_P(OVClassGetConfigTest_ThrowUnsupported, GetConfigThrow) {
ov::runtime::Core ie = createCoreWithTemplate();
Parameter p;
ASSERT_THROW(p = ie.get_config(deviceName, "unsupported_config"), Exception);
ASSERT_THROW(p = ie.get_config(deviceName, "unsupported_config"), ov::Exception);
}
TEST_P(OVClassGetAvailableDevices, GetAvailableDevicesNoThrow) {
@ -987,7 +987,7 @@ TEST_P(OVClassExecutableNetworkGetMetricTest_ThrowsUnsupported, GetMetricThrow)
auto exeNetwork = ie.compile_model(simpleNetwork, deviceName);
ASSERT_THROW(p = exeNetwork.get_metric("unsupported_metric"), Exception);
ASSERT_THROW(p = exeNetwork.get_metric("unsupported_metric"), ov::Exception);
}
TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigNoThrow) {
@ -1014,7 +1014,7 @@ TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigThrows) {
auto exeNetwork = ie.compile_model(simpleNetwork, deviceName);
ASSERT_THROW(p = exeNetwork.get_config("unsupported_config"), Exception);
ASSERT_THROW(p = exeNetwork.get_config("unsupported_config"), ov::Exception);
}
TEST_P(OVClassExecutableNetworkSetConfigTest, SetConfigThrows) {
@ -1024,7 +1024,7 @@ TEST_P(OVClassExecutableNetworkSetConfigTest, SetConfigThrows) {
auto exeNetwork = ie.compile_model(simpleNetwork, deviceName);
ASSERT_THROW(exeNetwork.set_config({{"unsupported_config", "some_value"}}), Exception);
ASSERT_THROW(exeNetwork.set_config({{"unsupported_config", "some_value"}}), ov::Exception);
}
TEST_P(OVClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) {
@ -1045,7 +1045,7 @@ TEST_P(OVClassExecutableNetworkUnsupportedConfigTest, UnsupportedConfigThrows) {
auto exeNetwork = ie.compile_model(simpleNetwork, deviceName);
ASSERT_THROW(exeNetwork.set_config({{configKey, configValue}}), Exception);
ASSERT_THROW(exeNetwork.set_config({{configKey, configValue}}), ov::Exception);
}
TEST_P(OVClassExecutableNetworkGetConfigTest, GetConfigNoEmptyNoThrow) {
@ -1205,7 +1205,7 @@ TEST_P(OVClassQueryNetworkTest, QueryNetworkWithDeviceID) {
if (supportsDeviceID(ie, deviceName)) {
try {
ie.query_model(simpleNetwork, deviceName + ".0");
} catch (const InferenceEngine::Exception& ex) {
} catch (const ov::Exception& ex) {
std::string message = ex.what();
ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively");
}
@ -1219,7 +1219,7 @@ TEST_P(OVClassQueryNetworkTest, QueryNetworkWithBigDeviceIDThrows) {
ov::runtime::Core ie = createCoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) {
ASSERT_THROW(ie.query_model(actualNetwork, deviceName + ".110"), Exception);
ASSERT_THROW(ie.query_model(actualNetwork, deviceName + ".110"), ov::Exception);
} else {
GTEST_SKIP();
}
@ -1230,7 +1230,7 @@ TEST_P(OVClassQueryNetworkTest, QueryNetworkWithInvalidDeviceIDThrows) {
ov::runtime::Core ie = createCoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) {
ASSERT_THROW(ie.query_model(actualNetwork, deviceName + ".l0"), Exception);
ASSERT_THROW(ie.query_model(actualNetwork, deviceName + ".l0"), ov::Exception);
} else {
GTEST_SKIP();
}
@ -1244,7 +1244,7 @@ TEST_P(OVClassQueryNetworkTest, QueryNetworkHETEROWithBigDeviceIDThrows) {
ASSERT_THROW(ie.query_model(actualNetwork,
CommonTestUtils::DEVICE_HETERO,
{{"TARGET_FALLBACK", deviceName + ".100," + deviceName}}),
Exception);
ov::Exception);
} else {
GTEST_SKIP();
}
@ -1288,7 +1288,7 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkWithBigDeviceIDThrows) {
ov::runtime::Core ie = createCoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) {
ASSERT_THROW(ie.compile_model(actualNetwork, deviceName + ".10"), Exception);
ASSERT_THROW(ie.compile_model(actualNetwork, deviceName + ".10"), ov::Exception);
} else {
GTEST_SKIP();
}
@ -1299,7 +1299,7 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkWithInvalidDeviceIDThrows) {
ov::runtime::Core ie = createCoreWithTemplate();
if (supportsDeviceID(ie, deviceName)) {
ASSERT_THROW(ie.compile_model(actualNetwork, deviceName + ".l0"), Exception);
ASSERT_THROW(ie.compile_model(actualNetwork, deviceName + ".l0"), ov::Exception);
} else {
GTEST_SKIP();
}
@ -1313,7 +1313,7 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkHETEROWithBigDeviceIDThrows) {
ASSERT_THROW(ie.compile_model(actualNetwork,
"HETERO",
{{"TARGET_FALLBACK", deviceName + ".100," + CommonTestUtils::DEVICE_CPU}}),
Exception);
ov::Exception);
} else {
GTEST_SKIP();
}
@ -1328,7 +1328,7 @@ TEST_P(OVClassLoadNetworkTest, LoadNetworkHETEROAndDeviceIDThrows) {
CommonTestUtils::DEVICE_HETERO,
{{"TARGET_FALLBACK", deviceName + "," + CommonTestUtils::DEVICE_CPU},
{CONFIG_KEY(DEVICE_ID), "110"}}),
Exception);
ov::Exception);
} else {
GTEST_SKIP();
}