Simplified IE Exceptions Implementation (#4258)
commit a2b8b974b8
parent d068810936
@@ -3,7 +3,6 @@
 //
 #include "cpu_kernel.hpp"
 #include "op.hpp"
-#include <details/ie_exception.hpp>
 #include <ie_layouts.h>

 using namespace TemplateExtension;
@@ -25,7 +24,7 @@ OpImplementation::OpImplementation(const std::shared_ptr<ngraph::Node> &node) {
         add = castedNode->getAddAttr();
         inShape = castedNode->get_input_shape(0);
         outShape = castedNode->get_output_shape(0);
-    } catch (InferenceEngine::details::InferenceEngineException& ex) {
+    } catch (InferenceEngine::Exception& ex) {
         error = ex.what();
     }
 }
@@ -92,14 +91,15 @@ InferenceEngine::StatusCode OpImplementation::init(InferenceEngine::LayerConfig
         }

         if (config.inConfs[0].desc.getDims().size() != 4 || config.outConfs[0].desc.getDims().size() != 4) {
-            THROW_IE_EXCEPTION << "Operation can be initialized only with 4d input/output tensors!";
+            THROW_IE_EXCEPTION
+                << "Operation can be initialized only with 4d input/output tensors!";
         }

         if (config.outConfs[0].desc.getPrecision() != InferenceEngine::Precision::FP32 ||
             config.inConfs[0].desc.getPrecision() != InferenceEngine::Precision::FP32) {
             THROW_IE_EXCEPTION << "Operation supports only FP32 precisions!";
         }
-    } catch (InferenceEngine::details::InferenceEngineException&) {
+    } catch (InferenceEngine::Exception& ex) {
         if (resp) {
             strncpy(resp->msg, error.c_str(), sizeof(resp->msg) - 1);
             resp->msg[sizeof(resp->msg)-1] = 0;
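For extension kernels the visible change is just the caught type: InferenceEngine::details::InferenceEngineException becomes InferenceEngine::Exception. A minimal sketch of the pattern used above (the helper itself is hypothetical, not part of this commit), assuming InferenceEngine::Exception is declared in ie_common.h:

    #include <ie_common.h>
    #include <functional>
    #include <string>

    // Run a kernel-initialization body and capture any Inference Engine error text,
    // so it can later be copied into ResponseDesc::msg as the snippets above do.
    inline std::string captureIeError(const std::function<void()>& body) {
        try {
            body();
        } catch (const InferenceEngine::Exception& ex) {  // new base exception type
            return ex.what();
        }
        return {};
    }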
@@ -5,7 +5,6 @@
 //! [fft_kernel:implementation]
 #include "fft_kernel.hpp"
 #include "fft_op.hpp"
-#include <details/ie_exception.hpp>
 #include <ie_layouts.h>

 #include <opencv2/opencv.hpp>
@@ -65,7 +64,7 @@ InferenceEngine::StatusCode FFTImpl::init(InferenceEngine::LayerConfig &config,
             config.inConfs[0].desc.getPrecision() != InferenceEngine::Precision::FP32) {
             THROW_IE_EXCEPTION << "Operation supports only FP32 precisions!";
         }
-    } catch (InferenceEngine::details::InferenceEngineException&) {
+    } catch (InferenceEngine::Exception& ex) {
         if (resp) {
             strncpy(resp->msg, error.c_str(), sizeof(resp->msg) - 1);
             resp->msg[sizeof(resp->msg)-1] = 0;
@@ -34,7 +34,7 @@ Configuration::Configuration(const ConfigMap& config, const Configuration & defa
         } else if (CONFIG_KEY(PERF_COUNT) == key) {
             perfCount = (CONFIG_VALUE(YES) == value);
         } else if (throwOnUnsupported) {
-            THROW_IE_EXCEPTION << NOT_FOUND_str << ": " << key;
+            THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << ": " << key;
         }
     }
 }
@@ -53,6 +53,6 @@ InferenceEngine::Parameter Configuration::Get(const std::string& name) const {
     } else if (name == CONFIG_KEY_INTERNAL(CPU_THREADS_PER_STREAM)) {
         return {std::to_string(_streamsExecutorConfig._threadsPerStream)};
     } else {
-        THROW_IE_EXCEPTION << NOT_FOUND_str << ": " << name;
+        THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << ": " << name;
     }
 }
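THROW_IE_EXCEPTION_WITH_STATUS(NotFound) replaces the old THROW_IE_EXCEPTION << NOT_FOUND_str idiom: the status now lives in the exception's type instead of being streamed as text. A hedged usage sketch (function and key names are illustrative):

    #include <ie_common.h>
    #include <map>
    #include <string>

    void requireKnownKey(const std::map<std::string, std::string>& config, const std::string& key) {
        if (config.count(key) == 0) {
            // Throws InferenceEngine::NotFound, so callers can catch the concrete type.
            THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << ": " << key;
        }
    }

Callers that previously parsed the message or relied on as_status can now simply catch (const InferenceEngine::NotFound&).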
@@ -27,7 +27,7 @@ TemplatePlugin::ExecutableNetwork::ExecutableNetwork(const std::shared_ptr<const
     try {
         CompileNetwork(function);
         InitExecutor(); // creates thread-based executor using for async requests
-    } catch (const InferenceEngine::details::InferenceEngineException&) {
+    } catch (const InferenceEngine::Exception&) {
         throw;
     } catch (const std::exception & e) {
         THROW_IE_EXCEPTION << "Standard exception from compilation library: " << e.what();
@@ -74,7 +74,7 @@ TemplatePlugin::ExecutableNetwork::ExecutableNetwork(std::istream & model,
     try {
         CompileNetwork(cnnnetwork.getFunction());
         InitExecutor(); // creates thread-based executor using for async requests
-    } catch (const InferenceEngine::details::InferenceEngineException&) {
+    } catch (const InferenceEngine::Exception&) {
         throw;
     } catch (const std::exception & e) {
         THROW_IE_EXCEPTION << "Standard exception from compilation library: " << e.what();
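The plugin constructors keep the same control flow: Inference Engine exceptions already carry a type and message, so they are rethrown untouched, while anything else from the compilation library is wrapped. A condensed sketch of that idiom (the helper name is illustrative):

    #include <ie_common.h>
    #include <exception>
    #include <functional>

    void compileGuarded(const std::function<void()>& compile) {
        try {
            compile();
        } catch (const InferenceEngine::Exception&) {
            throw;  // already a typed IE error; let it propagate as-is
        } catch (const std::exception& e) {
            THROW_IE_EXCEPTION << "Standard exception from compilation library: " << e.what();
        }
    }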
@@ -215,7 +215,7 @@ InferenceEngine::QueryNetworkResult Plugin::QueryNetwork(const InferenceEngine::
 // ! [plugin:add_extension]
 void Plugin::AddExtension(InferenceEngine::IExtensionPtr /*extension*/) {
     // TODO: add extensions if plugin supports extensions
-    THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED);
+    THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented);
 }
 // ! [plugin:add_extension]

@@ -242,7 +242,8 @@ typedef enum {
     RESULT_NOT_READY = -9,
     NOT_ALLOCATED = -10,
     INFER_NOT_STARTED = -11,
-    NETWORK_NOT_READ = -12
+    NETWORK_NOT_READ = -12,
+    INFER_CANCELLED = -13,
 } IEStatusCode;

 /**
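The new INFER_CANCELLED = -13 value gives C API users a dedicated status for cancelled requests. A hedged caller-side sketch (the request handle is assumed to have been created with ie_exec_network_create_infer_request; see the C API docs for exact timeout semantics):

    #include <c_api/ie_c_api.h>

    bool wait_or_report_cancel(ie_infer_request_t* request) {
        // -1 is used here in the sense of "wait until the result is ready".
        IEStatusCode sts = ie_infer_request_wait(request, -1);
        if (sts == IEStatusCode::INFER_CANCELLED) {
            return false;  // the request was cancelled, nothing to fetch
        }
        return sts == IEStatusCode::OK;
    }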
@@ -16,7 +16,6 @@
 #include <memory>
 #include <ie_extension.h>
 #include "inference_engine.hpp"
-#include "details/ie_exception.hpp"
 #include "ie_compound_blob.h"
 #include "c_api/ie_c_api.h"

@@ -119,6 +118,23 @@ std::map<IE::ColorFormat, colorformat_e> colorformat_map = {{IE::ColorFormat::RA
     {IE::ColorFormat::NV12, colorformat_e::NV12},
     {IE::ColorFormat::I420, colorformat_e::I420}};

+#define CATCH_IE_EXCEPTION(StatusCode, ExceptionType) catch (const IE::ExceptionType&) {return IEStatusCode::StatusCode;}
+
+#define CATCH_IE_EXCEPTIONS \
+        CATCH_IE_EXCEPTION(GENERAL_ERROR, GeneralError) \
+        CATCH_IE_EXCEPTION(NOT_IMPLEMENTED, NotImplemented) \
+        CATCH_IE_EXCEPTION(NETWORK_NOT_LOADED, NetworkNotLoaded) \
+        CATCH_IE_EXCEPTION(PARAMETER_MISMATCH, ParameterMismatch) \
+        CATCH_IE_EXCEPTION(NOT_FOUND, NotFound) \
+        CATCH_IE_EXCEPTION(OUT_OF_BOUNDS, OutOfBounds) \
+        CATCH_IE_EXCEPTION(UNEXPECTED, Unexpected) \
+        CATCH_IE_EXCEPTION(REQUEST_BUSY, RequestBusy) \
+        CATCH_IE_EXCEPTION(RESULT_NOT_READY, ResultNotReady) \
+        CATCH_IE_EXCEPTION(NOT_ALLOCATED, NotAllocated) \
+        CATCH_IE_EXCEPTION(INFER_NOT_STARTED, InferNotStarted) \
+        CATCH_IE_EXCEPTION(NETWORK_NOT_READ, NetworkNotRead) \
+        CATCH_IE_EXCEPTION(INFER_CANCELLED, InferCancelled)
+
 /**
  *@brief convert the config type data to map type data.
  */
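Taken together, CATCH_IE_EXCEPTIONS expands to one catch clause per typed exception, each returning the matching IEStatusCode. A rough sketch of how a function in this file uses it (illustrative helper; the macro and the IE namespace alias are only visible inside ie_c_api.cpp, where <functional> would also need to be included):

    static IEStatusCode run_guarded(const std::function<void()>& body) {
        try {
            body();
            return IEStatusCode::OK;
        } CATCH_IE_EXCEPTIONS  // e.g. catch (const IE::NotFound&) { return IEStatusCode::NOT_FOUND; } ...
          catch (...) {
            return IEStatusCode::UNEXPECTED;
        }
    }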
@@ -222,9 +238,7 @@ IEStatusCode ie_core_create(const char *xml_config_file, ie_core_t **core) {
         std::unique_ptr<ie_core_t> tmp(new ie_core_t);
         tmp->object = IE::Core(xml_config_file);
         *core = tmp.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -268,9 +282,7 @@ IEStatusCode ie_core_get_versions(const ie_core_t *core, const char *device_name
         vers_ptrs[i].description = iter->second.description;
         }
         versions->versions = vers_ptrs.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -303,9 +315,7 @@ IEStatusCode ie_core_read_network(ie_core_t *core, const char *xml, const char *
         }
         network_result->object = core->object.ReadNetwork(xml, bin);
         *network = network_result.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -325,9 +335,7 @@ IEStatusCode ie_core_read_network_from_memory(ie_core_t *core, const uint8_t *xm
         network_result->object = core->object.ReadNetwork(std::string(reinterpret_cast<const char *>(xml_content),
             reinterpret_cast<const char *>(xml_content + xml_content_size)), weight_blob->object);
         *network = network_result.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -351,9 +359,7 @@ IEStatusCode ie_core_load_network(ie_core_t *core, const ie_network_t *network,
         // create plugin in the registery and then create ExecutableNetwork.
         exe_net->object = core->object.LoadNetwork(network->object, device_name, conf_map);
         *exe_network = exe_net.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -376,9 +382,7 @@ IEStatusCode ie_core_set_config(ie_core_t *core, const ie_config_t *ie_core_conf

     try {
         core->object.SetConfig(conf_map, deviceName);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -395,9 +399,7 @@ IEStatusCode ie_core_register_plugin(ie_core_t *core, const char *plugin_name, c

     try {
         core->object.RegisterPlugin(plugin_name, device_name);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -414,9 +416,7 @@ IEStatusCode ie_core_register_plugins(ie_core_t *core, const char *xml_config_fi

     try {
         core->object.RegisterPlugins(xml_config_file);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -433,9 +433,7 @@ IEStatusCode ie_core_unregister_plugin(ie_core_t *core, const char *device_name)

     try {
         core->object.UnregisterPlugin(device_name);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -454,9 +452,7 @@ IEStatusCode ie_core_add_extension(ie_core_t *core, const char *extension_path,
         auto extension_ptr = std::make_shared<InferenceEngine::Extension>(std::string{extension_path});
         auto extension = std::dynamic_pointer_cast<InferenceEngine::IExtension>(extension_ptr);
         core->object.AddExtension(extension, device_name);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -474,9 +470,7 @@ IEStatusCode ie_core_get_metric(const ie_core_t *core, const char *device_name,
     try {
         IE::Parameter param = core->object.GetMetric(device_name, metric_name);
         parameter2IEparam(param, param_result);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -496,9 +490,7 @@ IEStatusCode ie_core_get_config(const ie_core_t *core, const char *device_name,

         // convert the parameter to ie_param_t
         parameter2IEparam(param, param_result);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -522,11 +514,7 @@ IEStatusCode ie_core_get_available_devices(const ie_core_t *core, ie_available_d
         memcpy(dev_ptrs[i], _devices[i].c_str(), _devices[i].length() + 1);
         }
         avai_devices->devices = dev_ptrs.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (const std::exception&) {
-        return IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -565,9 +553,7 @@ IEStatusCode ie_exec_network_create_infer_request(ie_executable_network_t *ie_ex
         std::unique_ptr<ie_infer_request_t> req(new ie_infer_request_t);
         req->object = ie_exec_network->object.CreateInferRequest();
         *request = req.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -585,9 +571,7 @@ IEStatusCode ie_exec_network_get_metric(const ie_executable_network_t *ie_exec_n
     try {
         InferenceEngine::Parameter parameter = ie_exec_network->object.GetMetric(metric_name);
         parameter2IEparam(parameter, param_result);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -605,9 +589,7 @@ IEStatusCode ie_exec_network_set_config(ie_executable_network_t *ie_exec_network
     try {
         const std::map<std::string, IE::Parameter> conf_map = config2ParamMap(param_config);
         ie_exec_network->object.SetConfig(conf_map);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -625,9 +607,7 @@ IEStatusCode ie_exec_network_get_config(const ie_executable_network_t *ie_exec_n
     try {
         InferenceEngine::Parameter parameter = ie_exec_network->object.GetConfig(metric_config);
         parameter2IEparam(parameter, param_result);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -651,9 +631,7 @@ IEStatusCode ie_network_get_name(const ie_network_t *network, char **name) {
         std::unique_ptr<char[]> netName(new char[_name.length() + 1]);
         *name = netName.release();
         memcpy(*name, _name.c_str(), _name.length() + 1);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -670,9 +648,7 @@ IEStatusCode ie_network_get_inputs_number(const ie_network_t *network, size_t *s
     try {
         IE::InputsDataMap inputs = network->object.getInputsInfo();
         *size_result = inputs.size();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -701,9 +677,7 @@ IEStatusCode ie_network_get_input_name(const ie_network_t *network, size_t numbe
         *name = inputName.release();
         memcpy(*name, iter->first.c_str(), iter->first.length() + 1);
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -726,9 +700,7 @@ IEStatusCode ie_network_get_input_precision(const ie_network_t *network, const c
         IE::Precision p = inputs[input_name]->getPrecision();
         *prec_result = precision_map[p];
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -757,9 +729,7 @@ IEStatusCode ie_network_set_input_precision(ie_network_t *network, const char *i
         }
         inputs[input_name]->setPrecision(precision);
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -782,9 +752,7 @@ IEStatusCode ie_network_get_input_layout(const ie_network_t *network, const char
         IE::Layout l = inputs[input_name]->getLayout();
         *layout_result = layout_map[l];
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -813,9 +781,7 @@ IEStatusCode ie_network_set_input_layout(ie_network_t *network, const char *inpu
         }
         inputs[input_name]->setLayout(layout);
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -841,9 +807,7 @@ IEStatusCode ie_network_get_input_dims(const ie_network_t *network, const char *
         dims_result->dims[i] = dims[i];
         }
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -866,9 +830,7 @@ IEStatusCode ie_network_get_input_resize_algorithm(const ie_network_t *network,
         IE::ResizeAlgorithm resize = inputs[input_name]->getPreProcess().getResizeAlgorithm();
         *resize_alg_result = resize_alg_map[resize];
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -897,9 +859,7 @@ IEStatusCode ie_network_set_input_resize_algorithm(ie_network_t *network, const
         }
         inputs[input_name]->getPreProcess().setResizeAlgorithm(resize);
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -922,9 +882,7 @@ IEStatusCode ie_network_get_color_format(const ie_network_t *network, const char
         IE::ColorFormat color = inputs[input_name]->getPreProcess().getColorFormat();
         *colformat_result = colorformat_map[color];
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -953,9 +911,7 @@ IEStatusCode ie_network_set_color_format(ie_network_t *network, const char *inpu
         }
         inputs[input_name]->getPreProcess().setColorFormat(color);
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -993,9 +949,7 @@ IEStatusCode ie_network_get_input_shapes(ie_network *network, input_shapes_t *sh
         }
         shapes->shapes = shape_ptrs.release();
         status = IEStatusCode::OK;
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1022,9 +976,7 @@ IEStatusCode ie_network_reshape(ie_network_t *network, const input_shapes_t shap
         }

         network->object.reshape(net_shapes);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1042,9 +994,7 @@ IEStatusCode ie_network_get_outputs_number(const ie_network_t *network, size_t *
     try {
         IE::OutputsDataMap outputs = network->object.getOutputsInfo();
         *size_result = outputs.size();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1073,9 +1023,7 @@ IEStatusCode ie_network_get_output_name(const ie_network_t *network, const size_
         *name = outputName.release();
         memcpy(*name, iter->first.c_str(), iter->first.length() + 1);
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1098,9 +1046,7 @@ IEStatusCode ie_network_get_output_precision(const ie_network_t *network, const
         IE::Precision p = outputs[output_name]->getPrecision();
         *prec_result = precision_map[p];
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1129,9 +1075,7 @@ IEStatusCode ie_network_set_output_precision(ie_network_t *network, const char *
         }
         outputs[output_name]->setPrecision(precision);
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1154,9 +1098,7 @@ IEStatusCode ie_network_get_output_layout(const ie_network_t *network, const cha
         IE::Layout l = outputs[output_name]->getLayout();
         *layout_result = layout_map[l];
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1185,9 +1127,7 @@ IEStatusCode ie_network_set_output_layout(ie_network_t *network, const char *out
         }
         outputs[output_name]->setLayout(layout);
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1213,9 +1153,7 @@ IEStatusCode ie_network_get_output_dims(const ie_network_t *network, const char
         dims_result->dims[i] = dims[i];
         }
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1260,9 +1198,7 @@ IEStatusCode ie_infer_request_get_blob(ie_infer_request_t *infer_request, const
         std::unique_ptr<ie_blob_t> blob_result(new ie_blob_t);
         blob_result->object = blob_ptr;
         *blob = blob_result.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1279,9 +1215,7 @@ IEStatusCode ie_infer_request_set_blob(ie_infer_request_t *infer_request, const

     try {
         infer_request->object.SetBlob(name, blob->object);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1298,9 +1232,7 @@ IEStatusCode ie_infer_request_infer(ie_infer_request_t *infer_request) {

     try {
         infer_request->object.Infer();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1317,9 +1249,7 @@ IEStatusCode ie_infer_request_infer_async(ie_infer_request_t *infer_request) {

     try {
         infer_request->object.StartAsync();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1339,9 +1269,7 @@ IEStatusCode ie_infer_set_completion_callback(ie_infer_request_t *infer_request,
         callback->completeCallBackFunc(callback->args);
         };
         infer_request->object.SetCompletionCallback(fun);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1359,9 +1287,7 @@ IEStatusCode ie_infer_request_wait(ie_infer_request_t *infer_request, const int6
     try {
         IE::StatusCode status_code = infer_request->object.Wait(timeout);
         status = status_map[status_code];
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1378,9 +1304,7 @@ IEStatusCode ie_infer_request_set_batch(ie_infer_request_t *infer_request, const

     try {
         infer_request->object.SetBatch(size);
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1444,9 +1368,7 @@ IEStatusCode ie_blob_make_memory(const tensor_desc_t *tensorDesc, ie_blob_t **bl

         _blob->object->allocate();
         *blob = _blob.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1518,9 +1440,7 @@ IEStatusCode ie_blob_make_memory_from_preallocated(const tensor_desc_t *tensorDe
         _blob->object = IE::make_shared_blob(tensor, p, size);
         }
         *blob = _blob.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1538,9 +1458,7 @@ IEStatusCode ie_blob_make_memory_with_roi(const ie_blob_t *inputBlob, const roi_
         IE::ROI roi_d = {roi->id, roi->posX, roi->posY, roi->sizeX, roi->sizeY};
         _blob->object = IE::make_shared_blob(inputBlob->object, roi_d);
         *blob = _blob.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1556,9 +1474,7 @@ IEStatusCode ie_blob_make_memory_nv12(const ie_blob_t *y, const ie_blob_t *uv, i
         std::unique_ptr<ie_blob_t> _blob(new ie_blob_t);
         _blob->object = IE::make_shared_blob<IE::NV12Blob>(y->object, uv->object);
         *nv12Blob = _blob.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1574,9 +1490,7 @@ IEStatusCode ie_blob_make_memory_i420(const ie_blob_t *y, const ie_blob_t *u, co
         std::unique_ptr<ie_blob_t> _blob(new ie_blob_t);
         _blob->object = IE::make_shared_blob<IE::I420Blob>(y->object, u->object, v->object);
         *i420Blob = _blob.release();
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1651,9 +1565,7 @@ IEStatusCode ie_blob_get_dims(const ie_blob_t *blob, dimensions_t *dims_result)
         for (size_t i = 0; i< dims_result->ranks; ++i) {
         dims_result->dims[i] = size_vector[i];
         }
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1671,9 +1583,7 @@ IEStatusCode ie_blob_get_layout(const ie_blob_t *blob, layout_e *layout_result)
     try {
         IE::Layout l = blob->object->getTensorDesc().getLayout();
         *layout_result = layout_map[l];
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -1691,9 +1601,7 @@ IEStatusCode ie_blob_get_precision(const ie_blob_t *blob, precision_e *prec_resu
     try {
         IE::Precision p = blob->object->getTensorDesc().getPrecision();
         *prec_result = precision_map[p];
-    } catch (const IE::details::InferenceEngineException& e) {
-        return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
-    } catch (...) {
+    } CATCH_IE_EXCEPTIONS catch (...) {
         return IEStatusCode::UNEXPECTED;
     }

@@ -383,7 +383,10 @@ void InferenceEnginePython::InferRequestWrap::setBatch(int size) {

 void latency_callback(InferenceEngine::IInferRequest::Ptr request, InferenceEngine::StatusCode code) {
     if (code != InferenceEngine::StatusCode::OK) {
-        THROW_IE_EXCEPTION << "Async Infer Request failed with status code " << code;
+        IE_EXCEPTION_SWITCH(code, ExceptionType,
+            InferenceEngine::details::ThrowNow<ExceptionType>{}
+                <<= std::stringstream{} << IE_LOCATION
+                << InferenceEngine::details::ExceptionTraits<ExceptionType>::string());
     }
     InferenceEnginePython::InferRequestWrap *requestWrap;
     InferenceEngine::ResponseDesc dsc;
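IE_EXCEPTION_SWITCH(code, ExceptionType, ...) dispatches on a StatusCode at run time and instantiates its body with the matching exception class as ExceptionType; ThrowNow<ExceptionType>{} <<= stream then throws it with the streamed message. A minimal sketch of the same idiom used above, assuming these helpers are available from ie_common.h:

    #include <ie_common.h>
    #include <sstream>
    #include <string>

    void throw_for_status(InferenceEngine::StatusCode code, const std::string& message) {
        IE_EXCEPTION_SWITCH(code, ExceptionType,
            InferenceEngine::details::ThrowNow<ExceptionType>{}
                <<= std::stringstream{} << IE_LOCATION << message);
    }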
@@ -19,7 +19,6 @@
 #include "ie_blob.h"
 #include "ie_common.h"
 #include "ie_data.h"
-#include "details/ie_exception_conversion.hpp"
 #include "ie_extension.h"

 namespace ngraph {
@@ -19,7 +19,6 @@
 #include "cpp/ie_infer_request.hpp"
 #include "cpp/ie_memory_state.hpp"
 #include "ie_iexecutable_network.hpp"
-#include "details/ie_exception_conversion.hpp"
 #include "details/ie_so_loader.h"

 namespace InferenceEngine {
@@ -16,7 +16,6 @@
 #include "cpp/ie_memory_state.hpp"
 #include "ie_remote_context.hpp"
 #include "ie_iinfer_request.hpp"
-#include "details/ie_exception_conversion.hpp"
 #include "details/ie_so_loader.h"
 #include "ie_blob.h"

@@ -245,7 +244,9 @@ public:
         auto res = actual->Wait(millis_timeout, &resp);
         if (res != OK && res != RESULT_NOT_READY &&
             res != INFER_NOT_STARTED && res != INFER_CANCELLED) {
-            THROW_IE_EXCEPTION << InferenceEngine::details::as_status << res << resp.msg;
+            IE_EXCEPTION_SWITCH(res, ExceptionType,
+                InferenceEngine::details::ThrowNow<ExceptionType>{}
+                    <<= std::stringstream{} << IE_LOCATION << resp.msg)
         }
         return res;
     }
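For users of the C++ InferRequest wrapper, the behavioural change is that an unexpected wait status now surfaces as one of the typed exceptions rather than an InferenceEngineException tagged via as_status. A hedged caller-side sketch:

    #include <inference_engine.hpp>
    #include <iostream>

    InferenceEngine::StatusCode safe_wait(InferenceEngine::InferRequest& request) {
        try {
            return request.Wait(InferenceEngine::IInferRequest::WaitMode::RESULT_READY);
        } catch (const InferenceEngine::Exception& ex) {
            // Typed exceptions (GeneralError, NotAllocated, ...) all derive from Exception.
            std::cerr << ex.what() << std::endl;
            return InferenceEngine::StatusCode::GENERAL_ERROR;
        }
    }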
@@ -13,7 +13,6 @@
 #include <string>

 #include "ie_blob.h"
-#include "details/ie_exception_conversion.hpp"
 #include "details/ie_so_loader.h"

 namespace InferenceEngine {
@@ -2,168 +2,6 @@
 // SPDX-License-Identifier: Apache-2.0
 //

-/**
- * @brief A header file for the main Inference Engine exception
- *
- * @file ie_exception.hpp
- */
 #pragma once

-#include "ie_api.h"
+#include "ie_common.h"
-
-#include <functional>
-#include <memory>
-#include <sstream>
-#include <string>
-#include <utility>
-#include <vector>
-
-/**
- * @def THROW_IE_EXCEPTION
- * @brief A macro used to throw general exception with a description
- */
-#define THROW_IE_EXCEPTION throw InferenceEngine::details::InferenceEngineException(__FILE__, __LINE__)
-
-/**
- * @def IE_ASSERT
- * @brief Uses assert() function if NDEBUG is not defined, InferenceEngine exception otherwise
- */
-#ifdef NDEBUG
-#define IE_ASSERT(EXPRESSION) \
-    if (!(EXPRESSION)) \
-    throw InferenceEngine::details::InferenceEngineException(__FILE__, __LINE__) \
-        << "AssertionFailed: " << #EXPRESSION  // NOLINT
-#else
-#include <cassert>
-
-/**
- * @private
- */
-class NullStream {
-public:
-    template <class T>
-    NullStream& operator<<(const T&) noexcept {
-        return *this;
-    }
-
-    NullStream& operator<<(std::ostream& (*)(std::ostream&)) noexcept {
-        return *this;
-    }
-};
-
-#define IE_ASSERT(EXPRESSION) \
-    assert((EXPRESSION)); \
-    NullStream()
-#endif  // NDEBUG
-
-namespace InferenceEngine {
-enum StatusCode : int;
-namespace details {
-
-/**
- * @brief The InferenceEngineException class implements the main Inference Engine exception
- */
-class INFERENCE_ENGINE_API_CLASS(InferenceEngineException): public std::exception {
-    mutable std::string errorDesc;
-    StatusCode status_code = static_cast<StatusCode>(0);
-    std::string _file;
-    int _line;
-    std::shared_ptr<std::stringstream> exception_stream;
-    bool save_to_status_code = false;
-
-public:
-    /**
-     * @brief A C++ std::exception API member
-     * @return An exception description with a file name and file line
-     */
-    const char* what() const noexcept override {
-        if (errorDesc.empty() && exception_stream) {
-            errorDesc = exception_stream->str();
-#ifndef NDEBUG
-            errorDesc += "\n" + _file + ":" + std::to_string(_line);
-#endif
-        }
-        return errorDesc.c_str();
-    }
-
-    /**
-     * @brief A constructor. Creates an InferenceEngineException object from a specific file and line
-     * @param filename File where exception has been thrown
-     * @param line Line of the exception emitter
-     * @param message Exception message
-     */
-    InferenceEngineException(const std::string& filename, const int line, const std::string& message = "") noexcept;
-
-    /**
-     * @brief noexcept required for copy ctor
-     * @details The C++ Standard, [except.throw], paragraph 3 [ISO/IEC 14882-2014]
-     */
-    InferenceEngineException(const InferenceEngineException& that) noexcept;
-
-    /**
-     * @brief A stream output operator to be used within exception
-     * @param arg Object for serialization in the exception message
-     */
-    template <class T>
-    InferenceEngineException& operator<<(const T& arg) {
-        if (save_to_status_code) {
-            auto can_convert = status_code_assign(arg);
-            save_to_status_code = false;
-            if (can_convert.second) {
-                this->status_code = can_convert.first;
-                return *this;
-            }
-        }
-        if (!exception_stream) {
-            exception_stream.reset(new std::stringstream());
-        }
-        (*exception_stream) << arg;
-        return *this;
-    }
-
-    /**
-     * @brief Manipulator to indicate that next item has to be converted to StatusCode to save
-     * @param iex InferenceEngineException object
-     */
-    friend InferenceEngineException& as_status(InferenceEngineException& iex) {
-        iex.save_to_status_code = true;
-        return iex;
-    }
-
-    /**
-     * @brief A stream output operator to catch InferenceEngineException manipulators
-     * @param manip InferenceEngineException manipulator to call
-     */
-    InferenceEngineException& operator<<(InferenceEngineException& (*manip)(InferenceEngineException&)) {
-        return manip(*this);
-    }
-
-    /** @brief Check if it has StatusCode value */
-    bool hasStatus() const {
-        return this->status_code == 0 ? false : true;
-    }
-
-    /** @brief Get StatusCode value */
-    StatusCode getStatus() const {
-        return this->status_code;
-    }
-
-    ~InferenceEngineException() noexcept override;
-
-private:
-    std::pair<StatusCode, bool> status_code_assign(const StatusCode& status) {
-        return {status, true};
-    }
-
-    template <typename T>
-    std::pair<StatusCode, bool> status_code_assign(const T&) {
-        return {static_cast<StatusCode>(0), false};
-    }
-};
-
-InferenceEngineException& as_status(InferenceEngineException& iex);
-
-static_assert(std::is_nothrow_copy_constructible<InferenceEngineException>::value,
-              "InferenceEngineException must be nothrow copy constructible");
-}  // namespace details
-}  // namespace InferenceEngine
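After this change ie_exception.hpp is effectively a shim around ie_common.h, which is where the Exception base class, the status-typed exceptions (GeneralError, NotFound, InferCancelled, ...) and the throwing macros referenced throughout this diff are expected to live. A hedged sketch of the intended replacement style:

    #include <ie_common.h>
    #include <string>

    void require_found(bool found, const std::string& name) {
        if (!found) {
            THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << name << " was not found";
        }
    }

    // Callers pick the granularity they need:
    //   catch (const InferenceEngine::NotFound&)  - one specific status
    //   catch (const InferenceEngine::Exception&) - any Inference Engine error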
@@ -1,71 +0,0 @@
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-/**
- * @brief A header file that provides macros to handle no exception methods
- *
- * @file ie_exception_conversion.hpp
- */
-#pragma once
-
-#include "ie_common.h"
-#include "details/ie_exception.hpp"
-
-#define CALL_STATUS_FNC(function, ...) \
-    if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATUS_FNC was not initialized."; \
-    ResponseDesc resp; \
-    auto res = actual->function(__VA_ARGS__, &resp); \
-    if (res != OK) InferenceEngine::details::extract_exception(res, resp.msg);
-
-#define CALL_STATUS_FNC_NO_ARGS(function) \
-    if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATUS_FNC_NO_ARGS was not initialized."; \
-    ResponseDesc resp; \
-    auto res = actual->function(&resp); \
-    if (res != OK) InferenceEngine::details::extract_exception(res, resp.msg);
-
-#define CALL_FNC_NO_ARGS(function) \
-    if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_FNC_NO_ARGS was not initialized."; \
-    ResponseDesc resp; \
-    auto result = actual->function(&resp); \
-    if (resp.msg[0] != '\0') { \
-        THROW_IE_EXCEPTION << resp.msg; \
-    } \
-    return result;
-
-namespace InferenceEngine {
-namespace details {
-
-inline void extract_exception(StatusCode status, const char* msg) {
-    switch (status) {
-    case NOT_IMPLEMENTED:
-        throw NotImplemented(msg);
-    case NETWORK_NOT_LOADED:
-        throw NetworkNotLoaded(msg);
-    case PARAMETER_MISMATCH:
-        throw ParameterMismatch(msg);
-    case NOT_FOUND:
-        throw NotFound(msg);
-    case OUT_OF_BOUNDS:
-        throw OutOfBounds(msg);
-    case UNEXPECTED:
-        throw Unexpected(msg);
-    case REQUEST_BUSY:
-        throw RequestBusy(msg);
-    case RESULT_NOT_READY:
-        throw ResultNotReady(msg);
-    case NOT_ALLOCATED:
-        throw NotAllocated(msg);
-    case INFER_NOT_STARTED:
-        throw InferNotStarted(msg);
-    case NETWORK_NOT_READ:
-        throw NetworkNotRead(msg);
-    case INFER_CANCELLED:
-        throw InferCancelled(msg);
-    default:
-        THROW_IE_EXCEPTION << msg << InferenceEngine::details::as_status << status;
-    }
-}
-
-}  // namespace details
-}  // namespace InferenceEngine
@@ -12,7 +12,6 @@
 #include <memory>

 #include "ie_allocator.hpp"
-#include "details/ie_exception.hpp"

 namespace InferenceEngine {
 namespace details {
@ -52,7 +52,7 @@ public:
|
|||||||
* @brief Searches for a function symbol in the loaded module
|
* @brief Searches for a function symbol in the loaded module
|
||||||
* @param symbolName Name of function to find
|
* @param symbolName Name of function to find
|
||||||
* @return A pointer to the function if found
|
* @return A pointer to the function if found
|
||||||
* @throws InferenceEngineException if the function is not found
|
* @throws Exception if the function is not found
|
||||||
*/
|
*/
|
||||||
void* get_symbol(const char* symbolName) const;
|
void* get_symbol(const char* symbolName) const;
|
||||||
};
|
};
|
||||||
|
@ -12,10 +12,10 @@
|
|||||||
#include <memory>
|
#include <memory>
|
||||||
#include <string>
|
#include <string>
|
||||||
#include <type_traits>
|
#include <type_traits>
|
||||||
|
#include <functional>
|
||||||
|
|
||||||
#include "ie_common.h"
|
#include "ie_common.h"
|
||||||
#include "ie_so_loader.h"
|
#include "ie_so_loader.h"
|
||||||
#include "details/ie_exception.hpp"
|
|
||||||
|
|
||||||
namespace InferenceEngine {
|
namespace InferenceEngine {
|
||||||
namespace details {
|
namespace details {
|
||||||
@ -150,6 +150,22 @@ public:
|
|||||||
}
|
}
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
|
#define CATCH_IE_EXCEPTION(ExceptionType) catch (const InferenceEngine::ExceptionType& e) {throw e;}
|
||||||
|
#define CATCH_IE_EXCEPTIONS \
|
||||||
|
CATCH_IE_EXCEPTION(GeneralError) \
|
||||||
|
CATCH_IE_EXCEPTION(NotImplemented) \
|
||||||
|
CATCH_IE_EXCEPTION(NetworkNotLoaded) \
|
||||||
|
CATCH_IE_EXCEPTION(ParameterMismatch) \
|
||||||
|
CATCH_IE_EXCEPTION(NotFound) \
|
||||||
|
CATCH_IE_EXCEPTION(OutOfBounds) \
|
||||||
|
CATCH_IE_EXCEPTION(Unexpected) \
|
||||||
|
CATCH_IE_EXCEPTION(RequestBusy) \
|
||||||
|
CATCH_IE_EXCEPTION(ResultNotReady) \
|
||||||
|
CATCH_IE_EXCEPTION(NotAllocated) \
|
||||||
|
CATCH_IE_EXCEPTION(InferNotStarted) \
|
||||||
|
CATCH_IE_EXCEPTION(NetworkNotRead) \
|
||||||
|
CATCH_IE_EXCEPTION(InferCancelled)
|
||||||
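A minimal sketch of the pattern these macros enable, assuming the surrounding IE headers; wrap_call is a hypothetical helper, not part of this patch. Typed IE exceptions are rethrown unchanged, anything else is rewrapped:

// Hypothetical helper illustrating the CATCH_IE_EXCEPTIONS pattern.
template <typename F>
void wrap_call(F&& callable) {
    try {
        callable();
    } CATCH_IE_EXCEPTIONS catch (const std::exception& ex) {
        THROW_IE_EXCEPTION << ex.what();             // non-IE exceptions become GeneralError
    } catch (...) {
        THROW_IE_EXCEPTION_WITH_STATUS(Unexpected);  // unknown exceptions become Unexpected
    }
}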
|
|
||||||
/**
|
/**
|
||||||
* @brief Implements load of object from library if Release method is presented
|
* @brief Implements load of object from library if Release method is presented
|
||||||
*/
|
*/
|
||||||
@ -158,13 +174,7 @@ protected:
|
|||||||
void* create = nullptr;
|
void* create = nullptr;
|
||||||
try {
|
try {
|
||||||
create = _so_loader->get_symbol((SOCreatorTrait<T>::name + std::string("Shared")).c_str());
|
create = _so_loader->get_symbol((SOCreatorTrait<T>::name + std::string("Shared")).c_str());
|
||||||
} catch (const details::InferenceEngineException& ex) {
|
} catch (const NotFound&) {}
|
||||||
if ((ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) == NOT_FOUND) {
|
|
||||||
create = nullptr;
|
|
||||||
} else {
|
|
||||||
throw;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (create == nullptr) {
|
if (create == nullptr) {
|
||||||
create = _so_loader->get_symbol(SOCreatorTrait<T>::name);
|
create = _so_loader->get_symbol(SOCreatorTrait<T>::name);
|
||||||
using CreateF = StatusCode(T*&, ResponseDesc*);
|
using CreateF = StatusCode(T*&, ResponseDesc*);
|
||||||
@ -172,7 +182,8 @@ protected:
|
|||||||
ResponseDesc desc;
|
ResponseDesc desc;
|
||||||
StatusCode sts = reinterpret_cast<CreateF*>(create)(object, &desc);
|
StatusCode sts = reinterpret_cast<CreateF*>(create)(object, &desc);
|
||||||
if (sts != OK) {
|
if (sts != OK) {
|
||||||
THROW_IE_EXCEPTION << as_status << sts << desc.msg;
|
IE_EXCEPTION_SWITCH(sts, ExceptionType,
|
||||||
|
InferenceEngine::details::ThrowNow<ExceptionType>{} <<= std::stringstream{} << IE_LOCATION << desc.msg)
|
||||||
}
|
}
|
||||||
IE_SUPPRESS_DEPRECATED_START
|
IE_SUPPRESS_DEPRECATED_START
|
||||||
_pointedObj = std::shared_ptr<T>(object, [] (T* ptr){ptr->Release();});
|
_pointedObj = std::shared_ptr<T>(object, [] (T* ptr){ptr->Release();});
|
||||||
@ -181,12 +192,10 @@ protected:
|
|||||||
using CreateF = void(std::shared_ptr<T>&);
|
using CreateF = void(std::shared_ptr<T>&);
|
||||||
reinterpret_cast<CreateF*>(create)(_pointedObj);
|
reinterpret_cast<CreateF*>(create)(_pointedObj);
|
||||||
}
|
}
|
||||||
} catch (const InferenceEngineException& ex) {
|
} CATCH_IE_EXCEPTIONS catch (const std::exception& ex) {
|
||||||
THROW_IE_EXCEPTION << as_status << (ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) << ex.what();
|
THROW_IE_EXCEPTION << ex.what();
|
||||||
} catch (const std::exception& ex) {
|
|
||||||
THROW_IE_EXCEPTION << as_status << GENERAL_ERROR << ex.what();
|
|
||||||
} catch(...) {
|
} catch(...) {
|
||||||
THROW_IE_EXCEPTION << as_status << UNEXPECTED << "[ UNEXPECTED ] ";
|
THROW_IE_EXCEPTION_WITH_STATUS(Unexpected);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -197,14 +206,14 @@ protected:
|
|||||||
try {
|
try {
|
||||||
using CreateF = void(std::shared_ptr<T>&);
|
using CreateF = void(std::shared_ptr<T>&);
|
||||||
reinterpret_cast<CreateF*>(_so_loader->get_symbol(SOCreatorTrait<T>::name))(_pointedObj);
|
reinterpret_cast<CreateF*>(_so_loader->get_symbol(SOCreatorTrait<T>::name))(_pointedObj);
|
||||||
} catch (const InferenceEngineException& ex) {
|
} CATCH_IE_EXCEPTIONS catch (const std::exception& ex) {
|
||||||
THROW_IE_EXCEPTION << as_status << (ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) << ex.what();
|
THROW_IE_EXCEPTION << ex.what();
|
||||||
} catch (const std::exception& ex) {
|
|
||||||
THROW_IE_EXCEPTION << as_status << GENERAL_ERROR << ex.what();
|
|
||||||
} catch(...) {
|
} catch(...) {
|
||||||
THROW_IE_EXCEPTION << as_status << UNEXPECTED << "[ UNEXPECTED ] ";
|
THROW_IE_EXCEPTION_WITH_STATUS(Unexpected);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
#undef CATCH_IE_EXCEPTION
|
||||||
|
#undef CATCH_IE_EXCEPTIONS
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Gets a smart pointer to the DLL
|
* @brief Gets a smart pointer to the DLL
|
||||||
|
@ -25,7 +25,6 @@
|
|||||||
#include "ie_locked_memory.hpp"
|
#include "ie_locked_memory.hpp"
|
||||||
#include "ie_precision.hpp"
|
#include "ie_precision.hpp"
|
||||||
#include "details/ie_blob_iterator.hpp"
|
#include "details/ie_blob_iterator.hpp"
|
||||||
#include "details/ie_exception.hpp"
|
|
||||||
#include "details/ie_pre_allocator.hpp"
|
#include "details/ie_pre_allocator.hpp"
|
||||||
|
|
||||||
namespace InferenceEngine {
|
namespace InferenceEngine {
|
||||||
|
@ -16,7 +16,14 @@
|
|||||||
#include <string>
|
#include <string>
|
||||||
#include <vector>
|
#include <vector>
|
||||||
#include <map>
|
#include <map>
|
||||||
|
#include <sstream>
|
||||||
|
#include <stdexcept>
|
||||||
|
#include <iterator>
|
||||||
|
|
||||||
|
#include <ie_api.h>
|
||||||
|
#ifndef NDEBUG
|
||||||
|
#include <cassert>
|
||||||
|
#endif
|
||||||
namespace InferenceEngine {
|
namespace InferenceEngine {
|
||||||
/**
|
/**
|
||||||
* @brief Represents tensor size.
|
* @brief Represents tensor size.
|
||||||
@ -274,73 +281,211 @@ struct QueryNetworkResult {
|
|||||||
ResponseDesc resp;
|
ResponseDesc resp;
|
||||||
};
|
};
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::GENERIC_ERROR exception */
|
namespace details {
|
||||||
class GeneralError : public std::logic_error {
|
struct INFERENCE_ENGINE_DEPRECATED("Use InferRequest::Exception")
|
||||||
using std::logic_error::logic_error;
|
INFERENCE_ENGINE_API_CLASS(InferenceEngineException) : public std::runtime_error {
|
||||||
|
using std::runtime_error::runtime_error;
|
||||||
|
bool hasStatus() const {return true;}
|
||||||
|
StatusCode getStatus() const;
|
||||||
};
|
};
|
||||||
|
} // namespace details
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @brief Base Inference Engine exception class
|
||||||
|
*/
|
||||||
|
IE_SUPPRESS_DEPRECATED_START
|
||||||
|
struct INFERENCE_ENGINE_API_CLASS(Exception) : public details::InferenceEngineException {
|
||||||
|
using InferenceEngineException::InferenceEngineException;
|
||||||
|
};
|
||||||
|
IE_SUPPRESS_DEPRECATED_END
|
||||||
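Because every specific exception below derives from this base, client code can catch the whole family at once. A minimal sketch, assuming a standard Core workflow and an arbitrary model path:

#include <ie_core.hpp>
#include <iostream>

int main() {
    try {
        InferenceEngine::Core core;
        auto network = core.ReadNetwork("model.xml");  // any IE call that may fail
        (void)network;
    } catch (const InferenceEngine::Exception& ex) {
        // NotFound, NetworkNotRead, GeneralError, ... all land here via the common base
        std::cerr << "Inference Engine error: " << ex.what() << std::endl;
        return 1;
    }
    return 0;
}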
|
|
||||||
|
/// @cond
|
||||||
|
namespace details {
|
||||||
|
template<typename ExceptionType> struct ExceptionTraits;
|
||||||
|
}
|
||||||
|
|
||||||
|
#define INFERENCE_ENGINE_DECLARE_EXCEPTION(ExceptionType, statusCode) \
|
||||||
|
struct INFERENCE_ENGINE_API_CLASS(ExceptionType) final : public InferenceEngine::Exception { \
|
||||||
|
using Exception::Exception; \
|
||||||
|
}; \
|
||||||
|
namespace details { \
|
||||||
|
template<> struct ExceptionTraits<ExceptionType> { \
|
||||||
|
static const char* string() {return "[ " #statusCode " ]";} \
|
||||||
|
}; \
|
||||||
|
}
|
||||||
|
/// @endcond
|
||||||
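For readability, roughly what a single instantiation of this macro expands to (export attributes omitted); an illustration only, not additional code in the patch:

// INFERENCE_ENGINE_DECLARE_EXCEPTION(NotFound, NOT_FOUND) expands, approximately, to:
struct NotFound final : public InferenceEngine::Exception {
    using Exception::Exception;
};
namespace details {
template<> struct ExceptionTraits<NotFound> {
    static const char* string() { return "[ NOT_FOUND ]"; }
};
}  // namespace details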
|
|
||||||
|
/** @brief This class represents StatusCode::GENERAL_ERROR exception */
|
||||||
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(GeneralError, GENERAL_ERROR)
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::NOT_IMPLEMENTED exception */
|
/** @brief This class represents StatusCode::NOT_IMPLEMENTED exception */
|
||||||
class NotImplemented : public std::logic_error {
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(NotImplemented, NOT_IMPLEMENTED)
|
||||||
using std::logic_error::logic_error;
|
|
||||||
};
|
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::NETWORK_NOT_LOADED exception */
|
/** @brief This class represents StatusCode::NETWORK_NOT_LOADED exception */
|
||||||
class NetworkNotLoaded : public std::logic_error {
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(NetworkNotLoaded, NETWORK_NOT_LOADED)
|
||||||
using std::logic_error::logic_error;
|
|
||||||
};
|
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::PARAMETER_MISMATCH exception */
|
/** @brief This class represents StatusCode::PARAMETER_MISMATCH exception */
|
||||||
class ParameterMismatch : public std::logic_error {
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(ParameterMismatch, PARAMETER_MISMATCH)
|
||||||
using std::logic_error::logic_error;
|
|
||||||
};
|
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::NOT_FOUND exception */
|
/** @brief This class represents StatusCode::NOT_FOUND exception */
|
||||||
class NotFound : public std::logic_error {
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(NotFound, NOT_FOUND)
|
||||||
using std::logic_error::logic_error;
|
|
||||||
};
|
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::OUT_OF_BOUNDS exception */
|
/** @brief This class represents StatusCode::OUT_OF_BOUNDS exception */
|
||||||
class OutOfBounds : public std::logic_error {
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(OutOfBounds, OUT_OF_BOUNDS)
|
||||||
using std::logic_error::logic_error;
|
|
||||||
};
|
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::UNEXPECTED exception */
|
/** @brief This class represents StatusCode::UNEXPECTED exception */
|
||||||
class Unexpected : public std::logic_error {
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(Unexpected, UNEXPECTED)
|
||||||
using std::logic_error::logic_error;
|
|
||||||
};
|
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::REQUEST_BUSY exception */
|
/** @brief This class represents StatusCode::REQUEST_BUSY exception */
|
||||||
class RequestBusy : public std::logic_error {
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(RequestBusy, REQUEST_BUSY)
|
||||||
using std::logic_error::logic_error;
|
|
||||||
};
|
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::RESULT_NOT_READY exception */
|
/** @brief This class represents StatusCode::RESULT_NOT_READY exception */
|
||||||
class ResultNotReady : public std::logic_error {
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(ResultNotReady, RESULT_NOT_READY)
|
||||||
using std::logic_error::logic_error;
|
|
||||||
};
|
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::NOT_ALLOCATED exception */
|
/** @brief This class represents StatusCode::NOT_ALLOCATED exception */
|
||||||
class NotAllocated : public std::logic_error {
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(NotAllocated, NOT_ALLOCATED)
|
||||||
using std::logic_error::logic_error;
|
|
||||||
};
|
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::INFER_NOT_STARTED exception */
|
/** @brief This class represents StatusCode::INFER_NOT_STARTED exception */
|
||||||
class InferNotStarted : public std::logic_error {
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(InferNotStarted, INFER_NOT_STARTED)
|
||||||
using std::logic_error::logic_error;
|
|
||||||
};
|
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::NETWORK_NOT_READ exception */
|
/** @brief This class represents StatusCode::NETWORK_NOT_READ exception */
|
||||||
class NetworkNotRead : public std::logic_error {
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(NetworkNotRead, NETWORK_NOT_READ)
|
||||||
using std::logic_error::logic_error;
|
|
||||||
};
|
|
||||||
|
|
||||||
/** @brief This class represents StatusCode::INFER_CANCELLED exception */
|
/** @brief This class represents StatusCode::INFER_CANCELLED exception */
|
||||||
class InferCancelled : public std::logic_error {
|
INFERENCE_ENGINE_DECLARE_EXCEPTION(InferCancelled, INFER_CANCELLED)
|
||||||
using std::logic_error::logic_error;
|
|
||||||
|
/**
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
#undef INFERENCE_ENGINE_DECLARE_EXCEPTION
|
||||||
|
|
||||||
|
namespace details {
|
||||||
|
/**
|
||||||
|
 * @brief Tag struct used to throw an exception
|
||||||
|
*/
|
||||||
|
template<typename ExceptionType>
|
||||||
|
struct ThrowNow final {
|
||||||
|
[[noreturn]] void operator<<=(const std::ostream& ostream) {
|
||||||
|
std::ostringstream stream;
|
||||||
|
stream << ostream.rdbuf();
|
||||||
|
throw ExceptionType{stream.str()};
|
||||||
|
}
|
||||||
};
|
};
|
||||||
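The operator<<= trick works because '<<' binds tighter than '<<=', so the whole message is streamed into the temporary stringstream before ThrowNow fires. A standalone sketch of the same idiom using only the standard library:

#include <sstream>
#include <stdexcept>

// Same "stream, then throw" idiom as ThrowNow, reduced to the standard library.
template <typename ExceptionType>
struct ThrowNowSketch {
    [[noreturn]] void operator<<=(const std::ostream& os) {
        std::ostringstream buffer;
        buffer << os.rdbuf();               // copy the accumulated message
        throw ExceptionType{buffer.str()};  // throw it as the requested type
    }
};

// Usage sketch:
//   ThrowNowSketch<std::runtime_error>{} <<= std::stringstream{} << "bad value: " << 42;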
|
|
||||||
} // namespace InferenceEngine
|
/// @cond
|
||||||
|
#ifndef NDEBUG
|
||||||
|
#define IE_LOCATION '\n' << __FILE__ << ':' << __LINE__<< ' '
|
||||||
|
#else
|
||||||
|
#define IE_LOCATION ""
|
||||||
|
#endif // NDEBUG
|
||||||
|
/// @endcond
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @def IE_THROW
|
||||||
|
 * @brief A macro used to throw the specified exception with a description
|
||||||
|
*/
|
||||||
|
#define IE_THROW(ExceptionType) \
|
||||||
|
InferenceEngine::details::ThrowNow<InferenceEngine::ExceptionType>{} <<= std::stringstream{} << IE_LOCATION
|
||||||
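A minimal usage sketch, assuming the surrounding header; the lookup function and message text are arbitrary:

#include <map>
#include <string>

// Hypothetical example: throws InferenceEngine::NotFound, with file and line
// information prepended in debug builds via IE_LOCATION.
int find_blob_index(const std::map<std::string, int>& blobs, const std::string& name) {
    auto it = blobs.find(name);
    if (it == blobs.end()) {
        IE_THROW(NotFound) << "Blob with name '" << name << "' was not found";
    }
    return it->second;
}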
|
|
||||||
|
/**
|
||||||
|
* @def THROW_IE_EXCEPTION
|
||||||
|
* @brief A macro used to throw general exception with a description
|
||||||
|
*/
|
||||||
|
#define THROW_IE_EXCEPTION IE_THROW(GeneralError)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @def THROW_IE_EXCEPTION_WITH_STATUS
|
||||||
|
* @brief A macro used to throw general exception with a description and status
|
||||||
|
*/
|
||||||
|
#define THROW_IE_EXCEPTION_WITH_STATUS(ExceptionType) \
|
||||||
|
IE_THROW(ExceptionType) << InferenceEngine::details::ExceptionTraits<InferenceEngine::ExceptionType>::string() << ' '
|
||||||
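A minimal usage sketch; the resulting what() message starts with the status tag from ExceptionTraits, e.g. "[ NOT_IMPLEMENTED ]", followed by the streamed description (the check itself is arbitrary):

// Hypothetical example of the status-tagged throw.
void require_static_shapes(bool has_dynamic_shapes) {
    if (has_dynamic_shapes) {
        THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented)
            << "Dynamic shapes are not supported yet";
    }
}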
|
|
||||||
|
/**
|
||||||
|
* @def IE_ASSERT
|
||||||
|
 * @brief Uses assert() if NDEBUG is not defined; otherwise throws an InferenceEngine exception
|
||||||
|
*/
|
||||||
|
#ifdef NDEBUG
|
||||||
|
#define IE_ASSERT(EXPRESSION) \
|
||||||
|
if (!(EXPRESSION)) \
|
||||||
|
IE_THROW(GeneralError) << " AssertionFailed: " << #EXPRESSION // NOLINT
|
||||||
|
#else
|
||||||
|
/**
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
struct NullStream {
|
||||||
|
template <typename T>
|
||||||
|
NullStream& operator<<(const T&) noexcept {return *this;}
|
||||||
|
};
|
||||||
|
|
||||||
|
#define IE_ASSERT(EXPRESSION) \
|
||||||
|
assert((EXPRESSION)); \
|
||||||
|
InferenceEngine::details::NullStream()
|
||||||
|
#endif // NDEBUG
|
||||||
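A usage sketch: in release builds (NDEBUG) a failed check throws GeneralError with an "AssertionFailed" message plus anything streamed afterwards; in debug builds it is a plain assert() and the streamed text goes to NullStream:

// Hypothetical example; SizeVector comes from this header.
void check_is_4d(const InferenceEngine::SizeVector& dims) {
    IE_ASSERT(dims.size() == 4) << "expected a 4D tensor, got " << dims.size() << " dimensions";
}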
|
|
||||||
|
/// @cond
|
||||||
|
#define IE_EXCEPTION_CASE(TYPE_ALIAS, STATUS_CODE, EXCEPTION_TYPE, ...) \
|
||||||
|
case InferenceEngine::STATUS_CODE : { \
|
||||||
|
using InferenceEngine::EXCEPTION_TYPE; using TYPE_ALIAS = EXCEPTION_TYPE; __VA_ARGS__; \
|
||||||
|
} break;
|
||||||
|
/// @endcond
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @def IE_EXCEPTION_SWITCH
|
||||||
|
 * @brief Generates a switch statement over error codes and maps them to the corresponding exception types
|
||||||
|
*/
|
||||||
|
#define IE_EXCEPTION_SWITCH(STATUS, TYPE_ALIAS, ...) \
|
||||||
|
switch (STATUS) { \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, GENERAL_ERROR , GeneralError , __VA_ARGS__) \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, NOT_IMPLEMENTED , NotImplemented , __VA_ARGS__) \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, NETWORK_NOT_LOADED , NetworkNotLoaded , __VA_ARGS__) \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, PARAMETER_MISMATCH , ParameterMismatch , __VA_ARGS__) \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, NOT_FOUND , NotFound , __VA_ARGS__) \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, OUT_OF_BOUNDS , OutOfBounds , __VA_ARGS__) \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, UNEXPECTED , Unexpected , __VA_ARGS__) \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, REQUEST_BUSY , RequestBusy , __VA_ARGS__) \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, RESULT_NOT_READY , ResultNotReady , __VA_ARGS__) \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, NOT_ALLOCATED , NotAllocated , __VA_ARGS__) \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, INFER_NOT_STARTED , InferNotStarted , __VA_ARGS__) \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, NETWORK_NOT_READ , NetworkNotRead , __VA_ARGS__) \
|
||||||
|
IE_EXCEPTION_CASE(TYPE_ALIAS, INFER_CANCELLED , InferCancelled , __VA_ARGS__) \
|
||||||
|
default: IE_ASSERT(!"Unreachable"); \
|
||||||
|
}
|
||||||
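A sketch of the intended use, mirroring the updated CALL_STATUS_FNC below and the plugin code later in this change: a legacy StatusCode is rethrown as the matching typed exception (the helper name is hypothetical):

#include <sstream>
#include <string>

// Hypothetical helper: convert a legacy StatusCode plus message into the
// corresponding typed exception via the switch generated above.
void rethrow_as_typed_exception(InferenceEngine::StatusCode sts, const std::string& msg) {
    IE_EXCEPTION_SWITCH(sts, ExceptionType,
        InferenceEngine::details::ThrowNow<ExceptionType>{}
            <<= std::stringstream{} << IE_LOCATION << msg)
}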
|
|
||||||
|
/**
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
#define CALL_STATUS_FNC(function, ...) \
|
||||||
|
if (!actual) THROW_IE_EXCEPTION << "Wrapper used was not initialized."; \
|
||||||
|
ResponseDesc resp; \
|
||||||
|
auto res = actual->function(__VA_ARGS__, &resp); \
|
||||||
|
if (res != OK) IE_EXCEPTION_SWITCH(res, ExceptionType, \
|
||||||
|
InferenceEngine::details::ThrowNow<ExceptionType>{} \
|
||||||
|
<<= std::stringstream{} << IE_LOCATION << resp.msg)
|
||||||
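The macro assumes a member named actual that points at a legacy interface whose methods return StatusCode and take a trailing ResponseDesc*; a sketch of a wrapper method built on it (the interface and method names are hypothetical):

// Hypothetical wrapper showing the CALL_STATUS_FNC usage pattern.
class BatchingWrapper {
    std::shared_ptr<ILegacyBatching> actual;  // StatusCode SetBatch(int, ResponseDesc*)
public:
    void SetBatch(int batch) {
        CALL_STATUS_FNC(SetBatch, batch);     // non-OK results become typed IE exceptions
    }
};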
|
|
||||||
|
/**
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
#define CALL_STATUS_FNC_NO_ARGS(function) \
|
||||||
|
if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATUS_FNC_NO_ARGS was not initialized."; \
|
||||||
|
ResponseDesc resp; \
|
||||||
|
auto res = actual->function(&resp); \
|
||||||
|
if (res != OK) IE_EXCEPTION_SWITCH(res, ExceptionType, \
|
||||||
|
InferenceEngine::details::ThrowNow<ExceptionType>{} \
|
||||||
|
<<= std::stringstream{} << IE_LOCATION)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
#define CALL_FNC_NO_ARGS(function) \
|
||||||
|
if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_FNC_NO_ARGS was not initialized."; \
|
||||||
|
ResponseDesc resp; \
|
||||||
|
auto result = actual->function(&resp); \
|
||||||
|
if (resp.msg[0] != '\0') { \
|
||||||
|
THROW_IE_EXCEPTION << resp.msg; \
|
||||||
|
} \
|
||||||
|
return result;
|
||||||
|
} // namespace details
|
||||||
|
} // namespace InferenceEngine
|
||||||
#if defined(_WIN32)
|
#if defined(_WIN32)
|
||||||
#define __PRETTY_FUNCTION__ __FUNCSIG__
|
#define __PRETTY_FUNCTION__ __FUNCSIG__
|
||||||
#else
|
#else
|
||||||
|
@ -10,7 +10,6 @@
|
|||||||
|
|
||||||
#include <algorithm>
|
#include <algorithm>
|
||||||
#include <cctype>
|
#include <cctype>
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
#include <iterator>
|
#include <iterator>
|
||||||
#include <map>
|
#include <map>
|
||||||
#include <memory>
|
#include <memory>
|
||||||
|
@ -13,7 +13,8 @@
|
|||||||
#include <unordered_map>
|
#include <unordered_map>
|
||||||
#include <vector>
|
#include <vector>
|
||||||
|
|
||||||
#include "details/ie_exception.hpp"
|
#include "ie_common.h"
|
||||||
|
|
||||||
|
|
||||||
namespace InferenceEngine {
|
namespace InferenceEngine {
|
||||||
|
|
||||||
|
@ -432,7 +432,7 @@ int main(int argc, char *argv[]) {
|
|||||||
std::string key = METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS);
|
std::string key = METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS);
|
||||||
try {
|
try {
|
||||||
nireq = exeNetwork.GetMetric(key).as<unsigned int>();
|
nireq = exeNetwork.GetMetric(key).as<unsigned int>();
|
||||||
} catch (const details::InferenceEngineException& ex) {
|
} catch (const std::exception& ex) {
|
||||||
THROW_IE_EXCEPTION
|
THROW_IE_EXCEPTION
|
||||||
<< "Every device used with the benchmark_app should "
|
<< "Every device used with the benchmark_app should "
|
||||||
<< "support OPTIMAL_NUMBER_OF_INFER_REQUESTS ExecutableNetwork metric. "
|
<< "support OPTIMAL_NUMBER_OF_INFER_REQUESTS ExecutableNetwork metric. "
|
||||||
|
@ -642,7 +642,7 @@ inline std::map<std::string, std::string> getMapFullDevicesNames(InferenceEngine
|
|||||||
p = ie.GetMetric(deviceName, METRIC_KEY(FULL_DEVICE_NAME));
|
p = ie.GetMetric(deviceName, METRIC_KEY(FULL_DEVICE_NAME));
|
||||||
devicesMap.insert(std::pair<std::string, std::string>(deviceName, p.as<std::string>()));
|
devicesMap.insert(std::pair<std::string, std::string>(deviceName, p.as<std::string>()));
|
||||||
}
|
}
|
||||||
catch (InferenceEngine::details::InferenceEngineException &) {
|
catch (InferenceEngine::Exception &) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -664,7 +664,7 @@ inline std::string getFullDeviceName(InferenceEngine::Core& ie, std::string devi
|
|||||||
p = ie.GetMetric(device, METRIC_KEY(FULL_DEVICE_NAME));
|
p = ie.GetMetric(device, METRIC_KEY(FULL_DEVICE_NAME));
|
||||||
return p.as<std::string>();
|
return p.as<std::string>();
|
||||||
}
|
}
|
||||||
catch (InferenceEngine::details::InferenceEngineException &) {
|
catch (InferenceEngine::Exception &) {
|
||||||
return "";
|
return "";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -5,7 +5,6 @@
|
|||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include <ie_layouts.h>
|
#include <ie_layouts.h>
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
#include <cpp_interfaces/exception2status.hpp>
|
#include <cpp_interfaces/exception2status.hpp>
|
||||||
#include <api/layout.hpp>
|
#include <api/layout.hpp>
|
||||||
|
|
||||||
@ -49,7 +48,8 @@ inline cldnn::data_types DataTypeFromPrecision(InferenceEngine::Precision p) {
|
|||||||
case InferenceEngine::Precision::BOOL:
|
case InferenceEngine::Precision::BOOL:
|
||||||
return cldnn::data_types::i8;
|
return cldnn::data_types::i8;
|
||||||
default:
|
default:
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << p.name() << " precision";
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch)
|
||||||
|
<< "The plugin does not support " << p.name() << " precision";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -74,7 +74,8 @@ inline cldnn::data_types DataTypeFromPrecision(ngraph::element::Type t) {
|
|||||||
case ngraph::element::Type_t::u1:
|
case ngraph::element::Type_t::u1:
|
||||||
return cldnn::data_types::bin;
|
return cldnn::data_types::bin;
|
||||||
default:
|
default:
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << t.get_type_name()<< " precision";
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch)
|
||||||
|
<< "The plugin does not support " << t.get_type_name()<< " precision";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -94,7 +95,7 @@ inline cldnn::format FormatFromLayout(InferenceEngine::Layout l) {
|
|||||||
case InferenceEngine::Layout::NHWC:
|
case InferenceEngine::Layout::NHWC:
|
||||||
return cldnn::format::byxf;
|
return cldnn::format::byxf;
|
||||||
default:
|
default:
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << l << " layout";
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "The plugin does not support " << l << " layout";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -119,7 +120,8 @@ inline cldnn::format FormatFromTensorDesc(InferenceEngine::TensorDesc desc) {
|
|||||||
case InferenceEngine::Layout::NHWC:
|
case InferenceEngine::Layout::NHWC:
|
||||||
return cldnn::format::byxf;
|
return cldnn::format::byxf;
|
||||||
default:
|
default:
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << desc.getLayout() << " layout";
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch)
|
||||||
|
<< "The plugin does not support " << desc.getLayout() << " layout";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -135,7 +137,8 @@ inline cldnn::format ImageFormatFromLayout(InferenceEngine::Layout l) {
|
|||||||
case InferenceEngine::Layout::NHWC:
|
case InferenceEngine::Layout::NHWC:
|
||||||
return cldnn::format::nv12;
|
return cldnn::format::nv12;
|
||||||
default:
|
default:
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << l << " image layout";
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch)
|
||||||
|
<< "The plugin does not support " << l << " image layout";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -7,7 +7,6 @@
|
|||||||
#include <cldnn/cldnn_config.hpp>
|
#include <cldnn/cldnn_config.hpp>
|
||||||
#include "cldnn_config.h"
|
#include "cldnn_config.h"
|
||||||
#include "cpp_interfaces/exception2status.hpp"
|
#include "cpp_interfaces/exception2status.hpp"
|
||||||
#include "details/ie_exception.hpp"
|
|
||||||
#include "cpp_interfaces/interface/ie_internal_plugin_config.hpp"
|
#include "cpp_interfaces/interface/ie_internal_plugin_config.hpp"
|
||||||
#include "ie_api.h"
|
#include "ie_api.h"
|
||||||
#include "file_utils.h"
|
#include "file_utils.h"
|
||||||
@ -52,7 +51,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
|
|||||||
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
||||||
useProfiling = false;
|
useProfiling = false;
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
|
||||||
}
|
}
|
||||||
} else if (key.compare(PluginConfigParams::KEY_DYN_BATCH_ENABLED) == 0) {
|
} else if (key.compare(PluginConfigParams::KEY_DYN_BATCH_ENABLED) == 0) {
|
||||||
if (val.compare(PluginConfigParams::YES) == 0) {
|
if (val.compare(PluginConfigParams::YES) == 0) {
|
||||||
@ -60,7 +59,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
|
|||||||
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
||||||
enableDynamicBatch = false;
|
enableDynamicBatch = false;
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
|
||||||
}
|
}
|
||||||
} else if (key.compare(PluginConfigParams::KEY_DUMP_KERNELS) == 0) {
|
} else if (key.compare(PluginConfigParams::KEY_DUMP_KERNELS) == 0) {
|
||||||
if (val.compare(PluginConfigParams::YES) == 0) {
|
if (val.compare(PluginConfigParams::YES) == 0) {
|
||||||
@ -68,14 +67,14 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
|
|||||||
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
||||||
dumpCustomKernels = false;
|
dumpCustomKernels = false;
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
|
||||||
}
|
}
|
||||||
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_PLUGIN_PRIORITY) == 0) {
|
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_PLUGIN_PRIORITY) == 0) {
|
||||||
std::stringstream ss(val);
|
std::stringstream ss(val);
|
||||||
uint32_t uVal(0);
|
uint32_t uVal(0);
|
||||||
ss >> uVal;
|
ss >> uVal;
|
||||||
if (ss.fail()) {
|
if (ss.fail()) {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
|
||||||
}
|
}
|
||||||
switch (uVal) {
|
switch (uVal) {
|
||||||
case 0:
|
case 0:
|
||||||
@ -91,7 +90,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
|
|||||||
queuePriority = cldnn::priority_mode_types::high;
|
queuePriority = cldnn::priority_mode_types::high;
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Unsupported queue priority value: " << uVal;
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Unsupported queue priority value: " << uVal;
|
||||||
}
|
}
|
||||||
|
|
||||||
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_PLUGIN_THROTTLE) == 0) {
|
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_PLUGIN_THROTTLE) == 0) {
|
||||||
@ -99,7 +98,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
|
|||||||
uint32_t uVal(0);
|
uint32_t uVal(0);
|
||||||
ss >> uVal;
|
ss >> uVal;
|
||||||
if (ss.fail()) {
|
if (ss.fail()) {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
|
||||||
}
|
}
|
||||||
switch (uVal) {
|
switch (uVal) {
|
||||||
case 0:
|
case 0:
|
||||||
@ -115,7 +114,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
|
|||||||
queueThrottle = cldnn::throttle_mode_types::high;
|
queueThrottle = cldnn::throttle_mode_types::high;
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Unsupported queue throttle value: " << uVal;
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Unsupported queue throttle value: " << uVal;
|
||||||
}
|
}
|
||||||
} else if (key.compare(PluginConfigParams::KEY_CONFIG_FILE) == 0) {
|
} else if (key.compare(PluginConfigParams::KEY_CONFIG_FILE) == 0) {
|
||||||
std::stringstream ss(val);
|
std::stringstream ss(val);
|
||||||
@ -137,7 +136,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
|
|||||||
} else if (val.compare(PluginConfigParams::TUNING_RETUNE) == 0) {
|
} else if (val.compare(PluginConfigParams::TUNING_RETUNE) == 0) {
|
||||||
tuningConfig.mode = cldnn::tuning_mode::tuning_retune_and_cache;
|
tuningConfig.mode = cldnn::tuning_mode::tuning_retune_and_cache;
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported tuning mode value by plugin: " << val;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported tuning mode value by plugin: " << val;
|
||||||
}
|
}
|
||||||
} else if (key.compare(PluginConfigParams::KEY_TUNING_FILE) == 0) {
|
} else if (key.compare(PluginConfigParams::KEY_TUNING_FILE) == 0) {
|
||||||
tuningConfig.cache_file_path = val;
|
tuningConfig.cache_file_path = val;
|
||||||
@ -147,7 +146,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
|
|||||||
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
||||||
memory_pool_on = false;
|
memory_pool_on = false;
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported memory pool flag value: " << val;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported memory pool flag value: " << val;
|
||||||
}
|
}
|
||||||
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_GRAPH_DUMPS_DIR) == 0) {
|
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_GRAPH_DUMPS_DIR) == 0) {
|
||||||
if (!val.empty()) {
|
if (!val.empty()) {
|
||||||
@ -170,7 +169,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
|
|||||||
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
||||||
exclusiveAsyncRequests = false;
|
exclusiveAsyncRequests = false;
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
|
||||||
}
|
}
|
||||||
} else if (key.compare(PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS) == 0) {
|
} else if (key.compare(PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS) == 0) {
|
||||||
if (val.compare(PluginConfigParams::GPU_THROUGHPUT_AUTO) == 0) {
|
if (val.compare(PluginConfigParams::GPU_THROUGHPUT_AUTO) == 0) {
|
||||||
@ -204,7 +203,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
|
|||||||
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
||||||
enableInt8 = false;
|
enableInt8 = false;
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
|
||||||
}
|
}
|
||||||
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_NV12_TWO_INPUTS) == 0) {
|
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_NV12_TWO_INPUTS) == 0) {
|
||||||
if (val.compare(PluginConfigParams::YES) == 0) {
|
if (val.compare(PluginConfigParams::YES) == 0) {
|
||||||
@ -212,7 +211,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
|
|||||||
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
||||||
nv12_two_inputs = false;
|
nv12_two_inputs = false;
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported NV12 flag value: " << val;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported NV12 flag value: " << val;
|
||||||
}
|
}
|
||||||
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_ENABLE_FP16_FOR_QUANTIZED_MODELS) == 0) {
|
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_ENABLE_FP16_FOR_QUANTIZED_MODELS) == 0) {
|
||||||
if (val.compare(PluginConfigParams::YES) == 0) {
|
if (val.compare(PluginConfigParams::YES) == 0) {
|
||||||
@ -220,10 +219,10 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
|
|||||||
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
} else if (val.compare(PluginConfigParams::NO) == 0) {
|
||||||
enable_fp16_for_quantized_models = false;
|
enable_fp16_for_quantized_models = false;
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported KEY_CLDNN_ENABLE_FP16_FOR_QUANTIZED_MODELS flag value: " << val;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported KEY_CLDNN_ENABLE_FP16_FOR_QUANTIZED_MODELS flag value: " << val;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property key by plugin: " << key;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property key by plugin: " << key;
|
||||||
}
|
}
|
||||||
|
|
||||||
adjustKeyMapValues();
|
adjustKeyMapValues();
|
||||||
|
@ -416,7 +416,7 @@ auto check_inputs = [](InferenceEngine::InputsDataMap _networkInputs) {
|
|||||||
input_precision != InferenceEngine::Precision::I32 &&
|
input_precision != InferenceEngine::Precision::I32 &&
|
||||||
input_precision != InferenceEngine::Precision::I64 &&
|
input_precision != InferenceEngine::Precision::I64 &&
|
||||||
input_precision != InferenceEngine::Precision::BOOL) {
|
input_precision != InferenceEngine::Precision::BOOL) {
|
||||||
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str
|
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented)
|
||||||
<< "Input image format " << input_precision << " is not supported yet...";
|
<< "Input image format " << input_precision << " is not supported yet...";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -66,16 +66,16 @@ InferRequestInternal::Ptr CLDNNExecNetwork::CreateInferRequestImpl(InputsDataMap
|
|||||||
OutputsDataMap networkOutputs) {
|
OutputsDataMap networkOutputs) {
|
||||||
OV_ITT_SCOPED_TASK(itt::domains::CLDNNPlugin, "CLDNNExecNetwork::CreateInferRequestImpl");
|
OV_ITT_SCOPED_TASK(itt::domains::CLDNNPlugin, "CLDNNExecNetwork::CreateInferRequestImpl");
|
||||||
if (m_graphs.empty()) {
|
if (m_graphs.empty()) {
|
||||||
THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str;
|
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded);
|
||||||
}
|
}
|
||||||
|
|
||||||
for (auto& graph : m_graphs) {
|
for (auto& graph : m_graphs) {
|
||||||
if (graph == nullptr) {
|
if (graph == nullptr) {
|
||||||
THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str;
|
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!graph->IsLoaded()) {
|
if (!graph->IsLoaded()) {
|
||||||
THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str << ": no networks created";
|
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded) << ": no networks created";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -98,7 +98,7 @@ IInferRequest::Ptr CLDNNExecNetwork::CreateInferRequest() {
|
|||||||
|
|
||||||
InferenceEngine::CNNNetwork CLDNNExecNetwork::GetExecGraphInfo() {
|
InferenceEngine::CNNNetwork CLDNNExecNetwork::GetExecGraphInfo() {
|
||||||
if (m_graphs.empty())
|
if (m_graphs.empty())
|
||||||
THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str;
|
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded);
|
||||||
|
|
||||||
return m_graphs.front()->GetExecGraphInfo();
|
return m_graphs.front()->GetExecGraphInfo();
|
||||||
}
|
}
|
||||||
|
@ -337,7 +337,7 @@ void checkInputBlob(const Blob::Ptr &blob,
|
|||||||
auto nv12_ptr = blob->as<NV12Blob>();
|
auto nv12_ptr = blob->as<NV12Blob>();
|
||||||
|
|
||||||
if (nv12_ptr == nullptr) {
|
if (nv12_ptr == nullptr) {
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << wrong_nv12_blob;
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << wrong_nv12_blob;
|
||||||
}
|
}
|
||||||
|
|
||||||
auto y_ptr = nv12_ptr->y()->as<gpu::ClBlob>();
|
auto y_ptr = nv12_ptr->y()->as<gpu::ClBlob>();
|
||||||
@ -402,7 +402,8 @@ void CLDNNInferRequest::checkBlobs() {
|
|||||||
if (foundInputPair != std::end(_networkInputs)) {
|
if (foundInputPair != std::end(_networkInputs)) {
|
||||||
foundInput = foundInputPair->second;
|
foundInput = foundInputPair->second;
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find input with name: \'" << input.first << "\'";
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound)
|
||||||
|
<< "Failed to find input with name: \'" << input.first << "\'";
|
||||||
}
|
}
|
||||||
checkInputBlob(input.second, input.first, foundInput, m_graph->getConfig().nv12_two_inputs);
|
checkInputBlob(input.second, input.first, foundInput, m_graph->getConfig().nv12_two_inputs);
|
||||||
}
|
}
|
||||||
@ -415,7 +416,8 @@ void CLDNNInferRequest::checkBlobs() {
|
|||||||
if (foundOutputPair != std::end(_networkOutputs)) {
|
if (foundOutputPair != std::end(_networkOutputs)) {
|
||||||
foundOutput = foundOutputPair->second;
|
foundOutput = foundOutputPair->second;
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find output with name: \'" << output.first << "\'";
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound)
|
||||||
|
<< "Failed to find output with name: \'" << output.first << "\'";
|
||||||
}
|
}
|
||||||
checkOutputBlob(output.second, output.first, foundOutput);
|
checkOutputBlob(output.second, output.first, foundOutput);
|
||||||
}
|
}
|
||||||
@ -449,10 +451,10 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data)
|
|||||||
|
|
||||||
// perform all common checks first
|
// perform all common checks first
|
||||||
if (name.empty()) {
|
if (name.empty()) {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str + "Failed to set blob with empty name";
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Failed to set blob with empty name";
|
||||||
}
|
}
|
||||||
if (!data)
|
if (!data)
|
||||||
THROW_IE_EXCEPTION << NOT_ALLOCATED_str << "Failed to set empty blob with name: \'" << name << "\'";
|
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) << "Failed to set empty blob with name: \'" << name << "\'";
|
||||||
|
|
||||||
size_t dataSize = data->size();
|
size_t dataSize = data->size();
|
||||||
if (0 == dataSize) {
|
if (0 == dataSize) {
|
||||||
@ -470,7 +472,7 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data)
|
|||||||
: foundOutput->getTensorDesc();
|
: foundOutput->getTensorDesc();
|
||||||
|
|
||||||
if (desc.getPrecision() != blobDesc.getPrecision()) {
|
if (desc.getPrecision() != blobDesc.getPrecision()) {
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch)
|
||||||
<< "Failed to set Blob with precision not corresponding to user "
|
<< "Failed to set Blob with precision not corresponding to user "
|
||||||
<< (is_input ? "input" : "output") << " precision";
|
<< (is_input ? "input" : "output") << " precision";
|
||||||
}
|
}
|
||||||
@ -498,7 +500,7 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data)
|
|||||||
auto nv12_ptr = data->as<NV12Blob>();
|
auto nv12_ptr = data->as<NV12Blob>();
|
||||||
|
|
||||||
if (nv12_ptr == nullptr) {
|
if (nv12_ptr == nullptr) {
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << wrong_nv12_blob;
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << wrong_nv12_blob;
|
||||||
}
|
}
|
||||||
|
|
||||||
auto y_ptr = nv12_ptr->y()->as<gpu::ClBlob>();
|
auto y_ptr = nv12_ptr->y()->as<gpu::ClBlob>();
|
||||||
@ -530,7 +532,7 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data)
|
|||||||
_preProcData[name]->setRoiBlob(data);
|
_preProcData[name]->setRoiBlob(data);
|
||||||
} else {
|
} else {
|
||||||
if (compoundBlobPassed) {
|
if (compoundBlobPassed) {
|
||||||
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << cannot_set_compound;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << cannot_set_compound;
|
||||||
}
|
}
|
||||||
|
|
||||||
size_t blobSize = desc.getLayout() != SCALAR
|
size_t blobSize = desc.getLayout() != SCALAR
|
||||||
@ -548,7 +550,7 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data)
|
|||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if (compoundBlobPassed) {
|
if (compoundBlobPassed) {
|
||||||
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << cannot_set_compound;
|
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << cannot_set_compound;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (is_remote) {
|
if (is_remote) {
|
||||||
@ -697,7 +699,7 @@ void CLDNNInferRequest::SetGraph(std::shared_ptr<CLDNNPlugin::CLDNNGraph> graph)
|
|||||||
m_graph = graph;
|
m_graph = graph;
|
||||||
|
|
||||||
if (m_graph == nullptr) {
|
if (m_graph == nullptr) {
|
||||||
THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str;
|
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (m_graph->GetMaxDynamicBatchSize() > 1) {
|
if (m_graph->GetMaxDynamicBatchSize() > 1) {
|
||||||
|
@ -12,7 +12,6 @@
|
|||||||
#include <mutex>
|
#include <mutex>
|
||||||
|
|
||||||
#include <cpp/ie_cnn_network.h>
|
#include <cpp/ie_cnn_network.h>
|
||||||
#include "details/ie_exception.hpp"
|
|
||||||
|
|
||||||
#include "cldnn_config.h"
|
#include "cldnn_config.h"
|
||||||
|
|
||||||
|
@ -5,7 +5,6 @@
|
|||||||
#include <cstdio>
|
#include <cstdio>
|
||||||
#include <cmath>
|
#include <cmath>
|
||||||
|
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
|
|
||||||
#if GNA_LIB_VER == 2
|
#if GNA_LIB_VER == 2
|
||||||
#include <gna2-model-api.h>
|
#include <gna2-model-api.h>
|
||||||
|
@ -269,8 +269,9 @@ inline void quantizeWeightsBiases(const QuantDesc & quantDesc,
|
|||||||
make_custom_blob<typename QuantDesc::WeightsPrecision>(InferenceEngine::C, InferenceEngine::SizeVector({wl->_weights->size()}));
|
make_custom_blob<typename QuantDesc::WeightsPrecision>(InferenceEngine::C, InferenceEngine::SizeVector({wl->_weights->size()}));
|
||||||
intWeights->allocate();
|
intWeights->allocate();
|
||||||
if (intWeights->buffer() == nullptr) {
|
if (intWeights->buffer() == nullptr) {
|
||||||
THROW_GNA_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::NOT_ALLOCATED
|
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated)
|
||||||
<< "cannot copy weights for layer :"<< wl->name << " of size" << intWeights->byteSize();
|
<< "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": "
|
||||||
|
<< "cannot copy weights for layer :"<< wl->name << " of size" << intWeights->byteSize();
|
||||||
}
|
}
|
||||||
|
|
||||||
int oIdx = wl->outData[0]->getDims().size() - 1;
|
int oIdx = wl->outData[0]->getDims().size() - 1;
|
||||||
@ -296,8 +297,9 @@ inline void quantizeWeightsBiases(const QuantDesc & quantDesc,
|
|||||||
}));
|
}));
|
||||||
bias->allocate();
|
bias->allocate();
|
||||||
if (bias->buffer() == nullptr) {
|
if (bias->buffer() == nullptr) {
|
||||||
THROW_GNA_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::NOT_ALLOCATED
|
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated)
|
||||||
<< "cannot copy bias for layer :"<< wl->name <<"of size" << bias->byteSize();
|
<< "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": "
|
||||||
|
<< "cannot copy bias for layer :"<< wl->name <<"of size" << bias->byteSize();
|
||||||
}
|
}
|
||||||
|
|
||||||
memset(bias->buffer(), 0, bias->byteSize());
|
memset(bias->buffer(), 0, bias->byteSize());
|
||||||
@ -386,8 +388,9 @@ inline void quantizeWeightsBiasesConv(const QuantDesc & quantDesc,
|
|||||||
auto intWeights = make_custom_blob<typename QuantDesc::WeightsPrecision>(InferenceEngine::C, InferenceEngine::SizeVector({conv->_weights->size()}));
|
auto intWeights = make_custom_blob<typename QuantDesc::WeightsPrecision>(InferenceEngine::C, InferenceEngine::SizeVector({conv->_weights->size()}));
|
||||||
intWeights->allocate();
|
intWeights->allocate();
|
||||||
if (intWeights->buffer() == nullptr) {
|
if (intWeights->buffer() == nullptr) {
|
||||||
THROW_GNA_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::NOT_ALLOCATED
|
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated)
|
||||||
<< "cannot copy weights for layer :"<< conv->name << " of size" << intWeights->byteSize();
|
<< "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": "
|
||||||
|
<< "cannot copy weights for layer :"<< conv->name << " of size" << intWeights->byteSize();
|
||||||
}
|
}
|
||||||
|
|
||||||
auto getBiasSizeForLayer = [](InferenceEngine::WeightableLayer *wl) {
|
auto getBiasSizeForLayer = [](InferenceEngine::WeightableLayer *wl) {
|
||||||
@ -410,8 +413,9 @@ inline void quantizeWeightsBiasesConv(const QuantDesc & quantDesc,
|
|||||||
}));
|
}));
|
||||||
bias->allocate();
|
bias->allocate();
|
||||||
if (bias->buffer() == nullptr) {
|
if (bias->buffer() == nullptr) {
|
||||||
THROW_GNA_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::NOT_ALLOCATED
|
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated)
|
||||||
<< "cannot copy bias for layer :"<< conv->name <<"of size" << bias->byteSize();
|
<< "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": "
|
||||||
|
<< "cannot copy bias for layer :"<< conv->name <<"of size" << bias->byteSize();
|
||||||
}
|
}
|
||||||
memset(bias->buffer(), 0, bias->byteSize());
|
memset(bias->buffer(), 0, bias->byteSize());
|
||||||
|
|
||||||
|
@ -4,7 +4,6 @@
|
|||||||
|
|
||||||
#include <cstring>
|
#include <cstring>
|
||||||
#include <iostream>
|
#include <iostream>
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
#include <gna_plugin_log.hpp>
|
#include <gna_plugin_log.hpp>
|
||||||
#include <limits>
|
#include <limits>
|
||||||
#include "backend/gna_types.h"
|
#include "backend/gna_types.h"
|
||||||
|
@ -9,7 +9,6 @@
|
|||||||
#if GNA_LIB_VER == 2
|
#if GNA_LIB_VER == 2
|
||||||
#include "gna2_model_debug_log.hpp"
|
#include "gna2_model_debug_log.hpp"
|
||||||
#include "gna2-model-api.h"
|
#include "gna2-model-api.h"
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
|
|
||||||
#include <cstdint>
|
#include <cstdint>
|
||||||
#include <fstream>
|
#include <fstream>
|
||||||
|
@ -24,7 +24,6 @@
|
|||||||
#include "gna-api.h"
|
#include "gna-api.h"
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#include "details/ie_exception.hpp"
|
|
||||||
#include "gna_plugin_log.hpp"
|
#include "gna_plugin_log.hpp"
|
||||||
|
|
||||||
//#define MODEL_DUMP
|
//#define MODEL_DUMP
|
||||||
|
@ -90,7 +90,7 @@ class GNAInferRequest : public InferenceEngine::AsyncInferRequestInternal {
|
|||||||
if (inferRequestIdx == -1) {
|
if (inferRequestIdx == -1) {
|
||||||
return InferenceEngine::INFER_NOT_STARTED;
|
return InferenceEngine::INFER_NOT_STARTED;
|
||||||
} else if (millis_timeout < -1) {
|
} else if (millis_timeout < -1) {
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str;
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (millis_timeout == InferenceEngine::IInferRequest::WaitMode::RESULT_READY) {
|
if (millis_timeout == InferenceEngine::IInferRequest::WaitMode::RESULT_READY) {
|
||||||
|
@ -4,7 +4,6 @@
|
|||||||
|
|
||||||
#include <vector>
|
#include <vector>
|
||||||
#include <array>
|
#include <array>
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
#include <ios>
|
#include <ios>
|
||||||
#include <iomanip>
|
#include <iomanip>
|
||||||
#include <map>
|
#include <map>
|
||||||
|
@ -39,6 +39,7 @@
|
|||||||
#include <layers/gna_fake_quantize_layer.hpp>
|
#include <layers/gna_fake_quantize_layer.hpp>
|
||||||
#include "gna_graph_patterns.hpp"
|
#include "gna_graph_patterns.hpp"
|
||||||
#include "gna_tensor_tools.hpp"
|
#include "gna_tensor_tools.hpp"
|
||||||
|
#include <debug.h>
|
||||||
|
|
||||||
#include <ngraph/pass/manager.hpp>
|
#include <ngraph/pass/manager.hpp>
|
||||||
#include <legacy/convert_function_to_cnn_network.hpp>
|
#include <legacy/convert_function_to_cnn_network.hpp>
|
||||||
@ -1108,7 +1109,7 @@ uint32_t GNAPlugin::QueueInference(const InferenceEngine::BlobMap &inputs, Infer
|
|||||||
Wait(0);
|
Wait(0);
|
||||||
freeNnet = nnets.begin();
|
freeNnet = nnets.begin();
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << as_status << REQUEST_BUSY
|
THROW_IE_EXCEPTION_WITH_STATUS(RequestBusy)
|
||||||
<< "GNA executable network has max of "
|
<< "GNA executable network has max of "
|
||||||
<< static_cast<uint32_t >(gnaFlags->gna_lib_async_threads_num)
|
<< static_cast<uint32_t >(gnaFlags->gna_lib_async_threads_num)
|
||||||
<< " parallel infer requests, please sync one of already running";
|
<< " parallel infer requests, please sync one of already running";
|
||||||
@ -1589,7 +1590,7 @@ InferenceEngine::QueryNetworkResult GNAPlugin::QueryNetwork(const InferenceEngin
|
|||||||
InferenceEngine::QueryNetworkResult res;
|
InferenceEngine::QueryNetworkResult res;
|
||||||
|
|
||||||
if (network.getFunction()) {
|
if (network.getFunction()) {
|
||||||
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << " ngraph::Function is not supported natively";
|
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << " ngraph::Function is not supported natively";
|
||||||
}
|
}
|
||||||
|
|
||||||
std::unordered_set<CNNLayer *> allLayers;
|
std::unordered_set<CNNLayer *> allLayers;
|
||||||
|
@ -211,8 +211,9 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& config) {
|
|||||||
THROW_GNA_EXCEPTION << "EXCLUSIVE_ASYNC_REQUESTS should be YES/NO, but not" << value;
|
THROW_GNA_EXCEPTION << "EXCLUSIVE_ASYNC_REQUESTS should be YES/NO, but not" << value;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
THROW_GNA_EXCEPTION << as_status << NOT_FOUND << "Incorrect GNA Plugin config. Key " << item.first
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound)
|
||||||
<< " not supported";
|
<< "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": "
|
||||||
|
<< "Incorrect GNA Plugin config. Key " << item.first << " not supported";
|
||||||
}
|
}
|
||||||
|
|
||||||
if (gnaFlags.sw_fp32 && gnaFlags.gna_lib_async_threads_num > 1) {
|
if (gnaFlags.sw_fp32 && gnaFlags.gna_lib_async_threads_num > 1) {
|
||||||
|
@ -75,7 +75,7 @@ public:
|
|||||||
auto plg = GetCurrentPlugin();
|
auto plg = GetCurrentPlugin();
|
||||||
try {
|
try {
|
||||||
plg->SetConfig(config);
|
plg->SetConfig(config);
|
||||||
} catch (InferenceEngine::details::InferenceEngineException) {}
|
} catch (InferenceEngine::Exception&) {}
|
||||||
return plg->QueryNetwork(network, config);
|
return plg->QueryNetwork(network, config);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -6,7 +6,6 @@
|
|||||||
|
|
||||||
#include <ostream>
|
#include <ostream>
|
||||||
#include <ie_common.h>
|
#include <ie_common.h>
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
|
|
||||||
// #define GNA_DEBUG
|
// #define GNA_DEBUG
|
||||||
#ifdef GNA_DEBUG
|
#ifdef GNA_DEBUG
|
||||||
|
@ -5,7 +5,6 @@
|
|||||||
#include "gna_memory_util.hpp"
|
#include "gna_memory_util.hpp"
|
||||||
|
|
||||||
#include <cstdint>
|
#include <cstdint>
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
#include "gna_plugin_log.hpp"
|
#include "gna_plugin_log.hpp"
|
||||||
|
|
||||||
int32_t GNAPluginNS::memory::MemoryOffset(void *ptr_target, void *ptr_base) {
|
int32_t GNAPluginNS::memory::MemoryOffset(void *ptr_target, void *ptr_base) {
|
||||||
|
@ -35,10 +35,13 @@ HeteroAsyncInferRequest::HeteroAsyncInferRequest(const InferRequestInternal::Ptr
|
|||||||
Task _task;
|
Task _task;
|
||||||
};
|
};
|
||||||
|
|
||||||
auto reuestExecutor = std::make_shared<RequestExecutor>(_heteroInferRequest->_inferRequests[requestId]._request.get());
|
auto requestExecutor = std::make_shared<RequestExecutor>(_heteroInferRequest->_inferRequests[requestId]._request.get());
|
||||||
_pipeline.emplace_back(reuestExecutor, [reuestExecutor] {
|
_pipeline.emplace_back(requestExecutor, [requestExecutor] {
|
||||||
if (StatusCode::OK != reuestExecutor->_status) {
|
if (StatusCode::OK != requestExecutor->_status) {
|
||||||
THROW_IE_EXCEPTION << InferenceEngine::details::as_status << reuestExecutor->_status;
|
IE_EXCEPTION_SWITCH(requestExecutor->_status, ExceptionType,
|
||||||
|
InferenceEngine::details::ThrowNow<ExceptionType>{}
|
||||||
|
<<= std::stringstream{} << IE_LOCATION
|
||||||
|
<< InferenceEngine::details::ExceptionTraits<ExceptionType>::string());
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -431,7 +431,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(std::istream&
|
|||||||
pugi::xml_parse_result res = heteroXmlDoc.load_string(heteroXmlStr.c_str());
|
pugi::xml_parse_result res = heteroXmlDoc.load_string(heteroXmlStr.c_str());
|
||||||
|
|
||||||
if (res.status != pugi::status_ok) {
|
if (res.status != pugi::status_ok) {
|
||||||
THROW_IE_EXCEPTION_WITH_STATUS(NETWORK_NOT_READ) << "Error reading HETERO plugin xml header";
|
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotRead) << "Error reading HETERO plugin xml header";
|
||||||
}
|
}
|
||||||
|
|
||||||
using namespace XMLParseUtils;
|
using namespace XMLParseUtils;
|
||||||
@ -480,7 +480,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(std::istream&
|
|||||||
bool loaded = false;
|
bool loaded = false;
|
||||||
try {
|
try {
|
||||||
executableNetwork = _heteroPlugin->GetCore()->ImportNetwork(heteroModel, deviceName, loadConfig);
|
executableNetwork = _heteroPlugin->GetCore()->ImportNetwork(heteroModel, deviceName, loadConfig);
|
||||||
} catch (const InferenceEngine::NotImplemented &) {
|
} catch (const InferenceEngine::NotImplemented& ex) {
|
||||||
// read XML content
|
// read XML content
|
||||||
std::string xmlString;
|
std::string xmlString;
|
||||||
std::uint64_t dataSize = 0;
|
std::uint64_t dataSize = 0;
|
||||||
@ -608,7 +608,7 @@ void HeteroExecutableNetwork::ExportImpl(std::ostream& heteroModel) {
|
|||||||
for (auto&& subnetwork : networks) {
|
for (auto&& subnetwork : networks) {
|
||||||
try {
|
try {
|
||||||
subnetwork._network.Export(heteroModel);
|
subnetwork._network.Export(heteroModel);
|
||||||
} catch (const InferenceEngine::NotImplemented &) {
|
} catch (const InferenceEngine::NotImplemented& ex) {
|
||||||
auto subnet = subnetwork._clonedNetwork;
|
auto subnet = subnetwork._clonedNetwork;
|
||||||
if (!subnet.getFunction()) {
|
if (!subnet.getFunction()) {
|
||||||
THROW_IE_EXCEPTION << "Hetero plugin supports only ngraph function representation";
|
THROW_IE_EXCEPTION << "Hetero plugin supports only ngraph function representation";
|
||||||
|
@ -77,11 +77,7 @@ void HeteroInferRequest::SetBlob(const std::string& name, const InferenceEngine:
|
|||||||
if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) {
|
if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) {
|
||||||
r->SetBlob(name, data, foundInput->getPreProcess());
|
r->SetBlob(name, data, foundInput->getPreProcess());
|
||||||
}
|
}
|
||||||
} catch (const InferenceEngine::details::InferenceEngineException & ex) {
|
} catch (const InferenceEngine::NotFound& ex) {}
|
||||||
std::string message = ex.what();
|
|
||||||
if (message.find(NOT_FOUND_str) == std::string::npos)
|
|
||||||
throw ex;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -427,7 +427,7 @@ StatusCode CNNNetworkNGraphImpl::serialize(const std::string& xmlPath,
|
|||||||
xmlPath, binPath, ngraph::pass::Serialize::Version::IR_V10,
|
xmlPath, binPath, ngraph::pass::Serialize::Version::IR_V10,
|
||||||
custom_opsets);
|
custom_opsets);
|
||||||
manager.run_passes(_ngraph_function);
|
manager.run_passes(_ngraph_function);
|
||||||
} catch (const InferenceEngineException& e) {
|
} catch (const Exception& e) {
|
||||||
return DescriptionBuffer(GENERAL_ERROR, resp) << e.what();
|
return DescriptionBuffer(GENERAL_ERROR, resp) << e.what();
|
||||||
} catch (const std::exception& e) {
|
} catch (const std::exception& e) {
|
||||||
return DescriptionBuffer(UNEXPECTED, resp) << e.what();
|
return DescriptionBuffer(UNEXPECTED, resp) << e.what();
|
||||||
|
@ -193,7 +193,7 @@ std::istream& operator >> (std::istream& stream, CompiledBlobHeader& header) {
|
|||||||
pugi::xml_parse_result res = document.load_string(xmlStr.c_str());
|
pugi::xml_parse_result res = document.load_string(xmlStr.c_str());
|
||||||
|
|
||||||
if (res.status != pugi::status_ok) {
|
if (res.status != pugi::status_ok) {
|
||||||
THROW_IE_EXCEPTION_WITH_STATUS(NETWORK_NOT_READ) << "Error reading compiled blob header";
|
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotRead) << "Error reading compiled blob header";
|
||||||
}
|
}
|
||||||
|
|
||||||
pugi::xml_node compiledBlobNode = document.document_element();
|
pugi::xml_node compiledBlobNode = document.document_element();
|
||||||
|
@ -12,10 +12,9 @@
|
|||||||
#endif
|
#endif
|
||||||
|
|
||||||
#include <file_utils.h>
|
#include <file_utils.h>
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
#include <stdlib.h>
|
#include <stdlib.h>
|
||||||
#include <sys/stat.h>
|
#include <sys/stat.h>
|
||||||
|
#include "ie_common.h"
|
||||||
#ifndef _WIN32
|
#ifndef _WIN32
|
||||||
# include <limits.h>
|
# include <limits.h>
|
||||||
# include <unistd.h>
|
# include <unistd.h>
|
||||||
|
@ -11,7 +11,7 @@
|
|||||||
namespace InferenceEngine {
|
namespace InferenceEngine {
|
||||||
|
|
||||||
Blob::Ptr Blob::createROI(const ROI&) const {
|
Blob::Ptr Blob::createROI(const ROI&) const {
|
||||||
THROW_IE_EXCEPTION << "[NOT_IMPLEMENTED] createROI is not implemented for current type of Blob";
|
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << "createROI is not implemented for current type of Blob";
|
||||||
}
|
}
|
||||||
|
|
||||||
Blob::Ptr make_shared_blob(const Blob::Ptr& inputBlob, const ROI& roi) {
|
Blob::Ptr make_shared_blob(const Blob::Ptr& inputBlob, const ROI& roi) {
|
||||||
|
145  inference-engine/src/inference_engine/ie_common.cpp  Normal file
@ -0,0 +1,145 @@
|
|||||||
|
// Copyright (C) 2018-2020 Intel Corporation
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
//
|
||||||
|
|
||||||
|
#include <string>
|
||||||
|
#include <vector>
|
||||||
|
#include <tuple>
|
||||||
|
#include <memory>
|
||||||
|
#include <map>
|
||||||
|
#include <cassert>
|
||||||
|
|
||||||
|
#include <ie_common.h>
|
||||||
|
#include <ie_blob.h>
|
||||||
|
#include <ie_parameter.hpp>
|
||||||
|
#include <ie_iextension.h>
|
||||||
|
#include <ie_extension.h>
|
||||||
|
#include <exec_graph_info.hpp>
|
||||||
|
|
||||||
|
#include <ngraph/opsets/opset.hpp>
|
||||||
|
#include <cpp_interfaces/exception2status.hpp>
|
||||||
|
|
||||||
|
namespace ExecGraphInfoSerialization {
|
||||||
|
//
|
||||||
|
// exec_graph_info.hpp
|
||||||
|
//
|
||||||
|
constexpr ngraph::NodeTypeInfo ExecutionNode::type_info;
|
||||||
|
|
||||||
|
const ngraph::NodeTypeInfo& ExecutionNode::get_type_info() const {
|
||||||
|
return type_info;
|
||||||
|
}
|
||||||
|
} // namespace ExecGraphInfoSerialization
|
||||||
|
|
||||||
|
namespace InferenceEngine {
|
||||||
|
//
|
||||||
|
// ie_blob.h
|
||||||
|
//
|
||||||
|
|
||||||
|
Blob::~Blob() {}
|
||||||
|
MemoryBlob::~MemoryBlob() {}
|
||||||
|
|
||||||
|
//
|
||||||
|
// ie_iextension.h
|
||||||
|
//
|
||||||
|
ILayerImpl::~ILayerImpl() {}
|
||||||
|
ILayerExecImpl::~ILayerExecImpl() {}
|
||||||
|
std::map<std::string, ngraph::OpSet> IExtension::getOpSets() {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// ie_extension.h
|
||||||
|
//
|
||||||
|
std::map<std::string, ngraph::OpSet> Extension::getOpSets() {
|
||||||
|
return actual->getOpSets();
|
||||||
|
}
|
||||||
|
namespace details {
|
||||||
|
IE_SUPPRESS_DEPRECATED_START
|
||||||
|
|
||||||
|
StatusCode InferenceEngineException::getStatus() const {
|
||||||
|
return ExceptionToStatus(dynamic_cast<const Exception&>(*this));
|
||||||
|
}
|
||||||
|
} // namespace details
|
||||||
|
IE_SUPPRESS_DEPRECATED_END
|
||||||
|
|
||||||
|
INFERENCE_ENGINE_API_CPP(StatusCode) ExceptionToStatus(const Exception& exception) {
|
||||||
|
if (dynamic_cast<const GeneralError*>(&exception) != nullptr) {
|
||||||
|
return GENERAL_ERROR;
|
||||||
|
} else if (dynamic_cast<const NotImplemented*>(&exception) != nullptr) {
|
||||||
|
return NOT_IMPLEMENTED;
|
||||||
|
} else if (dynamic_cast<const NetworkNotLoaded*>(&exception) != nullptr) {
|
||||||
|
return NETWORK_NOT_LOADED;
|
||||||
|
} else if (dynamic_cast<const ParameterMismatch*>(&exception) != nullptr) {
|
||||||
|
return PARAMETER_MISMATCH;
|
||||||
|
} else if (dynamic_cast<const NotFound*>(&exception) != nullptr) {
|
||||||
|
return NOT_FOUND;
|
||||||
|
} else if (dynamic_cast<const OutOfBounds*>(&exception) != nullptr) {
|
||||||
|
return OUT_OF_BOUNDS;
|
||||||
|
} else if (dynamic_cast<const Unexpected*>(&exception) != nullptr) {
|
||||||
|
return UNEXPECTED;
|
||||||
|
} else if (dynamic_cast<const RequestBusy*>(&exception) != nullptr) {
|
||||||
|
return REQUEST_BUSY;
|
||||||
|
} else if (dynamic_cast<const ResultNotReady*>(&exception) != nullptr) {
|
||||||
|
return RESULT_NOT_READY;
|
||||||
|
} else if (dynamic_cast<const NotAllocated*>(&exception) != nullptr) {
|
||||||
|
return NOT_ALLOCATED;
|
||||||
|
} else if (dynamic_cast<const InferNotStarted*>(&exception) != nullptr) {
|
||||||
|
return INFER_NOT_STARTED;
|
||||||
|
} else if (dynamic_cast<const NetworkNotRead*>(&exception) != nullptr) {
|
||||||
|
return NETWORK_NOT_READ;
|
||||||
|
} else if (dynamic_cast<const InferCancelled*>(&exception) != nullptr) {
|
||||||
|
return INFER_CANCELLED;
|
||||||
|
} else {
|
||||||
|
assert(!"Unreachable"); return OK;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//
|
||||||
|
// ie_parameter.hpp
|
||||||
|
//
|
||||||
|
|
||||||
|
Parameter::~Parameter() {
|
||||||
|
clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
#if defined(__clang__) && !defined(__SYCL_COMPILER_VERSION)
|
||||||
|
Parameter::Any::~Any() {}
|
||||||
|
|
||||||
|
template struct Parameter::RealData<int>;
|
||||||
|
template struct Parameter::RealData<bool>;
|
||||||
|
template struct Parameter::RealData<float>;
|
||||||
|
template struct Parameter::RealData<double>;
|
||||||
|
template struct Parameter::RealData<uint32_t>;
|
||||||
|
template struct Parameter::RealData<std::string>;
|
||||||
|
template struct Parameter::RealData<unsigned long>;
|
||||||
|
template struct Parameter::RealData<std::vector<int>>;
|
||||||
|
template struct Parameter::RealData<std::vector<std::string>>;
|
||||||
|
template struct Parameter::RealData<std::vector<unsigned long>>;
|
||||||
|
template struct Parameter::RealData<std::tuple<unsigned int, unsigned int>>;
|
||||||
|
template struct Parameter::RealData<std::tuple<unsigned int, unsigned int, unsigned int>>;
|
||||||
|
template struct Parameter::RealData<Blob::Ptr>;
|
||||||
|
|
||||||
|
//
|
||||||
|
// ie_blob.h
|
||||||
|
//
|
||||||
|
|
||||||
|
template <typename T, typename U>
|
||||||
|
TBlob<T, U>::~TBlob() {
|
||||||
|
free();
|
||||||
|
}
|
||||||
|
|
||||||
|
template class TBlob<float>;
|
||||||
|
template class TBlob<double>;
|
||||||
|
template class TBlob<int8_t>;
|
||||||
|
template class TBlob<uint8_t>;
|
||||||
|
template class TBlob<int16_t>;
|
||||||
|
template class TBlob<uint16_t>;
|
||||||
|
template class TBlob<int32_t>;
|
||||||
|
template class TBlob<uint32_t>;
|
||||||
|
template class TBlob<long>;
|
||||||
|
template class TBlob<long long>;
|
||||||
|
template class TBlob<unsigned long>;
|
||||||
|
template class TBlob<unsigned long long>;
|
||||||
|
#endif // defined(__clang__) && !defined(__SYCL_COMPILER_VERSION)
|
||||||
|
|
||||||
|
} // namespace InferenceEngine
|
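Editor's note, not part of the diff: the new ie_common.cpp above centralizes the mapping from the typed exception hierarchy back to the legacy StatusCode enum via ExceptionToStatus. A minimal sketch of how a caller might use that mapping at a status-code boundary; this assumes ExceptionToStatus and the exception classes are reachable through ie_common.h as added in this commit, and guarded() is a hypothetical helper:

// Sketch only (assumption): wrap a callable and translate typed IE exceptions
// into legacy status codes using the ExceptionToStatus mapping defined above.
#include <functional>
#include <ie_common.h>

InferenceEngine::StatusCode guarded(const std::function<void()>& body) noexcept {
    try {
        body();                                         // may throw e.g. InferenceEngine::NotFound
        return InferenceEngine::OK;
    } catch (const InferenceEngine::Exception& ex) {
        return InferenceEngine::ExceptionToStatus(ex);  // NotFound -> NOT_FOUND, etc.
    } catch (...) {
        return InferenceEngine::UNEXPECTED;             // anything unknown -> UNEXPECTED
    }
}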
@ -620,7 +620,7 @@ public:
|
|||||||
}
|
}
|
||||||
|
|
||||||
plugins[deviceName] = plugin;
|
plugins[deviceName] = plugin;
|
||||||
} catch (const details::InferenceEngineException& ex) {
|
} catch (const Exception& ex) {
|
||||||
THROW_IE_EXCEPTION << "Failed to create plugin " << FileUtils::fromFilePath(desc.libraryLocation) << " for device " << deviceName
|
THROW_IE_EXCEPTION << "Failed to create plugin " << FileUtils::fromFilePath(desc.libraryLocation) << " for device " << deviceName
|
||||||
<< "\n"
|
<< "\n"
|
||||||
<< "Please, check your environment\n"
|
<< "Please, check your environment\n"
|
||||||
@ -993,7 +993,7 @@ std::vector<std::string> Core::GetAvailableDevices() const {
|
|||||||
try {
|
try {
|
||||||
Parameter p = GetMetric(deviceName, propertyName);
|
Parameter p = GetMetric(deviceName, propertyName);
|
||||||
devicesIDs = p.as<std::vector<std::string>>();
|
devicesIDs = p.as<std::vector<std::string>>();
|
||||||
} catch (details::InferenceEngineException&) {
|
} catch (Exception&) {
|
||||||
// plugin is not created by e.g. invalid env
|
// plugin is not created by e.g. invalid env
|
||||||
} catch (const std::exception& ex) {
|
} catch (const std::exception& ex) {
|
||||||
THROW_IE_EXCEPTION << "An exception is thrown while trying to create the " << deviceName
|
THROW_IE_EXCEPTION << "An exception is thrown while trying to create the " << deviceName
|
||||||
|
@ -16,25 +16,39 @@
#include "file_utils.h"
#include "file_utils.h"
#include "cpp/ie_executable_network.hpp"
#include "cpp/ie_executable_network.hpp"
#include "cpp/ie_cnn_network.h"
#include "cpp/ie_cnn_network.h"
#include "details/ie_exception_conversion.hpp"
#include "ie_plugin_ptr.hpp"
#include "ie_plugin_ptr.hpp"
#include "cpp_interfaces/exception2status.hpp"

#if defined __GNUC__
#if defined __GNUC__
# pragma GCC diagnostic push
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wreturn-type"
# pragma GCC diagnostic ignored "-Wreturn-type"
#endif
#endif

#define CATCH_IE_EXCEPTION(ExceptionType) catch (const InferenceEngine::ExceptionType& e) {throw e;}

#define CATCH_IE_EXCEPTIONS \
    CATCH_IE_EXCEPTION(GeneralError) \
    CATCH_IE_EXCEPTION(NotImplemented) \
    CATCH_IE_EXCEPTION(NetworkNotLoaded) \
    CATCH_IE_EXCEPTION(ParameterMismatch) \
    CATCH_IE_EXCEPTION(NotFound) \
    CATCH_IE_EXCEPTION(OutOfBounds) \
    CATCH_IE_EXCEPTION(Unexpected) \
    CATCH_IE_EXCEPTION(RequestBusy) \
    CATCH_IE_EXCEPTION(ResultNotReady) \
    CATCH_IE_EXCEPTION(NotAllocated) \
    CATCH_IE_EXCEPTION(InferNotStarted) \
    CATCH_IE_EXCEPTION(NetworkNotRead) \
    CATCH_IE_EXCEPTION(InferCancelled)

#define CALL_STATEMENT(...) \
#define CALL_STATEMENT(...) \
if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATEMENT was not initialized."; \
if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATEMENT was not initialized."; \
try { \
try { \
__VA_ARGS__; \
__VA_ARGS__; \
} catch (const InferenceEngine::details::InferenceEngineException& iex) { \
} CATCH_IE_EXCEPTIONS catch (const std::exception& ex) { \
InferenceEngine::details::extract_exception(iex.hasStatus() ? \
THROW_IE_EXCEPTION << ex.what(); \
iex.getStatus() : GENERAL_ERROR, iex.what()); \
} catch (const std::exception& ex) { \
InferenceEngine::details::extract_exception(GENERAL_ERROR, ex.what()); \
} catch (...) { \
} catch (...) { \
InferenceEngine::details::extract_exception(UNEXPECTED, ""); \
THROW_IE_EXCEPTION_WITH_STATUS(Unexpected); \
}
}

namespace InferenceEngine {
namespace InferenceEngine {
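Aside (illustration only, not diff content): after this change CALL_STATEMENT lets typed IE exceptions propagate unchanged and converts everything else into IE exceptions instead of legacy status codes. Roughly, CALL_STATEMENT(stmt) now expands to the pattern below; the initial null-check of the wrapped object is omitted, and callIntoPlugin() is a hypothetical stand-in for the wrapped statement:

try {
    callIntoPlugin();                            // the __VA_ARGS__ statement
} CATCH_IE_EXCEPTIONS                            // typed IE exceptions are rethrown as-is
catch (const std::exception& ex) {
    THROW_IE_EXCEPTION << ex.what();             // std::exception is wrapped into a generic IE exception
} catch (...) {
    THROW_IE_EXCEPTION_WITH_STATUS(Unexpected);  // unknown failures become Unexpected
}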
@ -1,123 +0,0 @@
|
|||||||
// Copyright (C) 2018-2020 Intel Corporation
|
|
||||||
// SPDX-License-Identifier: Apache-2.0
|
|
||||||
//
|
|
||||||
|
|
||||||
#include <string>
|
|
||||||
#include <vector>
|
|
||||||
#include <tuple>
|
|
||||||
#include <memory>
|
|
||||||
#include <map>
|
|
||||||
|
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
#include <ie_blob.h>
|
|
||||||
#include <ie_parameter.hpp>
|
|
||||||
#include <ie_iextension.h>
|
|
||||||
#include <ie_extension.h>
|
|
||||||
#include <exec_graph_info.hpp>
|
|
||||||
|
|
||||||
#include <ngraph/opsets/opset.hpp>
|
|
||||||
|
|
||||||
using namespace InferenceEngine;
|
|
||||||
|
|
||||||
//
|
|
||||||
// exec_graph_info.hpp
|
|
||||||
//
|
|
||||||
constexpr ngraph::NodeTypeInfo ExecGraphInfoSerialization::ExecutionNode::type_info;
|
|
||||||
|
|
||||||
const ngraph::NodeTypeInfo&
|
|
||||||
ExecGraphInfoSerialization::ExecutionNode::get_type_info() const {
|
|
||||||
return type_info;
|
|
||||||
}
|
|
||||||
|
|
||||||
//
|
|
||||||
// ie_blob.h
|
|
||||||
//
|
|
||||||
|
|
||||||
Blob::~Blob() {}
|
|
||||||
MemoryBlob::~MemoryBlob() {}
|
|
||||||
|
|
||||||
//
|
|
||||||
// ie_iextension.h
|
|
||||||
//
|
|
||||||
ILayerImpl::~ILayerImpl() {}
|
|
||||||
ILayerExecImpl::~ILayerExecImpl() {}
|
|
||||||
std::map<std::string, ngraph::OpSet> IExtension::getOpSets() {
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
|
|
||||||
//
|
|
||||||
// ie_extension.h
|
|
||||||
//
|
|
||||||
std::map<std::string, ngraph::OpSet> Extension::getOpSets() {
|
|
||||||
return actual->getOpSets();
|
|
||||||
}
|
|
||||||
|
|
||||||
//
|
|
||||||
// details/ie_exception.hpp
|
|
||||||
//
|
|
||||||
|
|
||||||
details::InferenceEngineException::~InferenceEngineException() noexcept {}
|
|
||||||
|
|
||||||
details::InferenceEngineException::InferenceEngineException(const std::string& filename, const int line, const std::string& message) noexcept :
|
|
||||||
std::exception(), _file(filename), _line(line) {
|
|
||||||
if (!message.empty()) {
|
|
||||||
exception_stream = std::make_shared<std::stringstream>(message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
details::InferenceEngineException::InferenceEngineException(const InferenceEngineException& that) noexcept :
|
|
||||||
std::exception() {
|
|
||||||
errorDesc = that.errorDesc;
|
|
||||||
status_code = that.status_code;
|
|
||||||
_file = that._file;
|
|
||||||
_line = that._line;
|
|
||||||
exception_stream = that.exception_stream;
|
|
||||||
}
|
|
||||||
//
|
|
||||||
// ie_parameter.hpp
|
|
||||||
//
|
|
||||||
|
|
||||||
Parameter::~Parameter() {
|
|
||||||
clear();
|
|
||||||
}
|
|
||||||
|
|
||||||
#if defined(__clang__) && !defined(__SYCL_COMPILER_VERSION)
|
|
||||||
Parameter::Any::~Any() {}
|
|
||||||
|
|
||||||
template struct InferenceEngine::Parameter::RealData<int>;
|
|
||||||
template struct InferenceEngine::Parameter::RealData<bool>;
|
|
||||||
template struct InferenceEngine::Parameter::RealData<float>;
|
|
||||||
template struct InferenceEngine::Parameter::RealData<double>;
|
|
||||||
template struct InferenceEngine::Parameter::RealData<uint32_t>;
|
|
||||||
template struct InferenceEngine::Parameter::RealData<std::string>;
|
|
||||||
template struct InferenceEngine::Parameter::RealData<unsigned long>;
|
|
||||||
template struct InferenceEngine::Parameter::RealData<std::vector<int>>;
|
|
||||||
template struct InferenceEngine::Parameter::RealData<std::vector<std::string>>;
|
|
||||||
template struct InferenceEngine::Parameter::RealData<std::vector<unsigned long>>;
|
|
||||||
template struct InferenceEngine::Parameter::RealData<std::tuple<unsigned int, unsigned int>>;
|
|
||||||
template struct InferenceEngine::Parameter::RealData<std::tuple<unsigned int, unsigned int, unsigned int>>;
|
|
||||||
template struct InferenceEngine::Parameter::RealData<InferenceEngine::Blob::Ptr>;
|
|
||||||
#endif // __clang__ && !__SYCL_COMPILER_VERSION
|
|
||||||
//
|
|
||||||
// ie_blob.h
|
|
||||||
//
|
|
||||||
|
|
||||||
#if defined(__clang__) && !defined(__SYCL_COMPILER_VERSION)
|
|
||||||
template <typename T, typename U>
|
|
||||||
TBlob<T, U>::~TBlob() {
|
|
||||||
free();
|
|
||||||
}
|
|
||||||
|
|
||||||
template class InferenceEngine::TBlob<float>;
|
|
||||||
template class InferenceEngine::TBlob<double>;
|
|
||||||
template class InferenceEngine::TBlob<int8_t>;
|
|
||||||
template class InferenceEngine::TBlob<uint8_t>;
|
|
||||||
template class InferenceEngine::TBlob<int16_t>;
|
|
||||||
template class InferenceEngine::TBlob<uint16_t>;
|
|
||||||
template class InferenceEngine::TBlob<int32_t>;
|
|
||||||
template class InferenceEngine::TBlob<uint32_t>;
|
|
||||||
template class InferenceEngine::TBlob<long>;
|
|
||||||
template class InferenceEngine::TBlob<long long>;
|
|
||||||
template class InferenceEngine::TBlob<unsigned long>;
|
|
||||||
template class InferenceEngine::TBlob<unsigned long long>;
|
|
||||||
#endif // __clang__ && !__SYCL_COMPILER_VERSION
|
|
@ -4,7 +4,6 @@
|
|||||||
|
|
||||||
#include <dlfcn.h>
|
#include <dlfcn.h>
|
||||||
|
|
||||||
#include "details/ie_exception.hpp"
|
|
||||||
#include "details/ie_so_loader.h"
|
#include "details/ie_so_loader.h"
|
||||||
#include "file_utils.h"
|
#include "file_utils.h"
|
||||||
|
|
||||||
@ -38,14 +37,14 @@ public:
|
|||||||
* @brief Searches for a function symbol in the loaded module
|
* @brief Searches for a function symbol in the loaded module
|
||||||
* @param symbolName Name of the function to find
|
* @param symbolName Name of the function to find
|
||||||
* @return A pointer to the function if found
|
* @return A pointer to the function if found
|
||||||
* @throws InferenceEngineException if the function is not found
|
* @throws Exception if the function is not found
|
||||||
*/
|
*/
|
||||||
void* get_symbol(const char* symbolName) const {
|
void* get_symbol(const char* symbolName) const {
|
||||||
void* procAddr = nullptr;
|
void* procAddr = nullptr;
|
||||||
|
|
||||||
procAddr = dlsym(shared_object, symbolName);
|
procAddr = dlsym(shared_object, symbolName);
|
||||||
if (procAddr == nullptr)
|
if (procAddr == nullptr)
|
||||||
THROW_IE_EXCEPTION << details::as_status << NOT_FOUND
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound)
|
||||||
<< "dlSym cannot locate method '" << symbolName << "': " << dlerror();
|
<< "dlSym cannot locate method '" << symbolName << "': " << dlerror();
|
||||||
return procAddr;
|
return procAddr;
|
||||||
}
|
}
|
||||||
|
@ -10,7 +10,7 @@
|
|||||||
#include <sched.h>
|
#include <sched.h>
|
||||||
#include "ie_system_conf.h"
|
#include "ie_system_conf.h"
|
||||||
#include "ie_parallel.hpp"
|
#include "ie_parallel.hpp"
|
||||||
#include "details/ie_exception.hpp"
|
#include "ie_common.h"
|
||||||
#include <numeric>
|
#include <numeric>
|
||||||
|
|
||||||
|
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
// SPDX-License-Identifier: Apache-2.0
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
//
|
//
|
||||||
|
|
||||||
#include "details/ie_exception.hpp"
|
#include "ie_common.h"
|
||||||
#include "details/ie_so_loader.h"
|
#include "details/ie_so_loader.h"
|
||||||
#include "file_utils.h"
|
#include "file_utils.h"
|
||||||
|
|
||||||
@ -239,7 +239,7 @@ class SharedObjectLoader::Impl {
|
|||||||
* @brief Searches for a function symbol in the loaded module
|
* @brief Searches for a function symbol in the loaded module
|
||||||
* @param symbolName Name of function to find
|
* @param symbolName Name of function to find
|
||||||
* @return A pointer to the function if found
|
* @return A pointer to the function if found
|
||||||
* @throws InferenceEngineException if the function is not found
|
* @throws Exception if the function is not found
|
||||||
*/
|
*/
|
||||||
void* get_symbol(const char* symbolName) const {
|
void* get_symbol(const char* symbolName) const {
|
||||||
if (!shared_object) {
|
if (!shared_object) {
|
||||||
@ -247,7 +247,7 @@ class SharedObjectLoader::Impl {
|
|||||||
}
|
}
|
||||||
auto procAddr = reinterpret_cast<void*>(GetProcAddress(shared_object, symbolName));
|
auto procAddr = reinterpret_cast<void*>(GetProcAddress(shared_object, symbolName));
|
||||||
if (procAddr == nullptr)
|
if (procAddr == nullptr)
|
||||||
THROW_IE_EXCEPTION << details::as_status << NOT_FOUND
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound)
|
||||||
<< "GetProcAddress cannot locate method '" << symbolName << "': " << GetLastError();
|
<< "GetProcAddress cannot locate method '" << symbolName << "': " << GetLastError();
|
||||||
|
|
||||||
return procAddr;
|
return procAddr;
|
||||||
|
@ -3,7 +3,6 @@
|
|||||||
//
|
//
|
||||||
|
|
||||||
#include "precision_utils.h"
|
#include "precision_utils.h"
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
|
|
||||||
#include <stdint.h>
|
#include <stdint.h>
|
||||||
|
|
||||||
|
@ -18,7 +18,6 @@
|
|||||||
#include "ie_parallel.hpp"
|
#include "ie_parallel.hpp"
|
||||||
#include "ie_system_conf.h"
|
#include "ie_system_conf.h"
|
||||||
#include "threading/ie_thread_affinity.hpp"
|
#include "threading/ie_thread_affinity.hpp"
|
||||||
#include "details/ie_exception.hpp"
|
|
||||||
#include "threading/ie_cpu_streams_executor.hpp"
|
#include "threading/ie_cpu_streams_executor.hpp"
|
||||||
#include <openvino/itt.hpp>
|
#include <openvino/itt.hpp>
|
||||||
|
|
||||||
|
@ -5,7 +5,6 @@
|
|||||||
#include "threading/ie_istreams_executor.hpp"
|
#include "threading/ie_istreams_executor.hpp"
|
||||||
#include "ie_plugin_config.hpp"
|
#include "ie_plugin_config.hpp"
|
||||||
#include "cpp_interfaces/interface/ie_internal_plugin_config.hpp"
|
#include "cpp_interfaces/interface/ie_internal_plugin_config.hpp"
|
||||||
#include "details/ie_exception.hpp"
|
|
||||||
#include "ie_parallel.hpp"
|
#include "ie_parallel.hpp"
|
||||||
#include "ie_system_conf.h"
|
#include "ie_system_conf.h"
|
||||||
#include "ie_parameter.hpp"
|
#include "ie_parameter.hpp"
|
||||||
|
@ -10,7 +10,6 @@
|
|||||||
#include <set>
|
#include <set>
|
||||||
#include <string>
|
#include <string>
|
||||||
|
|
||||||
#include "details/ie_exception.hpp"
|
|
||||||
#include "ie_precision.hpp"
|
#include "ie_precision.hpp"
|
||||||
|
|
||||||
int XMLParseUtils::GetIntAttr(const pugi::xml_node& node, const char* str) {
|
int XMLParseUtils::GetIntAttr(const pugi::xml_node& node, const char* str) {
|
||||||
|
@ -205,7 +205,7 @@ public:
|
|||||||
*
|
*
|
||||||
* @param str input string with float value
|
* @param str input string with float value
|
||||||
* @return float value if parsing was successful
|
* @return float value if parsing was successful
|
||||||
* @throws InferenceEngineException in case of parsing error
|
* @throws Exception in case of parsing error
|
||||||
*/
|
*/
|
||||||
static float ie_parse_float(const std::string& str);
|
static float ie_parse_float(const std::string& str);
|
||||||
|
|
||||||
|
@ -4,13 +4,14 @@
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief a header file for describing property style structure used by CNNLayers
|
* @brief a header file for describing property style structure used by CNNLayers
|
||||||
*
|
*
|
||||||
* @file ie_layers_property.hpp
|
* @file ie_layers_property.hpp
|
||||||
*/
|
*/
|
||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include <vector>
|
#include <vector>
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
|
#include <ie_common.h>
|
||||||
|
|
||||||
namespace InferenceEngine {
|
namespace InferenceEngine {
|
||||||
|
|
||||||
|
@ -400,7 +400,7 @@ StatusCode CNNNetworkImpl::serialize(const std::string& xmlPath, const std::stri
|
|||||||
std::const_pointer_cast<ICNNNetwork>(shared_from_this())));
|
std::const_pointer_cast<ICNNNetwork>(shared_from_this())));
|
||||||
return OK;
|
return OK;
|
||||||
#endif
|
#endif
|
||||||
} catch (const InferenceEngineException& e) {
|
} catch (const Exception& e) {
|
||||||
return DescriptionBuffer(GENERAL_ERROR, resp) << e.what();
|
return DescriptionBuffer(GENERAL_ERROR, resp) << e.what();
|
||||||
} catch (const std::exception& e) {
|
} catch (const std::exception& e) {
|
||||||
return DescriptionBuffer(UNEXPECTED, resp) << e.what();
|
return DescriptionBuffer(UNEXPECTED, resp) << e.what();
|
||||||
@ -448,7 +448,7 @@ StatusCode CNNNetworkImpl::setBatchSize(size_t size, ResponseDesc* responseDesc)
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
return OK;
|
return OK;
|
||||||
} catch (const InferenceEngineException& e) {
|
} catch (const Exception& e) {
|
||||||
return DescriptionBuffer(GENERAL_ERROR, responseDesc) << e.what();
|
return DescriptionBuffer(GENERAL_ERROR, responseDesc) << e.what();
|
||||||
} catch (const std::exception& e) {
|
} catch (const std::exception& e) {
|
||||||
return DescriptionBuffer(UNEXPECTED, responseDesc) << e.what();
|
return DescriptionBuffer(UNEXPECTED, responseDesc) << e.what();
|
||||||
@ -472,7 +472,7 @@ StatusCode CNNNetworkImpl::setBatchSizeReshape(size_t size, ResponseDesc* respon
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
return reshape(inputShapes, responseDesc);
|
return reshape(inputShapes, responseDesc);
|
||||||
} catch (const InferenceEngineException& e) {
|
} catch (const Exception& e) {
|
||||||
return DescriptionBuffer(GENERAL_ERROR, responseDesc) << e.what();
|
return DescriptionBuffer(GENERAL_ERROR, responseDesc) << e.what();
|
||||||
} catch (const std::exception& e) {
|
} catch (const std::exception& e) {
|
||||||
return DescriptionBuffer(UNEXPECTED, responseDesc) << e.what();
|
return DescriptionBuffer(UNEXPECTED, responseDesc) << e.what();
|
||||||
|
@ -36,7 +36,7 @@ void CNNLayer::parseParams() {
|
|||||||
try {
|
try {
|
||||||
LayerValidator::Ptr validator = LayerValidators::getInstance()->getValidator(type);
|
LayerValidator::Ptr validator = LayerValidators::getInstance()->getValidator(type);
|
||||||
validator->parseParams(this);
|
validator->parseParams(this);
|
||||||
} catch (const InferenceEngineException& ie_e) {
|
} catch (const Exception& ie_e) {
|
||||||
THROW_IE_EXCEPTION << "Error of validate layer: " << this->name << " with type: " << this->type << ". "
|
THROW_IE_EXCEPTION << "Error of validate layer: " << this->name << " with type: " << this->type << ". "
|
||||||
<< ie_e.what();
|
<< ie_e.what();
|
||||||
}
|
}
|
||||||
|
@ -91,7 +91,7 @@ Paddings getPaddingsInternal(const Layer& layer) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
return {layer._padding, layer._pads_end};
|
return {layer._padding, layer._pads_end};
|
||||||
} catch (const InferenceEngine::details::InferenceEngineException& iee) {
|
} catch (const InferenceEngine::Exception& iee) {
|
||||||
THROW_IE_EXCEPTION << errorPrefix << iee.what();
|
THROW_IE_EXCEPTION << errorPrefix << iee.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -19,12 +19,12 @@ namespace ngraph {
|
|||||||
namespace pass {
|
namespace pass {
|
||||||
namespace low_precision {
|
namespace low_precision {
|
||||||
|
|
||||||
class TRANSFORMATIONS_API InferenceEngineException : std::exception {
|
class TRANSFORMATIONS_API Exception : std::exception {
|
||||||
std::shared_ptr<std::ostringstream> buffer;
|
std::shared_ptr<std::ostringstream> buffer;
|
||||||
mutable std::string buffer_str;
|
mutable std::string buffer_str;
|
||||||
public:
|
public:
|
||||||
template <typename T>
|
template <typename T>
|
||||||
InferenceEngineException& operator<< (const T& x) {
|
Exception& operator<< (const T& x) {
|
||||||
*buffer << x;
|
*buffer << x;
|
||||||
return *this;
|
return *this;
|
||||||
}
|
}
|
||||||
@ -35,10 +35,10 @@ public:
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
#define THROW_TRANSFORMATION_EXCEPTION throw ::ngraph::pass::low_precision::InferenceEngineException() << __FILE__ << ":" << __LINE__ << " "
|
#define THROW_TRANSFORMATION_EXCEPTION throw ::ngraph::pass::low_precision::Exception() << __FILE__ << ":" << __LINE__ << " "
|
||||||
|
|
||||||
|
|
||||||
class TRANSFORMATIONS_API InferenceEngineLptException : public InferenceEngineException {
|
class TRANSFORMATIONS_API InferenceEngineLptException : public Exception {
|
||||||
public:
|
public:
|
||||||
InferenceEngineLptException(const std::string& filename, const size_t line, const Node& node) {
|
InferenceEngineLptException(const std::string& filename, const size_t line, const Node& node) {
|
||||||
*this
|
*this
|
||||||
|
@ -106,7 +106,7 @@ void Config::readProperties(const std::map<std::string, std::string> &prop) {
|
|||||||
<< ". Expected only YES/NO";
|
<< ". Expected only YES/NO";
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property " << key << " by CPU plugin";
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property " << key << " by CPU plugin";
|
||||||
}
|
}
|
||||||
_config.clear();
|
_config.clear();
|
||||||
}
|
}
|
||||||
|
@ -2,7 +2,8 @@
|
|||||||
// SPDX-License-Identifier: Apache-2.0
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
//
|
//
|
||||||
|
|
||||||
#include <details/ie_exception.hpp>
|
#include <ie_common.h>
|
||||||
|
|
||||||
#include "mkldnn_descriptor.h"
|
#include "mkldnn_descriptor.h"
|
||||||
|
|
||||||
mkldnn::primitive_desc_iterator MKLDNNDescriptor::createPrimitiveDescriptorIterator(const mkldnn::engine &engine,
|
mkldnn::primitive_desc_iterator MKLDNNDescriptor::createPrimitiveDescriptorIterator(const mkldnn::engine &engine,
|
||||||
|
@ -296,14 +296,14 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
|
|||||||
void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const InferenceEngine::Blob::Ptr &data) {
|
void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const InferenceEngine::Blob::Ptr &data) {
|
||||||
OV_ITT_SCOPED_TASK(itt::domains::MKLDNNPlugin, "SetBlob");
|
OV_ITT_SCOPED_TASK(itt::domains::MKLDNNPlugin, "SetBlob");
|
||||||
if (name.empty()) {
|
if (name.empty()) {
|
||||||
THROW_IE_EXCEPTION << NOT_FOUND_str + "Failed to set blob with empty name";
|
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Failed to set blob with empty name";
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!data)
|
if (!data)
|
||||||
THROW_IE_EXCEPTION << NOT_ALLOCATED_str << "Failed to set empty blob with name: \'" << name << "\'";
|
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) << "Failed to set empty blob with name: \'" << name << "\'";
|
||||||
const bool compoundBlobPassed = data->is<InferenceEngine::CompoundBlob>();
|
const bool compoundBlobPassed = data->is<InferenceEngine::CompoundBlob>();
|
||||||
if (!compoundBlobPassed && data->buffer() == nullptr)
|
if (!compoundBlobPassed && data->buffer() == nullptr)
|
||||||
THROW_IE_EXCEPTION << "Input data was not allocated. Input name: \'" << name << "\'";
|
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) << "Input data was not allocated. Input name: \'" << name << "\'";
|
||||||
if (data->size() == 0) {
|
if (data->size() == 0) {
|
||||||
THROW_IE_EXCEPTION << "Input data is empty. Input name: \'" << name << "\'";
|
THROW_IE_EXCEPTION << "Input data is empty. Input name: \'" << name << "\'";
|
||||||
}
|
}
|
||||||
@ -313,13 +313,13 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
|
|||||||
size_t dataSize = data->size();
|
size_t dataSize = data->size();
|
||||||
if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) {
|
if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) {
|
||||||
if (foundInput->getPrecision() != data->getTensorDesc().getPrecision()) {
|
if (foundInput->getPrecision() != data->getTensorDesc().getPrecision()) {
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set input blob with precision: "
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set input blob with precision: "
|
||||||
<< data->getTensorDesc().getPrecision() << ", if CNNNetwork input blob precision is: " << foundInput->getPrecision();
|
<< data->getTensorDesc().getPrecision() << ", if CNNNetwork input blob precision is: " << foundInput->getPrecision();
|
||||||
}
|
}
|
||||||
|
|
||||||
const bool preProcRequired = preProcessingRequired(foundInput, data);
|
const bool preProcRequired = preProcessingRequired(foundInput, data);
|
||||||
if (compoundBlobPassed && !preProcRequired) {
|
if (compoundBlobPassed && !preProcRequired) {
|
||||||
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str
|
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented)
|
||||||
<< "cannot set compound blob: supported only for input pre-processing";
|
<< "cannot set compound blob: supported only for input pre-processing";
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -341,12 +341,12 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (foundInput->getTensorDesc().getDims() != data->getTensorDesc().getDims()) {
|
if (foundInput->getTensorDesc().getDims() != data->getTensorDesc().getDims()) {
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set input blob. Dimensions mismatch.";
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set input blob. Dimensions mismatch.";
|
||||||
}
|
}
|
||||||
|
|
||||||
if (data->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY && foundInput->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY &&
|
if (data->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY && foundInput->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY &&
|
||||||
foundInput->getTensorDesc().getBlockingDesc() != data->getTensorDesc().getBlockingDesc()) {
|
foundInput->getTensorDesc().getBlockingDesc() != data->getTensorDesc().getBlockingDesc()) {
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set input blob. Blocking descriptor mismatch.";
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set input blob. Blocking descriptor mismatch.";
|
||||||
}
|
}
|
||||||
|
|
||||||
if (data->getTensorDesc().getPrecision() == InferenceEngine::Precision::FP32 &&
|
if (data->getTensorDesc().getPrecision() == InferenceEngine::Precision::FP32 &&
|
||||||
@ -359,11 +359,11 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
|
|||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if (compoundBlobPassed) {
|
if (compoundBlobPassed) {
|
||||||
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str
|
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented)
|
||||||
<< "cannot set compound blob: supported only for input pre-processing";
|
<< "cannot set compound blob: supported only for input pre-processing";
|
||||||
}
|
}
|
||||||
if (foundOutput->getPrecision() != data->getTensorDesc().getPrecision()) {
|
if (foundOutput->getPrecision() != data->getTensorDesc().getPrecision()) {
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set output blob with precision: "
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set output blob with precision: "
|
||||||
<< data->getTensorDesc().getPrecision() << ", if CNNNetwork output blob precision is: " << foundOutput->getPrecision();
|
<< data->getTensorDesc().getPrecision() << ", if CNNNetwork output blob precision is: " << foundOutput->getPrecision();
|
||||||
}
|
}
|
||||||
size_t outputSize = foundOutput->getTensorDesc().getLayout() != InferenceEngine::Layout::SCALAR
|
size_t outputSize = foundOutput->getTensorDesc().getLayout() != InferenceEngine::Layout::SCALAR
|
||||||
@ -374,11 +374,11 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
|
|||||||
<< dataSize << "!=" << outputSize << ").";
|
<< dataSize << "!=" << outputSize << ").";
|
||||||
}
|
}
|
||||||
if (foundOutput->getTensorDesc().getDims() != data->getTensorDesc().getDims()) {
|
if (foundOutput->getTensorDesc().getDims() != data->getTensorDesc().getDims()) {
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set output Blob. Dimensions mismatch.";
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set output Blob. Dimensions mismatch.";
|
||||||
}
|
}
|
||||||
if (data->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY && foundOutput->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY &&
|
if (data->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY && foundOutput->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY &&
|
||||||
foundOutput->getTensorDesc().getBlockingDesc() != data->getTensorDesc().getBlockingDesc()) {
|
foundOutput->getTensorDesc().getBlockingDesc() != data->getTensorDesc().getBlockingDesc()) {
|
||||||
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set output blob. Blocking descriptor mismatch.";
|
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set output blob. Blocking descriptor mismatch.";
|
||||||
}
|
}
|
||||||
if (data->getTensorDesc().getPrecision() == InferenceEngine::Precision::FP32 &&
|
if (data->getTensorDesc().getPrecision() == InferenceEngine::Precision::FP32 &&
|
||||||
!graph->getProperty().batchLimit) {
|
!graph->getProperty().batchLimit) {
|
||||||
|
@ -2,9 +2,10 @@
|
|||||||
// SPDX-License-Identifier: Apache-2.0
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
//
|
//
|
||||||
|
|
||||||
|
#include <ie_common.h>
|
||||||
|
|
||||||
#include "mkldnn_memory_solver.hpp"
|
#include "mkldnn_memory_solver.hpp"
|
||||||
|
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
|
|
||||||
#include <algorithm>
|
#include <algorithm>
|
||||||
#include <vector>
|
#include <vector>
|
||||||
|
@ -345,7 +345,7 @@ Engine::LoadExeNetworkImpl(const InferenceEngine::CNNNetwork &network, const std
|
|||||||
input_precision != InferenceEngine::Precision::BOOL &&
|
input_precision != InferenceEngine::Precision::BOOL &&
|
||||||
input_precision != InferenceEngine::Precision::I64 &&
|
input_precision != InferenceEngine::Precision::I64 &&
|
||||||
input_precision != InferenceEngine::Precision::U64) {
|
input_precision != InferenceEngine::Precision::U64) {
|
||||||
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str
|
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented)
|
||||||
<< "Input image format " << input_precision << " is not supported yet...";
|
<< "Input image format " << input_precision << " is not supported yet...";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -513,7 +513,7 @@ QueryNetworkResult Engine::QueryNetwork(const CNNNetwork& network, const std::ma
|
|||||||
std::unique_ptr<MKLDNNNode> ptr;
|
std::unique_ptr<MKLDNNNode> ptr;
|
||||||
try {
|
try {
|
||||||
ptr.reset(MKLDNNNode::factory().create(*itLayer, {mkldnn::engine::kind::cpu, 0}, extensionManager, fake_w_cache));
|
ptr.reset(MKLDNNNode::factory().create(*itLayer, {mkldnn::engine::kind::cpu, 0}, extensionManager, fake_w_cache));
|
||||||
} catch (InferenceEngine::details::InferenceEngineException&) {
|
} catch (InferenceEngine::Exception&) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
@ -569,7 +569,7 @@ QueryNetworkResult Engine::QueryNetwork(const CNNNetwork& network, const std::ma
|
|||||||
// if we can create and have not thrown exception, then layer is supported
|
// if we can create and have not thrown exception, then layer is supported
|
||||||
std::unique_ptr <MKLDNNNode>(MKLDNNNode::factory().create(*i, eng, extensionManager, fake_w_cache));
|
std::unique_ptr <MKLDNNNode>(MKLDNNNode::factory().create(*i, eng, extensionManager, fake_w_cache));
|
||||||
res.supportedLayersMap.insert({ (*i)->name, GetName() });
|
res.supportedLayersMap.insert({ (*i)->name, GetName() });
|
||||||
} catch (InferenceEngine::details::InferenceEngineException&) {
|
} catch (InferenceEngine::Exception&) {
|
||||||
}
|
}
|
||||||
i++;
|
i++;
|
||||||
}
|
}
|
||||||
|
@ -9,7 +9,6 @@
|
|||||||
#include <ie_common.h>
|
#include <ie_common.h>
|
||||||
#include <vector>
|
#include <vector>
|
||||||
#include <memory>
|
#include <memory>
|
||||||
#include <details/ie_exception.hpp>
|
|
||||||
|
|
||||||
namespace MKLDNNPlugin {
|
namespace MKLDNNPlugin {
|
||||||
|
|
||||||
|
@ -28,7 +28,7 @@ public:
|
|||||||
std::stoi(layer->params.at("axis")) :0;
|
std::stoi(layer->params.at("axis")) :0;
|
||||||
|
|
||||||
addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)});
|
addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)});
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -66,7 +66,7 @@ public:
|
|||||||
config.outConfs.push_back(outConfig);
|
config.outConfs.push_back(outConfig);
|
||||||
config.dynBatchSupport = false;
|
config.dynBatchSupport = false;
|
||||||
confs.push_back(config);
|
confs.push_back(config);
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -46,7 +46,7 @@ public:
|
|||||||
config.outConfs.push_back(outConfig);
|
config.outConfs.push_back(outConfig);
|
||||||
config.dynBatchSupport = false;
|
config.dynBatchSupport = false;
|
||||||
confs.push_back(config);
|
confs.push_back(config);
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -73,7 +73,7 @@ public:
|
|||||||
{ DataConfigurator(ConfLayout::PLN, input_precision), DataConfigurator(ConfLayout::PLN, boundaries_precision) },
|
{ DataConfigurator(ConfLayout::PLN, input_precision), DataConfigurator(ConfLayout::PLN, boundaries_precision) },
|
||||||
{ DataConfigurator(ConfLayout::PLN, output_precision) });
|
{ DataConfigurator(ConfLayout::PLN, output_precision) });
|
||||||
}
|
}
|
||||||
catch (InferenceEngine::details::InferenceEngineException &ex) {
|
catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -5,6 +5,7 @@
|
|||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include <ie_layouts.h>
|
#include <ie_layouts.h>
|
||||||
|
#include <functional>
|
||||||
|
|
||||||
namespace MKLDNNPlugin {
|
namespace MKLDNNPlugin {
|
||||||
|
|
||||||
|
@ -87,7 +87,7 @@ public:
|
|||||||
|
|
||||||
config.dynBatchSupport = false;
|
config.dynBatchSupport = false;
|
||||||
confs.push_back(config);
|
confs.push_back(config);
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -96,7 +96,7 @@ public:
|
|||||||
|
|
||||||
config.dynBatchSupport = false;
|
config.dynBatchSupport = false;
|
||||||
confs.push_back(config);
|
confs.push_back(config);
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -114,7 +114,7 @@ public:
|
|||||||
|
|
||||||
std::vector<DataConfigurator> in_data_conf(layer->insData.size(), DataConfigurator(ConfLayout::PLN, Precision::FP32));
|
std::vector<DataConfigurator> in_data_conf(layer->insData.size(), DataConfigurator(ConfLayout::PLN, Precision::FP32));
|
||||||
addConfig(layer, in_data_conf, {DataConfigurator(ConfLayout::PLN, Precision::FP32)});
|
addConfig(layer, in_data_conf, {DataConfigurator(ConfLayout::PLN, Precision::FP32)});
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -273,7 +273,7 @@ public:
|
|||||||
config.outConfs.push_back(dataS);
|
config.outConfs.push_back(dataS);
|
||||||
config.dynBatchSupport = false;
|
config.dynBatchSupport = false;
|
||||||
confs.push_back(config);
|
confs.push_back(config);
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -89,7 +89,7 @@ MKLDNNEmbeddingBagSum::MKLDNNEmbeddingBagSum(
|
|||||||
for (size_t i = 1lu; i < inDataDims.size(); i++) {
|
for (size_t i = 1lu; i < inDataDims.size(); i++) {
|
||||||
_embDepth *= inDataDims[i];
|
_embDepth *= inDataDims[i];
|
||||||
}
|
}
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -83,7 +83,7 @@ public:
|
|||||||
|
|
||||||
config.dynBatchSupport = false;
|
config.dynBatchSupport = false;
|
||||||
confs.push_back(config);
|
confs.push_back(config);
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -42,7 +42,7 @@ public:
|
|||||||
addConfig(layer, { DataConfigurator(ConfLayout::PLN, Precision::I32), DataConfigurator(ConfLayout::PLN) },
|
addConfig(layer, { DataConfigurator(ConfLayout::PLN, Precision::I32), DataConfigurator(ConfLayout::PLN) },
|
||||||
{ DataConfigurator(ConfLayout::PLN) });
|
{ DataConfigurator(ConfLayout::PLN) });
|
||||||
}
|
}
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -68,7 +68,7 @@ public:
|
|||||||
config.outConfs.push_back(dataConfigOut);
|
config.outConfs.push_back(dataConfigOut);
|
||||||
config.dynBatchSupport = false;
|
config.dynBatchSupport = false;
|
||||||
confs.push_back(config);
|
confs.push_back(config);
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -51,7 +51,7 @@ public:
|
|||||||
addConfig(layer, { DataConfigurator(ConfLayout::PLN, precision), DataConfigurator(ConfLayout::PLN, precision),
|
addConfig(layer, { DataConfigurator(ConfLayout::PLN, precision), DataConfigurator(ConfLayout::PLN, precision),
|
||||||
DataConfigurator(ConfLayout::PLN, precision), DataConfigurator(ConfLayout::PLN, precision) },
|
DataConfigurator(ConfLayout::PLN, precision), DataConfigurator(ConfLayout::PLN, precision) },
|
||||||
{ DataConfigurator(ConfLayout::PLN, precision) });
|
{ DataConfigurator(ConfLayout::PLN, precision) });
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -23,7 +23,7 @@ public:
|
|||||||
bias = layer->GetParamAsFloat("bias");
|
bias = layer->GetParamAsFloat("bias");
|
||||||
|
|
||||||
addConfig(layer, {{ConfLayout::PLN, false, 0, Precision::FP32}}, {{ConfLayout::PLN, false, 0, Precision::FP32}});
|
addConfig(layer, {{ConfLayout::PLN, false, 0, Precision::FP32}}, {{ConfLayout::PLN, false, 0, Precision::FP32}});
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -49,7 +49,7 @@ public:
|
|||||||
reduced_axis_stride *= dims[i];
|
reduced_axis_stride *= dims[i];
|
||||||
|
|
||||||
addConfig(layer, { { ConfLayout::PLN, false, 0, Precision::FP32 } }, { { ConfLayout::PLN, false, 0, Precision::FP32 } });
|
addConfig(layer, { { ConfLayout::PLN, false, 0, Precision::FP32 } }, { { ConfLayout::PLN, false, 0, Precision::FP32 } });
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -87,7 +87,7 @@ public:
|
|||||||
THROW_IE_EXCEPTION << layer->name << " Incorrect Math layer type!";
|
THROW_IE_EXCEPTION << layer->name << " Incorrect Math layer type!";
|
||||||
|
|
||||||
addConfig(layer, {DataConfigurator(ConfLayout::PLN, false, 0, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, false, 0, Precision::FP32)});
|
addConfig(layer, {DataConfigurator(ConfLayout::PLN, false, 0, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, false, 0, Precision::FP32)});
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -9,7 +9,6 @@
|
|||||||
#include <vector>
|
#include <vector>
|
||||||
#include <mkldnn_extension_utils.h>
|
#include <mkldnn_extension_utils.h>
|
||||||
|
|
||||||
#include "details/ie_exception.hpp"
|
|
||||||
#include <legacy/ie_layers.h>
|
#include <legacy/ie_layers.h>
|
||||||
#include "mkldnn.hpp"
|
#include "mkldnn.hpp"
|
||||||
#include "mkldnn/iml_type_mapper.h"
|
#include "mkldnn/iml_type_mapper.h"
|
||||||
|
@ -138,7 +138,7 @@ public:
|
|||||||
|
|
||||||
config.dynBatchSupport = false;
|
config.dynBatchSupport = false;
|
||||||
confs.push_back(config);
|
confs.push_back(config);
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -73,7 +73,7 @@ public:
|
|||||||
config.outConfs.push_back(dataConfig);
|
config.outConfs.push_back(dataConfig);
|
||||||
|
|
||||||
confs.push_back(config);
|
confs.push_back(config);
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception& ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -28,7 +28,7 @@ public:
|
|||||||
shift_.push_back(0);
|
shift_.push_back(0);
|
||||||
|
|
||||||
addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)});
|
addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)});
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -117,7 +117,7 @@ public:
|
|||||||
}
|
}
|
||||||
|
|
||||||
addConfig(layer, {{ConfLayout::ANY, true}, {ConfLayout::ANY, true}}, {{ConfLayout::PLN, true, -1, Precision::FP32}});
|
addConfig(layer, {{ConfLayout::ANY, true}, {ConfLayout::ANY, true}}, {{ConfLayout::PLN, true, -1, Precision::FP32}});
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -33,7 +33,7 @@ public:
|
|||||||
offset_ = layer->GetParamAsFloat("offset");
|
offset_ = layer->GetParamAsFloat("offset");
|
||||||
|
|
||||||
addConfig(layer, {{ConfLayout::PLN, true}, {ConfLayout::PLN, true}}, {{ConfLayout::PLN, true, -1, Precision::FP32}});
|
addConfig(layer, {{ConfLayout::PLN, true}, {ConfLayout::PLN, true}}, {{ConfLayout::PLN, true, -1, Precision::FP32}});
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -47,7 +47,7 @@ public:
|
|||||||
addConfig(layer,
|
addConfig(layer,
|
||||||
{DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::ANY), DataConfigurator(ConfLayout::ANY)},
|
{DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::ANY), DataConfigurator(ConfLayout::ANY)},
|
||||||
{DataConfigurator(ConfLayout::PLN, Precision::FP32)});
|
{DataConfigurator(ConfLayout::PLN, Precision::FP32)});
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -126,7 +126,7 @@ public:
|
|||||||
addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32),
|
addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32),
|
||||||
DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)});
|
DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)});
|
||||||
}
|
}
|
||||||
} catch (const InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (const InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -179,7 +179,7 @@ public:
|
|||||||
{img_H, img_W, scale_H, scale_W}, anchors.data(), roi_indices.data(), p_roi_item, p_prob_item, conf);
|
{img_H, img_W, scale_H, scale_W}, anchors.data(), roi_indices.data(), p_roi_item, p_prob_item, conf);
|
||||||
|
|
||||||
return OK;
|
return OK;
|
||||||
} catch (const InferenceEngine::details::InferenceEngineException& e) {
|
} catch (const InferenceEngine::Exception& e) {
|
||||||
if (resp) {
|
if (resp) {
|
||||||
std::string errorMsg = e.what();
|
std::string errorMsg = e.what();
|
||||||
errorMsg.copy(resp->msg, sizeof(resp->msg) - 1);
|
errorMsg.copy(resp->msg, sizeof(resp->msg) - 1);
|
||||||
|
@ -299,7 +299,7 @@ public:
|
|||||||
{DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32),
|
{DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32),
|
||||||
DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32)},
|
DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32)},
|
||||||
{DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32)});
|
{DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32)});
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -408,7 +408,7 @@ public:
|
|||||||
}
|
}
|
||||||
|
|
||||||
return OK;
|
return OK;
|
||||||
} catch (const InferenceEngine::details::InferenceEngineException& e) {
|
} catch (const std::exception& e) {
|
||||||
if (resp) {
|
if (resp) {
|
||||||
std::string errorMsg = e.what();
|
std::string errorMsg = e.what();
|
||||||
errorMsg.copy(resp->msg, sizeof(resp->msg) - 1);
|
errorMsg.copy(resp->msg, sizeof(resp->msg) - 1);
|
||||||
|
@ -89,7 +89,7 @@ public:
|
|||||||
DataConfigurator(ConfLayout::PLN, Precision::FP32),
|
DataConfigurator(ConfLayout::PLN, Precision::FP32),
|
||||||
DataConfigurator(ConfLayout::PLN)}, {DataConfigurator(ConfLayout::PLN, supportedPrecision)});
|
DataConfigurator(ConfLayout::PLN)}, {DataConfigurator(ConfLayout::PLN, supportedPrecision)});
|
||||||
}
|
}
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -54,7 +54,7 @@ public:
|
|||||||
addConfig(layer, { DataConfigurator(ConfLayout::PLN), DataConfigurator(ConfLayout::PLN), DataConfigurator(ConfLayout::PLN) },
|
addConfig(layer, { DataConfigurator(ConfLayout::PLN), DataConfigurator(ConfLayout::PLN), DataConfigurator(ConfLayout::PLN) },
|
||||||
{ DataConfigurator(ConfLayout::PLN) });
|
{ DataConfigurator(ConfLayout::PLN) });
|
||||||
}
|
}
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -310,7 +310,7 @@ public:
|
|||||||
logistic_kernel->create_ker();
|
logistic_kernel->create_ker();
|
||||||
|
|
||||||
addConfig(layer, {DataConfigurator(ConfLayout::PLN, input_prec)}, {DataConfigurator(ConfLayout::PLN, output_prec)});
|
addConfig(layer, {DataConfigurator(ConfLayout::PLN, input_prec)}, {DataConfigurator(ConfLayout::PLN, output_prec)});
|
||||||
} catch (InferenceEngine::details::InferenceEngineException &ex) {
|
} catch (InferenceEngine::Exception &ex) {
|
||||||
errorMsg = ex.what();
|
errorMsg = ex.what();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Some files were not shown because too many files have changed in this diff.