Simplified IE Exceptions Implementation (#4258)

This commit is contained in:
Anton Pankratv 2021-03-18 16:30:16 +03:00 committed by GitHub
parent d068810936
commit a2b8b974b8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
305 changed files with 1192 additions and 1585 deletions

View File

@ -3,7 +3,6 @@
//
#include "cpu_kernel.hpp"
#include "op.hpp"
#include <details/ie_exception.hpp>
#include <ie_layouts.h>
using namespace TemplateExtension;
@ -25,7 +24,7 @@ OpImplementation::OpImplementation(const std::shared_ptr<ngraph::Node> &node) {
add = castedNode->getAddAttr();
inShape = castedNode->get_input_shape(0);
outShape = castedNode->get_output_shape(0);
} catch (InferenceEngine::details::InferenceEngineException& ex) {
} catch (InferenceEngine::Exception& ex) {
error = ex.what();
}
}
@ -92,14 +91,15 @@ InferenceEngine::StatusCode OpImplementation::init(InferenceEngine::LayerConfig
}
if (config.inConfs[0].desc.getDims().size() != 4 || config.outConfs[0].desc.getDims().size() != 4) {
THROW_IE_EXCEPTION << "Operation can be initialized only with 4d input/output tensors!";
THROW_IE_EXCEPTION
<< "Operation can be initialized only with 4d input/output tensors!";
}
if (config.outConfs[0].desc.getPrecision() != InferenceEngine::Precision::FP32 ||
config.inConfs[0].desc.getPrecision() != InferenceEngine::Precision::FP32) {
THROW_IE_EXCEPTION << "Operation supports only FP32 precisions!";
}
} catch (InferenceEngine::details::InferenceEngineException&) {
} catch (InferenceEngine::Exception& ex) {
if (resp) {
strncpy(resp->msg, error.c_str(), sizeof(resp->msg) - 1);
resp->msg[sizeof(resp->msg)-1] = 0;

View File

@ -5,7 +5,6 @@
//! [fft_kernel:implementation]
#include "fft_kernel.hpp"
#include "fft_op.hpp"
#include <details/ie_exception.hpp>
#include <ie_layouts.h>
#include <opencv2/opencv.hpp>
@ -65,7 +64,7 @@ InferenceEngine::StatusCode FFTImpl::init(InferenceEngine::LayerConfig &config,
config.inConfs[0].desc.getPrecision() != InferenceEngine::Precision::FP32) {
THROW_IE_EXCEPTION << "Operation supports only FP32 precisions!";
}
} catch (InferenceEngine::details::InferenceEngineException&) {
} catch (InferenceEngine::Exception& ex) {
if (resp) {
strncpy(resp->msg, error.c_str(), sizeof(resp->msg) - 1);
resp->msg[sizeof(resp->msg)-1] = 0;

View File

@ -34,7 +34,7 @@ Configuration::Configuration(const ConfigMap& config, const Configuration & defa
} else if (CONFIG_KEY(PERF_COUNT) == key) {
perfCount = (CONFIG_VALUE(YES) == value);
} else if (throwOnUnsupported) {
THROW_IE_EXCEPTION << NOT_FOUND_str << ": " << key;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << ": " << key;
}
}
}
@ -53,6 +53,6 @@ InferenceEngine::Parameter Configuration::Get(const std::string& name) const {
} else if (name == CONFIG_KEY_INTERNAL(CPU_THREADS_PER_STREAM)) {
return {std::to_string(_streamsExecutorConfig._threadsPerStream)};
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << ": " << name;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << ": " << name;
}
}

View File

@ -27,7 +27,7 @@ TemplatePlugin::ExecutableNetwork::ExecutableNetwork(const std::shared_ptr<const
try {
CompileNetwork(function);
InitExecutor(); // creates thread-based executor using for async requests
} catch (const InferenceEngine::details::InferenceEngineException&) {
} catch (const InferenceEngine::Exception&) {
throw;
} catch (const std::exception & e) {
THROW_IE_EXCEPTION << "Standard exception from compilation library: " << e.what();
@ -74,7 +74,7 @@ TemplatePlugin::ExecutableNetwork::ExecutableNetwork(std::istream & model,
try {
CompileNetwork(cnnnetwork.getFunction());
InitExecutor(); // creates thread-based executor using for async requests
} catch (const InferenceEngine::details::InferenceEngineException&) {
} catch (const InferenceEngine::Exception&) {
throw;
} catch (const std::exception & e) {
THROW_IE_EXCEPTION << "Standard exception from compilation library: " << e.what();

View File

@ -215,7 +215,7 @@ InferenceEngine::QueryNetworkResult Plugin::QueryNetwork(const InferenceEngine::
// ! [plugin:add_extension]
void Plugin::AddExtension(InferenceEngine::IExtensionPtr /*extension*/) {
// TODO: add extensions if plugin supports extensions
THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED);
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented);
}
// ! [plugin:add_extension]

View File

@ -242,7 +242,8 @@ typedef enum {
RESULT_NOT_READY = -9,
NOT_ALLOCATED = -10,
INFER_NOT_STARTED = -11,
NETWORK_NOT_READ = -12
NETWORK_NOT_READ = -12,
INFER_CANCELLED = -13,
} IEStatusCode;
/**

View File

@ -16,7 +16,6 @@
#include <memory>
#include <ie_extension.h>
#include "inference_engine.hpp"
#include "details/ie_exception.hpp"
#include "ie_compound_blob.h"
#include "c_api/ie_c_api.h"
@ -119,6 +118,23 @@ std::map<IE::ColorFormat, colorformat_e> colorformat_map = {{IE::ColorFormat::RA
{IE::ColorFormat::NV12, colorformat_e::NV12},
{IE::ColorFormat::I420, colorformat_e::I420}};
// Expands to one catch clause that converts the given InferenceEngine exception
// type into the corresponding C-API IEStatusCode return value.
#define CATCH_IE_EXCEPTION(StatusCode, ExceptionType) catch (const IE::ExceptionType&) {return IEStatusCode::StatusCode;}
// Catch-ladder covering every typed InferenceEngine exception, mapping each to
// its matching IEStatusCode. Used throughout the C API as
// `} CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; }`,
// so unknown exceptions still fall through to the generic UNEXPECTED handler.
#define CATCH_IE_EXCEPTIONS \
CATCH_IE_EXCEPTION(GENERAL_ERROR, GeneralError) \
CATCH_IE_EXCEPTION(NOT_IMPLEMENTED, NotImplemented) \
CATCH_IE_EXCEPTION(NETWORK_NOT_LOADED, NetworkNotLoaded) \
CATCH_IE_EXCEPTION(PARAMETER_MISMATCH, ParameterMismatch) \
CATCH_IE_EXCEPTION(NOT_FOUND, NotFound) \
CATCH_IE_EXCEPTION(OUT_OF_BOUNDS, OutOfBounds) \
CATCH_IE_EXCEPTION(UNEXPECTED, Unexpected) \
CATCH_IE_EXCEPTION(REQUEST_BUSY, RequestBusy) \
CATCH_IE_EXCEPTION(RESULT_NOT_READY, ResultNotReady) \
CATCH_IE_EXCEPTION(NOT_ALLOCATED, NotAllocated) \
CATCH_IE_EXCEPTION(INFER_NOT_STARTED, InferNotStarted) \
CATCH_IE_EXCEPTION(NETWORK_NOT_READ, NetworkNotRead) \
CATCH_IE_EXCEPTION(INFER_CANCELLED, InferCancelled)
/**
*@brief convert the config type data to map type data.
*/
@ -222,9 +238,7 @@ IEStatusCode ie_core_create(const char *xml_config_file, ie_core_t **core) {
std::unique_ptr<ie_core_t> tmp(new ie_core_t);
tmp->object = IE::Core(xml_config_file);
*core = tmp.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -268,9 +282,7 @@ IEStatusCode ie_core_get_versions(const ie_core_t *core, const char *device_name
vers_ptrs[i].description = iter->second.description;
}
versions->versions = vers_ptrs.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -303,9 +315,7 @@ IEStatusCode ie_core_read_network(ie_core_t *core, const char *xml, const char *
}
network_result->object = core->object.ReadNetwork(xml, bin);
*network = network_result.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -325,9 +335,7 @@ IEStatusCode ie_core_read_network_from_memory(ie_core_t *core, const uint8_t *xm
network_result->object = core->object.ReadNetwork(std::string(reinterpret_cast<const char *>(xml_content),
reinterpret_cast<const char *>(xml_content + xml_content_size)), weight_blob->object);
*network = network_result.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -351,9 +359,7 @@ IEStatusCode ie_core_load_network(ie_core_t *core, const ie_network_t *network,
// create plugin in the registery and then create ExecutableNetwork.
exe_net->object = core->object.LoadNetwork(network->object, device_name, conf_map);
*exe_network = exe_net.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -376,9 +382,7 @@ IEStatusCode ie_core_set_config(ie_core_t *core, const ie_config_t *ie_core_conf
try {
core->object.SetConfig(conf_map, deviceName);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -395,9 +399,7 @@ IEStatusCode ie_core_register_plugin(ie_core_t *core, const char *plugin_name, c
try {
core->object.RegisterPlugin(plugin_name, device_name);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -414,9 +416,7 @@ IEStatusCode ie_core_register_plugins(ie_core_t *core, const char *xml_config_fi
try {
core->object.RegisterPlugins(xml_config_file);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -433,9 +433,7 @@ IEStatusCode ie_core_unregister_plugin(ie_core_t *core, const char *device_name)
try {
core->object.UnregisterPlugin(device_name);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -454,9 +452,7 @@ IEStatusCode ie_core_add_extension(ie_core_t *core, const char *extension_path,
auto extension_ptr = std::make_shared<InferenceEngine::Extension>(std::string{extension_path});
auto extension = std::dynamic_pointer_cast<InferenceEngine::IExtension>(extension_ptr);
core->object.AddExtension(extension, device_name);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -474,9 +470,7 @@ IEStatusCode ie_core_get_metric(const ie_core_t *core, const char *device_name,
try {
IE::Parameter param = core->object.GetMetric(device_name, metric_name);
parameter2IEparam(param, param_result);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -496,9 +490,7 @@ IEStatusCode ie_core_get_config(const ie_core_t *core, const char *device_name,
// convert the parameter to ie_param_t
parameter2IEparam(param, param_result);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -522,11 +514,7 @@ IEStatusCode ie_core_get_available_devices(const ie_core_t *core, ie_available_d
memcpy(dev_ptrs[i], _devices[i].c_str(), _devices[i].length() + 1);
}
avai_devices->devices = dev_ptrs.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (const std::exception&) {
return IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -565,9 +553,7 @@ IEStatusCode ie_exec_network_create_infer_request(ie_executable_network_t *ie_ex
std::unique_ptr<ie_infer_request_t> req(new ie_infer_request_t);
req->object = ie_exec_network->object.CreateInferRequest();
*request = req.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -585,9 +571,7 @@ IEStatusCode ie_exec_network_get_metric(const ie_executable_network_t *ie_exec_n
try {
InferenceEngine::Parameter parameter = ie_exec_network->object.GetMetric(metric_name);
parameter2IEparam(parameter, param_result);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -605,9 +589,7 @@ IEStatusCode ie_exec_network_set_config(ie_executable_network_t *ie_exec_network
try {
const std::map<std::string, IE::Parameter> conf_map = config2ParamMap(param_config);
ie_exec_network->object.SetConfig(conf_map);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -625,9 +607,7 @@ IEStatusCode ie_exec_network_get_config(const ie_executable_network_t *ie_exec_n
try {
InferenceEngine::Parameter parameter = ie_exec_network->object.GetConfig(metric_config);
parameter2IEparam(parameter, param_result);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -651,9 +631,7 @@ IEStatusCode ie_network_get_name(const ie_network_t *network, char **name) {
std::unique_ptr<char[]> netName(new char[_name.length() + 1]);
*name = netName.release();
memcpy(*name, _name.c_str(), _name.length() + 1);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -670,9 +648,7 @@ IEStatusCode ie_network_get_inputs_number(const ie_network_t *network, size_t *s
try {
IE::InputsDataMap inputs = network->object.getInputsInfo();
*size_result = inputs.size();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -701,9 +677,7 @@ IEStatusCode ie_network_get_input_name(const ie_network_t *network, size_t numbe
*name = inputName.release();
memcpy(*name, iter->first.c_str(), iter->first.length() + 1);
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -726,9 +700,7 @@ IEStatusCode ie_network_get_input_precision(const ie_network_t *network, const c
IE::Precision p = inputs[input_name]->getPrecision();
*prec_result = precision_map[p];
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -757,9 +729,7 @@ IEStatusCode ie_network_set_input_precision(ie_network_t *network, const char *i
}
inputs[input_name]->setPrecision(precision);
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -782,9 +752,7 @@ IEStatusCode ie_network_get_input_layout(const ie_network_t *network, const char
IE::Layout l = inputs[input_name]->getLayout();
*layout_result = layout_map[l];
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -813,9 +781,7 @@ IEStatusCode ie_network_set_input_layout(ie_network_t *network, const char *inpu
}
inputs[input_name]->setLayout(layout);
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -841,9 +807,7 @@ IEStatusCode ie_network_get_input_dims(const ie_network_t *network, const char *
dims_result->dims[i] = dims[i];
}
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -866,9 +830,7 @@ IEStatusCode ie_network_get_input_resize_algorithm(const ie_network_t *network,
IE::ResizeAlgorithm resize = inputs[input_name]->getPreProcess().getResizeAlgorithm();
*resize_alg_result = resize_alg_map[resize];
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -897,9 +859,7 @@ IEStatusCode ie_network_set_input_resize_algorithm(ie_network_t *network, const
}
inputs[input_name]->getPreProcess().setResizeAlgorithm(resize);
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -922,9 +882,7 @@ IEStatusCode ie_network_get_color_format(const ie_network_t *network, const char
IE::ColorFormat color = inputs[input_name]->getPreProcess().getColorFormat();
*colformat_result = colorformat_map[color];
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -953,9 +911,7 @@ IEStatusCode ie_network_set_color_format(ie_network_t *network, const char *inpu
}
inputs[input_name]->getPreProcess().setColorFormat(color);
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -993,9 +949,7 @@ IEStatusCode ie_network_get_input_shapes(ie_network *network, input_shapes_t *sh
}
shapes->shapes = shape_ptrs.release();
status = IEStatusCode::OK;
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1022,9 +976,7 @@ IEStatusCode ie_network_reshape(ie_network_t *network, const input_shapes_t shap
}
network->object.reshape(net_shapes);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1042,9 +994,7 @@ IEStatusCode ie_network_get_outputs_number(const ie_network_t *network, size_t *
try {
IE::OutputsDataMap outputs = network->object.getOutputsInfo();
*size_result = outputs.size();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1073,9 +1023,7 @@ IEStatusCode ie_network_get_output_name(const ie_network_t *network, const size_
*name = outputName.release();
memcpy(*name, iter->first.c_str(), iter->first.length() + 1);
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1098,9 +1046,7 @@ IEStatusCode ie_network_get_output_precision(const ie_network_t *network, const
IE::Precision p = outputs[output_name]->getPrecision();
*prec_result = precision_map[p];
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1129,9 +1075,7 @@ IEStatusCode ie_network_set_output_precision(ie_network_t *network, const char *
}
outputs[output_name]->setPrecision(precision);
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1154,9 +1098,7 @@ IEStatusCode ie_network_get_output_layout(const ie_network_t *network, const cha
IE::Layout l = outputs[output_name]->getLayout();
*layout_result = layout_map[l];
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1185,9 +1127,7 @@ IEStatusCode ie_network_set_output_layout(ie_network_t *network, const char *out
}
outputs[output_name]->setLayout(layout);
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1213,9 +1153,7 @@ IEStatusCode ie_network_get_output_dims(const ie_network_t *network, const char
dims_result->dims[i] = dims[i];
}
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1260,9 +1198,7 @@ IEStatusCode ie_infer_request_get_blob(ie_infer_request_t *infer_request, const
std::unique_ptr<ie_blob_t> blob_result(new ie_blob_t);
blob_result->object = blob_ptr;
*blob = blob_result.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1279,9 +1215,7 @@ IEStatusCode ie_infer_request_set_blob(ie_infer_request_t *infer_request, const
try {
infer_request->object.SetBlob(name, blob->object);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1298,9 +1232,7 @@ IEStatusCode ie_infer_request_infer(ie_infer_request_t *infer_request) {
try {
infer_request->object.Infer();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1317,9 +1249,7 @@ IEStatusCode ie_infer_request_infer_async(ie_infer_request_t *infer_request) {
try {
infer_request->object.StartAsync();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1339,9 +1269,7 @@ IEStatusCode ie_infer_set_completion_callback(ie_infer_request_t *infer_request,
callback->completeCallBackFunc(callback->args);
};
infer_request->object.SetCompletionCallback(fun);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1359,9 +1287,7 @@ IEStatusCode ie_infer_request_wait(ie_infer_request_t *infer_request, const int6
try {
IE::StatusCode status_code = infer_request->object.Wait(timeout);
status = status_map[status_code];
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1378,9 +1304,7 @@ IEStatusCode ie_infer_request_set_batch(ie_infer_request_t *infer_request, const
try {
infer_request->object.SetBatch(size);
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1444,9 +1368,7 @@ IEStatusCode ie_blob_make_memory(const tensor_desc_t *tensorDesc, ie_blob_t **bl
_blob->object->allocate();
*blob = _blob.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1518,9 +1440,7 @@ IEStatusCode ie_blob_make_memory_from_preallocated(const tensor_desc_t *tensorDe
_blob->object = IE::make_shared_blob(tensor, p, size);
}
*blob = _blob.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1538,9 +1458,7 @@ IEStatusCode ie_blob_make_memory_with_roi(const ie_blob_t *inputBlob, const roi_
IE::ROI roi_d = {roi->id, roi->posX, roi->posY, roi->sizeX, roi->sizeY};
_blob->object = IE::make_shared_blob(inputBlob->object, roi_d);
*blob = _blob.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1556,9 +1474,7 @@ IEStatusCode ie_blob_make_memory_nv12(const ie_blob_t *y, const ie_blob_t *uv, i
std::unique_ptr<ie_blob_t> _blob(new ie_blob_t);
_blob->object = IE::make_shared_blob<IE::NV12Blob>(y->object, uv->object);
*nv12Blob = _blob.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1574,9 +1490,7 @@ IEStatusCode ie_blob_make_memory_i420(const ie_blob_t *y, const ie_blob_t *u, co
std::unique_ptr<ie_blob_t> _blob(new ie_blob_t);
_blob->object = IE::make_shared_blob<IE::I420Blob>(y->object, u->object, v->object);
*i420Blob = _blob.release();
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1651,9 +1565,7 @@ IEStatusCode ie_blob_get_dims(const ie_blob_t *blob, dimensions_t *dims_result)
for (size_t i = 0; i< dims_result->ranks; ++i) {
dims_result->dims[i] = size_vector[i];
}
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1671,9 +1583,7 @@ IEStatusCode ie_blob_get_layout(const ie_blob_t *blob, layout_e *layout_result)
try {
IE::Layout l = blob->object->getTensorDesc().getLayout();
*layout_result = layout_map[l];
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}
@ -1691,9 +1601,7 @@ IEStatusCode ie_blob_get_precision(const ie_blob_t *blob, precision_e *prec_resu
try {
IE::Precision p = blob->object->getTensorDesc().getPrecision();
*prec_result = precision_map[p];
} catch (const IE::details::InferenceEngineException& e) {
return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED;
} catch (...) {
} CATCH_IE_EXCEPTIONS catch (...) {
return IEStatusCode::UNEXPECTED;
}

View File

@ -383,7 +383,10 @@ void InferenceEnginePython::InferRequestWrap::setBatch(int size) {
void latency_callback(InferenceEngine::IInferRequest::Ptr request, InferenceEngine::StatusCode code) {
if (code != InferenceEngine::StatusCode::OK) {
THROW_IE_EXCEPTION << "Async Infer Request failed with status code " << code;
IE_EXCEPTION_SWITCH(code, ExceptionType,
InferenceEngine::details::ThrowNow<ExceptionType>{}
<<= std::stringstream{} << IE_LOCATION
<< InferenceEngine::details::ExceptionTraits<ExceptionType>::string());
}
InferenceEnginePython::InferRequestWrap *requestWrap;
InferenceEngine::ResponseDesc dsc;

View File

@ -19,7 +19,6 @@
#include "ie_blob.h"
#include "ie_common.h"
#include "ie_data.h"
#include "details/ie_exception_conversion.hpp"
#include "ie_extension.h"
namespace ngraph {

View File

@ -19,7 +19,6 @@
#include "cpp/ie_infer_request.hpp"
#include "cpp/ie_memory_state.hpp"
#include "ie_iexecutable_network.hpp"
#include "details/ie_exception_conversion.hpp"
#include "details/ie_so_loader.h"
namespace InferenceEngine {

View File

@ -16,7 +16,6 @@
#include "cpp/ie_memory_state.hpp"
#include "ie_remote_context.hpp"
#include "ie_iinfer_request.hpp"
#include "details/ie_exception_conversion.hpp"
#include "details/ie_so_loader.h"
#include "ie_blob.h"
@ -245,7 +244,9 @@ public:
auto res = actual->Wait(millis_timeout, &resp);
if (res != OK && res != RESULT_NOT_READY &&
res != INFER_NOT_STARTED && res != INFER_CANCELLED) {
THROW_IE_EXCEPTION << InferenceEngine::details::as_status << res << resp.msg;
IE_EXCEPTION_SWITCH(res, ExceptionType,
InferenceEngine::details::ThrowNow<ExceptionType>{}
<<= std::stringstream{} << IE_LOCATION << resp.msg)
}
return res;
}

View File

@ -13,7 +13,6 @@
#include <string>
#include "ie_blob.h"
#include "details/ie_exception_conversion.hpp"
#include "details/ie_so_loader.h"
namespace InferenceEngine {

View File

@ -2,168 +2,6 @@
// SPDX-License-Identifier: Apache-2.0
//
/**
* @brief A header file for the main Inference Engine exception
*
* @file ie_exception.hpp
*/
#pragma once
#include "ie_api.h"
#include <functional>
#include <memory>
#include <sstream>
#include <string>
#include <utility>
#include <vector>
/**
* @def THROW_IE_EXCEPTION
* @brief A macro used to throw general exception with a description
*/
#define THROW_IE_EXCEPTION throw InferenceEngine::details::InferenceEngineException(__FILE__, __LINE__)
/**
* @def IE_ASSERT
* @brief Uses assert() function if NDEBUG is not defined, InferenceEngine exception otherwise
*/
#ifdef NDEBUG
#define IE_ASSERT(EXPRESSION) \
if (!(EXPRESSION)) \
throw InferenceEngine::details::InferenceEngineException(__FILE__, __LINE__) \
<< "AssertionFailed: " << #EXPRESSION // NOLINT
#else
#include <cassert>
/**
* @private
*/
class NullStream {
public:
template <class T>
NullStream& operator<<(const T&) noexcept {
return *this;
}
NullStream& operator<<(std::ostream& (*)(std::ostream&)) noexcept {
return *this;
}
};
#define IE_ASSERT(EXPRESSION) \
assert((EXPRESSION)); \
NullStream()
#endif // NDEBUG
namespace InferenceEngine {
enum StatusCode : int;
namespace details {
/**
* @brief The InferenceEngineException class implements the main Inference Engine exception
*/
class INFERENCE_ENGINE_API_CLASS(InferenceEngineException): public std::exception {
    // Cached result of what(); mutable because what() is const and builds it lazily.
    mutable std::string errorDesc;
    // Optional StatusCode attached via the as_status manipulator; 0 means "no status".
    StatusCode status_code = static_cast<StatusCode>(0);
    std::string _file;
    int _line;
    // Message is accumulated here; held by shared_ptr so the copy ctor can be noexcept.
    std::shared_ptr<std::stringstream> exception_stream;
    // When true, the next operator<< argument is consumed as a StatusCode
    // instead of being appended to the message (set by as_status).
    bool save_to_status_code = false;

public:
    /**
     * @brief A C++ std::exception API member
     * @return An exception description with a file name and file line
     */
    const char* what() const noexcept override {
        if (errorDesc.empty() && exception_stream) {
            errorDesc = exception_stream->str();
#ifndef NDEBUG
            // Debug builds append the throw site to the message.
            errorDesc += "\n" + _file + ":" + std::to_string(_line);
#endif
        }
        return errorDesc.c_str();
    }

    /**
     * @brief A constructor. Creates an InferenceEngineException object from a specific file and line
     * @param filename File where exception has been thrown
     * @param line Line of the exception emitter
     * @param message Exception message
     */
    InferenceEngineException(const std::string& filename, const int line, const std::string& message = "") noexcept;

    /**
     * @brief noexcept required for copy ctor
     * @details The C++ Standard, [except.throw], paragraph 3 [ISO/IEC 14882-2014]
     */
    InferenceEngineException(const InferenceEngineException& that) noexcept;

    /**
     * @brief A stream output operator to be used within exception
     * @param arg Object for serialization in the exception message
     */
    template <class T>
    InferenceEngineException& operator<<(const T& arg) {
        if (save_to_status_code) {
            // as_status was streamed just before this argument: try to consume
            // it as a StatusCode; only a real StatusCode converts (see
            // status_code_assign overloads below).
            auto can_convert = status_code_assign(arg);
            save_to_status_code = false;
            if (can_convert.second) {
                this->status_code = can_convert.first;
                return *this;
            }
        }
        if (!exception_stream) {
            exception_stream.reset(new std::stringstream());
        }
        (*exception_stream) << arg;
        return *this;
    }

    /**
     * @brief Manipulator to indicate that next item has to be converted to StatusCode to save
     * @param iex InferenceEngineException object
     */
    friend InferenceEngineException& as_status(InferenceEngineException& iex) {
        iex.save_to_status_code = true;
        return iex;
    }

    /**
     * @brief A stream output operator to catch InferenceEngineException manipulators
     * @param manip InferenceEngineException manipulator to call
     */
    InferenceEngineException& operator<<(InferenceEngineException& (*manip)(InferenceEngineException&)) {
        return manip(*this);
    }

    /** @brief Check if it has StatusCode value */
    bool hasStatus() const {
        return this->status_code == 0 ? false : true;
    }

    /** @brief Get StatusCode value */
    StatusCode getStatus() const {
        return this->status_code;
    }

    ~InferenceEngineException() noexcept override;

private:
    // Overload chosen when the streamed argument really is a StatusCode.
    std::pair<StatusCode, bool> status_code_assign(const StatusCode& status) {
        return {status, true};
    }

    // Fallback for any other type: reports "not convertible".
    template <typename T>
    std::pair<StatusCode, bool> status_code_assign(const T&) {
        return {static_cast<StatusCode>(0), false};
    }
};
InferenceEngineException& as_status(InferenceEngineException& iex);
static_assert(std::is_nothrow_copy_constructible<InferenceEngineException>::value,
"InferenceEngineException must be nothrow copy constructible");
} // namespace details
} // namespace InferenceEngine
#include "ie_common.h"

View File

@ -1,71 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
/**
* @brief A header file that provides macros to handle no exception methods
*
* @file ie_exception_conversion.hpp
*/
#pragma once
#include "ie_common.h"
#include "details/ie_exception.hpp"
#define CALL_STATUS_FNC(function, ...) \
if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATUS_FNC was not initialized."; \
ResponseDesc resp; \
auto res = actual->function(__VA_ARGS__, &resp); \
if (res != OK) InferenceEngine::details::extract_exception(res, resp.msg);
#define CALL_STATUS_FNC_NO_ARGS(function) \
if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATUS_FNC_NO_ARGS was not initialized."; \
ResponseDesc resp; \
auto res = actual->function(&resp); \
if (res != OK) InferenceEngine::details::extract_exception(res, resp.msg);
#define CALL_FNC_NO_ARGS(function) \
if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_FNC_NO_ARGS was not initialized."; \
ResponseDesc resp; \
auto result = actual->function(&resp); \
if (resp.msg[0] != '\0') { \
THROW_IE_EXCEPTION << resp.msg; \
} \
return result;
namespace InferenceEngine {
namespace details {
/**
 * @brief Rethrows a legacy StatusCode as the matching typed IE exception.
 * @param status Status code returned by a status-based API call
 * @param msg    Message text to carry inside the thrown exception
 */
inline void extract_exception(StatusCode status, const char* msg) {
    if (status == NOT_IMPLEMENTED) throw NotImplemented(msg);
    if (status == NETWORK_NOT_LOADED) throw NetworkNotLoaded(msg);
    if (status == PARAMETER_MISMATCH) throw ParameterMismatch(msg);
    if (status == NOT_FOUND) throw NotFound(msg);
    if (status == OUT_OF_BOUNDS) throw OutOfBounds(msg);
    if (status == UNEXPECTED) throw Unexpected(msg);
    if (status == REQUEST_BUSY) throw RequestBusy(msg);
    if (status == RESULT_NOT_READY) throw ResultNotReady(msg);
    if (status == NOT_ALLOCATED) throw NotAllocated(msg);
    if (status == INFER_NOT_STARTED) throw InferNotStarted(msg);
    if (status == NETWORK_NOT_READ) throw NetworkNotRead(msg);
    if (status == INFER_CANCELLED) throw InferCancelled(msg);
    // Any other status (e.g. GENERAL_ERROR) falls back to the generic
    // exception, preserving the original status via the as_status manipulator.
    THROW_IE_EXCEPTION << msg << InferenceEngine::details::as_status << status;
}
} // namespace details
} // namespace InferenceEngine

View File

@ -12,7 +12,6 @@
#include <memory>
#include "ie_allocator.hpp"
#include "details/ie_exception.hpp"
namespace InferenceEngine {
namespace details {

View File

@ -52,7 +52,7 @@ public:
* @brief Searches for a function symbol in the loaded module
* @param symbolName Name of function to find
* @return A pointer to the function if found
* @throws InferenceEngineException if the function is not found
* @throws Exception if the function is not found
*/
void* get_symbol(const char* symbolName) const;
};

View File

@ -12,10 +12,10 @@
#include <memory>
#include <string>
#include <type_traits>
#include <functional>
#include "ie_common.h"
#include "ie_so_loader.h"
#include "details/ie_exception.hpp"
namespace InferenceEngine {
namespace details {
@ -150,6 +150,22 @@ public:
}
protected:
#define CATCH_IE_EXCEPTION(ExceptionType) catch (const InferenceEngine::ExceptionType& e) {throw e;}
#define CATCH_IE_EXCEPTIONS \
CATCH_IE_EXCEPTION(GeneralError) \
CATCH_IE_EXCEPTION(NotImplemented) \
CATCH_IE_EXCEPTION(NetworkNotLoaded) \
CATCH_IE_EXCEPTION(ParameterMismatch) \
CATCH_IE_EXCEPTION(NotFound) \
CATCH_IE_EXCEPTION(OutOfBounds) \
CATCH_IE_EXCEPTION(Unexpected) \
CATCH_IE_EXCEPTION(RequestBusy) \
CATCH_IE_EXCEPTION(ResultNotReady) \
CATCH_IE_EXCEPTION(NotAllocated) \
CATCH_IE_EXCEPTION(InferNotStarted) \
CATCH_IE_EXCEPTION(NetworkNotRead) \
CATCH_IE_EXCEPTION(InferCancelled)
/**
* @brief Implements load of object from library if Release method is presented
*/
@ -158,13 +174,7 @@ protected:
void* create = nullptr;
try {
create = _so_loader->get_symbol((SOCreatorTrait<T>::name + std::string("Shared")).c_str());
} catch (const details::InferenceEngineException& ex) {
if ((ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) == NOT_FOUND) {
create = nullptr;
} else {
throw;
}
}
} catch (const NotFound&) {}
if (create == nullptr) {
create = _so_loader->get_symbol(SOCreatorTrait<T>::name);
using CreateF = StatusCode(T*&, ResponseDesc*);
@ -172,7 +182,8 @@ protected:
ResponseDesc desc;
StatusCode sts = reinterpret_cast<CreateF*>(create)(object, &desc);
if (sts != OK) {
THROW_IE_EXCEPTION << as_status << sts << desc.msg;
IE_EXCEPTION_SWITCH(sts, ExceptionType,
InferenceEngine::details::ThrowNow<ExceptionType>{} <<= std::stringstream{} << IE_LOCATION << desc.msg)
}
IE_SUPPRESS_DEPRECATED_START
_pointedObj = std::shared_ptr<T>(object, [] (T* ptr){ptr->Release();});
@ -181,12 +192,10 @@ protected:
using CreateF = void(std::shared_ptr<T>&);
reinterpret_cast<CreateF*>(create)(_pointedObj);
}
} catch (const InferenceEngineException& ex) {
THROW_IE_EXCEPTION << as_status << (ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) << ex.what();
} catch (const std::exception& ex) {
THROW_IE_EXCEPTION << as_status << GENERAL_ERROR << ex.what();
} CATCH_IE_EXCEPTIONS catch (const std::exception& ex) {
THROW_IE_EXCEPTION << ex.what();
} catch(...) {
THROW_IE_EXCEPTION << as_status << UNEXPECTED << "[ UNEXPECTED ] ";
THROW_IE_EXCEPTION_WITH_STATUS(Unexpected);
}
}
@ -197,14 +206,14 @@ protected:
try {
using CreateF = void(std::shared_ptr<T>&);
reinterpret_cast<CreateF*>(_so_loader->get_symbol(SOCreatorTrait<T>::name))(_pointedObj);
} catch (const InferenceEngineException& ex) {
THROW_IE_EXCEPTION << as_status << (ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) << ex.what();
} catch (const std::exception& ex) {
THROW_IE_EXCEPTION << as_status << GENERAL_ERROR << ex.what();
} CATCH_IE_EXCEPTIONS catch (const std::exception& ex) {
THROW_IE_EXCEPTION << ex.what();
} catch(...) {
THROW_IE_EXCEPTION << as_status << UNEXPECTED << "[ UNEXPECTED ] ";
THROW_IE_EXCEPTION_WITH_STATUS(Unexpected);
}
}
#undef CATCH_IE_EXCEPTION
#undef CATCH_IE_EXCEPTIONS
/**
* @brief Gets a smart pointer to the DLL

View File

@ -25,7 +25,6 @@
#include "ie_locked_memory.hpp"
#include "ie_precision.hpp"
#include "details/ie_blob_iterator.hpp"
#include "details/ie_exception.hpp"
#include "details/ie_pre_allocator.hpp"
namespace InferenceEngine {

View File

@ -16,7 +16,14 @@
#include <string>
#include <vector>
#include <map>
#include <sstream>
#include <stdexcept>
#include <iterator>
#include <ie_api.h>
#ifndef NDEBUG
#include <cassert>
#endif
namespace InferenceEngine {
/**
* @brief Represents tensor size.
@ -274,73 +281,211 @@ struct QueryNetworkResult {
ResponseDesc resp;
};
/** @brief This class represents StatusCode::GENERAL_ERROR exception */
class GeneralError : public std::logic_error {
using std::logic_error::logic_error;
namespace details {
struct INFERENCE_ENGINE_DEPRECATED("Use InferRequest::Exception")
INFERENCE_ENGINE_API_CLASS(InferenceEngineException) : public std::runtime_error {
using std::runtime_error::runtime_error;
bool hasStatus() const {return true;}
StatusCode getStatus() const;
};
} // namespace details
/**
* @brief Base Inference Engine exception class
*/
IE_SUPPRESS_DEPRECATED_START
struct INFERENCE_ENGINE_API_CLASS(Exception) : public details::InferenceEngineException {
using InferenceEngineException::InferenceEngineException;
};
IE_SUPPRESS_DEPRECATED_END
/// @cond
namespace details {
template<typename ExceptionType> struct ExceptionTraits;
}
#define INFERENCE_ENGINE_DECLARE_EXCEPTION(ExceptionType, statusCode) \
struct INFERENCE_ENGINE_API_CLASS(ExceptionType) final : public InferenceEngine::Exception { \
using Exception::Exception; \
}; \
namespace details { \
template<> struct ExceptionTraits<ExceptionType> { \
static const char* string() {return "[ " #statusCode " ]";} \
}; \
}
/// @endcond
/** @brief This class represents StatusCode::GENERAL_ERROR exception */
INFERENCE_ENGINE_DECLARE_EXCEPTION(GeneralError, GENERAL_ERROR)
/** @brief This class represents StatusCode::NOT_IMPLEMENTED exception */
class NotImplemented : public std::logic_error {
using std::logic_error::logic_error;
};
INFERENCE_ENGINE_DECLARE_EXCEPTION(NotImplemented, NOT_IMPLEMENTED)
/** @brief This class represents StatusCode::NETWORK_NOT_LOADED exception */
class NetworkNotLoaded : public std::logic_error {
using std::logic_error::logic_error;
};
INFERENCE_ENGINE_DECLARE_EXCEPTION(NetworkNotLoaded, NETWORK_NOT_LOADED)
/** @brief This class represents StatusCode::PARAMETER_MISMATCH exception */
class ParameterMismatch : public std::logic_error {
using std::logic_error::logic_error;
};
INFERENCE_ENGINE_DECLARE_EXCEPTION(ParameterMismatch, PARAMETER_MISMATCH)
/** @brief This class represents StatusCode::NOT_FOUND exception */
class NotFound : public std::logic_error {
using std::logic_error::logic_error;
};
INFERENCE_ENGINE_DECLARE_EXCEPTION(NotFound, NOT_FOUND)
/** @brief This class represents StatusCode::OUT_OF_BOUNDS exception */
class OutOfBounds : public std::logic_error {
using std::logic_error::logic_error;
};
INFERENCE_ENGINE_DECLARE_EXCEPTION(OutOfBounds, OUT_OF_BOUNDS)
/** @brief This class represents StatusCode::UNEXPECTED exception */
class Unexpected : public std::logic_error {
using std::logic_error::logic_error;
};
INFERENCE_ENGINE_DECLARE_EXCEPTION(Unexpected, UNEXPECTED)
/** @brief This class represents StatusCode::REQUEST_BUSY exception */
class RequestBusy : public std::logic_error {
using std::logic_error::logic_error;
};
INFERENCE_ENGINE_DECLARE_EXCEPTION(RequestBusy, REQUEST_BUSY)
/** @brief This class represents StatusCode::RESULT_NOT_READY exception */
class ResultNotReady : public std::logic_error {
using std::logic_error::logic_error;
};
INFERENCE_ENGINE_DECLARE_EXCEPTION(ResultNotReady, RESULT_NOT_READY)
/** @brief This class represents StatusCode::NOT_ALLOCATED exception */
class NotAllocated : public std::logic_error {
using std::logic_error::logic_error;
};
INFERENCE_ENGINE_DECLARE_EXCEPTION(NotAllocated, NOT_ALLOCATED)
/** @brief This class represents StatusCode::INFER_NOT_STARTED exception */
class InferNotStarted : public std::logic_error {
using std::logic_error::logic_error;
};
INFERENCE_ENGINE_DECLARE_EXCEPTION(InferNotStarted, INFER_NOT_STARTED)
/** @brief This class represents StatusCode::NETWORK_NOT_READ exception */
class NetworkNotRead : public std::logic_error {
using std::logic_error::logic_error;
};
INFERENCE_ENGINE_DECLARE_EXCEPTION(NetworkNotRead, NETWORK_NOT_READ)
/** @brief This class represents StatusCode::INFER_CANCELLED exception */
class InferCancelled : public std::logic_error {
using std::logic_error::logic_error;
INFERENCE_ENGINE_DECLARE_EXCEPTION(InferCancelled, INFER_CANCELLED)
/**
* @private
*/
#undef INFERENCE_ENGINE_DECLARE_EXCEPTION
namespace details {
/**
 * @brief Tag struct used to throw exception
 *
 * Used as: ThrowNow<SomeException>{} <<= std::stringstream{} << "message";
 * operator<<= binds more loosely than operator<<, so the entire stream
 * expression on the right-hand side is evaluated before the throw.
 */
template<typename ExceptionType>
struct ThrowNow final {
    [[noreturn]] void operator<<=(const std::ostream& ostream) {
        // Copy the accumulated buffer into a string and throw it as ExceptionType.
        std::ostringstream message;
        message << ostream.rdbuf();
        throw ExceptionType{message.str()};
    }
};
} // namespace InferenceEngine
/// @cond
#ifndef NDEBUG
#define IE_LOCATION '\n' << __FILE__ << ':' << __LINE__<< ' '
#else
#define IE_LOCATION ""
#endif // NDEBUG
/// @endcond
/**
* @def IE_THROW
* @brief A macro used to throw the specified exception with a description
*/
#define IE_THROW(ExceptionType) \
InferenceEngine::details::ThrowNow<InferenceEngine::ExceptionType>{} <<= std::stringstream{} << IE_LOCATION
/**
* @def THROW_IE_EXCEPTION
* @brief A macro used to throw general exception with a description
*/
#define THROW_IE_EXCEPTION IE_THROW(GeneralError)
/**
* @def THROW_IE_EXCEPTION_WITH_STATUS
* @brief A macro used to throw general exception with a description and status
*/
#define THROW_IE_EXCEPTION_WITH_STATUS(ExceptionType) \
IE_THROW(ExceptionType) << InferenceEngine::details::ExceptionTraits<InferenceEngine::ExceptionType>::string() << ' '
/**
* @def IE_ASSERT
* @brief Uses assert() function if NDEBUG is not defined, InferenceEngine exception otherwise
*/
#ifdef NDEBUG
#define IE_ASSERT(EXPRESSION) \
if (!(EXPRESSION)) \
IE_THROW(GeneralError) << " AssertionFailed: " << #EXPRESSION // NOLINT
#else
/**
* @private
*/
struct NullStream {
    // Swallows any streamed value.
    template <typename T>
    NullStream& operator<<(const T&) noexcept {return *this;}
    // Swallows stream manipulators (e.g. std::endl): the generic template
    // cannot deduce T from an overloaded function name, so without this
    // overload `IE_ASSERT(x) << std::endl` would fail to compile (the old
    // NullStream in ie_exception.hpp provided the same overload).
    NullStream& operator<<(std::ostream& (*)(std::ostream&)) noexcept {return *this;}
};
#define IE_ASSERT(EXPRESSION) \
assert((EXPRESSION)); \
InferenceEngine::details::NullStream()
#endif // NDEBUG
/// @cond
#define IE_EXCEPTION_CASE(TYPE_ALIAS, STATUS_CODE, EXCEPTION_TYPE, ...) \
case InferenceEngine::STATUS_CODE : { \
using InferenceEngine::EXCEPTION_TYPE; using TYPE_ALIAS = EXCEPTION_TYPE; __VA_ARGS__; \
} break;
/// @endcond
/**
* @def IE_EXCEPTION_SWITCH
* @brief Generates a switch statement over error codes and maps them to the corresponding exception types
*/
#define IE_EXCEPTION_SWITCH(STATUS, TYPE_ALIAS, ...) \
switch (STATUS) { \
IE_EXCEPTION_CASE(TYPE_ALIAS, GENERAL_ERROR , GeneralError , __VA_ARGS__) \
IE_EXCEPTION_CASE(TYPE_ALIAS, NOT_IMPLEMENTED , NotImplemented , __VA_ARGS__) \
IE_EXCEPTION_CASE(TYPE_ALIAS, NETWORK_NOT_LOADED , NetworkNotLoaded , __VA_ARGS__) \
IE_EXCEPTION_CASE(TYPE_ALIAS, PARAMETER_MISMATCH , ParameterMismatch , __VA_ARGS__) \
IE_EXCEPTION_CASE(TYPE_ALIAS, NOT_FOUND , NotFound , __VA_ARGS__) \
IE_EXCEPTION_CASE(TYPE_ALIAS, OUT_OF_BOUNDS , OutOfBounds , __VA_ARGS__) \
IE_EXCEPTION_CASE(TYPE_ALIAS, UNEXPECTED , Unexpected , __VA_ARGS__) \
IE_EXCEPTION_CASE(TYPE_ALIAS, REQUEST_BUSY , RequestBusy , __VA_ARGS__) \
IE_EXCEPTION_CASE(TYPE_ALIAS, RESULT_NOT_READY , ResultNotReady , __VA_ARGS__) \
IE_EXCEPTION_CASE(TYPE_ALIAS, NOT_ALLOCATED , NotAllocated , __VA_ARGS__) \
IE_EXCEPTION_CASE(TYPE_ALIAS, INFER_NOT_STARTED , InferNotStarted , __VA_ARGS__) \
IE_EXCEPTION_CASE(TYPE_ALIAS, NETWORK_NOT_READ , NetworkNotRead , __VA_ARGS__) \
IE_EXCEPTION_CASE(TYPE_ALIAS, INFER_CANCELLED , InferCancelled , __VA_ARGS__) \
default: IE_ASSERT(!"Unreachable"); \
}
/**
* @private
*/
// Calls `function` on the wrapped `actual` object; a non-OK StatusCode is
// converted into the matching typed IE exception carrying resp.msg.
// Error message names the macro, consistent with CALL_STATUS_FNC_NO_ARGS
// and CALL_FNC_NO_ARGS.
#define CALL_STATUS_FNC(function, ...)                                                              \
    if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATUS_FNC was not initialized.";   \
    ResponseDesc resp;                                                                               \
    auto res = actual->function(__VA_ARGS__, &resp);                                                 \
    if (res != OK) IE_EXCEPTION_SWITCH(res, ExceptionType,                                           \
            InferenceEngine::details::ThrowNow<ExceptionType>{}                                      \
                <<= std::stringstream{} << IE_LOCATION << resp.msg)
/**
* @private
*/
// Same as CALL_STATUS_FNC but for methods taking only a ResponseDesc*.
// Includes resp.msg in the thrown exception so the diagnostic message from
// the callee is not silently discarded (consistent with CALL_STATUS_FNC).
#define CALL_STATUS_FNC_NO_ARGS(function)                                                            \
    if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATUS_FNC_NO_ARGS was not initialized."; \
    ResponseDesc resp;                                                                               \
    auto res = actual->function(&resp);                                                              \
    if (res != OK) IE_EXCEPTION_SWITCH(res, ExceptionType,                                           \
            InferenceEngine::details::ThrowNow<ExceptionType>{}                                      \
                <<= std::stringstream{} << IE_LOCATION << resp.msg)
/**
* @private
*/
// Calls `function`, throws if the callee wrote an error message into resp,
// otherwise returns the result. NOTE: the statement inside the if-block must
// end with a semicolon, or every expansion of this macro fails to compile.
#define CALL_FNC_NO_ARGS(function)                                                                   \
    if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_FNC_NO_ARGS was not initialized.";  \
    ResponseDesc resp;                                                                               \
    auto result = actual->function(&resp);                                                           \
    if (resp.msg[0] != '\0') {                                                                       \
        THROW_IE_EXCEPTION << resp.msg;                                                              \
    }                                                                                                \
    return result;
} // namespace details
} // namespace InferenceEngine
#if defined(_WIN32)
#define __PRETTY_FUNCTION__ __FUNCSIG__
#else

View File

@ -10,7 +10,6 @@
#include <algorithm>
#include <cctype>
#include <details/ie_exception.hpp>
#include <iterator>
#include <map>
#include <memory>

View File

@ -13,7 +13,8 @@
#include <unordered_map>
#include <vector>
#include "details/ie_exception.hpp"
#include "ie_common.h"
namespace InferenceEngine {

View File

@ -432,7 +432,7 @@ int main(int argc, char *argv[]) {
std::string key = METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS);
try {
nireq = exeNetwork.GetMetric(key).as<unsigned int>();
} catch (const details::InferenceEngineException& ex) {
} catch (const std::exception& ex) {
THROW_IE_EXCEPTION
<< "Every device used with the benchmark_app should "
<< "support OPTIMAL_NUMBER_OF_INFER_REQUESTS ExecutableNetwork metric. "

View File

@ -642,7 +642,7 @@ inline std::map<std::string, std::string> getMapFullDevicesNames(InferenceEngine
p = ie.GetMetric(deviceName, METRIC_KEY(FULL_DEVICE_NAME));
devicesMap.insert(std::pair<std::string, std::string>(deviceName, p.as<std::string>()));
}
catch (InferenceEngine::details::InferenceEngineException &) {
catch (InferenceEngine::Exception &) {
}
}
}
@ -664,7 +664,7 @@ inline std::string getFullDeviceName(InferenceEngine::Core& ie, std::string devi
p = ie.GetMetric(device, METRIC_KEY(FULL_DEVICE_NAME));
return p.as<std::string>();
}
catch (InferenceEngine::details::InferenceEngineException &) {
catch (InferenceEngine::Exception &) {
return "";
}
}

View File

@ -5,7 +5,6 @@
#pragma once
#include <ie_layouts.h>
#include <details/ie_exception.hpp>
#include <cpp_interfaces/exception2status.hpp>
#include <api/layout.hpp>
@ -49,7 +48,8 @@ inline cldnn::data_types DataTypeFromPrecision(InferenceEngine::Precision p) {
case InferenceEngine::Precision::BOOL:
return cldnn::data_types::i8;
default:
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << p.name() << " precision";
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch)
<< "The plugin does not support " << p.name() << " precision";
}
}
@ -74,7 +74,8 @@ inline cldnn::data_types DataTypeFromPrecision(ngraph::element::Type t) {
case ngraph::element::Type_t::u1:
return cldnn::data_types::bin;
default:
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << t.get_type_name()<< " precision";
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch)
<< "The plugin does not support " << t.get_type_name()<< " precision";
}
}
@ -94,7 +95,7 @@ inline cldnn::format FormatFromLayout(InferenceEngine::Layout l) {
case InferenceEngine::Layout::NHWC:
return cldnn::format::byxf;
default:
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << l << " layout";
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "The plugin does not support " << l << " layout";
}
}
@ -119,7 +120,8 @@ inline cldnn::format FormatFromTensorDesc(InferenceEngine::TensorDesc desc) {
case InferenceEngine::Layout::NHWC:
return cldnn::format::byxf;
default:
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << desc.getLayout() << " layout";
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch)
<< "The plugin does not support " << desc.getLayout() << " layout";
}
}
@ -135,7 +137,8 @@ inline cldnn::format ImageFormatFromLayout(InferenceEngine::Layout l) {
case InferenceEngine::Layout::NHWC:
return cldnn::format::nv12;
default:
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << l << " image layout";
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch)
<< "The plugin does not support " << l << " image layout";
}
}

View File

@ -7,7 +7,6 @@
#include <cldnn/cldnn_config.hpp>
#include "cldnn_config.h"
#include "cpp_interfaces/exception2status.hpp"
#include "details/ie_exception.hpp"
#include "cpp_interfaces/interface/ie_internal_plugin_config.hpp"
#include "ie_api.h"
#include "file_utils.h"
@ -52,7 +51,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
} else if (val.compare(PluginConfigParams::NO) == 0) {
useProfiling = false;
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
}
} else if (key.compare(PluginConfigParams::KEY_DYN_BATCH_ENABLED) == 0) {
if (val.compare(PluginConfigParams::YES) == 0) {
@ -60,7 +59,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
} else if (val.compare(PluginConfigParams::NO) == 0) {
enableDynamicBatch = false;
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
}
} else if (key.compare(PluginConfigParams::KEY_DUMP_KERNELS) == 0) {
if (val.compare(PluginConfigParams::YES) == 0) {
@ -68,14 +67,14 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
} else if (val.compare(PluginConfigParams::NO) == 0) {
dumpCustomKernels = false;
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
}
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_PLUGIN_PRIORITY) == 0) {
std::stringstream ss(val);
uint32_t uVal(0);
ss >> uVal;
if (ss.fail()) {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
}
switch (uVal) {
case 0:
@ -91,7 +90,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
queuePriority = cldnn::priority_mode_types::high;
break;
default:
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Unsupported queue priority value: " << uVal;
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Unsupported queue priority value: " << uVal;
}
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_PLUGIN_THROTTLE) == 0) {
@ -99,7 +98,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
uint32_t uVal(0);
ss >> uVal;
if (ss.fail()) {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
}
switch (uVal) {
case 0:
@ -115,7 +114,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
queueThrottle = cldnn::throttle_mode_types::high;
break;
default:
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Unsupported queue throttle value: " << uVal;
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Unsupported queue throttle value: " << uVal;
}
} else if (key.compare(PluginConfigParams::KEY_CONFIG_FILE) == 0) {
std::stringstream ss(val);
@ -137,7 +136,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
} else if (val.compare(PluginConfigParams::TUNING_RETUNE) == 0) {
tuningConfig.mode = cldnn::tuning_mode::tuning_retune_and_cache;
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported tuning mode value by plugin: " << val;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported tuning mode value by plugin: " << val;
}
} else if (key.compare(PluginConfigParams::KEY_TUNING_FILE) == 0) {
tuningConfig.cache_file_path = val;
@ -147,7 +146,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
} else if (val.compare(PluginConfigParams::NO) == 0) {
memory_pool_on = false;
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported memory pool flag value: " << val;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported memory pool flag value: " << val;
}
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_GRAPH_DUMPS_DIR) == 0) {
if (!val.empty()) {
@ -170,7 +169,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
} else if (val.compare(PluginConfigParams::NO) == 0) {
exclusiveAsyncRequests = false;
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
}
} else if (key.compare(PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS) == 0) {
if (val.compare(PluginConfigParams::GPU_THROUGHPUT_AUTO) == 0) {
@ -204,7 +203,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
} else if (val.compare(PluginConfigParams::NO) == 0) {
enableInt8 = false;
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val;
}
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_NV12_TWO_INPUTS) == 0) {
if (val.compare(PluginConfigParams::YES) == 0) {
@ -212,7 +211,7 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
} else if (val.compare(PluginConfigParams::NO) == 0) {
nv12_two_inputs = false;
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported NV12 flag value: " << val;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported NV12 flag value: " << val;
}
} else if (key.compare(CLDNNConfigParams::KEY_CLDNN_ENABLE_FP16_FOR_QUANTIZED_MODELS) == 0) {
if (val.compare(PluginConfigParams::YES) == 0) {
@ -220,10 +219,10 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& configMap)
} else if (val.compare(PluginConfigParams::NO) == 0) {
enable_fp16_for_quantized_models = false;
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported KEY_CLDNN_ENABLE_FP16_FOR_QUANTIZED_MODELS flag value: " << val;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported KEY_CLDNN_ENABLE_FP16_FOR_QUANTIZED_MODELS flag value: " << val;
}
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property key by plugin: " << key;
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property key by plugin: " << key;
}
adjustKeyMapValues();

View File

@ -416,7 +416,7 @@ auto check_inputs = [](InferenceEngine::InputsDataMap _networkInputs) {
input_precision != InferenceEngine::Precision::I32 &&
input_precision != InferenceEngine::Precision::I64 &&
input_precision != InferenceEngine::Precision::BOOL) {
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented)
<< "Input image format " << input_precision << " is not supported yet...";
}
}

View File

@ -66,16 +66,16 @@ InferRequestInternal::Ptr CLDNNExecNetwork::CreateInferRequestImpl(InputsDataMap
OutputsDataMap networkOutputs) {
OV_ITT_SCOPED_TASK(itt::domains::CLDNNPlugin, "CLDNNExecNetwork::CreateInferRequestImpl");
if (m_graphs.empty()) {
THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str;
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded);
}
for (auto& graph : m_graphs) {
if (graph == nullptr) {
THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str;
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded);
}
if (!graph->IsLoaded()) {
THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str << ": no networks created";
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded) << ": no networks created";
}
}
@ -98,7 +98,7 @@ IInferRequest::Ptr CLDNNExecNetwork::CreateInferRequest() {
InferenceEngine::CNNNetwork CLDNNExecNetwork::GetExecGraphInfo() {
if (m_graphs.empty())
THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str;
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded);
return m_graphs.front()->GetExecGraphInfo();
}

View File

@ -337,7 +337,7 @@ void checkInputBlob(const Blob::Ptr &blob,
auto nv12_ptr = blob->as<NV12Blob>();
if (nv12_ptr == nullptr) {
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << wrong_nv12_blob;
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << wrong_nv12_blob;
}
auto y_ptr = nv12_ptr->y()->as<gpu::ClBlob>();
@ -402,7 +402,8 @@ void CLDNNInferRequest::checkBlobs() {
if (foundInputPair != std::end(_networkInputs)) {
foundInput = foundInputPair->second;
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find input with name: \'" << input.first << "\'";
THROW_IE_EXCEPTION_WITH_STATUS(NotFound)
<< "Failed to find input with name: \'" << input.first << "\'";
}
checkInputBlob(input.second, input.first, foundInput, m_graph->getConfig().nv12_two_inputs);
}
@ -415,7 +416,8 @@ void CLDNNInferRequest::checkBlobs() {
if (foundOutputPair != std::end(_networkOutputs)) {
foundOutput = foundOutputPair->second;
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find output with name: \'" << output.first << "\'";
THROW_IE_EXCEPTION_WITH_STATUS(NotFound)
<< "Failed to find output with name: \'" << output.first << "\'";
}
checkOutputBlob(output.second, output.first, foundOutput);
}
@ -449,10 +451,10 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data)
// perform all common checks first
if (name.empty()) {
THROW_IE_EXCEPTION << NOT_FOUND_str + "Failed to set blob with empty name";
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Failed to set blob with empty name";
}
if (!data)
THROW_IE_EXCEPTION << NOT_ALLOCATED_str << "Failed to set empty blob with name: \'" << name << "\'";
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) << "Failed to set empty blob with name: \'" << name << "\'";
size_t dataSize = data->size();
if (0 == dataSize) {
@ -470,7 +472,7 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data)
: foundOutput->getTensorDesc();
if (desc.getPrecision() != blobDesc.getPrecision()) {
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch)
<< "Failed to set Blob with precision not corresponding to user "
<< (is_input ? "input" : "output") << " precision";
}
@ -498,7 +500,7 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data)
auto nv12_ptr = data->as<NV12Blob>();
if (nv12_ptr == nullptr) {
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << wrong_nv12_blob;
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << wrong_nv12_blob;
}
auto y_ptr = nv12_ptr->y()->as<gpu::ClBlob>();
@ -530,7 +532,7 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data)
_preProcData[name]->setRoiBlob(data);
} else {
if (compoundBlobPassed) {
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << cannot_set_compound;
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << cannot_set_compound;
}
size_t blobSize = desc.getLayout() != SCALAR
@ -548,7 +550,7 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data)
}
} else {
if (compoundBlobPassed) {
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << cannot_set_compound;
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << cannot_set_compound;
}
if (is_remote) {
@ -697,7 +699,7 @@ void CLDNNInferRequest::SetGraph(std::shared_ptr<CLDNNPlugin::CLDNNGraph> graph)
m_graph = graph;
if (m_graph == nullptr) {
THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str;
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded);
}
if (m_graph->GetMaxDynamicBatchSize() > 1) {

View File

@ -12,7 +12,6 @@
#include <mutex>
#include <cpp/ie_cnn_network.h>
#include "details/ie_exception.hpp"
#include "cldnn_config.h"

View File

@ -5,7 +5,6 @@
#include <cstdio>
#include <cmath>
#include <details/ie_exception.hpp>
#if GNA_LIB_VER == 2
#include <gna2-model-api.h>

View File

@ -269,7 +269,8 @@ inline void quantizeWeightsBiases(const QuantDesc & quantDesc,
make_custom_blob<typename QuantDesc::WeightsPrecision>(InferenceEngine::C, InferenceEngine::SizeVector({wl->_weights->size()}));
intWeights->allocate();
if (intWeights->buffer() == nullptr) {
THROW_GNA_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::NOT_ALLOCATED
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated)
<< "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": "
<< "cannot copy weights for layer :"<< wl->name << " of size" << intWeights->byteSize();
}
@ -296,7 +297,8 @@ inline void quantizeWeightsBiases(const QuantDesc & quantDesc,
}));
bias->allocate();
if (bias->buffer() == nullptr) {
THROW_GNA_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::NOT_ALLOCATED
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated)
<< "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": "
<< "cannot copy bias for layer :"<< wl->name <<"of size" << bias->byteSize();
}
@ -386,7 +388,8 @@ inline void quantizeWeightsBiasesConv(const QuantDesc & quantDesc,
auto intWeights = make_custom_blob<typename QuantDesc::WeightsPrecision>(InferenceEngine::C, InferenceEngine::SizeVector({conv->_weights->size()}));
intWeights->allocate();
if (intWeights->buffer() == nullptr) {
THROW_GNA_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::NOT_ALLOCATED
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated)
<< "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": "
<< "cannot copy weights for layer :"<< conv->name << " of size" << intWeights->byteSize();
}
@ -410,7 +413,8 @@ inline void quantizeWeightsBiasesConv(const QuantDesc & quantDesc,
}));
bias->allocate();
if (bias->buffer() == nullptr) {
THROW_GNA_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::NOT_ALLOCATED
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated)
<< "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": "
<< "cannot copy bias for layer :"<< conv->name <<"of size" << bias->byteSize();
}
memset(bias->buffer(), 0, bias->byteSize());

View File

@ -4,7 +4,6 @@
#include <cstring>
#include <iostream>
#include <details/ie_exception.hpp>
#include <gna_plugin_log.hpp>
#include <limits>
#include "backend/gna_types.h"

View File

@ -9,7 +9,6 @@
#if GNA_LIB_VER == 2
#include "gna2_model_debug_log.hpp"
#include "gna2-model-api.h"
#include <details/ie_exception.hpp>
#include <cstdint>
#include <fstream>

View File

@ -24,7 +24,6 @@
#include "gna-api.h"
#endif
#include "details/ie_exception.hpp"
#include "gna_plugin_log.hpp"
//#define MODEL_DUMP

View File

@ -90,7 +90,7 @@ class GNAInferRequest : public InferenceEngine::AsyncInferRequestInternal {
if (inferRequestIdx == -1) {
return InferenceEngine::INFER_NOT_STARTED;
} else if (millis_timeout < -1) {
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str;
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch);
}
if (millis_timeout == InferenceEngine::IInferRequest::WaitMode::RESULT_READY) {

View File

@ -4,7 +4,6 @@
#include <vector>
#include <array>
#include <details/ie_exception.hpp>
#include <ios>
#include <iomanip>
#include <map>

View File

@ -39,6 +39,7 @@
#include <layers/gna_fake_quantize_layer.hpp>
#include "gna_graph_patterns.hpp"
#include "gna_tensor_tools.hpp"
#include <debug.h>
#include <ngraph/pass/manager.hpp>
#include <legacy/convert_function_to_cnn_network.hpp>
@ -1108,7 +1109,7 @@ uint32_t GNAPlugin::QueueInference(const InferenceEngine::BlobMap &inputs, Infer
Wait(0);
freeNnet = nnets.begin();
} else {
THROW_IE_EXCEPTION << as_status << REQUEST_BUSY
THROW_IE_EXCEPTION_WITH_STATUS(RequestBusy)
<< "GNA executable network has max of "
<< static_cast<uint32_t >(gnaFlags->gna_lib_async_threads_num)
<< " parallel infer requests, please sync one of already running";
@ -1589,7 +1590,7 @@ InferenceEngine::QueryNetworkResult GNAPlugin::QueryNetwork(const InferenceEngin
InferenceEngine::QueryNetworkResult res;
if (network.getFunction()) {
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << " ngraph::Function is not supported natively";
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << " ngraph::Function is not supported natively";
}
std::unordered_set<CNNLayer *> allLayers;

View File

@ -211,8 +211,9 @@ void Config::UpdateFromMap(const std::map<std::string, std::string>& config) {
THROW_GNA_EXCEPTION << "EXCLUSIVE_ASYNC_REQUESTS should be YES/NO, but not" << value;
}
} else {
THROW_GNA_EXCEPTION << as_status << NOT_FOUND << "Incorrect GNA Plugin config. Key " << item.first
<< " not supported";
THROW_IE_EXCEPTION_WITH_STATUS(NotFound)
<< "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": "
<< "Incorrect GNA Plugin config. Key " << item.first << " not supported";
}
if (gnaFlags.sw_fp32 && gnaFlags.gna_lib_async_threads_num > 1) {

View File

@ -75,7 +75,7 @@ public:
auto plg = GetCurrentPlugin();
try {
plg->SetConfig(config);
} catch (InferenceEngine::details::InferenceEngineException) {}
} catch (InferenceEngine::Exception&) {}
return plg->QueryNetwork(network, config);
}

View File

@ -6,7 +6,6 @@
#include <ostream>
#include <ie_common.h>
#include <details/ie_exception.hpp>
// #define GNA_DEBUG
#ifdef GNA_DEBUG

View File

@ -5,7 +5,6 @@
#include "gna_memory_util.hpp"
#include <cstdint>
#include <details/ie_exception.hpp>
#include "gna_plugin_log.hpp"
int32_t GNAPluginNS::memory::MemoryOffset(void *ptr_target, void *ptr_base) {

View File

@ -35,10 +35,13 @@ HeteroAsyncInferRequest::HeteroAsyncInferRequest(const InferRequestInternal::Ptr
Task _task;
};
auto reuestExecutor = std::make_shared<RequestExecutor>(_heteroInferRequest->_inferRequests[requestId]._request.get());
_pipeline.emplace_back(reuestExecutor, [reuestExecutor] {
if (StatusCode::OK != reuestExecutor->_status) {
THROW_IE_EXCEPTION << InferenceEngine::details::as_status << reuestExecutor->_status;
auto requestExecutor = std::make_shared<RequestExecutor>(_heteroInferRequest->_inferRequests[requestId]._request.get());
_pipeline.emplace_back(requestExecutor, [requestExecutor] {
if (StatusCode::OK != requestExecutor->_status) {
IE_EXCEPTION_SWITCH(requestExecutor->_status, ExceptionType,
InferenceEngine::details::ThrowNow<ExceptionType>{}
<<= std::stringstream{} << IE_LOCATION
<< InferenceEngine::details::ExceptionTraits<ExceptionType>::string());
}
});
}

View File

@ -431,7 +431,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(std::istream&
pugi::xml_parse_result res = heteroXmlDoc.load_string(heteroXmlStr.c_str());
if (res.status != pugi::status_ok) {
THROW_IE_EXCEPTION_WITH_STATUS(NETWORK_NOT_READ) << "Error reading HETERO plugin xml header";
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotRead) << "Error reading HETERO plugin xml header";
}
using namespace XMLParseUtils;
@ -480,7 +480,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(std::istream&
bool loaded = false;
try {
executableNetwork = _heteroPlugin->GetCore()->ImportNetwork(heteroModel, deviceName, loadConfig);
} catch (const InferenceEngine::NotImplemented &) {
} catch (const InferenceEngine::NotImplemented& ex) {
// read XML content
std::string xmlString;
std::uint64_t dataSize = 0;
@ -608,7 +608,7 @@ void HeteroExecutableNetwork::ExportImpl(std::ostream& heteroModel) {
for (auto&& subnetwork : networks) {
try {
subnetwork._network.Export(heteroModel);
} catch (const InferenceEngine::NotImplemented &) {
} catch (const InferenceEngine::NotImplemented& ex) {
auto subnet = subnetwork._clonedNetwork;
if (!subnet.getFunction()) {
THROW_IE_EXCEPTION << "Hetero plugin supports only ngraph function representation";

View File

@ -77,11 +77,7 @@ void HeteroInferRequest::SetBlob(const std::string& name, const InferenceEngine:
if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) {
r->SetBlob(name, data, foundInput->getPreProcess());
}
} catch (const InferenceEngine::details::InferenceEngineException & ex) {
std::string message = ex.what();
if (message.find(NOT_FOUND_str) == std::string::npos)
throw ex;
}
} catch (const InferenceEngine::NotFound& ex) {}
}
}

View File

@ -427,7 +427,7 @@ StatusCode CNNNetworkNGraphImpl::serialize(const std::string& xmlPath,
xmlPath, binPath, ngraph::pass::Serialize::Version::IR_V10,
custom_opsets);
manager.run_passes(_ngraph_function);
} catch (const InferenceEngineException& e) {
} catch (const Exception& e) {
return DescriptionBuffer(GENERAL_ERROR, resp) << e.what();
} catch (const std::exception& e) {
return DescriptionBuffer(UNEXPECTED, resp) << e.what();

View File

@ -193,7 +193,7 @@ std::istream& operator >> (std::istream& stream, CompiledBlobHeader& header) {
pugi::xml_parse_result res = document.load_string(xmlStr.c_str());
if (res.status != pugi::status_ok) {
THROW_IE_EXCEPTION_WITH_STATUS(NETWORK_NOT_READ) << "Error reading compiled blob header";
THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotRead) << "Error reading compiled blob header";
}
pugi::xml_node compiledBlobNode = document.document_element();

View File

@ -12,10 +12,9 @@
#endif
#include <file_utils.h>
#include <details/ie_exception.hpp>
#include <stdlib.h>
#include <sys/stat.h>
#include "ie_common.h"
#ifndef _WIN32
# include <limits.h>
# include <unistd.h>

View File

@ -11,7 +11,7 @@
namespace InferenceEngine {
Blob::Ptr Blob::createROI(const ROI&) const {
THROW_IE_EXCEPTION << "[NOT_IMPLEMENTED] createROI is not implemented for current type of Blob";
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << "createROI is not implemented for current type of Blob";
}
Blob::Ptr make_shared_blob(const Blob::Ptr& inputBlob, const ROI& roi) {

View File

@ -0,0 +1,145 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <string>
#include <vector>
#include <tuple>
#include <memory>
#include <map>
#include <cassert>
#include <ie_common.h>
#include <ie_blob.h>
#include <ie_parameter.hpp>
#include <ie_iextension.h>
#include <ie_extension.h>
#include <exec_graph_info.hpp>
#include <ngraph/opsets/opset.hpp>
#include <cpp_interfaces/exception2status.hpp>
namespace ExecGraphInfoSerialization {
//
// exec_graph_info.hpp
//
constexpr ngraph::NodeTypeInfo ExecutionNode::type_info;
const ngraph::NodeTypeInfo& ExecutionNode::get_type_info() const {
return type_info;
}
} // namespace ExecGraphInfoSerialization
namespace InferenceEngine {
//
// ie_blob.h
//
Blob::~Blob() {}
MemoryBlob::~MemoryBlob() {}
//
// ie_iextension.h
//
ILayerImpl::~ILayerImpl() {}
ILayerExecImpl::~ILayerExecImpl() {}
std::map<std::string, ngraph::OpSet> IExtension::getOpSets() {
return {};
}
//
// ie_extension.h
//
std::map<std::string, ngraph::OpSet> Extension::getOpSets() {
return actual->getOpSets();
}
namespace details {
IE_SUPPRESS_DEPRECATED_START
StatusCode InferenceEngineException::getStatus() const {
return ExceptionToStatus(dynamic_cast<const Exception&>(*this));
}
} // namespace details
IE_SUPPRESS_DEPRECATED_END
INFERENCE_ENGINE_API_CPP(StatusCode) ExceptionToStatus(const Exception& exception) {
if (dynamic_cast<const GeneralError*>(&exception) != nullptr) {
return GENERAL_ERROR;
} else if (dynamic_cast<const NotImplemented*>(&exception) != nullptr) {
return NOT_IMPLEMENTED;
} else if (dynamic_cast<const NetworkNotLoaded*>(&exception) != nullptr) {
return NETWORK_NOT_LOADED;
} else if (dynamic_cast<const ParameterMismatch*>(&exception) != nullptr) {
return PARAMETER_MISMATCH;
} else if (dynamic_cast<const NotFound*>(&exception) != nullptr) {
return NOT_FOUND;
} else if (dynamic_cast<const OutOfBounds*>(&exception) != nullptr) {
return OUT_OF_BOUNDS;
} else if (dynamic_cast<const Unexpected*>(&exception) != nullptr) {
return UNEXPECTED;
} else if (dynamic_cast<const RequestBusy*>(&exception) != nullptr) {
return REQUEST_BUSY;
} else if (dynamic_cast<const ResultNotReady*>(&exception) != nullptr) {
return RESULT_NOT_READY;
} else if (dynamic_cast<const NotAllocated*>(&exception) != nullptr) {
return NOT_ALLOCATED;
} else if (dynamic_cast<const InferNotStarted*>(&exception) != nullptr) {
return INFER_NOT_STARTED;
} else if (dynamic_cast<const NetworkNotRead*>(&exception) != nullptr) {
return NETWORK_NOT_READ;
} else if (dynamic_cast<const InferCancelled*>(&exception) != nullptr) {
return INFER_CANCELLED;
} else {
assert(!"Unreachable"); return OK;
}
}
//
// ie_parameter.hpp
//
Parameter::~Parameter() {
clear();
}
#if defined(__clang__) && !defined(__SYCL_COMPILER_VERSION)
Parameter::Any::~Any() {}
template struct Parameter::RealData<int>;
template struct Parameter::RealData<bool>;
template struct Parameter::RealData<float>;
template struct Parameter::RealData<double>;
template struct Parameter::RealData<uint32_t>;
template struct Parameter::RealData<std::string>;
template struct Parameter::RealData<unsigned long>;
template struct Parameter::RealData<std::vector<int>>;
template struct Parameter::RealData<std::vector<std::string>>;
template struct Parameter::RealData<std::vector<unsigned long>>;
template struct Parameter::RealData<std::tuple<unsigned int, unsigned int>>;
template struct Parameter::RealData<std::tuple<unsigned int, unsigned int, unsigned int>>;
template struct Parameter::RealData<Blob::Ptr>;
//
// ie_blob.h
//
template <typename T, typename U>
TBlob<T, U>::~TBlob() {
free();
}
template class TBlob<float>;
template class TBlob<double>;
template class TBlob<int8_t>;
template class TBlob<uint8_t>;
template class TBlob<int16_t>;
template class TBlob<uint16_t>;
template class TBlob<int32_t>;
template class TBlob<uint32_t>;
template class TBlob<long>;
template class TBlob<long long>;
template class TBlob<unsigned long>;
template class TBlob<unsigned long long>;
#endif // defined(__clang__) && !defined(__SYCL_COMPILER_VERSION)
} // namespace InferenceEngine

View File

@ -620,7 +620,7 @@ public:
}
plugins[deviceName] = plugin;
} catch (const details::InferenceEngineException& ex) {
} catch (const Exception& ex) {
THROW_IE_EXCEPTION << "Failed to create plugin " << FileUtils::fromFilePath(desc.libraryLocation) << " for device " << deviceName
<< "\n"
<< "Please, check your environment\n"
@ -993,7 +993,7 @@ std::vector<std::string> Core::GetAvailableDevices() const {
try {
Parameter p = GetMetric(deviceName, propertyName);
devicesIDs = p.as<std::vector<std::string>>();
} catch (details::InferenceEngineException&) {
} catch (Exception&) {
// plugin is not created by e.g. invalid env
} catch (const std::exception& ex) {
THROW_IE_EXCEPTION << "An exception is thrown while trying to create the " << deviceName

View File

@ -16,25 +16,39 @@
#include "file_utils.h"
#include "cpp/ie_executable_network.hpp"
#include "cpp/ie_cnn_network.h"
#include "details/ie_exception_conversion.hpp"
#include "ie_plugin_ptr.hpp"
#include "cpp_interfaces/exception2status.hpp"
#if defined __GNUC__
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wreturn-type"
#endif
#define CATCH_IE_EXCEPTION(ExceptionType) catch (const InferenceEngine::ExceptionType& e) {throw e;}
#define CATCH_IE_EXCEPTIONS \
CATCH_IE_EXCEPTION(GeneralError) \
CATCH_IE_EXCEPTION(NotImplemented) \
CATCH_IE_EXCEPTION(NetworkNotLoaded) \
CATCH_IE_EXCEPTION(ParameterMismatch) \
CATCH_IE_EXCEPTION(NotFound) \
CATCH_IE_EXCEPTION(OutOfBounds) \
CATCH_IE_EXCEPTION(Unexpected) \
CATCH_IE_EXCEPTION(RequestBusy) \
CATCH_IE_EXCEPTION(ResultNotReady) \
CATCH_IE_EXCEPTION(NotAllocated) \
CATCH_IE_EXCEPTION(InferNotStarted) \
CATCH_IE_EXCEPTION(NetworkNotRead) \
CATCH_IE_EXCEPTION(InferCancelled)
#define CALL_STATEMENT(...) \
if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATEMENT was not initialized."; \
try { \
__VA_ARGS__; \
} catch (const InferenceEngine::details::InferenceEngineException& iex) { \
InferenceEngine::details::extract_exception(iex.hasStatus() ? \
iex.getStatus() : GENERAL_ERROR, iex.what()); \
} catch (const std::exception& ex) { \
InferenceEngine::details::extract_exception(GENERAL_ERROR, ex.what()); \
} CATCH_IE_EXCEPTIONS catch (const std::exception& ex) { \
THROW_IE_EXCEPTION << ex.what(); \
} catch (...) { \
InferenceEngine::details::extract_exception(UNEXPECTED, ""); \
THROW_IE_EXCEPTION_WITH_STATUS(Unexpected); \
}
namespace InferenceEngine {

View File

@ -1,123 +0,0 @@
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <string>
#include <vector>
#include <tuple>
#include <memory>
#include <map>
#include <details/ie_exception.hpp>
#include <ie_blob.h>
#include <ie_parameter.hpp>
#include <ie_iextension.h>
#include <ie_extension.h>
#include <exec_graph_info.hpp>
#include <ngraph/opsets/opset.hpp>
using namespace InferenceEngine;
//
// exec_graph_info.hpp
//
constexpr ngraph::NodeTypeInfo ExecGraphInfoSerialization::ExecutionNode::type_info;
const ngraph::NodeTypeInfo&
ExecGraphInfoSerialization::ExecutionNode::get_type_info() const {
return type_info;
}
//
// ie_blob.h
//
Blob::~Blob() {}
MemoryBlob::~MemoryBlob() {}
//
// ie_iextension.h
//
ILayerImpl::~ILayerImpl() {}
ILayerExecImpl::~ILayerExecImpl() {}
std::map<std::string, ngraph::OpSet> IExtension::getOpSets() {
return {};
}
//
// ie_extension.h
//
std::map<std::string, ngraph::OpSet> Extension::getOpSets() {
return actual->getOpSets();
}
//
// details/ie_exception.hpp
//
details::InferenceEngineException::~InferenceEngineException() noexcept {}
details::InferenceEngineException::InferenceEngineException(const std::string& filename, const int line, const std::string& message) noexcept :
std::exception(), _file(filename), _line(line) {
if (!message.empty()) {
exception_stream = std::make_shared<std::stringstream>(message);
}
}
details::InferenceEngineException::InferenceEngineException(const InferenceEngineException& that) noexcept :
std::exception() {
errorDesc = that.errorDesc;
status_code = that.status_code;
_file = that._file;
_line = that._line;
exception_stream = that.exception_stream;
}
//
// ie_parameter.hpp
//
Parameter::~Parameter() {
clear();
}
#if defined(__clang__) && !defined(__SYCL_COMPILER_VERSION)
Parameter::Any::~Any() {}
template struct InferenceEngine::Parameter::RealData<int>;
template struct InferenceEngine::Parameter::RealData<bool>;
template struct InferenceEngine::Parameter::RealData<float>;
template struct InferenceEngine::Parameter::RealData<double>;
template struct InferenceEngine::Parameter::RealData<uint32_t>;
template struct InferenceEngine::Parameter::RealData<std::string>;
template struct InferenceEngine::Parameter::RealData<unsigned long>;
template struct InferenceEngine::Parameter::RealData<std::vector<int>>;
template struct InferenceEngine::Parameter::RealData<std::vector<std::string>>;
template struct InferenceEngine::Parameter::RealData<std::vector<unsigned long>>;
template struct InferenceEngine::Parameter::RealData<std::tuple<unsigned int, unsigned int>>;
template struct InferenceEngine::Parameter::RealData<std::tuple<unsigned int, unsigned int, unsigned int>>;
template struct InferenceEngine::Parameter::RealData<InferenceEngine::Blob::Ptr>;
#endif // __clang__ && !__SYCL_COMPILER_VERSION
//
// ie_blob.h
//
#if defined(__clang__) && !defined(__SYCL_COMPILER_VERSION)
template <typename T, typename U>
TBlob<T, U>::~TBlob() {
free();
}
template class InferenceEngine::TBlob<float>;
template class InferenceEngine::TBlob<double>;
template class InferenceEngine::TBlob<int8_t>;
template class InferenceEngine::TBlob<uint8_t>;
template class InferenceEngine::TBlob<int16_t>;
template class InferenceEngine::TBlob<uint16_t>;
template class InferenceEngine::TBlob<int32_t>;
template class InferenceEngine::TBlob<uint32_t>;
template class InferenceEngine::TBlob<long>;
template class InferenceEngine::TBlob<long long>;
template class InferenceEngine::TBlob<unsigned long>;
template class InferenceEngine::TBlob<unsigned long long>;
#endif // __clang__ && !__SYCL_COMPILER_VERSION

View File

@ -4,7 +4,6 @@
#include <dlfcn.h>
#include "details/ie_exception.hpp"
#include "details/ie_so_loader.h"
#include "file_utils.h"
@ -38,14 +37,14 @@ public:
* @brief Searches for a function symbol in the loaded module
* @param symbolName Name of the function to find
* @return A pointer to the function if found
* @throws InferenceEngineException if the function is not found
* @throws Exception if the function is not found
*/
void* get_symbol(const char* symbolName) const {
void* procAddr = nullptr;
procAddr = dlsym(shared_object, symbolName);
if (procAddr == nullptr)
THROW_IE_EXCEPTION << details::as_status << NOT_FOUND
THROW_IE_EXCEPTION_WITH_STATUS(NotFound)
<< "dlSym cannot locate method '" << symbolName << "': " << dlerror();
return procAddr;
}

View File

@ -10,7 +10,7 @@
#include <sched.h>
#include "ie_system_conf.h"
#include "ie_parallel.hpp"
#include "details/ie_exception.hpp"
#include "ie_common.h"
#include <numeric>

View File

@ -2,7 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
//
#include "details/ie_exception.hpp"
#include "ie_common.h"
#include "details/ie_so_loader.h"
#include "file_utils.h"
@ -239,7 +239,7 @@ class SharedObjectLoader::Impl {
* @brief Searches for a function symbol in the loaded module
* @param symbolName Name of function to find
* @return A pointer to the function if found
* @throws InferenceEngineException if the function is not found
* @throws Exception if the function is not found
*/
void* get_symbol(const char* symbolName) const {
if (!shared_object) {
@ -247,7 +247,7 @@ class SharedObjectLoader::Impl {
}
auto procAddr = reinterpret_cast<void*>(GetProcAddress(shared_object, symbolName));
if (procAddr == nullptr)
THROW_IE_EXCEPTION << details::as_status << NOT_FOUND
THROW_IE_EXCEPTION_WITH_STATUS(NotFound)
<< "GetProcAddress cannot locate method '" << symbolName << "': " << GetLastError();
return procAddr;

View File

@ -3,7 +3,6 @@
//
#include "precision_utils.h"
#include <details/ie_exception.hpp>
#include <stdint.h>

View File

@ -18,7 +18,6 @@
#include "ie_parallel.hpp"
#include "ie_system_conf.h"
#include "threading/ie_thread_affinity.hpp"
#include "details/ie_exception.hpp"
#include "threading/ie_cpu_streams_executor.hpp"
#include <openvino/itt.hpp>

View File

@ -5,7 +5,6 @@
#include "threading/ie_istreams_executor.hpp"
#include "ie_plugin_config.hpp"
#include "cpp_interfaces/interface/ie_internal_plugin_config.hpp"
#include "details/ie_exception.hpp"
#include "ie_parallel.hpp"
#include "ie_system_conf.h"
#include "ie_parameter.hpp"

View File

@ -10,7 +10,6 @@
#include <set>
#include <string>
#include "details/ie_exception.hpp"
#include "ie_precision.hpp"
int XMLParseUtils::GetIntAttr(const pugi::xml_node& node, const char* str) {

View File

@ -205,7 +205,7 @@ public:
*
* @param str input string with float value
* @return float value if parsing was successful
* @throws InferenceEngineException in case of parsing error
* @throws Exception in case of parsing error
*/
static float ie_parse_float(const std::string& str);

View File

@ -10,7 +10,8 @@
#pragma once
#include <vector>
#include <details/ie_exception.hpp>
#include <ie_common.h>
namespace InferenceEngine {

View File

@ -400,7 +400,7 @@ StatusCode CNNNetworkImpl::serialize(const std::string& xmlPath, const std::stri
std::const_pointer_cast<ICNNNetwork>(shared_from_this())));
return OK;
#endif
} catch (const InferenceEngineException& e) {
} catch (const Exception& e) {
return DescriptionBuffer(GENERAL_ERROR, resp) << e.what();
} catch (const std::exception& e) {
return DescriptionBuffer(UNEXPECTED, resp) << e.what();
@ -448,7 +448,7 @@ StatusCode CNNNetworkImpl::setBatchSize(size_t size, ResponseDesc* responseDesc)
}
}
return OK;
} catch (const InferenceEngineException& e) {
} catch (const Exception& e) {
return DescriptionBuffer(GENERAL_ERROR, responseDesc) << e.what();
} catch (const std::exception& e) {
return DescriptionBuffer(UNEXPECTED, responseDesc) << e.what();
@ -472,7 +472,7 @@ StatusCode CNNNetworkImpl::setBatchSizeReshape(size_t size, ResponseDesc* respon
}
}
return reshape(inputShapes, responseDesc);
} catch (const InferenceEngineException& e) {
} catch (const Exception& e) {
return DescriptionBuffer(GENERAL_ERROR, responseDesc) << e.what();
} catch (const std::exception& e) {
return DescriptionBuffer(UNEXPECTED, responseDesc) << e.what();

View File

@ -36,7 +36,7 @@ void CNNLayer::parseParams() {
try {
LayerValidator::Ptr validator = LayerValidators::getInstance()->getValidator(type);
validator->parseParams(this);
} catch (const InferenceEngineException& ie_e) {
} catch (const Exception& ie_e) {
THROW_IE_EXCEPTION << "Error of validate layer: " << this->name << " with type: " << this->type << ". "
<< ie_e.what();
}

View File

@ -91,7 +91,7 @@ Paddings getPaddingsInternal(const Layer& layer) {
}
}
return {layer._padding, layer._pads_end};
} catch (const InferenceEngine::details::InferenceEngineException& iee) {
} catch (const InferenceEngine::Exception& iee) {
THROW_IE_EXCEPTION << errorPrefix << iee.what();
}
}

View File

@ -19,12 +19,12 @@ namespace ngraph {
namespace pass {
namespace low_precision {
class TRANSFORMATIONS_API InferenceEngineException : std::exception {
class TRANSFORMATIONS_API Exception : std::exception {
std::shared_ptr<std::ostringstream> buffer;
mutable std::string buffer_str;
public:
template <typename T>
InferenceEngineException& operator<< (const T& x) {
Exception& operator<< (const T& x) {
*buffer << x;
return *this;
}
@ -35,10 +35,10 @@ public:
}
};
#define THROW_TRANSFORMATION_EXCEPTION throw ::ngraph::pass::low_precision::InferenceEngineException() << __FILE__ << ":" << __LINE__ << " "
#define THROW_TRANSFORMATION_EXCEPTION throw ::ngraph::pass::low_precision::Exception() << __FILE__ << ":" << __LINE__ << " "
class TRANSFORMATIONS_API InferenceEngineLptException : public InferenceEngineException {
class TRANSFORMATIONS_API InferenceEngineLptException : public Exception {
public:
InferenceEngineLptException(const std::string& filename, const size_t line, const Node& node) {
*this

View File

@ -106,7 +106,7 @@ void Config::readProperties(const std::map<std::string, std::string> &prop) {
<< ". Expected only YES/NO";
}
} else {
THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property " << key << " by CPU plugin";
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property " << key << " by CPU plugin";
}
_config.clear();
}

View File

@ -2,7 +2,8 @@
// SPDX-License-Identifier: Apache-2.0
//
#include <details/ie_exception.hpp>
#include <ie_common.h>
#include "mkldnn_descriptor.h"
mkldnn::primitive_desc_iterator MKLDNNDescriptor::createPrimitiveDescriptorIterator(const mkldnn::engine &engine,

View File

@ -296,14 +296,14 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std::
void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const InferenceEngine::Blob::Ptr &data) {
OV_ITT_SCOPED_TASK(itt::domains::MKLDNNPlugin, "SetBlob");
if (name.empty()) {
THROW_IE_EXCEPTION << NOT_FOUND_str + "Failed to set blob with empty name";
THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Failed to set blob with empty name";
}
if (!data)
THROW_IE_EXCEPTION << NOT_ALLOCATED_str << "Failed to set empty blob with name: \'" << name << "\'";
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) << "Failed to set empty blob with name: \'" << name << "\'";
const bool compoundBlobPassed = data->is<InferenceEngine::CompoundBlob>();
if (!compoundBlobPassed && data->buffer() == nullptr)
THROW_IE_EXCEPTION << "Input data was not allocated. Input name: \'" << name << "\'";
THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) << "Input data was not allocated. Input name: \'" << name << "\'";
if (data->size() == 0) {
THROW_IE_EXCEPTION << "Input data is empty. Input name: \'" << name << "\'";
}
@ -313,13 +313,13 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
size_t dataSize = data->size();
if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) {
if (foundInput->getPrecision() != data->getTensorDesc().getPrecision()) {
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set input blob with precision: "
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set input blob with precision: "
<< data->getTensorDesc().getPrecision() << ", if CNNNetwork input blob precision is: " << foundInput->getPrecision();
}
const bool preProcRequired = preProcessingRequired(foundInput, data);
if (compoundBlobPassed && !preProcRequired) {
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented)
<< "cannot set compound blob: supported only for input pre-processing";
}
@ -341,12 +341,12 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
}
if (foundInput->getTensorDesc().getDims() != data->getTensorDesc().getDims()) {
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set input blob. Dimensions mismatch.";
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set input blob. Dimensions mismatch.";
}
if (data->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY && foundInput->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY &&
foundInput->getTensorDesc().getBlockingDesc() != data->getTensorDesc().getBlockingDesc()) {
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set input blob. Blocking descriptor mismatch.";
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set input blob. Blocking descriptor mismatch.";
}
if (data->getTensorDesc().getPrecision() == InferenceEngine::Precision::FP32 &&
@ -359,11 +359,11 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
}
} else {
if (compoundBlobPassed) {
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented)
<< "cannot set compound blob: supported only for input pre-processing";
}
if (foundOutput->getPrecision() != data->getTensorDesc().getPrecision()) {
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set output blob with precision: "
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set output blob with precision: "
<< data->getTensorDesc().getPrecision() << ", if CNNNetwork output blob precision is: " << foundOutput->getPrecision();
}
size_t outputSize = foundOutput->getTensorDesc().getLayout() != InferenceEngine::Layout::SCALAR
@ -374,11 +374,11 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In
<< dataSize << "!=" << outputSize << ").";
}
if (foundOutput->getTensorDesc().getDims() != data->getTensorDesc().getDims()) {
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set output Blob. Dimensions mismatch.";
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set output Blob. Dimensions mismatch.";
}
if (data->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY && foundOutput->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY &&
foundOutput->getTensorDesc().getBlockingDesc() != data->getTensorDesc().getBlockingDesc()) {
THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set output blob. Blocking descriptor mismatch.";
THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set output blob. Blocking descriptor mismatch.";
}
if (data->getTensorDesc().getPrecision() == InferenceEngine::Precision::FP32 &&
!graph->getProperty().batchLimit) {

View File

@ -2,9 +2,10 @@
// SPDX-License-Identifier: Apache-2.0
//
#include <ie_common.h>
#include "mkldnn_memory_solver.hpp"
#include <details/ie_exception.hpp>
#include <algorithm>
#include <vector>

View File

@ -345,7 +345,7 @@ Engine::LoadExeNetworkImpl(const InferenceEngine::CNNNetwork &network, const std
input_precision != InferenceEngine::Precision::BOOL &&
input_precision != InferenceEngine::Precision::I64 &&
input_precision != InferenceEngine::Precision::U64) {
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str
THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented)
<< "Input image format " << input_precision << " is not supported yet...";
}
}
@ -513,7 +513,7 @@ QueryNetworkResult Engine::QueryNetwork(const CNNNetwork& network, const std::ma
std::unique_ptr<MKLDNNNode> ptr;
try {
ptr.reset(MKLDNNNode::factory().create(*itLayer, {mkldnn::engine::kind::cpu, 0}, extensionManager, fake_w_cache));
} catch (InferenceEngine::details::InferenceEngineException&) {
} catch (InferenceEngine::Exception&) {
return false;
}
return true;
@ -569,7 +569,7 @@ QueryNetworkResult Engine::QueryNetwork(const CNNNetwork& network, const std::ma
// if we can create and have not thrown exception, then layer is supported
std::unique_ptr <MKLDNNNode>(MKLDNNNode::factory().create(*i, eng, extensionManager, fake_w_cache));
res.supportedLayersMap.insert({ (*i)->name, GetName() });
} catch (InferenceEngine::details::InferenceEngineException&) {
} catch (InferenceEngine::Exception&) {
}
i++;
}

View File

@ -9,7 +9,6 @@
#include <ie_common.h>
#include <vector>
#include <memory>
#include <details/ie_exception.hpp>
namespace MKLDNNPlugin {

View File

@ -28,7 +28,7 @@ public:
std::stoi(layer->params.at("axis")) :0;
addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)});
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -66,7 +66,7 @@ public:
config.outConfs.push_back(outConfig);
config.dynBatchSupport = false;
confs.push_back(config);
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -46,7 +46,7 @@ public:
config.outConfs.push_back(outConfig);
config.dynBatchSupport = false;
confs.push_back(config);
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -73,7 +73,7 @@ public:
{ DataConfigurator(ConfLayout::PLN, input_precision), DataConfigurator(ConfLayout::PLN, boundaries_precision) },
{ DataConfigurator(ConfLayout::PLN, output_precision) });
}
catch (InferenceEngine::details::InferenceEngineException &ex) {
catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -5,6 +5,7 @@
#pragma once
#include <ie_layouts.h>
#include <functional>
namespace MKLDNNPlugin {

View File

@ -87,7 +87,7 @@ public:
config.dynBatchSupport = false;
confs.push_back(config);
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -96,7 +96,7 @@ public:
config.dynBatchSupport = false;
confs.push_back(config);
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -114,7 +114,7 @@ public:
std::vector<DataConfigurator> in_data_conf(layer->insData.size(), DataConfigurator(ConfLayout::PLN, Precision::FP32));
addConfig(layer, in_data_conf, {DataConfigurator(ConfLayout::PLN, Precision::FP32)});
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -273,7 +273,7 @@ public:
config.outConfs.push_back(dataS);
config.dynBatchSupport = false;
confs.push_back(config);
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -89,7 +89,7 @@ MKLDNNEmbeddingBagSum::MKLDNNEmbeddingBagSum(
for (size_t i = 1lu; i < inDataDims.size(); i++) {
_embDepth *= inDataDims[i];
}
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -83,7 +83,7 @@ public:
config.dynBatchSupport = false;
confs.push_back(config);
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -42,7 +42,7 @@ public:
addConfig(layer, { DataConfigurator(ConfLayout::PLN, Precision::I32), DataConfigurator(ConfLayout::PLN) },
{ DataConfigurator(ConfLayout::PLN) });
}
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -68,7 +68,7 @@ public:
config.outConfs.push_back(dataConfigOut);
config.dynBatchSupport = false;
confs.push_back(config);
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -51,7 +51,7 @@ public:
addConfig(layer, { DataConfigurator(ConfLayout::PLN, precision), DataConfigurator(ConfLayout::PLN, precision),
DataConfigurator(ConfLayout::PLN, precision), DataConfigurator(ConfLayout::PLN, precision) },
{ DataConfigurator(ConfLayout::PLN, precision) });
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -23,7 +23,7 @@ public:
bias = layer->GetParamAsFloat("bias");
addConfig(layer, {{ConfLayout::PLN, false, 0, Precision::FP32}}, {{ConfLayout::PLN, false, 0, Precision::FP32}});
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -49,7 +49,7 @@ public:
reduced_axis_stride *= dims[i];
addConfig(layer, { { ConfLayout::PLN, false, 0, Precision::FP32 } }, { { ConfLayout::PLN, false, 0, Precision::FP32 } });
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -87,7 +87,7 @@ public:
THROW_IE_EXCEPTION << layer->name << " Incorrect Math layer type!";
addConfig(layer, {DataConfigurator(ConfLayout::PLN, false, 0, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, false, 0, Precision::FP32)});
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -9,7 +9,6 @@
#include <vector>
#include <mkldnn_extension_utils.h>
#include "details/ie_exception.hpp"
#include <legacy/ie_layers.h>
#include "mkldnn.hpp"
#include "mkldnn/iml_type_mapper.h"

View File

@ -138,7 +138,7 @@ public:
config.dynBatchSupport = false;
confs.push_back(config);
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -73,7 +73,7 @@ public:
config.outConfs.push_back(dataConfig);
confs.push_back(config);
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception& ex) {
errorMsg = ex.what();
}
}

View File

@ -28,7 +28,7 @@ public:
shift_.push_back(0);
addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)});
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -117,7 +117,7 @@ public:
}
addConfig(layer, {{ConfLayout::ANY, true}, {ConfLayout::ANY, true}}, {{ConfLayout::PLN, true, -1, Precision::FP32}});
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -33,7 +33,7 @@ public:
offset_ = layer->GetParamAsFloat("offset");
addConfig(layer, {{ConfLayout::PLN, true}, {ConfLayout::PLN, true}}, {{ConfLayout::PLN, true, -1, Precision::FP32}});
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -47,7 +47,7 @@ public:
addConfig(layer,
{DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::ANY), DataConfigurator(ConfLayout::ANY)},
{DataConfigurator(ConfLayout::PLN, Precision::FP32)});
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -126,7 +126,7 @@ public:
addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32),
DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)});
}
} catch (const InferenceEngine::details::InferenceEngineException &ex) {
} catch (const InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}
@ -179,7 +179,7 @@ public:
{img_H, img_W, scale_H, scale_W}, anchors.data(), roi_indices.data(), p_roi_item, p_prob_item, conf);
return OK;
} catch (const InferenceEngine::details::InferenceEngineException& e) {
} catch (const InferenceEngine::Exception& e) {
if (resp) {
std::string errorMsg = e.what();
errorMsg.copy(resp->msg, sizeof(resp->msg) - 1);

View File

@ -299,7 +299,7 @@ public:
{DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32),
DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32)},
{DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32)});
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}
@ -408,7 +408,7 @@ public:
}
return OK;
} catch (const InferenceEngine::details::InferenceEngineException& e) {
} catch (const std::exception& e) {
if (resp) {
std::string errorMsg = e.what();
errorMsg.copy(resp->msg, sizeof(resp->msg) - 1);

View File

@ -89,7 +89,7 @@ public:
DataConfigurator(ConfLayout::PLN, Precision::FP32),
DataConfigurator(ConfLayout::PLN)}, {DataConfigurator(ConfLayout::PLN, supportedPrecision)});
}
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -54,7 +54,7 @@ public:
addConfig(layer, { DataConfigurator(ConfLayout::PLN), DataConfigurator(ConfLayout::PLN), DataConfigurator(ConfLayout::PLN) },
{ DataConfigurator(ConfLayout::PLN) });
}
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

View File

@ -310,7 +310,7 @@ public:
logistic_kernel->create_ker();
addConfig(layer, {DataConfigurator(ConfLayout::PLN, input_prec)}, {DataConfigurator(ConfLayout::PLN, output_prec)});
} catch (InferenceEngine::details::InferenceEngineException &ex) {
} catch (InferenceEngine::Exception &ex) {
errorMsg = ex.what();
}
}

Some files were not shown because too many files have changed in this diff Show More