diff --git a/docs/template_extension/cpu_kernel.cpp b/docs/template_extension/cpu_kernel.cpp index 8d104b26bf9..7783449559e 100644 --- a/docs/template_extension/cpu_kernel.cpp +++ b/docs/template_extension/cpu_kernel.cpp @@ -3,7 +3,6 @@ // #include "cpu_kernel.hpp" #include "op.hpp" -#include
#include using namespace TemplateExtension; @@ -25,7 +24,7 @@ OpImplementation::OpImplementation(const std::shared_ptr &node) { add = castedNode->getAddAttr(); inShape = castedNode->get_input_shape(0); outShape = castedNode->get_output_shape(0); - } catch (InferenceEngine::details::InferenceEngineException& ex) { + } catch (InferenceEngine::Exception& ex) { error = ex.what(); } } @@ -92,14 +91,15 @@ InferenceEngine::StatusCode OpImplementation::init(InferenceEngine::LayerConfig } if (config.inConfs[0].desc.getDims().size() != 4 || config.outConfs[0].desc.getDims().size() != 4) { - THROW_IE_EXCEPTION << "Operation can be initialized only with 4d input/output tensors!"; + THROW_IE_EXCEPTION + << "Operation can be initialized only with 4d input/output tensors!"; } if (config.outConfs[0].desc.getPrecision() != InferenceEngine::Precision::FP32 || config.inConfs[0].desc.getPrecision() != InferenceEngine::Precision::FP32) { THROW_IE_EXCEPTION << "Operation supports only FP32 precisions!"; } - } catch (InferenceEngine::details::InferenceEngineException&) { + } catch (InferenceEngine::Exception& ex) { if (resp) { strncpy(resp->msg, error.c_str(), sizeof(resp->msg) - 1); resp->msg[sizeof(resp->msg)-1] = 0; diff --git a/docs/template_extension/fft_kernel.cpp b/docs/template_extension/fft_kernel.cpp index 9baca50ee63..199a1c67117 100644 --- a/docs/template_extension/fft_kernel.cpp +++ b/docs/template_extension/fft_kernel.cpp @@ -5,7 +5,6 @@ //! [fft_kernel:implementation] #include "fft_kernel.hpp" #include "fft_op.hpp" -#include
#include #include @@ -65,7 +64,7 @@ InferenceEngine::StatusCode FFTImpl::init(InferenceEngine::LayerConfig &config, config.inConfs[0].desc.getPrecision() != InferenceEngine::Precision::FP32) { THROW_IE_EXCEPTION << "Operation supports only FP32 precisions!"; } - } catch (InferenceEngine::details::InferenceEngineException&) { + } catch (InferenceEngine::Exception& ex) { if (resp) { strncpy(resp->msg, error.c_str(), sizeof(resp->msg) - 1); resp->msg[sizeof(resp->msg)-1] = 0; diff --git a/docs/template_plugin/src/template_config.cpp b/docs/template_plugin/src/template_config.cpp index 7297c5effbd..a07b3b56601 100644 --- a/docs/template_plugin/src/template_config.cpp +++ b/docs/template_plugin/src/template_config.cpp @@ -34,7 +34,7 @@ Configuration::Configuration(const ConfigMap& config, const Configuration & defa } else if (CONFIG_KEY(PERF_COUNT) == key) { perfCount = (CONFIG_VALUE(YES) == value); } else if (throwOnUnsupported) { - THROW_IE_EXCEPTION << NOT_FOUND_str << ": " << key; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << ": " << key; } } } @@ -53,6 +53,6 @@ InferenceEngine::Parameter Configuration::Get(const std::string& name) const { } else if (name == CONFIG_KEY_INTERNAL(CPU_THREADS_PER_STREAM)) { return {std::to_string(_streamsExecutorConfig._threadsPerStream)}; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << ": " << name; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << ": " << name; } } diff --git a/docs/template_plugin/src/template_executable_network.cpp b/docs/template_plugin/src/template_executable_network.cpp index e848ced12e4..48568d6b59e 100644 --- a/docs/template_plugin/src/template_executable_network.cpp +++ b/docs/template_plugin/src/template_executable_network.cpp @@ -27,7 +27,7 @@ TemplatePlugin::ExecutableNetwork::ExecutableNetwork(const std::shared_ptr #include #include "inference_engine.hpp" -#include "details/ie_exception.hpp" #include "ie_compound_blob.h" #include "c_api/ie_c_api.h" @@ -119,6 +118,23 @@ std::map colorformat_map = 
{{IE::ColorFormat::RA {IE::ColorFormat::NV12, colorformat_e::NV12}, {IE::ColorFormat::I420, colorformat_e::I420}}; +#define CATCH_IE_EXCEPTION(StatusCode, ExceptionType) catch (const IE::ExceptionType&) {return IEStatusCode::StatusCode;} + +#define CATCH_IE_EXCEPTIONS \ + CATCH_IE_EXCEPTION(GENERAL_ERROR, GeneralError) \ + CATCH_IE_EXCEPTION(NOT_IMPLEMENTED, NotImplemented) \ + CATCH_IE_EXCEPTION(NETWORK_NOT_LOADED, NetworkNotLoaded) \ + CATCH_IE_EXCEPTION(PARAMETER_MISMATCH, ParameterMismatch) \ + CATCH_IE_EXCEPTION(NOT_FOUND, NotFound) \ + CATCH_IE_EXCEPTION(OUT_OF_BOUNDS, OutOfBounds) \ + CATCH_IE_EXCEPTION(UNEXPECTED, Unexpected) \ + CATCH_IE_EXCEPTION(REQUEST_BUSY, RequestBusy) \ + CATCH_IE_EXCEPTION(RESULT_NOT_READY, ResultNotReady) \ + CATCH_IE_EXCEPTION(NOT_ALLOCATED, NotAllocated) \ + CATCH_IE_EXCEPTION(INFER_NOT_STARTED, InferNotStarted) \ + CATCH_IE_EXCEPTION(NETWORK_NOT_READ, NetworkNotRead) \ + CATCH_IE_EXCEPTION(INFER_CANCELLED, InferCancelled) + /** *@brief convert the config type data to map type data. */ @@ -222,9 +238,7 @@ IEStatusCode ie_core_create(const char *xml_config_file, ie_core_t **core) { std::unique_ptr tmp(new ie_core_t); tmp->object = IE::Core(xml_config_file); *core = tmp.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -268,9 +282,7 @@ IEStatusCode ie_core_get_versions(const ie_core_t *core, const char *device_name vers_ptrs[i].description = iter->second.description; } versions->versions = vers_ptrs.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) 
{ return IEStatusCode::UNEXPECTED; } @@ -303,9 +315,7 @@ IEStatusCode ie_core_read_network(ie_core_t *core, const char *xml, const char * } network_result->object = core->object.ReadNetwork(xml, bin); *network = network_result.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -325,9 +335,7 @@ IEStatusCode ie_core_read_network_from_memory(ie_core_t *core, const uint8_t *xm network_result->object = core->object.ReadNetwork(std::string(reinterpret_cast(xml_content), reinterpret_cast(xml_content + xml_content_size)), weight_blob->object); *network = network_result.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -351,9 +359,7 @@ IEStatusCode ie_core_load_network(ie_core_t *core, const ie_network_t *network, // create plugin in the registery and then create ExecutableNetwork. exe_net->object = core->object.LoadNetwork(network->object, device_name, conf_map); *exe_network = exe_net.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -376,9 +382,7 @@ IEStatusCode ie_core_set_config(ie_core_t *core, const ie_config_t *ie_core_conf try { core->object.SetConfig(conf_map, deviceName); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) 
{ return IEStatusCode::UNEXPECTED; } @@ -395,9 +399,7 @@ IEStatusCode ie_core_register_plugin(ie_core_t *core, const char *plugin_name, c try { core->object.RegisterPlugin(plugin_name, device_name); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -414,9 +416,7 @@ IEStatusCode ie_core_register_plugins(ie_core_t *core, const char *xml_config_fi try { core->object.RegisterPlugins(xml_config_file); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -433,9 +433,7 @@ IEStatusCode ie_core_unregister_plugin(ie_core_t *core, const char *device_name) try { core->object.UnregisterPlugin(device_name); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -454,9 +452,7 @@ IEStatusCode ie_core_add_extension(ie_core_t *core, const char *extension_path, auto extension_ptr = std::make_shared(std::string{extension_path}); auto extension = std::dynamic_pointer_cast(extension_ptr); core->object.AddExtension(extension, device_name); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) 
{ return IEStatusCode::UNEXPECTED; } @@ -474,9 +470,7 @@ IEStatusCode ie_core_get_metric(const ie_core_t *core, const char *device_name, try { IE::Parameter param = core->object.GetMetric(device_name, metric_name); parameter2IEparam(param, param_result); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -496,9 +490,7 @@ IEStatusCode ie_core_get_config(const ie_core_t *core, const char *device_name, // convert the parameter to ie_param_t parameter2IEparam(param, param_result); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -522,11 +514,7 @@ IEStatusCode ie_core_get_available_devices(const ie_core_t *core, ie_available_d memcpy(dev_ptrs[i], _devices[i].c_str(), _devices[i].length() + 1); } avai_devices->devices = dev_ptrs.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (const std::exception&) { - return IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -565,9 +553,7 @@ IEStatusCode ie_exec_network_create_infer_request(ie_executable_network_t *ie_ex std::unique_ptr req(new ie_infer_request_t); req->object = ie_exec_network->object.CreateInferRequest(); *request = req.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) 
{ return IEStatusCode::UNEXPECTED; } @@ -585,9 +571,7 @@ IEStatusCode ie_exec_network_get_metric(const ie_executable_network_t *ie_exec_n try { InferenceEngine::Parameter parameter = ie_exec_network->object.GetMetric(metric_name); parameter2IEparam(parameter, param_result); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -605,9 +589,7 @@ IEStatusCode ie_exec_network_set_config(ie_executable_network_t *ie_exec_network try { const std::map conf_map = config2ParamMap(param_config); ie_exec_network->object.SetConfig(conf_map); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -625,9 +607,7 @@ IEStatusCode ie_exec_network_get_config(const ie_executable_network_t *ie_exec_n try { InferenceEngine::Parameter parameter = ie_exec_network->object.GetConfig(metric_config); parameter2IEparam(parameter, param_result); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -651,9 +631,7 @@ IEStatusCode ie_network_get_name(const ie_network_t *network, char **name) { std::unique_ptr netName(new char[_name.length() + 1]); *name = netName.release(); memcpy(*name, _name.c_str(), _name.length() + 1); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) 
{ return IEStatusCode::UNEXPECTED; } @@ -670,9 +648,7 @@ IEStatusCode ie_network_get_inputs_number(const ie_network_t *network, size_t *s try { IE::InputsDataMap inputs = network->object.getInputsInfo(); *size_result = inputs.size(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -701,9 +677,7 @@ IEStatusCode ie_network_get_input_name(const ie_network_t *network, size_t numbe *name = inputName.release(); memcpy(*name, iter->first.c_str(), iter->first.length() + 1); } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -726,9 +700,7 @@ IEStatusCode ie_network_get_input_precision(const ie_network_t *network, const c IE::Precision p = inputs[input_name]->getPrecision(); *prec_result = precision_map[p]; } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -757,9 +729,7 @@ IEStatusCode ie_network_set_input_precision(ie_network_t *network, const char *i } inputs[input_name]->setPrecision(precision); } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -782,9 +752,7 @@ IEStatusCode ie_network_get_input_layout(const ie_network_t *network, const char IE::Layout l = inputs[input_name]->getLayout(); *layout_result = layout_map[l]; } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? 
status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -813,9 +781,7 @@ IEStatusCode ie_network_set_input_layout(ie_network_t *network, const char *inpu } inputs[input_name]->setLayout(layout); } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -841,9 +807,7 @@ IEStatusCode ie_network_get_input_dims(const ie_network_t *network, const char * dims_result->dims[i] = dims[i]; } } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -866,9 +830,7 @@ IEStatusCode ie_network_get_input_resize_algorithm(const ie_network_t *network, IE::ResizeAlgorithm resize = inputs[input_name]->getPreProcess().getResizeAlgorithm(); *resize_alg_result = resize_alg_map[resize]; } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -897,9 +859,7 @@ IEStatusCode ie_network_set_input_resize_algorithm(ie_network_t *network, const } inputs[input_name]->getPreProcess().setResizeAlgorithm(resize); } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) 
{ return IEStatusCode::UNEXPECTED; } @@ -922,9 +882,7 @@ IEStatusCode ie_network_get_color_format(const ie_network_t *network, const char IE::ColorFormat color = inputs[input_name]->getPreProcess().getColorFormat(); *colformat_result = colorformat_map[color]; } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -953,9 +911,7 @@ IEStatusCode ie_network_set_color_format(ie_network_t *network, const char *inpu } inputs[input_name]->getPreProcess().setColorFormat(color); } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -993,9 +949,7 @@ IEStatusCode ie_network_get_input_shapes(ie_network *network, input_shapes_t *sh } shapes->shapes = shape_ptrs.release(); status = IEStatusCode::OK; - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1022,9 +976,7 @@ IEStatusCode ie_network_reshape(ie_network_t *network, const input_shapes_t shap } network->object.reshape(net_shapes); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1042,9 +994,7 @@ IEStatusCode ie_network_get_outputs_number(const ie_network_t *network, size_t * try { IE::OutputsDataMap outputs = network->object.getOutputsInfo(); *size_result = outputs.size(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? 
status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1073,9 +1023,7 @@ IEStatusCode ie_network_get_output_name(const ie_network_t *network, const size_ *name = outputName.release(); memcpy(*name, iter->first.c_str(), iter->first.length() + 1); } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1098,9 +1046,7 @@ IEStatusCode ie_network_get_output_precision(const ie_network_t *network, const IE::Precision p = outputs[output_name]->getPrecision(); *prec_result = precision_map[p]; } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1129,9 +1075,7 @@ IEStatusCode ie_network_set_output_precision(ie_network_t *network, const char * } outputs[output_name]->setPrecision(precision); } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1154,9 +1098,7 @@ IEStatusCode ie_network_get_output_layout(const ie_network_t *network, const cha IE::Layout l = outputs[output_name]->getLayout(); *layout_result = layout_map[l]; } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) 
{ return IEStatusCode::UNEXPECTED; } @@ -1185,9 +1127,7 @@ IEStatusCode ie_network_set_output_layout(ie_network_t *network, const char *out } outputs[output_name]->setLayout(layout); } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1213,9 +1153,7 @@ IEStatusCode ie_network_get_output_dims(const ie_network_t *network, const char dims_result->dims[i] = dims[i]; } } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1260,9 +1198,7 @@ IEStatusCode ie_infer_request_get_blob(ie_infer_request_t *infer_request, const std::unique_ptr blob_result(new ie_blob_t); blob_result->object = blob_ptr; *blob = blob_result.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1279,9 +1215,7 @@ IEStatusCode ie_infer_request_set_blob(ie_infer_request_t *infer_request, const try { infer_request->object.SetBlob(name, blob->object); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1298,9 +1232,7 @@ IEStatusCode ie_infer_request_infer(ie_infer_request_t *infer_request) { try { infer_request->object.Infer(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) 
{ return IEStatusCode::UNEXPECTED; } @@ -1317,9 +1249,7 @@ IEStatusCode ie_infer_request_infer_async(ie_infer_request_t *infer_request) { try { infer_request->object.StartAsync(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1339,9 +1269,7 @@ IEStatusCode ie_infer_set_completion_callback(ie_infer_request_t *infer_request, callback->completeCallBackFunc(callback->args); }; infer_request->object.SetCompletionCallback(fun); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1359,9 +1287,7 @@ IEStatusCode ie_infer_request_wait(ie_infer_request_t *infer_request, const int6 try { IE::StatusCode status_code = infer_request->object.Wait(timeout); status = status_map[status_code]; - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1378,9 +1304,7 @@ IEStatusCode ie_infer_request_set_batch(ie_infer_request_t *infer_request, const try { infer_request->object.SetBatch(size); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1444,9 +1368,7 @@ IEStatusCode ie_blob_make_memory(const tensor_desc_t *tensorDesc, ie_blob_t **bl _blob->object->allocate(); *blob = _blob.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) 
{ + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1518,9 +1440,7 @@ IEStatusCode ie_blob_make_memory_from_preallocated(const tensor_desc_t *tensorDe _blob->object = IE::make_shared_blob(tensor, p, size); } *blob = _blob.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1538,9 +1458,7 @@ IEStatusCode ie_blob_make_memory_with_roi(const ie_blob_t *inputBlob, const roi_ IE::ROI roi_d = {roi->id, roi->posX, roi->posY, roi->sizeX, roi->sizeY}; _blob->object = IE::make_shared_blob(inputBlob->object, roi_d); *blob = _blob.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1556,9 +1474,7 @@ IEStatusCode ie_blob_make_memory_nv12(const ie_blob_t *y, const ie_blob_t *uv, i std::unique_ptr _blob(new ie_blob_t); _blob->object = IE::make_shared_blob(y->object, uv->object); *nv12Blob = _blob.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1574,9 +1490,7 @@ IEStatusCode ie_blob_make_memory_i420(const ie_blob_t *y, const ie_blob_t *u, co std::unique_ptr _blob(new ie_blob_t); _blob->object = IE::make_shared_blob(y->object, u->object, v->object); *i420Blob = _blob.release(); - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) 
{ return IEStatusCode::UNEXPECTED; } @@ -1651,9 +1565,7 @@ IEStatusCode ie_blob_get_dims(const ie_blob_t *blob, dimensions_t *dims_result) for (size_t i = 0; i< dims_result->ranks; ++i) { dims_result->dims[i] = size_vector[i]; } - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1671,9 +1583,7 @@ IEStatusCode ie_blob_get_layout(const ie_blob_t *blob, layout_e *layout_result) try { IE::Layout l = blob->object->getTensorDesc().getLayout(); *layout_result = layout_map[l]; - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) { return IEStatusCode::UNEXPECTED; } @@ -1691,9 +1601,7 @@ IEStatusCode ie_blob_get_precision(const ie_blob_t *blob, precision_e *prec_resu try { IE::Precision p = blob->object->getTensorDesc().getPrecision(); *prec_result = precision_map[p]; - } catch (const IE::details::InferenceEngineException& e) { - return e.hasStatus() ? status_map[e.getStatus()] : IEStatusCode::UNEXPECTED; - } catch (...) { + } CATCH_IE_EXCEPTIONS catch (...) 
{ return IEStatusCode::UNEXPECTED; } diff --git a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp index 1078685a2d6..72c0e9aa1ca 100644 --- a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp +++ b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api_impl.cpp @@ -383,7 +383,10 @@ void InferenceEnginePython::InferRequestWrap::setBatch(int size) { void latency_callback(InferenceEngine::IInferRequest::Ptr request, InferenceEngine::StatusCode code) { if (code != InferenceEngine::StatusCode::OK) { - THROW_IE_EXCEPTION << "Async Infer Request failed with status code " << code; + IE_EXCEPTION_SWITCH(code, ExceptionType, + InferenceEngine::details::ThrowNow{} + <<= std::stringstream{} << IE_LOCATION + << InferenceEngine::details::ExceptionTraits::string()); } InferenceEnginePython::InferRequestWrap *requestWrap; InferenceEngine::ResponseDesc dsc; diff --git a/inference-engine/include/cpp/ie_cnn_network.h b/inference-engine/include/cpp/ie_cnn_network.h index a9baeb38668..476b78604f0 100644 --- a/inference-engine/include/cpp/ie_cnn_network.h +++ b/inference-engine/include/cpp/ie_cnn_network.h @@ -19,7 +19,6 @@ #include "ie_blob.h" #include "ie_common.h" #include "ie_data.h" -#include "details/ie_exception_conversion.hpp" #include "ie_extension.h" namespace ngraph { diff --git a/inference-engine/include/cpp/ie_executable_network.hpp b/inference-engine/include/cpp/ie_executable_network.hpp index babe31c25f4..6ea6659e936 100644 --- a/inference-engine/include/cpp/ie_executable_network.hpp +++ b/inference-engine/include/cpp/ie_executable_network.hpp @@ -19,7 +19,6 @@ #include "cpp/ie_infer_request.hpp" #include "cpp/ie_memory_state.hpp" #include "ie_iexecutable_network.hpp" -#include "details/ie_exception_conversion.hpp" #include "details/ie_so_loader.h" namespace InferenceEngine { diff --git 
a/inference-engine/include/cpp/ie_infer_request.hpp b/inference-engine/include/cpp/ie_infer_request.hpp index ab4e0d75536..4fb73ea2daf 100644 --- a/inference-engine/include/cpp/ie_infer_request.hpp +++ b/inference-engine/include/cpp/ie_infer_request.hpp @@ -16,7 +16,6 @@ #include "cpp/ie_memory_state.hpp" #include "ie_remote_context.hpp" #include "ie_iinfer_request.hpp" -#include "details/ie_exception_conversion.hpp" #include "details/ie_so_loader.h" #include "ie_blob.h" @@ -245,7 +244,9 @@ public: auto res = actual->Wait(millis_timeout, &resp); if (res != OK && res != RESULT_NOT_READY && res != INFER_NOT_STARTED && res != INFER_CANCELLED) { - THROW_IE_EXCEPTION << InferenceEngine::details::as_status << res << resp.msg; + IE_EXCEPTION_SWITCH(res, ExceptionType, + InferenceEngine::details::ThrowNow{} + <<= std::stringstream{} << IE_LOCATION << resp.msg) } return res; } diff --git a/inference-engine/include/cpp/ie_memory_state.hpp b/inference-engine/include/cpp/ie_memory_state.hpp index 6f6ec043d94..88cec51177e 100644 --- a/inference-engine/include/cpp/ie_memory_state.hpp +++ b/inference-engine/include/cpp/ie_memory_state.hpp @@ -13,7 +13,6 @@ #include #include "ie_blob.h" -#include "details/ie_exception_conversion.hpp" #include "details/ie_so_loader.h" namespace InferenceEngine { diff --git a/inference-engine/include/details/ie_exception.hpp b/inference-engine/include/details/ie_exception.hpp index 73babcf3677..bbb23041056 100644 --- a/inference-engine/include/details/ie_exception.hpp +++ b/inference-engine/include/details/ie_exception.hpp @@ -2,168 +2,6 @@ // SPDX-License-Identifier: Apache-2.0 // -/** - * @brief A header file for the main Inference Engine exception - * - * @file ie_exception.hpp - */ #pragma once -#include "ie_api.h" - -#include -#include -#include -#include -#include -#include - -/** - * @def THROW_IE_EXCEPTION - * @brief A macro used to throw general exception with a description - */ -#define THROW_IE_EXCEPTION throw 
InferenceEngine::details::InferenceEngineException(__FILE__, __LINE__) - -/** - * @def IE_ASSERT - * @brief Uses assert() function if NDEBUG is not defined, InferenceEngine exception otherwise - */ -#ifdef NDEBUG -#define IE_ASSERT(EXPRESSION) \ - if (!(EXPRESSION)) \ - throw InferenceEngine::details::InferenceEngineException(__FILE__, __LINE__) \ - << "AssertionFailed: " << #EXPRESSION // NOLINT -#else -#include - -/** - * @private - */ -class NullStream { -public: - template - NullStream& operator<<(const T&) noexcept { - return *this; - } - - NullStream& operator<<(std::ostream& (*)(std::ostream&)) noexcept { - return *this; - } -}; - -#define IE_ASSERT(EXPRESSION) \ - assert((EXPRESSION)); \ - NullStream() -#endif // NDEBUG - -namespace InferenceEngine { -enum StatusCode : int; -namespace details { - -/** - * @brief The InferenceEngineException class implements the main Inference Engine exception - */ -class INFERENCE_ENGINE_API_CLASS(InferenceEngineException): public std::exception { - mutable std::string errorDesc; - StatusCode status_code = static_cast(0); - std::string _file; - int _line; - std::shared_ptr exception_stream; - bool save_to_status_code = false; - -public: - /** - * @brief A C++ std::exception API member - * @return An exception description with a file name and file line - */ - const char* what() const noexcept override { - if (errorDesc.empty() && exception_stream) { - errorDesc = exception_stream->str(); -#ifndef NDEBUG - errorDesc += "\n" + _file + ":" + std::to_string(_line); -#endif - } - return errorDesc.c_str(); - } - - /** - * @brief A constructor. 
Creates an InferenceEngineException object from a specific file and line - * @param filename File where exception has been thrown - * @param line Line of the exception emitter - * @param message Exception message - */ - InferenceEngineException(const std::string& filename, const int line, const std::string& message = "") noexcept; - - /** - * @brief noexcept required for copy ctor - * @details The C++ Standard, [except.throw], paragraph 3 [ISO/IEC 14882-2014] - */ - InferenceEngineException(const InferenceEngineException& that) noexcept; - - /** - * @brief A stream output operator to be used within exception - * @param arg Object for serialization in the exception message - */ - template - InferenceEngineException& operator<<(const T& arg) { - if (save_to_status_code) { - auto can_convert = status_code_assign(arg); - save_to_status_code = false; - if (can_convert.second) { - this->status_code = can_convert.first; - return *this; - } - } - if (!exception_stream) { - exception_stream.reset(new std::stringstream()); - } - (*exception_stream) << arg; - return *this; - } - - /** - * @brief Manipulator to indicate that next item has to be converted to StatusCode to save - * @param iex InferenceEngineException object - */ - friend InferenceEngineException& as_status(InferenceEngineException& iex) { - iex.save_to_status_code = true; - return iex; - } - - /** - * @brief A stream output operator to catch InferenceEngineException manipulators - * @param manip InferenceEngineException manipulator to call - */ - InferenceEngineException& operator<<(InferenceEngineException& (*manip)(InferenceEngineException&)) { - return manip(*this); - } - - /** @brief Check if it has StatusCode value */ - bool hasStatus() const { - return this->status_code == 0 ? 
false : true; - } - - /** @brief Get StatusCode value */ - StatusCode getStatus() const { - return this->status_code; - } - - ~InferenceEngineException() noexcept override; - -private: - std::pair status_code_assign(const StatusCode& status) { - return {status, true}; - } - - template - std::pair status_code_assign(const T&) { - return {static_cast(0), false}; - } -}; - -InferenceEngineException& as_status(InferenceEngineException& iex); - -static_assert(std::is_nothrow_copy_constructible::value, - "InferenceEngineException must be nothrow copy constructible"); -} // namespace details -} // namespace InferenceEngine +#include "ie_common.h" diff --git a/inference-engine/include/details/ie_exception_conversion.hpp b/inference-engine/include/details/ie_exception_conversion.hpp deleted file mode 100644 index 3dc288d0767..00000000000 --- a/inference-engine/include/details/ie_exception_conversion.hpp +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (C) 2018-2020 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -/** - * @brief A header file that provides macros to handle no exception methods - * - * @file ie_exception_conversion.hpp - */ -#pragma once - -#include "ie_common.h" -#include "details/ie_exception.hpp" - -#define CALL_STATUS_FNC(function, ...) 
\ - if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATUS_FNC was not initialized."; \ - ResponseDesc resp; \ - auto res = actual->function(__VA_ARGS__, &resp); \ - if (res != OK) InferenceEngine::details::extract_exception(res, resp.msg); - -#define CALL_STATUS_FNC_NO_ARGS(function) \ - if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATUS_FNC_NO_ARGS was not initialized."; \ - ResponseDesc resp; \ - auto res = actual->function(&resp); \ - if (res != OK) InferenceEngine::details::extract_exception(res, resp.msg); - -#define CALL_FNC_NO_ARGS(function) \ - if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_FNC_NO_ARGS was not initialized."; \ - ResponseDesc resp; \ - auto result = actual->function(&resp); \ - if (resp.msg[0] != '\0') { \ - THROW_IE_EXCEPTION << resp.msg; \ - } \ - return result; - -namespace InferenceEngine { -namespace details { - -inline void extract_exception(StatusCode status, const char* msg) { - switch (status) { - case NOT_IMPLEMENTED: - throw NotImplemented(msg); - case NETWORK_NOT_LOADED: - throw NetworkNotLoaded(msg); - case PARAMETER_MISMATCH: - throw ParameterMismatch(msg); - case NOT_FOUND: - throw NotFound(msg); - case OUT_OF_BOUNDS: - throw OutOfBounds(msg); - case UNEXPECTED: - throw Unexpected(msg); - case REQUEST_BUSY: - throw RequestBusy(msg); - case RESULT_NOT_READY: - throw ResultNotReady(msg); - case NOT_ALLOCATED: - throw NotAllocated(msg); - case INFER_NOT_STARTED: - throw InferNotStarted(msg); - case NETWORK_NOT_READ: - throw NetworkNotRead(msg); - case INFER_CANCELLED: - throw InferCancelled(msg); - default: - THROW_IE_EXCEPTION << msg << InferenceEngine::details::as_status << status; - } -} - -} // namespace details -} // namespace InferenceEngine diff --git a/inference-engine/include/details/ie_pre_allocator.hpp b/inference-engine/include/details/ie_pre_allocator.hpp index 5c1cd777f50..822ad5f81a7 100644 --- a/inference-engine/include/details/ie_pre_allocator.hpp +++ 
b/inference-engine/include/details/ie_pre_allocator.hpp @@ -12,7 +12,6 @@ #include #include "ie_allocator.hpp" -#include "details/ie_exception.hpp" namespace InferenceEngine { namespace details { diff --git a/inference-engine/include/details/ie_so_loader.h b/inference-engine/include/details/ie_so_loader.h index cf1edd4dff3..45e16b0b778 100644 --- a/inference-engine/include/details/ie_so_loader.h +++ b/inference-engine/include/details/ie_so_loader.h @@ -52,7 +52,7 @@ public: * @brief Searches for a function symbol in the loaded module * @param symbolName Name of function to find * @return A pointer to the function if found - * @throws InferenceEngineException if the function is not found + * @throws Exception if the function is not found */ void* get_symbol(const char* symbolName) const; }; diff --git a/inference-engine/include/details/ie_so_pointer.hpp b/inference-engine/include/details/ie_so_pointer.hpp index de3f3838cc7..f89a21a70f3 100644 --- a/inference-engine/include/details/ie_so_pointer.hpp +++ b/inference-engine/include/details/ie_so_pointer.hpp @@ -12,10 +12,10 @@ #include #include #include +#include #include "ie_common.h" #include "ie_so_loader.h" -#include "details/ie_exception.hpp" namespace InferenceEngine { namespace details { @@ -150,6 +150,22 @@ public: } protected: +#define CATCH_IE_EXCEPTION(ExceptionType) catch (const InferenceEngine::ExceptionType& e) {throw e;} +#define CATCH_IE_EXCEPTIONS \ + CATCH_IE_EXCEPTION(GeneralError) \ + CATCH_IE_EXCEPTION(NotImplemented) \ + CATCH_IE_EXCEPTION(NetworkNotLoaded) \ + CATCH_IE_EXCEPTION(ParameterMismatch) \ + CATCH_IE_EXCEPTION(NotFound) \ + CATCH_IE_EXCEPTION(OutOfBounds) \ + CATCH_IE_EXCEPTION(Unexpected) \ + CATCH_IE_EXCEPTION(RequestBusy) \ + CATCH_IE_EXCEPTION(ResultNotReady) \ + CATCH_IE_EXCEPTION(NotAllocated) \ + CATCH_IE_EXCEPTION(InferNotStarted) \ + CATCH_IE_EXCEPTION(NetworkNotRead) \ + CATCH_IE_EXCEPTION(InferCancelled) + /** * @brief Implements load of object from library if Release method 
is presented */ @@ -158,13 +174,7 @@ protected: void* create = nullptr; try { create = _so_loader->get_symbol((SOCreatorTrait::name + std::string("Shared")).c_str()); - } catch (const details::InferenceEngineException& ex) { - if ((ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) == NOT_FOUND) { - create = nullptr; - } else { - throw; - } - } + } catch (const NotFound&) {} if (create == nullptr) { create = _so_loader->get_symbol(SOCreatorTrait::name); using CreateF = StatusCode(T*&, ResponseDesc*); @@ -172,7 +182,8 @@ protected: ResponseDesc desc; StatusCode sts = reinterpret_cast(create)(object, &desc); if (sts != OK) { - THROW_IE_EXCEPTION << as_status << sts << desc.msg; + IE_EXCEPTION_SWITCH(sts, ExceptionType, + InferenceEngine::details::ThrowNow{} <<= std::stringstream{} << IE_LOCATION << desc.msg) } IE_SUPPRESS_DEPRECATED_START _pointedObj = std::shared_ptr(object, [] (T* ptr){ptr->Release();}); @@ -181,12 +192,10 @@ protected: using CreateF = void(std::shared_ptr&); reinterpret_cast(create)(_pointedObj); } - } catch (const InferenceEngineException& ex) { - THROW_IE_EXCEPTION << as_status << (ex.hasStatus() ? ex.getStatus() : GENERAL_ERROR) << ex.what(); - } catch (const std::exception& ex) { - THROW_IE_EXCEPTION << as_status << GENERAL_ERROR << ex.what(); + } CATCH_IE_EXCEPTIONS catch (const std::exception& ex) { + THROW_IE_EXCEPTION << ex.what(); } catch(...) { - THROW_IE_EXCEPTION << as_status << UNEXPECTED << "[ UNEXPECTED ] "; + THROW_IE_EXCEPTION_WITH_STATUS(Unexpected); } } @@ -197,14 +206,14 @@ protected: try { using CreateF = void(std::shared_ptr&); reinterpret_cast(_so_loader->get_symbol(SOCreatorTrait::name))(_pointedObj); - } catch (const InferenceEngineException& ex) { - THROW_IE_EXCEPTION << as_status << (ex.hasStatus() ? 
ex.getStatus() : GENERAL_ERROR) << ex.what(); - } catch (const std::exception& ex) { - THROW_IE_EXCEPTION << as_status << GENERAL_ERROR << ex.what(); + } CATCH_IE_EXCEPTIONS catch (const std::exception& ex) { + THROW_IE_EXCEPTION << ex.what(); } catch(...) { - THROW_IE_EXCEPTION << as_status << UNEXPECTED << "[ UNEXPECTED ] "; + THROW_IE_EXCEPTION_WITH_STATUS(Unexpected); } } + #undef CATCH_IE_EXCEPTION + #undef CATCH_IE_EXCEPTIONS /** * @brief Gets a smart pointer to the DLL diff --git a/inference-engine/include/ie_blob.h b/inference-engine/include/ie_blob.h index 69316340832..2254b95abaf 100644 --- a/inference-engine/include/ie_blob.h +++ b/inference-engine/include/ie_blob.h @@ -25,7 +25,6 @@ #include "ie_locked_memory.hpp" #include "ie_precision.hpp" #include "details/ie_blob_iterator.hpp" -#include "details/ie_exception.hpp" #include "details/ie_pre_allocator.hpp" namespace InferenceEngine { diff --git a/inference-engine/include/ie_common.h b/inference-engine/include/ie_common.h index 39f33d9a774..c0405699adb 100644 --- a/inference-engine/include/ie_common.h +++ b/inference-engine/include/ie_common.h @@ -16,7 +16,14 @@ #include #include #include +#include +#include +#include +#include +#ifndef NDEBUG +#include +#endif namespace InferenceEngine { /** * @brief Represents tensor size. 
@@ -274,73 +281,211 @@ struct QueryNetworkResult { ResponseDesc resp; }; -/** @brief This class represents StatusCode::GENERIC_ERROR exception */ -class GeneralError : public std::logic_error { - using std::logic_error::logic_error; +namespace details { +struct INFERENCE_ENGINE_DEPRECATED("Use InferRequest::Exception") +INFERENCE_ENGINE_API_CLASS(InferenceEngineException) : public std::runtime_error { + using std::runtime_error::runtime_error; + bool hasStatus() const {return true;} + StatusCode getStatus() const; }; +} // namespace details + +/** + * @brief Base Inference Engine exception class + */ +IE_SUPPRESS_DEPRECATED_START +struct INFERENCE_ENGINE_API_CLASS(Exception) : public details::InferenceEngineException { + using InferenceEngineException::InferenceEngineException; +}; +IE_SUPPRESS_DEPRECATED_END + +/// @cond +namespace details { + template struct ExceptionTraits; +} + +#define INFERENCE_ENGINE_DECLARE_EXCEPTION(ExceptionType, statusCode) \ +struct INFERENCE_ENGINE_API_CLASS(ExceptionType) final : public InferenceEngine::Exception { \ + using Exception::Exception; \ +}; \ +namespace details { \ +template<> struct ExceptionTraits { \ + static const char* string() {return "[ " #statusCode " ]";} \ +}; \ +} +/// @endcond + +/** @brief This class represents StatusCode::GENERAL_ERROR exception */ +INFERENCE_ENGINE_DECLARE_EXCEPTION(GeneralError, GENERAL_ERROR) /** @brief This class represents StatusCode::NOT_IMPLEMENTED exception */ -class NotImplemented : public std::logic_error { - using std::logic_error::logic_error; -}; +INFERENCE_ENGINE_DECLARE_EXCEPTION(NotImplemented, NOT_IMPLEMENTED) /** @brief This class represents StatusCode::NETWORK_NOT_LOADED exception */ -class NetworkNotLoaded : public std::logic_error { - using std::logic_error::logic_error; -}; +INFERENCE_ENGINE_DECLARE_EXCEPTION(NetworkNotLoaded, NETWORK_NOT_LOADED) /** @brief This class represents StatusCode::PARAMETER_MISMATCH exception */ -class ParameterMismatch : public 
std::logic_error { - using std::logic_error::logic_error; -}; +INFERENCE_ENGINE_DECLARE_EXCEPTION(ParameterMismatch, PARAMETER_MISMATCH) /** @brief This class represents StatusCode::NOT_FOUND exception */ -class NotFound : public std::logic_error { - using std::logic_error::logic_error; -}; +INFERENCE_ENGINE_DECLARE_EXCEPTION(NotFound, NOT_FOUND) /** @brief This class represents StatusCode::OUT_OF_BOUNDS exception */ -class OutOfBounds : public std::logic_error { - using std::logic_error::logic_error; -}; +INFERENCE_ENGINE_DECLARE_EXCEPTION(OutOfBounds, OUT_OF_BOUNDS) /** @brief This class represents StatusCode::UNEXPECTED exception */ -class Unexpected : public std::logic_error { - using std::logic_error::logic_error; -}; +INFERENCE_ENGINE_DECLARE_EXCEPTION(Unexpected, UNEXPECTED) /** @brief This class represents StatusCode::REQUEST_BUSY exception */ -class RequestBusy : public std::logic_error { - using std::logic_error::logic_error; -}; +INFERENCE_ENGINE_DECLARE_EXCEPTION(RequestBusy, REQUEST_BUSY) /** @brief This class represents StatusCode::RESULT_NOT_READY exception */ -class ResultNotReady : public std::logic_error { - using std::logic_error::logic_error; -}; +INFERENCE_ENGINE_DECLARE_EXCEPTION(ResultNotReady, RESULT_NOT_READY) /** @brief This class represents StatusCode::NOT_ALLOCATED exception */ -class NotAllocated : public std::logic_error { - using std::logic_error::logic_error; -}; +INFERENCE_ENGINE_DECLARE_EXCEPTION(NotAllocated, NOT_ALLOCATED) /** @brief This class represents StatusCode::INFER_NOT_STARTED exception */ -class InferNotStarted : public std::logic_error { - using std::logic_error::logic_error; -}; +INFERENCE_ENGINE_DECLARE_EXCEPTION(InferNotStarted, INFER_NOT_STARTED) /** @brief This class represents StatusCode::NETWORK_NOT_READ exception */ -class NetworkNotRead : public std::logic_error { - using std::logic_error::logic_error; -}; +INFERENCE_ENGINE_DECLARE_EXCEPTION(NetworkNotRead, NETWORK_NOT_READ) /** @brief This class represents 
StatusCode::INFER_CANCELLED exception */ -class InferCancelled : public std::logic_error { - using std::logic_error::logic_error; +INFERENCE_ENGINE_DECLARE_EXCEPTION(InferCancelled, INFER_CANCELLED) + +/** + * @private + */ +#undef INFERENCE_ENGINE_DECLARE_EXCEPTION + +namespace details { +/** + * @brief Tag struct used to throw exception + */ +template <typename ExceptionType> +struct ThrowNow final { + [[noreturn]] void operator<<=(const std::ostream& ostream) { + std::ostringstream stream; + stream << ostream.rdbuf(); + throw ExceptionType{stream.str()}; + } }; -} // namespace InferenceEngine +/// @cond +#ifndef NDEBUG +#define IE_LOCATION '\n' << __FILE__ << ':' << __LINE__<< ' ' +#else +#define IE_LOCATION "" +#endif // NDEBUG +/// @endcond +/** + * @def IE_THROW + * @brief A macro used to throw specified exception with a description + */ +#define IE_THROW(ExceptionType) \ + InferenceEngine::details::ThrowNow<ExceptionType>{} <<= std::stringstream{} << IE_LOCATION + +/** + * @def THROW_IE_EXCEPTION + * @brief A macro used to throw general exception with a description + */ +#define THROW_IE_EXCEPTION IE_THROW(GeneralError) + +/** + * @def THROW_IE_EXCEPTION_WITH_STATUS + * @brief A macro used to throw general exception with a description and status + */ +#define THROW_IE_EXCEPTION_WITH_STATUS(ExceptionType) \ + IE_THROW(ExceptionType) << InferenceEngine::details::ExceptionTraits<ExceptionType>::string() << ' ' + +/** + * @def IE_ASSERT + * @brief Uses assert() function if NDEBUG is not defined, InferenceEngine exception otherwise + */ +#ifdef NDEBUG +#define IE_ASSERT(EXPRESSION) \ + if (!(EXPRESSION)) \ + IE_THROW(GeneralError) << " AssertionFailed: " << #EXPRESSION // NOLINT +#else +/** + * @private + */ +struct NullStream { + template <typename T> + NullStream& operator<<(const T&) noexcept {return *this;} +}; + +#define IE_ASSERT(EXPRESSION) \ + assert((EXPRESSION)); \ + InferenceEngine::details::NullStream() +#endif // NDEBUG + +/// @cond +#define IE_EXCEPTION_CASE(TYPE_ALIAS, STATUS_CODE, EXCEPTION_TYPE, ...) 
 \ + case InferenceEngine::STATUS_CODE : { \ + using InferenceEngine::EXCEPTION_TYPE; using TYPE_ALIAS = EXCEPTION_TYPE; __VA_ARGS__; \ + } break; +/// @endcond + +/** + * @def IE_EXCEPTION_SWITCH + * @brief Generates a switch statement over error codes and maps them to the corresponding exception types + */ +#define IE_EXCEPTION_SWITCH(STATUS, TYPE_ALIAS, ...) \ + switch (STATUS) { \ + IE_EXCEPTION_CASE(TYPE_ALIAS, GENERAL_ERROR , GeneralError , __VA_ARGS__) \ + IE_EXCEPTION_CASE(TYPE_ALIAS, NOT_IMPLEMENTED , NotImplemented , __VA_ARGS__) \ + IE_EXCEPTION_CASE(TYPE_ALIAS, NETWORK_NOT_LOADED , NetworkNotLoaded , __VA_ARGS__) \ + IE_EXCEPTION_CASE(TYPE_ALIAS, PARAMETER_MISMATCH , ParameterMismatch , __VA_ARGS__) \ + IE_EXCEPTION_CASE(TYPE_ALIAS, NOT_FOUND , NotFound , __VA_ARGS__) \ + IE_EXCEPTION_CASE(TYPE_ALIAS, OUT_OF_BOUNDS , OutOfBounds , __VA_ARGS__) \ + IE_EXCEPTION_CASE(TYPE_ALIAS, UNEXPECTED , Unexpected , __VA_ARGS__) \ + IE_EXCEPTION_CASE(TYPE_ALIAS, REQUEST_BUSY , RequestBusy , __VA_ARGS__) \ + IE_EXCEPTION_CASE(TYPE_ALIAS, RESULT_NOT_READY , ResultNotReady , __VA_ARGS__) \ + IE_EXCEPTION_CASE(TYPE_ALIAS, NOT_ALLOCATED , NotAllocated , __VA_ARGS__) \ + IE_EXCEPTION_CASE(TYPE_ALIAS, INFER_NOT_STARTED , InferNotStarted , __VA_ARGS__) \ + IE_EXCEPTION_CASE(TYPE_ALIAS, NETWORK_NOT_READ , NetworkNotRead , __VA_ARGS__) \ + IE_EXCEPTION_CASE(TYPE_ALIAS, INFER_CANCELLED , InferCancelled , __VA_ARGS__) \ + default: IE_ASSERT(!"Unreachable"); \ + } + +/** + * @private + */ +#define CALL_STATUS_FNC(function, ...) 
 \ + if (!actual) THROW_IE_EXCEPTION << "Wrapper used was not initialized."; \ + ResponseDesc resp; \ + auto res = actual->function(__VA_ARGS__, &resp); \ + if (res != OK) IE_EXCEPTION_SWITCH(res, ExceptionType, \ + InferenceEngine::details::ThrowNow<ExceptionType>{} \ + <<= std::stringstream{} << IE_LOCATION << resp.msg) + +/** + * @private + */ +#define CALL_STATUS_FNC_NO_ARGS(function) \ + if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATUS_FNC_NO_ARGS was not initialized."; \ + ResponseDesc resp; \ + auto res = actual->function(&resp); \ + if (res != OK) IE_EXCEPTION_SWITCH(res, ExceptionType, \ + InferenceEngine::details::ThrowNow<ExceptionType>{} \ + <<= std::stringstream{} << IE_LOCATION << resp.msg) + +/** + * @private + */ +#define CALL_FNC_NO_ARGS(function) \ + if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_FNC_NO_ARGS was not initialized."; \ + ResponseDesc resp; \ + auto result = actual->function(&resp); \ + if (resp.msg[0] != '\0') { \ + THROW_IE_EXCEPTION << resp.msg; \ + } \ + return result; +} // namespace details +} // namespace InferenceEngine #if defined(_WIN32) #define __PRETTY_FUNCTION__ __FUNCSIG__ #else diff --git a/inference-engine/include/ie_parameter.hpp b/inference-engine/include/ie_parameter.hpp index 1032d599d05..5dc4597cd85 100644 --- a/inference-engine/include/ie_parameter.hpp +++ b/inference-engine/include/ie_parameter.hpp @@ -10,7 +10,6 @@ #include #include -#include
#include #include #include diff --git a/inference-engine/include/ie_precision.hpp b/inference-engine/include/ie_precision.hpp index 18059ce2020..c4107aa90a3 100644 --- a/inference-engine/include/ie_precision.hpp +++ b/inference-engine/include/ie_precision.hpp @@ -13,7 +13,8 @@ #include #include -#include "details/ie_exception.hpp" +#include "ie_common.h" + namespace InferenceEngine { diff --git a/inference-engine/samples/benchmark_app/main.cpp b/inference-engine/samples/benchmark_app/main.cpp index 96dc8abd5a0..a56427df9e0 100644 --- a/inference-engine/samples/benchmark_app/main.cpp +++ b/inference-engine/samples/benchmark_app/main.cpp @@ -432,7 +432,7 @@ int main(int argc, char *argv[]) { std::string key = METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS); try { nireq = exeNetwork.GetMetric(key).as(); - } catch (const details::InferenceEngineException& ex) { + } catch (const std::exception& ex) { THROW_IE_EXCEPTION << "Every device used with the benchmark_app should " << "support OPTIMAL_NUMBER_OF_INFER_REQUESTS ExecutableNetwork metric. 
" diff --git a/inference-engine/samples/common/samples/common.hpp b/inference-engine/samples/common/samples/common.hpp index cd6948e1e1c..703a6812a2a 100644 --- a/inference-engine/samples/common/samples/common.hpp +++ b/inference-engine/samples/common/samples/common.hpp @@ -642,7 +642,7 @@ inline std::map getMapFullDevicesNames(InferenceEngine p = ie.GetMetric(deviceName, METRIC_KEY(FULL_DEVICE_NAME)); devicesMap.insert(std::pair(deviceName, p.as())); } - catch (InferenceEngine::details::InferenceEngineException &) { + catch (InferenceEngine::Exception &) { } } } @@ -664,7 +664,7 @@ inline std::string getFullDeviceName(InferenceEngine::Core& ie, std::string devi p = ie.GetMetric(device, METRIC_KEY(FULL_DEVICE_NAME)); return p.as(); } - catch (InferenceEngine::details::InferenceEngineException &) { + catch (InferenceEngine::Exception &) { return ""; } } diff --git a/inference-engine/src/cldnn_engine/cldnn_common_utils.h b/inference-engine/src/cldnn_engine/cldnn_common_utils.h index 5405674f59f..d6e3e64a712 100644 --- a/inference-engine/src/cldnn_engine/cldnn_common_utils.h +++ b/inference-engine/src/cldnn_engine/cldnn_common_utils.h @@ -5,7 +5,6 @@ #pragma once #include -#include
#include #include @@ -49,7 +48,8 @@ inline cldnn::data_types DataTypeFromPrecision(InferenceEngine::Precision p) { case InferenceEngine::Precision::BOOL: return cldnn::data_types::i8; default: - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << p.name() << " precision"; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) + << "The plugin does not support " << p.name() << " precision"; } } @@ -74,7 +74,8 @@ inline cldnn::data_types DataTypeFromPrecision(ngraph::element::Type t) { case ngraph::element::Type_t::u1: return cldnn::data_types::bin; default: - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << t.get_type_name()<< " precision"; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) + << "The plugin does not support " << t.get_type_name()<< " precision"; } } @@ -94,7 +95,7 @@ inline cldnn::format FormatFromLayout(InferenceEngine::Layout l) { case InferenceEngine::Layout::NHWC: return cldnn::format::byxf; default: - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << l << " layout"; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "The plugin does not support " << l << " layout"; } } @@ -119,7 +120,8 @@ inline cldnn::format FormatFromTensorDesc(InferenceEngine::TensorDesc desc) { case InferenceEngine::Layout::NHWC: return cldnn::format::byxf; default: - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << desc.getLayout() << " layout"; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) + << "The plugin does not support " << desc.getLayout() << " layout"; } } @@ -135,7 +137,8 @@ inline cldnn::format ImageFormatFromLayout(InferenceEngine::Layout l) { case InferenceEngine::Layout::NHWC: return cldnn::format::nv12; default: - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "The plugin does not support " << l << " image layout"; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) + << "The plugin does not support " << l << " image layout"; 
} } diff --git a/inference-engine/src/cldnn_engine/cldnn_config.cpp b/inference-engine/src/cldnn_engine/cldnn_config.cpp index 64dfffbd0cb..530e3215c0c 100644 --- a/inference-engine/src/cldnn_engine/cldnn_config.cpp +++ b/inference-engine/src/cldnn_engine/cldnn_config.cpp @@ -7,7 +7,6 @@ #include #include "cldnn_config.h" #include "cpp_interfaces/exception2status.hpp" -#include "details/ie_exception.hpp" #include "cpp_interfaces/interface/ie_internal_plugin_config.hpp" #include "ie_api.h" #include "file_utils.h" @@ -52,7 +51,7 @@ void Config::UpdateFromMap(const std::map& configMap) } else if (val.compare(PluginConfigParams::NO) == 0) { useProfiling = false; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val; } } else if (key.compare(PluginConfigParams::KEY_DYN_BATCH_ENABLED) == 0) { if (val.compare(PluginConfigParams::YES) == 0) { @@ -60,7 +59,7 @@ void Config::UpdateFromMap(const std::map& configMap) } else if (val.compare(PluginConfigParams::NO) == 0) { enableDynamicBatch = false; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val; } } else if (key.compare(PluginConfigParams::KEY_DUMP_KERNELS) == 0) { if (val.compare(PluginConfigParams::YES) == 0) { @@ -68,14 +67,14 @@ void Config::UpdateFromMap(const std::map& configMap) } else if (val.compare(PluginConfigParams::NO) == 0) { dumpCustomKernels = false; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val; } } else if (key.compare(CLDNNConfigParams::KEY_CLDNN_PLUGIN_PRIORITY) == 0) { std::stringstream ss(val); uint32_t uVal(0); ss >> uVal; if (ss.fail()) { - THROW_IE_EXCEPTION << NOT_FOUND_str << 
"Unsupported property value by plugin: " << val; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val; } switch (uVal) { case 0: @@ -91,7 +90,7 @@ void Config::UpdateFromMap(const std::map& configMap) queuePriority = cldnn::priority_mode_types::high; break; default: - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Unsupported queue priority value: " << uVal; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Unsupported queue priority value: " << uVal; } } else if (key.compare(CLDNNConfigParams::KEY_CLDNN_PLUGIN_THROTTLE) == 0) { @@ -99,7 +98,7 @@ void Config::UpdateFromMap(const std::map& configMap) uint32_t uVal(0); ss >> uVal; if (ss.fail()) { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val; } switch (uVal) { case 0: @@ -115,7 +114,7 @@ void Config::UpdateFromMap(const std::map& configMap) queueThrottle = cldnn::throttle_mode_types::high; break; default: - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Unsupported queue throttle value: " << uVal; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Unsupported queue throttle value: " << uVal; } } else if (key.compare(PluginConfigParams::KEY_CONFIG_FILE) == 0) { std::stringstream ss(val); @@ -137,7 +136,7 @@ void Config::UpdateFromMap(const std::map& configMap) } else if (val.compare(PluginConfigParams::TUNING_RETUNE) == 0) { tuningConfig.mode = cldnn::tuning_mode::tuning_retune_and_cache; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported tuning mode value by plugin: " << val; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported tuning mode value by plugin: " << val; } } else if (key.compare(PluginConfigParams::KEY_TUNING_FILE) == 0) { tuningConfig.cache_file_path = val; @@ -147,7 +146,7 @@ void Config::UpdateFromMap(const std::map& configMap) } else if (val.compare(PluginConfigParams::NO) == 0) { 
memory_pool_on = false; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported memory pool flag value: " << val; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported memory pool flag value: " << val; } } else if (key.compare(CLDNNConfigParams::KEY_CLDNN_GRAPH_DUMPS_DIR) == 0) { if (!val.empty()) { @@ -170,7 +169,7 @@ void Config::UpdateFromMap(const std::map& configMap) } else if (val.compare(PluginConfigParams::NO) == 0) { exclusiveAsyncRequests = false; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val; } } else if (key.compare(PluginConfigParams::KEY_GPU_THROUGHPUT_STREAMS) == 0) { if (val.compare(PluginConfigParams::GPU_THROUGHPUT_AUTO) == 0) { @@ -204,7 +203,7 @@ void Config::UpdateFromMap(const std::map& configMap) } else if (val.compare(PluginConfigParams::NO) == 0) { enableInt8 = false; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property value by plugin: " << val; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property value by plugin: " << val; } } else if (key.compare(CLDNNConfigParams::KEY_CLDNN_NV12_TWO_INPUTS) == 0) { if (val.compare(PluginConfigParams::YES) == 0) { @@ -212,7 +211,7 @@ void Config::UpdateFromMap(const std::map& configMap) } else if (val.compare(PluginConfigParams::NO) == 0) { nv12_two_inputs = false; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported NV12 flag value: " << val; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported NV12 flag value: " << val; } } else if (key.compare(CLDNNConfigParams::KEY_CLDNN_ENABLE_FP16_FOR_QUANTIZED_MODELS) == 0) { if (val.compare(PluginConfigParams::YES) == 0) { @@ -220,10 +219,10 @@ void Config::UpdateFromMap(const std::map& configMap) } else if (val.compare(PluginConfigParams::NO) == 0) { enable_fp16_for_quantized_models = false; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported 
KEY_CLDNN_ENABLE_FP16_FOR_QUANTIZED_MODELS flag value: " << val; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported KEY_CLDNN_ENABLE_FP16_FOR_QUANTIZED_MODELS flag value: " << val; } } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property key by plugin: " << key; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property key by plugin: " << key; } adjustKeyMapValues(); diff --git a/inference-engine/src/cldnn_engine/cldnn_engine.cpp b/inference-engine/src/cldnn_engine/cldnn_engine.cpp index 8f3199aad27..6d8b9512197 100644 --- a/inference-engine/src/cldnn_engine/cldnn_engine.cpp +++ b/inference-engine/src/cldnn_engine/cldnn_engine.cpp @@ -416,7 +416,7 @@ auto check_inputs = [](InferenceEngine::InputsDataMap _networkInputs) { input_precision != InferenceEngine::Precision::I32 && input_precision != InferenceEngine::Precision::I64 && input_precision != InferenceEngine::Precision::BOOL) { - THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << "Input image format " << input_precision << " is not supported yet..."; } } diff --git a/inference-engine/src/cldnn_engine/cldnn_executable_network.cpp b/inference-engine/src/cldnn_engine/cldnn_executable_network.cpp index 229f202300e..0e069035c66 100644 --- a/inference-engine/src/cldnn_engine/cldnn_executable_network.cpp +++ b/inference-engine/src/cldnn_engine/cldnn_executable_network.cpp @@ -66,16 +66,16 @@ InferRequestInternal::Ptr CLDNNExecNetwork::CreateInferRequestImpl(InputsDataMap OutputsDataMap networkOutputs) { OV_ITT_SCOPED_TASK(itt::domains::CLDNNPlugin, "CLDNNExecNetwork::CreateInferRequestImpl"); if (m_graphs.empty()) { - THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str; + THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded); } for (auto& graph : m_graphs) { if (graph == nullptr) { - THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str; + THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded); } if (!graph->IsLoaded()) { - THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str 
<< ": no networks created"; + THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded) << ": no networks created"; } } @@ -98,7 +98,7 @@ IInferRequest::Ptr CLDNNExecNetwork::CreateInferRequest() { InferenceEngine::CNNNetwork CLDNNExecNetwork::GetExecGraphInfo() { if (m_graphs.empty()) - THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str; + THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded); return m_graphs.front()->GetExecGraphInfo(); } diff --git a/inference-engine/src/cldnn_engine/cldnn_infer_request.cpp b/inference-engine/src/cldnn_engine/cldnn_infer_request.cpp index 2e7b1b00d9a..a65dc679230 100644 --- a/inference-engine/src/cldnn_engine/cldnn_infer_request.cpp +++ b/inference-engine/src/cldnn_engine/cldnn_infer_request.cpp @@ -337,7 +337,7 @@ void checkInputBlob(const Blob::Ptr &blob, auto nv12_ptr = blob->as(); if (nv12_ptr == nullptr) { - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << wrong_nv12_blob; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << wrong_nv12_blob; } auto y_ptr = nv12_ptr->y()->as(); @@ -402,7 +402,8 @@ void CLDNNInferRequest::checkBlobs() { if (foundInputPair != std::end(_networkInputs)) { foundInput = foundInputPair->second; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find input with name: \'" << input.first << "\'"; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) + << "Failed to find input with name: \'" << input.first << "\'"; } checkInputBlob(input.second, input.first, foundInput, m_graph->getConfig().nv12_two_inputs); } @@ -415,7 +416,8 @@ void CLDNNInferRequest::checkBlobs() { if (foundOutputPair != std::end(_networkOutputs)) { foundOutput = foundOutputPair->second; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find output with name: \'" << output.first << "\'"; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) + << "Failed to find output with name: \'" << output.first << "\'"; } checkOutputBlob(output.second, output.first, foundOutput); } @@ -449,10 +451,10 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const 
Blob::Ptr &data) // perform all common checks first if (name.empty()) { - THROW_IE_EXCEPTION << NOT_FOUND_str + "Failed to set blob with empty name"; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Failed to set blob with empty name"; } if (!data) - THROW_IE_EXCEPTION << NOT_ALLOCATED_str << "Failed to set empty blob with name: \'" << name << "\'"; + THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) << "Failed to set empty blob with name: \'" << name << "\'"; size_t dataSize = data->size(); if (0 == dataSize) { @@ -470,7 +472,7 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data) : foundOutput->getTensorDesc(); if (desc.getPrecision() != blobDesc.getPrecision()) { - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set Blob with precision not corresponding to user " << (is_input ? "input" : "output") << " precision"; } @@ -498,7 +500,7 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data) auto nv12_ptr = data->as(); if (nv12_ptr == nullptr) { - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << wrong_nv12_blob; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << wrong_nv12_blob; } auto y_ptr = nv12_ptr->y()->as(); @@ -530,7 +532,7 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data) _preProcData[name]->setRoiBlob(data); } else { if (compoundBlobPassed) { - THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << cannot_set_compound; + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << cannot_set_compound; } size_t blobSize = desc.getLayout() != SCALAR @@ -548,7 +550,7 @@ void CLDNNInferRequest::SetBlob(const std::string& name, const Blob::Ptr &data) } } else { if (compoundBlobPassed) { - THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << cannot_set_compound; + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << cannot_set_compound; } if (is_remote) { @@ -697,7 +699,7 @@ void CLDNNInferRequest::SetGraph(std::shared_ptr graph) m_graph = graph; if 
(m_graph == nullptr) { - THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str; + THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotLoaded); } if (m_graph->GetMaxDynamicBatchSize() > 1) { diff --git a/inference-engine/src/cldnn_engine/cldnn_program.h b/inference-engine/src/cldnn_engine/cldnn_program.h index 51e0eaa4a5f..88c7e34bd2e 100644 --- a/inference-engine/src/cldnn_engine/cldnn_program.h +++ b/inference-engine/src/cldnn_engine/cldnn_program.h @@ -12,7 +12,6 @@ #include #include -#include "details/ie_exception.hpp" #include "cldnn_config.h" diff --git a/inference-engine/src/gna_plugin/backend/dnn.cpp b/inference-engine/src/gna_plugin/backend/dnn.cpp index 70176a1bdb8..9f21a71c4de 100644 --- a/inference-engine/src/gna_plugin/backend/dnn.cpp +++ b/inference-engine/src/gna_plugin/backend/dnn.cpp @@ -5,7 +5,6 @@ #include #include -#include
#if GNA_LIB_VER == 2 #include diff --git a/inference-engine/src/gna_plugin/frontend/layer_quantizer.hpp b/inference-engine/src/gna_plugin/frontend/layer_quantizer.hpp index f115ec19353..b9bb4383a26 100644 --- a/inference-engine/src/gna_plugin/frontend/layer_quantizer.hpp +++ b/inference-engine/src/gna_plugin/frontend/layer_quantizer.hpp @@ -269,8 +269,9 @@ inline void quantizeWeightsBiases(const QuantDesc & quantDesc, make_custom_blob(InferenceEngine::C, InferenceEngine::SizeVector({wl->_weights->size()})); intWeights->allocate(); if (intWeights->buffer() == nullptr) { - THROW_GNA_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::NOT_ALLOCATED - << "cannot copy weights for layer :"<< wl->name << " of size" << intWeights->byteSize(); + THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) + << "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": " + << "cannot copy weights for layer :"<< wl->name << " of size" << intWeights->byteSize(); } int oIdx = wl->outData[0]->getDims().size() - 1; @@ -296,8 +297,9 @@ inline void quantizeWeightsBiases(const QuantDesc & quantDesc, })); bias->allocate(); if (bias->buffer() == nullptr) { - THROW_GNA_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::NOT_ALLOCATED - << "cannot copy bias for layer :"<< wl->name <<"of size" << bias->byteSize(); + THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) + << "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": " + << "cannot copy bias for layer :"<< wl->name <<"of size" << bias->byteSize(); } memset(bias->buffer(), 0, bias->byteSize()); @@ -386,8 +388,9 @@ inline void quantizeWeightsBiasesConv(const QuantDesc & quantDesc, auto intWeights = make_custom_blob(InferenceEngine::C, InferenceEngine::SizeVector({conv->_weights->size()})); intWeights->allocate(); if (intWeights->buffer() == nullptr) { - THROW_GNA_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::NOT_ALLOCATED - << "cannot copy weights for layer :"<< conv->name << " of size" << 
intWeights->byteSize(); + THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) + << "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": " + << "cannot copy weights for layer :"<< conv->name << " of size" << intWeights->byteSize(); } auto getBiasSizeForLayer = [](InferenceEngine::WeightableLayer *wl) { @@ -410,8 +413,9 @@ inline void quantizeWeightsBiasesConv(const QuantDesc & quantDesc, })); bias->allocate(); if (bias->buffer() == nullptr) { - THROW_GNA_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::NOT_ALLOCATED - << "cannot copy bias for layer :"<< conv->name <<"of size" << bias->byteSize(); + THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) + << "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": " + << "cannot copy bias for layer :"<< conv->name <<"of size" << bias->byteSize(); } memset(bias->buffer(), 0, bias->byteSize()); diff --git a/inference-engine/src/gna_plugin/frontend/quantization.cpp b/inference-engine/src/gna_plugin/frontend/quantization.cpp index d8b5f9d4da3..323e503c8cc 100644 --- a/inference-engine/src/gna_plugin/frontend/quantization.cpp +++ b/inference-engine/src/gna_plugin/frontend/quantization.cpp @@ -4,7 +4,6 @@ #include #include -#include
#include #include #include "backend/gna_types.h" diff --git a/inference-engine/src/gna_plugin/gna2_model_debug_log.cpp b/inference-engine/src/gna_plugin/gna2_model_debug_log.cpp index 15f18f824e4..b47afdc8db3 100644 --- a/inference-engine/src/gna_plugin/gna2_model_debug_log.cpp +++ b/inference-engine/src/gna_plugin/gna2_model_debug_log.cpp @@ -9,7 +9,6 @@ #if GNA_LIB_VER == 2 #include "gna2_model_debug_log.hpp" #include "gna2-model-api.h" -#include
#include #include diff --git a/inference-engine/src/gna_plugin/gna_device.cpp b/inference-engine/src/gna_plugin/gna_device.cpp index 9e47f705135..9d14d647587 100644 --- a/inference-engine/src/gna_plugin/gna_device.cpp +++ b/inference-engine/src/gna_plugin/gna_device.cpp @@ -24,7 +24,6 @@ #include "gna-api.h" #endif -#include "details/ie_exception.hpp" #include "gna_plugin_log.hpp" //#define MODEL_DUMP diff --git a/inference-engine/src/gna_plugin/gna_infer_request.hpp b/inference-engine/src/gna_plugin/gna_infer_request.hpp index 4b2fea5663e..da1e8a22595 100644 --- a/inference-engine/src/gna_plugin/gna_infer_request.hpp +++ b/inference-engine/src/gna_plugin/gna_infer_request.hpp @@ -90,7 +90,7 @@ class GNAInferRequest : public InferenceEngine::AsyncInferRequestInternal { if (inferRequestIdx == -1) { return InferenceEngine::INFER_NOT_STARTED; } else if (millis_timeout < -1) { - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch); } if (millis_timeout == InferenceEngine::IInferRequest::WaitMode::RESULT_READY) { diff --git a/inference-engine/src/gna_plugin/gna_model_serial.cpp b/inference-engine/src/gna_plugin/gna_model_serial.cpp index 65acd278c81..236d569cd94 100644 --- a/inference-engine/src/gna_plugin/gna_model_serial.cpp +++ b/inference-engine/src/gna_plugin/gna_model_serial.cpp @@ -4,7 +4,6 @@ #include #include -#include
#include #include #include diff --git a/inference-engine/src/gna_plugin/gna_plugin.cpp b/inference-engine/src/gna_plugin/gna_plugin.cpp index b2c866092bf..71c9a7323e0 100644 --- a/inference-engine/src/gna_plugin/gna_plugin.cpp +++ b/inference-engine/src/gna_plugin/gna_plugin.cpp @@ -39,6 +39,7 @@ #include #include "gna_graph_patterns.hpp" #include "gna_tensor_tools.hpp" +#include #include #include @@ -1108,7 +1109,7 @@ uint32_t GNAPlugin::QueueInference(const InferenceEngine::BlobMap &inputs, Infer Wait(0); freeNnet = nnets.begin(); } else { - THROW_IE_EXCEPTION << as_status << REQUEST_BUSY + THROW_IE_EXCEPTION_WITH_STATUS(RequestBusy) << "GNA executable network has max of " << static_cast(gnaFlags->gna_lib_async_threads_num) << " parallel infer requests, please sync one of already running"; @@ -1589,7 +1590,7 @@ InferenceEngine::QueryNetworkResult GNAPlugin::QueryNetwork(const InferenceEngin InferenceEngine::QueryNetworkResult res; if (network.getFunction()) { - THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << " ngraph::Function is not supported natively"; + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << " ngraph::Function is not supported natively"; } std::unordered_set allLayers; diff --git a/inference-engine/src/gna_plugin/gna_plugin_config.cpp b/inference-engine/src/gna_plugin/gna_plugin_config.cpp index b7d20534733..bd6851c82c0 100644 --- a/inference-engine/src/gna_plugin/gna_plugin_config.cpp +++ b/inference-engine/src/gna_plugin/gna_plugin_config.cpp @@ -211,8 +211,9 @@ void Config::UpdateFromMap(const std::map& config) { THROW_GNA_EXCEPTION << "EXCLUSIVE_ASYNC_REQUESTS should be YES/NO, but not" << value; } } else { - THROW_GNA_EXCEPTION << as_status << NOT_FOUND << "Incorrect GNA Plugin config. Key " << item.first - << " not supported"; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) + << "[GNAPlugin] in function " << __PRETTY_FUNCTION__<< ": " + << "Incorrect GNA Plugin config. 
Key " << item.first << " not supported"; } if (gnaFlags.sw_fp32 && gnaFlags.gna_lib_async_threads_num > 1) { diff --git a/inference-engine/src/gna_plugin/gna_plugin_internal.hpp b/inference-engine/src/gna_plugin/gna_plugin_internal.hpp index 5917e28128f..c6d8d8b79ed 100644 --- a/inference-engine/src/gna_plugin/gna_plugin_internal.hpp +++ b/inference-engine/src/gna_plugin/gna_plugin_internal.hpp @@ -75,7 +75,7 @@ public: auto plg = GetCurrentPlugin(); try { plg->SetConfig(config); - } catch (InferenceEngine::details::InferenceEngineException) {} + } catch (InferenceEngine::Exception&) {} return plg->QueryNetwork(network, config); } diff --git a/inference-engine/src/gna_plugin/gna_plugin_log.hpp b/inference-engine/src/gna_plugin/gna_plugin_log.hpp index bba788b3007..5b119921f65 100644 --- a/inference-engine/src/gna_plugin/gna_plugin_log.hpp +++ b/inference-engine/src/gna_plugin/gna_plugin_log.hpp @@ -6,7 +6,6 @@ #include #include -#include
// #define GNA_DEBUG #ifdef GNA_DEBUG diff --git a/inference-engine/src/gna_plugin/memory/gna_memory_util.cpp b/inference-engine/src/gna_plugin/memory/gna_memory_util.cpp index df1bb385da2..bd6db71938c 100644 --- a/inference-engine/src/gna_plugin/memory/gna_memory_util.cpp +++ b/inference-engine/src/gna_plugin/memory/gna_memory_util.cpp @@ -5,7 +5,6 @@ #include "gna_memory_util.hpp" #include -#include
#include "gna_plugin_log.hpp" int32_t GNAPluginNS::memory::MemoryOffset(void *ptr_target, void *ptr_base) { diff --git a/inference-engine/src/hetero_plugin/hetero_async_infer_request.cpp b/inference-engine/src/hetero_plugin/hetero_async_infer_request.cpp index ab4ef5710cd..76d3aa9d65d 100644 --- a/inference-engine/src/hetero_plugin/hetero_async_infer_request.cpp +++ b/inference-engine/src/hetero_plugin/hetero_async_infer_request.cpp @@ -35,10 +35,13 @@ HeteroAsyncInferRequest::HeteroAsyncInferRequest(const InferRequestInternal::Ptr Task _task; }; - auto reuestExecutor = std::make_shared(_heteroInferRequest->_inferRequests[requestId]._request.get()); - _pipeline.emplace_back(reuestExecutor, [reuestExecutor] { - if (StatusCode::OK != reuestExecutor->_status) { - THROW_IE_EXCEPTION << InferenceEngine::details::as_status << reuestExecutor->_status; + auto requestExecutor = std::make_shared(_heteroInferRequest->_inferRequests[requestId]._request.get()); + _pipeline.emplace_back(requestExecutor, [requestExecutor] { + if (StatusCode::OK != requestExecutor->_status) { + IE_EXCEPTION_SWITCH(requestExecutor->_status, ExceptionType, + InferenceEngine::details::ThrowNow{} + <<= std::stringstream{} << IE_LOCATION + << InferenceEngine::details::ExceptionTraits::string()); } }); } diff --git a/inference-engine/src/hetero_plugin/hetero_executable_network.cpp b/inference-engine/src/hetero_plugin/hetero_executable_network.cpp index 6241118511a..7b962db79e4 100644 --- a/inference-engine/src/hetero_plugin/hetero_executable_network.cpp +++ b/inference-engine/src/hetero_plugin/hetero_executable_network.cpp @@ -431,7 +431,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(std::istream& pugi::xml_parse_result res = heteroXmlDoc.load_string(heteroXmlStr.c_str()); if (res.status != pugi::status_ok) { - THROW_IE_EXCEPTION_WITH_STATUS(NETWORK_NOT_READ) << "Error reading HETERO plugin xml header"; + THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotRead) << "Error reading HETERO plugin xml header"; 
} using namespace XMLParseUtils; @@ -480,7 +480,7 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(std::istream& bool loaded = false; try { executableNetwork = _heteroPlugin->GetCore()->ImportNetwork(heteroModel, deviceName, loadConfig); - } catch (const InferenceEngine::NotImplemented &) { + } catch (const InferenceEngine::NotImplemented& ex) { // read XML content std::string xmlString; std::uint64_t dataSize = 0; @@ -608,7 +608,7 @@ void HeteroExecutableNetwork::ExportImpl(std::ostream& heteroModel) { for (auto&& subnetwork : networks) { try { subnetwork._network.Export(heteroModel); - } catch (const InferenceEngine::NotImplemented &) { + } catch (const InferenceEngine::NotImplemented& ex) { auto subnet = subnetwork._clonedNetwork; if (!subnet.getFunction()) { THROW_IE_EXCEPTION << "Hetero plugin supports only ngraph function representation"; diff --git a/inference-engine/src/hetero_plugin/hetero_infer_request.cpp b/inference-engine/src/hetero_plugin/hetero_infer_request.cpp index 9d831f77030..f49938bc72b 100644 --- a/inference-engine/src/hetero_plugin/hetero_infer_request.cpp +++ b/inference-engine/src/hetero_plugin/hetero_infer_request.cpp @@ -77,11 +77,7 @@ void HeteroInferRequest::SetBlob(const std::string& name, const InferenceEngine: if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) { r->SetBlob(name, data, foundInput->getPreProcess()); } - } catch (const InferenceEngine::details::InferenceEngineException & ex) { - std::string message = ex.what(); - if (message.find(NOT_FOUND_str) == std::string::npos) - throw ex; - } + } catch (const InferenceEngine::NotFound& ex) {} } } diff --git a/inference-engine/src/inference_engine/cnn_network_ngraph_impl.cpp b/inference-engine/src/inference_engine/cnn_network_ngraph_impl.cpp index 8738b93ceab..3b9acc36afd 100644 --- a/inference-engine/src/inference_engine/cnn_network_ngraph_impl.cpp +++ b/inference-engine/src/inference_engine/cnn_network_ngraph_impl.cpp @@ -427,7 +427,7 @@ StatusCode 
CNNNetworkNGraphImpl::serialize(const std::string& xmlPath, xmlPath, binPath, ngraph::pass::Serialize::Version::IR_V10, custom_opsets); manager.run_passes(_ngraph_function); - } catch (const InferenceEngineException& e) { + } catch (const Exception& e) { return DescriptionBuffer(GENERAL_ERROR, resp) << e.what(); } catch (const std::exception& e) { return DescriptionBuffer(UNEXPECTED, resp) << e.what(); diff --git a/inference-engine/src/inference_engine/compilation_context.cpp b/inference-engine/src/inference_engine/compilation_context.cpp index 05cd6879f0a..daf405b6544 100644 --- a/inference-engine/src/inference_engine/compilation_context.cpp +++ b/inference-engine/src/inference_engine/compilation_context.cpp @@ -193,7 +193,7 @@ std::istream& operator >> (std::istream& stream, CompiledBlobHeader& header) { pugi::xml_parse_result res = document.load_string(xmlStr.c_str()); if (res.status != pugi::status_ok) { - THROW_IE_EXCEPTION_WITH_STATUS(NETWORK_NOT_READ) << "Error reading compiled blob header"; + THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotRead) << "Error reading compiled blob header"; } pugi::xml_node compiledBlobNode = document.document_element(); diff --git a/inference-engine/src/inference_engine/file_utils.cpp b/inference-engine/src/inference_engine/file_utils.cpp index a9a57fba744..b2c061f6f02 100644 --- a/inference-engine/src/inference_engine/file_utils.cpp +++ b/inference-engine/src/inference_engine/file_utils.cpp @@ -12,10 +12,9 @@ #endif #include -#include
#include #include - +#include "ie_common.h" #ifndef _WIN32 # include # include diff --git a/inference-engine/src/inference_engine/ie_blob_common.cpp b/inference-engine/src/inference_engine/ie_blob_common.cpp index 876b4ec9642..694770e244c 100644 --- a/inference-engine/src/inference_engine/ie_blob_common.cpp +++ b/inference-engine/src/inference_engine/ie_blob_common.cpp @@ -11,7 +11,7 @@ namespace InferenceEngine { Blob::Ptr Blob::createROI(const ROI&) const { - THROW_IE_EXCEPTION << "[NOT_IMPLEMENTED] createROI is not implemented for current type of Blob"; + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << "createROI is not implemented for current type of Blob"; } Blob::Ptr make_shared_blob(const Blob::Ptr& inputBlob, const ROI& roi) { diff --git a/inference-engine/src/inference_engine/ie_common.cpp b/inference-engine/src/inference_engine/ie_common.cpp new file mode 100644 index 00000000000..217d954b8fb --- /dev/null +++ b/inference-engine/src/inference_engine/ie_common.cpp @@ -0,0 +1,145 @@ +// Copyright (C) 2018-2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include + +namespace ExecGraphInfoSerialization { +// +// exec_graph_info.hpp +// +constexpr ngraph::NodeTypeInfo ExecutionNode::type_info; + +const ngraph::NodeTypeInfo& ExecutionNode::get_type_info() const { + return type_info; +} +} // namespace ExecGraphInfoSerialization + +namespace InferenceEngine { +// +// ie_blob.h +// + +Blob::~Blob() {} +MemoryBlob::~MemoryBlob() {} + +// +// ie_iextension.h +// +ILayerImpl::~ILayerImpl() {} +ILayerExecImpl::~ILayerExecImpl() {} +std::map IExtension::getOpSets() { + return {}; +} + +// +// ie_extension.h +// +std::map Extension::getOpSets() { + return actual->getOpSets(); +} +namespace details { +IE_SUPPRESS_DEPRECATED_START + +StatusCode InferenceEngineException::getStatus() const { + return 
ExceptionToStatus(dynamic_cast(*this)); +} +} // namespace details +IE_SUPPRESS_DEPRECATED_END + +INFERENCE_ENGINE_API_CPP(StatusCode) ExceptionToStatus(const Exception& exception) { + if (dynamic_cast(&exception) != nullptr) { + return GENERAL_ERROR; + } else if (dynamic_cast(&exception) != nullptr) { + return NOT_IMPLEMENTED; + } else if (dynamic_cast(&exception) != nullptr) { + return NETWORK_NOT_LOADED; + } else if (dynamic_cast(&exception) != nullptr) { + return PARAMETER_MISMATCH; + } else if (dynamic_cast(&exception) != nullptr) { + return NOT_FOUND; + } else if (dynamic_cast(&exception) != nullptr) { + return OUT_OF_BOUNDS; + } else if (dynamic_cast(&exception) != nullptr) { + return UNEXPECTED; + } else if (dynamic_cast(&exception) != nullptr) { + return REQUEST_BUSY; + } else if (dynamic_cast(&exception) != nullptr) { + return RESULT_NOT_READY; + } else if (dynamic_cast(&exception) != nullptr) { + return NOT_ALLOCATED; + } else if (dynamic_cast(&exception) != nullptr) { + return INFER_NOT_STARTED; + } else if (dynamic_cast(&exception) != nullptr) { + return NETWORK_NOT_READ; + } else if (dynamic_cast(&exception) != nullptr) { + return INFER_CANCELLED; + } else { + assert(!"Unreachable"); return OK; + } +} + +// +// ie_parameter.hpp +// + +Parameter::~Parameter() { + clear(); +} + +#if defined(__clang__) && !defined(__SYCL_COMPILER_VERSION) +Parameter::Any::~Any() {} + +template struct Parameter::RealData; +template struct Parameter::RealData; +template struct Parameter::RealData; +template struct Parameter::RealData; +template struct Parameter::RealData; +template struct Parameter::RealData; +template struct Parameter::RealData; +template struct Parameter::RealData>; +template struct Parameter::RealData>; +template struct Parameter::RealData>; +template struct Parameter::RealData>; +template struct Parameter::RealData>; +template struct Parameter::RealData; + +// +// ie_blob.h +// + +template +TBlob::~TBlob() { + free(); +} + +template class TBlob; 
+template class TBlob; +template class TBlob; +template class TBlob; +template class TBlob; +template class TBlob; +template class TBlob; +template class TBlob; +template class TBlob; +template class TBlob; +template class TBlob; +template class TBlob; +#endif // defined(__clang__) && !defined(__SYCL_COMPILER_VERSION) + +} // namespace InferenceEngine \ No newline at end of file diff --git a/inference-engine/src/inference_engine/ie_core.cpp b/inference-engine/src/inference_engine/ie_core.cpp index 08d116591de..85e1aecbecb 100644 --- a/inference-engine/src/inference_engine/ie_core.cpp +++ b/inference-engine/src/inference_engine/ie_core.cpp @@ -620,7 +620,7 @@ public: } plugins[deviceName] = plugin; - } catch (const details::InferenceEngineException& ex) { + } catch (const Exception& ex) { THROW_IE_EXCEPTION << "Failed to create plugin " << FileUtils::fromFilePath(desc.libraryLocation) << " for device " << deviceName << "\n" << "Please, check your environment\n" @@ -993,7 +993,7 @@ std::vector Core::GetAvailableDevices() const { try { Parameter p = GetMetric(deviceName, propertyName); devicesIDs = p.as>(); - } catch (details::InferenceEngineException&) { + } catch (Exception&) { // plugin is not created by e.g. 
invalid env } catch (const std::exception& ex) { THROW_IE_EXCEPTION << "An exception is thrown while trying to create the " << deviceName diff --git a/inference-engine/src/inference_engine/ie_plugin_cpp.hpp b/inference-engine/src/inference_engine/ie_plugin_cpp.hpp index f0474e190c3..441aba3e150 100644 --- a/inference-engine/src/inference_engine/ie_plugin_cpp.hpp +++ b/inference-engine/src/inference_engine/ie_plugin_cpp.hpp @@ -16,25 +16,39 @@ #include "file_utils.h" #include "cpp/ie_executable_network.hpp" #include "cpp/ie_cnn_network.h" -#include "details/ie_exception_conversion.hpp" #include "ie_plugin_ptr.hpp" +#include "cpp_interfaces/exception2status.hpp" #if defined __GNUC__ # pragma GCC diagnostic push # pragma GCC diagnostic ignored "-Wreturn-type" #endif +#define CATCH_IE_EXCEPTION(ExceptionType) catch (const InferenceEngine::ExceptionType& e) {throw e;} + +#define CATCH_IE_EXCEPTIONS \ + CATCH_IE_EXCEPTION(GeneralError) \ + CATCH_IE_EXCEPTION(NotImplemented) \ + CATCH_IE_EXCEPTION(NetworkNotLoaded) \ + CATCH_IE_EXCEPTION(ParameterMismatch) \ + CATCH_IE_EXCEPTION(NotFound) \ + CATCH_IE_EXCEPTION(OutOfBounds) \ + CATCH_IE_EXCEPTION(Unexpected) \ + CATCH_IE_EXCEPTION(RequestBusy) \ + CATCH_IE_EXCEPTION(ResultNotReady) \ + CATCH_IE_EXCEPTION(NotAllocated) \ + CATCH_IE_EXCEPTION(InferNotStarted) \ + CATCH_IE_EXCEPTION(NetworkNotRead) \ + CATCH_IE_EXCEPTION(InferCancelled) + #define CALL_STATEMENT(...) \ if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATEMENT was not initialized."; \ try { \ __VA_ARGS__; \ - } catch (const InferenceEngine::details::InferenceEngineException& iex) { \ - InferenceEngine::details::extract_exception(iex.hasStatus() ? \ - iex.getStatus() : GENERAL_ERROR, iex.what()); \ - } catch (const std::exception& ex) { \ - InferenceEngine::details::extract_exception(GENERAL_ERROR, ex.what()); \ + } CATCH_IE_EXCEPTIONS catch (const std::exception& ex) { \ + THROW_IE_EXCEPTION << ex.what(); \ } catch (...) 
{ \ - InferenceEngine::details::extract_exception(UNEXPECTED, ""); \ + THROW_IE_EXCEPTION_WITH_STATUS(Unexpected); \ } namespace InferenceEngine { diff --git a/inference-engine/src/inference_engine/ie_rtti.cpp b/inference-engine/src/inference_engine/ie_rtti.cpp deleted file mode 100644 index e32e69ff188..00000000000 --- a/inference-engine/src/inference_engine/ie_rtti.cpp +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright (C) 2018-2020 Intel Corporation -// SPDX-License-Identifier: Apache-2.0 -// - -#include -#include -#include -#include -#include - -#include
-#include -#include -#include -#include -#include - -#include - -using namespace InferenceEngine; - -// -// exec_graph_info.hpp -// -constexpr ngraph::NodeTypeInfo ExecGraphInfoSerialization::ExecutionNode::type_info; - -const ngraph::NodeTypeInfo& -ExecGraphInfoSerialization::ExecutionNode::get_type_info() const { - return type_info; -} - -// -// ie_blob.h -// - -Blob::~Blob() {} -MemoryBlob::~MemoryBlob() {} - -// -// ie_iextension.h -// -ILayerImpl::~ILayerImpl() {} -ILayerExecImpl::~ILayerExecImpl() {} -std::map IExtension::getOpSets() { - return {}; -} - -// -// ie_extension.h -// -std::map Extension::getOpSets() { - return actual->getOpSets(); -} - -// -// details/ie_exception.hpp -// - -details::InferenceEngineException::~InferenceEngineException() noexcept {} - -details::InferenceEngineException::InferenceEngineException(const std::string& filename, const int line, const std::string& message) noexcept : - std::exception(), _file(filename), _line(line) { - if (!message.empty()) { - exception_stream = std::make_shared(message); - } -} - -details::InferenceEngineException::InferenceEngineException(const InferenceEngineException& that) noexcept : - std::exception() { - errorDesc = that.errorDesc; - status_code = that.status_code; - _file = that._file; - _line = that._line; - exception_stream = that.exception_stream; -} -// -// ie_parameter.hpp -// - -Parameter::~Parameter() { - clear(); -} - -#if defined(__clang__) && !defined(__SYCL_COMPILER_VERSION) -Parameter::Any::~Any() {} - -template struct InferenceEngine::Parameter::RealData; -template struct InferenceEngine::Parameter::RealData; -template struct InferenceEngine::Parameter::RealData; -template struct InferenceEngine::Parameter::RealData; -template struct InferenceEngine::Parameter::RealData; -template struct InferenceEngine::Parameter::RealData; -template struct InferenceEngine::Parameter::RealData; -template struct InferenceEngine::Parameter::RealData>; -template struct 
InferenceEngine::Parameter::RealData>; -template struct InferenceEngine::Parameter::RealData>; -template struct InferenceEngine::Parameter::RealData>; -template struct InferenceEngine::Parameter::RealData>; -template struct InferenceEngine::Parameter::RealData; -#endif // __clang__ && !__SYCL_COMPILER_VERSION -// -// ie_blob.h -// - -#if defined(__clang__) && !defined(__SYCL_COMPILER_VERSION) -template -TBlob::~TBlob() { - free(); -} - -template class InferenceEngine::TBlob; -template class InferenceEngine::TBlob; -template class InferenceEngine::TBlob; -template class InferenceEngine::TBlob; -template class InferenceEngine::TBlob; -template class InferenceEngine::TBlob; -template class InferenceEngine::TBlob; -template class InferenceEngine::TBlob; -template class InferenceEngine::TBlob; -template class InferenceEngine::TBlob; -template class InferenceEngine::TBlob; -template class InferenceEngine::TBlob; -#endif // __clang__ && !__SYCL_COMPILER_VERSION diff --git a/inference-engine/src/inference_engine/os/lin/lin_shared_object_loader.cpp b/inference-engine/src/inference_engine/os/lin/lin_shared_object_loader.cpp index 62726738b10..f47080bd38a 100644 --- a/inference-engine/src/inference_engine/os/lin/lin_shared_object_loader.cpp +++ b/inference-engine/src/inference_engine/os/lin/lin_shared_object_loader.cpp @@ -4,7 +4,6 @@ #include -#include "details/ie_exception.hpp" #include "details/ie_so_loader.h" #include "file_utils.h" @@ -38,14 +37,14 @@ public: * @brief Searches for a function symbol in the loaded module * @param symbolName Name of the function to find * @return A pointer to the function if found - * @throws InferenceEngineException if the function is not found + * @throws Exception if the function is not found */ void* get_symbol(const char* symbolName) const { void* procAddr = nullptr; procAddr = dlsym(shared_object, symbolName); if (procAddr == nullptr) - THROW_IE_EXCEPTION << details::as_status << NOT_FOUND + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << 
"dlSym cannot locate method '" << symbolName << "': " << dlerror(); return procAddr; } diff --git a/inference-engine/src/inference_engine/os/lin/lin_system_conf.cpp b/inference-engine/src/inference_engine/os/lin/lin_system_conf.cpp index f7ec324195d..3dd65ae7abe 100644 --- a/inference-engine/src/inference_engine/os/lin/lin_system_conf.cpp +++ b/inference-engine/src/inference_engine/os/lin/lin_system_conf.cpp @@ -10,7 +10,7 @@ #include #include "ie_system_conf.h" #include "ie_parallel.hpp" -#include "details/ie_exception.hpp" +#include "ie_common.h" #include diff --git a/inference-engine/src/inference_engine/os/win/win_shared_object_loader.cpp b/inference-engine/src/inference_engine/os/win/win_shared_object_loader.cpp index 93fdd3e1916..2254ced6fc5 100644 --- a/inference-engine/src/inference_engine/os/win/win_shared_object_loader.cpp +++ b/inference-engine/src/inference_engine/os/win/win_shared_object_loader.cpp @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 // -#include "details/ie_exception.hpp" +#include "ie_common.h" #include "details/ie_so_loader.h" #include "file_utils.h" @@ -239,7 +239,7 @@ class SharedObjectLoader::Impl { * @brief Searches for a function symbol in the loaded module * @param symbolName Name of function to find * @return A pointer to the function if found - * @throws InferenceEngineException if the function is not found + * @throws Exception if the function is not found */ void* get_symbol(const char* symbolName) const { if (!shared_object) { @@ -247,7 +247,7 @@ class SharedObjectLoader::Impl { } auto procAddr = reinterpret_cast(GetProcAddress(shared_object, symbolName)); if (procAddr == nullptr) - THROW_IE_EXCEPTION << details::as_status << NOT_FOUND + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "GetProcAddress cannot locate method '" << symbolName << "': " << GetLastError(); return procAddr; diff --git a/inference-engine/src/inference_engine/precision_utils.cpp b/inference-engine/src/inference_engine/precision_utils.cpp index 
991ae5f6775..0b9d7185b8a 100644 --- a/inference-engine/src/inference_engine/precision_utils.cpp +++ b/inference-engine/src/inference_engine/precision_utils.cpp @@ -3,7 +3,6 @@ // #include "precision_utils.h" -#include
#include diff --git a/inference-engine/src/inference_engine/threading/ie_cpu_streams_executor.cpp b/inference-engine/src/inference_engine/threading/ie_cpu_streams_executor.cpp index ede8313359f..7632fd73491 100644 --- a/inference-engine/src/inference_engine/threading/ie_cpu_streams_executor.cpp +++ b/inference-engine/src/inference_engine/threading/ie_cpu_streams_executor.cpp @@ -18,7 +18,6 @@ #include "ie_parallel.hpp" #include "ie_system_conf.h" #include "threading/ie_thread_affinity.hpp" -#include "details/ie_exception.hpp" #include "threading/ie_cpu_streams_executor.hpp" #include diff --git a/inference-engine/src/inference_engine/threading/ie_istreams_executor.cpp b/inference-engine/src/inference_engine/threading/ie_istreams_executor.cpp index 6ee30912faf..97ba78277ee 100644 --- a/inference-engine/src/inference_engine/threading/ie_istreams_executor.cpp +++ b/inference-engine/src/inference_engine/threading/ie_istreams_executor.cpp @@ -5,7 +5,6 @@ #include "threading/ie_istreams_executor.hpp" #include "ie_plugin_config.hpp" #include "cpp_interfaces/interface/ie_internal_plugin_config.hpp" -#include "details/ie_exception.hpp" #include "ie_parallel.hpp" #include "ie_system_conf.h" #include "ie_parameter.hpp" diff --git a/inference-engine/src/inference_engine/xml_parse_utils.cpp b/inference-engine/src/inference_engine/xml_parse_utils.cpp index 1a5cfd8a266..0a9f6762ddd 100644 --- a/inference-engine/src/inference_engine/xml_parse_utils.cpp +++ b/inference-engine/src/inference_engine/xml_parse_utils.cpp @@ -10,7 +10,6 @@ #include #include -#include "details/ie_exception.hpp" #include "ie_precision.hpp" int XMLParseUtils::GetIntAttr(const pugi::xml_node& node, const char* str) { diff --git a/inference-engine/src/legacy_api/include/legacy/ie_layers.h b/inference-engine/src/legacy_api/include/legacy/ie_layers.h index 97667158142..9114e08d9a3 100644 --- a/inference-engine/src/legacy_api/include/legacy/ie_layers.h +++ 
b/inference-engine/src/legacy_api/include/legacy/ie_layers.h @@ -205,7 +205,7 @@ public: * * @param str input string with float value * @return float value if parsing was successful - * @throws InferenceEngineException in case of parsing error + * @throws Exception in case of parsing error */ static float ie_parse_float(const std::string& str); diff --git a/inference-engine/src/legacy_api/include/legacy/ie_layers_property.hpp b/inference-engine/src/legacy_api/include/legacy/ie_layers_property.hpp index fcc9b872c4e..3456f6720b6 100644 --- a/inference-engine/src/legacy_api/include/legacy/ie_layers_property.hpp +++ b/inference-engine/src/legacy_api/include/legacy/ie_layers_property.hpp @@ -4,13 +4,14 @@ /** * @brief a header file for describing property style structure used by CNNLayers - * + * * @file ie_layers_property.hpp */ #pragma once #include -#include
+ +#include namespace InferenceEngine { diff --git a/inference-engine/src/legacy_api/src/cnn_network_impl.cpp b/inference-engine/src/legacy_api/src/cnn_network_impl.cpp index 69ba2ea25d5..45cc404ada0 100644 --- a/inference-engine/src/legacy_api/src/cnn_network_impl.cpp +++ b/inference-engine/src/legacy_api/src/cnn_network_impl.cpp @@ -400,7 +400,7 @@ StatusCode CNNNetworkImpl::serialize(const std::string& xmlPath, const std::stri std::const_pointer_cast(shared_from_this()))); return OK; #endif - } catch (const InferenceEngineException& e) { + } catch (const Exception& e) { return DescriptionBuffer(GENERAL_ERROR, resp) << e.what(); } catch (const std::exception& e) { return DescriptionBuffer(UNEXPECTED, resp) << e.what(); @@ -448,7 +448,7 @@ StatusCode CNNNetworkImpl::setBatchSize(size_t size, ResponseDesc* responseDesc) } } return OK; - } catch (const InferenceEngineException& e) { + } catch (const Exception& e) { return DescriptionBuffer(GENERAL_ERROR, responseDesc) << e.what(); } catch (const std::exception& e) { return DescriptionBuffer(UNEXPECTED, responseDesc) << e.what(); @@ -472,7 +472,7 @@ StatusCode CNNNetworkImpl::setBatchSizeReshape(size_t size, ResponseDesc* respon } } return reshape(inputShapes, responseDesc); - } catch (const InferenceEngineException& e) { + } catch (const Exception& e) { return DescriptionBuffer(GENERAL_ERROR, responseDesc) << e.what(); } catch (const std::exception& e) { return DescriptionBuffer(UNEXPECTED, responseDesc) << e.what(); diff --git a/inference-engine/src/legacy_api/src/ie_layer_validators.cpp b/inference-engine/src/legacy_api/src/ie_layer_validators.cpp index cd21dc63468..df52c676200 100644 --- a/inference-engine/src/legacy_api/src/ie_layer_validators.cpp +++ b/inference-engine/src/legacy_api/src/ie_layer_validators.cpp @@ -36,7 +36,7 @@ void CNNLayer::parseParams() { try { LayerValidator::Ptr validator = LayerValidators::getInstance()->getValidator(type); validator->parseParams(this); - } catch (const 
InferenceEngineException& ie_e) { + } catch (const Exception& ie_e) { THROW_IE_EXCEPTION << "Error of validate layer: " << this->name << " with type: " << this->type << ". " << ie_e.what(); } diff --git a/inference-engine/src/legacy_api/src/ie_layers_internal.cpp b/inference-engine/src/legacy_api/src/ie_layers_internal.cpp index e2690456741..8d165abbb9f 100644 --- a/inference-engine/src/legacy_api/src/ie_layers_internal.cpp +++ b/inference-engine/src/legacy_api/src/ie_layers_internal.cpp @@ -91,7 +91,7 @@ Paddings getPaddingsInternal(const Layer& layer) { } } return {layer._padding, layer._pads_end}; - } catch (const InferenceEngine::details::InferenceEngineException& iee) { + } catch (const InferenceEngine::Exception& iee) { THROW_IE_EXCEPTION << errorPrefix << iee.what(); } } diff --git a/inference-engine/src/low_precision_transformations/include/low_precision/common/ie_lpt_exception.hpp b/inference-engine/src/low_precision_transformations/include/low_precision/common/ie_lpt_exception.hpp index a426b2b8fe0..ab0c38e92da 100644 --- a/inference-engine/src/low_precision_transformations/include/low_precision/common/ie_lpt_exception.hpp +++ b/inference-engine/src/low_precision_transformations/include/low_precision/common/ie_lpt_exception.hpp @@ -19,12 +19,12 @@ namespace ngraph { namespace pass { namespace low_precision { -class TRANSFORMATIONS_API InferenceEngineException : std::exception { +class TRANSFORMATIONS_API Exception : std::exception { std::shared_ptr buffer; mutable std::string buffer_str; public: template - InferenceEngineException& operator<< (const T& x) { + Exception& operator<< (const T& x) { *buffer << x; return *this; } @@ -35,10 +35,10 @@ public: } }; -#define THROW_TRANSFORMATION_EXCEPTION throw ::ngraph::pass::low_precision::InferenceEngineException() << __FILE__ << ":" << __LINE__ << " " +#define THROW_TRANSFORMATION_EXCEPTION throw ::ngraph::pass::low_precision::Exception() << __FILE__ << ":" << __LINE__ << " " -class TRANSFORMATIONS_API 
InferenceEngineLptException : public InferenceEngineException { +class TRANSFORMATIONS_API InferenceEngineLptException : public Exception { public: InferenceEngineLptException(const std::string& filename, const size_t line, const Node& node) { *this diff --git a/inference-engine/src/mkldnn_plugin/config.cpp b/inference-engine/src/mkldnn_plugin/config.cpp index 20136f50b75..5bdb5a805d3 100644 --- a/inference-engine/src/mkldnn_plugin/config.cpp +++ b/inference-engine/src/mkldnn_plugin/config.cpp @@ -106,7 +106,7 @@ void Config::readProperties(const std::map &prop) { << ". Expected only YES/NO"; } } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Unsupported property " << key << " by CPU plugin"; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Unsupported property " << key << " by CPU plugin"; } _config.clear(); } diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_descriptor.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_descriptor.cpp index 6a9cb76b4b0..dcaf1e19b92 100644 --- a/inference-engine/src/mkldnn_plugin/mkldnn_descriptor.cpp +++ b/inference-engine/src/mkldnn_plugin/mkldnn_descriptor.cpp @@ -2,7 +2,8 @@ // SPDX-License-Identifier: Apache-2.0 // -#include
+#include + #include "mkldnn_descriptor.h" mkldnn::primitive_desc_iterator MKLDNNDescriptor::createPrimitiveDescriptorIterator(const mkldnn::engine &engine, diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp index 4f1c14b311f..7a3ed75d9fd 100644 --- a/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp +++ b/inference-engine/src/mkldnn_plugin/mkldnn_infer_request.cpp @@ -296,14 +296,14 @@ InferenceEngine::Blob::Ptr MKLDNNPlugin::MKLDNNInferRequest::GetBlob(const std:: void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const InferenceEngine::Blob::Ptr &data) { OV_ITT_SCOPED_TASK(itt::domains::MKLDNNPlugin, "SetBlob"); if (name.empty()) { - THROW_IE_EXCEPTION << NOT_FOUND_str + "Failed to set blob with empty name"; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Failed to set blob with empty name"; } if (!data) - THROW_IE_EXCEPTION << NOT_ALLOCATED_str << "Failed to set empty blob with name: \'" << name << "\'"; + THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) << "Failed to set empty blob with name: \'" << name << "\'"; const bool compoundBlobPassed = data->is(); if (!compoundBlobPassed && data->buffer() == nullptr) - THROW_IE_EXCEPTION << "Input data was not allocated. Input name: \'" << name << "\'"; + THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) << "Input data was not allocated. Input name: \'" << name << "\'"; if (data->size() == 0) { THROW_IE_EXCEPTION << "Input data is empty. 
Input name: \'" << name << "\'"; } @@ -313,13 +313,13 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In size_t dataSize = data->size(); if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) { if (foundInput->getPrecision() != data->getTensorDesc().getPrecision()) { - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set input blob with precision: " + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set input blob with precision: " << data->getTensorDesc().getPrecision() << ", if CNNNetwork input blob precision is: " << foundInput->getPrecision(); } const bool preProcRequired = preProcessingRequired(foundInput, data); if (compoundBlobPassed && !preProcRequired) { - THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << "cannot set compound blob: supported only for input pre-processing"; } @@ -341,12 +341,12 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In } if (foundInput->getTensorDesc().getDims() != data->getTensorDesc().getDims()) { - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set input blob. Dimensions mismatch."; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set input blob. Dimensions mismatch."; } if (data->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY && foundInput->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY && foundInput->getTensorDesc().getBlockingDesc() != data->getTensorDesc().getBlockingDesc()) { - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set input blob. Blocking descriptor mismatch."; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set input blob. 
Blocking descriptor mismatch."; } if (data->getTensorDesc().getPrecision() == InferenceEngine::Precision::FP32 && @@ -359,11 +359,11 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In } } else { if (compoundBlobPassed) { - THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << "cannot set compound blob: supported only for input pre-processing"; } if (foundOutput->getPrecision() != data->getTensorDesc().getPrecision()) { - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set output blob with precision: " + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set output blob with precision: " << data->getTensorDesc().getPrecision() << ", if CNNNetwork output blob precision is: " << foundOutput->getPrecision(); } size_t outputSize = foundOutput->getTensorDesc().getLayout() != InferenceEngine::Layout::SCALAR @@ -374,11 +374,11 @@ void MKLDNNPlugin::MKLDNNInferRequest::SetBlob(const std::string& name, const In << dataSize << "!=" << outputSize << ")."; } if (foundOutput->getTensorDesc().getDims() != data->getTensorDesc().getDims()) { - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set output Blob. Dimensions mismatch."; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set output Blob. Dimensions mismatch."; } if (data->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY && foundOutput->getTensorDesc().getLayout() != InferenceEngine::Layout::ANY && foundOutput->getTensorDesc().getBlockingDesc() != data->getTensorDesc().getBlockingDesc()) { - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str << "Failed to set output blob. Blocking descriptor mismatch."; + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set output blob. 
Blocking descriptor mismatch."; } if (data->getTensorDesc().getPrecision() == InferenceEngine::Precision::FP32 && !graph->getProperty().batchLimit) { diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_memory_solver.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_memory_solver.cpp index 3bf656cbf81..3758eec3ff1 100644 --- a/inference-engine/src/mkldnn_plugin/mkldnn_memory_solver.cpp +++ b/inference-engine/src/mkldnn_plugin/mkldnn_memory_solver.cpp @@ -2,9 +2,10 @@ // SPDX-License-Identifier: Apache-2.0 // +#include + #include "mkldnn_memory_solver.hpp" -#include
#include #include diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_plugin.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_plugin.cpp index 862ee03fb0e..72bc5ec923f 100644 --- a/inference-engine/src/mkldnn_plugin/mkldnn_plugin.cpp +++ b/inference-engine/src/mkldnn_plugin/mkldnn_plugin.cpp @@ -345,7 +345,7 @@ Engine::LoadExeNetworkImpl(const InferenceEngine::CNNNetwork &network, const std input_precision != InferenceEngine::Precision::BOOL && input_precision != InferenceEngine::Precision::I64 && input_precision != InferenceEngine::Precision::U64) { - THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << "Input image format " << input_precision << " is not supported yet..."; } } @@ -513,7 +513,7 @@ QueryNetworkResult Engine::QueryNetwork(const CNNNetwork& network, const std::ma std::unique_ptr ptr; try { ptr.reset(MKLDNNNode::factory().create(*itLayer, {mkldnn::engine::kind::cpu, 0}, extensionManager, fake_w_cache)); - } catch (InferenceEngine::details::InferenceEngineException&) { + } catch (InferenceEngine::Exception&) { return false; } return true; @@ -569,7 +569,7 @@ QueryNetworkResult Engine::QueryNetwork(const CNNNetwork& network, const std::ma // if we can create and have not thrown exception, then layer is supported std::unique_ptr (MKLDNNNode::factory().create(*i, eng, extensionManager, fake_w_cache)); res.supportedLayersMap.insert({ (*i)->name, GetName() }); - } catch (InferenceEngine::details::InferenceEngineException&) { + } catch (InferenceEngine::Exception&) { } i++; } diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_primitive.h b/inference-engine/src/mkldnn_plugin/mkldnn_primitive.h index e2c84835bb2..2feb8ea9d9a 100644 --- a/inference-engine/src/mkldnn_plugin/mkldnn_primitive.h +++ b/inference-engine/src/mkldnn_plugin/mkldnn_primitive.h @@ -9,7 +9,6 @@ #include #include #include -#include
namespace MKLDNNPlugin { diff --git a/inference-engine/src/mkldnn_plugin/nodes/argmax.cpp b/inference-engine/src/mkldnn_plugin/nodes/argmax.cpp index 63fa62a5807..bb10784a340 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/argmax.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/argmax.cpp @@ -28,7 +28,7 @@ public: std::stoi(layer->params.at("axis")) :0; addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/batch_to_space.cpp b/inference-engine/src/mkldnn_plugin/nodes/batch_to_space.cpp index 60e15726fc9..019d7001082 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/batch_to_space.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/batch_to_space.cpp @@ -66,7 +66,7 @@ public: config.outConfs.push_back(outConfig); config.dynBatchSupport = false; confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/broadcast.cpp b/inference-engine/src/mkldnn_plugin/nodes/broadcast.cpp index f975202b078..007c9094e27 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/broadcast.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/broadcast.cpp @@ -46,7 +46,7 @@ public: config.outConfs.push_back(outConfig); config.dynBatchSupport = false; confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/bucketize.cpp b/inference-engine/src/mkldnn_plugin/nodes/bucketize.cpp index e27a1b83c27..7f28d99acc5 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/bucketize.cpp +++ 
b/inference-engine/src/mkldnn_plugin/nodes/bucketize.cpp @@ -73,7 +73,7 @@ public: { DataConfigurator(ConfLayout::PLN, input_precision), DataConfigurator(ConfLayout::PLN, boundaries_precision) }, { DataConfigurator(ConfLayout::PLN, output_precision) }); } - catch (InferenceEngine::details::InferenceEngineException &ex) { + catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/common/tensor_desc_creator.h b/inference-engine/src/mkldnn_plugin/nodes/common/tensor_desc_creator.h index a3938a06583..6f013aae563 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/common/tensor_desc_creator.h +++ b/inference-engine/src/mkldnn_plugin/nodes/common/tensor_desc_creator.h @@ -5,6 +5,7 @@ #pragma once #include +#include namespace MKLDNNPlugin { diff --git a/inference-engine/src/mkldnn_plugin/nodes/cum_sum.cpp b/inference-engine/src/mkldnn_plugin/nodes/cum_sum.cpp index 6e12e4ef2eb..1296de3f3e3 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/cum_sum.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/cum_sum.cpp @@ -87,7 +87,7 @@ public: config.dynBatchSupport = false; confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/depth_to_space.cpp b/inference-engine/src/mkldnn_plugin/nodes/depth_to_space.cpp index 42809f21026..7b754c65ee6 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/depth_to_space.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/depth_to_space.cpp @@ -96,7 +96,7 @@ public: config.dynBatchSupport = false; confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/detectionoutput.cpp b/inference-engine/src/mkldnn_plugin/nodes/detectionoutput.cpp index 
c53d55ffc4a..612202f8ce8 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/detectionoutput.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/detectionoutput.cpp @@ -114,7 +114,7 @@ public: std::vector in_data_conf(layer->insData.size(), DataConfigurator(ConfLayout::PLN, Precision::FP32)); addConfig(layer, in_data_conf, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/detectionoutput_onnx.cpp b/inference-engine/src/mkldnn_plugin/nodes/detectionoutput_onnx.cpp index c1f75770669..63043bfead3 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/detectionoutput_onnx.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/detectionoutput_onnx.cpp @@ -273,7 +273,7 @@ public: config.outConfs.push_back(dataS); config.dynBatchSupport = false; confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/embedding_bag_sum.cpp b/inference-engine/src/mkldnn_plugin/nodes/embedding_bag_sum.cpp index dace4c5195c..2cce91d3500 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/embedding_bag_sum.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/embedding_bag_sum.cpp @@ -89,7 +89,7 @@ MKLDNNEmbeddingBagSum::MKLDNNEmbeddingBagSum( for (size_t i = 1lu; i < inDataDims.size(); i++) { _embDepth *= inDataDims[i]; } - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/extract_image_patches.cpp b/inference-engine/src/mkldnn_plugin/nodes/extract_image_patches.cpp index 92f83a2e770..40cf7eccb6d 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/extract_image_patches.cpp +++ 
b/inference-engine/src/mkldnn_plugin/nodes/extract_image_patches.cpp @@ -83,7 +83,7 @@ public: config.dynBatchSupport = false; confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/fill.cpp b/inference-engine/src/mkldnn_plugin/nodes/fill.cpp index e3831b83ef1..36e9ce96f2f 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/fill.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/fill.cpp @@ -42,7 +42,7 @@ public: addConfig(layer, { DataConfigurator(ConfLayout::PLN, Precision::I32), DataConfigurator(ConfLayout::PLN) }, { DataConfigurator(ConfLayout::PLN) }); } - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/gather.cpp b/inference-engine/src/mkldnn_plugin/nodes/gather.cpp index 24af01f7754..b5429647755 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/gather.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/gather.cpp @@ -68,7 +68,7 @@ public: config.outConfs.push_back(dataConfigOut); config.dynBatchSupport = false; confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/gather_tree.cpp b/inference-engine/src/mkldnn_plugin/nodes/gather_tree.cpp index 7a0b527c18a..bf515d47a12 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/gather_tree.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/gather_tree.cpp @@ -51,7 +51,7 @@ public: addConfig(layer, { DataConfigurator(ConfLayout::PLN, precision), DataConfigurator(ConfLayout::PLN, precision), DataConfigurator(ConfLayout::PLN, precision), DataConfigurator(ConfLayout::PLN, precision) }, { DataConfigurator(ConfLayout::PLN, precision) }); - } catch 
(InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/grn.cpp b/inference-engine/src/mkldnn_plugin/nodes/grn.cpp index d412ab38554..b7cdfb147f5 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/grn.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/grn.cpp @@ -23,7 +23,7 @@ public: bias = layer->GetParamAsFloat("bias"); addConfig(layer, {{ConfLayout::PLN, false, 0, Precision::FP32}}, {{ConfLayout::PLN, false, 0, Precision::FP32}}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/log_softmax.cpp b/inference-engine/src/mkldnn_plugin/nodes/log_softmax.cpp index 01872db73d7..b6f55e2a7c8 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/log_softmax.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/log_softmax.cpp @@ -49,7 +49,7 @@ public: reduced_axis_stride *= dims[i]; addConfig(layer, { { ConfLayout::PLN, false, 0, Precision::FP32 } }, { { ConfLayout::PLN, false, 0, Precision::FP32 } }); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/math.cpp b/inference-engine/src/mkldnn_plugin/nodes/math.cpp index 26d5939b986..90182b88c8b 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/math.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/math.cpp @@ -87,7 +87,7 @@ public: THROW_IE_EXCEPTION << layer->name << " Incorrect Math layer type!"; addConfig(layer, {DataConfigurator(ConfLayout::PLN, false, 0, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, false, 0, Precision::FP32)}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git 
a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_concat_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_concat_node.cpp index f5b295ed0ce..6154dd674b8 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_concat_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_concat_node.cpp @@ -9,7 +9,6 @@ #include #include -#include "details/ie_exception.hpp" #include #include "mkldnn.hpp" #include "mkldnn/iml_type_mapper.h" diff --git a/inference-engine/src/mkldnn_plugin/nodes/non_max_suppression.cpp b/inference-engine/src/mkldnn_plugin/nodes/non_max_suppression.cpp index f56f77ee695..0a11850989b 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/non_max_suppression.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/non_max_suppression.cpp @@ -138,7 +138,7 @@ public: config.dynBatchSupport = false; confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/one_hot.cpp b/inference-engine/src/mkldnn_plugin/nodes/one_hot.cpp index 646e069e6bf..a544899009f 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/one_hot.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/one_hot.cpp @@ -73,7 +73,7 @@ public: config.outConfs.push_back(dataConfig); confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception& ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/powerfile.cpp b/inference-engine/src/mkldnn_plugin/nodes/powerfile.cpp index 6aa503ae018..c48339321ca 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/powerfile.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/powerfile.cpp @@ -28,7 +28,7 @@ public: shift_.push_back(0); addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}); - } catch 
(InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/priorbox.cpp b/inference-engine/src/mkldnn_plugin/nodes/priorbox.cpp index 74c5d2b5461..7d7b5f9c19f 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/priorbox.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/priorbox.cpp @@ -117,7 +117,7 @@ public: } addConfig(layer, {{ConfLayout::ANY, true}, {ConfLayout::ANY, true}}, {{ConfLayout::PLN, true, -1, Precision::FP32}}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/priorbox_clustered.cpp b/inference-engine/src/mkldnn_plugin/nodes/priorbox_clustered.cpp index 1fcd1df6c39..eb44aa93084 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/priorbox_clustered.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/priorbox_clustered.cpp @@ -33,7 +33,7 @@ public: offset_ = layer->GetParamAsFloat("offset"); addConfig(layer, {{ConfLayout::PLN, true}, {ConfLayout::PLN, true}}, {{ConfLayout::PLN, true, -1, Precision::FP32}}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/priorgridgenerator_onnx.cpp b/inference-engine/src/mkldnn_plugin/nodes/priorgridgenerator_onnx.cpp index c98e7475f23..94a1525e1a5 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/priorgridgenerator_onnx.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/priorgridgenerator_onnx.cpp @@ -47,7 +47,7 @@ public: addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::ANY), DataConfigurator(ConfLayout::ANY)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch 
(InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/proposal.cpp b/inference-engine/src/mkldnn_plugin/nodes/proposal.cpp index ac9f12ba77b..311a064689c 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/proposal.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/proposal.cpp @@ -126,7 +126,7 @@ public: addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}); } - } catch (const InferenceEngine::details::InferenceEngineException &ex) { + } catch (const InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } @@ -179,7 +179,7 @@ public: {img_H, img_W, scale_H, scale_W}, anchors.data(), roi_indices.data(), p_roi_item, p_prob_item, conf); return OK; - } catch (const InferenceEngine::details::InferenceEngineException& e) { + } catch (const InferenceEngine::Exception& e) { if (resp) { std::string errorMsg = e.what(); errorMsg.copy(resp->msg, sizeof(resp->msg) - 1); diff --git a/inference-engine/src/mkldnn_plugin/nodes/proposal_onnx.cpp b/inference-engine/src/mkldnn_plugin/nodes/proposal_onnx.cpp index a66eada5961..59dee2913c8 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/proposal_onnx.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/proposal_onnx.cpp @@ -299,7 +299,7 @@ public: {DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32)}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } @@ -408,7 +408,7 @@ public: } return OK; - } catch (const 
InferenceEngine::details::InferenceEngineException& e) { + } catch (const std::exception& e) { if (resp) { std::string errorMsg = e.what(); errorMsg.copy(resp->msg, sizeof(resp->msg) - 1); diff --git a/inference-engine/src/mkldnn_plugin/nodes/psroi.cpp b/inference-engine/src/mkldnn_plugin/nodes/psroi.cpp index 7d3173e009e..19ef019a909 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/psroi.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/psroi.cpp @@ -89,7 +89,7 @@ public: DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN)}, {DataConfigurator(ConfLayout::PLN, supportedPrecision)}); } - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/range.cpp b/inference-engine/src/mkldnn_plugin/nodes/range.cpp index 693f768c83d..461582786a2 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/range.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/range.cpp @@ -54,7 +54,7 @@ public: addConfig(layer, { DataConfigurator(ConfLayout::PLN), DataConfigurator(ConfLayout::PLN), DataConfigurator(ConfLayout::PLN) }, { DataConfigurator(ConfLayout::PLN) }); } - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/region_yolo.cpp b/inference-engine/src/mkldnn_plugin/nodes/region_yolo.cpp index 5d6a5c4667c..3b8a23d9d67 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/region_yolo.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/region_yolo.cpp @@ -310,7 +310,7 @@ public: logistic_kernel->create_ker(); addConfig(layer, {DataConfigurator(ConfLayout::PLN, input_prec)}, {DataConfigurator(ConfLayout::PLN, output_prec)}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git 
a/inference-engine/src/mkldnn_plugin/nodes/reorg_yolo.cpp b/inference-engine/src/mkldnn_plugin/nodes/reorg_yolo.cpp index 750b3634015..49d95406bf0 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/reorg_yolo.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/reorg_yolo.cpp @@ -19,7 +19,7 @@ public: stride = layer->GetParamAsInt("stride"); addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/reverse_sequence.cpp b/inference-engine/src/mkldnn_plugin/nodes/reverse_sequence.cpp index bcb8d90c28c..05f1928fa7a 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/reverse_sequence.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/reverse_sequence.cpp @@ -64,7 +64,7 @@ public: addConfig(layer, { DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, lengthsPrecision) }, { DataConfigurator(ConfLayout::PLN, Precision::FP32) }); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/roifeatureextractor_onnx.cpp b/inference-engine/src/mkldnn_plugin/nodes/roifeatureextractor_onnx.cpp index f95de39c184..9695a8f6da9 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/roifeatureextractor_onnx.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/roifeatureextractor_onnx.cpp @@ -331,7 +331,7 @@ public: std::vector inputs_layouts(layer->insData.size(), DataConfigurator(ConfLayout::PLN, Precision::FP32)); std::vector outputs_layouts(layer->outData.size(), DataConfigurator(ConfLayout::PLN, Precision::FP32)); addConfig(layer, inputs_layouts, outputs_layouts); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch 
(InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/select.cpp b/inference-engine/src/mkldnn_plugin/nodes/select.cpp index 5e84e9fa8f7..af90f60dff8 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/select.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/select.cpp @@ -120,7 +120,7 @@ public: config.dynBatchSupport = false; confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/shuffle_channels.cpp b/inference-engine/src/mkldnn_plugin/nodes/shuffle_channels.cpp index 0943a97bd4e..3891371d6ec 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/shuffle_channels.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/shuffle_channels.cpp @@ -107,7 +107,7 @@ public: config.dynBatchSupport = false; confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/simplernms.cpp b/inference-engine/src/mkldnn_plugin/nodes/simplernms.cpp index 80997266cdb..47c42d4c523 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/simplernms.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/simplernms.cpp @@ -227,7 +227,7 @@ public: addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/space_to_batch.cpp b/inference-engine/src/mkldnn_plugin/nodes/space_to_batch.cpp index fc9b08aa05d..30adc953b7b 100644 --- 
a/inference-engine/src/mkldnn_plugin/nodes/space_to_batch.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/space_to_batch.cpp @@ -62,7 +62,7 @@ public: config.outConfs.push_back(outConfig); config.dynBatchSupport = false; confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/space_to_depth.cpp b/inference-engine/src/mkldnn_plugin/nodes/space_to_depth.cpp index c76682c4716..13319f6aad5 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/space_to_depth.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/space_to_depth.cpp @@ -96,7 +96,7 @@ public: config.dynBatchSupport = false; confs.push_back(config); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/sparse_fill_empty_rows.cpp b/inference-engine/src/mkldnn_plugin/nodes/sparse_fill_empty_rows.cpp index a73e58a353f..3f4c9d4cdbb 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/sparse_fill_empty_rows.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/sparse_fill_empty_rows.cpp @@ -75,7 +75,7 @@ public: {DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32)}); } - catch (InferenceEngine::details::InferenceEngineException &ex) { + catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/sparse_segment_reduce.cpp b/inference-engine/src/mkldnn_plugin/nodes/sparse_segment_reduce.cpp index 2bea7fde1fb..eeffa8dbcfc 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/sparse_segment_reduce.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/sparse_segment_reduce.cpp @@ -71,7 +71,7 @@ public: { DataConfigurator(ConfLayout::PLN, Precision::FP32), 
DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32) }, { DataConfigurator(ConfLayout::PLN, Precision::FP32) }); } - catch (InferenceEngine::details::InferenceEngineException &ex) { + catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/sparse_to_dense.cpp b/inference-engine/src/mkldnn_plugin/nodes/sparse_to_dense.cpp index 526248c580e..a6556da89ef 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/sparse_to_dense.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/sparse_to_dense.cpp @@ -63,7 +63,7 @@ public: { DataConfigurator(ConfLayout::PLN, Precision::I32) }); } } - catch (InferenceEngine::details::InferenceEngineException &ex) { + catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/sparse_weighted_reduce.cpp b/inference-engine/src/mkldnn_plugin/nodes/sparse_weighted_reduce.cpp index 6023476ebf0..a4be915d1eb 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/sparse_weighted_reduce.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/sparse_weighted_reduce.cpp @@ -138,7 +138,7 @@ public: DataConfigurator(ConfLayout::PLN, Precision::I32) }, { DataConfigurator(ConfLayout::PLN, Precision::FP32) }); } } - catch (InferenceEngine::details::InferenceEngineException &ex) { + catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/squeeze.cpp b/inference-engine/src/mkldnn_plugin/nodes/squeeze.cpp index 57263c1caae..8569564fe61 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/squeeze.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/squeeze.cpp @@ -37,7 +37,7 @@ public: // WA to enable the implementation only for equal input and output precisions confs[0].inConfs[0].desc.setPrecision(confs[0].outConfs[0].desc.getPrecision()); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch 
(InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/strided_slice.cpp b/inference-engine/src/mkldnn_plugin/nodes/strided_slice.cpp index f8d4af798d5..75089083d88 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/strided_slice.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/strided_slice.cpp @@ -142,7 +142,7 @@ public: DataConfigurator(ConfLayout::PLN, Precision::I32), DataConfigurator(ConfLayout::PLN, Precision::I32) }, { DataConfigurator(ConfLayout::PLN, dataPrecision) }); } - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/topk.cpp b/inference-engine/src/mkldnn_plugin/nodes/topk.cpp index 09ab13796b9..467a4a89d0b 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/topk.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/topk.cpp @@ -102,7 +102,7 @@ public: // integer tensor. Will change it for corresponding output desc. 
confs.back().outConfs[1].desc.setPrecision(Precision::I32); } - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/topkrois_onnx.cpp b/inference-engine/src/mkldnn_plugin/nodes/topkrois_onnx.cpp index 195e3ecfff4..ed445b61d6b 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/topkrois_onnx.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/topkrois_onnx.cpp @@ -41,7 +41,7 @@ public: addConfig(layer, {DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32)}, {DataConfigurator(ConfLayout::PLN, Precision::FP32)}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/unique.cpp b/inference-engine/src/mkldnn_plugin/nodes/unique.cpp index f544789041f..d7ab2e7ab8c 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/unique.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/unique.cpp @@ -94,7 +94,7 @@ public: DataConfigurator(ConfLayout::PLN, Precision::FP32), DataConfigurator(ConfLayout::PLN, Precision::FP32) }); } } - catch (InferenceEngine::details::InferenceEngineException &ex) { + catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/unsqueeze.cpp b/inference-engine/src/mkldnn_plugin/nodes/unsqueeze.cpp index 804e41140bd..79543a64896 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/unsqueeze.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/unsqueeze.cpp @@ -32,7 +32,7 @@ public: // WA to enable the implementation only for equal input and output precisions confs[0].inConfs[0].desc.setPrecision(confs[0].outConfs[0].desc.getPrecision()); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); 
} } diff --git a/inference-engine/src/multi_device/multi_device_async_infer_request.cpp b/inference-engine/src/multi_device/multi_device_async_infer_request.cpp index 845f6aac2d9..42292f353cb 100644 --- a/inference-engine/src/multi_device/multi_device_async_infer_request.cpp +++ b/inference-engine/src/multi_device/multi_device_async_infer_request.cpp @@ -74,7 +74,10 @@ MultiDeviceAsyncInferRequest::MultiDeviceAsyncInferRequest( if (nullptr != InferenceEngine::CurrentException()) std::rethrow_exception(InferenceEngine::CurrentException()); else - THROW_IE_EXCEPTION << InferenceEngine::details::as_status << status; + IE_EXCEPTION_SWITCH(status, ExceptionType, + InferenceEngine::details::ThrowNow{} + <<= std::stringstream{} << IE_LOCATION + << InferenceEngine::details::ExceptionTraits::string()); } if (_needPerfCounters) _perfMap = _workerInferRequest->_inferRequest.GetPerformanceCounts(); diff --git a/inference-engine/src/multi_device/multi_device_exec_network.cpp b/inference-engine/src/multi_device/multi_device_exec_network.cpp index d445e9480b4..61cc3c22d03 100644 --- a/inference-engine/src/multi_device/multi_device_exec_network.cpp +++ b/inference-engine/src/multi_device/multi_device_exec_network.cpp @@ -68,7 +68,7 @@ MultiDeviceExecutableNetwork::MultiDeviceExecutableNetwork(const DeviceMap(); - } catch (const InferenceEngine::details::InferenceEngineException &iie) { + } catch (const InferenceEngine::Exception &iie) { THROW_IE_EXCEPTION << "Every device used with the Multi-Device should " << "support OPTIMAL_NUMBER_OF_INFER_REQUESTS ExecutableNetwork metric. " @@ -168,10 +168,9 @@ RemoteContext::Ptr MultiDeviceExecutableNetwork::GetContext() const { const auto& n = _networksPerDevice.at(device.deviceName); try { return n.GetContext(); - } catch (const NotImplemented&) { - } + } catch (const NotImplemented&) {} } - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED) << "None of the devices in the MULTI has an associated remote context." 
+ THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << "None of the devices in the MULTI has an associated remote context." << " Current list of devices allowed via the DEVICE_PRIORITIES config: " << devices_names; } @@ -226,7 +225,7 @@ void MultiDeviceExecutableNetwork::SetConfig(const std::map lock{_mutex}; for (auto && device : metaDevices) { if (_networksPerDevice.find(device.deviceName) == _networksPerDevice.end()) { - THROW_IE_EXCEPTION << NOT_FOUND_str << "You can only change device priorities but not add new devices with" + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "You can only change device priorities but not add new devices with" << " the Network's SetConfig(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES. " << device.deviceName << " device was not in the original device list!"; @@ -245,7 +244,7 @@ InferenceEngine::Parameter MultiDeviceExecutableNetwork::GetConfig(const std::st if (it != _config.end()) { return it->second; } else { - THROW_IE_EXCEPTION << NOT_FOUND_str << name <<" not found in the ExecutableNetwork config"; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << name <<" not found in the ExecutableNetwork config"; } } @@ -255,7 +254,7 @@ InferenceEngine::Parameter MultiDeviceExecutableNetwork::GetMetric(const std::st for (auto n : _networksPerDevice) { try { res += n.second.GetMetric(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)).as(); - } catch (const InferenceEngine::details::InferenceEngineException &iie) { + } catch (const InferenceEngine::Exception &iie) { THROW_IE_EXCEPTION << "Every device used with the Multi-Device should " << "support OPTIMAL_NUMBER_OF_INFER_REQUESTS ExecutableNetwork metric. 
" diff --git a/inference-engine/src/multi_device/multi_device_infer_request.cpp b/inference-engine/src/multi_device/multi_device_infer_request.cpp index a4f0128748b..675385761de 100644 --- a/inference-engine/src/multi_device/multi_device_infer_request.cpp +++ b/inference-engine/src/multi_device/multi_device_infer_request.cpp @@ -60,4 +60,12 @@ void MultiDeviceInferRequest::SetBlobsToAnotherRequest(InferRequest& req) { } } +std::map MultiDeviceInferRequest::GetPerformanceCounts() const { + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); +} + +void MultiDeviceInferRequest::InferImpl() { + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); +} + } // namespace MultiDevicePlugin diff --git a/inference-engine/src/multi_device/multi_device_infer_request.hpp b/inference-engine/src/multi_device/multi_device_infer_request.hpp index b7c674e272c..85a314b4f25 100644 --- a/inference-engine/src/multi_device/multi_device_infer_request.hpp +++ b/inference-engine/src/multi_device/multi_device_infer_request.hpp @@ -25,12 +25,8 @@ public: explicit MultiDeviceInferRequest(const InferenceEngine::InputsDataMap& networkInputs, const InferenceEngine::OutputsDataMap& networkOutputs, InferenceEngine::InferRequest request_to_share_blobs_with); - std::map GetPerformanceCounts() const override { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); - } - void InferImpl() override { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); - } + std::map GetPerformanceCounts() const override; + void InferImpl() override; // Multi-Device impl specific: sets the data (blobs from the device-less requests to the specific device request) void SetBlobsToAnotherRequest(InferenceEngine::InferRequest& req); }; diff --git a/inference-engine/src/multi_device/multi_device_plugin.cpp b/inference-engine/src/multi_device/multi_device_plugin.cpp index 3891e78d7ad..80aa1966e66 100644 --- a/inference-engine/src/multi_device/multi_device_plugin.cpp +++ b/inference-engine/src/multi_device/multi_device_plugin.cpp @@ -190,7 
+190,7 @@ ExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadExeNetworkImpl(co IStreamsExecutor::ThreadBindingType::NONE}); executor->runAndWait(loads); if (executableNetworkPerDevice.empty()) - THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to load network to any device " + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Failed to load network to any device " << "that the MULTI device is initialized to work with"; // checking the perf counters config from the loaded network to respect both device's plugin and load-specific setting diff --git a/inference-engine/src/plugin_api/cpp_interfaces/exception2status.hpp b/inference-engine/src/plugin_api/cpp_interfaces/exception2status.hpp index 4ec96091e9a..d7e5f4aff81 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/exception2status.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/exception2status.hpp @@ -12,31 +12,25 @@ #include "description_buffer.hpp" -/** - * @def THROW_IE_EXCEPTION_WITH_STATUS - * @brief Throws an exception along with the status (which is eventually converted to the typed exception) - */ -#define THROW_IE_EXCEPTION_WITH_STATUS(__status) THROW_IE_EXCEPTION << \ - InferenceEngine::details::as_status << InferenceEngine::StatusCode::__status << __status##_str - namespace InferenceEngine { +INFERENCE_ENGINE_API_CPP(StatusCode) ExceptionToStatus(const Exception& exception); + /** * @def TO_STATUS(x) * @brief Converts C++ exceptioned function call into a c-style one * @ingroup ie_dev_api_error_debug */ -#define TO_STATUS(x) \ - try { \ - x; \ - return OK; \ - } catch (const InferenceEngine::details::InferenceEngineException& iex) { \ - return InferenceEngine::DescriptionBuffer((iex.hasStatus() ? iex.getStatus() : GENERAL_ERROR), resp) \ - << iex.what(); \ - } catch (const std::exception& ex) { \ - return InferenceEngine::DescriptionBuffer(GENERAL_ERROR, resp) << ex.what(); \ - } catch (...) 
{ \ - return InferenceEngine::DescriptionBuffer(UNEXPECTED); \ +#define TO_STATUS(x) \ + try { \ + x; \ + return OK; \ + } catch (const ::InferenceEngine::Exception& iex) { \ + return InferenceEngine::DescriptionBuffer(InferenceEngine::ExceptionToStatus(iex), resp) << iex.what(); \ + } catch (const std::exception& ex) { \ + return InferenceEngine::DescriptionBuffer(GENERAL_ERROR, resp) << ex.what(); \ + } catch (...) { \ + return InferenceEngine::DescriptionBuffer(UNEXPECTED); \ } /** @@ -48,8 +42,8 @@ namespace InferenceEngine { try { \ x; \ return OK; \ - } catch (const InferenceEngine::details::InferenceEngineException& iex) { \ - return InferenceEngine::DescriptionBuffer(iex.hasStatus() ? iex.getStatus() : GENERAL_ERROR) << iex.what(); \ + } catch (const ::InferenceEngine::Exception& iex) { \ + return InferenceEngine::DescriptionBuffer(InferenceEngine::ExceptionToStatus(iex)) << iex.what(); \ } catch (const std::exception& ex) { \ return InferenceEngine::DescriptionBuffer(GENERAL_ERROR) << ex.what(); \ } catch (...) { \ @@ -61,94 +55,15 @@ namespace InferenceEngine { * @brief Returns a status code of a called function, handles exeptions and converts to a status code. * @ingroup ie_dev_api_error_debug */ -#define NO_EXCEPT_CALL_RETURN_STATUS(x) \ - try { \ - return x; \ - } catch (const InferenceEngine::details::InferenceEngineException& iex) { \ - return InferenceEngine::DescriptionBuffer(iex.hasStatus() ? iex.getStatus() : GENERAL_ERROR, resp) \ - << iex.what(); \ - } catch (const std::exception& ex) { \ - return InferenceEngine::DescriptionBuffer(GENERAL_ERROR, resp) << ex.what(); \ - } catch (...) 
{ \ - return InferenceEngine::DescriptionBuffer(UNEXPECTED); \ +#define NO_EXCEPT_CALL_RETURN_STATUS(x) \ + try { \ + return x; \ + } catch (const ::InferenceEngine::Exception& iex) { \ + return InferenceEngine::DescriptionBuffer(InferenceEngine::ExceptionToStatus(iex), resp) << iex.what(); \ + } catch (const std::exception& ex) { \ + return InferenceEngine::DescriptionBuffer(GENERAL_ERROR, resp) << ex.what(); \ + } catch (...) { \ + return InferenceEngine::DescriptionBuffer(UNEXPECTED); \ } -/** - * @addtogroup ie_dev_api_error_debug - * @{ - * @def PARAMETER_MISMATCH_str - * @brief Defines the `parameter mismatch` message - */ -#define PARAMETER_MISMATCH_str std::string("[PARAMETER_MISMATCH] ") - -/** - * @def NETWORK_NOT_LOADED_str - * @brief Defines the `network not loaded` message - */ -#define NETWORK_NOT_LOADED_str std::string("[NETWORK_NOT_LOADED] ") - -/** - * @def NETWORK_NOT_READ_str - * @brief Defines the `network not read` message - */ -#define NETWORK_NOT_READ_str std::string("[NETWORK_NOT_READ] ") - -/** - * @def NOT_FOUND_str - * @brief Defines the `not found` message - */ -#define NOT_FOUND_str std::string("[NOT_FOUND] ") - -/** - * @def UNEXPECTED_str - * @brief Defines the `unexpected` message - */ -#define UNEXPECTED_str std::string("[UNEXPECTED] ") - -/** - * @def GENERAL_ERROR_str - * @brief Defines the `general error` message - */ -#define GENERAL_ERROR_str std::string("[GENERAL ERROR] ") - -/** - * @def RESULT_NOT_READY_str - * @brief Defines the `result not ready` message - */ -#define RESULT_NOT_READY_str std::string("[RESULT_NOT_READY] ") - -/** - * @def INFER_NOT_STARTED_str - * @brief Defines the `infer not started` message - */ -#define INFER_NOT_STARTED_str std::string("[INFER_NOT_STARTED] ") - -/** - * @def REQUEST_BUSY_str - * @brief Defines the `request busy` message - */ -#define REQUEST_BUSY_str std::string("[REQUEST_BUSY] ") - -/** - * @def NOT_IMPLEMENTED_str - * @brief Defines the `not implemented` message - */ -#define 
NOT_IMPLEMENTED_str std::string("[NOT_IMPLEMENTED] ") - -/** - * @def NOT_ALLOCATED_str - * @brief Defines the `not allocated` message - */ -#define NOT_ALLOCATED_str std::string("[NOT_ALLOCATED] ") - -/** - * @def INFER_CANCELLED_str - * @brief Defines the `infer cancelled` message - */ -#define INFER_CANCELLED_str std::string("[INFER_CANCELLED] ") - -/** - * @} - */ - } // namespace InferenceEngine diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_internal.hpp index 3bc031bc64e..5b78ac16124 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_internal.hpp @@ -80,7 +80,7 @@ public: } CNNNetwork GetExecGraphInfo() override { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } /** @@ -93,7 +93,7 @@ public: } std::vector QueryState() override { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } void SetConfig(const std::map& config) override { @@ -111,11 +111,11 @@ public: Parameter GetMetric(const std::string& name) const override { (void)name; - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } RemoteContext::Ptr GetContext() const override { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } protected: @@ -127,7 +127,7 @@ protected: */ virtual void ExportImpl(std::ostream& networkModel) { (void)networkModel; - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } InferenceEngine::InputsDataMap _networkInputs; //!< Holds infromation about network inputs info diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_async_request_internal.hpp 
b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_async_request_internal.hpp index ab5e264fb2a..130bbc72874 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_async_request_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_async_request_internal.hpp @@ -41,7 +41,7 @@ public: } void GetUserData(void** data) override { - if (data == nullptr) THROW_IE_EXCEPTION << NOT_ALLOCATED_str; + if (data == nullptr) THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated); *data = _userData; } diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_async_request_thread_safe_default.hpp b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_async_request_thread_safe_default.hpp index 54ebbaef124..4b1d1816a14 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_async_request_thread_safe_default.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_async_request_thread_safe_default.hpp @@ -77,9 +77,9 @@ class AsyncInferRequestThreadSafeDefault : public IAsyncInferRequestInternal { state = _state; switch (_state) { case InferState::Busy : - THROW_IE_EXCEPTION_WITH_STATUS(REQUEST_BUSY); + THROW_IE_EXCEPTION_WITH_STATUS(RequestBusy); case InferState::Canceled : - THROW_IE_EXCEPTION_WITH_STATUS(INFER_CANCELLED); + THROW_IE_EXCEPTION_WITH_STATUS(InferCancelled); case InferState::Idle : { _futures.erase(std::remove_if(std::begin(_futures), std::end(_futures), [](const std::shared_future& future) { @@ -118,9 +118,9 @@ protected: std::lock_guard lock {_mutex}; switch (_state) { case InferState::Busy : - THROW_IE_EXCEPTION_WITH_STATUS(REQUEST_BUSY); + THROW_IE_EXCEPTION_WITH_STATUS(RequestBusy); case InferState::Canceled : - THROW_IE_EXCEPTION_WITH_STATUS(INFER_CANCELLED); + THROW_IE_EXCEPTION_WITH_STATUS(InferCancelled); default: break; } } @@ -169,7 +169,7 @@ public: */ StatusCode Wait(int64_t millis_timeout) override { if (millis_timeout < 
IInferRequest::WaitMode::RESULT_READY) { - THROW_IE_EXCEPTION_WITH_STATUS(PARAMETER_MISMATCH) + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << " Timeout can't be less " << IInferRequest::WaitMode::RESULT_READY << " for InferRequest::Wait\n"; } @@ -247,7 +247,7 @@ public: void GetUserData(void** data) override { CheckState(); - if (data == nullptr) THROW_IE_EXCEPTION << NOT_ALLOCATED_str; + if (data == nullptr) THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated); *data = _userData; } @@ -277,7 +277,7 @@ public: void ThrowIfCanceled() const { std::lock_guard lock{_mutex}; if (_state == InferState::Canceled) { - THROW_IE_EXCEPTION_WITH_STATUS(INFER_CANCELLED); + THROW_IE_EXCEPTION_WITH_STATUS(InferCancelled); } } @@ -400,9 +400,9 @@ private: IE_ASSERT(nullptr != nextStageExecutor); nextStageExecutor->run(MakeNextStageTask(itNextStage, itEndStage, std::move(callbackExecutor))); } - } catch (InferenceEngine::details::InferenceEngineException& ie_ex) { - requestStatus = ie_ex.hasStatus() ? ie_ex.getStatus() : StatusCode::GENERAL_ERROR; - localCurrentException = std::make_exception_ptr(ie_ex); + } catch (InferenceEngine::Exception& ie_ex) { + requestStatus = ExceptionToStatus(ie_ex); + localCurrentException = std::current_exception(); } catch (...) 
{ requestStatus = StatusCode::GENERAL_ERROR; localCurrentException = std::current_exception(); diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_request_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_request_internal.hpp index 30de7e98e22..290f47eea52 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_request_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_request_internal.hpp @@ -67,7 +67,7 @@ public: * @brief Default common implementation for all plugins */ void Cancel() override { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } /** @@ -79,13 +79,13 @@ public: void SetBlob(const std::string& name, const Blob::Ptr& userBlob) override { OV_ITT_SCOPED_TASK(itt::domains::Plugin, "SetBlob"); if (name.empty()) { - THROW_IE_EXCEPTION << NOT_FOUND_str + "Failed to set blob with empty name"; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Failed to set blob with empty name"; } - if (!userBlob) THROW_IE_EXCEPTION << NOT_ALLOCATED_str << "Failed to set empty blob with name: \'" << name << "\'"; + if (!userBlob) THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) << "Failed to set empty blob with name: \'" << name << "\'"; const bool compoundBlobPassed = userBlob->is(); const bool remoteBlobPassed = userBlob->is(); if (!compoundBlobPassed && !remoteBlobPassed && userBlob->buffer() == nullptr) - THROW_IE_EXCEPTION << "Input data was not allocated. Input name: \'" << name << "\'"; + THROW_IE_EXCEPTION_WITH_STATUS(NotAllocated) << "Input data was not allocated. Input name: \'" << name << "\'"; if (userBlob->size() == 0) { THROW_IE_EXCEPTION << "Input data is empty. 
Input name: \'" << name << "\'"; } @@ -95,14 +95,14 @@ public: size_t dataSize = userBlob->size(); if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) { if (foundInput->getPrecision() != userBlob->getTensorDesc().getPrecision()) { - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set Blob with precision not corresponding to user input precision"; } auto& devBlob = _deviceInputs[name]; const bool preProcRequired = preProcessingRequired(foundInput, userBlob, devBlob); if (compoundBlobPassed && !preProcRequired) { - THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << "cannot set compound blob: supported only for input pre-processing"; } @@ -121,7 +121,7 @@ public: } } else { if (compoundBlobPassed) { - THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented) << "cannot set compound blob: supported only for input pre-processing"; } size_t outputSize = foundOutput->getTensorDesc().getLayout() != InferenceEngine::Layout::SCALAR @@ -132,7 +132,7 @@ public: << "!=" << outputSize << ")."; } if (foundOutput->getPrecision() != userBlob->getTensorDesc().getPrecision()) { - THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str + THROW_IE_EXCEPTION_WITH_STATUS(ParameterMismatch) << "Failed to set Blob with precision not corresponding to user output precision"; } _outputs[name] = userBlob; @@ -300,7 +300,7 @@ protected: return pair.first == name; }); if (foundOutputPair == std::end(_networkOutputs) && (foundInputPair == std::end(_networkInputs))) { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find input or output with name: \'" << name << "\'"; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Failed to find input or output with name: \'" << name << "\'"; } if (foundInputPair != std::end(_networkInputs)) { foundInput = foundInputPair->second; @@ -337,7 +337,7 @@ protected: return pair.first == name; }); if (foundInputPair == 
std::end(_networkInputs)) { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find input with name: \'" << name << "\'"; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Failed to find input with name: \'" << name << "\'"; } dims = foundInputPair->second->getTensorDesc().getDims(); refSize = foundInputPair->second->getTensorDesc().getLayout() != SCALAR @@ -349,7 +349,7 @@ protected: return pair.first == name; }); if (foundOutputPair == std::end(_networkOutputs)) { - THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find output with name: \'" << name << "\'"; + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Failed to find output with name: \'" << name << "\'"; } dims = foundOutputPair->second->getTensorDesc().getDims(); refSize = foundOutputPair->second->getTensorDesc().getLayout() != SCALAR diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp index 73064fa723d..4b2068cadab 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp @@ -78,7 +78,7 @@ public: const std::map& config) override { (void)modelFileName; (void)config; - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } ExecutableNetwork ImportNetwork(std::istream& networkModel, @@ -96,7 +96,7 @@ public: void SetConfig(const std::map& config) override { (void)config; - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } void SetCore(ICore* core) noexcept override { @@ -109,11 +109,11 @@ public: } void AddExtension(InferenceEngine::IExtensionPtr /*extension*/) override { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } QueryNetworkResult QueryNetwork(const CNNNetwork& /*network*/, const std::map& /*config*/) const override { - 
THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } void SetName(const std::string& pluginName) noexcept override { @@ -126,20 +126,20 @@ public: Parameter GetConfig(const std::string& /*name*/, const std::map& /*options*/) const override { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } Parameter GetMetric(const std::string& /*name*/, const std::map& /*options*/) const override { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } RemoteContext::Ptr CreateContext(const ParamMap& /*params*/) override { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } RemoteContext::Ptr GetDefaultContext(const ParamMap& /*params*/) override { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } protected: @@ -172,7 +172,7 @@ protected: (void)network; (void)context; (void)config; - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } /** @@ -188,7 +188,7 @@ protected: const std::map& config) { (void)networkModel; (void)config; - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } /** @@ -205,7 +205,7 @@ protected: (void)networkModel; (void)context; (void)config; - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } std::string _pluginName; //!< A device name that plugins enables diff --git a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp index 5370fdd5ed6..c22dc5d1668 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp @@ -276,12 +276,12 @@ 
protected: INFERENCE_PLUGIN_API(void) CreatePluginEngine(::std::shared_ptr<::InferenceEngine::IInferencePlugin>& plugin) { \ try { \ plugin = ::std::make_shared(__VA_ARGS__); \ - } catch (const InferenceEngine::details::InferenceEngineException&) { \ + } catch (const InferenceEngine::Exception&) { \ throw; \ } catch (const std::exception& ex) { \ THROW_IE_EXCEPTION << ex.what(); \ } catch (...) { \ - THROW_IE_EXCEPTION_WITH_STATUS(UNEXPECTED); \ + THROW_IE_EXCEPTION_WITH_STATUS(Unexpected); \ } \ plugin->SetVersion(version); \ } diff --git a/inference-engine/src/plugin_api/debug.h b/inference-engine/src/plugin_api/debug.h index ab7510ce19c..ee6ed517a57 100644 --- a/inference-engine/src/plugin_api/debug.h +++ b/inference-engine/src/plugin_api/debug.h @@ -25,9 +25,6 @@ #include "ie_algorithm.hpp" -namespace InferenceEngine { -namespace details { - /** * @brief Serializes a `std::vector` to a `std::ostream` * @ingroup ie_dev_api_error_debug @@ -35,6 +32,7 @@ namespace details { * @param vec A vector to serialize * @return A reference to a `std::stream` */ +namespace std { template inline std::ostream& operator<<(std::ostream& out, const std::vector& vec) { if (vec.empty()) return std::operator<<(out, "[]"); @@ -44,7 +42,10 @@ inline std::ostream& operator<<(std::ostream& out, const std::vector& vec) { } return out << "]"; } +} // namespace std +namespace InferenceEngine { +namespace details { /** * @brief trim from start (in place) * @ingroup ie_dev_api_error_debug diff --git a/inference-engine/src/plugin_api/file_utils.h b/inference-engine/src/plugin_api/file_utils.h index c9e9d0bd763..5875e899e86 100644 --- a/inference-engine/src/plugin_api/file_utils.h +++ b/inference-engine/src/plugin_api/file_utils.h @@ -86,7 +86,7 @@ template<> struct FileTraits { * @ingroup ie_dev_api_file_utils * @param filePath - path to file, can be relative to current working directory * @return Absolute path of file - * @throw InferenceEngineException if any error occurred + * @throw 
InferenceEngine::Exception if any error occurred */ INFERENCE_ENGINE_API_CPP(std::string) absoluteFilePath(const std::string& filePath); @@ -94,7 +94,7 @@ INFERENCE_ENGINE_API_CPP(std::string) absoluteFilePath(const std::string& filePa * @brief Interface function to create directorty recursively by given path * @ingroup ie_dev_api_file_utils * @param dirPath - path to file, can be relative to current working directory - * @throw InferenceEngineException if any error occurred + * @throw InferenceEngine::Exception if any error occurred */ INFERENCE_ENGINE_API_CPP(void) createDirectoryRecursive(const std::string& dirPath); diff --git a/inference-engine/src/plugin_api/ie_ngraph_utils.hpp b/inference-engine/src/plugin_api/ie_ngraph_utils.hpp index aa3e40b08db..62626d4917a 100644 --- a/inference-engine/src/plugin_api/ie_ngraph_utils.hpp +++ b/inference-engine/src/plugin_api/ie_ngraph_utils.hpp @@ -123,7 +123,7 @@ inline Precision convertPrecision(const ::ngraph::element::Type& precision) { case ::ngraph::element::Type_t::boolean: return Precision(Precision::BOOL); default: - THROW_IE_EXCEPTION << "Incorrect precision " << precision.get_type_name() << "!"; + THROW_IE_EXCEPTION << "Incorrect precision " << precision.get_type_name() << "!"; return{}; } } diff --git a/inference-engine/src/readers/ir_reader_v7/ie_cnn_net_reader_impl.cpp b/inference-engine/src/readers/ir_reader_v7/ie_cnn_net_reader_impl.cpp index 3670fbda243..6ccc7d533aa 100644 --- a/inference-engine/src/readers/ir_reader_v7/ie_cnn_net_reader_impl.cpp +++ b/inference-engine/src/readers/ir_reader_v7/ie_cnn_net_reader_impl.cpp @@ -35,7 +35,7 @@ StatusCode CNNNetReaderImpl::SetWeights(const TBlob::Ptr& weights, Resp if (_version < 10) { _parser->SetWeights(weights); } - } catch (const InferenceEngineException& iee) { + } catch (const Exception& iee) { xmlDoc.reset(); return DescriptionBuffer(desc) << iee.what(); } @@ -109,7 +109,7 @@ StatusCode CNNNetReaderImpl::ReadWeights(const char* filepath, ResponseDesc* res 
weightsPtr->allocate(); readAllFile(filepath, weightsPtr->buffer(), ulFileSize); return SetWeights(weightsPtr, resp); - } catch (const InferenceEngineException& ex) { + } catch (const Exception& ex) { return DescriptionBuffer(resp) << ex.what(); } } @@ -158,7 +158,7 @@ StatusCode CNNNetReaderImpl::ReadNetwork(const pugi::xml_node& const_root, Respo } catch (const std::string& err) { parseSuccess = false; return DescriptionBuffer(desc) << err; - } catch (const InferenceEngineException& e) { + } catch (const Exception& e) { description = e.what(); parseSuccess = false; return DescriptionBuffer(desc) << e.what(); diff --git a/inference-engine/src/readers/ir_reader_v7/ie_layer_validators.cpp b/inference-engine/src/readers/ir_reader_v7/ie_layer_validators.cpp index acd7b825d29..1a6981aaf40 100644 --- a/inference-engine/src/readers/ir_reader_v7/ie_layer_validators.cpp +++ b/inference-engine/src/readers/ir_reader_v7/ie_layer_validators.cpp @@ -7,7 +7,6 @@ #include #include -#include
#include #include #include @@ -42,7 +41,7 @@ void details::validateLayer(const CNNLayer * layer) { InOutDims shapes; getInOutShapes(layer, shapes); validator->checkShapes(layer, shapes.inDims); - } catch (const InferenceEngineException& ie_e) { + } catch (const Exception& ie_e) { THROW_IE_EXCEPTION << "Error of validate layer: " << layer->name << " with type: " << layer->type << ". " << ie_e.what(); } diff --git a/inference-engine/src/vpu/common/include/vpu/utils/auto_scope.hpp b/inference-engine/src/vpu/common/include/vpu/utils/auto_scope.hpp index b3c444d9afa..cc750b1dc03 100644 --- a/inference-engine/src/vpu/common/include/vpu/utils/auto_scope.hpp +++ b/inference-engine/src/vpu/common/include/vpu/utils/auto_scope.hpp @@ -6,7 +6,6 @@ #include -#include
namespace vpu { diff --git a/inference-engine/src/vpu/common/include/vpu/utils/checked_cast.hpp b/inference-engine/src/vpu/common/include/vpu/utils/checked_cast.hpp index 513fe9c6228..9d41deca299 100644 --- a/inference-engine/src/vpu/common/include/vpu/utils/checked_cast.hpp +++ b/inference-engine/src/vpu/common/include/vpu/utils/checked_cast.hpp @@ -11,7 +11,7 @@ #include #include -#include
+#include namespace vpu { diff --git a/inference-engine/src/vpu/common/include/vpu/utils/error.hpp b/inference-engine/src/vpu/common/include/vpu/utils/error.hpp index dfbb242713e..e525e045bfe 100644 --- a/inference-engine/src/vpu/common/include/vpu/utils/error.hpp +++ b/inference-engine/src/vpu/common/include/vpu/utils/error.hpp @@ -6,7 +6,6 @@ #include -#include
#include @@ -14,6 +13,8 @@ #include #include +#include + namespace vpu { // TODO: replace with VPU_THROW_FORMAT/VPU_THROW_UNLESS/VPU_INTERNAL_CHECK and remove @@ -21,7 +22,7 @@ namespace vpu { namespace details { -using VPUException = InferenceEngine::details::InferenceEngineException; +using VPUException = InferenceEngine::Exception; class UnsupportedLayerException : public VPUException { public: @@ -30,7 +31,8 @@ public: template void throwFormat(const char* fileName, int lineNumber, const char* messageFormat, Args&&... args) { - throw Exception(fileName, lineNumber, formatString(messageFormat, std::forward(args)...)); + IE_THROW(GeneralError) << '\n' << fileName << ':' << lineNumber << ' ' + << formatString(messageFormat, std::forward(args)...); } } // namespace details diff --git a/inference-engine/src/vpu/common/include/vpu/utils/extra.hpp b/inference-engine/src/vpu/common/include/vpu/utils/extra.hpp index 8712709556b..34fbca13567 100644 --- a/inference-engine/src/vpu/common/include/vpu/utils/extra.hpp +++ b/inference-engine/src/vpu/common/include/vpu/utils/extra.hpp @@ -4,7 +4,6 @@ #pragma once -#include
namespace vpu { diff --git a/inference-engine/src/vpu/common/include/vpu/utils/handle.hpp b/inference-engine/src/vpu/common/include/vpu/utils/handle.hpp index 79ed1f5366c..a815f781ba8 100644 --- a/inference-engine/src/vpu/common/include/vpu/utils/handle.hpp +++ b/inference-engine/src/vpu/common/include/vpu/utils/handle.hpp @@ -12,7 +12,7 @@ #include #include -#include
+#include namespace vpu { diff --git a/inference-engine/src/vpu/common/include/vpu/utils/numeric.hpp b/inference-engine/src/vpu/common/include/vpu/utils/numeric.hpp index 59ca9a15ec9..a278fb232de 100644 --- a/inference-engine/src/vpu/common/include/vpu/utils/numeric.hpp +++ b/inference-engine/src/vpu/common/include/vpu/utils/numeric.hpp @@ -9,8 +9,7 @@ #include #include -#include
- +#include namespace vpu { using fp16_t = short; diff --git a/inference-engine/src/vpu/common/src/parsed_config_base.cpp b/inference-engine/src/vpu/common/src/parsed_config_base.cpp index 41c3170bebd..925bb46109b 100644 --- a/inference-engine/src/vpu/common/src/parsed_config_base.cpp +++ b/inference-engine/src/vpu/common/src/parsed_config_base.cpp @@ -41,8 +41,7 @@ void ParsedConfigBase::update( const bool isDeprecatedOption = deprecatedOptions.count(entry.first) != 0; if (!isCompileOption && !isRunTimeOption) { - THROW_IE_EXCEPTION - << NOT_FOUND_str << entry.first + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << entry.first << " key is not supported for VPU"; } diff --git a/inference-engine/src/vpu/common/src/utils/enums.cpp b/inference-engine/src/vpu/common/src/utils/enums.cpp index 9042a9c2eb2..236f7bf3156 100644 --- a/inference-engine/src/vpu/common/src/utils/enums.cpp +++ b/inference-engine/src/vpu/common/src/utils/enums.cpp @@ -12,6 +12,8 @@ #include #include +#include + namespace vpu { namespace { diff --git a/inference-engine/src/vpu/common/src/utils/ie_helpers.cpp b/inference-engine/src/vpu/common/src/utils/ie_helpers.cpp index da471bd207e..83b7677efed 100644 --- a/inference-engine/src/vpu/common/src/utils/ie_helpers.cpp +++ b/inference-engine/src/vpu/common/src/utils/ie_helpers.cpp @@ -9,7 +9,6 @@ #include #include -#include
#include #include diff --git a/inference-engine/src/vpu/graph_transformer/include/vpu/model/data_contents/data_content.hpp b/inference-engine/src/vpu/graph_transformer/include/vpu/model/data_contents/data_content.hpp index ae2a74ffc08..a3718d68a6e 100644 --- a/inference-engine/src/vpu/graph_transformer/include/vpu/model/data_contents/data_content.hpp +++ b/inference-engine/src/vpu/graph_transformer/include/vpu/model/data_contents/data_content.hpp @@ -6,7 +6,6 @@ #include -#include
#include #include diff --git a/inference-engine/src/vpu/graph_transformer/src/model/stage.cpp b/inference-engine/src/vpu/graph_transformer/src/model/stage.cpp index f8df36a0a12..b93c4603dce 100644 --- a/inference-engine/src/vpu/graph_transformer/src/model/stage.cpp +++ b/inference-engine/src/vpu/graph_transformer/src/model/stage.cpp @@ -224,14 +224,14 @@ StageSHAVEsRequirements StageNode::getSHAVEsRequirements() const { void StageNode::initialCheck() const { try { initialCheckImpl(); - } catch (const InferenceEngine::details::InferenceEngineException& exception) { + } catch (const InferenceEngine::Exception& exception) { VPU_THROW_EXCEPTION << name() << " of type " << type() << ": " << exception.what(); } if (const auto injectedStage = this->injectedStage()) { try { injectedStage->initialCheck(); - } catch (const InferenceEngine::details::InferenceEngineException& exception) { + } catch (const InferenceEngine::Exception& exception) { VPU_THROW_EXCEPTION << name() << " of type " << type() << ": " << exception.what(); } } @@ -240,14 +240,14 @@ void StageNode::initialCheck() const { void StageNode::finalCheck() const { try { finalCheckImpl(); - } catch (const InferenceEngine::details::InferenceEngineException& exception) { + } catch (const InferenceEngine::Exception& exception) { VPU_THROW_EXCEPTION << name() << " of type " << type() << ": " << exception.what(); } if (const auto injectedStage = this->injectedStage()) { try { injectedStage->finalCheck(); - } catch (const ie::details::InferenceEngineException& exception) { + } catch (const ie::Exception& exception) { VPU_THROW_EXCEPTION << name() << " of type " << type() << ": " << exception.what(); } } diff --git a/inference-engine/src/vpu/myriad_plugin/myriad_executable_network.cpp b/inference-engine/src/vpu/myriad_plugin/myriad_executable_network.cpp index 300a9a0ccdf..6ebf33880e6 100644 --- a/inference-engine/src/vpu/myriad_plugin/myriad_executable_network.cpp +++ 
b/inference-engine/src/vpu/myriad_plugin/myriad_executable_network.cpp @@ -181,7 +181,7 @@ InferenceEngine::Parameter ExecutableNetwork::GetMetric(const std::string &name) } else if (name == METRIC_KEY(DEVICE_THERMAL)) { IE_SET_METRIC_RETURN(DEVICE_THERMAL, _executor->GetThermal(_device)); } else { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } } diff --git a/inference-engine/src/vpu/myriad_plugin/myriad_mvnc_wraper.cpp b/inference-engine/src/vpu/myriad_plugin/myriad_mvnc_wraper.cpp index 5ba91ef4f86..c1d6873a7bf 100644 --- a/inference-engine/src/vpu/myriad_plugin/myriad_mvnc_wraper.cpp +++ b/inference-engine/src/vpu/myriad_plugin/myriad_mvnc_wraper.cpp @@ -3,7 +3,8 @@ // #include "myriad_mvnc_wraper.h" -#include "details/ie_exception.hpp" + +#include using namespace vpu::MyriadPlugin; diff --git a/inference-engine/src/vpu/myriad_plugin/myriad_plugin.cpp b/inference-engine/src/vpu/myriad_plugin/myriad_plugin.cpp index 7f6d6f5e1bb..1fc8367c3d4 100644 --- a/inference-engine/src/vpu/myriad_plugin/myriad_plugin.cpp +++ b/inference-engine/src/vpu/myriad_plugin/myriad_plugin.cpp @@ -159,7 +159,7 @@ InferenceEngine::ExecutableNetwork Engine::ImportNetwork( std::ifstream blobFile(modelFileName, std::ios::binary); if (!blobFile.is_open()) { - THROW_IE_EXCEPTION << ie::details::as_status << NETWORK_NOT_READ; + THROW_IE_EXCEPTION_WITH_STATUS(NetworkNotRead); } return ImportNetwork(blobFile, config); @@ -215,5 +215,5 @@ InferenceEngine::Parameter Engine::GetMetric(const std::string& name, return Parameter(); } } - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } diff --git a/inference-engine/tests/functional/inference_engine/async_infer_request_test.cpp b/inference-engine/tests/functional/inference_engine/async_infer_request_test.cpp index 5a84d5f0b95..ecb5304be1b 100644 --- a/inference-engine/tests/functional/inference_engine/async_infer_request_test.cpp +++ 
b/inference-engine/tests/functional/inference_engine/async_infer_request_test.cpp @@ -15,76 +15,76 @@ using namespace InferenceEngine::details; TEST(InferRequestCPPTests, throwsOnInitWithNull) { IInferRequest::Ptr nlptr = nullptr; - ASSERT_THROW(InferRequest req(nlptr), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(InferRequest req(nlptr), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedSetBlob) { InferRequest req; - ASSERT_THROW(req.SetBlob({}, {}), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.SetBlob({}, {}), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedGetBlob) { InferRequest req; - ASSERT_THROW(req.GetBlob({}), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.GetBlob({}), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedSetBlobPreproc) { InferRequest req; - ASSERT_THROW(req.SetBlob({}, {}, {}), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.SetBlob({}, {}, {}), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedGetPreProcess) { InferRequest req; - ASSERT_THROW(req.GetPreProcess({}), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.GetPreProcess({}), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedInfer) { InferRequest req; - ASSERT_THROW(req.Infer(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.Infer(), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedGetPerformanceCounts) { InferRequest req; - ASSERT_THROW(req.GetPerformanceCounts(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.GetPerformanceCounts(), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedSetInput) { InferRequest req; - ASSERT_THROW(req.SetInput({{}}), InferenceEngine::details::InferenceEngineException); + 
ASSERT_THROW(req.SetInput({{}}), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedSetOutput) { InferRequest req; - ASSERT_THROW(req.SetOutput({{}}), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.SetOutput({{}}), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedSetBatch) { InferRequest req; - ASSERT_THROW(req.SetBatch({}), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.SetBatch({}), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedStartAsync) { InferRequest req; - ASSERT_THROW(req.StartAsync(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.StartAsync(), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedWait) { InferRequest req; - ASSERT_THROW(req.Wait({}), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.Wait({}), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedSetCompletionCallback) { InferRequest req; std::function f; - ASSERT_THROW(req.SetCompletionCallback(f), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.SetCompletionCallback(f), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedCast) { InferRequest req; - ASSERT_THROW((void)static_cast(req), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW((void)static_cast(req), InferenceEngine::Exception); } TEST(InferRequestCPPTests, throwsOnUninitializedQueryState) { InferRequest req; - ASSERT_THROW(req.QueryState(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.QueryState(), InferenceEngine::Exception); } diff --git a/inference-engine/tests/functional/inference_engine/cnn_network/convert_ngraph_to_cnn_network_tests.cpp b/inference-engine/tests/functional/inference_engine/cnn_network/convert_ngraph_to_cnn_network_tests.cpp index 1e3bb5e8a77..694c0722ae9 100644 --- 
a/inference-engine/tests/functional/inference_engine/cnn_network/convert_ngraph_to_cnn_network_tests.cpp +++ b/inference-engine/tests/functional/inference_engine/cnn_network/convert_ngraph_to_cnn_network_tests.cpp @@ -44,7 +44,7 @@ TEST(ConvertFunctionToCNNNetworkTests, ConvertPReLUNetwork) { InferenceEngine::CNNNetwork nGraphImpl(f); try { auto net = std::make_shared(nGraphImpl); - } catch (InferenceEngine::details::InferenceEngineException &err) { + } catch (InferenceEngine::Exception &err) { const std::string ref_msg = "Error of validate layer: prelu with type: PReLU. Number of inputs (2) is not equal to expected ones: 1"; const std::string resp_msg = err.what(); ASSERT_TRUE(resp_msg.find(ref_msg) != std::string::npos) << resp_msg; @@ -73,8 +73,8 @@ TEST(ConvertFunctionToCNNNetworkTests, ConvertConvolutionNetwork) { InferenceEngine::CNNNetwork nGraphImpl(f); try { auto net = std::make_shared(nGraphImpl); - } catch (InferenceEngine::details::InferenceEngineException&) { - FAIL(); + } catch (InferenceEngine::Exception &err) { + FAIL() << err.what(); } } @@ -118,11 +118,11 @@ TEST(ConvertFunctionToCNNNetworkTests, OpsShouldBeConvertedToIERepresentation) { res->input(0).replace_source_output(ngraph_node->output(0)); EXPECT_THROW(InferenceEngine::details::convertFunctionToICNNNetwork(f, nGraphImpl, true), - InferenceEngine::details::InferenceEngineException) + InferenceEngine::Exception) << "failed node: " << ngraph_node->get_type_name() << std::endl; try { InferenceEngine::details::convertFunctionToICNNNetwork(f, nGraphImpl, true); - } catch (InferenceEngine::details::InferenceEngineException &err) { + } catch (InferenceEngine::Exception &err) { std::string type_name = ngraph_node->get_type_name(); std::map exceptions = { {"Broadcast", "Tile"}, {"Interpolate", "Interp"}, @@ -199,7 +199,7 @@ TEST(ConvertFunctionToCNNNetworkTests, ConvertTopKWithOneInput) { OutputsDataMap outputs = nGraphImpl.getOutputsInfo(); ASSERT_EQ(outputs.size(), 1); 
ASSERT_EQ(outputs.begin()->first, "topK.1"); - } catch (InferenceEngine::details::InferenceEngineException &err) { + } catch (InferenceEngine::Exception &err) { const std::string ref_msg = "Error of validate layer: prelu with type: PReLU. Number of inputs (2) is not equal to expected ones: 1"; const std::string resp_msg = err.what(); ASSERT_TRUE(resp_msg.find(ref_msg) != std::string::npos) << resp_msg; @@ -225,8 +225,8 @@ TEST(ConvertFunctionToCNNNetworkTests, UnsupportedDynamicOps) { InferenceEngine::CNNNetwork nGraphImpl(f); try { InferenceEngine::details::convertFunctionToICNNNetwork(f, nGraphImpl); - FAIL() << "InferenceEngineException must be thrown"; - } catch(InferenceEngine::details::InferenceEngineException & e) { + FAIL() << "InferenceEngine::Exception must be thrown"; + } catch(InferenceEngine::Exception & e) { EXPECT_THAT(e.what(), testing::HasSubstr(std::string("Unsupported dynamic ops: \n" "v0::Parameter param () -> (f32?)\n" "v0::Relu relu (param[0]:f32?) -> (f32?)\n" @@ -358,8 +358,8 @@ TEST(ConvertFunctionToCNNNetworkTests, NonUniqueNamesNegative) { InferenceEngine::CNNNetwork nGraphImpl(f); try { InferenceEngine::details::convertFunctionToICNNNetwork(f, nGraphImpl); - FAIL() << "InferenceEngineException must be thrown"; - } catch(InferenceEngine::details::InferenceEngineException & e) { + FAIL() << "InferenceEngine::Exception must be thrown"; + } catch(InferenceEngine::Exception & e) { EXPECT_THAT(e.what(), testing::HasSubstr(std::string("Detected two output operations with the same name:"))); } } @@ -386,8 +386,8 @@ TEST(ConvertFunctionToCNNNetworkTests, NonUniqueNamesParametersNegative) { try { input2->set_friendly_name("param"); InferenceEngine::details::convertFunctionToICNNNetwork(f, nGraphImpl); - FAIL() << "InferenceEngineException must be thrown"; - } catch(InferenceEngine::details::InferenceEngineException & e) { + FAIL() << "InferenceEngine::Exception must be thrown"; + } catch(InferenceEngine::Exception & e) { EXPECT_THAT(e.what(), 
testing::HasSubstr(std::string("Detected two output operations with the same name:"))); } } diff --git a/inference-engine/tests/functional/inference_engine/cnn_network_test.cpp b/inference-engine/tests/functional/inference_engine/cnn_network_test.cpp index 205c3b049f1..ce1529b11d5 100644 --- a/inference-engine/tests/functional/inference_engine/cnn_network_test.cpp +++ b/inference-engine/tests/functional/inference_engine/cnn_network_test.cpp @@ -13,62 +13,62 @@ IE_SUPPRESS_DEPRECATED_START TEST_F(CNNNetworkTests, throwsOnInitWithNull) { std::shared_ptr nlptr = nullptr; - ASSERT_THROW(CNNNetwork network(nlptr), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(CNNNetwork network(nlptr), InferenceEngine::Exception); } TEST_F(CNNNetworkTests, throwsOnUninitializedCastToICNNNetwork) { CNNNetwork network; - ASSERT_THROW((void)static_cast(network), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW((void)static_cast(network), InferenceEngine::Exception); } TEST_F(CNNNetworkTests, throwsOnConstUninitializedCastToICNNNetwork) { const CNNNetwork network; - ASSERT_THROW((void)static_cast(network), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW((void)static_cast(network), InferenceEngine::Exception); } IE_SUPPRESS_DEPRECATED_END TEST_F(CNNNetworkTests, throwsOnInitWithNullNgraph) { std::shared_ptr nlptr = nullptr; - ASSERT_THROW(CNNNetwork network(nlptr), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(CNNNetwork network(nlptr), InferenceEngine::Exception); } TEST_F(CNNNetworkTests, throwsOnUninitializedGetOutputsInfo) { CNNNetwork network; - ASSERT_THROW(network.getOutputsInfo(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(network.getOutputsInfo(), InferenceEngine::Exception); } TEST_F(CNNNetworkTests, throwsOnUninitializedGetInputsInfo) { CNNNetwork network; - ASSERT_THROW(network.getInputsInfo(), InferenceEngine::details::InferenceEngineException); + 
ASSERT_THROW(network.getInputsInfo(), InferenceEngine::Exception); } TEST_F(CNNNetworkTests, throwsOnUninitializedLayerCount) { CNNNetwork network; - ASSERT_THROW(network.layerCount(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(network.layerCount(), InferenceEngine::Exception); } TEST_F(CNNNetworkTests, throwsOnUninitializedGetName) { CNNNetwork network; - ASSERT_THROW(network.getName(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(network.getName(), InferenceEngine::Exception); } TEST_F(CNNNetworkTests, throwsOnUninitializedGetFunction) { CNNNetwork network; - ASSERT_THROW(network.getFunction(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(network.getFunction(), InferenceEngine::Exception); } TEST_F(CNNNetworkTests, throwsOnConstUninitializedGetFunction) { const CNNNetwork network; - ASSERT_THROW(network.getFunction(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(network.getFunction(), InferenceEngine::Exception); } TEST_F(CNNNetworkTests, throwsOnConstUninitializedBegin) { CNNNetwork network; - ASSERT_THROW(network.getFunction(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(network.getFunction(), InferenceEngine::Exception); } TEST_F(CNNNetworkTests, throwsOnConstUninitializedGetInputShapes) { CNNNetwork network; - ASSERT_THROW(network.getInputShapes(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(network.getInputShapes(), InferenceEngine::Exception); } diff --git a/inference-engine/tests/functional/inference_engine/core_threading_tests.cpp b/inference-engine/tests/functional/inference_engine/core_threading_tests.cpp index 67dd12118f3..585af693b8c 100644 --- a/inference-engine/tests/functional/inference_engine/core_threading_tests.cpp +++ b/inference-engine/tests/functional/inference_engine/core_threading_tests.cpp @@ -3,7 +3,6 @@ // #include -#include
#include #include @@ -50,7 +49,7 @@ public: FileUtils::makePluginLibraryName({}, std::string("template_extension") + IE_BUILD_POSTFIX)); ie.AddExtension(extension); - } catch (const InferenceEngine::details::InferenceEngineException & ex) { + } catch (const InferenceEngine::Exception & ex) { ASSERT_STR_CONTAINS(ex.what(), "name: custom_opset. Opset"); } } @@ -127,7 +126,7 @@ TEST_F(CoreThreadingTests, DISABLED_GetAvailableDevices) { for (auto && deviceName : devices) { try { ie.UnregisterPlugin(deviceName); - } catch (const InferenceEngine::details::InferenceEngineException & ex) { + } catch (const InferenceEngine::Exception & ex) { // if several threads unload plugin at once, the first thread does this // while all others will throw an exception that plugin is not registered ASSERT_STR_CONTAINS(ex.what(), "name is not registered in the"); diff --git a/inference-engine/tests/functional/inference_engine/data_test.cpp b/inference-engine/tests/functional/inference_engine/data_test.cpp index 7dcc8ca111a..1ccf76c305b 100644 --- a/inference-engine/tests/functional/inference_engine/data_test.cpp +++ b/inference-engine/tests/functional/inference_engine/data_test.cpp @@ -10,7 +10,6 @@ using namespace ::testing; using namespace std; using namespace InferenceEngine; -using namespace InferenceEngine::details; class DataTests : public ::testing::Test { protected: @@ -74,7 +73,7 @@ TEST_F(DataTests, canSetEmptyDimsForBlockingDescOrder) { TEST_F(DataTests, throwOnFillDescByEmptyDimsForBlockingDesc) { BlockingDescTest desc(emptyDims, emptyDims); - ASSERT_THROW(desc.fillDescTest(emptyDims, emptyDims), InferenceEngineException); + ASSERT_THROW(desc.fillDescTest(emptyDims, emptyDims), Exception); } TEST_F(DataTests, throwOnSetEmptyDimsForBlockingDescBlocked) { diff --git a/inference-engine/tests/functional/inference_engine/exception_test.cpp b/inference-engine/tests/functional/inference_engine/exception_test.cpp index 400758d1dbc..ec5fc98cc43 100644 --- 
a/inference-engine/tests/functional/inference_engine/exception_test.cpp +++ b/inference-engine/tests/functional/inference_engine/exception_test.cpp @@ -5,21 +5,24 @@ #include #include -#include
using namespace InferenceEngine; using ExceptionTests = ::testing::Test; -template +template class WrapperClass { public: static InferenceEngine::StatusCode toStatusWrapper(InferenceEngine::ResponseDesc *resp) { - TO_STATUS(THROW_IE_EXCEPTION << details::as_status << T) + TO_STATUS(IE_EXCEPTION_SWITCH(statusCode, ExceptionType, + InferenceEngine::details::ThrowNow{} + <<= std::stringstream{} << IE_LOCATION)) } static InferenceEngine::StatusCode toStatusWrapperMsg(std::string &msg, InferenceEngine::ResponseDesc *resp) { - TO_STATUS(THROW_IE_EXCEPTION << details::as_status << T << msg) + TO_STATUS(IE_EXCEPTION_SWITCH(statusCode, ExceptionType, + InferenceEngine::details::ThrowNow{} + <<= std::stringstream{} << IE_LOCATION << msg)) } }; @@ -54,8 +57,8 @@ TEST_F(ExceptionTests, canHandleNullPtr) { // shared_ptr holding the nullptr std::shared_ptr actual; // check that accessing the nullptr thru macros throws - ASSERT_THROW(CALL_STATUS_FNC_NO_ARGS(func0), InferenceEngine::details::InferenceEngineException); - ASSERT_THROW(CALL_STATUS_FNC(func1, 0), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(CALL_STATUS_FNC_NO_ARGS(func0), InferenceEngine::Exception); + ASSERT_THROW(CALL_STATUS_FNC(func1, 0), InferenceEngine::Exception); } TEST_F(ExceptionTests, throwAfterConvertStatusToClassContainMessage) { @@ -63,8 +66,8 @@ TEST_F(ExceptionTests, throwAfterConvertStatusToClassContainMessage) { auto actual = std::make_shared>(); try { CALL_STATUS_FNC(toStatusWrapperMsg, refMessage) - } catch (const NotAllocated &iex) { + } catch (const NotAllocated& iex) { std::string actualMessage = iex.what(); - ASSERT_EQ(actualMessage.find(refMessage), 0); + ASSERT_TRUE(actualMessage.find(refMessage) != std::string::npos); } } diff --git a/inference-engine/tests/functional/inference_engine/executable_network.cpp b/inference-engine/tests/functional/inference_engine/executable_network.cpp index 937328b4ffd..19178b84bd7 100644 --- 
a/inference-engine/tests/functional/inference_engine/executable_network.cpp +++ b/inference-engine/tests/functional/inference_engine/executable_network.cpp @@ -12,27 +12,27 @@ using namespace InferenceEngine::details; TEST(ExecutableNetworkTests, throwsOnInitWithNull) { std::shared_ptr nlptr = nullptr; - ASSERT_THROW(ExecutableNetwork exec(nlptr), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(ExecutableNetwork exec(nlptr), InferenceEngine::Exception); } TEST(ExecutableNetworkTests, throwsOnUninitializedGetOutputsInfo) { ExecutableNetwork exec; - ASSERT_THROW(exec.GetOutputsInfo(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exec.GetOutputsInfo(), InferenceEngine::Exception); } TEST(ExecutableNetworkTests, throwsOnUninitializedGetInputsInfo) { ExecutableNetwork exec; - ASSERT_THROW(exec.GetInputsInfo(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exec.GetInputsInfo(), InferenceEngine::Exception); } TEST(ExecutableNetworkTests, throwsOnUninitializedExport) { ExecutableNetwork exec; - ASSERT_THROW(exec.Export(std::string()), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exec.Export(std::string()), InferenceEngine::Exception); } TEST(ExecutableNetworkTests, throwsOnUninitializedExportStream) { ExecutableNetwork exec; - ASSERT_THROW(exec.Export(std::cout), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exec.Export(std::cout), InferenceEngine::Exception); } TEST(ExecutableNetworkTests, nothrowsOnUninitializedCast) { @@ -42,32 +42,32 @@ TEST(ExecutableNetworkTests, nothrowsOnUninitializedCast) { TEST(ExecutableNetworkTests, throwsOnUninitializedGetExecGraphInfo) { ExecutableNetwork exec; - ASSERT_THROW(exec.GetExecGraphInfo(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exec.GetExecGraphInfo(), InferenceEngine::Exception); } TEST(ExecutableNetworkTests, throwsOnUninitializedQueryState) { IE_SUPPRESS_DEPRECATED_START ExecutableNetwork 
exec; - ASSERT_THROW(exec.QueryState(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exec.QueryState(), InferenceEngine::Exception); IE_SUPPRESS_DEPRECATED_END } TEST(ExecutableNetworkTests, throwsOnUninitializedSetConfig) { ExecutableNetwork exec; - ASSERT_THROW(exec.SetConfig({{}}), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exec.SetConfig({{}}), InferenceEngine::Exception); } TEST(ExecutableNetworkTests, throwsOnUninitializedGetConfig) { ExecutableNetwork exec; - ASSERT_THROW(exec.GetConfig({}), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exec.GetConfig({}), InferenceEngine::Exception); } TEST(ExecutableNetworkTests, throwsOnUninitializedGetMetric) { ExecutableNetwork exec; - ASSERT_THROW(exec.GetMetric({}), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exec.GetMetric({}), InferenceEngine::Exception); } TEST(ExecutableNetworkTests, throwsOnUninitializedGetContext) { ExecutableNetwork exec; - ASSERT_THROW(exec.GetContext(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exec.GetContext(), InferenceEngine::Exception); } \ No newline at end of file diff --git a/inference-engine/tests/functional/inference_engine/net_reader_test.cpp b/inference-engine/tests/functional/inference_engine/net_reader_test.cpp index 6c4cb650ed8..e7c77b05f4d 100644 --- a/inference-engine/tests/functional/inference_engine/net_reader_test.cpp +++ b/inference-engine/tests/functional/inference_engine/net_reader_test.cpp @@ -31,7 +31,7 @@ using NetReaderNoParamTest = CommonTestUtils::TestsCommon; TEST_F(NetReaderNoParamTest, IncorrectModel) { InferenceEngine::Core ie; - ASSERT_THROW(ie.ReadNetwork("incorrectFilePath"), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(ie.ReadNetwork("incorrectFilePath"), InferenceEngine::Exception); } using NetReaderTestParams = std::tuple; @@ -141,7 +141,7 @@ TEST_P(NetReaderTest, ReadCorrectModelWithWeightsUnicodePath) { 
CommonTestUtils::removeFile(weightsPath); GTEST_COUT << "OK" << std::endl; } - catch (const InferenceEngine::details::InferenceEngineException &e_next) { + catch (const InferenceEngine::Exception &e_next) { CommonTestUtils::removeFile(modelPath); CommonTestUtils::removeFile(weightsPath); FAIL() << e_next.what(); diff --git a/inference-engine/tests/functional/inference_engine/ngraph_reader/negative_tests.cpp b/inference-engine/tests/functional/inference_engine/ngraph_reader/negative_tests.cpp index ee7e4494224..9c8cca290a2 100644 --- a/inference-engine/tests/functional/inference_engine/ngraph_reader/negative_tests.cpp +++ b/inference-engine/tests/functional/inference_engine/ngraph_reader/negative_tests.cpp @@ -75,5 +75,5 @@ TEST_F(NGraphReaderTests, DISABLED_ReadIncorrectNetwork) { Blob::CPtr blob; Core reader; - ASSERT_THROW(reader.ReadNetwork(model, blob), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(reader.ReadNetwork(model, blob), InferenceEngine::Exception); } diff --git a/inference-engine/tests/functional/inference_engine/ngraph_reshape_tests.cpp b/inference-engine/tests/functional/inference_engine/ngraph_reshape_tests.cpp index 7eab10ad247..16bb343ca4f 100644 --- a/inference-engine/tests/functional/inference_engine/ngraph_reshape_tests.cpp +++ b/inference-engine/tests/functional/inference_engine/ngraph_reshape_tests.cpp @@ -437,7 +437,7 @@ public: TEST_F(NGraphReshapeTests, LoadBadNewExtension) { InferenceEngine::Core ie; - ASSERT_THROW(ie.AddExtension(std::make_shared()), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(ie.AddExtension(std::make_shared()), InferenceEngine::Exception); } TEST_F(NGraphReshapeTests, TestInterpParameters) { diff --git a/inference-engine/tests/functional/inference_engine/onnx_reader/model_support_tests.cpp b/inference-engine/tests/functional/inference_engine/onnx_reader/model_support_tests.cpp index a460eb5a8ea..acdcf177d56 100644 --- 
a/inference-engine/tests/functional/inference_engine/onnx_reader/model_support_tests.cpp +++ b/inference-engine/tests/functional/inference_engine/onnx_reader/model_support_tests.cpp @@ -49,7 +49,7 @@ TEST(ONNXReader_ModelSupported, scrambled_keys) { TEST(ONNXReader_ModelUnsupported, no_graph_field) { // this model contains only 2 fields (it doesn't contain a graph in particular) EXPECT_THROW(InferenceEngine::Core{}.ReadNetwork(model_path("unsupported/no_graph_field.onnx")), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST(ONNXReader_ModelUnsupported, incorrect_onnx_field) { @@ -57,16 +57,16 @@ TEST(ONNXReader_ModelUnsupported, incorrect_onnx_field) { // this test will have to be changed if the number of fields in onnx.proto // (ModelProto message definition) ever reaches 31 or more EXPECT_THROW(InferenceEngine::Core{}.ReadNetwork(model_path("unsupported/incorrect_onnx_field.onnx")), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST(ONNXReader_ModelUnsupported, unknown_wire_type) { // in this model the graph key contains wire type 7 encoded in it - this value is incorrect EXPECT_THROW(InferenceEngine::Core{}.ReadNetwork(model_path("unsupported/unknown_wire_type.onnx")), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST(ONNXReader_ModelUnsupported, no_valid_keys) { EXPECT_THROW(InferenceEngine::Core{}.ReadNetwork(model_path("unsupported/no_valid_keys.prototxt")), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } diff --git a/inference-engine/tests/functional/inference_engine/parameter_tests.cpp b/inference-engine/tests/functional/inference_engine/parameter_tests.cpp index 7146df5a9f8..e7249890450 100644 --- a/inference-engine/tests/functional/inference_engine/parameter_tests.cpp +++ b/inference-engine/tests/functional/inference_engine/parameter_tests.cpp @@ -263,10 +263,10 @@ TEST_F(ParameterTests, 
CompareParametersWithoutEqualOperator) { Parameter parB = b; Parameter parC = c; - ASSERT_THROW((void)(parA == parB), details::InferenceEngineException); - ASSERT_THROW((void)(parA != parB), details::InferenceEngineException); - ASSERT_THROW((void)(parA == parC), details::InferenceEngineException); - ASSERT_THROW((void)(parA != parC), details::InferenceEngineException); + ASSERT_THROW((void)(parA == parB), Exception); + ASSERT_THROW((void)(parA != parB), Exception); + ASSERT_THROW((void)(parA == parC), Exception); + ASSERT_THROW((void)(parA != parC), Exception); } TEST_F(ParameterTests, ParameterRemovedRealObject) { diff --git a/inference-engine/tests/functional/inference_engine/preprocess_test.cpp b/inference-engine/tests/functional/inference_engine/preprocess_test.cpp index 06c65c14883..00d68f5cf09 100644 --- a/inference-engine/tests/functional/inference_engine/preprocess_test.cpp +++ b/inference-engine/tests/functional/inference_engine/preprocess_test.cpp @@ -13,7 +13,7 @@ TEST_F(PreProcessTests, throwsOnSettingNullMeanImage) { InferenceEngine::PreProcessInfo info; info.init(1); ASSERT_THROW(info.setMeanImage(InferenceEngine::Blob::Ptr(nullptr)), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_F(PreProcessTests, throwsOnSetting2DMeanImage) { @@ -21,7 +21,7 @@ TEST_F(PreProcessTests, throwsOnSetting2DMeanImage) { info.init(1); InferenceEngine::Blob::Ptr blob(new InferenceEngine::TBlob({ InferenceEngine::Precision::FP32, {1, 1}, InferenceEngine::Layout::HW})); - ASSERT_THROW(info.setMeanImage(blob), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(info.setMeanImage(blob), InferenceEngine::Exception); } TEST_F(PreProcessTests, throwsOnSettingWrongSizeMeanImage) { @@ -30,7 +30,7 @@ TEST_F(PreProcessTests, throwsOnSettingWrongSizeMeanImage) { InferenceEngine::TBlob::Ptr blob(new InferenceEngine::TBlob({ InferenceEngine::Precision::FP32, { 2, 1, 1 }, InferenceEngine::Layout::CHW })); blob->allocate(); - 
ASSERT_THROW(info.setMeanImage(blob), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(info.setMeanImage(blob), InferenceEngine::Exception); } TEST_F(PreProcessTests, noThrowWithCorrectSizeMeanImage) { diff --git a/inference-engine/tests/functional/inference_engine/shared_object_loader_test.cpp b/inference-engine/tests/functional/inference_engine/shared_object_loader_test.cpp index e67e1dfc551..053583cf036 100644 --- a/inference-engine/tests/functional/inference_engine/shared_object_loader_test.cpp +++ b/inference-engine/tests/functional/inference_engine/shared_object_loader_test.cpp @@ -43,7 +43,7 @@ TEST_F(SharedObjectLoaderTests, canLoadExistedPlugin) { } TEST_F(SharedObjectLoaderTests, loaderThrowsIfNoPlugin) { - EXPECT_THROW(loadDll("wrong_name"), InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(loadDll("wrong_name"), InferenceEngine::Exception); } TEST_F(SharedObjectLoaderTests, canFindExistedMethod) { @@ -55,9 +55,7 @@ TEST_F(SharedObjectLoaderTests, canFindExistedMethod) { TEST_F(SharedObjectLoaderTests, throwIfMethodNofFoundInLibrary) { loadDll(get_mock_engine_name()); - - EXPECT_THROW(make_std_function("wrong_function"), - InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(make_std_function("wrong_function"), InferenceEngine::Exception); } TEST_F(SharedObjectLoaderTests, canCallExistedMethod) { diff --git a/inference-engine/tests/functional/inference_engine/so_pointer_tests.cpp b/inference-engine/tests/functional/inference_engine/so_pointer_tests.cpp index 82f985ada99..401b8209830 100644 --- a/inference-engine/tests/functional/inference_engine/so_pointer_tests.cpp +++ b/inference-engine/tests/functional/inference_engine/so_pointer_tests.cpp @@ -93,14 +93,14 @@ public: } // namespace InferenceEngine TEST_F(SoPointerTests, UnknownPlugin) { - ASSERT_THROW(SOPointer("UnknownPlugin"), InferenceEngineException); + ASSERT_THROW(SOPointer("UnknownPlugin"), Exception); } TEST_F(SoPointerTests, 
UnknownPluginExceptionStr) { try { SOPointer("UnknownPlugin"); } - catch (InferenceEngineException &e) { + catch (Exception &e) { ASSERT_STR_CONTAINS(e.what(), "Cannot load library 'UnknownPlugin':"); ASSERT_STR_DOES_NOT_CONTAIN(e.what(), "path:"); ASSERT_STR_DOES_NOT_CONTAIN(e.what(), "from CWD:"); diff --git a/inference-engine/tests/functional/inference_engine/tensor_desc_test.cpp b/inference-engine/tests/functional/inference_engine/tensor_desc_test.cpp index 0401ed85b92..b5ad8f95389 100644 --- a/inference-engine/tests/functional/inference_engine/tensor_desc_test.cpp +++ b/inference-engine/tests/functional/inference_engine/tensor_desc_test.cpp @@ -17,7 +17,7 @@ using namespace InferenceEngine; using TensorDescTests = ::testing::Test; TEST_F(TensorDescTests, CreateBlobWithIncorrectLayout) { - ASSERT_THROW(make_shared_blob({ Precision::FP32, {1, 3, 32}, Layout::NC }), details::InferenceEngineException); + ASSERT_THROW(make_shared_blob({ Precision::FP32, {1, 3, 32}, Layout::NC }), Exception); } TEST_F(TensorDescTests, CreateBlockedBlobNCHW) { diff --git a/inference-engine/tests/functional/plugin/gna/shared_tests_instances/behavior/core_integration.cpp b/inference-engine/tests/functional/plugin/gna/shared_tests_instances/behavior/core_integration.cpp index 276c1ef603d..451dba0bcbc 100644 --- a/inference-engine/tests/functional/plugin/gna/shared_tests_instances/behavior/core_integration.cpp +++ b/inference-engine/tests/functional/plugin/gna/shared_tests_instances/behavior/core_integration.cpp @@ -142,7 +142,7 @@ TEST_P(IEClassExecutableNetworkSetConfigFromFp32Test, SetConfigFromFp32Throws) { initialConfig[GNA_CONFIG_KEY(DEVICE_MODE)] = GNAConfigParams::GNA_SW_FP32; ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleNetwork, deviceName, initialConfig); - ASSERT_THROW(exeNetwork.SetConfig({{configKey, configValue}}), InferenceEngineException); + ASSERT_THROW(exeNetwork.SetConfig({{configKey, configValue}}), Exception); } // TODO: Convolution with 3D input is not 
supported on GNA diff --git a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/multi/gpu_remote_blob_tests.cpp b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/multi/gpu_remote_blob_tests.cpp index 761467f4d7c..eeb56d29dce 100644 --- a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/multi/gpu_remote_blob_tests.cpp +++ b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/multi/gpu_remote_blob_tests.cpp @@ -43,7 +43,7 @@ TEST_P(MultiDevice_Test, cannotInferRemoteBlobIfNotInitializedForDevice) { ASSERT_NE((std::shared_ptr)req, nullptr); ASSERT_NO_THROW(req.SetBlob(first_input_name, rblob)); ASSERT_NO_THROW(req.StartAsync()); - ASSERT_THROW(req.Wait(InferenceEngine::IInferRequest::WaitMode::RESULT_READY), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.Wait(InferenceEngine::IInferRequest::WaitMode::RESULT_READY), InferenceEngine::Exception); } const std::vector device_names_and_support_for_remote_blobs2 { diff --git a/inference-engine/tests/functional/plugin/myriad/ngraph/operations/dynamic_shape_resolver.cpp b/inference-engine/tests/functional/plugin/myriad/ngraph/operations/dynamic_shape_resolver.cpp index 1fde0719fbf..c1bf3e29281 100644 --- a/inference-engine/tests/functional/plugin/myriad/ngraph/operations/dynamic_shape_resolver.cpp +++ b/inference-engine/tests/functional/plugin/myriad/ngraph/operations/dynamic_shape_resolver.cpp @@ -8,7 +8,6 @@ #include #include #include -#include
#include #include diff --git a/inference-engine/tests/functional/plugin/myriad/ngraph/operations/out_shape_of_reshape.cpp b/inference-engine/tests/functional/plugin/myriad/ngraph/operations/out_shape_of_reshape.cpp index 0e8c6544af4..847a252002d 100644 --- a/inference-engine/tests/functional/plugin/myriad/ngraph/operations/out_shape_of_reshape.cpp +++ b/inference-engine/tests/functional/plugin/myriad/ngraph/operations/out_shape_of_reshape.cpp @@ -9,7 +9,6 @@ #include #include -#include
#include diff --git a/inference-engine/tests/functional/plugin/myriad/ngraph/operations/static_shape_broadcast.cpp b/inference-engine/tests/functional/plugin/myriad/ngraph/operations/static_shape_broadcast.cpp index 3d597f958c0..066ec613e92 100644 --- a/inference-engine/tests/functional/plugin/myriad/ngraph/operations/static_shape_broadcast.cpp +++ b/inference-engine/tests/functional/plugin/myriad/ngraph/operations/static_shape_broadcast.cpp @@ -9,7 +9,6 @@ #include #include -#include
#include diff --git a/inference-engine/tests/functional/plugin/myriad/ngraph/operations/static_shape_nonzero.cpp b/inference-engine/tests/functional/plugin/myriad/ngraph/operations/static_shape_nonzero.cpp index 4785e35b2ee..c2a65875089 100644 --- a/inference-engine/tests/functional/plugin/myriad/ngraph/operations/static_shape_nonzero.cpp +++ b/inference-engine/tests/functional/plugin/myriad/ngraph/operations/static_shape_nonzero.cpp @@ -9,7 +9,6 @@ #include #include -#include
#include diff --git a/inference-engine/tests/functional/plugin/shared/include/behavior/config.hpp b/inference-engine/tests/functional/plugin/shared/include/behavior/config.hpp index c83ec86d628..336ec664ddb 100644 --- a/inference-engine/tests/functional/plugin/shared/include/behavior/config.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/behavior/config.hpp @@ -61,7 +61,7 @@ namespace BehaviorTestsDefinitions { targetDevice.find(CommonTestUtils::DEVICE_HETERO) == std::string::npos) { ASSERT_NO_THROW(ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_THROW(ie->SetConfig(configuration, targetDevice), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } else { ASSERT_NO_THROW(ie->GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS))); ASSERT_NO_THROW(ie->SetConfig(configuration, targetDevice)); @@ -74,7 +74,7 @@ namespace BehaviorTestsDefinitions { // Create CNNNetwork from ngrpah::Function InferenceEngine::CNNNetwork cnnNet(function); ASSERT_THROW(auto execNet = ie->LoadNetwork(cnnNet, targetDevice, configuration), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } using IncorrectConfigAPITests = BehaviorTestsUtils::BehaviorTestsBasic; @@ -90,7 +90,7 @@ namespace BehaviorTestsDefinitions { } else { try { ie->SetConfig(configuration, targetDevice); - } catch (InferenceEngine::details::InferenceEngineException &) {} + } catch (InferenceEngine::Exception &) {} } } diff --git a/inference-engine/tests/functional/plugin/shared/include/behavior/core_integration.hpp b/inference-engine/tests/functional/plugin/shared/include/behavior/core_integration.hpp index 9b01d7bf053..f4e247e1518 100644 --- a/inference-engine/tests/functional/plugin/shared/include/behavior/core_integration.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/behavior/core_integration.hpp @@ -55,18 +55,11 @@ namespace BehaviorTestsDefinitions { ASSERT_NE(metrics.end(), it); \ } 
-// TODO: issue with RTTI -#ifdef __APPLE__ -using NotImplementedException = std::exception; -#else -using NotImplementedException = InferenceEngine::NotImplemented; -#endif - #define SKIP_IF_NOT_IMPLEMENTED(...) \ { \ try { \ __VA_ARGS__; \ - } catch (const NotImplementedException &) { \ + } catch (const InferenceEngine::NotImplemented&) { \ GTEST_SKIP(); \ } \ } @@ -219,7 +212,7 @@ TEST(IEClassBasicTest, smoke_createDefault) { TEST_P(IEClassBasicTestP, registerExistingPluginThrows) { SKIP_IF_CURRENT_TEST_IS_DISABLED() Core ie; - ASSERT_THROW(ie.RegisterPlugin(pluginName, deviceName), InferenceEngineException); + ASSERT_THROW(ie.RegisterPlugin(pluginName, deviceName), Exception); } TEST_P(IEClassBasicTestP, registerNewPluginNoThrows) { @@ -232,12 +225,12 @@ TEST_P(IEClassBasicTestP, registerNewPluginNoThrows) { TEST(IEClassBasicTest, smoke_registerExistingPluginFileThrows) { SKIP_IF_CURRENT_TEST_IS_DISABLED() Core ie; - ASSERT_THROW(ie.RegisterPlugins("nonExistPlugins.xml"), InferenceEngineException); + ASSERT_THROW(ie.RegisterPlugins("nonExistPlugins.xml"), Exception); } TEST(IEClassBasicTest, smoke_createNonExistingConfigThrows) { SKIP_IF_CURRENT_TEST_IS_DISABLED() - ASSERT_THROW(Core ie("nonExistPlugins.xml"), InferenceEngineException); + ASSERT_THROW(Core ie("nonExistPlugins.xml"), Exception); } #ifdef __linux__ @@ -256,7 +249,7 @@ TEST(IEClassBasicTest, smoke_createMockEngineConfigThrows) { std::string filename{"mock_engine.xml"}; std::string content{""}; CommonTestUtils::createFile(filename, content); - ASSERT_THROW(Core ie(filename), InferenceEngineException); + ASSERT_THROW(Core ie(filename), Exception); CommonTestUtils::removeFile(filename.c_str()); } @@ -300,7 +293,7 @@ TEST_P(IEClassBasicTestP, smoke_registerPluginsXMLUnicodePath) { GTEST_COUT << "OK" << std::endl; } - catch (const InferenceEngine::details::InferenceEngineException &e_next) { + catch (const InferenceEngine::Exception &e_next) { CommonTestUtils::removeFile(pluginsXmlW); 
std::remove(pluginXML.c_str()); FAIL() << e_next.what(); @@ -341,7 +334,7 @@ TEST_P(IEClassBasicTestP, unregisterExistingPluginNoThrow) { SKIP_IF_CURRENT_TEST_IS_DISABLED() Core ie; // device instance is not created yet - ASSERT_THROW(ie.UnregisterPlugin(deviceName), InferenceEngineException); + ASSERT_THROW(ie.UnregisterPlugin(deviceName), Exception); // make the first call to IE which created device instance ie.GetVersions(deviceName); @@ -352,7 +345,7 @@ TEST_P(IEClassBasicTestP, unregisterExistingPluginNoThrow) { TEST_P(IEClassBasicTestP, accessToUnregisteredPluginThrows) { SKIP_IF_CURRENT_TEST_IS_DISABLED() Core ie; - ASSERT_THROW(ie.UnregisterPlugin(deviceName), InferenceEngineException); + ASSERT_THROW(ie.UnregisterPlugin(deviceName), Exception); ASSERT_NO_THROW(ie.GetVersions(deviceName)); ASSERT_NO_THROW(ie.UnregisterPlugin(deviceName)); ASSERT_NO_THROW(ie.SetConfig({}, deviceName)); @@ -363,7 +356,7 @@ TEST_P(IEClassBasicTestP, accessToUnregisteredPluginThrows) { TEST(IEClassBasicTest, smoke_unregisterNonExistingPluginThrows) { SKIP_IF_CURRENT_TEST_IS_DISABLED() Core ie; - ASSERT_THROW(ie.UnregisterPlugin("unkown_device"), InferenceEngineException); + ASSERT_THROW(ie.UnregisterPlugin("unkown_device"), Exception); } // @@ -380,7 +373,7 @@ TEST_P(IEClassBasicTestP, SetConfigAllThrows) { TEST_P(IEClassBasicTestP, SetConfigForUnRegisteredDeviceThrows) { SKIP_IF_CURRENT_TEST_IS_DISABLED() Core ie; - ASSERT_THROW(ie.SetConfig({{"unsupported_key", "4"}}, "unregistered_device"), InferenceEngineException); + ASSERT_THROW(ie.SetConfig({{"unsupported_key", "4"}}, "unregistered_device"), Exception); } TEST_P(IEClassBasicTestP, SetConfigNoThrow) { @@ -432,7 +425,7 @@ TEST_P(IEClassBasicTestP, ImportNetworkThrows) { if (deviceName == CommonTestUtils::DEVICE_CPU || deviceName == CommonTestUtils::DEVICE_GPU) { - ASSERT_THROW(ie.ImportNetwork("model", deviceName), NotImplementedException); + ASSERT_THROW(ie.ImportNetwork("model", deviceName), NotImplemented); } } @@ 
-440,13 +433,13 @@ TEST(IEClassBasicTest, smoke_ImportNetworkHeteroThrows) { SKIP_IF_CURRENT_TEST_IS_DISABLED() Core ie; - ASSERT_THROW(ie.ImportNetwork("model", CommonTestUtils::DEVICE_HETERO), InferenceEngineException); + ASSERT_THROW(ie.ImportNetwork("model", CommonTestUtils::DEVICE_HETERO), Exception); } TEST(IEClassBasicTest, smoke_ImportNetworkMultiThrows) { SKIP_IF_CURRENT_TEST_IS_DISABLED() InferenceEngine::Core ie; - ASSERT_THROW(ie.ImportNetwork("model", CommonTestUtils::DEVICE_MULTI), InferenceEngineException); + ASSERT_THROW(ie.ImportNetwork("model", CommonTestUtils::DEVICE_MULTI), Exception); } TEST_P(IEClassBasicTestP, ImportNetworkWithNullContextThrows) { @@ -454,7 +447,7 @@ TEST_P(IEClassBasicTestP, ImportNetworkWithNullContextThrows) { Core ie; RemoteContext::Ptr context = nullptr; std::istringstream stream("None"); - ASSERT_THROW(ie.ImportNetwork(stream, context, {}), InferenceEngineException); + ASSERT_THROW(ie.ImportNetwork(stream, context, {}), Exception); } // @@ -547,7 +540,7 @@ TEST_P(IEClassNetworkTestP, QueryNetworkActualNoThrow) { try { ie.QueryNetwork(actualNetwork, deviceName); - } catch (const InferenceEngine::details::InferenceEngineException & ex) { + } catch (const InferenceEngine::Exception & ex) { std::string message = ex.what(); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); } @@ -566,7 +559,7 @@ TEST_P(IEClassNetworkTestP, QueryNetworkWithKSO) { FAIL() << "Op " << op->get_friendly_name() << " is not supported by " << deviceName; } } - } catch (const InferenceEngine::details::InferenceEngineException & ex) { + } catch (const InferenceEngine::Exception & ex) { std::string message = ex.what(); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); } @@ -618,7 +611,7 @@ TEST_P(IEClassNetworkTestP, SetAffinityWithConstantBranches) { op->get_rt_info()["affinity"] = std::make_shared>(affinity); } ExecutableNetwork exeNetwork = 
ie.LoadNetwork(ksoNetwork, deviceName); - } catch (const NotImplementedException& ex) { + } catch (const NotImplemented& ex) { std::string message = ex.what(); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); } @@ -642,7 +635,7 @@ TEST_P(IEClassNetworkTestP, SetAffinityWithKSO) { op->get_rt_info()["affinity"] = std::make_shared>(affinity); } ExecutableNetwork exeNetwork = ie.LoadNetwork(ksoNetwork, deviceName); - } catch (const InferenceEngine::details::InferenceEngineException & ex) { + } catch (const InferenceEngine::Exception & ex) { std::string message = ex.what(); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); } @@ -659,7 +652,7 @@ TEST_P(IEClassNetworkTestP, QueryNetworkHeteroActualNoThrow) { TEST_P(IEClassNetworkTestP, QueryNetworkMultiThrows) { SKIP_IF_CURRENT_TEST_IS_DISABLED() Core ie; - ASSERT_THROW(ie.QueryNetwork(actualNetwork, CommonTestUtils::DEVICE_MULTI), InferenceEngineException); + ASSERT_THROW(ie.QueryNetwork(actualNetwork, CommonTestUtils::DEVICE_MULTI), Exception); } TEST(IEClassBasicTest, smoke_GetMetricSupportedMetricsHeteroNoThrow) { @@ -701,7 +694,7 @@ TEST(IEClassBasicTest, smoke_GetMetricSupportedConfigKeysHeteroThrows) { Core ie; // TODO: check std::string targetDevice = CommonTestUtils::DEVICE_HETERO + std::string(":") + CommonTestUtils::DEVICE_CPU; - ASSERT_THROW(ie.GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS)), InferenceEngineException); + ASSERT_THROW(ie.GetMetric(targetDevice, METRIC_KEY(SUPPORTED_CONFIG_KEYS)), Exception); } TEST_P(IEClassGetMetricTest_SUPPORTED_METRICS, GetMetricAndPrintNoThrow) { @@ -854,7 +847,7 @@ TEST_P(IEClassGetMetricTest_ThrowUnsupported, GetMetricThrow) { Core ie; Parameter p; - ASSERT_THROW(p = ie.GetMetric(deviceName, "unsupported_metric"), InferenceEngineException); + ASSERT_THROW(p = ie.GetMetric(deviceName, "unsupported_metric"), Exception); } TEST_P(IEClassGetConfigTest, GetConfigNoThrow) { @@ 
-890,7 +883,7 @@ TEST_P(IEClassGetConfigTest_ThrowUnsupported, GetConfigHeteroThrow) { Core ie; Parameter p; - ASSERT_THROW(p = ie.GetConfig(CommonTestUtils::DEVICE_HETERO, "unsupported_config"), InferenceEngineException); + ASSERT_THROW(p = ie.GetConfig(CommonTestUtils::DEVICE_HETERO, "unsupported_config"), Exception); } TEST_P(IEClassGetConfigTest_ThrowUnsupported, GetConfigHeteroWithDeviceThrow) { @@ -899,7 +892,7 @@ TEST_P(IEClassGetConfigTest_ThrowUnsupported, GetConfigHeteroWithDeviceThrow) { Parameter p; ASSERT_THROW(p = ie.GetConfig(CommonTestUtils::DEVICE_HETERO + std::string(":") + deviceName, HETERO_CONFIG_KEY(DUMP_GRAPH_DOT)), - InferenceEngineException); + Exception); } TEST_P(IEClassGetConfigTest_ThrowUnsupported, GetConfigThrow) { @@ -907,7 +900,7 @@ TEST_P(IEClassGetConfigTest_ThrowUnsupported, GetConfigThrow) { Core ie; Parameter p; - ASSERT_THROW(p = ie.GetConfig(deviceName, "unsupported_config"), InferenceEngineException); + ASSERT_THROW(p = ie.GetConfig(deviceName, "unsupported_config"), Exception); } TEST_P(IEClassGetAvailableDevices, GetAvailableDevicesNoThrow) { @@ -1009,7 +1002,7 @@ TEST_P(IEClassExecutableNetworkGetMetricTest_ThrowsUnsupported, GetMetricThrow) ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleNetwork, deviceName); - ASSERT_THROW(p = exeNetwork.GetMetric("unsupported_metric"), InferenceEngineException); + ASSERT_THROW(p = exeNetwork.GetMetric("unsupported_metric"), Exception); } TEST_P(IEClassExecutableNetworkGetConfigTest, GetConfigNoThrow) { @@ -1036,7 +1029,7 @@ TEST_P(IEClassExecutableNetworkGetConfigTest, GetConfigThrows) { ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleNetwork, deviceName); - ASSERT_THROW(p = exeNetwork.GetConfig("unsupported_config"), InferenceEngineException); + ASSERT_THROW(p = exeNetwork.GetConfig("unsupported_config"), Exception); } TEST_P(IEClassExecutableNetworkSetConfigTest, SetConfigThrows) { @@ -1046,7 +1039,7 @@ TEST_P(IEClassExecutableNetworkSetConfigTest, SetConfigThrows) { 
ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleNetwork, deviceName); - ASSERT_THROW(exeNetwork.SetConfig({{"unsupported_config", "some_value"}}), InferenceEngineException); + ASSERT_THROW(exeNetwork.SetConfig({{"unsupported_config", "some_value"}}), Exception); } TEST_P(IEClassExecutableNetworkSupportedConfigTest, SupportedConfigWorks) { @@ -1068,7 +1061,7 @@ TEST_P(IEClassExecutableNetworkUnsupportedConfigTest, UnsupportedConfigThrows) { ExecutableNetwork exeNetwork = ie.LoadNetwork(simpleNetwork, deviceName); - ASSERT_THROW(exeNetwork.SetConfig({{configKey, configValue}}), InferenceEngineException); + ASSERT_THROW(exeNetwork.SetConfig({{configKey, configValue}}), Exception); } TEST_P(IEClassExecutableNetworkGetConfigTest, GetConfigNoEmptyNoThrow) { @@ -1231,7 +1224,7 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkWithDeviceID) { if (supportsDeviceID(ie, deviceName)) { try { ie.QueryNetwork(simpleNetwork, deviceName + ".0"); - } catch (const InferenceEngine::details::InferenceEngineException & ex) { + } catch (const InferenceEngine::Exception & ex) { std::string message = ex.what(); ASSERT_STR_CONTAINS(message, "[NOT_IMPLEMENTED] ngraph::Function is not supported natively"); } @@ -1245,7 +1238,7 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkWithBigDeviceIDThrows) { Core ie; if (supportsDeviceID(ie, deviceName)) { - ASSERT_THROW(ie.QueryNetwork(actualNetwork, deviceName + ".110"), InferenceEngineException); + ASSERT_THROW(ie.QueryNetwork(actualNetwork, deviceName + ".110"), Exception); } else { GTEST_SKIP(); } @@ -1256,7 +1249,7 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkWithInvalidDeviceIDThrows) { Core ie; if (supportsDeviceID(ie, deviceName)) { - ASSERT_THROW(ie.QueryNetwork(actualNetwork, deviceName + ".l0"), InferenceEngineException); + ASSERT_THROW(ie.QueryNetwork(actualNetwork, deviceName + ".l0"), Exception); } else { GTEST_SKIP(); } @@ -1268,7 +1261,7 @@ TEST_P(IEClassQueryNetworkTest, QueryNetworkHETEROWithBigDeviceIDThrows) { if 
(supportsDeviceID(ie, deviceName)) { ASSERT_THROW(ie.QueryNetwork(actualNetwork, CommonTestUtils::DEVICE_HETERO, - {{"TARGET_FALLBACK", deviceName + ".100," + CommonTestUtils::DEVICE_CPU}}), InferenceEngineException); + {{"TARGET_FALLBACK", deviceName + ".100," + CommonTestUtils::DEVICE_CPU}}), Exception); } else { GTEST_SKIP(); } @@ -1311,7 +1304,7 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkWithBigDeviceIDThrows) { Core ie; if (supportsDeviceID(ie, deviceName)) { - ASSERT_THROW(ie.LoadNetwork(actualNetwork, deviceName + ".10"), InferenceEngineException); + ASSERT_THROW(ie.LoadNetwork(actualNetwork, deviceName + ".10"), Exception); } else { GTEST_SKIP(); } @@ -1322,7 +1315,7 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkWithInvalidDeviceIDThrows) { Core ie; if (supportsDeviceID(ie, deviceName)) { - ASSERT_THROW(ie.LoadNetwork(actualNetwork, deviceName + ".l0"), InferenceEngineException); + ASSERT_THROW(ie.LoadNetwork(actualNetwork, deviceName + ".l0"), Exception); } else { GTEST_SKIP(); } @@ -1334,7 +1327,7 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROWithBigDeviceIDThrows) { if (supportsDeviceID(ie, deviceName)) { ASSERT_THROW(ie.LoadNetwork(actualNetwork, "HETERO", - {{"TARGET_FALLBACK", deviceName + ".100," + CommonTestUtils::DEVICE_CPU}}), InferenceEngineException); + {{"TARGET_FALLBACK", deviceName + ".100," + CommonTestUtils::DEVICE_CPU}}), Exception); } else { GTEST_SKIP(); } @@ -1347,7 +1340,7 @@ TEST_P(IEClassLoadNetworkTest, LoadNetworkHETEROAndDeviceIDThrows) { if (supportsDeviceID(ie, deviceName)) { ASSERT_THROW(ie.LoadNetwork(actualNetwork, CommonTestUtils::DEVICE_HETERO, {{"TARGET_FALLBACK", deviceName + "," + CommonTestUtils::DEVICE_CPU}, - {CONFIG_KEY(DEVICE_ID), "110"}}), InferenceEngineException); + {CONFIG_KEY(DEVICE_ID), "110"}}), Exception); } else { GTEST_SKIP(); } diff --git a/inference-engine/tests/functional/plugin/shared/include/behavior/core_threading_tests.hpp 
b/inference-engine/tests/functional/plugin/shared/include/behavior/core_threading_tests.hpp index 6a9275f5b15..e346e5e33ec 100644 --- a/inference-engine/tests/functional/plugin/shared/include/behavior/core_threading_tests.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/behavior/core_threading_tests.hpp @@ -3,7 +3,6 @@ // #include -#include
#include #include #include @@ -56,7 +55,7 @@ public: void safePluginUnregister(InferenceEngine::Core & ie) { try { ie.UnregisterPlugin(deviceName); - } catch (const InferenceEngine::details::InferenceEngineException & ex) { + } catch (const InferenceEngine::Exception & ex) { // if several threads unload plugin at once, the first thread does this // while all others will throw an exception that plugin is not registered ASSERT_STR_CONTAINS(ex.what(), "name is not registered in the"); @@ -68,7 +67,7 @@ public: auto extension = std::make_shared( FileUtils::makePluginLibraryName({}, "template_extension")); ie.AddExtension(extension); - } catch (const InferenceEngine::details::InferenceEngineException & ex) { + } catch (const InferenceEngine::Exception & ex) { ASSERT_STR_CONTAINS(ex.what(), "name: experimental"); } } diff --git a/inference-engine/tests/functional/plugin/shared/include/behavior/infer_request.hpp b/inference-engine/tests/functional/plugin/shared/include/behavior/infer_request.hpp index 793e0eb215a..33357685dd4 100644 --- a/inference-engine/tests/functional/plugin/shared/include/behavior/infer_request.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/behavior/infer_request.hpp @@ -95,7 +95,7 @@ TEST_P(InferRequestTests, failToSetNullptrForInput) { ASSERT_NO_THROW(req = execNet.CreateInferRequest()); InferenceEngine::Blob::Ptr inputBlob = nullptr; ASSERT_THROW(req.SetBlob(cnnNet.getInputsInfo().begin()->first, inputBlob), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_P(InferRequestTests, failToSetEmptyInputBlob) { @@ -110,7 +110,7 @@ TEST_P(InferRequestTests, failToSetEmptyInputBlob) { ASSERT_NO_THROW(req = execNet.CreateInferRequest()); InferenceEngine::Blob::Ptr blob; ASSERT_THROW(req.SetBlob(cnnNet.getInputsInfo().begin()->first, blob), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_P(InferRequestTests, failToSetEmptyOutputBlob) { @@ -125,7 +125,7 
@@ TEST_P(InferRequestTests, failToSetEmptyOutputBlob) { ASSERT_NO_THROW(req = execNet.CreateInferRequest()); InferenceEngine::Blob::Ptr blob; ASSERT_THROW(req.SetBlob(cnnNet.getOutputsInfo().begin()->first, blob), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_P(InferRequestTests, failToSetNotAllocatedInput) { @@ -173,7 +173,7 @@ TEST_P(InferRequestTests, failToSetBlobWithIncorrectName) { FuncTestUtils::createAndFillBlob(cnnNet.getInputsInfo().begin()->second->getTensorDesc()); blob->allocate(); ASSERT_THROW(req.SetBlob(incorrect_input_name, blob), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_P(InferRequestTests, failToSetInputWithIncorrectSizes) { @@ -191,7 +191,7 @@ TEST_P(InferRequestTests, failToSetInputWithIncorrectSizes) { blob->allocate(); blob->getTensorDesc().getDims()[0]*=2; ASSERT_THROW(req.SetBlob(cnnNet.getInputsInfo().begin()->first, blob), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_P(InferRequestTests, failToSetOutputWithIncorrectSizes) { @@ -209,7 +209,7 @@ TEST_P(InferRequestTests, failToSetOutputWithIncorrectSizes) { blob->allocate(); blob->getTensorDesc().getDims()[0]*=2; ASSERT_THROW(req.SetBlob(cnnNet.getOutputsInfo().begin()->first, blob), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_P(InferRequestTests, canInferWithoutSetAndGetInOut) { @@ -288,7 +288,7 @@ TEST_P(InferRequestTests, canProcessDeallocatedInputBlobAfterSetBlob) { blob->allocate(); ASSERT_NO_THROW(req.SetBlob(cnnNet.getInputsInfo().begin()->first, blob)); blob->deallocate(); - ASSERT_THROW(req.Infer(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.Infer(), InferenceEngine::Exception); } TEST_P(InferRequestTests, canProcessDeallocatedOutputBlobAfterGetBlob) { @@ -306,7 +306,7 @@ TEST_P(InferRequestTests, canProcessDeallocatedOutputBlobAfterGetBlob) { 
blob->allocate(); ASSERT_NO_THROW(req.SetBlob(cnnNet.getOutputsInfo().begin()->first, blob)); blob->deallocate(); - ASSERT_THROW(req.Infer(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.Infer(), InferenceEngine::Exception); } TEST_P(InferRequestTests, canProcessDeallocatedOutputBlobAfterGetBlobForAsync) { @@ -324,8 +324,8 @@ TEST_P(InferRequestTests, canProcessDeallocatedOutputBlobAfterGetBlobForAsync) { blob->allocate(); ASSERT_NO_THROW(req.SetBlob(cnnNet.getOutputsInfo().begin()->first, blob)); blob->deallocate(); - ASSERT_THROW(req.Infer(), InferenceEngine::details::InferenceEngineException); - ASSERT_THROW(req.StartAsync(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.Infer(), InferenceEngine::Exception); + ASSERT_THROW(req.StartAsync(), InferenceEngine::Exception); } TEST_P(InferRequestTests, canProcessDeallocatedOutputBlobAfterGetAndSetBlob) { @@ -344,7 +344,7 @@ TEST_P(InferRequestTests, canProcessDeallocatedOutputBlobAfterGetAndSetBlob) { ASSERT_NO_THROW(blob = req.GetBlob(cnnNet.getOutputsInfo().begin()->first)); ASSERT_NO_THROW(req.SetBlob(cnnNet.getOutputsInfo().begin()->first, blob)); blob->deallocate(); - ASSERT_THROW(req.Infer(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.Infer(), InferenceEngine::Exception); } TEST_P(InferRequestTests, canProcessDeallocatedOutputBlobAfterSetBlob) { @@ -362,7 +362,7 @@ TEST_P(InferRequestTests, canProcessDeallocatedOutputBlobAfterSetBlob) { blob->allocate(); ASSERT_NO_THROW(req.SetBlob(cnnNet.getOutputsInfo().begin()->first, blob)); blob->deallocate(); - ASSERT_THROW(req.Infer(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.Infer(), InferenceEngine::Exception); } TEST_P(InferRequestTests, secondCallGetOutputDoNotReAllocateData) { @@ -441,7 +441,7 @@ TEST_P(InferRequestTests, FailedAsyncInferWithNegativeTimeForWait) { ASSERT_NO_THROW(blob = req.GetBlob(cnnNet.getInputsInfo().begin()->first)); req.Infer(); 
req.StartAsync(); - ASSERT_THROW(req.Wait(-2), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(req.Wait(-2), InferenceEngine::Exception); } TEST_P(InferRequestTests, canRun3SyncRequestsConsistentlyFromThreads) { diff --git a/inference-engine/tests/functional/plugin/shared/include/behavior/test_plugin.hpp b/inference-engine/tests/functional/plugin/shared/include/behavior/test_plugin.hpp index afb611cff58..0688f3669d0 100644 --- a/inference-engine/tests/functional/plugin/shared/include/behavior/test_plugin.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/behavior/test_plugin.hpp @@ -53,7 +53,7 @@ TEST_P(BehaviorTests, canNotLoadNetworkWithoutWeights) { InferenceEngine::Core core; auto model = FuncTestUtils::TestModel::convReluNormPoolFcModelFP32; ASSERT_THROW(core.ReadNetwork(model.model_xml_str, InferenceEngine::Blob::CPtr()), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_P(BehaviorTests, pluginDoesNotChangeOriginalNetwork) { @@ -83,17 +83,7 @@ TEST_P(BehaviorTestInput, canSetInputPrecisionForNetwork) { || targetDevice == CommonTestUtils::DEVICE_HDDL || targetDevice == CommonTestUtils::DEVICE_KEEMBAY) && netPrecision == InferenceEngine::Precision::I16) { - std::string msg; - InferenceEngine::StatusCode sts = InferenceEngine::StatusCode::OK; - try { - ie->LoadNetwork(cnnNet, targetDevice, configuration); - } catch (InferenceEngine::details::InferenceEngineException & ex) { - msg = ex.what(); - sts = ex.getStatus(); - } - ASSERT_EQ(InferenceEngine::StatusCode::GENERAL_ERROR, sts) << msg; - std::string refError = "Input image format I16 is not supported yet."; - ASSERT_EQ(refError, msg); + ASSERT_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration), InferenceEngine::GeneralError); } else { ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); } @@ -107,24 +97,14 @@ TEST_P(BehaviorTestOutput, canSetOutputPrecisionForNetwork) { InferenceEngine::OutputsDataMap 
output_info; InferenceEngine::CNNNetwork cnnNet(function); setOutputNetworkPrecision(cnnNet, output_info, netPrecision); - - std::string msg; - InferenceEngine::StatusCode sts = InferenceEngine::StatusCode::OK; - - try { - InferenceEngine::ExecutableNetwork exeNetwork = ie->LoadNetwork(cnnNet, targetDevice, configuration); - } catch (InferenceEngine::details::InferenceEngineException & ex) { - sts = ex.getStatus(); - msg = ex.what(); - std::cout << "LoadNetwork() threw InferenceEngineException. Status: " << sts << ", message: " << msg << std::endl; - } - if ((netPrecision == InferenceEngine::Precision::I16 || netPrecision == InferenceEngine::Precision::U8)) { if ((targetDevice == "CPU") || (targetDevice == "GPU")) { - ASSERT_EQ(InferenceEngine::StatusCode::OK, sts); + ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); + } else { + GTEST_SKIP(); } } else { - ASSERT_EQ(InferenceEngine::StatusCode::OK, sts); + ASSERT_NO_THROW(ie->LoadNetwork(cnnNet, targetDevice, configuration)); } } } // namespace BehaviorTestsDefinitions \ No newline at end of file diff --git a/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/concat_quantization.hpp b/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/concat_quantization.hpp index fae3aff2fd6..298222813a2 100644 --- a/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/concat_quantization.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/subgraph_tests/concat_quantization.hpp @@ -18,7 +18,7 @@ TEST_P(ConcatQuantization, CompareWithRefImpl) { InferenceEngine::CNNNetwork cnnNetwork = InferenceEngine::CNNNetwork{ function }; executableNetwork = core->LoadNetwork(cnnNetwork, targetDevice); } - catch (InferenceEngine::details::InferenceEngineException & ex) { + catch (InferenceEngine::Exception & ex) { FAIL() << ex.what(); } }; diff --git a/inference-engine/tests/functional/plugin/shared/src/behavior/invalid_cases/proposal.cpp 
b/inference-engine/tests/functional/plugin/shared/src/behavior/invalid_cases/proposal.cpp index 8805b18f2af..826286c6650 100644 --- a/inference-engine/tests/functional/plugin/shared/src/behavior/invalid_cases/proposal.cpp +++ b/inference-engine/tests/functional/plugin/shared/src/behavior/invalid_cases/proposal.cpp @@ -100,5 +100,5 @@ void ProposalBehTest::Run() { } TEST_P(ProposalBehTest, CompareWithRefs) { - ASSERT_THROW(Run(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(Run(), InferenceEngine::Exception); } diff --git a/inference-engine/tests/functional/plugin/shared/src/behavior/layout.cpp b/inference-engine/tests/functional/plugin/shared/src/behavior/layout.cpp index 1442012cc6f..6da7aff5181 100644 --- a/inference-engine/tests/functional/plugin/shared/src/behavior/layout.cpp +++ b/inference-engine/tests/functional/plugin/shared/src/behavior/layout.cpp @@ -105,7 +105,7 @@ TEST_P(LayoutTest, NetWithLayout) { ASSERT_EQ(inputBlob->getTensorDesc().getLayout(), layout); } else { ASSERT_THROW(cnnNet.getInputsInfo().begin()->second->setLayout(layout), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } } } // namespace BehaviorTestsDefinitions diff --git a/inference-engine/tests/functional/plugin/shared/src/behavior/set_blob_of_kind.cpp b/inference-engine/tests/functional/plugin/shared/src/behavior/set_blob_of_kind.cpp index e39ef624d52..4ac146f3492 100644 --- a/inference-engine/tests/functional/plugin/shared/src/behavior/set_blob_of_kind.cpp +++ b/inference-engine/tests/functional/plugin/shared/src/behavior/set_blob_of_kind.cpp @@ -82,7 +82,7 @@ void SetBlobOfKindTest::ExpectSetBlobThrow() { const auto &info = input.second; auto blob = GenerateInput(*info); EXPECT_THROW(inferRequest.SetBlob(info->name(), blob), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } } diff --git a/inference-engine/tests/functional/plugin/shared/src/hetero/synthetic.cpp 
b/inference-engine/tests/functional/plugin/shared/src/hetero/synthetic.cpp index 73e2731f305..6ebbb663c30 100644 --- a/inference-engine/tests/functional/plugin/shared/src/hetero/synthetic.cpp +++ b/inference-engine/tests/functional/plugin/shared/src/hetero/synthetic.cpp @@ -90,7 +90,7 @@ void HeteroSyntheticTest::SetUp() { bool registred = true; try { PluginCache::get().ie()->RegisterPlugin(pluginParameter._location, pluginParameter._name); - } catch (InferenceEngine::details::InferenceEngineException& ex) { + } catch (InferenceEngine::Exception& ex) { if (std::string{ex.what()}.find("Device with \"" + pluginParameter._name + "\" is already registered in the InferenceEngine") == std::string::npos) { diff --git a/inference-engine/tests/functional/plugin/shared/src/ngraph_conversion_tests/plugin_specific_ngraph_conversion.cpp b/inference-engine/tests/functional/plugin/shared/src/ngraph_conversion_tests/plugin_specific_ngraph_conversion.cpp index c10556a0a61..cfef2af4490 100644 --- a/inference-engine/tests/functional/plugin/shared/src/ngraph_conversion_tests/plugin_specific_ngraph_conversion.cpp +++ b/inference-engine/tests/functional/plugin/shared/src/ngraph_conversion_tests/plugin_specific_ngraph_conversion.cpp @@ -35,7 +35,7 @@ TEST_P(PluginSpecificConversion, addOutputAfterLoadNetwork) { InferenceEngine::ExecutableNetwork exeNetwork = ie.LoadNetwork(network, device); network.addOutput("add1"); InferenceEngine::ExecutableNetwork exeNetwork2 = ie.LoadNetwork(network, device); - } catch (InferenceEngine::details::InferenceEngineException& ex) { + } catch (InferenceEngine::Exception& ex) { FAIL() << ex.what(); } } diff --git a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/activation.hpp b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/activation.hpp index 1552ca270fe..a85c0929b19 100644 --- 
a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/activation.hpp +++ b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/activation.hpp @@ -15,7 +15,6 @@ #include "ie_core.hpp" #include "ie_precision.hpp" -#include "details/ie_exception.hpp" #include "ngraph/opsets/opset1.hpp" diff --git a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/grn.hpp b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/grn.hpp index 8aebddcf9df..d45312fe341 100644 --- a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/grn.hpp +++ b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/grn.hpp @@ -16,7 +16,6 @@ #include "ie_core.hpp" #include "ie_precision.hpp" -#include "details/ie_exception.hpp" #include "ngraph/opsets/opset1.hpp" diff --git a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/prior_box_clustered.hpp b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/prior_box_clustered.hpp index e7e5110d683..a8483549bf5 100644 --- a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/prior_box_clustered.hpp +++ b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/prior_box_clustered.hpp @@ -16,7 +16,6 @@ #include "ie_core.hpp" #include "ie_precision.hpp" -#include "details/ie_exception.hpp" #include "ngraph/opsets/opset1.hpp" diff --git a/inference-engine/tests/ie_test_utils/common_test_utils/common_layers_params.hpp b/inference-engine/tests/ie_test_utils/common_test_utils/common_layers_params.hpp index 391d93d09e3..30f5c2a9208 100644 --- a/inference-engine/tests/ie_test_utils/common_test_utils/common_layers_params.hpp +++ 
b/inference-engine/tests/ie_test_utils/common_test_utils/common_layers_params.hpp @@ -8,7 +8,6 @@ #include #include // to allow putting vector into exception string stream -#include "details/ie_exception.hpp" #include #include "ie_blob.h" diff --git a/inference-engine/tests/ie_test_utils/common_test_utils/common_utils.hpp b/inference-engine/tests/ie_test_utils/common_test_utils/common_utils.hpp index 97724ece09d..ca9feceea03 100644 --- a/inference-engine/tests/ie_test_utils/common_test_utils/common_utils.hpp +++ b/inference-engine/tests/ie_test_utils/common_test_utils/common_utils.hpp @@ -57,10 +57,7 @@ inline InferenceEngine::CNNLayerPtr getLayerByName(const InferenceEngine::CNNNet return layer; ++i; } - - std::stringstream stream; - stream << "Layer " << layerName << " not found in network"; - throw InferenceEngine::NotFound(stream.str()); + THROW_IE_EXCEPTION_WITH_STATUS(NotFound) << "Layer " << layerName << " not found in network"; } template diff --git a/inference-engine/tests/ie_test_utils/common_test_utils/data_utils.cpp b/inference-engine/tests/ie_test_utils/common_test_utils/data_utils.cpp index 4867d8d1746..401f80dfe30 100644 --- a/inference-engine/tests/ie_test_utils/common_test_utils/data_utils.cpp +++ b/inference-engine/tests/ie_test_utils/common_test_utils/data_utils.cpp @@ -5,7 +5,6 @@ #include #include // to allow putting vector into exception string stream -#include
#include #include diff --git a/inference-engine/tests/ie_test_utils/common_test_utils/xml_net_builder/xml_net_builder.cpp b/inference-engine/tests/ie_test_utils/common_test_utils/xml_net_builder/xml_net_builder.cpp index fff43fd223a..48b2910d760 100644 --- a/inference-engine/tests/ie_test_utils/common_test_utils/xml_net_builder/xml_net_builder.cpp +++ b/inference-engine/tests/ie_test_utils/common_test_utils/xml_net_builder/xml_net_builder.cpp @@ -5,7 +5,6 @@ #include #include #include -#include "details/ie_exception.hpp" #include "common_test_utils/xml_net_builder/xml_net_builder.hpp" namespace CommonTestUtils { diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp index 18b6f7388bf..1b016807a30 100644 --- a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp +++ b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp @@ -26,7 +26,7 @@ Parameter MockPlugin::GetMetric(const std::string& name, const std::mapGetMetric(name, options); } else { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } } @@ -36,7 +36,7 @@ MockPlugin::LoadNetwork(const CNNNetwork &network, if (_target) { return _target->LoadNetwork(network, config); } else { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } } @@ -46,7 +46,7 @@ MockPlugin::LoadNetwork(const CNNNetwork& network, const std::mapLoadNetwork(network, config, context); } else { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } } @@ -62,7 +62,7 @@ MockPlugin::ImportNetworkImpl(std::istream& networkModel, if (_target) { return _target->ImportNetwork(networkModel, config); } else { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } } 
@@ -73,7 +73,7 @@ MockPlugin::ImportNetworkImpl(std::istream& networkModel, if (_target) { return _target->ImportNetwork(networkModel, context, config); } else { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } } @@ -81,7 +81,7 @@ InferenceEngine::RemoteContext::Ptr MockPlugin::GetDefaultContext(const Inferenc if (_target) { return _target->GetDefaultContext(params); } else { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } } @@ -91,7 +91,7 @@ MockPlugin::QueryNetwork(const InferenceEngine::CNNNetwork& network, if (_target) { return _target->QueryNetwork(network, config); } else { - THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); + THROW_IE_EXCEPTION_WITH_STATUS(NotImplemented); } } diff --git a/inference-engine/tests/unit/cpu/mkldnn_memory_desc_test.cpp b/inference-engine/tests/unit/cpu/mkldnn_memory_desc_test.cpp index a02edca942f..43da25e3141 100644 --- a/inference-engine/tests/unit/cpu/mkldnn_memory_desc_test.cpp +++ b/inference-engine/tests/unit/cpu/mkldnn_memory_desc_test.cpp @@ -6,7 +6,6 @@ #include #include "mkldnn_memory.h" -#include "details/ie_exception.hpp" using namespace MKLDNNPlugin; using namespace InferenceEngine; diff --git a/inference-engine/tests/unit/cpu/mkldnn_memory_solver_test.cpp b/inference-engine/tests/unit/cpu/mkldnn_memory_solver_test.cpp index 3accbb784c6..4038a6da020 100644 --- a/inference-engine/tests/unit/cpu/mkldnn_memory_solver_test.cpp +++ b/inference-engine/tests/unit/cpu/mkldnn_memory_solver_test.cpp @@ -4,9 +4,9 @@ #include #include +#include #include "mkldnn_memory_solver.hpp" -#include "details/ie_exception.hpp" using Box = MKLDNNPlugin::MemorySolver::Box; @@ -68,7 +68,7 @@ TEST(MemSolverTest, GetOffsetThrowException) { MKLDNNPlugin::MemorySolver ms(boxes); ms.solve(); - EXPECT_THROW(ms.getOffset(100), InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(ms.getOffset(100), InferenceEngine::Exception); } 
TEST(MemSolverTest, LinearAndEven) { diff --git a/inference-engine/tests/unit/cpu/mkldnn_memory_test.cpp b/inference-engine/tests/unit/cpu/mkldnn_memory_test.cpp index 4d099dc226f..623f0109ed5 100644 --- a/inference-engine/tests/unit/cpu/mkldnn_memory_test.cpp +++ b/inference-engine/tests/unit/cpu/mkldnn_memory_test.cpp @@ -6,7 +6,6 @@ #include #include "mkldnn_memory.h" -#include "details/ie_exception.hpp" using namespace MKLDNNPlugin; using namespace InferenceEngine; diff --git a/inference-engine/tests/unit/gna/gna_model_serial_test.cpp b/inference-engine/tests/unit/gna/gna_model_serial_test.cpp index e41cad1d558..e7d28c8b06d 100644 --- a/inference-engine/tests/unit/gna/gna_model_serial_test.cpp +++ b/inference-engine/tests/unit/gna/gna_model_serial_test.cpp @@ -21,5 +21,5 @@ TEST(GNAModelSerialTest, TestErrorOnTellg) { IstreamMock mock; EXPECT_CALL(mock, seekoff(_, _, _)).WillRepeatedly(Return(-1)); std::istream is(&mock); - ASSERT_THROW(GNAModelSerial::ReadHeader(is), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(GNAModelSerial::ReadHeader(is), InferenceEngine::Exception); } diff --git a/inference-engine/tests/unit/gna/gna_plugin_config_test.cpp b/inference-engine/tests/unit/gna/gna_plugin_config_test.cpp index 7b9337a10fb..985527d088e 100644 --- a/inference-engine/tests/unit/gna/gna_plugin_config_test.cpp +++ b/inference-engine/tests/unit/gna/gna_plugin_config_test.cpp @@ -35,7 +35,7 @@ protected: } void ExpectThrow(const std::string& key, const std::string& val) { EXPECT_THROW(config.UpdateFromMap({{key, val}}), - details::InferenceEngineException); + Exception); } void SetAndCheckFlag(const std::string& key, bool& val, bool reverse = false) { const bool yes = reverse ? 
false : true; diff --git a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_base_test.cpp b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_base_test.cpp index 64522c7330a..e6925d26225 100644 --- a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_base_test.cpp +++ b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_base_test.cpp @@ -228,8 +228,8 @@ protected: _incorrectName = "incorrect_name"; _inputName = MockNotEmptyICNNNetwork::INPUT_BLOB_NAME; _failedToFindInOutError = - NOT_FOUND_str + "Failed to find input or output with name: \'" + _incorrectName + "\'"; - _inputDataNotAllocatedError = std::string("Input data was not allocated. Input name: \'") + "[ NOT_FOUND ] Failed to find input or output with name: \'" + _incorrectName + "\'"; + _inputDataNotAllocatedError = std::string("[ NOT_ALLOCATED ] Input data was not allocated. Input name: \'") + _inputName + "\'"; _inputDataIsEmptyError = std::string("Input data is empty. 
Input name: \'") + _inputName + "\'"; @@ -250,7 +250,7 @@ protected: std::string actualError; try { function(); - } catch (const InferenceEngineException &iie) { + } catch (const Exception &iie) { actualError = iie.what(); } return actualError; @@ -280,7 +280,7 @@ TEST_F(InferRequestTests, constructorsTests) { ASSERT_NO_THROW(InferRequest req(mock_request)); IInferRequest::Ptr tmp; // InferRequest's "actual" is nullptr, let's check it throws on construction - ASSERT_THROW(InferRequest req(tmp), InferenceEngineException); + ASSERT_THROW(InferRequest req(tmp), Exception); } // StartAsync @@ -291,7 +291,7 @@ TEST_F(InferRequestTests, canForwardStartAsync) { TEST_F(InferRequestTests, throwsIfStartAsyncReturnNotOK) { EXPECT_CALL(*mock_request.get(), StartAsync(_)).WillOnce(Return(GENERAL_ERROR)); - ASSERT_THROW(requestWrapper->StartAsync(), InferenceEngineException); + ASSERT_THROW(requestWrapper->StartAsync(), Exception); } // Wait @@ -314,7 +314,7 @@ TEST_F(InferRequestTests, canForwardInfer) { TEST_F(InferRequestTests, throwsIfInferReturnNotOK) { EXPECT_CALL(*mock_request.get(), Infer(_)).WillOnce(Return(GENERAL_ERROR)); - ASSERT_THROW(requestWrapper->Infer(), InferenceEngineException); + ASSERT_THROW(requestWrapper->Infer(), Exception); } // GetPerformanceCounts @@ -327,7 +327,7 @@ TEST_F(InferRequestTests, canForwardGetPerformanceCounts) { TEST_F(InferRequestTests, throwsIfGetPerformanceCountsReturnNotOK) { std::map info; EXPECT_CALL(*mock_request.get(), GetPerformanceCounts(_, _)).WillOnce(Return(GENERAL_ERROR)); - ASSERT_THROW(info = requestWrapper->GetPerformanceCounts(), InferenceEngineException); + ASSERT_THROW(info = requestWrapper->GetPerformanceCounts(), Exception); } MATCHER_P(blob_in_map_pointer_is_same, ref_blob, "") { @@ -351,7 +351,7 @@ TEST_F(InferRequestTests, getInputCallsSetBlob) { TEST_F(InferRequestTests, throwsIfSetInputReturnNotOK) { EXPECT_CALL(*mock_request.get(), SetBlob(_, _, _)).WillOnce(Return(GENERAL_ERROR)); BlobMap blobMap{{{}, {}}}; - 
ASSERT_THROW(requestWrapper->SetInput(blobMap), InferenceEngineException); + ASSERT_THROW(requestWrapper->SetInput(blobMap), Exception); } // SetOutput @@ -382,7 +382,7 @@ TEST_F(InferRequestTests, throwsIfGetBlobReturnNotOK) { std::string name = "blob1"; EXPECT_CALL(*mock_request.get(), GetBlob(_, _, _)).WillOnce(Return(GENERAL_ERROR)); - ASSERT_THROW(blob = requestWrapper->GetBlob(name), InferenceEngineException); + ASSERT_THROW(blob = requestWrapper->GetBlob(name), Exception); } // SetBlob @@ -399,13 +399,13 @@ TEST_F(InferRequestTests, throwsIfSetBlobReturnNotOK) { std::string name = "blob1"; EXPECT_CALL(*mock_request.get(), SetBlob(_, _, _)).WillOnce(Return(GENERAL_ERROR)); - ASSERT_THROW(requestWrapper->SetBlob(name, blob), InferenceEngineException); + ASSERT_THROW(requestWrapper->SetBlob(name, blob), Exception); } TEST_F(InferRequestTests, throwsIfSetOutputReturnNotOK) { EXPECT_CALL(*mock_request.get(), SetBlob(_, _, _)).WillOnce(Return(GENERAL_ERROR)); BlobMap blobMap{{{}, {}}}; - ASSERT_THROW(requestWrapper->SetOutput(blobMap), InferenceEngineException); + ASSERT_THROW(requestWrapper->SetOutput(blobMap), Exception); } // SetCompletionCallback API @@ -446,27 +446,23 @@ TEST_F(InferRequestTests, canForwardAnyCallback) { TEST_F(InferRequestTests, failToSetInputWithInCorrectName) { auto InferRequest = getInferRequestWithMockImplInside(); auto blobMap = getBlobMapWithIncorrectName(); - auto exceptionMessage = getExceptionMessage([&]() { InferRequest->SetInput(blobMap); }); - ASSERT_EQ(_failedToFindInOutError, exceptionMessage.substr(0, _failedToFindInOutError.size())); + ASSERT_THROW(InferRequest->SetInput(blobMap), NotFound); } TEST_F(InferRequestTests, failToSetOutputWithInCorrectName) { auto InferRequest = getInferRequestWithMockImplInside(); auto blobMap = getBlobMapWithIncorrectName(); - auto exceptionMessage = getExceptionMessage([&]() { InferRequest->SetOutput(blobMap); }); - ASSERT_EQ(_failedToFindInOutError, exceptionMessage.substr(0, 
_failedToFindInOutError.size())); + ASSERT_THROW(InferRequest->SetOutput(blobMap), NotFound); } TEST_F(InferRequestTests, failToSetInputWithNotAllocatedInput) { auto InferRequest = getInferRequestWithMockImplInside(); auto blobMap = getBlobMapWithNotAllocatedInput(); - auto exceptionMessage = getExceptionMessage([&]() { InferRequest->SetInput(blobMap); }); - ASSERT_EQ(_inputDataNotAllocatedError, exceptionMessage.substr(0, _inputDataNotAllocatedError.size())); + ASSERT_THROW(InferRequest->SetInput(blobMap), NotAllocated); } TEST_F(InferRequestTests, failToSetInputWithEmptyDimensions) { auto InferRequest = getInferRequestWithMockImplInside(); auto blobMap = getBlobMapWithEmptyDimensions(); - auto exceptionMessage = getExceptionMessage([&]() { InferRequest->SetInput(blobMap); }); - ASSERT_EQ(_inputDataIsEmptyError, exceptionMessage.substr(0, _inputDataIsEmptyError.size())); + ASSERT_THROW(InferRequest->SetInput(blobMap), GeneralError); } diff --git a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_thread_safe_default_test.cpp b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_thread_safe_default_test.cpp index 7ae41e057da..f93b191de5c 100644 --- a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_thread_safe_default_test.cpp +++ b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_infer_async_request_thread_safe_default_test.cpp @@ -66,16 +66,6 @@ protected: mockInferRequestInternal = make_shared(inputsInfo, outputsInfo); testRequest = make_shared(mockInferRequestInternal, mockTaskExecutor, mockTaskExecutor); } - - bool _doesThrowExceptionWithMessage(std::function func, string refError) { - std::string whatMessage; - try { - func(); - } catch (const InferenceEngineException &iee) { - whatMessage = iee.what(); - } - return whatMessage.find(refError) != std::string::npos; - } }; // StartAsync @@ -84,7 +74,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, 
returnRequestBusyOnStartAsync) { testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); EXPECT_CALL(*mockInferRequestInternal, InferImpl()).Times(1).WillOnce(Return()); ASSERT_NO_THROW(testRequest->StartAsync()); - ASSERT_TRUE(_doesThrowExceptionWithMessage([this]() { testRequest->StartAsync(); }, REQUEST_BUSY_str)); + ASSERT_THROW(testRequest->StartAsync(), RequestBusy); taskExecutor->executeAll(); } @@ -92,10 +82,10 @@ TEST_F(InferRequestThreadSafeDefaultTests, canResetBusyStatusIfStartAsyncFails) auto taskExecutor = std::make_shared(); testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); EXPECT_CALL(*mockInferRequestInternal, checkBlobs()).Times(2) - .WillOnce(Throw(InferenceEngineException(__FILE__, __LINE__) << "compare")) + .WillOnce(Throw(GeneralError{""})) .WillOnce(Return()); - ASSERT_TRUE(_doesThrowExceptionWithMessage([&]() { testRequest->StartAsync(); }, "compare")); + ASSERT_THROW(testRequest->StartAsync(), GeneralError); ASSERT_NO_THROW(testRequest->StartAsync()); taskExecutor->executeAll(); } @@ -106,7 +96,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, returnRequestBusyOnGetUserData) { testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); EXPECT_CALL(*mockInferRequestInternal, InferImpl()).Times(1).WillOnce(Return()); ASSERT_NO_THROW(testRequest->StartAsync()); - ASSERT_TRUE(_doesThrowExceptionWithMessage([this]() { testRequest->GetUserData(nullptr); }, REQUEST_BUSY_str)); + ASSERT_THROW(testRequest->GetUserData(nullptr), RequestBusy); taskExecutor->executeAll(); } @@ -116,7 +106,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, returnRequestBusyOnSetUserData) { testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); EXPECT_CALL(*mockInferRequestInternal, InferImpl()).Times(1).WillOnce(Return()); ASSERT_NO_THROW(testRequest->StartAsync()); - ASSERT_TRUE(_doesThrowExceptionWithMessage([this]() { testRequest->SetUserData(nullptr); }, 
REQUEST_BUSY_str)); + ASSERT_THROW(testRequest->SetUserData(nullptr), RequestBusy); taskExecutor->executeAll(); } @@ -133,7 +123,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, returnRequestBusyOnInfer) { testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); EXPECT_CALL(*mockInferRequestInternal, InferImpl()).Times(1).WillOnce(Return()); ASSERT_NO_THROW(testRequest->StartAsync()); - ASSERT_TRUE(_doesThrowExceptionWithMessage([this]() { testRequest->Infer(); }, REQUEST_BUSY_str)); + ASSERT_THROW(testRequest->Infer(), RequestBusy); taskExecutor->executeAll(); } @@ -141,9 +131,10 @@ TEST_F(InferRequestThreadSafeDefaultTests, canResetBusyStatusIfInferFails) { auto taskExecutor = std::make_shared(); testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); EXPECT_CALL(*mockInferRequestInternal, InferImpl()).Times(2) - .WillOnce(Throw(InferenceEngineException(__FILE__, __LINE__) << "compare")) + .WillOnce(Throw(GeneralError{""})) .WillOnce(Return()); - ASSERT_TRUE(_doesThrowExceptionWithMessage([this]() { testRequest->Infer(); }, "compare")); + ASSERT_THROW(testRequest->Infer(), GeneralError); + ASSERT_NO_THROW(testRequest->Infer()); taskExecutor->executeAll(); } @@ -154,9 +145,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, returnRequestBusyOnGetPerformanceCoun testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); EXPECT_CALL(*mockInferRequestInternal, InferImpl()).Times(1).WillOnce(Return()); ASSERT_NO_THROW(testRequest->StartAsync()); - ASSERT_TRUE(_doesThrowExceptionWithMessage([this]() { - auto info = testRequest->GetPerformanceCounts(); - }, REQUEST_BUSY_str)); + ASSERT_THROW(testRequest->GetPerformanceCounts(), RequestBusy); taskExecutor->executeAll(); } @@ -166,9 +155,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, returnRequestBusyOnGetBlob) { testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); EXPECT_CALL(*mockInferRequestInternal, 
InferImpl()).Times(1).WillOnce(Return()); ASSERT_NO_THROW(testRequest->StartAsync()); - ASSERT_TRUE(_doesThrowExceptionWithMessage([this]() { - auto data = testRequest->GetBlob({}); - }, REQUEST_BUSY_str)); + ASSERT_THROW(testRequest->GetBlob({}), RequestBusy); taskExecutor->executeAll(); } @@ -178,7 +165,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, returnRequestBusyOnSetBlob) { testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); EXPECT_CALL(*mockInferRequestInternal, InferImpl()).Times(1).WillOnce(Return()); ASSERT_NO_THROW(testRequest->StartAsync()); - ASSERT_TRUE(_doesThrowExceptionWithMessage([this]() { testRequest->SetBlob({}, {}); }, REQUEST_BUSY_str)); + ASSERT_THROW(testRequest->SetBlob({}, {}), RequestBusy); taskExecutor->executeAll(); } @@ -188,8 +175,7 @@ TEST_F(InferRequestThreadSafeDefaultTests, returnRequestBusyOnSetCompletionCallb testRequest = make_shared(mockInferRequestInternal, taskExecutor, taskExecutor); EXPECT_CALL(*mockInferRequestInternal, InferImpl()).Times(1).WillOnce(Return()); ASSERT_NO_THROW(testRequest->StartAsync()); - ASSERT_TRUE(_doesThrowExceptionWithMessage([this]() { testRequest->SetCompletionCallback(nullptr); }, - REQUEST_BUSY_str)); + ASSERT_THROW(testRequest->SetCompletionCallback({}), RequestBusy); taskExecutor->executeAll(); } diff --git a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_plugin_test.cpp b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_plugin_test.cpp index f85f4bef209..f9d2dfa9ba1 100644 --- a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_plugin_test.cpp +++ b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_plugin_test.cpp @@ -70,67 +70,72 @@ TEST_F(InferenceEnginePluginInternalTest, failToSetBlobWithInCorrectName) { Blob::Ptr inBlob = make_shared_blob({ Precision::FP32, {1, 1, 1, 1}, NCHW }); inBlob->allocate(); string inputName = "not_input"; - std::string refError = NOT_FOUND_str + "Failed to find input or 
output with name: \'" + inputName + "\'"; + std::string refError = "[ NOT_FOUND ] Failed to find input or output with name: \'" + inputName + "\'"; IInferRequest::Ptr inferRequest; getInferRequestWithMockImplInside(inferRequest); ASSERT_NO_THROW(sts = inferRequest->SetBlob(inputName.c_str(), inBlob, &dsc)); - ASSERT_EQ(StatusCode::GENERAL_ERROR, sts); - dsc.msg[refError.length()] = '\0'; - ASSERT_EQ(refError, dsc.msg); + ASSERT_EQ(StatusCode::NOT_FOUND, sts); + ASSERT_TRUE(std::string{dsc.msg}.find(refError) != std::string::npos) + << "\tExpected: " << refError + << "\n\tActual: " << dsc.msg; } TEST_F(InferenceEnginePluginInternalTest, failToSetBlobWithEmptyName) { Blob::Ptr inBlob = make_shared_blob({ Precision::FP32, {}, NCHW }); inBlob->allocate(); string inputName = "not_input"; - std::string refError = NOT_FOUND_str + "Failed to set blob with empty name"; + std::string refError = "[ NOT_FOUND ] Failed to set blob with empty name"; IInferRequest::Ptr inferRequest; getInferRequestWithMockImplInside(inferRequest); ASSERT_NO_THROW(sts = inferRequest->SetBlob("", inBlob, &dsc)); - ASSERT_EQ(StatusCode::GENERAL_ERROR, sts); - dsc.msg[refError.length()] = '\0'; - ASSERT_EQ(refError, dsc.msg); + ASSERT_EQ(StatusCode::NOT_FOUND, sts); + ASSERT_TRUE(std::string{dsc.msg}.find(refError) != std::string::npos) + << "\tExpected: " << refError + << "\n\tActual: " << dsc.msg; } TEST_F(InferenceEnginePluginInternalTest, failToSetNullPtr) { string inputName = MockNotEmptyICNNNetwork::INPUT_BLOB_NAME; - std::string refError = NOT_ALLOCATED_str + "Failed to set empty blob with name: \'" + inputName + "\'"; + std::string refError = "[ NOT_ALLOCATED ] Failed to set empty blob with name: \'" + inputName + "\'"; IInferRequest::Ptr inferRequest; getInferRequestWithMockImplInside(inferRequest); Blob::Ptr inBlob = nullptr; ASSERT_NO_THROW(sts = inferRequest->SetBlob(inputName.c_str(), inBlob, &dsc)); - ASSERT_EQ(StatusCode::GENERAL_ERROR, sts); - dsc.msg[refError.length()] = '\0'; - 
ASSERT_EQ(refError, dsc.msg); + ASSERT_EQ(StatusCode::NOT_ALLOCATED, sts); + ASSERT_TRUE(std::string{dsc.msg}.find(refError) != std::string::npos) + << "\tExpected: " << refError + << "\n\tActual: " << dsc.msg; } TEST_F(InferenceEnginePluginInternalTest, failToSetEmptyBlob) { Blob::Ptr inBlob; string inputName = MockNotEmptyICNNNetwork::INPUT_BLOB_NAME; - std::string refError = NOT_ALLOCATED_str + "Failed to set empty blob with name: \'" + inputName + "\'"; + std::string refError = "[ NOT_ALLOCATED ] Failed to set empty blob with name: \'" + inputName + "\'"; IInferRequest::Ptr inferRequest; getInferRequestWithMockImplInside(inferRequest); ASSERT_NO_THROW(sts = inferRequest->SetBlob(inputName.c_str(), inBlob, &dsc)); - ASSERT_EQ(StatusCode::GENERAL_ERROR, sts); - dsc.msg[refError.length()] = '\0'; - ASSERT_EQ(refError, dsc.msg); + ASSERT_EQ(StatusCode::NOT_ALLOCATED, sts); + ASSERT_TRUE(std::string{dsc.msg}.find(refError) != std::string::npos) + << "\tExpected: " << refError + << "\n\tActual: " << dsc.msg; } TEST_F(InferenceEnginePluginInternalTest, failToSetNotAllocatedBlob) { string inputName = MockNotEmptyICNNNetwork::INPUT_BLOB_NAME; - std::string refError = "Input data was not allocated. Input name: \'" + inputName + "\'"; + std::string refError = "[ NOT_ALLOCATED ] Input data was not allocated. 
Input name: \'" + inputName + "\'"; IInferRequest::Ptr inferRequest; getInferRequestWithMockImplInside(inferRequest); Blob::Ptr blob = make_shared_blob({ Precision::FP32, {}, NCHW }); ASSERT_NO_THROW(sts = inferRequest->SetBlob(inputName.c_str(), blob, &dsc)); - ASSERT_EQ(StatusCode::GENERAL_ERROR, sts); - dsc.msg[refError.length()] = '\0'; - ASSERT_EQ(refError, dsc.msg); + ASSERT_EQ(StatusCode::NOT_ALLOCATED, sts); + ASSERT_TRUE(std::string{dsc.msg}.find(refError) != std::string::npos) + << "\tExpected: " << refError + << "\n\tActual: " << dsc.msg; } TEST_F(InferenceEnginePluginInternalTest, executableNetworkInternalExportsMagicAndName) { @@ -159,32 +164,32 @@ TEST_F(InferenceEnginePluginInternalTest, pluginInternalEraseMagicAndNameWhenImp TEST(InferencePluginTests, throwsOnNullptrCreation) { InferenceEnginePluginPtr nulptr; InferencePlugin plugin; - ASSERT_THROW(plugin = InferencePlugin(nulptr), details::InferenceEngineException); + ASSERT_THROW(plugin = InferencePlugin(nulptr), Exception); } TEST(InferencePluginTests, throwsOnUninitializedGetVersion) { InferencePlugin plg; - ASSERT_THROW(plg.GetVersion(), details::InferenceEngineException); + ASSERT_THROW(plg.GetVersion(), Exception); } TEST(InferencePluginTests, throwsOnUninitializedLoadNetwork) { InferencePlugin plg; - ASSERT_THROW(plg.LoadNetwork(CNNNetwork(), {}), details::InferenceEngineException); + ASSERT_THROW(plg.LoadNetwork(CNNNetwork(), {}), Exception); } TEST(InferencePluginTests, throwsOnUninitializedImportNetwork) { InferencePlugin plg; - ASSERT_THROW(plg.ImportNetwork({}, {}), details::InferenceEngineException); + ASSERT_THROW(plg.ImportNetwork({}, {}), Exception); } TEST(InferencePluginTests, throwsOnUninitializedAddExtension) { InferencePlugin plg; - ASSERT_THROW(plg.AddExtension(IExtensionPtr()), details::InferenceEngineException); + ASSERT_THROW(plg.AddExtension(IExtensionPtr()), Exception); } TEST(InferencePluginTests, throwsOnUninitializedSetConfig) { InferencePlugin plg; - 
ASSERT_THROW(plg.SetConfig({{}}), details::InferenceEngineException); + ASSERT_THROW(plg.SetConfig({{}}), Exception); } TEST(InferencePluginTests, nothrowsUninitializedCast) { diff --git a/inference-engine/tests/unit/inference_engine/ie_blob_test.cpp b/inference-engine/tests/unit/inference_engine/ie_blob_test.cpp index 69520c1d4c5..ee5bf8d2893 100644 --- a/inference-engine/tests/unit/inference_engine/ie_blob_test.cpp +++ b/inference-engine/tests/unit/inference_engine/ie_blob_test.cpp @@ -28,14 +28,14 @@ protected: // Testing TBlob(const TensorDesc& tensorDesc, T* ptr, size_t data_size = 0) TEST_F(BlobTests, TBlobThrowsIfPtrForPreAllocatorIsNullPtr) { ASSERT_THROW(InferenceEngine::TBlob({InferenceEngine::Precision::FP32, {1}, InferenceEngine::C}, nullptr), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } // Testing TBlob(const TensorDesc& tensorDesc, const std::std::shared_ptr& alloc) TEST_F(BlobTests, TBlobThrowsIfAllocatorIsNullPtr) { ASSERT_THROW(InferenceEngine::TBlob( {InferenceEngine::Precision::FP32, {1}, InferenceEngine::C}, std::shared_ptr()), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } @@ -271,7 +271,7 @@ TEST_F(BlobTests, canMakeSharedBlob) { TEST_F(BlobTests, cannotCreateBlobWithIncorrectPrecision) { InferenceEngine::TensorDesc desc(InferenceEngine::Precision::FP16, {1, 3, 227, 227}, InferenceEngine::Layout::NCHW); - ASSERT_THROW(InferenceEngine::make_shared_blob(desc), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(InferenceEngine::make_shared_blob(desc), InferenceEngine::Exception); } TEST_F(BlobTests, canUseBlobInMoveSemantics) { @@ -396,7 +396,7 @@ TEST_F(BlobTests, makeRoiBlobWrongSize) { // try to create ROI blob with wrong size InferenceEngine::ROI roi = {0, 1, 1, 4, 4}; // cropped picture with: id = 0, (x,y) = (1,1), sizeX (W) = 4, sizeY (H) = 4 - ASSERT_THROW(make_shared_blob(blob, roi), InferenceEngine::details::InferenceEngineException); + 
ASSERT_THROW(make_shared_blob(blob, roi), InferenceEngine::Exception); } TEST_F(BlobTests, readRoiBlob) { diff --git a/inference-engine/tests/unit/inference_engine/ie_compound_blob_test.cpp b/inference-engine/tests/unit/inference_engine/ie_compound_blob_test.cpp index 2ffb7a0721e..d298f172239 100644 --- a/inference-engine/tests/unit/inference_engine/ie_compound_blob_test.cpp +++ b/inference-engine/tests/unit/inference_engine/ie_compound_blob_test.cpp @@ -139,7 +139,7 @@ TEST(BlobConversionTests, blobSharesOwnershipOnCast) { TEST_F(CompoundBlobTests, cannotCreateCompoundBlobFromNullptr) { Blob::Ptr valid = make_shared_blob(TensorDesc(Precision::U8, {1, 3, 4, 4}, NCHW)); EXPECT_THROW(make_shared_blob(std::vector({valid, nullptr})), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_F(CompoundBlobTests, canCreateEmptyCompoundBlob) { @@ -174,7 +174,7 @@ TEST_F(CompoundBlobTests, cannotCreateCompoundBlobFromCompoundBlob) { verifyCompoundBlob(_test_blob); EXPECT_THROW(make_shared_blob(std::vector({blob, _test_blob})), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_F(CompoundBlobTests, compoundBlobHoldsCorrectDataInCorrectOrder) { @@ -248,46 +248,46 @@ TEST_F(CompoundBlobTests, compoundBlobHoldsValidDataWhenUnderlyingBlobIsDestroye TEST_F(NV12BlobTests, cannotCreateNV12BlobFromNullptrBlobs) { Blob::Ptr valid = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 4, 4}, NHWC)); EXPECT_THROW(make_shared_blob(valid, nullptr), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); EXPECT_THROW(make_shared_blob(nullptr, valid), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_F(NV12BlobTests, cannotCreateNV12BlobFromCompoundBlobs) { Blob::Ptr blob = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 4, 4}, NHWC)); auto cblob = make_shared_blob(std::vector({blob})); EXPECT_THROW(make_shared_blob(cblob, blob), - 
InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); EXPECT_THROW(make_shared_blob(blob, cblob), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_F(NV12BlobTests, cannotCreateNV12BlobFromPlanesWithDifferentElementSize) { Blob::Ptr blob_u8 = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 4, 4}, NHWC)); Blob::Ptr blob_float = make_shared_blob(TensorDesc(Precision::FP32, {1, 2, 2, 2}, NHWC)); EXPECT_THROW(make_shared_blob(blob_u8, blob_float), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_F(NV12BlobTests, cannotCreateNV12BlobFromPlanesWithNonU8Precision) { Blob::Ptr float_y_blob = make_shared_blob(TensorDesc(Precision::FP32, {1, 1, 4, 4}, NHWC)); Blob::Ptr float_uv_blob = make_shared_blob(TensorDesc(Precision::FP32, {1, 2, 2, 2}, NHWC)); EXPECT_THROW(make_shared_blob(float_y_blob, float_uv_blob), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_F(NV12BlobTests, cannotCreateNV12BlobFromPlanesWithInconsistentBatchSize) { Blob::Ptr y = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 4, 4}, NHWC)); Blob::Ptr uv = make_shared_blob(TensorDesc(Precision::U8, {2, 2, 2, 2}, NHWC)); - EXPECT_THROW(make_shared_blob(y, uv), InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(make_shared_blob(y, uv), InferenceEngine::Exception); } TEST_F(NV12BlobTests, cannotCreateNV12BlobFromPlanesWithWrongChannelNumber) { Blob::Ptr y = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 4, 4}, NHWC)); Blob::Ptr uv = make_shared_blob(TensorDesc(Precision::U8, {1, 2, 2, 2}, NHWC)); - EXPECT_THROW(make_shared_blob(y, y), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(uv, uv), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(uv, y), InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(make_shared_blob(y, y), 
InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(uv, uv), InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(uv, y), InferenceEngine::Exception); } TEST_F(NV12BlobTests, cannotCreateNV12BlobFromPlanesWithWrongWidthRatio) { @@ -296,10 +296,10 @@ TEST_F(NV12BlobTests, cannotCreateNV12BlobFromPlanesWithWrongWidthRatio) { Blob::Ptr uv1 = make_shared_blob(TensorDesc(Precision::U8, {1, 2, 5, 3}, NHWC)); Blob::Ptr uv2 = make_shared_blob(TensorDesc(Precision::U8, {1, 2, 6, 3}, NHWC)); Blob::Ptr uv3 = make_shared_blob(TensorDesc(Precision::U8, {1, 2, 8, 3}, NHWC)); - EXPECT_THROW(make_shared_blob(y, uv0), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(y, uv1), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(y, uv2), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(y, uv3), InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(make_shared_blob(y, uv0), InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(y, uv1), InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(y, uv2), InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(y, uv3), InferenceEngine::Exception); } TEST_F(NV12BlobTests, cannotCreateNV12BlobFromPlanesWithWrongHeightRatio) { @@ -308,10 +308,10 @@ TEST_F(NV12BlobTests, cannotCreateNV12BlobFromPlanesWithWrongHeightRatio) { Blob::Ptr uv1 = make_shared_blob(TensorDesc(Precision::U8, {1, 2, 3, 5}, NHWC)); Blob::Ptr uv2 = make_shared_blob(TensorDesc(Precision::U8, {1, 2, 3, 6}, NHWC)); Blob::Ptr uv3 = make_shared_blob(TensorDesc(Precision::U8, {1, 2, 3, 8}, NHWC)); - EXPECT_THROW(make_shared_blob(y, uv0), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(y, uv1), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(y, uv2), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(y, uv3), 
InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(make_shared_blob(y, uv0), InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(y, uv1), InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(y, uv2), InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(y, uv3), InferenceEngine::Exception); } TEST_F(NV12BlobTests, canCreateNV12BlobFromTwoPlanes) { @@ -351,8 +351,8 @@ TEST_F(I420BlobTests, canCreateI420BlobFromThreeMovedPlanes) { TEST_F(I420BlobTests, cannotCreateI420BlobFromNullptrBlobs) { Blob::Ptr valid = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 4, 4}, NHWC)); - EXPECT_THROW(make_shared_blob(valid, nullptr, nullptr), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(nullptr, valid, nullptr), InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(make_shared_blob(valid, nullptr, nullptr), InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(nullptr, valid, nullptr), InferenceEngine::Exception); } TEST_F(I420BlobTests, cannotCreateI420BlobFromCompoundBlobs) { @@ -367,7 +367,7 @@ TEST_F(I420BlobTests, cannotCreateI420BlobFromCompoundBlobs) { auto c_y_blob = make_cblob(y_blob); auto c_u_blob = make_cblob(u_blob); auto c_v_blob = make_cblob(v_blob); - using ie_exception_t = InferenceEngine::details::InferenceEngineException; + using ie_exception_t = InferenceEngine::Exception; EXPECT_THROW(make_shared_blob(c_y_blob, u_blob, v_blob), ie_exception_t); EXPECT_THROW(make_shared_blob(y_blob, c_u_blob, v_blob), ie_exception_t); @@ -379,7 +379,7 @@ TEST_F(I420BlobTests, cannotCreateI420BlobFromPlanesWithDifferentElementSize) { Blob::Ptr u_blob_float = make_shared_blob(TensorDesc(Precision::FP32, {1, 1, 2, 2}, NHWC)); Blob::Ptr v_blob_float = make_shared_blob(TensorDesc(Precision::FP32, {1, 1, 2, 2}, NHWC)); - EXPECT_THROW(make_shared_blob(y_blob_u8, u_blob_float, v_blob_float), InferenceEngine::details::InferenceEngineException); + 
EXPECT_THROW(make_shared_blob(y_blob_u8, u_blob_float, v_blob_float), InferenceEngine::Exception); } TEST_F(I420BlobTests, cannotCreateI420BlobFromPlanesWithNonU8Precision) { @@ -387,23 +387,23 @@ TEST_F(I420BlobTests, cannotCreateI420BlobFromPlanesWithNonU8Precision) { Blob::Ptr u_blob_float = make_shared_blob(TensorDesc(Precision::FP32, {1, 1, 2, 2}, NHWC)); Blob::Ptr v_blob_float = make_shared_blob(TensorDesc(Precision::FP32, {1, 1, 2, 2}, NHWC)); - EXPECT_THROW(make_shared_blob(y_blob_float, u_blob_float, v_blob_float), InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(make_shared_blob(y_blob_float, u_blob_float, v_blob_float), InferenceEngine::Exception); } TEST_F(I420BlobTests, cannotCreateI420BlobFromPlanesWithInconsistentBatchSize) { Blob::Ptr y_blob = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 6, 8}, NHWC)); Blob::Ptr u_blob = make_shared_blob(TensorDesc(Precision::U8, {2, 1, 3, 4}, NHWC)); Blob::Ptr v_blob = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 3, 4}, NHWC)); - EXPECT_THROW(make_shared_blob(y_blob, u_blob, v_blob), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(y_blob, v_blob, u_blob), InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(make_shared_blob(y_blob, u_blob, v_blob), InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(y_blob, v_blob, u_blob), InferenceEngine::Exception); } TEST_F(I420BlobTests, cannotCreateI420BlobFromPlanesWithWrongChannelNumber) { Blob::Ptr y_blob = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 6, 8}, NHWC)); Blob::Ptr u_blob = make_shared_blob(TensorDesc(Precision::U8, {1, 2, 3, 4}, NHWC)); Blob::Ptr v_blob = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 3, 4}, NHWC)); - EXPECT_THROW(make_shared_blob(y_blob, u_blob, v_blob), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(y_blob, v_blob, u_blob), InferenceEngine::details::InferenceEngineException); + 
EXPECT_THROW(make_shared_blob(y_blob, u_blob, v_blob), InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(y_blob, v_blob, u_blob), InferenceEngine::Exception); } TEST_F(I420BlobTests, cannotCreateI420BlobFromPlanesWithWrongWidthRatio) { @@ -411,8 +411,8 @@ TEST_F(I420BlobTests, cannotCreateI420BlobFromPlanesWithWrongWidthRatio) { Blob::Ptr u_blob = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 3, 2}, NHWC)); Blob::Ptr v_blob = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 3, 4}, NHWC)); - EXPECT_THROW(make_shared_blob(y_blob, u_blob, v_blob), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(y_blob, v_blob, u_blob), InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(make_shared_blob(y_blob, u_blob, v_blob), InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(y_blob, v_blob, u_blob), InferenceEngine::Exception); } TEST_F(I420BlobTests, cannotCreateI420BlobFromPlanesWithWrongHeightRatio) { @@ -420,8 +420,8 @@ TEST_F(I420BlobTests, cannotCreateI420BlobFromPlanesWithWrongHeightRatio) { Blob::Ptr u_blob = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 2, 4}, NHWC)); Blob::Ptr v_blob = make_shared_blob(TensorDesc(Precision::U8, {1, 1, 3, 4}, NHWC)); - EXPECT_THROW(make_shared_blob(y_blob, u_blob, v_blob), InferenceEngine::details::InferenceEngineException); - EXPECT_THROW(make_shared_blob(y_blob, v_blob, u_blob), InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(make_shared_blob(y_blob, u_blob, v_blob), InferenceEngine::Exception); + EXPECT_THROW(make_shared_blob(y_blob, v_blob, u_blob), InferenceEngine::Exception); } diff --git a/inference-engine/tests/unit/inference_engine/ie_exception_test.cpp b/inference-engine/tests/unit/inference_engine/ie_exception_test.cpp index ec3416ef5c8..d87dbbce262 100644 --- a/inference-engine/tests/unit/inference_engine/ie_exception_test.cpp +++ b/inference-engine/tests/unit/inference_engine/ie_exception_test.cpp @@ -6,29 +6,24 @@ #include 
#include -#include "details/ie_exception.hpp" #include "ie_common.h" +#include "cpp_interfaces/exception2status.hpp" // TODO: cover and
from // tests/unit/inference_engine/exception_test.cpp -TEST(ExceptionTests, CopyConstructor) { - InferenceEngine::details::InferenceEngineException exception(__FILE__, __LINE__); - ASSERT_NO_THROW(InferenceEngine::details::InferenceEngineException {exception}); -} - TEST(ExceptionTests, CanThrowUsingMacro) { std::string message = "Exception message!"; - ASSERT_THROW(THROW_IE_EXCEPTION << message, InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(THROW_IE_EXCEPTION << message, InferenceEngine::Exception); } TEST(ExceptionTests, CanThrowScoringException) { - InferenceEngine::details::InferenceEngineException exception(__FILE__, __LINE__); - ASSERT_THROW(throw exception, InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception exception{""}; + ASSERT_THROW(throw exception, InferenceEngine::Exception); } TEST(ExceptionTests, CanDefineExceptionContent) { - InferenceEngine::details::InferenceEngineException exception(__FILE__, __LINE__); + InferenceEngine::Exception exception{""}; ASSERT_STREQ(exception.what(), ""); } @@ -41,8 +36,8 @@ TEST(ExceptionTests, ExceptionShowsCorrectMessageDebugVersion) { lineNum = __LINE__ + 1; THROW_IE_EXCEPTION << message; } - catch (InferenceEngine::details::InferenceEngineException &iex) { - std::string ref_message = message + "\n" + __FILE__ + ":" + std::to_string(lineNum); + catch (InferenceEngine::Exception &iex) { + std::string ref_message = std::string {"\n"} + __FILE__ + ":" + std::to_string(lineNum) + " " + message; ASSERT_STREQ(iex.what(), ref_message.c_str()); } } @@ -52,83 +47,23 @@ TEST(ExceptionTests, ExceptionShowsCorrectMessageReleaseVersion) { try { THROW_IE_EXCEPTION << message; } - catch (InferenceEngine::details::InferenceEngineException &iex) { + catch (InferenceEngine::Exception &iex) { std::string ref_message = message; ASSERT_STREQ(iex.what(), ref_message.c_str()); } } #endif -TEST(ExceptionTests, ExceptionCanBeCoughtAsStandard) { +TEST(ExceptionTests, 
ExceptionCanBeCaughtAsStandard) { ASSERT_THROW(THROW_IE_EXCEPTION, std::exception); } TEST(ExceptionTests, CanThrowStatusCode) { try { - THROW_IE_EXCEPTION << InferenceEngine::details::as_status << InferenceEngine::StatusCode::INFER_NOT_STARTED; + THROW_IE_EXCEPTION_WITH_STATUS(InferNotStarted); } - catch (const InferenceEngine::details::InferenceEngineException &iex) { - ASSERT_TRUE(iex.hasStatus()); - ASSERT_EQ(iex.getStatus(), InferenceEngine::StatusCode::INFER_NOT_STARTED); - } -} - -TEST(ExceptionTests, HandleOnlyFirstStatus) { - try { - THROW_IE_EXCEPTION << InferenceEngine::details::as_status << - InferenceEngine::StatusCode::NETWORK_NOT_LOADED << InferenceEngine::StatusCode::NOT_FOUND; - } - catch (const InferenceEngine::details::InferenceEngineException &iex) { - ASSERT_TRUE(iex.hasStatus()); - ASSERT_EQ(iex.getStatus(), InferenceEngine::StatusCode::NETWORK_NOT_LOADED); - } -} - -TEST(ExceptionTests, IgnoreNotStatusCodeEnumAfterManip) { - enum testEnum : int { - FIRST = 1 - }; - try { - THROW_IE_EXCEPTION << InferenceEngine::details::as_status << testEnum::FIRST; - } - catch (const InferenceEngine::details::InferenceEngineException &iex) { - ASSERT_FALSE(iex.hasStatus()); - } -} - -TEST(ExceptionTests, CanUseManipulatorStandalone) { - auto iex = InferenceEngine::details::InferenceEngineException("filename", 1); - as_status(iex); - try { - throw iex << InferenceEngine::StatusCode::NOT_IMPLEMENTED; - } catch (const InferenceEngine::details::InferenceEngineException &iex) { - ASSERT_TRUE(iex.hasStatus()); - ASSERT_EQ(iex.getStatus(), InferenceEngine::StatusCode::NOT_IMPLEMENTED); - } -} - -TEST(ExceptionTests, StatusCodeNotAppearInMessageAfterCatch) { - std::string message = "Exception message!"; - std::string strStatusCode = std::to_string(InferenceEngine::StatusCode::NETWORK_NOT_LOADED); - try { - THROW_IE_EXCEPTION << "" << message; - } - catch (const InferenceEngine::details::InferenceEngineException &iex) { - ASSERT_THAT(iex.what(), 
testing::HasSubstr(message)); - ASSERT_THAT(iex.what(), testing::Not(testing::HasSubstr(""))); - } -} - -TEST(ExceptionTests, StatusCodeAppearInMessageAfterCatch) { - std::string message = "Exception message!"; - std::string strStatusCode = std::to_string(InferenceEngine::StatusCode::NETWORK_NOT_LOADED); - try { - THROW_IE_EXCEPTION << "" << message; - } - catch (const InferenceEngine::details::InferenceEngineException &iex) { - ASSERT_THAT(iex.what(), testing::HasSubstr(message)); - ASSERT_THAT(iex.what(), testing::HasSubstr("")); + catch (const InferenceEngine::InferNotStarted& iex) { + ASSERT_EQ(InferenceEngine::ExceptionToStatus(iex), InferenceEngine::StatusCode::INFER_NOT_STARTED); } } @@ -142,10 +77,10 @@ TEST(ExceptionTests, ExceptionWithAssertThrowsNothingIfExpressionTrue) { } TEST(ExceptionTests, ExceptionWithAssertThrowsExceptionIfFalse) { - ASSERT_THROW(IE_ASSERT(false), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(IE_ASSERT(false), InferenceEngine::Exception); } TEST(ExceptionTests, ExceptionWithAssertThrowsExceptionIfFalseExpession) { - ASSERT_THROW(IE_ASSERT(0 == 1), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(IE_ASSERT(0 == 1), InferenceEngine::Exception); } #endif // NDEBUG diff --git a/inference-engine/tests/unit/inference_engine/ie_executable_network_test.cpp b/inference-engine/tests/unit/inference_engine/ie_executable_network_test.cpp index f5c85dac8fc..666d3a9a696 100644 --- a/inference-engine/tests/unit/inference_engine/ie_executable_network_test.cpp +++ b/inference-engine/tests/unit/inference_engine/ie_executable_network_test.cpp @@ -35,7 +35,7 @@ TEST(ExecutableNetworkConstructorTests, ThrowsIfConstructFromNullptr) { // TODO issue: 26390; ExecutableNetwork's constructor shouldn't be available EXPECT_NO_THROW(InferenceEngine::ExecutableNetwork exeNet{}); - EXPECT_THROW(InferenceEngine::ExecutableNetwork exeNet{nullptr}, InferenceEngine::details::InferenceEngineException); + 
EXPECT_THROW(InferenceEngine::ExecutableNetwork exeNet{nullptr}, InferenceEngine::Exception); } TEST(ExecutableNetworkConstructorTests, CanConstruct) { @@ -75,7 +75,7 @@ TEST_F(ExecutableNetworkTests, GetOutputsInfoThrowsIfReturnErr) { .Times(1) .WillOnce(Return(InferenceEngine::GENERAL_ERROR)); - ASSERT_THROW(exeNetwork->GetOutputsInfo(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exeNetwork->GetOutputsInfo(), InferenceEngine::Exception); } TEST_F(ExecutableNetworkTests, GetOutputsInfo) { @@ -93,7 +93,7 @@ TEST_F(ExecutableNetworkTests, GetInputsInfoThrowsIfReturnErr) { .Times(1) .WillOnce(Return(InferenceEngine::GENERAL_ERROR)); - ASSERT_THROW(exeNetwork->GetInputsInfo(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exeNetwork->GetInputsInfo(), InferenceEngine::Exception); } TEST_F(ExecutableNetworkTests, GetInputsInfo) { @@ -109,7 +109,7 @@ TEST_F(ExecutableNetworkTests, GetInputsInfo) { TEST_F(ExecutableNetworkTests, resetThrowsIfResetToNullptr) { InferenceEngine::IExecutableNetwork::Ptr mockIExeNet_p_2{}; - ASSERT_THROW(exeNetwork->reset(mockIExeNet_p_2), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exeNetwork->reset(mockIExeNet_p_2), InferenceEngine::Exception); } TEST_F(ExecutableNetworkTests, reset) { @@ -132,7 +132,7 @@ TEST_F(ExecutableNetworkTests, QueryStateThrowsIfReturnErr) { EXPECT_CALL(*mockIExeNet_p.get(), QueryState(_, _, _)) .Times(1) .WillOnce(Return(InferenceEngine::GENERAL_ERROR)); - EXPECT_THROW(exeNetwork->QueryState(), InferenceEngine::details::InferenceEngineException); + EXPECT_THROW(exeNetwork->QueryState(), InferenceEngine::Exception); } TEST_F(ExecutableNetworkTests, QueryStateIfReturnOutOfBounds) { @@ -181,13 +181,13 @@ TEST_F(ExecutableNetworkWithIInferReqTests, CanCreateInferRequest) { TEST_F(ExecutableNetworkWithIInferReqTests, CreateInferRequestThrowsIfReturnNotOK) { EXPECT_CALL(*mockIExeNet_p.get(), CreateInferRequest(_, 
_)).WillOnce(Return(InferenceEngine::GENERAL_ERROR)); - ASSERT_THROW(exeNetwork->CreateInferRequest(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exeNetwork->CreateInferRequest(), InferenceEngine::Exception); } TEST_F(ExecutableNetworkWithIInferReqTests, CreateInferRequestThrowsIfSetRequestToNullptr) { EXPECT_CALL(*mockIExeNet_p.get(), CreateInferRequest(_, _)) .WillOnce(DoAll(SetArgReferee<0>(nullptr), Return(InferenceEngine::OK))); - ASSERT_THROW(exeNetwork->CreateInferRequest(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exeNetwork->CreateInferRequest(), InferenceEngine::Exception); } // CreateInferRequestPtr @@ -201,13 +201,13 @@ TEST_F(ExecutableNetworkWithIInferReqTests, CanCreateInferRequestPtr) { TEST_F(ExecutableNetworkWithIInferReqTests, CreateInferRequestPtrThrowsIfReturnNotOK) { EXPECT_CALL(*mockIExeNet_p.get(), CreateInferRequest(_, _)).WillOnce(Return(InferenceEngine::GENERAL_ERROR)); - ASSERT_THROW(exeNetwork->CreateInferRequestPtr(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exeNetwork->CreateInferRequestPtr(), InferenceEngine::Exception); } TEST_F(ExecutableNetworkWithIInferReqTests, CreateInferRequestPtrThrowsIfSetRequestToNullptr) { EXPECT_CALL(*mockIExeNet_p.get(), CreateInferRequest(_, _)) .WillOnce(DoAll(SetArgReferee<0>(nullptr), Return(InferenceEngine::OK))); - ASSERT_THROW(exeNetwork->CreateInferRequestPtr(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(exeNetwork->CreateInferRequestPtr(), InferenceEngine::Exception); } IE_SUPPRESS_DEPRECATED_START diff --git a/inference-engine/tests/unit/inference_engine/ie_extension_test.cpp b/inference-engine/tests/unit/inference_engine/ie_extension_test.cpp index 9f001d3770c..c10b3b6bd6b 100644 --- a/inference-engine/tests/unit/inference_engine/ie_extension_test.cpp +++ b/inference-engine/tests/unit/inference_engine/ie_extension_test.cpp @@ -41,7 +41,7 @@ TEST(ExtensionTests, testGetImplTypes) { 
TEST(ExtensionTests, testGetImplTypesThrowsIfNgraphNodeIsNullPtr) { IExtensionPtr extension = std::make_shared(getExtensionPath()); ASSERT_THROW(extension->getImplTypes(std::shared_ptr ()), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST(ExtensionTests, testGetImplementation) { @@ -54,5 +54,5 @@ TEST(ExtensionTests, testGetImplementation) { TEST(ExtensionTests, testGetImplementationThrowsIfNgraphNodeIsNullPtr) { IExtensionPtr extension = std::make_shared(getExtensionPath()); ASSERT_THROW(extension->getImplementation(std::shared_ptr (), ""), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } diff --git a/inference-engine/tests/unit/inference_engine/ie_plugin_ptr.cpp b/inference-engine/tests/unit/inference_engine/ie_plugin_ptr.cpp index a6a5da2dd33..2181fe2817c 100644 --- a/inference-engine/tests/unit/inference_engine/ie_plugin_ptr.cpp +++ b/inference-engine/tests/unit/inference_engine/ie_plugin_ptr.cpp @@ -45,7 +45,7 @@ TEST_F(PluginTest, canCreatePluginUsingSmartPtr) { } TEST_F(PluginTest, shouldThrowExceptionIfPluginNotExist) { - EXPECT_THROW(InferenceEnginePluginPtr("unknown_plugin"), InferenceEngineException); + EXPECT_THROW(InferenceEnginePluginPtr("unknown_plugin"), Exception); } InferenceEnginePluginPtr PluginTest::getPtr() { diff --git a/inference-engine/tests_deprecated/behavior/vpu/myriad_tests/aot_behavior_tests.cpp b/inference-engine/tests_deprecated/behavior/vpu/myriad_tests/aot_behavior_tests.cpp index 0e2a458916a..3abd8131cb6 100644 --- a/inference-engine/tests_deprecated/behavior/vpu/myriad_tests/aot_behavior_tests.cpp +++ b/inference-engine/tests_deprecated/behavior/vpu/myriad_tests/aot_behavior_tests.cpp @@ -67,27 +67,15 @@ class AOTBehaviorTests : public BehaviorPluginTest { } void canImportBlob() { - ASSERT_EQ(StatusCode::OK, importBlob()) << response.msg; + ASSERT_NO_THROW(importBlob()) << response.msg; } void canNotImportBlob() { - ASSERT_NE(StatusCode::OK, 
importBlob()) << response.msg; + ASSERT_THROW(importBlob(), InferenceEngine::Exception) << response.msg; } - StatusCode importBlob() { - InferenceEngine::Core core; - ExecutableNetwork ret; - - try - { - ret = core.ImportNetwork("local_tmp.fw", GetParam().device); - } - catch (InferenceEngine::details::InferenceEngineException & ex) - { - return ex.getStatus(); - } - - return StatusCode::OK; + void importBlob() { + InferenceEngine::Core{}.ImportNetwork("local_tmp.fw", GetParam().device); } void setHeaderVersion(int major, int minor) { diff --git a/inference-engine/tests_deprecated/behavior/vpu/myriad_tests/vpu_watchdog_tests.cpp b/inference-engine/tests_deprecated/behavior/vpu/myriad_tests/vpu_watchdog_tests.cpp index b9b6057fd55..0f1efc5f6a4 100644 --- a/inference-engine/tests_deprecated/behavior/vpu/myriad_tests/vpu_watchdog_tests.cpp +++ b/inference-engine/tests_deprecated/behavior/vpu/myriad_tests/vpu_watchdog_tests.cpp @@ -14,7 +14,6 @@ #include "functional_test_utils/test_model/test_model.hpp" #include "helpers/myriad_devices.hpp" -#include
#include diff --git a/inference-engine/tests_deprecated/functional/ie_tests/src/classification_matcher.cpp b/inference-engine/tests_deprecated/functional/ie_tests/src/classification_matcher.cpp index f15abfa4159..aa657ba9712 100644 --- a/inference-engine/tests_deprecated/functional/ie_tests/src/classification_matcher.cpp +++ b/inference-engine/tests_deprecated/functional/ie_tests/src/classification_matcher.cpp @@ -274,7 +274,7 @@ void ClassificationMatcher::match_n(size_t top, int index) { saveResults(topClassesIndexes, probabilities, top); } - } catch (InferenceEngine::details::InferenceEngineException &e) { + } catch (InferenceEngine::Exception &e) { FAIL() << e.what(); } catch (std::exception &e) { FAIL() << e.what(); diff --git a/inference-engine/tests_deprecated/functional/ie_tests/src/raw_matcher.cpp b/inference-engine/tests_deprecated/functional/ie_tests/src/raw_matcher.cpp index cf9fe953ccd..3bb977207e6 100644 --- a/inference-engine/tests_deprecated/functional/ie_tests/src/raw_matcher.cpp +++ b/inference-engine/tests_deprecated/functional/ie_tests/src/raw_matcher.cpp @@ -209,7 +209,7 @@ void RawMatcher::match() { *config.perfInfoPtr = inferRequest.GetPerformanceCounts(); } } - } catch (details::InferenceEngineException &e) { + } catch (Exception &e) { FAIL() << e.what(); } catch (std::exception &e) { diff --git a/inference-engine/tests_deprecated/functional/mkldnn/extensions_tests/extensions_test.cpp b/inference-engine/tests_deprecated/functional/mkldnn/extensions_tests/extensions_test.cpp index 9f7ba002b03..f0a47d32b6f 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/extensions_tests/extensions_test.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/extensions_tests/extensions_test.cpp @@ -154,7 +154,7 @@ protected: score_engine.reset(); ASSERT_EQ(p.extension.use_count(), 2); - } catch (const InferenceEngine::details::InferenceEngineException& e) { + } catch (const InferenceEngine::Exception& e) { FAIL() << e.what(); } } @@ 
-178,7 +178,7 @@ protected: ASSERT_EQ(p.extension.use_count(), p.pluginName.find("Multi")==std::string::npos ? 3 : 4); score_engine1.reset(); ASSERT_EQ(p.extension.use_count(), 2); - } catch (const InferenceEngine::details::InferenceEngineException& e) { + } catch (const InferenceEngine::Exception& e) { FAIL() << e.what(); } } @@ -242,8 +242,8 @@ protected: Blob::Ptr weights; CNNNetwork cnnNet1 = ie.ReadNetwork(model, weights); ASSERT_NO_THROW(ie.LoadNetwork(cnnNet1, device)); - ASSERT_THROW(ie2.ReadNetwork(model, weights), details::InferenceEngineException); - } catch (const InferenceEngine::details::InferenceEngineException& e) { + ASSERT_THROW(ie2.ReadNetwork(model, weights), InferenceEngine::Exception); + } catch (const InferenceEngine::Exception& e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/network_tests/ngraph_network_test.cpp b/inference-engine/tests_deprecated/functional/mkldnn/network_tests/ngraph_network_test.cpp index 372de68f9c4..750aa35c3e8 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/network_tests/ngraph_network_test.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/network_tests/ngraph_network_test.cpp @@ -107,7 +107,7 @@ protected: for (size_t i = 0; i < v7blb->size(); i++) { ASSERT_EQ(v7data[i], v5data[i]); } - } catch (const InferenceEngine::details::InferenceEngineException& e) { + } catch (const InferenceEngine::Exception& e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/shared_tests_instance/io_blob_tests/cropResize_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/shared_tests_instance/io_blob_tests/cropResize_tests.cpp index 4b0aea1dbf5..0ba3e4ae733 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/shared_tests_instance/io_blob_tests/cropResize_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/shared_tests_instance/io_blob_tests/cropResize_tests.cpp @@ -203,7 +203,7 @@ 
TEST_F(IEPreprocessTest, smoke_NetworkInputSmallSize) { EXPECT_NO_THROW(preprocess->execute(out_blob, info, false)); } else { EXPECT_THROW(preprocess->execute(out_blob, info, false), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/argmax_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/argmax_tests.cpp index 14e39e6c656..0c009d00a14 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/argmax_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/argmax_tests.cpp @@ -178,7 +178,7 @@ protected: compare(*outputBlobs.begin()->second, dst_ref); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/concat_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/concat_tests.cpp index 1ac3400eb0c..8fe62bfdf9c 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/concat_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/concat_tests.cpp @@ -223,7 +223,7 @@ protected: } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/conv_int8_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/conv_int8_tests.cpp index 050f223ba68..c09a3936398 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/conv_int8_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/conv_int8_tests.cpp @@ -219,7 +219,7 @@ protected: calculateRef(weights, p, src, dst_ref); compare(*dst, 
*dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/conv_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/conv_tests.cpp index 35739686558..f579c92053d 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/conv_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/conv_tests.cpp @@ -239,7 +239,7 @@ protected: CNNNetwork network = getNetwork(weights, p); infer(network, p, src, dst); compare(*dst, *dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -300,7 +300,7 @@ protected: dst_ref->allocate(); calculateRef(weights, p, src, dst_ref); compare(*dst, *dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/conv_tests_int8.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/conv_tests_int8.cpp index 6cfe6146071..7d388f7f490 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/conv_tests_int8.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/conv_tests_int8.cpp @@ -371,7 +371,7 @@ protected: // Comparing the result with the reference compare_NRMSD(*dst, dst_ref, 0.17); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/detectionout_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/detectionout_tests.cpp index 341bf4d2436..b360c6360ae 
100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/detectionout_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/detectionout_tests.cpp @@ -174,7 +174,7 @@ protected: inferRequest.SetOutput(outputBlobs); inferRequest.Infer(); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/fullycon_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/fullycon_tests.cpp index 632559c49af..8653a341622 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/fullycon_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/fullycon_tests.cpp @@ -157,7 +157,7 @@ protected: ref_innerproduct(*srcPtr, weights->readOnly().as(), weights->size() / sizeof(float), dst_ref, p); compare(*dst, dst_ref, 0.9f); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_batchnorm_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_batchnorm_tests.cpp index 986f501290f..44218585b2e 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_batchnorm_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_batchnorm_tests.cpp @@ -160,7 +160,7 @@ protected: ref_batchnorm4D(*srcPtr, (const float*) weights->buffer(), ((const float*) weights->buffer() + p.in.c), dst_ref, p); compare(*dst, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git 
a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_deconv_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_deconv_tests.cpp index 21ebfd25b6c..9d26aada050 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_deconv_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_deconv_tests.cpp @@ -189,7 +189,7 @@ protected: ref_deconv(src, weights, bias, dst_ref, p); compare(*dst.get(), *dst_ref.get()); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_logistic_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_logistic_tests.cpp index b1f81736063..af915f74a61 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_logistic_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_logistic_tests.cpp @@ -120,7 +120,7 @@ class smoke_CPULogisticOnlyTest : public TestsCommon, compare(*dst, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_power_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_power_tests.cpp index 45e82e789d8..b5f7673decf 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_power_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_power_tests.cpp @@ -129,7 +129,7 @@ protected: compare(*dst, dst_ref); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); 
} } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_roipooling_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_roipooling_tests.cpp index ff987e4945c..b6cf4f208e8 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_roipooling_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_roipooling_tests.cpp @@ -92,7 +92,7 @@ protected: InferenceEngine::Core ie; ASSERT_NO_THROW(ie.ReadNetwork(model, Blob::CPtr())); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_scaleshift_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_scaleshift_tests.cpp index 1e9f95f2996..05a3d6e4278 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_scaleshift_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_scaleshift_tests.cpp @@ -154,7 +154,7 @@ protected: compare(*dst, dst_ref); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_simplernms_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_simplernms_tests.cpp index dbe4dfe252d..54788538f1a 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_simplernms_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/mkldnn_simplernms_tests.cpp @@ -142,7 +142,7 @@ protected: inferRequest.SetBlob(outInfo.begin()->first, dst); inferRequest.Infer(); - } catch (const details::InferenceEngineException &e) { + } catch (const 
Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/norm_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/norm_tests.cpp index d1568c7932e..38554c0a6a5 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/norm_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/norm_tests.cpp @@ -157,7 +157,7 @@ protected: ref_norm(*srcPtr, dst_ref, p); compare(*dst, dst_ref); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/pooling_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/pooling_tests.cpp index e97bb9b07d3..2e51a63f6c9 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/pooling_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/pooling_tests.cpp @@ -159,7 +159,7 @@ protected: ref_pool(src, dst_ref, p); compare(*dst.get(), *dst_ref.get()); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/priorbox_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/priorbox_tests.cpp index b02069c144a..69c484a31f2 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/priorbox_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/priorbox_tests.cpp @@ -191,7 +191,7 @@ protected: for (int d = 0; d < p.out.h * p.out.w; ++d) { EXPECT_NEAR(dst_ptr[d], 0.1, eps); } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -353,7 +353,7 @@ protected: 
EXPECT_NEAR(dst_ptr[d], 0.1, 1e-6); } } - catch (const InferenceEngine::details::InferenceEngineException &e) { + catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/region_yolo_tests.cpp b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/region_yolo_tests.cpp index 24eef95effe..e1a700ed0a8 100644 --- a/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/region_yolo_tests.cpp +++ b/inference-engine/tests_deprecated/functional/mkldnn/single_layer_tests/region_yolo_tests.cpp @@ -216,7 +216,7 @@ protected: compare(*outputBlobs.begin()->second, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/shared_tests/input_tests/parser_tests.hpp b/inference-engine/tests_deprecated/functional/shared_tests/input_tests/parser_tests.hpp index d1594ad406c..862425593a6 100644 --- a/inference-engine/tests_deprecated/functional/shared_tests/input_tests/parser_tests.hpp +++ b/inference-engine/tests_deprecated/functional/shared_tests/input_tests/parser_tests.hpp @@ -122,7 +122,7 @@ TEST_F(IncorrectIRTests, smoke_loadIRWithIncorrectInput) { InferenceEngine::Core ie; ASSERT_THROW(ie.ReadNetwork(model, InferenceEngine::Blob::CPtr()), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } TEST_P(IncorrectIRTests, loadIncorrectLayer) { diff --git a/inference-engine/tests_deprecated/functional/shared_tests/io_blob_tests/cropResize_tests.hpp b/inference-engine/tests_deprecated/functional/shared_tests/io_blob_tests/cropResize_tests.hpp index f878731c337..8e9fdcc585d 100644 --- a/inference-engine/tests_deprecated/functional/shared_tests/io_blob_tests/cropResize_tests.hpp +++ 
b/inference-engine/tests_deprecated/functional/shared_tests/io_blob_tests/cropResize_tests.hpp @@ -26,7 +26,6 @@ #include #include "ie_parallel.hpp" -#include "details/ie_exception.hpp" using namespace ::testing; using namespace InferenceEngine; diff --git a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/bin_conv_tests.hpp b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/bin_conv_tests.hpp index 5c599e77f12..b9980a8433d 100644 --- a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/bin_conv_tests.hpp +++ b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/bin_conv_tests.hpp @@ -337,7 +337,7 @@ protected: infer(network, p, src, dst); compare(*dst, *dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/deformable_psroi_tests.hpp b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/deformable_psroi_tests.hpp index baa2da59024..62badbfb84c 100644 --- a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/deformable_psroi_tests.hpp +++ b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/deformable_psroi_tests.hpp @@ -316,7 +316,7 @@ protected: compare(*dsts_map.begin()->second, *dsts_vec[0]); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/one_hot_tests.hpp b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/one_hot_tests.hpp index f4842b19a3c..9f21bfb395e 100644 --- a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/one_hot_tests.hpp +++ 
b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/one_hot_tests.hpp @@ -183,7 +183,7 @@ protected: ref_one_hot_4d(*src, dst_ref, p); compare(*output, dst_ref); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/permute_tests.hpp b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/permute_tests.hpp index 1206787fe8d..f2738bcbf08 100644 --- a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/permute_tests.hpp +++ b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/permute_tests.hpp @@ -142,7 +142,7 @@ protected: ref_permute(*srcPtr, dst_ref, p.base); compare(*output, dst_ref); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/quantize_tests.hpp b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/quantize_tests.hpp index 91a5f0beabe..dca7b833090 100644 --- a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/quantize_tests.hpp +++ b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/quantize_tests.hpp @@ -296,7 +296,7 @@ protected: compare(*out_vec[0], *dsts_vec[0]); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/reduce_tests.hpp b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/reduce_tests.hpp index 8b67fb7b5a0..d3f444bef75 100644 --- a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/reduce_tests.hpp +++ 
b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/reduce_tests.hpp @@ -393,7 +393,7 @@ protected: inferRequest.Infer(); compare(*output, dst_ref); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/resample_tests.hpp b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/resample_tests.hpp index fcbfcdd5d4d..c81b02e72f7 100644 --- a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/resample_tests.hpp +++ b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/resample_tests.hpp @@ -260,7 +260,7 @@ protected: ref_resample(*srcPtr, dst_ref, p); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/ti_tests.hpp b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/ti_tests.hpp index 2498281bd78..f2770e0b45c 100644 --- a/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/ti_tests.hpp +++ b/inference-engine/tests_deprecated/functional/shared_tests/single_layer_tests/ti_tests.hpp @@ -169,7 +169,7 @@ protected: setValuesInBlob(req.GetBlob("in2"), 1.0f); req.Infer(); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -321,7 +321,7 @@ protected: setValuesInBlob(req.GetBlob("in1"), 1.0f); req.Infer(); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/functional/vpu/common/layers/myriad_layers_concat_test.hpp 
b/inference-engine/tests_deprecated/functional/vpu/common/layers/myriad_layers_concat_test.hpp index 01d2b314c29..bed70c872b0 100644 --- a/inference-engine/tests_deprecated/functional/vpu/common/layers/myriad_layers_concat_test.hpp +++ b/inference-engine/tests_deprecated/functional/vpu/common/layers/myriad_layers_concat_test.hpp @@ -151,20 +151,6 @@ static std::vector s_concatInputs = { {{1,}, {1, 2, 4}, {1, 2, 3, 4, 5}, {2, 4}} }; -template -std::ostream &operator << (std::ostream & os, const std::vector & vector_of_elements) { - os <<"{"; - int idx=0; - for(const auto & element : vector_of_elements) { - os << element; - if(++idx != vector_of_elements.size()) { - os<<","; - } - } - os <<"}"; - return os; -} - //function is returning correct name to gtest std::string getTestCaseName(testing::TestParamInfo param) { auto core = std::get<0>(param.param); diff --git a/inference-engine/tests_deprecated/functional/vpu/myriad_tests/myriad_configs_tests.cpp b/inference-engine/tests_deprecated/functional/vpu/myriad_tests/myriad_configs_tests.cpp index ec4259dbb0a..df160200b95 100644 --- a/inference-engine/tests_deprecated/functional/vpu/myriad_tests/myriad_configs_tests.cpp +++ b/inference-engine/tests_deprecated/functional/vpu/myriad_tests/myriad_configs_tests.cpp @@ -51,7 +51,7 @@ TEST_P(myriadCorrectModelsConfigsTests_nightly, CreateInferRequestWithUnavailabl InferenceEngine::InferRequest request; ASSERT_THROW(request = executable.CreateInferRequest(), - InferenceEngine::details::InferenceEngineException); + InferenceEngine::Exception); } //------------------------------------------------------------------------------ @@ -64,7 +64,7 @@ TEST_P(myriadIncorrectModelsConfigsTests_nightly, LoadNetworkWithIncorrectConfig InferenceEngine::CNNNetwork net(ngraph::builder::subgraph::makeSplitConvConcat()); InferenceEngine::ExecutableNetwork executable; ASSERT_THROW(executable = _vpuPluginPtr->LoadNetwork(net, config), - InferenceEngine::details::InferenceEngineException); + 
InferenceEngine::Exception); } //------------------------------------------------------------------------------ diff --git a/inference-engine/tests_deprecated/helpers/tests_file_utils.cpp b/inference-engine/tests_deprecated/helpers/tests_file_utils.cpp index e568bcfb6bd..d35556dec04 100644 --- a/inference-engine/tests_deprecated/helpers/tests_file_utils.cpp +++ b/inference-engine/tests_deprecated/helpers/tests_file_utils.cpp @@ -3,7 +3,6 @@ // #include -#include "details/ie_exception.hpp" #include #include diff --git a/inference-engine/tests_deprecated/unit/engines/gna/gna_aminteldnn_test.cpp b/inference-engine/tests_deprecated/unit/engines/gna/gna_aminteldnn_test.cpp index 4cf9cb40b87..08a693ead0a 100644 --- a/inference-engine/tests_deprecated/unit/engines/gna/gna_aminteldnn_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/gna/gna_aminteldnn_test.cpp @@ -32,16 +32,16 @@ TEST_F(GNA_AmIntelDnn_test, intel_nnet_type_tSecondInitNotAllowed) { // First init is ok ASSERT_NO_THROW(amIntelDnn.InitGNAStruct(&desc)); // Second init would cause memory leak, so it's prohibited - ASSERT_THROW(amIntelDnn.InitGNAStruct(&desc), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(amIntelDnn.InitGNAStruct(&desc), InferenceEngine::Exception); amIntelDnn.DestroyGNAStruct(&desc); } TEST_F(GNA_AmIntelDnn_test, intel_nnet_type_t_ptrIsNullptr) { - ASSERT_THROW(amIntelDnn.InitGNAStruct(nullptr), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(amIntelDnn.InitGNAStruct(nullptr), InferenceEngine::Exception); } TEST_F(GNA_AmIntelDnn_test, intel_nnet_type_t_pLayersIsNotNullptr) { - ASSERT_THROW(amIntelDnn.InitGNAStruct(&desc), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(amIntelDnn.InitGNAStruct(&desc), InferenceEngine::Exception); } TEST_F(GNA_AmIntelDnn_test, ComponentIsEmpty) { @@ -50,5 +50,5 @@ TEST_F(GNA_AmIntelDnn_test, ComponentIsEmpty) { #else desc.pLayers = nullptr; #endif - 
ASSERT_THROW(amIntelDnn.InitGNAStruct(&desc), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(amIntelDnn.InitGNAStruct(&desc), InferenceEngine::Exception); } diff --git a/inference-engine/tests_deprecated/unit/engines/gna/gna_cppwraper_test.cpp b/inference-engine/tests_deprecated/unit/engines/gna/gna_cppwraper_test.cpp index ebd306e69e2..7b2aaa3301d 100644 --- a/inference-engine/tests_deprecated/unit/engines/gna/gna_cppwraper_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/gna/gna_cppwraper_test.cpp @@ -13,9 +13,9 @@ class GNA_CPPWrapper_test : public ::testing::Test {}; TEST_F(GNA_CPPWrapper_test, CPPWrapperConstructorCannotWorkWithInputEqualToZero) { #if GNA_LIB_VER == 2 - ASSERT_THROW(GNAPluginNS::CPPWrapper(0), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(GNAPluginNS::CPPWrapper(0), InferenceEngine::Exception); #else - ASSERT_THROW(GNAPluginNS::CPPWrapper(0), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(GNAPluginNS::CPPWrapper(0), InferenceEngine::Exception); #endif } diff --git a/inference-engine/tests_deprecated/unit/engines/gna/layers/gna_squeeze_test.cpp b/inference-engine/tests_deprecated/unit/engines/gna/layers/gna_squeeze_test.cpp index fad0c91bdde..c655a7a1842 100644 --- a/inference-engine/tests_deprecated/unit/engines/gna/layers/gna_squeeze_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/gna/layers/gna_squeeze_test.cpp @@ -19,16 +19,6 @@ typedef struct { using SqueezeTestParam = std::tuple; -template -inline std::ostream& operator<<(std::ostream& out, const std::vector& vec) { - if (vec.empty()) return std::operator<<(out, "[]"); - out << "[" << vec[0]; - for (unsigned i = 1; i < vec.size(); i++) { - out << " " << vec[i]; - } - return out << "]"; -} - class GNASqueezeTest_ : public GNATest<>, public testing::WithParamInterface { public: diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/dump_test.cpp 
b/inference-engine/tests_deprecated/unit/engines/mkldnn/dump_test.cpp index b2d30642af0..3b7344015df 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/dump_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/dump_test.cpp @@ -19,7 +19,7 @@ TEST(MKLDNNDumpTests, UnallocatedBlob_NoDump) { EXPECT_THROW({ BlobDumper(blob).dump(buff); - }, details::InferenceEngineException); + }, Exception); } TEST(MKLDNNDumpTests, EmptyBlob_NoDump) { @@ -30,7 +30,7 @@ TEST(MKLDNNDumpTests, EmptyBlob_NoDump) { EXPECT_THROW({ BlobDumper(blob).dump(buff); - }, details::InferenceEngineException); + }, Exception); } TEST(MKLDNNDumpTests, Ser) { diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/broadcast_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/broadcast_tests.cpp index f4c20b088ef..822f184d0cc 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/broadcast_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/broadcast_tests.cpp @@ -248,7 +248,7 @@ protected: return; } } - catch (const InferenceEngine::details::InferenceEngineException &e) { + catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/bucketize_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/bucketize_tests.cpp index 3f0a9a9f6c8..d6cd4724083 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/bucketize_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/bucketize_tests.cpp @@ -159,7 +159,7 @@ protected: auto iter = out.begin(); compare_int(*output_blob_map[iter->first], *output_blob_ref, 0); } - catch (const InferenceEngine::details::InferenceEngineException &e) { + catch (const InferenceEngine::Exception 
&e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/fake_layer.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/fake_layer.cpp index 5391545cf99..680bb3b8457 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/fake_layer.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/fake_layer.cpp @@ -76,7 +76,7 @@ public: explicit FakeLayerPLNImpl(const CNNLayer* layer) { try { addConfig(layer, {{ConfLayout::PLN, false, 0}}, {{ConfLayout::PLN, false, 0}}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } @@ -97,7 +97,7 @@ public: auto blk_layout = ConfLayout::BLK8; #endif addConfig(layer, {{blk_layout, false, 0}}, {{blk_layout, false, 0}}); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/fill_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/fill_tests.cpp index 944f724251c..ff05e3dc72e 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/fill_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/fill_tests.cpp @@ -172,7 +172,7 @@ protected: } else { return; } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/gather_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/gather_tests.cpp index be2407affe5..f9aeeee882a 100644 --- 
a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/gather_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/gather_tests.cpp @@ -280,7 +280,7 @@ protected: // Infer graph.Infer(srcs, outputBlobs); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -449,7 +449,7 @@ protected: // Check results if (memcmp((*output).data(), &p.ref[0], output->byteSize()) != 0) FAIL() << "Wrong result with compare TF reference!"; - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -668,7 +668,7 @@ protected: if (memcmp(&((float*)(*output).data())[12], &p.ref[8], 8 * sizeof(float)) != 0) FAIL() << "Wrong result with compare TF reference!"; } - catch (const InferenceEngine::details::InferenceEngineException &e) { + catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/graph_generic_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/graph_generic_test.cpp index d186d95967a..8ab245d6e29 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/graph_generic_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/graph_generic_test.cpp @@ -565,7 +565,7 @@ TEST_F(MKLDNNGraphGenericTests, DontCreateGPUGenericPrimitive) { ASSERT_NO_THROW(network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr())); MKLDNNGraphTestClass graph; - ASSERT_THROW(graph.CreateGraph(network, extMgr), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(graph.CreateGraph(network, extMgr), InferenceEngine::Exception); } TEST_F(MKLDNNGraphGenericTests, ExecuteConstGenericPrimitive) { diff --git 
a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/log_softmax_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/log_softmax_tests.cpp index 4a103227b7a..7ac42a6ed0a 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/log_softmax_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/log_softmax_tests.cpp @@ -246,7 +246,7 @@ protected: // Infer graph.Infer(srcs, outputBlobs); compare(*output, dst_ref, 0.00001f); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/math_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/math_tests.cpp index 6e7eb38e02e..fd753872780 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/math_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/math_tests.cpp @@ -275,7 +275,7 @@ protected: graph.Infer(srcs, outputBlobs); float threshold = p.math_function == "Erf" ? 
0.0001f : 0.00001f; compare(*output, dst_ref, threshold); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/mvn_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/mvn_tests.cpp index e5b88586e03..d23e12250d8 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/mvn_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/mvn_tests.cpp @@ -328,7 +328,7 @@ protected: dst_ref.allocate(); ref_mvn(*srcPtr, dst_ref, p); compare(*output, dst_ref, 0.0001f); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } @@ -386,7 +386,7 @@ public: try { is_blocked = layer->GetParamAsBool("is_blocked"); addConfig(layer); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } @@ -601,7 +601,7 @@ protected: dst_ref.allocate(); ref_mvn(*srcPtr, dst_ref, p); compare(*output, dst_ref, 0.0001f); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/non_max_suppression_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/non_max_suppression_tests.cpp index 063033cca71..8096e9937a5 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/non_max_suppression_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/non_max_suppression_tests.cpp @@ -511,7 +511,7 @@ protected: if (memcmp((*output).data(), &p.ref[0], output->byteSize()) != 0) FAIL() << "Wrong result with compare TF reference!"; } - } 
catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/normalize_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/normalize_tests.cpp index a5471f0ccf2..76add13572a 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/normalize_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/normalize_tests.cpp @@ -283,7 +283,7 @@ protected: ref_normalize(*srcPtr, dst_ref, p, weights->readOnly().as()); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -338,7 +338,7 @@ public: try { is_blocked = layer->GetParamAsBool("is_blocked"); addConfig(layer); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } @@ -571,7 +571,7 @@ protected: ref_normalize(*srcPtr, dst_ref, p, weights->readOnly().as()); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/onehot_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/onehot_tests.cpp index 0fcccc6ea03..fd98d6faa35 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/onehot_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/onehot_tests.cpp @@ -108,7 +108,7 @@ protected: InferenceEngine::CNNNetwork network; try { network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr()); - } catch 
(InferenceEngine::details::InferenceEngineException &e) { + } catch (InferenceEngine::Exception &e) { FAIL() << e.what(); } catch (std::exception &e) { FAIL() << e.what(); @@ -148,7 +148,7 @@ protected: // Infer graph.Infer(srcs, outputBlobs); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -247,7 +247,7 @@ protected: InferenceEngine::CNNNetwork network; try { network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr()); - } catch (InferenceEngine::details::InferenceEngineException &e) { + } catch (InferenceEngine::Exception &e) { FAIL() << e.what(); } catch (std::exception &e) { FAIL() << e.what(); @@ -292,7 +292,7 @@ protected: // Infer graph.Infer(srcs, outputBlobs); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -399,7 +399,7 @@ protected: InferenceEngine::CNNNetwork network; try { network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr()); - } catch (InferenceEngine::details::InferenceEngineException &e) { + } catch (InferenceEngine::Exception &e) { FAIL() << e.what(); } catch (std::exception &e) { FAIL() << e.what(); @@ -444,7 +444,7 @@ protected: // Infer graph.Infer(srcs, outputBlobs); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -557,7 +557,7 @@ protected: InferenceEngine::CNNNetwork network; try { network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr()); - } catch (InferenceEngine::details::InferenceEngineException &e) { + } catch (InferenceEngine::Exception &e) { FAIL() << e.what(); } catch (std::exception &e) { FAIL() << e.what(); @@ -604,7 +604,7 @@ protected: // Infer graph.Infer(srcs, outputBlobs); compare(*output, dst_ref); - } catch (const 
InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -729,7 +729,7 @@ protected: InferenceEngine::CNNNetwork network; try { network = core.ReadNetwork(model, InferenceEngine::Blob::CPtr()); - } catch (InferenceEngine::details::InferenceEngineException &e) { + } catch (InferenceEngine::Exception &e) { FAIL() << e.what(); } catch (std::exception &e) { FAIL() << e.what(); @@ -777,7 +777,7 @@ protected: // Infer graph.Infer(srcs, outputBlobs); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/range_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/range_tests.cpp index 8faef838f06..f2c8b9c3c0e 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/range_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/range_tests.cpp @@ -224,7 +224,7 @@ protected: } else { return; } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/reduce_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/reduce_tests.cpp index 121294e42ab..f3d790dfd55 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/reduce_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/reduce_tests.cpp @@ -429,7 +429,7 @@ protected: FAIL() << "Wrong out_shape dimensions!"; } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << 
e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/reverse_sequence_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/reverse_sequence_tests.cpp index 4219168c096..ca47e81309b 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/reverse_sequence_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/reverse_sequence_tests.cpp @@ -222,7 +222,7 @@ protected: // Infer graph.Infer(srcs, outputBlobs); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/scatter_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/scatter_tests.cpp index 5a5413853a8..33beefe5ae9 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/scatter_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/scatter_tests.cpp @@ -177,7 +177,7 @@ protected: // Check results if (memcmp((*output).data(), &p.reference[0], output->byteSize()) != 0) FAIL() << "Wrong result with compare TF reference!"; - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/select_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/select_tests.cpp index 16638f05d45..00d0232c423 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/select_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/select_tests.cpp @@ -257,7 +257,7 @@ protected: // 
Infer graph.Infer(srcs, outputBlobs); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/shuffle_channels_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/shuffle_channels_tests.cpp index 16a38d61ef6..1476b869056 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/shuffle_channels_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/shuffle_channels_tests.cpp @@ -170,7 +170,7 @@ protected: // Infer graph.Infer(srcs, outputBlobs); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_fill_empty_rows_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_fill_empty_rows_tests.cpp index 33400465e0b..73e5682a102 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_fill_empty_rows_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_fill_empty_rows_tests.cpp @@ -398,7 +398,7 @@ protected: compare(*output_values, output_values_ref, 0.0f); compare(*output_empty_rows_indicator, output_empty_rows_indicator_ref, 0.0f); } - catch (const InferenceEngine::details::InferenceEngineException &e) { + catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_segment_reduce_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_segment_reduce_tests.cpp index 
b545bd79064..249f904c6d6 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_segment_reduce_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_segment_reduce_tests.cpp @@ -154,7 +154,7 @@ protected: auto iter = out.begin(); compare(*output_blob_map[iter->first], *output_ref, 0.0f); } - catch (const InferenceEngine::details::InferenceEngineException &e) { + catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_to_dense_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_to_dense_tests.cpp index 755d28d33f3..f637099dc2e 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_to_dense_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_to_dense_tests.cpp @@ -175,7 +175,7 @@ protected: auto iter = out.begin(); compare_int(*output_blob_map[iter->first], *output_blob_ref, 0); } - catch (const InferenceEngine::details::InferenceEngineException &e) { + catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_weighted_reduce_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_weighted_reduce_tests.cpp index 9516b9d6200..dfea3c6af4d 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_weighted_reduce_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/sparse_weighted_reduce_tests.cpp @@ -202,7 +202,7 @@ protected: auto iter = out.begin(); compare(*output_blob_map[iter->first], *output_blob_ref, 0.0f); } - catch (const InferenceEngine::details::InferenceEngineException &e) { + catch (const 
InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/strided_slice_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/strided_slice_tests.cpp index db7ec4b9741..20ddd5f28a7 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/strided_slice_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/strided_slice_tests.cpp @@ -409,7 +409,7 @@ protected: // Infer graph.Infer(srcs, outputBlobs); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/topk_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/topk_tests.cpp index 2e89f20de76..84acdd75d18 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/topk_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/topk_tests.cpp @@ -293,7 +293,7 @@ protected: if (p.reference_idx.data()[i] != (*output1).data()[i]) FAIL() << "The difference between res_idx[i] and reference_idx[i]"; } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -484,7 +484,7 @@ protected: } } } - catch (const InferenceEngine::details::InferenceEngineException &e) { + catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/unique_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/unique_tests.cpp index 962515cf636..d5a4783426c 100644 --- 
a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/unique_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/unique_tests.cpp @@ -165,7 +165,7 @@ protected: compare(*output_blob_map[iter->first], *output_counts_blob_ref, 0.0f); } } - catch (const InferenceEngine::details::InferenceEngineException &e) { + catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_activation_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_activation_test.cpp index 9092bb4ce52..782a472ab99 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_activation_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_activation_test.cpp @@ -270,7 +270,7 @@ protected: ref_activation(*srcPtr, dst_ref, p); compare(*output, dst_ref, 0.0005f); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -378,7 +378,7 @@ protected: graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkActivation); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkActivation); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_scaleshift_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_scaleshift_test.cpp index 169e5c97ceb..e76ee151eba 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_scaleshift_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_scaleshift_test.cpp @@ 
-237,7 +237,7 @@ protected: ref_batchnorm4DWithScale(*srcPtr, (const float*) weights->buffer(), ((const float*) weights->buffer() + p.in.c), (const float*) weights->buffer() + p.in.c*2, dst_ref, p.epsilon); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -326,7 +326,7 @@ protected: graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkScaleShift); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkScaleShift); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_test.cpp index aa86f98ce2d..67f25e75be4 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_batchnorm_test.cpp @@ -209,7 +209,7 @@ protected: ref_batchnorm4D(*srcPtr, (const float*) weights->buffer(), ((const float*) weights->buffer() + p.in.c), dst_ref, p); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -294,7 +294,7 @@ protected: graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkScaleShift); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkScaleShift); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_concat_test.cpp 
b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_concat_test.cpp index f88a74bd77d..bec0de481aa 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_concat_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_concat_test.cpp @@ -197,7 +197,7 @@ protected: index2++; index++; } } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -379,7 +379,7 @@ protected: graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkConcat, checkType); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkConcat, checkType); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -791,7 +791,7 @@ protected: } } } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -1021,7 +1021,7 @@ protected: index2++; index++; } } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_conv_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_conv_test.cpp index 020f4999688..f950fdba252 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_conv_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_conv_test.cpp @@ -332,7 +332,7 @@ protected: dst_ref.allocate(); ref_conv(*srcPtr, (const float *)weights->buffer(), weights->size() / sizeof(float), dst_ref, p); compare(*output, dst_ref, 0.0002f); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << 
e.what(); } } @@ -481,7 +481,7 @@ protected: graph.checkDynBatch(srcs, outputBlobs, dims[0], dims[0], checkConvolution, MKLDNNGraphTestClass::CheckDynBatchType::Child); graph.checkDynBatch(srcs, outputBlobs, 1, dims[0], checkConvolution, MKLDNNGraphTestClass::CheckDynBatchType::Child); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_crop_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_crop_test.cpp index 60d6b1b02cd..0d7a6116e8d 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_crop_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_crop_test.cpp @@ -210,7 +210,7 @@ protected: ref_crop(*srcPtr, dst_ref, p); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -294,7 +294,7 @@ protected: graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkCrop); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkCrop); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_deconv_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_deconv_test.cpp index 96a9ebf322f..d303b6544f2 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_deconv_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_deconv_test.cpp @@ -343,7 +343,7 @@ protected: ref_deconv(*srcPtr, weights, bias, dst_ref, p); compare(*output, dst_ref, 0.0002f); - } catch (const 
InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -526,7 +526,7 @@ protected: graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkDeconvolution, MKLDNNGraphTestClass::CheckDynBatchType::Child); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkDeconvolution, MKLDNNGraphTestClass::CheckDynBatchType::Child); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_depthwise_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_depthwise_test.cpp index 938017c24db..18048f9d298 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_depthwise_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_depthwise_test.cpp @@ -252,7 +252,7 @@ protected: ref_depthwise(*srcPtr, weights->readOnly().as(), weights->size() / sizeof(float), dst_ref, p); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -417,7 +417,7 @@ protected: graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkDepthwise); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkDepthwise); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_eltwise_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_eltwise_test.cpp index d96d4490e38..94e9ef562de 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_eltwise_test.cpp +++ 
b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_eltwise_test.cpp @@ -361,7 +361,7 @@ protected: actual_reorder_nodes ++; } ASSERT_EQ(actual_reorder_nodes, p.num_reorder_nodes); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_fullyconnected_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_fullyconnected_test.cpp index 00d65d60124..167e33ff896 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_fullyconnected_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_fullyconnected_test.cpp @@ -221,7 +221,7 @@ protected: ref_innerproduct(*srcPtr, (const float *)weights->buffer(), weights->size() / sizeof(float), dst_ref, p); compare(*output, dst_ref, 0.9f); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -317,7 +317,7 @@ class MKLDNNGraphDynBatchFullyConnectedTests: public MKLDNNGraphFullyConnectedTe graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkFC); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkFC); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_gemm_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_gemm_test.cpp index 9d2a5492e38..01951bdb868 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_gemm_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_gemm_test.cpp @@ -283,7 +283,7 @@ 
protected: ref_gemm(src_vec, dst_ref, p); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -459,7 +459,7 @@ protected: graph.checkDynBatch(srcs, outputBlobs, MB, MB, check); graph.checkDynBatch(srcs, outputBlobs, 1, MB, check); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -635,7 +635,7 @@ protected: ref_gemm(src_vec, dst_ref, p); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_input_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_input_test.cpp index 277ea5fd288..7817f050888 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_input_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_input_test.cpp @@ -155,7 +155,7 @@ protected: ASSERT_EQ(p.selectedType, nodes[i]->getSelectedPrimitiveDescriptor()->getImplementationType()); } } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -349,7 +349,7 @@ protected: index2++; index++; } } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -454,7 +454,7 @@ protected: if (memcmp((*output).data(), &p.reference[0], output->byteSize()) != 0) FAIL() << "Wrong result with compare reference!"; } - catch (const InferenceEngine::details::InferenceEngineException &e) { + catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git 
a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_lrn_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_lrn_test.cpp index f946eb67b07..e447dcd28da 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_lrn_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_lrn_test.cpp @@ -192,7 +192,7 @@ protected: ref_lrn(*srcPtr, dst_ref, p); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -285,7 +285,7 @@ protected: }; graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkLRN); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkLRN); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_permute_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_permute_test.cpp index 9e08bc0c8ba..0a5fb1ebed7 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_permute_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_permute_test.cpp @@ -46,7 +46,7 @@ public: block_dims = layer->GetParamAsInts("block_dims"); order = layer->GetParamAsInts("order"); addConfig(layer); - } catch (InferenceEngine::details::InferenceEngineException &ex) { + } catch (InferenceEngine::Exception &ex) { errorMsg = ex.what(); } } @@ -308,7 +308,7 @@ protected: ref_permute(*srcPtr, dst_ref, p); compare(*output, dst_ref); - } catch (const details::InferenceEngineException &e) { + } catch (const Exception &e) { FAIL() << e.what(); } } @@ -598,7 +598,7 @@ protected: }; graph.checkDynBatch(srcs, outputBlobs, MB, MB, 
checkPermute); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkPermute); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_pooling_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_pooling_test.cpp index e94592e6c63..4db7516b466 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_pooling_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_pooling_test.cpp @@ -347,7 +347,7 @@ protected: ref_pool(*srcPtr, dst_ref, p); compare(*output, dst_ref, 0.0001f); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -486,7 +486,7 @@ protected: }; graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkPooling); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkPooling); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_power_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_power_test.cpp index 8c27ca1a3b8..ae9ee61f12c 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_power_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_power_test.cpp @@ -158,7 +158,7 @@ protected: ref_power(*srcPtr, dst_ref, p); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -300,7 +300,7 @@ protected: }; graph.checkDynBatch(srcs, outputBlobs, 
MB, MB, checkPower); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkPower); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_relu_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_relu_test.cpp index 4fcea93cc4a..12efbcc655c 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_relu_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_relu_test.cpp @@ -195,7 +195,7 @@ protected: ref_relu(*srcPtr, dst_ref, p); compare(*output, dst_ref, 0.0005f); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_reorder_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_reorder_test.cpp index 1aaf08537a0..365e56e12d6 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_reorder_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_reorder_test.cpp @@ -27,7 +27,7 @@ TEST_F(MKLDNNGraphReorderTests, cannotCreatePrimitiveDescriprorsWithoutOtherLaye node.reset(MKLDNNPlugin::MKLDNNNode::factory().create(layer, eng, {}, cache)); ASSERT_EQ(MKLDNNPlugin::Type::Reorder, node->getType()); - ASSERT_THROW(node->getSupportedDescriptors(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(node->getSupportedDescriptors(), InferenceEngine::Exception); } TEST_F(MKLDNNGraphReorderTests, CreateReorder) { diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_reshape_test.cpp 
b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_reshape_test.cpp index e9d2af9ca4e..8c21202657b 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_reshape_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_reshape_test.cpp @@ -162,7 +162,7 @@ protected: ref_reshape(*srcPtr, dst_ref); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_roi_pooling_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_roi_pooling_test.cpp index 7d8aeb079bf..db74d51e399 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_roi_pooling_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_roi_pooling_test.cpp @@ -290,7 +290,7 @@ protected: ref_roipooling(*srcPtr, *roiPtr, dst_ref, p); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_simplernms_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_simplernms_test.cpp index c1860f09b13..70edee7c01c 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_simplernms_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_simplernms_test.cpp @@ -447,7 +447,7 @@ protected: ref_simplernms(*srcClsPtr, *srcDeltaPtr, *srcInfoPtr, dst_ref, p); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch 
(const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_softmax_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_softmax_test.cpp index 8d6a6105247..9a4ab4d9165 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_softmax_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_softmax_test.cpp @@ -285,7 +285,7 @@ protected: graph.Infer(srcs, outputBlobs); check_softmax_fwd(*output, p); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -384,7 +384,7 @@ protected: graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkSoftmax); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkSoftmax); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_split_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_split_test.cpp index 2be31841e27..3a3f69ab563 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_split_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_split_test.cpp @@ -216,7 +216,7 @@ protected: for (auto& output : outputBlobs) { compare(*output.second, dst_refs[ref_idx++], 0.0005f); } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -360,7 +360,7 @@ protected: graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkSplit); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkSplit); - } catch (const 
InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_tile_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_tile_test.cpp index a8dc36d1555..4b5dc018deb 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_tile_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_tile_test.cpp @@ -177,7 +177,7 @@ protected: ref_tile(*srcPtr, dst_ref, p); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } @@ -256,7 +256,7 @@ protected: graph.checkDynBatch(srcs, outputBlobs, MB, MB, checkTile); graph.checkDynBatch(srcs, outputBlobs, 1, MB, checkTile); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_conv_concat_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_conv_concat_tests.cpp index e4750691b8d..2944cc83d51 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_conv_concat_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_conv_concat_tests.cpp @@ -228,7 +228,7 @@ protected: graph.Infer(srcs, outputBlobs2); compare(*output, *output2, 0.0005f); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_conv_depthwise_fusing_test.cpp 
b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_conv_depthwise_fusing_test.cpp index faee5786bf4..3c06de4cbea 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_conv_depthwise_fusing_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_conv_depthwise_fusing_test.cpp @@ -307,7 +307,7 @@ protected: ref_conv_depthwise(*srcPtr, (const float *)weights->buffer(), dst_ref, p); compare(*output, dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_deconv_concat_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_deconv_concat_tests.cpp index ec928fac50a..d2a27035ce0 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_deconv_concat_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_deconv_concat_tests.cpp @@ -371,7 +371,7 @@ protected: } } - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_dw_conv_fusing_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_dw_conv_fusing_test.cpp index e96cb0af41f..e532e6dede3 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_dw_conv_fusing_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_dw_conv_fusing_test.cpp @@ -319,7 +319,7 @@ protected: compare(*output, conv2_dst_ref); - } catch (const InferenceEngine::details::InferenceEngineException &e) { + } catch (const InferenceEngine::Exception &e) { FAIL() << e.what(); } } diff --git 
a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_structure_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_structure_test.cpp index eb5e07b770d..2bdb84d08bc 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_structure_test.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_structure_test.cpp @@ -6619,7 +6619,7 @@ TEST_F(MKLDNNGraphStructureTests, TestCheckIncorrectScaleShift) { ASSERT_NO_THROW(network = core.ReadNetwork(model, weights)); MKLDNNGraphTestClass graph; - ASSERT_THROW(graph.CreateGraph(network), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(graph.CreateGraph(network), InferenceEngine::Exception); } TEST_F(MKLDNNGraphStructureTests, TestConcatWithFourInputs) { diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/test_layers.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/test_layers.cpp index d38990dae83..10facb3e7c6 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/test_layers.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/test_layers.cpp @@ -174,10 +174,10 @@ // ASSERT_NE(nullptr, dynamic_cast(ctx.get())); // // InferenceEngine::CNNLayer cnnLayer({}); -// EXPECT_THROW(MKLDNNPlugin::LayerRegistry::CreateLayer(&cnnLayer, nullptr, dynamic_cast(ctx.get())) , InferenceEngine::details::InferenceEngineException); +// EXPECT_THROW(MKLDNNPlugin::LayerRegistry::CreateLayer(&cnnLayer, nullptr, dynamic_cast(ctx.get())) , InferenceEngine::Exception); //} // //TEST_F(MKLDNNLayersTests, canNotCreateLayerWithoutContext) { // InferenceEngine::ConvolutionLayer convLayer({}); -// EXPECT_THROW(MKLDNNPlugin::LayerRegistry::CreateLayer(&convLayer, nullptr, nullptr), InferenceEngine::details::InferenceEngineException); +// EXPECT_THROW(MKLDNNPlugin::LayerRegistry::CreateLayer(&convLayer, nullptr, nullptr), InferenceEngine::Exception); //} \ No newline at 
end of file diff --git a/inference-engine/tests_deprecated/unit/inference_engine_tests/get_num_iterations_test.cpp b/inference-engine/tests_deprecated/unit/inference_engine_tests/get_num_iterations_test.cpp index 05799b07c86..d289883c2e0 100644 --- a/inference-engine/tests_deprecated/unit/inference_engine_tests/get_num_iterations_test.cpp +++ b/inference-engine/tests_deprecated/unit/inference_engine_tests/get_num_iterations_test.cpp @@ -62,19 +62,6 @@ public: } }; -template -std::ostream& operator<<(std::ostream& stream, const std::vector& object) { - stream << "["; - for (std::size_t i = 0; i < object.size(); ++i) { - stream << object[i]; - if (i < object.size() - 1) { - stream << " "; - } - } - stream << "]"; - return stream; -} - struct NegativeTestParams { std::vector inputsDimensions; std::vector inputRules; diff --git a/inference-engine/tests_deprecated/unit/inference_engine_tests/layers_test.cpp b/inference-engine/tests_deprecated/unit/inference_engine_tests/layers_test.cpp index 6d9aba27d5a..3f43ffda443 100644 --- a/inference-engine/tests_deprecated/unit/inference_engine_tests/layers_test.cpp +++ b/inference-engine/tests_deprecated/unit/inference_engine_tests/layers_test.cpp @@ -72,7 +72,7 @@ TEST_F(LayersTests, throwsOnExpiredDataPtr) { layer.insData.resize(1); layer.insData[0] = dataPtr; dataPtr.reset(); - ASSERT_THROW(layer.input(), InferenceEngine::details::InferenceEngineException); + ASSERT_THROW(layer.input(), InferenceEngine::Exception); } template @@ -425,7 +425,7 @@ TEST_F(LayersTests, returnEmptyPadForValidPadConvolution) { TEST_F(LayersTests, throwOnSamePadForEmptyConvolution) { ConvolutionLayer layer(getDefaultParamsForLayer()); layer.params["auto_pad"] = "same_upper"; - ASSERT_THROW(getPaddings(layer), details::InferenceEngineException); + ASSERT_THROW(getPaddings(layer), Exception); } TEST_F(LayersTests, throwOnInvalidDimsSamePadForConvolution) { @@ -433,7 +433,7 @@ TEST_F(LayersTests, throwOnInvalidDimsSamePadForConvolution) { 
layer.params["auto_pad"] = "same_upper"; auto emptyData = std::make_shared("", TensorDesc(Precision::UNSPECIFIED, Layout::ANY)); layer.insData.push_back(emptyData); - ASSERT_THROW(getPaddings(layer), details::InferenceEngineException); + ASSERT_THROW(getPaddings(layer), Exception); } TEST_F(LayersTests, throwOn2DSamePadForConvolution) { @@ -441,7 +441,7 @@ TEST_F(LayersTests, throwOn2DSamePadForConvolution) { layer.params["auto_pad"] = "same_upper"; auto notEmptyData = std::make_shared("", TensorDesc(Precision::UNSPECIFIED, SizeVector{ 1, 1 }, Layout::NC)); layer.insData.push_back(notEmptyData); - ASSERT_THROW(getPaddings(layer), details::InferenceEngineException); + ASSERT_THROW(getPaddings(layer), Exception); } TEST_F(LayersTests, throwWithNotEnoughParamsSamePadForConvolution) { diff --git a/inference-engine/tests_deprecated/unit/inference_engine_tests/util_const_infer_test.cpp b/inference-engine/tests_deprecated/unit/inference_engine_tests/util_const_infer_test.cpp index 13f9f43eb32..a62ce456c1c 100644 --- a/inference-engine/tests_deprecated/unit/inference_engine_tests/util_const_infer_test.cpp +++ b/inference-engine/tests_deprecated/unit/inference_engine_tests/util_const_infer_test.cpp @@ -636,7 +636,7 @@ TEST_F(RemoveLayerTests, throwErrorOnFoldWithUnknownImplForNotShapeDefiningLayer } IE::ConstTransformer transformator(net.get()); - ASSERT_THROW(transformator.foldConstSubgraphs(), IE::details::InferenceEngineException); + ASSERT_THROW(transformator.foldConstSubgraphs(), IE::Exception); } TEST_F(RemoveLayerTests, canFullTrim) { diff --git a/tests/time_tests/src/timetests/timetest_infer.cpp b/tests/time_tests/src/timetests/timetest_infer.cpp index 58010ba0c50..df4984f2932 100644 --- a/tests/time_tests/src/timetests/timetest_infer.cpp +++ b/tests/time_tests/src/timetests/timetest_infer.cpp @@ -67,7 +67,7 @@ int runPipeline(const std::string &model, const std::string &device) { try { pipeline(model, device); - } catch (const 
InferenceEngine::details::InferenceEngineException &iex) { + } catch (const InferenceEngine::Exception &iex) { std::cerr << "Inference Engine pipeline failed with Inference Engine exception:\n" << iex.what();