diff --git a/inference-engine/include/details/ie_exception.hpp b/inference-engine/include/details/ie_exception.hpp
index e85d5a6075a..73babcf3677 100644
--- a/inference-engine/include/details/ie_exception.hpp
+++ b/inference-engine/include/details/ie_exception.hpp
@@ -20,7 +20,7 @@
 
 /**
  * @def THROW_IE_EXCEPTION
- * @brief A macro used to throw the exception with a notable description
+ * @brief A macro used to throw general exception with a description
  */
 #define THROW_IE_EXCEPTION throw InferenceEngine::details::InferenceEngineException(__FILE__, __LINE__)
 
diff --git a/inference-engine/src/hetero_plugin/hetero_executable_network.cpp b/inference-engine/src/hetero_plugin/hetero_executable_network.cpp
index 6ea6efd5cc4..a3b80cfc055 100644
--- a/inference-engine/src/hetero_plugin/hetero_executable_network.cpp
+++ b/inference-engine/src/hetero_plugin/hetero_executable_network.cpp
@@ -480,46 +480,42 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(std::istream&
     bool loaded = false;
     try {
         executableNetwork = _heteroPlugin->GetCore()->ImportNetwork(heteroModel, deviceName, loadConfig);
-    } catch(InferenceEngine::details::InferenceEngineException& ie_ex) {
-        if (std::string::npos != std::string{ie_ex.what()}.find(NOT_IMPLEMENTED_str)) {
-            // read XML content
-            std::string xmlString;
-            std::getline(heteroModel, xmlString);
-            std::uint64_t dataSize = 0;
-            heteroModel.read(reinterpret_cast<char*>(&dataSize), sizeof(dataSize));
+    } catch(const InferenceEngine::NotImplemented&) {
+        // read XML content
+        std::string xmlString;
+        std::getline(heteroModel, xmlString);
+        std::uint64_t dataSize = 0;
+        heteroModel.read(reinterpret_cast<char*>(&dataSize), sizeof(dataSize));
 
-            // read blob content
-            InferenceEngine::Blob::Ptr dataBlob;
-            if (0 != dataSize) {
-                dataBlob = InferenceEngine::make_shared_blob<std::uint8_t>(
-                    InferenceEngine::TensorDesc(InferenceEngine::Precision::U8,
-                                                {static_cast<std::size_t>(dataSize)},
-                                                InferenceEngine::Layout::C));
-                dataBlob->allocate();
-                heteroModel.read(dataBlob->buffer(), dataSize);
-            }
-
-            cnnnetwork = _heteroPlugin->GetCore()->ReadNetwork(xmlString, std::move(dataBlob));
-            auto inputs = cnnnetwork.getInputsInfo();
-            auto inputsNode = subnetworkNode.child("inputs");
-            for (auto inputNode = inputsNode.child("input"); !inputNode.empty(); inputNode = inputNode.next_sibling("input")) {
-                auto inputName = GetStrAttr(inputNode, "name");
-                inputs[inputName]->setPrecision(Precision::FromStr(GetStrAttr(inputNode, "precision")));
-            }
-
-            auto outputsNode = subnetworkNode.child("outputs");
-            for (auto outputNode = outputsNode.child("output"); !outputNode.empty(); outputNode = outputNode.next_sibling("output")) {
-                cnnnetwork.addOutput(GetStrAttr(outputNode, "creatorName"), GetUInt64Attr(outputNode, "index"));
-            }
-            auto outputs = cnnnetwork.getOutputsInfo();
-            for (auto outputNode = outputsNode.child("output"); !outputNode.empty(); outputNode = outputNode.next_sibling("output")) {
-                outputs[GetStrAttr(outputNode, "name")]->setPrecision(Precision::FromStr(GetStrAttr(outputNode, "precision")));
-            }
-            executableNetwork = _heteroPlugin->GetCore()->LoadNetwork(cnnnetwork, deviceName, loadConfig);
-            loaded = true;
-        } else {
-            throw;
+        // read blob content
+        InferenceEngine::Blob::Ptr dataBlob;
+        if (0 != dataSize) {
+            dataBlob = InferenceEngine::make_shared_blob<std::uint8_t>(
+                InferenceEngine::TensorDesc(InferenceEngine::Precision::U8,
+                                            {static_cast<std::size_t>(dataSize)},
+                                            InferenceEngine::Layout::C));
+            dataBlob->allocate();
+            heteroModel.read(dataBlob->buffer(), dataSize);
         }
+
+        cnnnetwork = _heteroPlugin->GetCore()->ReadNetwork(xmlString, std::move(dataBlob));
+        auto inputs = cnnnetwork.getInputsInfo();
+        auto inputsNode = subnetworkNode.child("inputs");
+        for (auto inputNode = inputsNode.child("input"); !inputNode.empty(); inputNode = inputNode.next_sibling("input")) {
+            auto inputName = GetStrAttr(inputNode, "name");
+            inputs[inputName]->setPrecision(Precision::FromStr(GetStrAttr(inputNode, "precision")));
+        }
+
+        auto outputsNode = subnetworkNode.child("outputs");
+        for (auto outputNode = outputsNode.child("output"); !outputNode.empty(); outputNode = outputNode.next_sibling("output")) {
+            cnnnetwork.addOutput(GetStrAttr(outputNode, "creatorName"), GetUInt64Attr(outputNode, "index"));
+        }
+        auto outputs = cnnnetwork.getOutputsInfo();
+        for (auto outputNode = outputsNode.child("output"); !outputNode.empty(); outputNode = outputNode.next_sibling("output")) {
+            outputs[GetStrAttr(outputNode, "name")]->setPrecision(Precision::FromStr(GetStrAttr(outputNode, "precision")));
+        }
+        executableNetwork = _heteroPlugin->GetCore()->LoadNetwork(cnnnetwork, deviceName, loadConfig);
+        loaded = true;
     }
 
     for (auto&& input : executableNetwork.GetInputsInfo()) {
@@ -597,24 +593,20 @@ void HeteroExecutableNetwork::ExportImpl(std::ostream& heteroModel) {
     for (auto&& subnetwork : networks) {
         try {
             subnetwork._network.Export(heteroModel);
-        } catch (InferenceEngine::details::InferenceEngineException& ie_ex) {
-            if (std::string::npos != std::string{ie_ex.what()}.find(NOT_IMPLEMENTED_str)) {
-                // TODO: enable once serialization to IR v10 is implemented
+        } catch (const InferenceEngine::NotImplemented&) {
+            // TODO: enable once serialization to IR v10 is implemented
 #if 1
-                THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str
-                    << "Device " << subnetwork._device << " does not implement Export method";
+            THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED)
+                << "Device " << subnetwork._device << " does not implement Export method";
 #else
-                pugi::xml_document doc;
-                auto subnet = subnetwork._clonedNetwork;
-                auto dataSize = static_cast<std::uint64_t>(InferenceEngine::Serialization::FillXmlDoc(subnet, doc));
-                doc.save(heteroModel, nullptr, pugi::format_raw);
-                heteroModel << std::endl;
-                heteroModel.write(reinterpret_cast<char*>(&dataSize), sizeof(dataSize));
-                InferenceEngine::Serialization::SerializeBlobs(heteroModel, subnet);
+            pugi::xml_document doc;
+            auto subnet = subnetwork._clonedNetwork;
+            auto dataSize = static_cast<std::uint64_t>(InferenceEngine::Serialization::FillXmlDoc(subnet, doc));
+            doc.save(heteroModel, nullptr, pugi::format_raw);
+            heteroModel << std::endl;
+            heteroModel.write(reinterpret_cast<char*>(&dataSize), sizeof(dataSize));
+            InferenceEngine::Serialization::SerializeBlobs(heteroModel, subnet);
 #endif
-            } else {
-                throw;
-            }
         }
     }
 }
diff --git a/inference-engine/src/multi_device/multi_device_exec_network.cpp b/inference-engine/src/multi_device/multi_device_exec_network.cpp
index b8795376b51..2a5c50182be 100644
--- a/inference-engine/src/multi_device/multi_device_exec_network.cpp
+++ b/inference-engine/src/multi_device/multi_device_exec_network.cpp
@@ -171,9 +171,8 @@ RemoteContext::Ptr MultiDeviceExecutableNetwork::GetContext() const {
         } catch (const NotImplemented& ex) {
         }
     }
-    THROW_IE_EXCEPTION << InferenceEngine::details::as_status << StatusCode::NOT_IMPLEMENTED
-        << NOT_IMPLEMENTED_str << "None of the devices in the MULTI has an associated remote context."
-        << "Current list of devices allowed via the DEVICE_PRIORITIES config: " << devices_names;
+    THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED) << "None of the devices in the MULTI has an associated remote context."
+ << " Current list of devices allowed via the DEVICE_PRIORITIES config: " << devices_names; } InferenceEngine::InferRequestInternal::Ptr MultiDeviceExecutableNetwork::CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs, @@ -210,8 +209,7 @@ IInferRequest::Ptr MultiDeviceExecutableNetwork::CreateInferRequest() { void MultiDeviceExecutableNetwork::SetConfig(const std::map &config) { auto priorities = config.find(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES); if (priorities == config.end() || config.size() > 1) { - THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << - "The only config supported for the Network's SetConfig is MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES"; + THROW_IE_EXCEPTION << "The only config supported for the Network's SetConfig is MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES"; } else { auto multiPlugin = std::dynamic_pointer_cast(this->_plugin); assert(multiPlugin != nullptr); @@ -220,7 +218,7 @@ void MultiDeviceExecutableNetwork::SetConfig(const std::map QueryState() override { - THROW_IE_EXCEPTION << InferenceEngine::details::as_status << StatusCode::NOT_IMPLEMENTED << NOT_IMPLEMENTED_str; + THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); } void SetConfig(const std::map& config) override { @@ -107,11 +107,11 @@ public: Parameter GetMetric(const std::string& name) const override { (void)name; - THROW_IE_EXCEPTION << InferenceEngine::details::as_status << StatusCode::NOT_IMPLEMENTED << NOT_IMPLEMENTED_str; + THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); } RemoteContext::Ptr GetContext() const override { - THROW_IE_EXCEPTION << InferenceEngine::details::as_status << StatusCode::NOT_IMPLEMENTED << NOT_IMPLEMENTED_str; + THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); } protected: @@ -123,7 +123,7 @@ protected: */ virtual void ExportImpl(std::ostream& networkModel) { (void)networkModel; - THROW_IE_EXCEPTION << InferenceEngine::details::as_status << StatusCode::NOT_IMPLEMENTED << NOT_IMPLEMENTED_str; + THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED); } InferenceEngine::InputsDataMap _networkInputs; //!< Holds infromation about network inputs info diff --git a/inference-engine/tests/functional/plugin/shared/include/behavior/core_integration.hpp b/inference-engine/tests/functional/plugin/shared/include/behavior/core_integration.hpp index 769a6843590..1028aae982c 100644 --- a/inference-engine/tests/functional/plugin/shared/include/behavior/core_integration.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/behavior/core_integration.hpp @@ -61,17 +61,7 @@ namespace BehaviorTestsDefinitions { { \ try { \ __VA_ARGS__; \ - } catch(InferenceEngine::details::InferenceEngineException& ieException) { \ - auto notImplementedExceptionIsThrown = \ - std::string::npos != std::string {ieException.what()} \ - .find(NOT_IMPLEMENTED_str); \ - if (notImplementedExceptionIsThrown) { \ - GTEST_SKIP(); \ - } else { \ - FAIL() << "thrown from expression: " # __VA_ARGS__ << std::endl \ - << "what: " << ieException.what(); \ - } \ - } catch (const InferenceEngine::NotImplemented& ex) { \ + } catch (const InferenceEngine::NotImplemented&) { \ GTEST_SKIP(); \ } \ }