Attempt to bring some order to the single general exception (which differs only by its message) and the typed exceptions, using NOT_IMPLEMENTED as an example (#3537)
The NOT_IMPLEMENTED status code now correctly translates to the NotImplemented exception (and is handled as the corresponding typed exception).
parent 77ecd7e17c
commit ee8e9a9e8a
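In short, code that previously caught the general InferenceEngine::details::InferenceEngineException and string-matched its what() against NOT_IMPLEMENTED_str now catches the typed InferenceEngine::NotImplemented directly, as the hunks below show. A minimal stand-alone sketch of the two styles follows; the exception types are stand-ins for illustration, not the real InferenceEngine classes:

    // Illustration only: stand-in types, not the real InferenceEngine headers.
    #include <iostream>
    #include <stdexcept>
    #include <string>

    struct GeneralException : std::runtime_error {      // plays the role of InferenceEngineException
        using std::runtime_error::runtime_error;
    };
    struct NotImplemented : GeneralException {           // plays the role of InferenceEngine::NotImplemented
        NotImplemented() : GeneralException("[NOT_IMPLEMENTED] unsupported operation") {}
    };

    void ImportNetwork() { throw NotImplemented{}; }      // hypothetical operation that is not implemented

    int main() {
        // Old style: catch the general exception and string-match the message prefix.
        try {
            ImportNetwork();
        } catch (const GeneralException& ex) {
            if (std::string{ex.what()}.find("[NOT_IMPLEMENTED] ") != std::string::npos) {
                std::cout << "fallback path (string matching)\n";
            } else {
                throw;
            }
        }
        // New style: catch the typed exception directly; no substring check, no explicit rethrow.
        try {
            ImportNetwork();
        } catch (const NotImplemented&) {
            std::cout << "fallback path (typed exception)\n";
        }
    }

The typed catch removes the fragile substring check and the else-branch rethrow that every call site used to repeat.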
@@ -20,7 +20,7 @@
 /**
  * @def THROW_IE_EXCEPTION
- * @brief A macro used to throw the exception with a notable description
+ * @brief A macro used to throw general exception with a description
  */
 #define THROW_IE_EXCEPTION throw InferenceEngine::details::InferenceEngineException(__FILE__, __LINE__)

@@ -480,46 +480,42 @@ HeteroExecutableNetwork::HeteroExecutableNetwork(std::istream&
     bool loaded = false;
     try {
         executableNetwork = _heteroPlugin->GetCore()->ImportNetwork(heteroModel, deviceName, loadConfig);
-    } catch(InferenceEngine::details::InferenceEngineException& ie_ex) {
-        if (std::string::npos != std::string{ie_ex.what()}.find(NOT_IMPLEMENTED_str)) {
-            // read XML content
-            std::string xmlString;
-            std::getline(heteroModel, xmlString);
-            std::uint64_t dataSize = 0;
-            heteroModel.read(reinterpret_cast<char*>(&dataSize), sizeof(dataSize));
+    } catch(const InferenceEngine::NotImplemented&) {
+        // read XML content
+        std::string xmlString;
+        std::getline(heteroModel, xmlString);
+        std::uint64_t dataSize = 0;
+        heteroModel.read(reinterpret_cast<char*>(&dataSize), sizeof(dataSize));

-            // read blob content
-            InferenceEngine::Blob::Ptr dataBlob;
-            if (0 != dataSize) {
-                dataBlob = InferenceEngine::make_shared_blob<std::uint8_t>(
-                    InferenceEngine::TensorDesc(InferenceEngine::Precision::U8,
-                        {static_cast<std::size_t>(dataSize)},
-                        InferenceEngine::Layout::C));
-                dataBlob->allocate();
-                heteroModel.read(dataBlob->buffer(), dataSize);
-            }
-
-            cnnnetwork = _heteroPlugin->GetCore()->ReadNetwork(xmlString, std::move(dataBlob));
-            auto inputs = cnnnetwork.getInputsInfo();
-            auto inputsNode = subnetworkNode.child("inputs");
-            for (auto inputNode = inputsNode.child("input"); !inputNode.empty(); inputNode = inputNode.next_sibling("input")) {
-                auto inputName = GetStrAttr(inputNode, "name");
-                inputs[inputName]->setPrecision(Precision::FromStr(GetStrAttr(inputNode, "precision")));
-            }
-
-            auto outputsNode = subnetworkNode.child("outputs");
-            for (auto outputNode = outputsNode.child("output"); !outputNode.empty(); outputNode = outputNode.next_sibling("output")) {
-                cnnnetwork.addOutput(GetStrAttr(outputNode, "creatorName"), GetUInt64Attr(outputNode, "index"));
-            }
-            auto outputs = cnnnetwork.getOutputsInfo();
-            for (auto outputNode = outputsNode.child("output"); !outputNode.empty(); outputNode = outputNode.next_sibling("output")) {
-                outputs[GetStrAttr(outputNode, "name")]->setPrecision(Precision::FromStr(GetStrAttr(outputNode, "precision")));
-            }
-            executableNetwork = _heteroPlugin->GetCore()->LoadNetwork(cnnnetwork, deviceName, loadConfig);
-            loaded = true;
-        } else {
-            throw;
-        }
+        // read blob content
+        InferenceEngine::Blob::Ptr dataBlob;
+        if (0 != dataSize) {
+            dataBlob = InferenceEngine::make_shared_blob<std::uint8_t>(
+                InferenceEngine::TensorDesc(InferenceEngine::Precision::U8,
+                    {static_cast<std::size_t>(dataSize)},
+                    InferenceEngine::Layout::C));
+            dataBlob->allocate();
+            heteroModel.read(dataBlob->buffer(), dataSize);
+        }
+
+        cnnnetwork = _heteroPlugin->GetCore()->ReadNetwork(xmlString, std::move(dataBlob));
+        auto inputs = cnnnetwork.getInputsInfo();
+        auto inputsNode = subnetworkNode.child("inputs");
+        for (auto inputNode = inputsNode.child("input"); !inputNode.empty(); inputNode = inputNode.next_sibling("input")) {
+            auto inputName = GetStrAttr(inputNode, "name");
+            inputs[inputName]->setPrecision(Precision::FromStr(GetStrAttr(inputNode, "precision")));
+        }
+
+        auto outputsNode = subnetworkNode.child("outputs");
+        for (auto outputNode = outputsNode.child("output"); !outputNode.empty(); outputNode = outputNode.next_sibling("output")) {
+            cnnnetwork.addOutput(GetStrAttr(outputNode, "creatorName"), GetUInt64Attr(outputNode, "index"));
+        }
+        auto outputs = cnnnetwork.getOutputsInfo();
+        for (auto outputNode = outputsNode.child("output"); !outputNode.empty(); outputNode = outputNode.next_sibling("output")) {
+            outputs[GetStrAttr(outputNode, "name")]->setPrecision(Precision::FromStr(GetStrAttr(outputNode, "precision")));
+        }
+        executableNetwork = _heteroPlugin->GetCore()->LoadNetwork(cnnnetwork, deviceName, loadConfig);
+        loaded = true;
     }

     for (auto&& input : executableNetwork.GetInputsInfo()) {
@@ -597,24 +593,20 @@ void HeteroExecutableNetwork::ExportImpl(std::ostream& heteroModel) {
     for (auto&& subnetwork : networks) {
         try {
            subnetwork._network.Export(heteroModel);
-        } catch (InferenceEngine::details::InferenceEngineException& ie_ex) {
-            if (std::string::npos != std::string{ie_ex.what()}.find(NOT_IMPLEMENTED_str)) {
-                // TODO: enable once serialization to IR v10 is implemented
+        } catch (const InferenceEngine::NotImplemented&) {
+            // TODO: enable once serialization to IR v10 is implemented
 #if 1
-                THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str
-                    << "Device " << subnetwork._device << " does not implement Export method";
+            THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED)
+                << "Device " << subnetwork._device << " does not implement Export method";
 #else
-                pugi::xml_document doc;
-                auto subnet = subnetwork._clonedNetwork;
-                auto dataSize = static_cast<std::uint64_t>(InferenceEngine::Serialization::FillXmlDoc(subnet, doc));
-                doc.save(heteroModel, nullptr, pugi::format_raw);
-                heteroModel << std::endl;
-                heteroModel.write(reinterpret_cast<char*>(&dataSize), sizeof(dataSize));
-                InferenceEngine::Serialization::SerializeBlobs(heteroModel, subnet);
+            pugi::xml_document doc;
+            auto subnet = subnetwork._clonedNetwork;
+            auto dataSize = static_cast<std::uint64_t>(InferenceEngine::Serialization::FillXmlDoc(subnet, doc));
+            doc.save(heteroModel, nullptr, pugi::format_raw);
+            heteroModel << std::endl;
+            heteroModel.write(reinterpret_cast<char*>(&dataSize), sizeof(dataSize));
+            InferenceEngine::Serialization::SerializeBlobs(heteroModel, subnet);
 #endif
-            } else {
-                throw;
-            }
         }
     }
 }
@@ -171,9 +171,8 @@ RemoteContext::Ptr MultiDeviceExecutableNetwork::GetContext() const {
         } catch (const NotImplemented& ex) {
         }
     }
-    THROW_IE_EXCEPTION << InferenceEngine::details::as_status << StatusCode::NOT_IMPLEMENTED
-        << NOT_IMPLEMENTED_str << "None of the devices in the MULTI has an associated remote context."
-        << "Current list of devices allowed via the DEVICE_PRIORITIES config: " << devices_names;
+    THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED) << "None of the devices in the MULTI has an associated remote context."
+        << " Current list of devices allowed via the DEVICE_PRIORITIES config: " << devices_names;
 }

 InferenceEngine::InferRequestInternal::Ptr MultiDeviceExecutableNetwork::CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
@@ -210,8 +209,7 @@ IInferRequest::Ptr MultiDeviceExecutableNetwork::CreateInferRequest() {
 void MultiDeviceExecutableNetwork::SetConfig(const std::map<std::string, InferenceEngine::Parameter> &config) {
     auto priorities = config.find(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES);
     if (priorities == config.end() || config.size() > 1) {
-        THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str <<
-            "The only config supported for the Network's SetConfig is MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES";
+        THROW_IE_EXCEPTION << "The only config supported for the Network's SetConfig is MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES";
     } else {
         auto multiPlugin = std::dynamic_pointer_cast<MultiDeviceInferencePlugin>(this->_plugin);
         assert(multiPlugin != nullptr);
@@ -220,7 +218,7 @@ void MultiDeviceExecutableNetwork::SetConfig(const std::map<std::string, Inferen
         if (std::any_of(metaDevices.begin(), metaDevices.end(), [](const DeviceInformation& kvp) {
                 return kvp.numRequestsPerDevices != -1;
             })) {
-            THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << "You can only change device priorities but not number of requests"
+            THROW_IE_EXCEPTION << "You can only change device priorities but not number of requests"
                 <<" with the Network's SetConfig(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES!";
         }

@@ -12,6 +12,13 @@

 #include "description_buffer.hpp"

+/**
+ * @def THROW_IE_EXCEPTION_WITH_STATUS
+ * @brief Throws an exception along with the status (which is eventually converted to the typed exception)
+ */
+#define THROW_IE_EXCEPTION_WITH_STATUS(__status) THROW_IE_EXCEPTION << \
+    InferenceEngine::details::as_status << InferenceEngine::StatusCode::__status << __status##_str
+
 namespace InferenceEngine {

 /**
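For reference, the new macro is shorthand for the status-tagged throw it replaces at the call sites in this commit; expanded by hand from the definition above:

    // THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED) expands to:
    THROW_IE_EXCEPTION << InferenceEngine::details::as_status
                       << InferenceEngine::StatusCode::NOT_IMPLEMENTED << NOT_IMPLEMENTED_str;

so the thrown general exception still carries both the status code and the "[NOT_IMPLEMENTED] " message prefix, which, per the commit description, is what gets translated into the typed NotImplemented exception on the catching side.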
@@ -86,6 +93,18 @@ namespace InferenceEngine {
  */
 #define NOT_FOUND_str std::string("[NOT_FOUND] ")

+/**
+ * @def UNEXPECTED_str
+ * @brief Defines the `unexpected` message
+ */
+#define UNEXPECTED_str std::string("[UNEXPECTED] ")
+
+/**
+ * @def GENERAL_ERROR_str
+ * @brief Defines the `general error` message
+ */
+#define GENERAL_ERROR_str std::string("[GENERAL ERROR] ")
+
 /**
  * @def RESULT_NOT_READY_str
  * @brief Defines the `result not ready` message
@@ -64,7 +64,7 @@ public:

     void Export(const std::string& modelFileName) override {
         (void)modelFileName;
-        THROW_IE_EXCEPTION << InferenceEngine::details::as_status << StatusCode::NOT_IMPLEMENTED << NOT_IMPLEMENTED_str;
+        THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED);
     }

     void Export(std::ostream& networkModel) override {
@@ -76,7 +76,7 @@ public:
     }

     CNNNetwork GetExecGraphInfo() override {
-        THROW_IE_EXCEPTION << InferenceEngine::details::as_status << StatusCode::NOT_IMPLEMENTED << NOT_IMPLEMENTED_str;
+        THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED);
     }

     /**
@@ -89,7 +89,7 @@ public:
     }

     std::vector<IVariableStateInternal::Ptr> QueryState() override {
-        THROW_IE_EXCEPTION << InferenceEngine::details::as_status << StatusCode::NOT_IMPLEMENTED << NOT_IMPLEMENTED_str;
+        THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED);
     }

     void SetConfig(const std::map<std::string, Parameter>& config) override {
@@ -107,11 +107,11 @@ public:

     Parameter GetMetric(const std::string& name) const override {
         (void)name;
-        THROW_IE_EXCEPTION << InferenceEngine::details::as_status << StatusCode::NOT_IMPLEMENTED << NOT_IMPLEMENTED_str;
+        THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED);
     }

     RemoteContext::Ptr GetContext() const override {
-        THROW_IE_EXCEPTION << InferenceEngine::details::as_status << StatusCode::NOT_IMPLEMENTED << NOT_IMPLEMENTED_str;
+        THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED);
     }

 protected:
@@ -123,7 +123,7 @@ protected:
      */
     virtual void ExportImpl(std::ostream& networkModel) {
         (void)networkModel;
-        THROW_IE_EXCEPTION << InferenceEngine::details::as_status << StatusCode::NOT_IMPLEMENTED << NOT_IMPLEMENTED_str;
+        THROW_IE_EXCEPTION_WITH_STATUS(NOT_IMPLEMENTED);
     }

     InferenceEngine::InputsDataMap _networkInputs; //!< Holds infromation about network inputs info
|
@ -61,17 +61,7 @@ namespace BehaviorTestsDefinitions {
|
||||
{ \
|
||||
try { \
|
||||
__VA_ARGS__; \
|
||||
} catch(InferenceEngine::details::InferenceEngineException& ieException) { \
|
||||
auto notImplementedExceptionIsThrown = \
|
||||
std::string::npos != std::string {ieException.what()} \
|
||||
.find(NOT_IMPLEMENTED_str); \
|
||||
if (notImplementedExceptionIsThrown) { \
|
||||
GTEST_SKIP(); \
|
||||
} else { \
|
||||
FAIL() << "thrown from expression: " # __VA_ARGS__ << std::endl \
|
||||
<< "what: " << ieException.what(); \
|
||||
} \
|
||||
} catch (const InferenceEngine::NotImplemented& ex) { \
|
||||
} catch (const InferenceEngine::NotImplemented&) { \
|
||||
GTEST_SKIP(); \
|
||||
} \
|
||||
}
|
||||
|