Remove constructors for ov Exceptions (#16938)

* Remove constructors for ov Exceptions

* Fixed linux build

* Fixed ONNX Frontend

* Fixed paddle

* Fixed exceptions in tests

* Deprecate constructors for ov::Exception

* Suppress some warnings

* Merge several exceptions

* Some small changes

* Suppress more warnings

* More warnings

* More warnings

* Suppress more warnings

* More warnings
This commit is contained in:
Ilya Churaev
2023-04-18 21:02:26 +04:00
committed by GitHub
parent 441dad2eea
commit 566ef01a3f
251 changed files with 834 additions and 649 deletions

View File

@@ -170,7 +170,7 @@ shared_ptr<Node> op::ConvolutionIE::clone_with_new_inputs(const ngraph::OutputVe
m_auto_pad);
}
throw ngraph_error("Unsupported number of arguments for ConvolutionIE operation");
OPENVINO_THROW("Unsupported number of arguments for ConvolutionIE operation");
}
bool op::ConvolutionIE::visit_attributes(AttributeVisitor& visitor) {

View File

@@ -27,7 +27,7 @@ op::CropIE::CropIE(const Output<Node>& data,
std::shared_ptr<Node> op::CropIE::clone_with_new_inputs(const OutputVector& new_args) const {
if (new_args.size() != 1) {
throw ngraph_error("Incorrect number of new arguments");
OPENVINO_THROW("Incorrect number of new arguments");
}
return make_shared<CropIE>(new_args.at(0), axes, dim, offset);

View File

@@ -140,7 +140,7 @@ shared_ptr<Node> op::DeconvolutionIE::clone_with_new_inputs(const ngraph::Output
m_output_padding,
m_output_shape);
}
throw ngraph::ngraph_error("Unexpected number of arguments");
OPENVINO_THROW("Unexpected number of arguments");
}
bool op::DeconvolutionIE::visit_attributes(AttributeVisitor& visitor) {

View File

@@ -25,7 +25,7 @@ op::Eltwise::Eltwise(const Output<Node>& data1,
std::shared_ptr<Node> op::Eltwise::clone_with_new_inputs(const OutputVector& new_args) const {
if (new_args.size() != 2) {
throw ngraph_error("Incorrect number of new arguments");
OPENVINO_THROW("Incorrect number of new arguments");
}
return make_shared<Eltwise>(new_args.at(0), new_args.at(1), eltwise_type, m_output_type);

View File

@@ -173,7 +173,7 @@ std::shared_ptr<Node> op::NonMaxSuppressionIE3::clone_with_new_inputs(const ngra
m_sort_result_descending,
m_output_type);
}
throw ngraph::ngraph_error("Unsupported number of inputs: " + std::to_string(new_args.size()));
OPENVINO_THROW("Unsupported number of inputs: " + std::to_string(new_args.size()));
}
bool op::NonMaxSuppressionIE3::visit_attributes(AttributeVisitor& visitor) {

View File

@@ -26,10 +26,10 @@ op::PadIE::PadIE(const std::shared_ptr<op::v1::Pad>& pad)
auto const_node =
std::dynamic_pointer_cast<op::Constant>(pad->input(3).get_source_output().get_node_shared_ptr());
if (!const_node) {
throw ngraph_error("Pad " + pad->get_friendly_name() + " with not constant pad_value is not allowed");
OPENVINO_THROW("Pad " + pad->get_friendly_name() + " with not constant pad_value is not allowed");
}
if (!ov::op::util::get_single_value(const_node, m_pad_value)) {
throw ngraph_error("Unsupported pad value");
OPENVINO_THROW("Unsupported pad value");
}
}
constructor_validate_and_infer_types();

View File

@@ -27,7 +27,7 @@ op::PowerIE::PowerIE(const Output<ngraph::Node>& data_batch,
std::shared_ptr<Node> op::PowerIE::clone_with_new_inputs(const OutputVector& new_args) const {
if (new_args.size() != 1) {
throw ngraph_error("Incorrect number of new arguments");
OPENVINO_THROW("Incorrect number of new arguments");
}
return make_shared<PowerIE>(new_args.at(0), this->power, this->scale, this->shift, this->m_output_type);

View File

@@ -41,7 +41,7 @@ op::ScaleShiftIE::ScaleShiftIE(const Output<Node>& data_batch,
std::shared_ptr<Node> op::ScaleShiftIE::clone_with_new_inputs(const OutputVector& new_args) const {
if (new_args.size() != 3) {
throw ngraph_error("Incorrect number of new arguments");
OPENVINO_THROW("Incorrect number of new arguments");
}
return make_shared<ScaleShiftIE>(new_args.at(0), new_args.at(1), new_args.at(2), output_type);

View File

@@ -65,7 +65,7 @@ ngraph::pass::ConvertMatMulToFC::ConvertMatMulToFC() {
if (shape_a_aligned[i] != shape_b_aligned[i] && shape_a_aligned[i] > 1 && shape_b_aligned[i] > 1) {
std::ostringstream stream;
stream << "Shapes can't be aligned: " << shape_a_aligned << " " << shape_b_aligned;
throw ngraph_error(stream.str());
OPENVINO_THROW(stream.str());
}
size_t max_value = std::max(shape_a_aligned[i], shape_b_aligned[i]);
shape_a_aligned[i] = shape_b_aligned[i] = max_value;
@@ -119,7 +119,7 @@ ngraph::pass::ConvertMatMulToFC::ConvertMatMulToFC() {
std::tie(shape_a_aligned, shape_b_aligned) = get_aligned_shapes();
if (shape_a_aligned.size() < 2 || shape_b_aligned.size() < 2) {
throw ngraph_error("MatMul " + matmul->get_friendly_name() + " shapes are inconsistent.");
OPENVINO_THROW("MatMul " + matmul->get_friendly_name() + " shapes are inconsistent.");
}
// Transferring from MatMul representation: [B, I, K] * [B, K, O] = [B, I, O]

View File

@@ -66,8 +66,7 @@ ngraph::pass::ConvertNMS5ToLegacyMatcher::ConvertNMS5ToLegacyMatcher(bool force_
center_point_box = 0;
break;
default:
throw ngraph_error("NonMaxSuppression layer " + nms_5->get_friendly_name() +
" has unsupported box encoding");
OPENVINO_THROW("NonMaxSuppression layer " + nms_5->get_friendly_name() + " has unsupported box encoding");
}
std::shared_ptr<op::NonMaxSuppressionIE3> nms_legacy{nullptr};

View File

@@ -77,7 +77,7 @@ ngraph::pass::ConvertNMSToNMSIEMatcher::ConvertNMSToNMSIEMatcher() {
center_point_box = 0;
break;
default:
throw ngraph_error("NonMaxSuppression layer " + nms->get_friendly_name() + " has unsupported box encoding");
OPENVINO_THROW("NonMaxSuppression layer " + nms->get_friendly_name() + " has unsupported box encoding");
}
auto new_nms = std::make_shared<ngraph::op::NonMaxSuppressionIE>(nms->input_value(0),
nms->input_value(1),

View File

@@ -134,7 +134,7 @@ matcher_pass_callback get_callback() {
} else if (auto avg_pool = std::dynamic_pointer_cast<opset1::AvgPool>(node)) {
last = convert(last, avg_pool, new_ops);
} else {
throw ngraph_error("Reshape1DOps: op type is not supported");
OPENVINO_THROW("Reshape1DOps: op type is not supported");
}
last.get_node_shared_ptr()->set_friendly_name(node->get_friendly_name() + "/new");

View File

@@ -80,7 +80,7 @@ private:
} else if (eltwise_type == ngraph::opset5::Multiply::get_type_info_static()) {
eltwise = std::make_shared<ngraph::opset5::Multiply>(conv, const_node);
} else {
throw ngraph::ngraph_error("Unsupported element type");
OPENVINO_THROW("Unsupported element type");
}
return std::make_shared<ngraph::Function>(ngraph::NodeVector{eltwise.get_node_shared_ptr()},
@@ -121,7 +121,7 @@ private:
std::make_shared<ngraph::opset5::Multiply>(weights, ov::op::util::reshapeTo(const_node, const_shape));
conv = conv.get_node_shared_ptr()->copy_with_new_inputs({input, weights});
} else {
throw ngraph::ngraph_error("Unsupported element type");
OPENVINO_THROW("Unsupported element type");
}
return std::make_shared<ngraph::Function>(ngraph::NodeVector{conv.get_node_shared_ptr()},

View File

@@ -85,7 +85,7 @@ public:
const AddConstant& add_const,
const IsDequantization& is_dequanization) {
if (mul_const.skip && add_const.skip) {
throw ngraph::ngraph_error("Invalid arguments");
OPENVINO_THROW("Invalid arguments");
}
auto input = std::make_shared<ngraph::opset1::Parameter>(ngraph::element::f32, input_shape);