Warning as error for Windows (#13291)

* parent 6e7016ccda
author Ilya Churaev <ilya.churaev@intel.com> 1664281499 +0400
committer Ilya Churaev <ilya.churaev@intel.com> 1664510018 +0400

Fixed warnings on local machine

* Added CMAKE_COMPILE_WARNING_AS_ERROR usage

* Fixed style

* Fixed merge conflicts

* Fixed typo

* Fixed myriad build for macOS

* Fixed warning

* Fixed tests

* Disabled incorrect test

* Try to fix linux tests

* Revert "Try to fix linux tests"

This reverts commit 29224c93ff.

* Fixed tests

* Revert logic with incorrect cast

* Fixed log softmax

* Disable warning as error for cuda

* Try to fix inference_engine_s

* Fixed cmake

* Revert "Fixed cmake"

This reverts commit 87e9e4e674.

* Revert "Try to fix inference_engine_s"

This reverts commit a1adca8b05.

* WA for static symbols in inference_engine_s test library

* Fixed code style

* Fixed static definition for master

* Revert "Fixed static definition for master"

This reverts commit 20d00d215a.

* Revert "Fixed code style"

This reverts commit 0eb2362543.

* Revert "WA for static symbols in inference_engine_s test library"

This reverts commit 75ef86a79d.

* Fixed linker issue for Windows

* Disable WaE by default

* Disable warning as error in the developer package

* Try to fix dev package

* Try to fix Windows Jenkins

* Revert old behavior for treat_warn_as_err variable
This commit is contained in:
Ilya Churaev
2022-10-06 13:44:21 +04:00
committed by GitHub
parent 25f85a3beb
commit 8a9c19e3eb
285 changed files with 1125 additions and 876 deletions

View File

@@ -86,7 +86,8 @@ bool MoveFakeQuantize::transform(TransformationContext& context, ngraph::pattern
const auto concat_axis = concat_node->get_concatenation_axis();
for (size_t i = 0; i < 4; i++) {
curr_constants[i] = as_type_ptr<opset1::Constant>(fq->get_input_node_shared_ptr(i + 1));
if (!multi_chanels && curr_constants[i]->get_shape().size() > concat_axis && curr_constants[i]->get_shape()[concat_axis] != 1) {
if (!multi_chanels && concat_axis >= 0 && curr_constants[i]->get_shape().size() > static_cast<size_t>(concat_axis)
&& curr_constants[i]->get_shape()[concat_axis] != 1) {
multi_chanels = true;
}
}

View File

@@ -1066,7 +1066,7 @@ std::tuple<std::shared_ptr<Node>, std::shared_ptr<Node>> NetworkHelper::decompos
fq->get_levels(),
fq->get_auto_broadcast()),
true,
outChannelsShapeIndex);
static_cast<int>(outChannelsShapeIndex));
NetworkHelper::copyInfo(fq, newFQ);
std::shared_ptr<ngraph::Node> convert2;
@@ -1804,7 +1804,7 @@ std::vector<std::vector<std::shared_ptr<ngraph::opset1::Constant>>> NetworkHelpe
auto number_of_concat_inputs = concat->get_input_size();
const auto concatNode = as_type_ptr<opset1::Concat>(concat);
const auto concat_axis = concatNode->get_concatenation_axis();
std::vector<unsigned int> shape_axis(number_of_concat_inputs);
std::vector<int64_t> shape_axis(number_of_concat_inputs);
for (size_t i{ 0 }; i < number_of_concat_inputs; ++i) {
auto shape = concat->get_input_partial_shape(i);
shape_axis[i] = shape[concat_axis].get_length();

View File

@@ -181,7 +181,7 @@ bool PadTransformation::canBeTransformed(const TransformationContext& context, s
}
if (padsBegin[i] != 0) {
beginNonZeroIdx = i;
beginNonZeroIdx = static_cast<int>(i);
}
}
@@ -193,7 +193,7 @@ bool PadTransformation::canBeTransformed(const TransformationContext& context, s
}
if (padsEnd[i] != 0) {
endNonZeroIdx = i;
endNonZeroIdx = static_cast<int>(i);
}
}

View File

@@ -118,7 +118,7 @@ void reshapeDequantizationConstant(const std::shared_ptr<opset1::Reshape>& resha
const std::shared_ptr<Node> broadcastedConstant = getBCastedConst(originalConstant, dimensionsToBroadcast);
std::vector<int> newReshapeConstValues(reshapeOutputRank.get_length(), 1ul);
newReshapeConstValues[1] = reshapeOutputPShape[1].get_length();
newReshapeConstValues[1] = static_cast<int>(reshapeOutputPShape[1].get_length());
const std::shared_ptr<opset1::Constant> newReshapeConstant = std::make_shared<opset1::Constant>(
element::i32,
Shape({ newReshapeConstValues.size() }),