From 0feed998e3496dcd0fde57ebf055bd16bf29bda4 Mon Sep 17 00:00:00 2001 From: "Efode, Irina" Date: Wed, 20 Oct 2021 00:25:22 +0300 Subject: [PATCH] Remove incorrect OPENVINO_ASSERT, correct eltwise tests and fix threshold --- .../single_layer_tests/softmax.cpp | 1 + .../single_layer/elementwise.cpp | 4 - .../shared_test_classes/base/ov_subgraph.hpp | 14 +++- .../single_layer/eltwise.hpp | 3 + .../src/base/ov_subgraph.cpp | 34 ++++---- .../src/single_layer/eltwise.cpp | 79 +++++++++++-------- .../src/ov_tensor_utils.cpp | 32 ++++---- .../include/ngraph_functions/builders.hpp | 3 + .../ngraph_functions/utils/ngraph_helpers.hpp | 4 +- .../src/utils/ngraph_helpers.cpp | 14 ++-- 10 files changed, 112 insertions(+), 76 deletions(-) diff --git a/docs/template_plugin/tests/functional/shared_tests_instances/single_layer_tests/softmax.cpp b/docs/template_plugin/tests/functional/shared_tests_instances/single_layer_tests/softmax.cpp index c916eaa65c6..0dc6f80b1a7 100644 --- a/docs/template_plugin/tests/functional/shared_tests_instances/single_layer_tests/softmax.cpp +++ b/docs/template_plugin/tests/functional/shared_tests_instances/single_layer_tests/softmax.cpp @@ -13,6 +13,7 @@ namespace { const std::vector netPrecisions = { ov::element::f32, + ov::element::f16, }; const std::vector inputStaticShape2D = { diff --git a/inference-engine/tests/functional/inference_engine/serialization/single_layer/elementwise.cpp b/inference-engine/tests/functional/inference_engine/serialization/single_layer/elementwise.cpp index 799dd8aa5d2..c21a3d99a58 100644 --- a/inference-engine/tests/functional/inference_engine/serialization/single_layer/elementwise.cpp +++ b/inference-engine/tests/functional/inference_engine/serialization/single_layer/elementwise.cpp @@ -59,8 +59,6 @@ const auto elementiwiseParams = ::testing::Combine( ::testing::ValuesIn(secondaryInputTypes), ::testing::ValuesIn(opTypes), ::testing::ValuesIn(inputPrecisions), - ::testing::Values(ov::element::undefined), - 
::testing::Values(ov::element::undefined), ::testing::Values(CommonTestUtils::DEVICE_CPU), ::testing::Values(additionalConfig)); @@ -70,8 +68,6 @@ const auto elementiwiseParamsDyn = ::testing::Combine( ::testing::ValuesIn(secondaryInputTypes), ::testing::ValuesIn(opTypes), ::testing::ValuesIn(inputPrecisions), - ::testing::Values(ov::element::undefined), - ::testing::Values(ov::element::undefined), ::testing::Values(CommonTestUtils::DEVICE_CPU), ::testing::Values(additionalConfig)); diff --git a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/base/ov_subgraph.hpp b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/base/ov_subgraph.hpp index c93b232fbac..6fcaeabf0a6 100644 --- a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/base/ov_subgraph.hpp +++ b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/base/ov_subgraph.hpp @@ -43,6 +43,8 @@ protected: void init_input_shapes(const InputShapes& shapes); void init_input_shapes(const InputShape& shapes); +// void propagate_shape_to_all_inputs(bool propagate_first_shape = true, +// const InputShape& targetShape = InputShape()); std::shared_ptr core = ov::test::utils::PluginCache::get().core(); std::string targetDevice; @@ -59,7 +61,6 @@ protected: constexpr static const double disable_threshold = std::numeric_limits::max(); double abs_threshold = disable_threshold, rel_threshold = disable_threshold; - // TODO: iefode: change namespace names a bit later LayerTestsUtils::Summary& summary = LayerTestsUtils::Summary::getInstance();; private: @@ -67,6 +68,17 @@ private: std::vector get_plugin_outputs(); }; +//inline std::vector> static_shapes_to_test_representation(const std::vector>& shapes) { +// std::vector> result; +// for (const auto& staticShapes : shapes) { +// std::vector tmp; +// for (const auto& staticShape : staticShapes) { +// tmp.push_back({{}, {staticShape}}); +// } +// 
result.push_back(tmp); +// } +// return result; +//} inline std::vector static_shapes_to_test_representation(const std::vector& staticShapes) { std::vector result; for (const auto& staticShape : staticShapes) { diff --git a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/eltwise.hpp b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/eltwise.hpp index 9b97a7d4352..91fd76052ab 100644 --- a/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/eltwise.hpp +++ b/inference-engine/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/eltwise.hpp @@ -29,6 +29,9 @@ protected: public: static std::string getTestCaseName(const testing::TestParamInfo& obj); + +private: + void transformInputShapesAccordingEltwise(const ov::PartialShape& secondInputShape); }; } // namespace subgraph } // namespace test diff --git a/inference-engine/tests/functional/shared_test_classes/src/base/ov_subgraph.cpp b/inference-engine/tests/functional/shared_test_classes/src/base/ov_subgraph.cpp index e0f2589db2e..7672a39f9bd 100644 --- a/inference-engine/tests/functional/shared_test_classes/src/base/ov_subgraph.cpp +++ b/inference-engine/tests/functional/shared_test_classes/src/base/ov_subgraph.cpp @@ -4,6 +4,8 @@ #include #include +#include + #ifdef _WIN32 #include #endif @@ -39,22 +41,22 @@ void SubgraphBaseTest::run() { summary.updateOPsStats(function, status); SKIP_IF_CURRENT_TEST_IS_DISABLED(); - OPENVINO_ASSERT(!targetStaticShapes.empty(), "Target Static Shape is empty!!!"); + ASSERT_FALSE(targetStaticShapes.empty()) << "Target Static Shape is empty!!!"; std::string errorMessage; try { compile_model(); for (const auto& targetStaticShapeVec : targetStaticShapes) { - try { +// try { if (!inputDynamicShapes.empty()) { // resize ngraph function according new target shape - ngraph::helpers::resize_function(targetStaticShapeVec, functionRefs); + 
ngraph::helpers::resize_function(functionRefs, targetStaticShapeVec); } generate_inputs(targetStaticShapeVec); infer(); validate(); - } catch (const std::exception &ex) { - OPENVINO_ASSERT("Incorrect target static shape: ", ex.what()); - } +// } catch (const std::exception &ex) { +// throw std::runtime_error("Incorrect target static shape: " + CommonTestUtils::vec2str(targetStaticShapeVec) + " " + ex.what()); +// } } status = LayerTestsUtils::PassRate::Statuses::PASSED; } catch (const std::exception &ex) { @@ -150,19 +152,14 @@ void SubgraphBaseTest::infer() { } std::vector SubgraphBaseTest::calculate_refs() { - // nGraph interpreter does not support f16/bf16 - ngraph::pass::ConvertPrecision().run_on_function(functionRefs); - ngraph::pass::ConvertPrecision().run_on_function(functionRefs); - functionRefs->validate_nodes_and_infer_types(); - - return ngraph::helpers::interpreterFunction(functionRefs, inputs); + return ngraph::helpers::interpretFunction(functionRefs, inputs); } std::vector SubgraphBaseTest::get_plugin_outputs() { auto outputs = std::vector{}; - for (const auto& output : executableNetwork.outputs()) { - const auto& name = output.get_tensor().get_any_name(); + for (const auto& output : function->get_results()) { + const std::string name = ngraph::op::util::create_ie_output_name(output->input_value(0)); outputs.push_back(inferRequest.get_tensor(name)); } return outputs; @@ -176,8 +173,8 @@ void SubgraphBaseTest::validate() { return; } - OPENVINO_ASSERT(actualOutputs.size() == expectedOutputs.size(), - "nGraph interpreter has ", expectedOutputs.size(), " outputs, while IE ", actualOutputs.size()); + ASSERT_EQ(actualOutputs.size(), expectedOutputs.size()) << "nGraph interpreter has " + << expectedOutputs.size() << " outputs, while IE " << actualOutputs.size(); compare(expectedOutputs, actualOutputs); } @@ -198,10 +195,13 @@ void SubgraphBaseTest::init_input_shapes(const InputShape& shapes) { std::pair, std::vector>> tmpShapeObj; if (shapes.first.rank() != 
0) { tmpShapeObj.first = {shapes.first}; + for (const auto& staticShape : shapes.second) { + tmpShapeObj.second.push_back({staticShape}); + } } else { tmpShapeObj.first = {}; + tmpShapeObj.second = {shapes.second}; } - tmpShapeObj.second = {shapes.second}; init_input_shapes(tmpShapeObj); } diff --git a/inference-engine/tests/functional/shared_test_classes/src/single_layer/eltwise.cpp b/inference-engine/tests/functional/shared_test_classes/src/single_layer/eltwise.cpp index a3c13807534..73811102efe 100644 --- a/inference-engine/tests/functional/shared_test_classes/src/single_layer/eltwise.cpp +++ b/inference-engine/tests/functional/shared_test_classes/src/single_layer/eltwise.cpp @@ -48,6 +48,7 @@ void EltwiseLayerTest::generate_inputs(const std::vector& targetI bool isReal = param->get_element_type().is_real(); switch (opType) { case ngraph::helpers::EltwiseTypes::POWER: + case ngraph::helpers::EltwiseTypes::MOD: case ngraph::helpers::EltwiseTypes::FLOOR_MOD: tensor = isReal ? ov::test::utils::create_and_fill_tensor(param->get_element_type(), targetInputStaticShapes[i], 2, 2, 128) : @@ -69,6 +70,23 @@ void EltwiseLayerTest::generate_inputs(const std::vector& targetI } } +void EltwiseLayerTest::transformInputShapesAccordingEltwise(const ov::PartialShape& secondInputShape) { + // propagate shapes in case 1 shape is defined + if (inputDynamicShapes.size() == 1) { + inputDynamicShapes.push_back(inputDynamicShapes.front()); + for (auto& staticShape : targetStaticShapes) { + staticShape.push_back(staticShape.front()); + } + } + ASSERT_EQ(inputDynamicShapes.size(), 2) << "Incorrect inputs number!"; + if (secondInputShape.get_shape() == ov::Shape{1}) { + inputDynamicShapes[1] = secondInputShape; + for (auto& staticShape : targetStaticShapes) { + staticShape[1] = secondInputShape.get_shape(); + } + } +} + void EltwiseLayerTest::SetUp() { InputShapes shapes; ElementType netType; @@ -76,58 +94,57 @@ void EltwiseLayerTest::SetUp() { CommonTestUtils::OpType opType; 
ngraph::helpers::EltwiseTypes eltwiseType; Config additional_config; - std::tie(shapes, eltwiseType, secondaryInputType, opType, netType, targetDevice, additional_config) = + std::tie(shapes, eltwiseType, secondaryInputType, opType, netType, targetDevice, configuration) = this->GetParam(); init_input_shapes(shapes); - ngraph::Shape inputShape1 = targetStaticShapes.front().front(), inputShape2 = targetStaticShapes.front().back(); + auto parameters = ngraph::builder::makeDynamicParams(netType, {inputDynamicShapes.front()}); - configuration.insert(additional_config.begin(), additional_config.end()); - auto input = ngraph::builder::makeParams(netType, {inputShape1}); - - std::vector shape_input_secondary; + ov::PartialShape shape_input_secondary; switch (opType) { case CommonTestUtils::OpType::SCALAR: { - shape_input_secondary = std::vector({1}); + shape_input_secondary = {1}; break; } case CommonTestUtils::OpType::VECTOR: - shape_input_secondary = inputShape2; + shape_input_secondary = inputDynamicShapes.back(); break; default: FAIL() << "Unsupported Secondary operation type"; } + // Propagate shape_input_secondary only in the static case because all shapes are already defined in the dynamic scenario + if (shape_input_secondary.is_static()) { + transformInputShapesAccordingEltwise(shape_input_secondary); + } std::shared_ptr secondaryInput; - if (eltwiseType == ngraph::helpers::EltwiseTypes::DIVIDE || - eltwiseType == ngraph::helpers::EltwiseTypes::FLOOR_MOD || - eltwiseType == ngraph::helpers::EltwiseTypes::MOD) { - std::vector data(ngraph::shape_size(shape_input_secondary)); - data = NGraphFunctions::Utils::generateVector(ngraph::shape_size(shape_input_secondary), 10, 2); - secondaryInput = ngraph::builder::makeConstant(netType, shape_input_secondary, data); - } else if (eltwiseType == ngraph::helpers::EltwiseTypes::POWER && secondaryInputType == ngraph::helpers::InputLayerType::CONSTANT) { - // to avoid floating point overflow on some platforms, let's fill the constant with 
small numbers. - secondaryInput = ngraph::builder::makeConstant(netType, shape_input_secondary, {}, true, 3); + if (secondaryInputType == ngraph::helpers::InputLayerType::PARAMETER) { + secondaryInput = ngraph::builder::makeDynamicParams(netType, {shape_input_secondary}).front(); + parameters.push_back(std::dynamic_pointer_cast(secondaryInput)); } else { - secondaryInput = ngraph::builder::makeInputLayer(netType, secondaryInputType, shape_input_secondary); - if (secondaryInputType == ngraph::helpers::InputLayerType::PARAMETER) { - input.push_back(std::dynamic_pointer_cast(secondaryInput)); + ov::Shape shape = shape_input_secondary.get_shape(); + switch (eltwiseType) { + case ngraph::helpers::EltwiseTypes::DIVIDE: + case ngraph::helpers::EltwiseTypes::MOD: + case ngraph::helpers::EltwiseTypes::FLOOR_MOD: { + std::vector data = NGraphFunctions::Utils::generateVector(ngraph::shape_size(shape), 10, 2); + secondaryInput = ngraph::builder::makeConstant(netType, shape, data); + break; + } + case ngraph::helpers::EltwiseTypes::POWER: + secondaryInput = ngraph::builder::makeConstant(netType, shape, {}, true, 3); + break; + default: + secondaryInput = ngraph::builder::makeConstant(netType, shape, {}, true); } } - input[0]->set_friendly_name("param0"); + + parameters[0]->set_friendly_name("param0"); secondaryInput->set_friendly_name("param1"); - auto eltwise = ngraph::builder::makeEltwise(input[0], secondaryInput, eltwiseType); - function = std::make_shared(eltwise, input, "Eltwise"); - // w/a: to propagate 1 input shape for other input - for (auto& staticShape : targetStaticShapes) { - if (function->get_parameters().size() > staticShape.size()) { - for (size_t i = 0; i < function->get_parameters().size() - staticShape.size(); i++) { - staticShape.push_back(staticShape.front()); - } - } - } + auto eltwise = ngraph::builder::makeEltwise(parameters[0], secondaryInput, eltwiseType); + function = std::make_shared(eltwise, parameters, "Eltwise"); } } // namespace subgraph } // 
namespace test diff --git a/inference-engine/tests/ie_test_utils/functional_test_utils/src/ov_tensor_utils.cpp b/inference-engine/tests/ie_test_utils/functional_test_utils/src/ov_tensor_utils.cpp index b27567f7994..1e8edf0d08d 100644 --- a/inference-engine/tests/ie_test_utils/functional_test_utils/src/ov_tensor_utils.cpp +++ b/inference-engine/tests/ie_test_utils/functional_test_utils/src/ov_tensor_utils.cpp @@ -75,18 +75,18 @@ void compare(const ov::runtime::Tensor& expected, std::sort(abs_values.begin(), abs_values.end()); double abs_median; if (abs_values.size() % 2 == 0) { - abs_median = (abs_values.at(abs_values.size()/2) + abs_values.at(abs_values.size()/2 + 1))/2.; + abs_median = abs_values.size() > 2 ? + (abs_values.at(abs_values.size()/2) + abs_values.at(abs_values.size()/2 + 1))/2 : (abs_values.front() + abs_values.back())/2; } else { abs_median = abs_values.at(abs_values.size()/2); } - abs_threshold = 0.05 * abs_median; + abs_threshold = abs_median == 0.f ? 1e-5 : 0.05 * abs_median; if (std::is_integral::value) { abs_threshold = std::ceil(abs_threshold); } } } - OPENVINO_ASSERT((!std::isnan(abs_threshold) && !std::isnan(rel_threshold)), - "abs_threshold: ", abs_threshold, " rel_threshold: ", rel_threshold); + ASSERT_TRUE((!std::isnan(abs_threshold) && !std::isnan(rel_threshold))) << "abs_threshold: " << abs_threshold << " rel_threshold: " << rel_threshold; struct Error { double max = 0.; double mean = 0.; @@ -108,8 +108,8 @@ void compare(const ov::runtime::Tensor& expected, err.mean += val; err.count += less(threshold, val); }; - OPENVINO_ASSERT(!std::isnan(expected_value), "Expected value is NAN on coordinate: ", c); - OPENVINO_ASSERT(!std::isnan(actual_value), "Actual value is NAN on coordinate: ", c); + ASSERT_FALSE(std::isnan(expected_value)) << "Expected value is NAN on coordinate: " << c; + ASSERT_FALSE(std::isnan(actual_value)) << "Actual value is NAN on coordinate: " << c; auto abs = std::fabs(expected_value - actual_value); auto rel = 
expected_value ? (abs/std::fabs(expected_value)) : abs; error(abs_error, abs, abs_threshold); @@ -117,16 +117,16 @@ void compare(const ov::runtime::Tensor& expected, } abs_error.mean /= shape_size(expected_shape); rel_error.mean /= shape_size(expected_shape); - OPENVINO_ASSERT((less(abs_error.max, abs_threshold) && less(rel_error.max, rel_threshold)), - "abs_max < abs_threshold && rel_max < rel_threshold", - "\n\t abs_max: " , abs_error.max, - "\n\t\t coordinate " , abs_error.max_coordinate, - "; abs errors count " , abs_error.count , "; abs mean ", - abs_error.mean , "; abs threshold " , abs_threshold, - "\n\t rel_max: " , rel_error.max, - "\n\t\t coordinate " , rel_error.max_coordinate, - "; rel errors count " , rel_error.count , "; rel mean ", - rel_error.mean , "; rel threshold " , rel_threshold); + ASSERT_TRUE((less(abs_error.max, abs_threshold) && less(rel_error.max, rel_threshold))) << + "abs_max < abs_threshold && rel_max < rel_threshold" << + "\n\t abs_max: " << abs_error.max << + "\n\t\t coordinate " << abs_error.max_coordinate<< + "; abs errors count " << abs_error.count << "; abs mean " << + abs_error.mean << "; abs threshold " << abs_threshold << + "\n\t rel_max: " << rel_error.max << + "\n\t\t coordinate " << rel_error.max_coordinate << + "; rel errors count " << rel_error.count << "; rel mean " << + rel_error.mean << "; rel threshold " << rel_threshold; } void compare( diff --git a/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp b/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp index fda1d45781a..87066ac14a2 100644 --- a/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp +++ b/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/builders.hpp @@ -76,6 +76,9 @@ std::shared_ptr makeConstant(const element::Type &type, const std::vector< std::shared_ptr makeInputLayer(const element::Type& type, 
ngraph::helpers::InputLayerType inputType, const std::vector& shape); +std::shared_ptr makeDynamicInputLayer(const element::Type& type, ngraph::helpers::InputLayerType inputType, + const ov::PartialShape& shape); + std::shared_ptr makeBroadcast(const ngraph::Output &in, const ngraph::Output &target_shape, const ngraph::op::BroadcastType& mode, diff --git a/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/utils/ngraph_helpers.hpp b/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/utils/ngraph_helpers.hpp index dc463827077..ac59e122f29 100644 --- a/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/utils/ngraph_helpers.hpp +++ b/inference-engine/tests/ngraph_helpers/ngraph_functions/include/ngraph_functions/utils/ngraph_helpers.hpp @@ -270,7 +270,7 @@ std::vector>> const std::vector &inputTypes = {}); std::vector -interpreterFunction(const std::shared_ptr &function, +interpretFunction(const std::shared_ptr &function, const std::map& inputs); // @@ -320,7 +320,7 @@ std::ostream& operator<<(std::ostream & os, SequenceTestsMode type); std::ostream& operator<<(std::ostream & os, MemoryTransformation type); -void resize_function(const std::vector& targetInputStaticShapes, std::shared_ptr function); +void resize_function(std::shared_ptr function, const std::vector& targetInputStaticShapes); } // namespace helpers } // namespace ngraph diff --git a/inference-engine/tests/ngraph_helpers/ngraph_functions/src/utils/ngraph_helpers.cpp b/inference-engine/tests/ngraph_helpers/ngraph_functions/src/utils/ngraph_helpers.cpp index 90ce19f9aaa..a59cb225058 100644 --- a/inference-engine/tests/ngraph_helpers/ngraph_functions/src/utils/ngraph_helpers.cpp +++ b/inference-engine/tests/ngraph_helpers/ngraph_functions/src/utils/ngraph_helpers.cpp @@ -144,9 +144,8 @@ std::vector>> return outputs; } -std::vector - interpreterFunction(const std::shared_ptr &function, - const std::map& inputs) { 
+std::vector interpretFunction(const std::shared_ptr &function, + const std::map& inputs) { runtime::Backend::set_backend_shared_library_search_directory(""); auto backend = runtime::Backend::create("INTERPRETER"); @@ -172,7 +171,7 @@ std::vector } auto input = inputIt->second; - const auto &inputSize = input.get_size(); + const auto &inputSize = input.get_byte_size(); NGRAPH_CHECK(parameterSize == inputSize, "Got parameter (", parameter->get_friendly_name(), ") of size ", parameterSize, " bytes, but corresponding input with index ", parameterIndex, @@ -903,9 +902,14 @@ std::ostream& operator<<(std::ostream & os, MemoryTransformation type) { return os; } -void resize_function(const std::vector& targetInputStaticShapes, std::shared_ptr function) { +void resize_function(std::shared_ptr function, + const std::vector& targetInputStaticShapes) { auto params = function->get_parameters(); std::map shapes; + if (params.size() > targetInputStaticShapes.size()) { + throw std::runtime_error("targetInputStaticShapes.size() = " + std::to_string(targetInputStaticShapes.size()) + " != params.size() = " + + std::to_string(params.size())); + } for (size_t i = 0; i < params.size(); i++) { shapes.insert({params[i]->get_output_tensor(0).get_any_name(), targetInputStaticShapes[i]}); }