From 4e33dac7fad7c80b9bad227f930a27c6a8ff24f0 Mon Sep 17 00:00:00 2001
From: Mateusz Bencer
Date: Wed, 20 Jan 2021 08:26:41 +0100
Subject: [PATCH] Set output blobs precision for IE tests (#3905)

* Calling setPrecision on CNN network outputs

* added tests

* get_output_name refactor

* add missing test file

* run tests on all backends (or disable if backend is not available)

* fixed tests

* fixed TestEngine
---
 ngraph/test/CMakeLists.txt                  |  3 +-
 ngraph/test/backend/bucketize.in.cpp        |  6 +-
 ngraph/test/models/onnx/add_abc_3d.prototxt | 74 +++++++++++++++++
 ngraph/test/onnx/onnx_test_utils.in.cpp     | 88 +++++++++++++++++++++
 ngraph/test/runtime/ie/unit_test.manifest   |  4 +-
 ngraph/test/util/engine/ie_engines.cpp      | 68 ++++++++++++++++
 ngraph/test/util/engine/ie_engines.hpp      | 37 ++-------
 7 files changed, 245 insertions(+), 35 deletions(-)
 create mode 100644 ngraph/test/models/onnx/add_abc_3d.prototxt
 create mode 100644 ngraph/test/onnx/onnx_test_utils.in.cpp

diff --git a/ngraph/test/CMakeLists.txt b/ngraph/test/CMakeLists.txt
index f54067fae1a..524592e2bf2 100644
--- a/ngraph/test/CMakeLists.txt
+++ b/ngraph/test/CMakeLists.txt
@@ -357,7 +357,8 @@ if (NGRAPH_ONNX_IMPORT_ENABLE AND NOT NGRAPH_USE_PROTOBUF_LITE)
         onnx/onnx_import_provenance.in.cpp
         onnx/onnx_import_reshape.in.cpp
         onnx/onnx_import_rnn.in.cpp
-        onnx/onnx_import_quant.in.cpp)
+        onnx/onnx_import_quant.in.cpp
+        onnx/onnx_test_utils.in.cpp)
     list(APPEND SRC
         onnx/onnx_import_exceptions.cpp
         onnx/onnx_import_library.cpp
diff --git a/ngraph/test/backend/bucketize.in.cpp b/ngraph/test/backend/bucketize.in.cpp
index 2c5707f8060..658644d858e 100644
--- a/ngraph/test/backend/bucketize.in.cpp
+++ b/ngraph/test/backend/bucketize.in.cpp
@@ -56,16 +56,16 @@ NGRAPH_TEST(${BACKEND_NAME}, bucketize_left_edge)
     const auto data = make_shared<op::Parameter>(element::i32, data_shape);
     const auto buckets = make_shared<op::Parameter>(element::f32, bucket_shape);
-    const auto bucketize = make_shared<op::v3::Bucketize>(data, buckets, element::i64, false);
+    const auto bucketize = make_shared<op::v3::Bucketize>(data, buckets, element::i32, false);
     const auto f = make_shared<Function>(bucketize, ParameterVector{data, buckets});
 
     vector<int32_t> data_vect = {8, 1, 2, 1, 8, 5, 1, 5, 0, 20};
     vector<float> buckets_vect = {1.f, 4.f, 10.f, 20.f};
-    vector<int64_t> expected_vect = {2, 1, 1, 1, 2, 2, 1, 2, 0, 4};
+    vector<int32_t> expected_vect = {2, 1, 1, 1, 2, 2, 1, 2, 0, 4};
 
     auto test_case = test::TestCase<TestEngine>(f);
     test_case.add_input<int32_t>(data_shape, data_vect);
     test_case.add_input<float>(bucket_shape, buckets_vect);
-    test_case.add_expected_output<int64_t>(data_shape, expected_vect);
+    test_case.add_expected_output<int32_t>(data_shape, expected_vect);
     test_case.run();
 }
 
diff --git a/ngraph/test/models/onnx/add_abc_3d.prototxt b/ngraph/test/models/onnx/add_abc_3d.prototxt
new file mode 100644
index 00000000000..4862444ea19
--- /dev/null
+++ b/ngraph/test/models/onnx/add_abc_3d.prototxt
@@ -0,0 +1,74 @@
+ir_version: 3
+producer_name: "nGraph ONNX Importer"
+graph {
+  node {
+    input: "A"
+    input: "B"
+    output: "X"
+    name: "add_node1"
+    op_type: "Add"
+  }
+  node {
+    input: "X"
+    input: "C"
+    output: "Y"
+    name: "add_node2"
+    op_type: "Add"
+  }
+  name: "test_graph"
+  input {
+    name: "A"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 3
+          }
+        }
+      }
+    }
+  }
+  input {
+    name: "B"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 3
+          }
+        }
+      }
+    }
+  }
+  input {
+    name: "C"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 3
+          }
+        }
+      }
+    }
+  }
+  output {
+    name: "Y"
+    type {
+      tensor_type {
+        elem_type: 1
+        shape {
+          dim {
+            dim_value: 3
+          }
+        }
+      }
+    }
+  }
+}
+opset_import {
+  version: 4
+}
diff --git a/ngraph/test/onnx/onnx_test_utils.in.cpp b/ngraph/test/onnx/onnx_test_utils.in.cpp
new file mode 100644
index 00000000000..a4e50b3efa7
--- /dev/null
+++ b/ngraph/test/onnx/onnx_test_utils.in.cpp
@@ -0,0 +1,88 @@
+//*****************************************************************************
+// Copyright 2017-2021 Intel Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//*****************************************************************************
+
+#include <algorithm>
+#include "gtest/gtest.h"
+
+#include "default_opset.hpp"
+#include "ngraph/file_util.hpp"
+#include "ngraph/op/util/op_types.hpp"
+#include "onnx_import/editor/editor.hpp"
+#include "onnx_import/onnx.hpp"
+#include "util/test_control.hpp"
+
+#include "util/all_close.hpp"
+#include "util/all_close_f.hpp"
+#include "util/engine/test_engines.hpp"
+#include "util/test_case.hpp"
+#include "util/test_control.hpp"
+#include "util/test_tools.hpp"
+
+using namespace ngraph;
+
+static std::string s_manifest = "${MANIFEST}";
+
+template <typename T>
+class ElemTypesTests : public ::testing::Test
+{
+};
+TYPED_TEST_CASE_P(ElemTypesTests);
+
+TYPED_TEST_P(ElemTypesTests, onnx_test_add_abc_set_precision)
+{
+    using TestEngine = test::ENGINE_CLASS_NAME(${BACKEND_NAME});
+    using DataType = TypeParam;
+    const element::Type ng_type = element::from<DataType>();
+
+    onnx_import::ONNXModelEditor editor{
+        file_util::path_join(SERIALIZED_ZOO, "onnx/add_abc_3d.prototxt")};
+
+    editor.set_input_types({{"A", ng_type}, {"B", ng_type}, {"C", ng_type}});
+
+    const auto function = onnx_import::import_onnx_model(editor);
+    auto test_case = test::TestCase<TestEngine>(function);
+    test_case.add_input<DataType>(std::vector<DataType>{1, 2, 3});
+    test_case.add_input<DataType>(std::vector<DataType>{4, 5, 6});
+    test_case.add_input<DataType>(std::vector<DataType>{7, 8, 9});
+    test_case.add_expected_output<DataType>(Shape{3}, std::vector<DataType>{12, 15, 18});
+    test_case.run();
+}
+
+TYPED_TEST_P(ElemTypesTests, onnx_test_split_multioutput_set_precision)
+{
+    using TestEngine = test::ENGINE_CLASS_NAME(${BACKEND_NAME});
+    using DataType = TypeParam;
+    const element::Type ng_type = element::from<DataType>();
+
+    onnx_import::ONNXModelEditor editor{
+        file_util::path_join(SERIALIZED_ZOO, "onnx/split_equal_parts_default.prototxt")};
+
+    editor.set_input_types({{"input", ng_type}});
+
+    const auto function = onnx_import::import_onnx_model(editor);
+    auto test_case = test::TestCase<TestEngine>(function);
+    test_case.add_input<DataType>(std::vector<DataType>{1, 2, 3, 4, 5, 6});
+    test_case.add_expected_output<DataType>(Shape{2}, std::vector<DataType>{1, 2});
+    test_case.add_expected_output<DataType>(Shape{2}, std::vector<DataType>{3, 4});
+    test_case.add_expected_output<DataType>(Shape{2}, std::vector<DataType>{5, 6});
+    test_case.run();
+}
+
+REGISTER_TYPED_TEST_CASE_P(ElemTypesTests,
+                           onnx_test_add_abc_set_precision,
+                           onnx_test_split_multioutput_set_precision);
+typedef ::testing::Types<int8_t, int16_t, int32_t, uint8_t, float> ElemTypes;
+INSTANTIATE_TYPED_TEST_CASE_P(${BACKEND_NAME}, ElemTypesTests, ElemTypes);
diff --git a/ngraph/test/runtime/ie/unit_test.manifest b/ngraph/test/runtime/ie/unit_test.manifest
index 7e473750e65..226e503552f 100644
--- a/ngraph/test/runtime/ie/unit_test.manifest
+++ b/ngraph/test/runtime/ie/unit_test.manifest
@@ -1586,5 +1586,7 @@ IE_GPU.onnx_model_gather_elements_int32_axis_0
 IE_GPU.onnx_model_gather_elements_int8_axis_1
 IE_GPU.onnx_model_gather_elements_float_3D_axis_2
 
-# incorrect result for Minimum if u16 type is unsupported
+# incorrect result for Minimum if u16 type is used
 minimum_u16
+# incorrect result on Windows if i16 type is used
+IE_CPU/ElemTypesTests/1.onnx_test_add_abc_set_precision
\ No newline at end of file
diff --git a/ngraph/test/util/engine/ie_engines.cpp b/ngraph/test/util/engine/ie_engines.cpp
index e7d24c7f678..f7cbdd3b62b 100644
--- a/ngraph/test/util/engine/ie_engines.cpp
+++ b/ngraph/test/util/engine/ie_engines.cpp
@@ -123,6 +123,41 @@ namespace
 {
     }
 };
+namespace
+{
+    InferenceEngine::Precision ng_type_to_precision(const element::Type& target_type)
+    {
+#if defined(__GNUC__) && !(__GNUC__ == 4 && __GNUC_MINOR__ == 8)
+#pragma GCC diagnostic push
+#pragma GCC diagnostic error "-Wswitch"
+#pragma GCC diagnostic error "-Wswitch-enum"
+#endif
+        switch (target_type)
+        {
+        case element::Type_t::boolean: return InferenceEngine::Precision::BOOL; break;
+        case element::Type_t::bf16: return InferenceEngine::Precision::BF16; break;
+        case element::Type_t::f16: return InferenceEngine::Precision::FP16; break;
+        case element::Type_t::f32: return InferenceEngine::Precision::FP32; break;
+        case element::Type_t::f64: return InferenceEngine::Precision::FP64; break;
+        case element::Type_t::i8: return InferenceEngine::Precision::I8; break;
+        case element::Type_t::i16: return InferenceEngine::Precision::I16; break;
+        case element::Type_t::i32: return InferenceEngine::Precision::I32; break;
+        case element::Type_t::i64: return InferenceEngine::Precision::I64; break;
+        case element::Type_t::u8: return InferenceEngine::Precision::U8; break;
+        case element::Type_t::u16: return InferenceEngine::Precision::U16; break;
+        case element::Type_t::u32: return InferenceEngine::Precision::U32; break;
+        case element::Type_t::u64: return InferenceEngine::Precision::U64; break;
+        case element::Type_t::u1: throw std::runtime_error("unsupported type");
+        case element::Type_t::undefined: throw std::runtime_error("unsupported type");
+        case element::Type_t::dynamic: throw std::runtime_error("unsupported type");
+        }
+#if defined(__GNUC__) && !(__GNUC__ == 4 && __GNUC_MINOR__ == 8)
+#pragma GCC diagnostic pop
+#endif
+        throw std::runtime_error("unsupported type");
+    }
+}
+
 test::IE_Engine::IE_Engine(const std::shared_ptr<Function> function, const char* device)
     : m_function{function}
 {
@@ -131,6 +166,13 @@ test::IE_Engine::IE_Engine(const std::shared_ptr<Function> function, const char*
     m_network_inputs = cnn_network.getInputsInfo();
     m_network_outputs = cnn_network.getOutputsInfo();
 
+    for (const auto& result : m_function->get_results())
+    {
+        const auto& out_name = get_output_name(result);
+        m_network_outputs[out_name]->setPrecision(
+            ng_type_to_precision(result->get_element_type()));
+    }
+
     InferenceEngine::Core ie;
     auto exe_network = ie.LoadNetwork(cnn_network, device);
     m_inference_req = exe_network.CreateInferRequest();
@@ -172,6 +214,32 @@ testing::AssertionResult test::IE_Engine::compare_results(const size_t tolerance
     return comparison_result;
 }
 
+std::string test::IE_Engine::get_output_name(const std::shared_ptr<op::Result>& ng_result)
+{
+    if (m_function->get_results().size() == 1)
+    {
+        // ng_result argument is ignored
+        return m_network_outputs.begin()->first;
+    }
+    else
+    {
+        const auto& prev_layer = ng_result->input_value(0);
+        auto network_out_name = prev_layer.get_node_shared_ptr()->get_friendly_name();
+        if (prev_layer.get_node_shared_ptr()->get_output_size() != 1)
+        {
+            network_out_name += "." + std::to_string(prev_layer.get_index());
+        }
+
+        NGRAPH_CHECK(m_network_outputs.count(network_out_name) == 1,
+                     "nGraph function's output number ",
+                     m_allocated_expected_outputs,
+                     " was not found in the CNNNetwork built from it. Function's output name: ",
+                     network_out_name);
+
+        return network_out_name;
+    }
+}
+
 testing::AssertionResult
 test::IE_Engine::compare_results_with_tolerance_as_fp(const float tolerance)
 {
diff --git a/ngraph/test/util/engine/ie_engines.hpp b/ngraph/test/util/engine/ie_engines.hpp
index 9cb3f83e610..5e393cac6ed 100644
--- a/ngraph/test/util/engine/ie_engines.hpp
+++ b/ngraph/test/util/engine/ie_engines.hpp
@@ -90,36 +90,10 @@ namespace ngraph
             void add_expected_output(const ngraph::Shape& expected_shape,
                                      const std::vector<Value>& values)
             {
-                std::string network_out_name;
-                InferenceEngine::DataPtr network_output;
-                if (m_function->get_results().size() == 1)
-                {
-                    network_out_name = m_network_outputs.begin()->first;
-                    network_output = m_network_outputs.begin()->second;
-                }
-                else
-                {
-                    const auto& function_output =
-                        m_function->get_results()[m_allocated_expected_outputs];
-
-                    // determine output name in IE convention
-                    // (based on name of node which produces the result)
-                    const auto& prev_layer = function_output->input_value(0);
-                    network_out_name = prev_layer.get_node_shared_ptr()->get_friendly_name();
-                    if (prev_layer.get_node_shared_ptr()->get_output_size() != 1)
-                    {
-                        network_out_name += "." + std::to_string(prev_layer.get_index());
-                    }
-
-                    NGRAPH_CHECK(
-                        m_network_outputs.count(network_out_name) == 1,
-                        "nGraph function's output number ",
-                        m_allocated_expected_outputs,
-                        " was not found in the CNNNetwork built from it. Function's output name: ",
-                        network_out_name);
-
-                    network_output = m_network_outputs[network_out_name];
-                }
+                const auto& function_output =
+                    m_function->get_results()[m_allocated_expected_outputs];
+                std::string network_out_name = get_output_name(function_output);
+                InferenceEngine::DataPtr network_output = m_network_outputs[network_out_name];
 
                 auto blob =
                     std::make_shared<InferenceEngine::TBlob<Value>>(network_output->getTensorDesc());
@@ -158,6 +132,9 @@ namespace ngraph
             /// Retrieves a set of all ops IE can execute
             std::set<NodeTypeInfo> get_ie_ops() const;
+
+            // Get the name of the CNNNetwork output that corresponds to a result of the nG Function
+            std::string get_output_name(const std::shared_ptr<op::Result>& ng_result);
         };
 
         class IE_CPU_Engine final : public IE_Engine
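--
Note on the output-naming convention (a sketch, not part of the applied patch):
get_output_name() relies on the CNNNetwork keying each output blob by the
friendly name of the node that feeds the corresponding Result, with
".<output index>" appended only when that node has more than one output.
The names below are hypothetical, for illustration only:

    // Suppose ng_result is fed by output 1 of a multi-output Split node
    // whose friendly name is "Split_7" (both names are made up here).
    const auto prev = ng_result->input_value(0);
    auto name = prev.get_node_shared_ptr()->get_friendly_name();  // "Split_7"
    if (prev.get_node_shared_ptr()->get_output_size() != 1)
    {
        name += "." + std::to_string(prev.get_index());           // -> "Split_7.1"
    }
    // A single-output producer keeps the bare name "Split_7"; a function with
    // exactly one Result short-circuits to m_network_outputs.begin()->first.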