diff --git a/inference-engine/tests/functional/inference_engine/serialization/single_layer/convert.cpp b/inference-engine/tests/functional/inference_engine/serialization/single_layer/convert.cpp
new file mode 100644
index 00000000000..c56d444dcdc
--- /dev/null
+++ b/inference-engine/tests/functional/inference_engine/serialization/single_layer/convert.cpp
@@ -0,0 +1,37 @@
+// Copyright (C) 2021 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "shared_test_classes/single_layer/convert.hpp"
+
+#include <vector>
+
+using namespace LayerTestsDefinitions;
+
+namespace {
+const std::vector<std::vector<size_t>> inShape = {{1, 2, 3, 4}};
+
+const std::vector<InferenceEngine::Precision> precisions = {
+    InferenceEngine::Precision::BOOL, InferenceEngine::Precision::U8,
+    InferenceEngine::Precision::I8,   InferenceEngine::Precision::U16,
+    InferenceEngine::Precision::I16,  InferenceEngine::Precision::U32,
+    InferenceEngine::Precision::I32,  InferenceEngine::Precision::U64,
+    InferenceEngine::Precision::I64,  InferenceEngine::Precision::BF16,
+    InferenceEngine::Precision::FP16, InferenceEngine::Precision::FP32,
+    InferenceEngine::Precision::FP64};
+
+TEST_P(ConvertLayerTest, Serialize) {
+    Serialize();
+}
+
+INSTANTIATE_TEST_CASE_P(
+    smoke_Serialization_ConvertLayerTest, ConvertLayerTest,
+    ::testing::Combine(::testing::Values(inShape),
+                       ::testing::ValuesIn(precisions),
+                       ::testing::ValuesIn(precisions),
+                       ::testing::Values(InferenceEngine::Layout::ANY),
+                       ::testing::Values(InferenceEngine::Layout::ANY),
+                       ::testing::Values(CommonTestUtils::DEVICE_CPU)),
+    ConvertLayerTest::getTestCaseName);
+
+} // namespace
\ No newline at end of file
diff --git a/ngraph/core/reference/include/ngraph/runtime/reference/convert.hpp b/ngraph/core/reference/include/ngraph/runtime/reference/convert.hpp
index 0374cb18da0..8591f88c794 100644
--- a/ngraph/core/reference/include/ngraph/runtime/reference/convert.hpp
+++ b/ngraph/core/reference/include/ngraph/runtime/reference/convert.hpp
@@ -29,6 +29,7 @@ namespace ngraph
             template <>
             void convert(const float16* arg, float* out, size_t count);
 
+            // overload to handle ngraph::boolean (it is stored as char)
             template <typename TI, typename TO>
             typename std::enable_if<std::is_same<TO, char>::value>::type
                 convert(const TI* arg, TO* out, size_t count)
diff --git a/ngraph/core/src/op/convert.cpp b/ngraph/core/src/op/convert.cpp
index 47ce6907322..8999113ba08 100644
--- a/ngraph/core/src/op/convert.cpp
+++ b/ngraph/core/src/op/convert.cpp
@@ -26,6 +26,23 @@ op::Convert::Convert(const Output<Node>& arg, const element::Type& destination_t
 void op::Convert::validate_and_infer_types()
 {
     NGRAPH_OP_SCOPE(v0_Convert_validate_and_infer_types);
+    const element::Type data_et = get_input_element_type(0);
+    const element::Type destination_et = m_destination_type;
+
+    NODE_VALIDATION_CHECK(this,
+                          data_et != element::u1 && data_et != element::u4 &&
+                              data_et != element::i4,
+                          "Input element type '",
+                          data_et,
+                          "' is not supported.");
+
+    NODE_VALIDATION_CHECK(this,
+                          destination_et != element::u1 && destination_et != element::u4 &&
+                              destination_et != element::i4,
+                          "Destination element type '",
+                          destination_et,
+                          "' is not supported.");
+
     set_output_type(0, m_destination_type, get_input_partial_shape(0));
 }
diff --git a/ngraph/test/backend/convert.in.cpp b/ngraph/test/backend/convert.in.cpp
index ab9f33c6743..46159d0cbc2 100644
--- a/ngraph/test/backend/convert.in.cpp
+++ b/ngraph/test/backend/convert.in.cpp
@@ -21,138 +21,421 @@ using namespace ngraph;
 static string s_manifest = "${MANIFEST}";
 
 using TestEngine = test::ENGINE_CLASS_NAME(${BACKEND_NAME});
-
-NGRAPH_TEST(${BACKEND_NAME}, convert_int32_float32)
+namespace
 {
-    Shape shape{2, 2};
-    auto A = make_shared<op::Parameter>(element::i32, shape);
-    auto f = make_shared<Function>(make_shared<op::Convert>(A, element::f32), ParameterVector{A});
+    template <typename T_IN, typename T_OUT>
+    void ConvertTest(const std::vector<T_IN>& input,
+                     const Shape& input_shape,
+                     const ngraph::element::Type& input_type,
+                     const std::vector<T_OUT>& expected_output,
+                     const ngraph::element::Type& expected_output_type)
+    {
+        const auto in = make_shared<op::Parameter>(input_type, input_shape);
+        const auto convert = make_shared<op::Convert>(in, expected_output_type);
+        const auto f = make_shared<Function>(NodeVector{convert}, ParameterVector{in});
 
-    auto backend = runtime::Backend::create("${BACKEND_NAME}");
+        auto test_case = test::TestCase<TestEngine>(f);
+        test_case.add_input(input);
+        test_case.add_expected_output(expected_output);
 
-    // Create some tensors for input/output
-    auto a = backend->create_tensor(element::i32, shape);
-    copy_data(a, vector<int32_t>{281, 2, 3, 4});
-    auto result = backend->create_tensor(element::f32, shape);
+        test_case.run();
+    }
+} // namespace
 
-    auto handle = backend->compile(f);
-    handle->call_with_validate({result}, {a});
-    EXPECT_TRUE(test::all_close_f((vector<float>{281, 2, 3, 4}), read_vector<float>(result)));
+// destination: boolean
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_boolean)
+{
+    const uint8_t lowest = std::numeric_limits<uint8_t>::lowest();
+    const uint8_t max = std::numeric_limits<uint8_t>::max();
+
+    const std::vector<uint8_t> input{0, 12, 23, 0, lowest, max};
+    const Shape input_shape{2, 3};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<char> expected_output{0, 1, 1, 0, 0, 1};
+    const element::Type expected_output_type = ngraph::element::boolean;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
 }
 
-NGRAPH_TEST(${BACKEND_NAME}, convert_uint16_float32)
+NGRAPH_TEST(${BACKEND_NAME}, convert_i32_to_boolean)
 {
-    Shape shape{2, 2};
-    auto A = make_shared<op::Parameter>(element::u16, shape);
-    auto f = make_shared<Function>(make_shared<op::Convert>(A, element::f32), ParameterVector{A});
+    const int32_t lowest = std::numeric_limits<int32_t>::lowest();
+    const int32_t max = std::numeric_limits<int32_t>::max();
 
-    auto backend = runtime::Backend::create("${BACKEND_NAME}");
+    const std::vector<int32_t> input{0, -12, 23, 0, lowest, max};
+    const Shape input_shape{2, 3};
+    const element::Type input_type = ngraph::element::i32;
 
-    // Create some tensors for input/output
-    auto a = backend->create_tensor(element::u16, shape);
-    copy_data(a, vector<uint16_t>{1, 2, 3, 4});
-    auto result = backend->create_tensor(element::f32, shape);
+    const std::vector<char> expected_output{0, 1, 1, 0, 1, 1};
+    const element::Type expected_output_type = ngraph::element::boolean;
 
-    auto handle = backend->compile(f);
-    handle->call_with_validate({result}, {a});
-    EXPECT_TRUE(test::all_close_f(
-        (vector<float>{1, 2, 3, 4}), read_vector<float>(result), MIN_FLOAT_TOLERANCE_BITS));
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
 }
 
-NGRAPH_TEST(${BACKEND_NAME}, convert_int32_bool)
+NGRAPH_TEST(${BACKEND_NAME}, convert_f32_to_boolean)
 {
-    Shape shape{2, 3};
-    auto A = make_shared<op::Parameter>(element::i32, shape);
-    auto f =
-        make_shared<Function>(make_shared<op::Convert>(A, element::boolean), ParameterVector{A});
+    const float lowest = std::numeric_limits<float>::lowest();
+    const float max = std::numeric_limits<float>::max();
+    const float min = std::numeric_limits<float>::min();
+    const float pos_inf = std::numeric_limits<float>::infinity();
+    const float neg_inf = -std::numeric_limits<float>::infinity();
 
-    auto backend = runtime::Backend::create("${BACKEND_NAME}");
+    const std::vector<float> input{0.f, 1.5745f, 0.12352f, 0.f, lowest, max, min, pos_inf, neg_inf};
+    const Shape input_shape{3, 3};
+    const element::Type input_type = ngraph::element::f32;
 
-    int32_t lowest = std::numeric_limits<int32_t>::lowest();
-    int32_t max = std::numeric_limits<int32_t>::max();
+    const std::vector<char> expected_output{0, 1, 1, 0, 1, 1, 1, 1, 1};
+    const element::Type expected_output_type = ngraph::element::boolean;
 
-    // Create some tensors for input/output
-    auto a = backend->create_tensor(element::i32, shape);
-    copy_data(a, vector<int32_t>{0, 12, 23, 0, lowest, max});
-    auto result = backend->create_tensor(element::boolean, shape);
-
-    auto handle = backend->compile(f);
-    handle->call_with_validate({result}, {a});
-    EXPECT_EQ((vector<char>{0, 1, 1, 0, 1, 1}), read_vector<char>(result));
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
 }
 
-NGRAPH_TEST(${BACKEND_NAME}, convert_float32_bool)
+// destination: bf16
+NGRAPH_TEST(${BACKEND_NAME}, convert_f32_to_bf16)
 {
-    Shape shape{3, 3};
-    auto A = make_shared<op::Parameter>(element::f32, shape);
-    auto f =
-        make_shared<Function>(make_shared<op::Convert>(A, element::boolean), ParameterVector{A});
-
-    auto backend = runtime::Backend::create("${BACKEND_NAME}");
-
-    float lowest = std::numeric_limits<float>::lowest();
-    float max = std::numeric_limits<float>::max();
-    float min = std::numeric_limits<float>::min();
-    float pos_inf = std::numeric_limits<float>::infinity();
-    float neg_inf = -std::numeric_limits<float>::infinity();
-
-    // Create some tensors for input/output
-    auto a = backend->create_tensor(element::f32, shape);
-    copy_data(a, vector<float>{0.f, 1.5745f, 0.12352f, 0.f, lowest, max, min, pos_inf, neg_inf});
-    auto result = backend->create_tensor(element::boolean, shape);
-
-    auto handle = backend->compile(f);
-    handle->call_with_validate({result}, {a});
-    EXPECT_EQ((vector<char>{0, 1, 1, 0, 1, 1, 1, 1, 1}), read_vector<char>(result));
-}
-
-NGRAPH_TEST(${BACKEND_NAME}, convert_float32_bf16)
-{
-    const vector<float> a_data = {
+    const std::vector<float> input{
         0.5f, 1.5f, 0.5f, 2.5f, 1.5f, 0.5f, 3.5f, 2.5f, 0.5f, 0.5f, 2.5f, 0.5f, 0.5f, 0.5f, 1.5f};
+    const Shape input_shape{1, 1, 3, 5};
+    const element::Type input_type = ngraph::element::f32;
 
-    const auto A = make_shared<op::Parameter>(element::f32, Shape{1, 1, 3, 5});
-    const auto convert = make_shared<op::Convert>(A, element::bf16);
-    const auto f = make_shared<Function>(NodeVector{convert}, ParameterVector{A});
+    const std::vector<bfloat16> expected_output(std::begin(input), std::end(input));
+    const element::Type expected_output_type = ngraph::element::bf16;
 
-    auto test_case = test::TestCase<TestEngine>(f);
-    test_case.add_input(a_data);
-    test_case.add_expected_output<bfloat16>(
-        std::vector<bfloat16>(std::begin(a_data), std::end(a_data)));
-
-    test_case.run();
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
 }
 
+// destination: f16
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_f16)
+{
+    const std::vector<uint8_t> input{0, 10, 15, 20, 43, 56, 78, 99, 102, 130, 142};
+    const Shape input_shape{11};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<float16> expected_output{0, 10, 15, 20, 43, 56, 78, 99, 102, 130, 142};
+    const element::Type expected_output_type = ngraph::element::f16;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+// destination: f32
+NGRAPH_TEST(${BACKEND_NAME}, convert_i4_to_f32_is_not_supported_yet)
+{
+    const std::vector<uint8_t> input{0x00, 0x00};
+    const Shape input_shape{2, 2};
+    const element::Type input_type = ngraph::element::i4;
+
+    const std::vector<float> expected_output{0.0f, 0.0f, 0.0f, 0.0f};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ASSERT_THROW(ConvertTest(input, input_shape, input_type, expected_output, expected_output_type),
+                 ngraph::NodeValidationFailure);
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, convert_i8_to_f32)
+{
+    const std::vector<int8_t> input{-127, -0, 0, 127};
+    const Shape input_shape{2, 2};
+    const element::Type input_type = ngraph::element::i8;
+
+    const std::vector<float> expected_output{-127.0f, -0.0f, 0.0f, 127.0f};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, convert_i16_to_f32)
+{
+    const std::vector<int16_t> input{-32000, -0, 0, 32000};
+    const Shape input_shape{2, 2};
+    const element::Type input_type = ngraph::element::i16;
+
+    const std::vector<float> expected_output{-32000.0f, -0.0f, 0.0f, 32000.0f};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, convert_i32_to_f32)
+{
+    const std::vector<int32_t> input{-64000, -0, 0, 64000};
+    const Shape input_shape{2, 2};
+    const element::Type input_type = ngraph::element::i32;
+
+    const std::vector<float> expected_output{-64000.0f, -0.0f, 0.0f, 64000.0f};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, convert_i64_to_f32)
+{
+    const std::vector<int64_t> input{-64000, -0, 0, 64000};
+    const Shape input_shape{2, 2};
+    const element::Type input_type = ngraph::element::i64;
+
+    const std::vector<float> expected_output{-64000.0f, -0.0f, 0.0f, 64000.0f};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, convert_u1_to_f32_is_not_supported_yet)
+{
+    const std::vector<uint8_t> input{0x00};
+    const Shape input_shape{2, 2};
+    const element::Type input_type = ngraph::element::u1;
+
+    const std::vector<float> expected_output{0.0f, 0.0f, 0.0f, 0.0f};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ASSERT_THROW(ConvertTest(input, input_shape, input_type, expected_output, expected_output_type),
+                 ngraph::NodeValidationFailure);
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, convert_u4_to_f32_is_not_supported_yet)
+{
+    const std::vector<uint8_t> input{0x00, 0x00};
+    const Shape input_shape{2, 2};
+    const element::Type input_type = ngraph::element::u4;
+
+    const std::vector<float> expected_output{0.0f, 0.0f, 0.0f, 0.0f};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ASSERT_THROW(ConvertTest(input, input_shape, input_type, expected_output, expected_output_type),
+                 ngraph::NodeValidationFailure);
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_f32)
+{
+    const std::vector<uint8_t> input{255, 128, 32, 0};
+    const Shape input_shape{2, 2};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<float> expected_output{255.0f, 128.0f, 32.0f, 0.0f};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, convert_u16_to_f32)
+{
+    const std::vector<uint16_t> input{64000, 32000, 128, 0};
+    const Shape input_shape{2, 2};
+    const element::Type input_type = ngraph::element::u16;
+
+    const std::vector<float> expected_output{64000.0f, 32000.0f, 128.0f, 0.0f};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, convert_u32_to_f32)
+{
+    const std::vector<uint32_t> input{4000000, 2000000, 128, 0};
+    const Shape input_shape{2, 2};
+    const element::Type input_type = ngraph::element::u32;
+
+    const std::vector<float> expected_output{4000000.0f, 2000000.0f, 128.0f, 0.0f};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+NGRAPH_TEST(${BACKEND_NAME}, convert_u64_to_f32)
+{
+    const std::vector<uint64_t> input{4000000, 2000000, 128, 0};
+    const Shape input_shape{2, 2};
+    const element::Type input_type = ngraph::element::u64;
+
+    const std::vector<float> expected_output{4000000.0f, 2000000.0f, 128.0f, 0.0f};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
-NGRAPH_TEST(${BACKEND_NAME}, convert_bf16_float32)
+NGRAPH_TEST(${BACKEND_NAME}, convert_bf16_to_f32)
 {
-    const vector<float> a_data = {
+    const std::vector<bfloat16> input{
         0.5, 1.5, 0.5, 2.5, 1.5, 0.5, 3.5, 2.5, 0.5, 0.5, 2.5, 0.5, 0.5, 0.5, 1.5};
+    const Shape input_shape{1, 1, 3, 5};
+    const element::Type input_type = ngraph::element::bf16;
 
-    const auto A = make_shared<op::Parameter>(element::bf16, Shape{1, 1, 3, 5});
-    const auto convert = make_shared<op::Convert>(A, element::f32);
-    const auto f = make_shared<Function>(NodeVector{convert}, ParameterVector{A});
+    const std::vector<float> expected_output(std::begin(input), std::end(input));
+    const element::Type expected_output_type = ngraph::element::f32;
 
-    auto test_case = test::TestCase<TestEngine>(f);
-    test_case.add_input(a_data);
-    test_case.add_expected_output<float>(std::vector<float>(std::begin(a_data), std::end(a_data)));
-
-    test_case.run();
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
 }
 
-NGRAPH_TEST(${BACKEND_NAME}, convert_fp16_float32)
+NGRAPH_TEST(${BACKEND_NAME}, convert_f16_to_f32)
 {
-    std::vector<float> f32vec = {-20.5, -15, -10.5, -0.5, 0, 0.5, 10.5, 15, 20.5};
-    std::vector<float16> f16vec(std::begin(f32vec), std::end(f32vec));
-    std::vector<float> result(f32vec.size());
-    runtime::reference::convert(f16vec.data(), result.data(), f32vec.size());
-    EXPECT_EQ(result, f32vec);
+    const std::vector<float16> input{-20.5, -15, -10.5, -0.5, 0, 0.5, 10.5, 15, 20.5};
+    const Shape input_shape{3, 3};
+    const element::Type input_type = ngraph::element::f16;
+
+    const std::vector<float> expected_output{-20.5, -15, -10.5, -0.5, 0, 0.5, 10.5, 15, 20.5};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
 }
 
-NGRAPH_TEST(${BACKEND_NAME}, convert_uint8_fp16)
+NGRAPH_TEST(${BACKEND_NAME}, convert_f32_to_f32)
 {
-    std::vector<uint8_t> u8vec = {0, 10, 15, 20, 43, 56, 78, 99, 102, 130, 142};
-    std::vector<float16> f16vec(std::begin(u8vec), std::end(u8vec));
-    std::vector<float16> result(u8vec.size());
-    runtime::reference::convert(u8vec.data(), result.data(), u8vec.size());
-    EXPECT_EQ(result, f16vec);
+    const std::vector<float> input{-20.5, -15, -10.5, -0.5, 0, 0.5, 10.5, 15, 20.5};
+    const Shape input_shape{3, 3};
+    const element::Type input_type = ngraph::element::f32;
+
+    const std::vector<float> expected_output{-20.5, -15, -10.5, -0.5, 0, 0.5, 10.5, 15, 20.5};
+    const element::Type expected_output_type = ngraph::element::f32;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+// destination: f64
+// not supported by IE, hence no tests
+
+// destination: i4
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_i4_is_not_supported_yet)
+{
+    const std::vector<uint8_t> input{0, 0, 0, 0};
+    const Shape input_shape{4};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<uint8_t> expected_output{0x00, 0x00};
+    const element::Type expected_output_type = ngraph::element::i4;
+
+    ASSERT_THROW(ConvertTest(input, input_shape, input_type, expected_output, expected_output_type),
+                 ngraph::NodeValidationFailure);
+}
+
+// destination: i8
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_i8)
+{
+    const std::vector<uint8_t> input{0, 10, 15, 20, 43, 56, 78, 99, 102, 110, 128};
+    const Shape input_shape{11};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<int8_t> expected_output{0, 10, 15, 20, 43, 56, 78, 99, 102, 110, 127};
+    const element::Type expected_output_type = ngraph::element::i8;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+// destination: i16
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_i16)
+{
+    const std::vector<uint8_t> input{0, 10, 15, 20, 43, 56, 78, 99, 102, 130, 142};
+    const Shape input_shape{11};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<int16_t> expected_output{0, 10, 15, 20, 43, 56, 78, 99, 102, 130, 142};
+    const element::Type expected_output_type = ngraph::element::i16;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+// destination: i32
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_i32)
+{
+    const std::vector<uint8_t> input{0, 10, 15, 20, 43, 56, 78, 99, 102, 130, 142};
+    const Shape input_shape{11};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<int32_t> expected_output{0, 10, 15, 20, 43, 56, 78, 99, 102, 130, 142};
+    const element::Type expected_output_type = ngraph::element::i32;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+// destination: i64
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_i64)
+{
+    const std::vector<uint8_t> input{0, 10, 15, 20, 43, 56, 78, 99, 102, 130, 142};
+    const Shape input_shape{11};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<int64_t> expected_output{0, 10, 15, 20, 43, 56, 78, 99, 102, 130, 142};
+    const element::Type expected_output_type = ngraph::element::i64;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+// destination: u1
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_u1_is_not_supported_yet)
+{
+    const std::vector<uint8_t> input{0, 0, 0, 0};
+    const Shape input_shape{4};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<uint8_t> expected_output{0x00};
+    const element::Type expected_output_type = ngraph::element::u1;
+
+    ASSERT_THROW(ConvertTest(input, input_shape, input_type, expected_output, expected_output_type),
+                 ngraph::NodeValidationFailure);
+}
+
+// destination: u4
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_u4_is_not_supported_yet)
+{
+    const std::vector<uint8_t> input{0, 0, 0, 0};
+    const Shape input_shape{4};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<uint8_t> expected_output{0x00, 0x00};
+    const element::Type expected_output_type = ngraph::element::u4;
+
+    ASSERT_THROW(ConvertTest(input, input_shape, input_type, expected_output, expected_output_type),
+                 ngraph::NodeValidationFailure);
+}
+
+// destination: u8
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_u8)
+{
+    const std::vector<uint8_t> input{0, 10, 15, 20, 43, 56, 78, 99, 102, 110, 127};
+    const Shape input_shape{11};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<uint8_t> expected_output{0, 10, 15, 20, 43, 56, 78, 99, 102, 110, 127};
+    const element::Type expected_output_type = ngraph::element::u8;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+// destination: u16
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_u16)
+{
+    const std::vector<uint8_t> input{0, 10, 15, 20, 43, 56, 78, 99, 102, 110, 127};
+    const Shape input_shape{11};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<uint16_t> expected_output{0, 10, 15, 20, 43, 56, 78, 99, 102, 110, 127};
+    const element::Type expected_output_type = ngraph::element::u16;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+// destination: u32
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_u32)
+{
+    const std::vector<uint8_t> input{0, 10, 15, 20, 43, 56, 78, 99, 102, 110, 127};
+    const Shape input_shape{11};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<uint32_t> expected_output{0, 10, 15, 20, 43, 56, 78, 99, 102, 110, 127};
+    const element::Type expected_output_type = ngraph::element::u32;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
+}
+
+// destination: u64
+NGRAPH_TEST(${BACKEND_NAME}, convert_u8_to_u64)
+{
+    const std::vector<uint8_t> input{0, 10, 15, 20, 43, 56, 78, 99, 102, 110, 127};
+    const Shape input_shape{11};
+    const element::Type input_type = ngraph::element::u8;
+
+    const std::vector<uint64_t> expected_output{0, 10, 15, 20, 43, 56, 78, 99, 102, 110, 127};
+    const element::Type expected_output_type = ngraph::element::u64;
+
+    ConvertTest(input, input_shape, input_type, expected_output, expected_output_type);
 }
diff --git a/ngraph/test/runtime/ie/unit_test.manifest b/ngraph/test/runtime/ie/unit_test.manifest
index cac9e71d603..9e8bb87ca58 100644
--- a/ngraph/test/runtime/ie/unit_test.manifest
+++ b/ngraph/test/runtime/ie/unit_test.manifest
@@ -53,7 +53,6 @@ onnx_model_addmul_abc
 IE_CPU.interpolate_down_scales_const_linear
 
 # data [] doesn't exist
-convert_float32_bool
 broadcast_trivial
 aliased_output
 bool_init_raw
@@ -824,9 +823,6 @@ cum_sum_2dim
 cum_sum_3d
 cum_sum_2dim_allmodes
 
-# Cannot create MKLDNNMemoryDesc from TensorDesc. Unsupported precision!
-convert_uint16_float32
-
 # Unsupported primitive of type: Ceiling
 ceiling
 
@@ -886,7 +882,6 @@ strided_slice_stride_optional
 divide_int32
 divide_cpp_rounding_int32
 divide_python_rounding_int32
-convert_int32_bool
 lesseq_int32
 
 # Constant and Low Precision
@@ -1056,6 +1051,22 @@ roll_3d_input
 roll_3d_input_negative_shift
 roll_negative_axes
 
+# convert operation
+IE_CPU.convert_f16_to_f32
+IE_CPU.convert_u8_to_f16
+IE_CPU.convert_u8_to_i16
+IE_CPU.convert_u8_to_i64
+IE_CPU.convert_u8_to_u16
+IE_CPU.convert_u8_to_u32
+IE_CPU.convert_u8_to_u64
+IE_CPU.convert_u8_to_boolean
+IE_CPU.convert_i32_to_boolean
+IE_CPU.convert_f32_to_boolean
+IE_CPU.convert_u32_to_f32 # NOT_IMPLEMENTED
+IE_CPU.convert_i4_to_f32 # NOT_IMPLEMENTED
+IE_CPU.convert_u1_to_f32 # NOT_IMPLEMENTED
+IE_CPU.convert_u4_to_f32 # NOT_IMPLEMENTED
+
 #-------------------------------------------------------------------------------
 #
 # Inference Engine CPU plugin excludes
@@ -1418,7 +1429,6 @@ IE_GPU.divide_overload
 IE_GPU.divide_by_zero_float32
 IE_GPU.cosh
 IE_GPU.cos
-IE_GPU.convert_int32_float32
 IE_GPU.concat_negative_axis
 IE_GPU.concat_matrix_colwise
 IE_GPU.concat_matrix_rowwise
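For reference, below is a minimal standalone sketch (not part of the patch, and assuming an ngraph build that ships these headers) of what the new validate_and_infer_types() checks mean for users of op::Convert: constructing a Convert to an u1/u4/i4 type now fails with ngraph::NodeValidationFailure at node-construction time, exactly as the *_is_not_supported_yet tests above expect, while other precisions are unaffected. The variable names are illustrative only.

// sketch.cpp - illustrative only, not taken from the patch
#include <iostream>
#include <memory>

#include "ngraph/ngraph.hpp"

int main()
{
    using namespace ngraph;

    const auto param = std::make_shared<op::Parameter>(element::u8, Shape{2, 2});

    // Supported destination type: the node is constructed and typed as expected.
    const auto ok = std::make_shared<op::Convert>(param, element::f32);
    std::cout << "u8 -> f32 output type: " << ok->get_output_element_type(0) << std::endl;

    // Unsupported destination type: the new check rejects it during construction.
    try
    {
        const auto rejected = std::make_shared<op::Convert>(param, element::u4);
    }
    catch (const NodeValidationFailure& error)
    {
        std::cout << "u8 -> u4 rejected: " << error.what() << std::endl;
    }
    return 0;
}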