From 5f0b063455a74c6ab2d4f858ee53dba55c478b85 Mon Sep 17 00:00:00 2001 From: Tomasz Adamowicz Date: Fri, 4 Nov 2022 10:26:22 +0100 Subject: [PATCH] [GNA] Support GNA 3.5 (MTL) target in OV (#13478) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Enable TLV * [GNA] Add smoke test for Conv with padding on MTL * [GNA] Allow skipping tests based on gna lib version Co-authored-by: Krzysztof Bruniecki 🥇 --- samples/cpp/speech_sample/main.cpp | 17 +- .../openvino/runtime/intel_gna/properties.hpp | 5 + src/plugins/intel_gna/src/gna_device.cpp | 3 + .../intel_gna/src/gna_plugin_config.cpp | 5 + .../pass_tests/conv_with_padding.cpp | 157 ++++++++++++++++++ .../convert_padded_to_valid_conv.cpp | 8 +- .../single_layer_tests/convolution.cpp | 8 +- .../convolution_negative.cpp | 26 ++- .../single_layer_tests/low_precision.cpp | 8 +- .../skip_tests_check.hpp | 38 +++-- .../convolution_relu_sequence.cpp | 8 +- .../tests/unit/gna_plugin_config_test.cpp | 10 ++ 12 files changed, 237 insertions(+), 56 deletions(-) create mode 100644 src/plugins/intel_gna/tests/functional/pass_tests/conv_with_padding.cpp diff --git a/samples/cpp/speech_sample/main.cpp b/samples/cpp/speech_sample/main.cpp index f79b24a339f..1ec8bfe04c5 100644 --- a/samples/cpp/speech_sample/main.cpp +++ b/samples/cpp/speech_sample/main.cpp @@ -222,10 +222,21 @@ int main(int argc, char* argv[]) { } gnaPluginConfig[ov::hint::inference_precision.name()] = (FLAGS_qb == 8) ? ov::element::i8 : ov::element::i16; auto parse_target = [&](const std::string& target) -> ov::intel_gna::HWGeneration { - return (target == "GNA_TARGET_2_0") ? ov::intel_gna::HWGeneration::GNA_2_0 - : (target == "GNA_TARGET_3_0") ? 
ov::intel_gna::HWGeneration::GNA_3_0 - : ov::intel_gna::HWGeneration::UNDEFINED; + auto hw_target = ov::intel_gna::HWGeneration::UNDEFINED; + + if (target == "GNA_TARGET_2_0") { + hw_target = ov::intel_gna::HWGeneration::GNA_2_0; + } else if (target == "GNA_TARGET_3_0") { + hw_target = ov::intel_gna::HWGeneration::GNA_3_0; + } else if (target == "GNA_TARGET_3_5") { + hw_target = ov::intel_gna::HWGeneration::GNA_3_5; + } else if (!target.empty()) { + slog::warn << "Unsupported target: " << target << slog::endl; + } + + return hw_target; }; + gnaPluginConfig[ov::intel_gna::execution_target.name()] = parse_target(FLAGS_exec_target); gnaPluginConfig[ov::intel_gna::compile_target.name()] = parse_target(FLAGS_compile_target); gnaPluginConfig[ov::intel_gna::memory_reuse.name()] = !FLAGS_memory_reuse_off; diff --git a/src/inference/include/openvino/runtime/intel_gna/properties.hpp b/src/inference/include/openvino/runtime/intel_gna/properties.hpp index e0420ea4d14..3dcbf9eebec 100644 --- a/src/inference/include/openvino/runtime/intel_gna/properties.hpp +++ b/src/inference/include/openvino/runtime/intel_gna/properties.hpp @@ -119,6 +119,7 @@ enum class HWGeneration { UNDEFINED = 0, //!< GNA HW generation is undefined GNA_2_0 = 1, //!< GNA HW generation 2.0 GNA_3_0 = 2, //!< GNA HW generation 3.0 + GNA_3_5 = 3, //!< GNA HW generation 3.5 }; /** @cond INTERNAL */ @@ -130,6 +131,8 @@ inline std::ostream& operator<<(std::ostream& os, const HWGeneration& hw_generat return os << "GNA_2_0"; case HWGeneration::GNA_3_0: return os << "GNA_3_0"; + case HWGeneration::GNA_3_5: + return os << "GNA_3_5"; default: throw ov::Exception{"Unsupported HW generation!"}; } @@ -144,6 +147,8 @@ inline std::istream& operator>>(std::istream& is, HWGeneration& hw_generation) { hw_generation = HWGeneration::GNA_2_0; } else if (str == "GNA_3_0") { hw_generation = HWGeneration::GNA_3_0; + } else if (str == "GNA_3_5") { + hw_generation = HWGeneration::GNA_3_5; } else { throw ov::Exception{"Unsupported HW 
generation: " + str}; } diff --git a/src/plugins/intel_gna/src/gna_device.cpp b/src/plugins/intel_gna/src/gna_device.cpp index 44feb59aa85..b94251e9e6f 100644 --- a/src/plugins/intel_gna/src/gna_device.cpp +++ b/src/plugins/intel_gna/src/gna_device.cpp @@ -244,6 +244,7 @@ Gna2DeviceVersion GNADeviceHelper::parseTarget(const std::string& target) { static const std::map targetMap { {GNAPluginNS::common::kGnaTarget2_0, Gna2DeviceVersion2_0}, {GNAPluginNS::common::kGnaTarget3_0, Gna2DeviceVersion3_0}, + {GNAPluginNS::common::kGnaTarget3_5, Gna2DeviceVersion3_5}, {GNAPluginNS::common::kGnaTargetUnspecified, Gna2DeviceVersionSoftwareEmulation}, }; const auto f = targetMap.find(target); @@ -591,6 +592,8 @@ std::string GNADeviceHelper::GetCompileTarget() const { static const std::map targetMap = { {Gna2DeviceVersion2_0, GNAPluginNS::common::kGnaTarget2_0}, {Gna2DeviceVersion3_0, GNAPluginNS::common::kGnaTarget3_0}, + {Gna2DeviceVersion3_5, GNAPluginNS::common::kGnaTarget3_5}, + {Gna2DeviceVersionEmbedded3_5, GNAPluginNS::common::kGnaTarget3_5}, }; const auto target = getTargetDevice(false); auto found = targetMap.find(target); diff --git a/src/plugins/intel_gna/src/gna_plugin_config.cpp b/src/plugins/intel_gna/src/gna_plugin_config.cpp index c8aedca9068..39be3105897 100644 --- a/src/plugins/intel_gna/src/gna_plugin_config.cpp +++ b/src/plugins/intel_gna/src/gna_plugin_config.cpp @@ -44,6 +44,7 @@ OPENVINO_SUPPRESS_DEPRECATED_END static const std::set supportedTargets = { common::kGnaTarget2_0, common::kGnaTarget3_0, + common::kGnaTarget3_5, common::kGnaTargetUnspecified }; @@ -157,6 +158,8 @@ OPENVINO_SUPPRESS_DEPRECATED_END target_str = common::kGnaTarget2_0; } else if (ov::intel_gna::HWGeneration::GNA_3_0 == target) { target_str = common::kGnaTarget3_0; + } else if (ov::intel_gna::HWGeneration::GNA_3_5 == target) { + target_str = common::kGnaTarget3_5; } set_target(target_str); } else if (key == GNA_CONFIG_KEY(EXEC_TARGET)) { @@ -359,10 +362,12 @@ Parameter 
Config::GetParameter(const std::string& name) const { } else if (name == ov::intel_gna::execution_target) { return ((gnaExecTarget == common::kGnaTarget2_0) ? ov::intel_gna::HWGeneration::GNA_2_0 : (gnaExecTarget == common::kGnaTarget3_0) ? ov::intel_gna::HWGeneration::GNA_3_0 : + (gnaExecTarget == common::kGnaTarget3_5) ? ov::intel_gna::HWGeneration::GNA_3_5 : ov::intel_gna::HWGeneration::UNDEFINED); } else if (name == ov::intel_gna::compile_target) { return ((gnaCompileTarget == common::kGnaTarget2_0) ? ov::intel_gna::HWGeneration::GNA_2_0 : (gnaCompileTarget == common::kGnaTarget3_0) ? ov::intel_gna::HWGeneration::GNA_3_0 : + (gnaCompileTarget == common::kGnaTarget3_5) ? ov::intel_gna::HWGeneration::GNA_3_5 : ov::intel_gna::HWGeneration::UNDEFINED); } else if (name == ov::hint::performance_mode) { return performance_mode; diff --git a/src/plugins/intel_gna/tests/functional/pass_tests/conv_with_padding.cpp b/src/plugins/intel_gna/tests/functional/pass_tests/conv_with_padding.cpp new file mode 100644 index 00000000000..60cb1f18363 --- /dev/null +++ b/src/plugins/intel_gna/tests/functional/pass_tests/conv_with_padding.cpp @@ -0,0 +1,157 @@ +// Copyright (C) 2022 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "shared_test_classes/base/layer_test_utils.hpp" +#include +#include "ngraph_functions/builders.hpp" +#include "../shared_tests_instances/skip_tests_check.hpp" + +typedef std::tuple, // Configuration + std::vector, // Input Shape + std::vector, // Filter Shape + std::vector // Padding Size + > + ConvWithPaddingParams; + +namespace LayerTestsDefinitions { + +class ConvWithPadding : public testing::WithParamInterface, + public LayerTestsUtils::LayerTestsCommon, + public GnaLayerTestCheck { +public: + static std::string getTestCaseName(testing::TestParamInfo obj) { + InferenceEngine::Precision precision; + std::string targetDevice; + std::map configuration; + std::vector input_shape; + std::vector filter_shape; + std::vector padding_size; 
+ + std::tie(precision, targetDevice, configuration, input_shape, filter_shape, padding_size) = obj.param; + + std::ostringstream result; + result << "netPRC=" << precision.name() << "_"; + result << "targetDevice=" << targetDevice << "_"; + + for (auto const& configItem : configuration) { + result << "_configItem=" << configItem.first << "_" << configItem.second; + } + result << "_inputShape=" << CommonTestUtils::vec2str(input_shape) << "_"; + result << "_filterShape=" << CommonTestUtils::vec2str(filter_shape) << "_"; + result << "_paddingSize=" << CommonTestUtils::vec2str(padding_size); + + return result.str(); + } + +protected: + InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const override { + InferenceEngine::Blob::Ptr blob = make_blob_with_precision(info.getTensorDesc()); + blob->allocate(); + + auto* rawBlobDataPtr = blob->buffer().as(); + std::vector values = CommonTestUtils::generate_float_numbers(blob->size(), -0.01f, 0.01f); + for (size_t i = 0; i < blob->size(); i++) { + rawBlobDataPtr[i] = values[i]; + } + return blob; + } + + void SetUp() override { + InferenceEngine::Precision precision; + std::vector input_shape; + std::vector filter_shape; + std::vector padding_size; + + std::tie(precision, targetDevice, configuration, input_shape, filter_shape, padding_size) = this->GetParam(); + + GnaLayerTestCheck::SetUp(targetDevice); + if (GnaLayerTestCheck::gnaLibVersionLessThan(3.5f)) { + GTEST_SKIP() << GnaLayerTestCheck::getLastCmpResultMsg() << std::endl; + } + + auto ng_precision = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(precision); + auto input = std::make_shared(ng_precision, ngraph::Shape{input_shape}); + auto filter = ngraph::builder::makeConstant(ng_precision, filter_shape, {1.f}); + auto conv = std::make_shared(input, + filter, + ov::Strides{1, 1}, + padding_size, + padding_size, + ov::Strides{}); + + auto res = std::make_shared(conv); + function = std::make_shared(ngraph::ResultVector{res}, 
ngraph::ParameterVector{input}); + } +}; + +using ConvWithPaddingTestPos = ConvWithPadding; +using ConvWithPaddingTestNeg = ConvWithPadding; + +TEST_P(ConvWithPaddingTestPos, CompareWithRefImpl) { + Run(); +}; + +TEST_P(ConvWithPaddingTestNeg, CompareWithRefImpl) { + std::string what; + try { + LoadNetwork(); + } catch (const std::runtime_error& re) { + what.assign(re.what()); + } catch (const std::exception& e) { + what.assign(e.what()); + } catch (...) { + what.assign("Unknown failure occurred."); + } + EXPECT_HAS_SUBSTRING(what, std::string("Convolution's input padding is not supported")); +}; + +const InferenceEngine::Precision net_precisions{InferenceEngine::Precision::FP32}; + +const std::vector> configs_gna_3_0_to_3_5 = { + {{"GNA_DEVICE_MODE", "GNA_SW_EXACT"}, {"GNA_EXEC_TARGET", "GNA_TARGET_3_0"}}, + {{"GNA_DEVICE_MODE", "GNA_SW_EXACT"}, {"GNA_EXEC_TARGET", "GNA_TARGET_3_5"}}}; + +const std::vector> configs_gna_3_0 = { + {{"GNA_DEVICE_MODE", "GNA_SW_EXACT"}, {"GNA_EXEC_TARGET", "GNA_TARGET_3_0"}}}; + +const std::vector> configs_gna_3_5 = { + {{"GNA_DEVICE_MODE", "GNA_SW_EXACT"}, {"GNA_EXEC_TARGET", "GNA_TARGET_3_5"}}}; + +const std::vector input = {1, 8, 16, 16}; +const std::vector filter = {8, 8, 2, 2}; +const std::vector no_padding{}; +const std::vector padding{1, 1}; + +INSTANTIATE_TEST_SUITE_P(smoke_conv_without_padding, + ConvWithPaddingTestPos, + ::testing::Combine(::testing::Values(net_precisions), + ::testing::Values(CommonTestUtils::DEVICE_GNA), + ::testing::ValuesIn(configs_gna_3_0_to_3_5), + ::testing::Values(input), + ::testing::Values(filter), + ::testing::Values(no_padding)), + ConvWithPaddingTestPos::getTestCaseName); + +INSTANTIATE_TEST_SUITE_P(smoke_conv_with_padding_gna_3_5, + ConvWithPaddingTestPos, + ::testing::Combine(::testing::Values(net_precisions), + ::testing::Values(CommonTestUtils::DEVICE_GNA), + ::testing::ValuesIn(configs_gna_3_5), + ::testing::Values(input), + ::testing::Values(filter), + ::testing::Values(padding)), + 
ConvWithPaddingTestPos::getTestCaseName); + +INSTANTIATE_TEST_SUITE_P(smoke_expect_exception_for_conv_with_padding_when_gna_3_0, + ConvWithPaddingTestNeg, + ::testing::Combine(::testing::Values(net_precisions), + ::testing::Values(CommonTestUtils::DEVICE_GNA), + ::testing::ValuesIn(configs_gna_3_0), + ::testing::Values(input), + ::testing::Values(filter), + ::testing::Values(padding)), + ConvWithPaddingTestNeg::getTestCaseName); +} // namespace LayerTestsDefinitions diff --git a/src/plugins/intel_gna/tests/functional/pass_tests/convert_padded_to_valid_conv.cpp b/src/plugins/intel_gna/tests/functional/pass_tests/convert_padded_to_valid_conv.cpp index c45282363e9..d90c06ce22d 100644 --- a/src/plugins/intel_gna/tests/functional/pass_tests/convert_padded_to_valid_conv.cpp +++ b/src/plugins/intel_gna/tests/functional/pass_tests/convert_padded_to_valid_conv.cpp @@ -203,14 +203,10 @@ protected: } }; -class Gna30PaddedToValidConvTest : public PaddedToValidConvTest, GnaLayerTestCheck { +class Gna30PaddedToValidConvTest : public PaddedToValidConvTest { protected: void Run() override { - GnaLayerTestCheck::SkipTestCheck(); - - if (!GnaLayerTestCheck::skipTest) { - PaddedToValidConvTest::Run(); - } + PaddedToValidConvTest::Run(); } void SetUp() override { diff --git a/src/plugins/intel_gna/tests/functional/shared_tests_instances/single_layer_tests/convolution.cpp b/src/plugins/intel_gna/tests/functional/shared_tests_instances/single_layer_tests/convolution.cpp index 9bd6e91bee0..2c7b9f43c29 100644 --- a/src/plugins/intel_gna/tests/functional/shared_tests_instances/single_layer_tests/convolution.cpp +++ b/src/plugins/intel_gna/tests/functional/shared_tests_instances/single_layer_tests/convolution.cpp @@ -12,14 +12,10 @@ using namespace LayerTestsDefinitions; namespace { -class GnaConvolutionLayerTest : public ConvolutionLayerTest, GnaLayerTestCheck { +class GnaConvolutionLayerTest : public ConvolutionLayerTest { protected: void Run() override { - 
GnaLayerTestCheck::SkipTestCheck(); - - if (!GnaLayerTestCheck::skipTest) { - ConvolutionLayerTest::Run(); - } + ConvolutionLayerTest::Run(); } void SetUp() override { diff --git a/src/plugins/intel_gna/tests/functional/shared_tests_instances/single_layer_tests/convolution_negative.cpp b/src/plugins/intel_gna/tests/functional/shared_tests_instances/single_layer_tests/convolution_negative.cpp index 91add917e13..4771e213e53 100644 --- a/src/plugins/intel_gna/tests/functional/shared_tests_instances/single_layer_tests/convolution_negative.cpp +++ b/src/plugins/intel_gna/tests/functional/shared_tests_instances/single_layer_tests/convolution_negative.cpp @@ -159,24 +159,20 @@ const auto conv2DParametersInvalidDilation = ::testing::Combine( ::testing::Values(ngraph::op::PadType::EXPLICIT) ); -class GnaConv2DNegativeTest : public ConvolutionLayerTest, protected GnaLayerTestCheck { +class GnaConv2DNegativeTest : public ConvolutionLayerTest { protected: virtual std::string expectedSubstring() = 0; void Run() override { - GnaLayerTestCheck::SkipTestCheck(); - - if (!GnaLayerTestCheck::skipTest) { - try { - ConvolutionLayerTest::LoadNetwork(); - FAIL() << "GNA's unsupported configuration of Convolution2D was not detected in ConvolutionLayerTest::LoadNetwork()"; - } - catch (std::runtime_error& e) { - const std::string errorMsg = e.what(); - const auto expected = expectedSubstring(); - ASSERT_STR_CONTAINS(errorMsg, expected); - EXPECT_TRUE(errorMsg.find(expected) != std::string::npos) << "Wrong error message, actula error message: " << errorMsg << - ", expected: " << expected; - } + try { + ConvolutionLayerTest::LoadNetwork(); + FAIL() << "GNA's unsupported configuration of Convolution2D was not detected in ConvolutionLayerTest::LoadNetwork()"; + } + catch (std::runtime_error& e) { + const std::string errorMsg = e.what(); + const auto expected = expectedSubstring(); + ASSERT_STR_CONTAINS(errorMsg, expected); + EXPECT_TRUE(errorMsg.find(expected) != std::string::npos) << "Wrong 
error message, actual error message: " << errorMsg << + ", expected: " << expected; } } void SetUp() override { diff --git a/src/plugins/intel_gna/tests/functional/shared_tests_instances/single_layer_tests/low_precision.cpp b/src/plugins/intel_gna/tests/functional/shared_tests_instances/single_layer_tests/low_precision.cpp index f52fcda9130..faccc96d912 100644 --- a/src/plugins/intel_gna/tests/functional/shared_tests_instances/single_layer_tests/low_precision.cpp +++ b/src/plugins/intel_gna/tests/functional/shared_tests_instances/single_layer_tests/low_precision.cpp @@ -12,14 +12,10 @@ using namespace LowPrecisionTestDefinitions; namespace { -class GnaLowPrecisionTest : public LowPrecisionTest, GnaLayerTestCheck { +class GnaLowPrecisionTest : public LowPrecisionTest { protected: void Run() override { - GnaLayerTestCheck::SkipTestCheck(); - - if (!GnaLayerTestCheck::skipTest) { - LowPrecisionTest::Run(); - } + LowPrecisionTest::Run(); } void SetUp() override { diff --git a/src/plugins/intel_gna/tests/functional/shared_tests_instances/skip_tests_check.hpp b/src/plugins/intel_gna/tests/functional/shared_tests_instances/skip_tests_check.hpp index 576d4b10487..42073a34f5c 100644 --- a/src/plugins/intel_gna/tests/functional/shared_tests_instances/skip_tests_check.hpp +++ b/src/plugins/intel_gna/tests/functional/shared_tests_instances/skip_tests_check.hpp @@ -4,23 +4,33 @@ #include -class GnaLayerTestCheck : virtual public LayerTestsUtils::LayerTestsCommon { -protected: - bool skipTest = true; +class GnaLayerTestCheck { + float gnaLibVer = 0.0f; + std::string lastMsg; - void SkipTestCheck() { - InferenceEngine::Core ie_core; - std::vector metrics = ie_core.GetMetric(targetDevice, METRIC_KEY(SUPPORTED_METRICS)); +public: + void SetUp(const std::string deviceName) { + InferenceEngine::Core ieCore; + std::vector metrics = ieCore.GetMetric(deviceName, METRIC_KEY(SUPPORTED_METRICS)); - if (targetDevice == "GNA") { + if
(std::find(metrics.begin(), metrics.end(), METRIC_KEY(GNA_LIBRARY_FULL_VERSION)) != metrics.end()) { - std::string gnaLibVer = ie_core.GetMetric(targetDevice, METRIC_KEY(GNA_LIBRARY_FULL_VERSION)); - - if (gnaLibVer.rfind("2.1", 0) != 0 && gnaLibVer.rfind("3.", 0) != 0) { - GTEST_SKIP() << "Disabled test due to GNA library version being not 2.1 or 3.X" << std::endl; - } - skipTest = false; + auto gnaLibVerStr = + ieCore.GetMetric(deviceName, METRIC_KEY(GNA_LIBRARY_FULL_VERSION)).as(); + gnaLibVer = std::stof(gnaLibVerStr); } } } -}; + + std::string& getLastCmpResultMsg() { + return lastMsg; + } + + bool gnaLibVersionLessThan(float verToCmp) { + if (gnaLibVer && gnaLibVer < verToCmp) { + lastMsg = "GNA library version is less than " + std::to_string(verToCmp); + return true; + } + return false; + } +}; \ No newline at end of file diff --git a/src/plugins/intel_gna/tests/functional/shared_tests_instances/subgraph_tests/convolution_relu_sequence.cpp b/src/plugins/intel_gna/tests/functional/shared_tests_instances/subgraph_tests/convolution_relu_sequence.cpp index cc5c7b98298..c0d2ef6e674 100644 --- a/src/plugins/intel_gna/tests/functional/shared_tests_instances/subgraph_tests/convolution_relu_sequence.cpp +++ b/src/plugins/intel_gna/tests/functional/shared_tests_instances/subgraph_tests/convolution_relu_sequence.cpp @@ -14,14 +14,10 @@ using namespace SubgraphTestsDefinitions; namespace { -class GnaConvolutionReluSequenceTest : public ConvolutionReluSequenceTest, GnaLayerTestCheck { +class GnaConvolutionReluSequenceTest : public ConvolutionReluSequenceTest { protected: void Run() override { - GnaLayerTestCheck::SkipTestCheck(); - - if (!GnaLayerTestCheck::skipTest) { - ConvolutionReluSequenceTest::Run(); - } + ConvolutionReluSequenceTest::Run(); } void SetUp() override { diff --git a/src/plugins/intel_gna/tests/unit/gna_plugin_config_test.cpp b/src/plugins/intel_gna/tests/unit/gna_plugin_config_test.cpp index c06e2567583..1691942c0d3 100644 --- 
a/src/plugins/intel_gna/tests/unit/gna_plugin_config_test.cpp +++ b/src/plugins/intel_gna/tests/unit/gna_plugin_config_test.cpp @@ -196,7 +196,12 @@ TEST_F(GNAPluginConfigTest, GnaConfigGnaExecTargetTest) { EXPECT_EQ(config.gnaExecTarget, "GNA_TARGET_2_0"); SetAndCompare(GNA_CONFIG_KEY(EXEC_TARGET), "GNA_TARGET_3_0"); EXPECT_EQ(config.gnaExecTarget, "GNA_TARGET_3_0"); + ExpectThrow(GNA_CONFIG_KEY(EXEC_TARGET), "GNA_TARGET_3_7"); + + SetAndCompare(GNA_CONFIG_KEY(EXEC_TARGET), "GNA_TARGET_3_5"); + EXPECT_EQ(config.gnaExecTarget, "GNA_TARGET_3_5"); + ExpectThrow(GNA_CONFIG_KEY(EXEC_TARGET), "0"); ExpectThrow(GNA_CONFIG_KEY(EXEC_TARGET), "GNA_TARGET_1_5"); ExpectThrow(GNA_CONFIG_KEY(EXEC_TARGET), "GNA_TARGET"); @@ -207,7 +212,12 @@ TEST_F(GNAPluginConfigTest, GnaConfigGnaCompileTargetTest) { EXPECT_EQ(config.gnaCompileTarget, "GNA_TARGET_2_0"); SetAndCompare(GNA_CONFIG_KEY(COMPILE_TARGET), "GNA_TARGET_3_0"); EXPECT_EQ(config.gnaCompileTarget, "GNA_TARGET_3_0"); + ExpectThrow(GNA_CONFIG_KEY(COMPILE_TARGET), "GNA_TARGET_3_7"); + + SetAndCompare(GNA_CONFIG_KEY(COMPILE_TARGET), "GNA_TARGET_3_5"); + EXPECT_EQ(config.gnaCompileTarget, "GNA_TARGET_3_5"); + ExpectThrow(GNA_CONFIG_KEY(COMPILE_TARGET), "0"); ExpectThrow(GNA_CONFIG_KEY(COMPILE_TARGET), "GNA_TARGET_1_5"); ExpectThrow(GNA_CONFIG_KEY(COMPILE_TARGET), "GNA_TARGET");