[CPU][ARM] Fix inference precision for behaviour tests (#19485)

Author: Aleksandr Voron, 2023-08-30 07:38:30 +02:00 (committed by GitHub)
Parent: e8f1df495c
Commit: 9b10ef6f6f


@@ -156,11 +156,7 @@ TEST_F(OVClassConfigTestCPU, smoke_PluginSetConfigAffinity) {
TEST_F(OVClassConfigTestCPU, smoke_PluginSetConfigHintInferencePrecision) {
ov::Core ie;
auto value = ov::element::f32;
#if defined(OV_CPU_ARM_ENABLE_FP16)
const auto precision = ov::element::f16;
#else
const auto precision = InferenceEngine::with_cpu_x86_bfloat16() ? ov::element::bf16 : ov::element::f32;
#endif
ASSERT_NO_THROW(value = ie.get_property("CPU", ov::hint::inference_precision));
ASSERT_EQ(precision, value);
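
For context, a minimal standalone sketch of the platform-dependent expectation this hunk encodes: on ARM builds with OV_CPU_ARM_ENABLE_FP16 the CPU plugin is expected to default to f16, otherwise to bf16 only when the host CPU supports it. This is illustrative only, assuming the OpenVINO 2023.x C++ API and the developer header ie_system_conf.h; it is not the actual test code.

    #include <openvino/openvino.hpp>
    #include <ie_system_conf.h>  // InferenceEngine::with_cpu_x86_bfloat16(), OpenVINO dev API header

    // Illustrative helper: the expected default ov::hint::inference_precision
    // for the CPU plugin on the current platform.
    static ov::element::Type expected_default_inference_precision() {
    #if defined(OV_CPU_ARM_ENABLE_FP16)
        // ARM builds with FP16 enabled are expected to report f16 by default.
        return ov::element::f16;
    #else
        // x86 reports bf16 only when the host CPU supports it natively.
        return InferenceEngine::with_cpu_x86_bfloat16() ? ov::element::bf16
                                                        : ov::element::f32;
    #endif
    }

    int main() {
        ov::Core core;
        const auto actual = core.get_property("CPU", ov::hint::inference_precision);
        return actual == expected_default_inference_precision() ? 0 : 1;
    }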
@@ -194,11 +190,7 @@ TEST_F(OVClassConfigTestCPU, smoke_PluginSetConfigEnableProfiling) {
ASSERT_EQ(enableProfiling, value);
}
#if defined(OV_CPU_ARM_ENABLE_FP16)
const auto expected_precision_for_performance_mode = ov::element::f16;
#else
const auto expected_precision_for_performance_mode = InferenceEngine::with_cpu_x86_bfloat16() ? ov::element::bf16 : ov::element::f32;
#endif
const auto expected_precision_for_performance_mode = InferenceEngine::with_cpu_x86_bfloat16() ? ov::element::bf16 : ov::element::f32;
const auto bf16_if_can_be_emulated = InferenceEngine::with_cpu_x86_avx512_core() ? ov::element::bf16 : ov::element::f32;
using ExpectedModeAndType = std::pair<ov::hint::ExecutionMode, ov::element::Type>;
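
A hedged sketch of how the constants and the ExpectedModeAndType alias above might be exercised; the test name, the ACCURACY-mode expectation of f32, and the loop structure are assumptions for illustration, not the actual test body.

    // Hypothetical sketch only: pair each execution mode with the inference
    // precision the CPU plugin is expected to report, then check both
    // properties through ov::Core.
    TEST_F(OVClassConfigTestCPU, sketch_ExecutionModeDrivesInferencePrecision) {
        const std::vector<ExpectedModeAndType> expectations = {
            {ov::hint::ExecutionMode::PERFORMANCE, expected_precision_for_performance_mode},
            // Assumption for illustration: ACCURACY mode falls back to f32.
            {ov::hint::ExecutionMode::ACCURACY, ov::element::f32},
        };
        for (const auto& item : expectations) {
            ov::Core ie;
            ie.set_property("CPU", ov::hint::execution_mode(item.first));
            ASSERT_EQ(item.first, ie.get_property("CPU", ov::hint::execution_mode));
            ASSERT_EQ(item.second, ie.get_property("CPU", ov::hint::inference_precision));
        }
    }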