[CPU][ARM][TESTS] Expect ref implementation for GeluErf (#17611)
commit 2680e9b7aa
parent aec146ff8c
@@ -99,12 +99,16 @@ void ActivationLayerCPUTest::SetUp() {
     selectedType = getPrimitiveType() + "_" + netPrecision.name();

 #if defined(OPENVINO_ARCH_ARM) || defined(OPENVINO_ARCH_ARM64)
+# if defined(OPENVINO_ARCH_ARM)
+    if (activationType == ngraph::helpers::ActivationTypes::GeluErf) // @todo tmp fallback to ref, gelu erf is disabled for 32bit ARM
+        selectedType = std::string("ref_") + netPrecision.name();
+# endif
     if (activationType == ngraph::helpers::ActivationTypes::GeluTanh || // @todo not supported by ACL, can be decomposed with ngraph transformation
         activationType == ngraph::helpers::ActivationTypes::SoftSign || // @todo not supported by ACL, can be decomposed with ngraph transformation
         inputShapes.front().first.rank().get_length() > 5) // @todo tmp fallback to ref, remove after 6D+ ranks are properly supported
         selectedType = std::string("ref_") + netPrecision.name();
 #else
     if (activationType == ngraph::helpers::ActivationTypes::Log) // @todo tmp fallback to ref, remove after Log is supported in emitters
         selectedType = std::string("ref_") + netPrecision.name();
 #endif
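As context for the hunk above, here is a minimal, self-contained C++ sketch of how the test's expected primitive type gets selected. It is an illustration only: the enum, the boolean build-flag parameters, and the function name expectedPrimitiveType are hypothetical stand-ins for the real ngraph::helpers::ActivationTypes values, the compile-time OPENVINO_ARCH_ARM / OPENVINO_ARCH_ARM64 macros, and the test's member variables.

#include <string>

// Hypothetical, simplified mirror of the selection logic in the hunk above.
// The real test branches at compile time on OPENVINO_ARCH_ARM / OPENVINO_ARCH_ARM64
// rather than taking these flags as parameters.
enum class ActivationType { GeluErf, GeluTanh, SoftSign, Log, Other };

std::string expectedPrimitiveType(ActivationType type,
                                  int inputRank,
                                  bool isArm,    // any ARM build (32- or 64-bit)
                                  bool isArm32,  // 32-bit ARM build only
                                  const std::string& primitiveType,   // e.g. "acl"
                                  const std::string& precisionName) { // e.g. "f32"
    std::string selected = primitiveType + "_" + precisionName;
    if (isArm) {
        // GeluErf is disabled for 32-bit ARM, so the plugin falls back to ref.
        if (isArm32 && type == ActivationType::GeluErf)
            selected = "ref_" + precisionName;
        // Not supported by ACL, or rank above 5D: also expect the ref implementation.
        if (type == ActivationType::GeluTanh || type == ActivationType::SoftSign || inputRank > 5)
            selected = "ref_" + precisionName;
    } else {
        // Log is not yet supported in emitters on non-ARM builds.
        if (type == ActivationType::Log)
            selected = "ref_" + precisionName;
    }
    return selected;
}

Under these assumptions, expectedPrimitiveType(ActivationType::GeluErf, 4, true, true, "acl", "f32") yields "ref_f32", which is what the new # if defined(OPENVINO_ARCH_ARM) block makes the test expect on 32-bit ARM.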