[GNA] Moved PWL functional tests (#10731)

* Moving PWL to ngraph

* improving the running time of pwl_search; refactoring the pwl operation

* fixed errors & refactored code

* moved PWL op to GNA

* Update src/plugins/intel_gna/ops/pwl.hpp

Co-authored-by: Elizaveta Lobanova <elizaveta.lobanova@intel.com>

* Update src/plugins/intel_gna/ops/reference/pwl.hpp

Co-authored-by: Elizaveta Lobanova <elizaveta.lobanova@intel.com>

* Update src/plugins/intel_gna/ops/pwl.cpp

Co-authored-by: Elizaveta Lobanova <elizaveta.lobanova@intel.com>

* Update src/plugins/intel_gna/transformations/transpose_to_pwl.hpp

Co-authored-by: Elizaveta Lobanova <elizaveta.lobanova@intel.com>

* Update src/plugins/intel_gna/transformations/transpose_to_pwl.cpp

Co-authored-by: Elizaveta Lobanova <elizaveta.lobanova@intel.com>

* fixed compilation error

* Update inference-engine/tests/unit/gna/ngraph/transformations/gna_pwl.cpp

Co-authored-by: Elizaveta Lobanova <elizaveta.lobanova@intel.com>

* added some tests; changed algorithm of checking accuracy of pwl; refactoring

* added first and last segments; added fq and fixed errors

* fixed after review & rewrote some tests on ngraph

* removed debug logs & fixed code style check error

* s/ngraph_helper/ngraph_util

* removed TRANSFORMATIONS_API in PWLApproximation class declaration

* removed OPENVINO_API in Pwl class declaration

* replaced the deprecated version of evaluate() with a new one

* fixed some problems after reviewing

* fixed a problem when a value of function of left point of segment is less than minimum of function

* corrected a value of the right point of last segments

* [GNA] Moved pwl func tests

* Deleted deprecated test

* s/OPENVINO_RTTI/OPENVINO_OP

* Deleted conflicted test file

* fixed after review

Co-authored-by: Dmitrii Khurtin <dmitrii.khurtin@intel.com>
Co-authored-by: Elizaveta Lobanova <elizaveta.lobanova@intel.com>
This commit is contained in:
Andrey Noskov
2022-04-05 11:43:24 +03:00
committed by GitHub
parent 9c8a6aacb7
commit 3a36d90c11
2 changed files with 2 additions and 249 deletions

View File

@@ -95,7 +95,8 @@ const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes
{Exp, {}},
{Log, {}},
{Sign, {}},
{Abs, {}}
{Abs, {}},
{Clamp, {{-5, 5}}}
};
const std::map<ActivationTypes, std::vector<std::vector<float>>> preluActivationTypes = {

View File

@@ -1,248 +0,0 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <vector>
#include <gtest/gtest.h>
#include "gna_matcher.hpp"
#include <ngraph/function.hpp>
#include <ngraph/opsets/opset8.hpp>
namespace detail {

/// Builds a minimal ngraph function: Parameter(f32, input_shape) -> T -> Result.
/// T is any activation op constructible from a single input node.
template <typename T>
std::shared_ptr<ngraph::Function> CreateActivationFunction(const ngraph::Shape& input_shape) {
    const auto param = std::make_shared<ngraph::opset8::Parameter>(ngraph::element::f32, input_shape);
    const auto activation = std::make_shared<T>(param);
    const auto result = std::make_shared<ngraph::opset8::Result>(activation);
    return std::make_shared<ngraph::Function>(ngraph::ResultVector{result},
                                              ngraph::ParameterVector{param});
}

/// Overload for activations parameterized by a (min, max) range.
/// Only explicit specializations are defined; the primary template is declared only.
template <typename T>
std::shared_ptr<ngraph::Function> CreateActivationFunction(const ngraph::Shape& input_shape, double min, double max);

/// Clamp takes its (min, max) bounds at construction time.
template <>
std::shared_ptr<ngraph::Function> CreateActivationFunction<ngraph::opset8::Clamp>(const ngraph::Shape& input_shape,
                                                                                  double min,
                                                                                  double max) {
    const auto param = std::make_shared<ngraph::opset8::Parameter>(ngraph::element::f32, input_shape);
    const auto clamp = std::make_shared<ngraph::opset8::Clamp>(param, min, max);
    const auto result = std::make_shared<ngraph::opset8::Result>(clamp);
    return std::make_shared<ngraph::Function>(ngraph::ResultVector{result},
                                              ngraph::ParameterVector{param});
}

}  // namespace detail
// Fixture for the PWL approximation tests below.
// SetUp is intentionally an empty override: no per-test initialization is needed.
class PWLAproximationTest : public GNATest<> {
protected:
    void SetUp() override {}
};
using namespace GNATestIRs;
// Recursive Algorithm
// Precision Threshold
//
// Each test builds a single-activation model, runs GNA forward propagation with
// SCALE_FACTOR = 1.0, and checks that the PWL quantization of that activation
// stays within the given precision threshold.
TEST_F(PWLAproximationTest, forTanhOnRecursiveAlgoWithPrecisionThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Tanh>({1, 10}))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActTanh)
.pwl_quantization_precision_threshold(0.0053);
}
TEST_F(PWLAproximationTest, forSigmoidOnRecursiveAlgoWithPrecisionThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Sigmoid>({1, 10}))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActSigmoid)
.pwl_quantization_precision_threshold(0.0027);
}
TEST_F(PWLAproximationTest, forReLUonRecursiveAlgoWithPrecisionThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Relu>({1, 1, 10, 10}))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActRelu)
.pwl_quantization_precision_threshold(0.0001);
}
// LeakyReLU has no ngraph builder in this file; the model comes from the legacy
// GNATestIRs IR helper instead (onInferModel vs onInferNgraphModel).
TEST_F(PWLAproximationTest, forLeakyReLUonRecursiveAlgoWithPrecisionThresholdIsSuccess) {
assert_that().onInferModel(LeakyReLUActivationModel())
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActLeakyRelu)
.pwl_quantization_precision_threshold(0.0003);
}
// Disabled; the reason is not recorded in this file.
TEST_F(PWLAproximationTest, DISABLED_forIdentityOnRecursiveAlgoWithPrecisionThresholdIsSuccess) {
assert_that().onInferModel(IdentityActivationModel())
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActIdentity)
.pwl_quantization_precision_threshold(0.0003);
}
// Clamp is expected to be quantized as the Kaldi LSTM clipping activation.
TEST_F(PWLAproximationTest, forClampOnRecursiveAlgoWithPrecisionThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Clamp>({1, 10}, -50, 50))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActKaldiLstmClipping)
.pwl_quantization_precision_threshold(0.0001);
}
// Uniform Algorithm
// Precision Threshold
//
// Same precision checks as above, but with the uniform PWL segmentation
// algorithm selected via withUniformPWLAlgo().
TEST_F(PWLAproximationTest, forTanhOnUniformAlgoWithPrecisionThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Tanh>({1, 10}))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.withUniformPWLAlgo()
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActTanh)
.pwl_quantization_precision_threshold(0.0009);
}
TEST_F(PWLAproximationTest, forSigmoidOnUniformAlgoWithPrecisionThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Sigmoid>({1, 10}))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.withUniformPWLAlgo()
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActSigmoid)
.pwl_quantization_precision_threshold(0.0004);
}
// Disabled; the reason is not recorded in this file.
TEST_F(PWLAproximationTest, DISABLED_forIdentityOnUniformAlgoWithPrecisionThresholdIsSuccess) {
assert_that().onInferModel(IdentityActivationModel())
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.withUniformPWLAlgo()
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActIdentity)
.pwl_quantization_precision_threshold(0.0003);
}
// Clamp is expected to be quantized as the Kaldi LSTM clipping activation.
TEST_F(PWLAproximationTest, forClampOnUniformAlgoWithPrecisionThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Clamp>({1, 10}, -50, 50))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.withUniformPWLAlgo()
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActKaldiLstmClipping)
.pwl_quantization_precision_threshold(0.0001);
}
// Recursive Algorithm
// Segment Threshold
//
// These tests check the number of PWL segments produced by the recursive
// algorithm against a per-activation upper bound.
TEST_F(PWLAproximationTest, forSigmoidonRecursiveAlgoWithSegmentThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Sigmoid>({1, 10}))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActSigmoid)
.pwl_quantization_segments_threshold(12);
}
TEST_F(PWLAproximationTest, forTanhonRecursiveAlgoWithSegmentThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Tanh>({1, 10}))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActTanh)
.pwl_quantization_segments_threshold(12);
}
TEST_F(PWLAproximationTest, forReLUonRecursiveAlgoWithSegmentThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Relu>({1, 1, 10, 10}))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActRelu)
.pwl_quantization_segments_threshold(4);
}
// LeakyReLU has no ngraph builder in this file; the model comes from the legacy
// GNATestIRs IR helper instead.
TEST_F(PWLAproximationTest, forLeakyReLUonRecursiveAlgoWithSegmentThresholdIsSuccess) {
assert_that().onInferModel(LeakyReLUActivationModel())
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActLeakyRelu)
.pwl_quantization_segments_threshold(4);
}
// Disabled; the reason is not recorded in this file.
TEST_F(PWLAproximationTest, DISABLED_forIdentityOnRecursiveAlgoWithSegmentThresholdIsSuccess) {
assert_that().onInferModel(IdentityActivationModel())
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActIdentity)
.pwl_quantization_segments_threshold(3);
}
// Clamp is expected to be quantized as the Kaldi LSTM clipping activation.
TEST_F(PWLAproximationTest, forClampOnRecursiveAlgoWithSegmentThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Clamp>({1, 10}, -50, 50))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActKaldiLstmClipping)
.pwl_quantization_segments_threshold(3);
}
// Uniform Algorithm
// Segment Threshold
//
// Segment-count checks for the uniform PWL segmentation algorithm; note the
// much higher bound (65) for sigmoid/tanh compared to the recursive algorithm.
TEST_F(PWLAproximationTest, forSigmoidonUniformAlgoWithSegmentThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Sigmoid>({1, 10}))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.withUniformPWLAlgo()
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActSigmoid)
.pwl_quantization_segments_threshold(65);
}
TEST_F(PWLAproximationTest, forTanhonUniformAlgoWithSegmentThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Tanh>({1, 10}))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.withUniformPWLAlgo()
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActTanh)
.pwl_quantization_segments_threshold(65);
}
// Disabled; the reason is not recorded in this file.
TEST_F(PWLAproximationTest, DISABLED_forIdentityOnUniformAlgoWithSegmentThresholdIsSuccess) {
assert_that().onInferModel(IdentityActivationModel())
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.withUniformPWLAlgo()
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActIdentity)
.pwl_quantization_segments_threshold(3);
}
// Clamp is expected to be quantized as the Kaldi LSTM clipping activation.
TEST_F(PWLAproximationTest, forClampOnUniformAlgoWithSegmentThresholdIsSuccess) {
assert_that().onInferNgraphModel(detail::CreateActivationFunction<ngraph::opset8::Clamp>({1, 10}, -50, 50))
.gna()
.withGNAConfig(GNA_CONFIG_KEY(SCALE_FACTOR), 1.0f)
.withUniformPWLAlgo()
.propagate_forward()
.called_with()
.pwl_quantization_activation(DnnActivationType::kActKaldiLstmClipping)
.pwl_quantization_segments_threshold(3);
}