* Added support for Gelu-6 to the MO * Adding Gelu-6 to ngraph and python API + some tests * Fixed typo in the Gelu approximation mode * Fixed Gelu-6 reference implementation for Tanh mode * Added transformation to downgrade v6::Gelu to v2::Gelu * Added specification for the Gelu-6 * Code style fixes * The Gelu-6 operation specification update * Fixed compilation issue in reference implementation for Gelu * Fix compilation issues for some OSs * Code style fix * One more cpplint issue fix * Fixed Gelu6 reference implementation compilation on Windows. * Code style fix * Fixed various ngraph unit tests * Code style check * Reverted Gelu-2 to be fused op * Fixed Gelu6 downgrade transformation * Added unit test for Gelu6Downgrade transformation * Update copyright year * Updated copyright year * Replaced tab characters with 4 spaces in IR reader tests * Code style fixes * Added default value for GeluApproximation mode for Gelu-6 op * Fixed code style for Gelu-6 * Changed order of parameters for the Gelu evaluate to potentially avoid backward compatibility issues with ARM plugin * Fixed code style * Introduced opset7. Moved Gelu6 to opset7 * Fixed non-updated transformation * Fixed opset version in ngraph Python API for Gelu operation * Fixed typo in the opset number in the documentation * Reverted some changes related to Gelu6 * Updated MO to produce Gelu7 * Updated unit tests for Gelu * Updated Gelu7 specification * Changed gelu reference implementation. Added opset7 to Python packages * Updated Python API tests for Gelu operation * Code style fix * Marked get_approximation_mode function as const * Added missing "const" qualifier * Fixed code style issues in tests * Added extractor for MxNet operation Gelu * Spelling issues fix * Updated MxNet supported symbols * Added NGRAPH_OP_SCOPE for Gelu7 validate_and_infer_types * Fixed a typo in the comment
51 lines · 1.8 KiB · C++
// Copyright (C) 2018-2021 Intel Corporation
|
|
// SPDX-License-Identifier: Apache-2.0
|
|
//
|
|
|
|
#include <gtest/gtest.h>
|
|
|
|
|
|
#include <ngraph/function.hpp>
|
|
#include <ngraph/opsets/opset2.hpp>
|
|
#include <ngraph/opsets/opset7.hpp>
|
|
#include <transformations/op_conversions/gelu7_downgrade.hpp>
|
|
#include <transformations/init_node_info.hpp>
|
|
#include <ngraph/pass/manager.hpp>
|
|
|
|
#include "common_test_utils/ngraph_test_utils.hpp"
|
|
|
|
using namespace testing;
|
|
|
|
TEST(TransformationTests, Gelu7Downgrade) {
    // Build a function with a single opset7::Gelu (ERF approximation mode) and
    // run the Gelu7Downgrade pass over it. The pass is expected to replace the
    // v7 operation with its opset2 equivalent while keeping the friendly name.
    std::shared_ptr<ngraph::Function> transformed;
    {
        auto param = std::make_shared<ngraph::opset7::Parameter>(ngraph::element::f32, ngraph::Shape{1, 2, 3});
        auto gelu7 = std::make_shared<ngraph::opset7::Gelu>(param, ngraph::op::GeluApproximationMode::ERF);
        gelu7->set_friendly_name("gelu7");

        transformed = std::make_shared<ngraph::Function>(ngraph::NodeVector{gelu7}, ngraph::ParameterVector{param});

        ngraph::pass::Manager pass_manager;
        pass_manager.register_pass<ngraph::pass::InitNodeInfo>();
        pass_manager.register_pass<ngraph::pass::Gelu7Downgrade>();
        pass_manager.run_passes(transformed);

        // Runtime info must survive the transformation without throwing.
        ASSERT_NO_THROW(check_rt_info(transformed));
    }

    // Reference: the same graph expressed directly with opset2::Gelu.
    std::shared_ptr<ngraph::Function> reference;
    {
        auto param = std::make_shared<ngraph::opset7::Parameter>(ngraph::element::f32, ngraph::Shape{1, 2, 3});
        auto gelu2 = std::make_shared<ngraph::opset2::Gelu>(param);
        gelu2->set_friendly_name("gelu7");

        reference = std::make_shared<ngraph::Function>(ngraph::NodeVector{gelu2}, ngraph::ParameterVector{param});
    }

    // Graph structure after the pass must match the opset2 reference.
    const auto comparison = compare_functions(transformed, reference);
    ASSERT_TRUE(comparison.first) << comparison.second;

    // The node feeding the result must keep the original friendly name.
    auto result_op = transformed->get_output_op(0);
    auto producer = result_op->input(0).get_source_output().get_node_shared_ptr();
    ASSERT_TRUE(producer->get_friendly_name() == "gelu7") << "Transformation Gelu7Downgrade should keep output names.\n";
}
|
|
|