[GNA] Replace GNA SoftSign by opset9 SoftSign (#12302)

* Replace GNA SoftSign by opset9 SoftSign

* v9 -> opset9
Nadezhda Ageeva 2022-07-28 16:24:56 +04:00 committed by GitHub
parent 9bd02f76ab
commit 67173a74d4
8 changed files with 16 additions and 171 deletions
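In short: the plugin-local activation op ov::intel_gna::op::SoftSign (deleted below together with its reference implementation and dedicated test) is replaced everywhere by the standard opset9 op, which computes the same f(x) = x/(1 + |x|). A minimal sketch of the replacement as it appears across the hunks below (make_softsign is only an illustrative helper name, not part of the commit):

#include <ngraph/opsets/opset9.hpp>

#include <memory>

// Build the activation with the standard opset9 op (an alias of
// ov::op::v9::SoftSign) instead of the removed plugin-local op.
std::shared_ptr<ngraph::Node> make_softsign(const ngraph::Output<ngraph::Node>& input) {
    return std::make_shared<ngraph::opset9::SoftSign>(input);
}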

View File

@@ -1,81 +0,0 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "softsign.hpp"

#include <ngraph/validation_util.hpp>

#include "ngraph/attribute_visitor.hpp"
#include "ngraph/runtime/host_tensor.hpp"

#include <cmath>
#include <cstddef>

namespace ov {
namespace intel_gna {
namespace op {

template <typename T>
void softsign(const T* arg, T* out, size_t count) {
    for (size_t i = 0; i < count; i++) {
        out[i] = arg[i] / (1 + std::abs(arg[i]));
    }
}

SoftSign::SoftSign(const ngraph::Output<ngraph::Node>& arg) : ov::op::util::UnaryElementwiseArithmetic(arg) {
    constructor_validate_and_infer_types();
}

std::shared_ptr<ngraph::Node> SoftSign::clone_with_new_inputs(const ngraph::OutputVector& new_args) const {
    check_new_args_count(this, new_args);
    return std::make_shared<SoftSign>(new_args.at(0));
}

template <ngraph::element::Type_t ET>
inline bool evaluate(const ov::Tensor& arg, ov::Tensor& out, const size_t count) {
    using T = typename ngraph::element_type_traits<ET>::value_type;
    softsign<T>(arg.data<T>(), out.data<T>(), count);
    return true;
}

namespace {
bool evaluate_softsign(const ov::Tensor& arg, ov::Tensor& out) {
    bool rc = true;
    size_t count = shape_size(arg.get_shape());
    switch (arg.get_element_type()) {
    case ov::element::Type_t::f16:
        rc = evaluate<ov::element::Type_t::f16>(arg, out, count);
        break;
    case ov::element::Type_t::f32:
        rc = evaluate<ov::element::Type_t::f32>(arg, out, count);
        break;
    default:
        rc = false;
        break;
    }
    return rc;
}
}  // namespace

bool SoftSign::evaluate(ov::TensorVector& outputs,
                        const ov::TensorVector& inputs,
                        const ov::EvaluationContext& evaluation_context) const {
    return evaluate_softsign(inputs[0], outputs[0]);
}

bool SoftSign::has_evaluate() const {
    switch (get_input_element_type(0)) {
    case ngraph::element::f16:
    case ngraph::element::f32:
        return true;
    default:
        break;
    }
    return false;
}

}  // namespace op
}  // namespace intel_gna
}  // namespace ov
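The removed kernel above computes f(x) = x/(1 + |x|), which is exactly what the standard SoftSign defines, so the swap is behavior-preserving for the supported f16/f32 cases. A standalone sanity check of the formula, reusing the sample points from the removed unit test further down (a sketch, not part of the commit):

#include <cmath>
#include <cstdio>

// Prints softsign at the removed test's sample points;
// expected: -0.5, 0, 0.5, 0.952381 (i.e. 20/21).
int main() {
    const float inputs[] = {-1.0f, 0.0f, 1.0f, 20.0f};
    for (float x : inputs)
        std::printf("softsign(%g) = %g\n", x, x / (1.0f + std::fabs(x)));
    return 0;
}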

View File

@@ -1,34 +0,0 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include "openvino/op/op.hpp"
#include "ngraph/node.hpp"
#include "openvino/op/util/unary_elementwise_arithmetic.hpp"

namespace ov {
namespace intel_gna {
namespace op {

/// \brief Neural Activation Function
/// f(x) = x/(1.0 + |x|)
///
class SoftSign : public ov::op::util::UnaryElementwiseArithmetic {
public:
    OPENVINO_OP("SoftSign", "intel_gna", ov::op::util::UnaryElementwiseArithmetic);

    SoftSign() = default;
    /// \brief Constructs a SoftSign operation.
    ///
    /// \param data Input tensor
    SoftSign(const ngraph::Output<ngraph::Node>& arg);

    std::shared_ptr<Node> clone_with_new_inputs(const ngraph::OutputVector& new_args) const override;
    bool evaluate(ov::TensorVector& output_values,
                  const ov::TensorVector& input_values,
                  const ov::EvaluationContext& evaluation_context) const override;
    bool has_evaluate() const override;
};

}  // namespace op
}  // namespace intel_gna
}  // namespace ov
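With this header gone, consumers switch to the core op. As the hunks below show, it is picked up through the opset header; the underlying class is ov::op::v9::SoftSign:

// Either include pulls in the standard op:
#include <ngraph/opsets/opset9.hpp>   // ngraph::opset9::SoftSign (opset alias)
#include "openvino/op/softsign.hpp"   // ov::op::v9::SoftSign (core declaration)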

View File

@@ -45,7 +45,7 @@ bool split_search(double lower_bound, double upper_bound) {
     double break_bound = get_break_bound<T>();
     if (std::is_same<T, ngraph::opset8::Sigmoid>::value ||
         std::is_same<T, ngraph::opset8::Tanh>::value ||
-        std::is_same<T, ov::intel_gna::op::SoftSign>::value ||
+        std::is_same<T, ngraph::opset9::SoftSign>::value ||
         std::is_same<T, ngraph::opset8::Exp>::value ||
         std::is_same<T, ngraph::opset8::Power>::value) {
         return lower_bound < break_bound && upper_bound > break_bound;
@@ -208,7 +208,7 @@ template<typename T>
 bool is_negative(const details::Function<T>& activation_function, double upper_bound) {
     if (std::is_same<T, ngraph::opset8::Sigmoid>::value ||
         std::is_same<T, ngraph::opset8::Tanh>::value ||
-        std::is_same<T, ov::intel_gna::op::SoftSign>::value) {
+        std::is_same<T, ngraph::opset9::SoftSign>::value) {
         return upper_bound == 0;
     }

@@ -490,7 +490,7 @@ static std::shared_ptr<ngraph::pattern::Matcher> create_matcher(ov::graph_rewrit
         ngraph::pattern::any_input(), ngraph::pattern::any_input() });
     auto powerIE = ngraph::pattern::wrap_type<ngraph::op::PowerIE>({activation_input});
     auto log = ngraph::pattern::wrap_type<ngraph::opset8::Log>({activation_input});
-    auto softsign = ngraph::pattern::wrap_type<ov::intel_gna::op::SoftSign>({activation_input});
+    auto softsign = ngraph::pattern::wrap_type<ngraph::opset9::SoftSign>({activation_input});
     auto activation_function =
         std::make_shared<ngraph::pattern::op::Or>(ov::OutputVector{ sigmoid, tanh, exp, power, powerIE, log, softsign });

@@ -515,7 +515,7 @@ static std::shared_ptr<ngraph::pattern::Matcher> create_matcher(ov::graph_rewrit
                 ngraph::opset8::Power,
                 ngraph::op::PowerIE,
                 ngraph::opset8::Log,
-                ov::intel_gna::op::SoftSign>(),
+                ngraph::opset9::SoftSign>(),
             fake_quantize_iter != pattern_to_output.end() ?
                 fake_quantize_iter->second.get_node_shared_ptr() : std::shared_ptr<ngraph::Node>(),
             iter->second.get_node_shared_ptr(),

View File

@@ -14,11 +14,10 @@
 #include <ngraph/ngraph.hpp>
 #include <ngraph/pass/graph_rewrite.hpp>
 #include "ngraph/pattern/matcher.hpp"
+#include <ngraph/opsets/opset9.hpp>
 #include <ngraph/opsets/opset8.hpp>
 #include <legacy/ngraph_ops/power.hpp>
-#include "ops/softsign.hpp"

 namespace GNAPluginNS {
 /**
  * @ingroup ie_transformation_common_api
@@ -178,7 +177,7 @@ struct Function<ngraph::opset8::Log> {
 };  // struct Function<ngraph::opset8::Log>

 template<>
-struct Function<ov::intel_gna::op::SoftSign> {
+struct Function<ngraph::opset9::SoftSign> {
     static const char* name() {
         return "softsign";
     }
@@ -206,7 +205,7 @@ struct Function<ov::intel_gna::op::SoftSign> {
     static double max_value() {
         return 1;
     }
-};  // struct Function<ov::intel_gna::op::SoftSign>
+};  // struct Function<ngraph::opset9::SoftSign>

 template<>
 struct Function<ngraph::op::PowerIE> {
@@ -263,7 +262,7 @@ double lower_bound() {
         std::is_same<T, ngraph::opset8::Exp>::value ||
         std::is_same<T, ngraph::opset8::Tanh>::value ||
         std::is_same<T, ngraph::opset8::Sigmoid>::value ||
-        std::is_same<T, ov::intel_gna::op::SoftSign>::value>());
+        std::is_same<T, ngraph::opset9::SoftSign>::value>());
 }

 template<typename T>
@@ -302,7 +301,7 @@ double upper_bound() {
         std::is_same<T, ngraph::opset8::Power>::value ||
         std::is_same<T, ngraph::op::PowerIE>::value ||
         std::is_same<T, ngraph::opset8::Sigmoid>::value ||
-        std::is_same<T, ov::intel_gna::op::SoftSign>::value>());
+        std::is_same<T, ngraph::opset9::SoftSign>::value>());
 }

 template<typename T>
@@ -324,7 +323,7 @@ const char* name() {
         std::is_same<T, ngraph::opset8::Power>::value ||
         std::is_same<T, ngraph::op::PowerIE>::value ||
         std::is_same<T, ngraph::opset8::Log>::value ||
-        std::is_same<T, ov::intel_gna::op::SoftSign>::value>());
+        std::is_same<T, ngraph::opset9::SoftSign>::value>());
 }

 template<typename T>

View File

@@ -9,11 +9,11 @@
 #include "transformations/utils/transformation_helper.hpp"
 #include "transformations/utils/utils.hpp"
+#include <ngraph/opsets/opset9.hpp>
 #include <ngraph/opsets/opset8.hpp>
 #include <ngraph/pattern/op/wrap_type.hpp>
 #include <ngraph/pattern/op/or.hpp>
 #include <ngraph/rt_info.hpp>
-#include <ops/softsign.hpp>

 using namespace GNAPluginNS;
@@ -22,7 +22,7 @@ using Node = std::shared_ptr<ngraph::Node>;
 namespace {

 void DoTransformation(Node start_node, Node last_node) {
-    auto activation = std::make_shared<ov::intel_gna::op::SoftSign>(start_node);
+    auto activation = std::make_shared<ngraph::opset9::SoftSign>(start_node);
     activation->set_friendly_name(last_node->get_friendly_name());
     ngraph::copy_runtime_info(last_node, activation);
     ngraph::replace_node(last_node, activation);

View File

@@ -1,38 +0,0 @@
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "ops/softsign.hpp"

#include <string>
#include <vector>

#include "execute_tools.hpp"
#include "gtest/gtest.h"
#include "ngraph/runtime/host_tensor.hpp"
#include "ngraph/validation_util.hpp"
#include "ngraph/opsets/opset8.hpp"

TEST(op_eval, softsign) {
    auto p = std::make_shared<ngraph::opset8::Parameter>(ngraph::element::f32, ngraph::Shape{4});
    auto softsign = std::make_shared<ov::intel_gna::op::SoftSign>(p);
    auto fun = std::make_shared<ngraph::Function>(ngraph::OutputVector{softsign}, ngraph::ParameterVector{p});

    float inputs[] = {-1.0, 0.0, 1.0, 20.0};
    std::vector<float> expected_result{-0.5, 0.0, 0.5, 0.952381};

    ov::TensorVector result(1);
    ov::Tensor input{ov::element::f32, ov::Shape{4}, inputs};
    ASSERT_TRUE(fun->evaluate(result, ov::TensorVector{input}));

    EXPECT_EQ(result.size(), 1);
    EXPECT_EQ(result[0].get_element_type(), ngraph::element::f32);
    EXPECT_EQ(result[0].get_shape(), ngraph::Shape{4});
    EXPECT_EQ(result[0].get_size(), 4);

    const float* result_data = result[0].data<float>();
    for (size_t i = 0; i < result[0].get_size(); ++i)
        EXPECT_NEAR(result_data[i], expected_result[i], 0.000001);
}
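The op-eval test above is deleted together with the custom op. For reference, an equivalent check against the standard op would look roughly like this (a sketch assuming <ngraph/opsets/opset9.hpp> is included in place of "ops/softsign.hpp" and that the opset9 SoftSign provides evaluate for f32; not code from this commit):

TEST(op_eval, softsign_opset9) {
    auto p = std::make_shared<ngraph::opset8::Parameter>(ngraph::element::f32, ngraph::Shape{4});
    auto softsign = std::make_shared<ngraph::opset9::SoftSign>(p);
    auto fun = std::make_shared<ngraph::Function>(ngraph::OutputVector{softsign}, ngraph::ParameterVector{p});

    float inputs[] = {-1.0, 0.0, 1.0, 20.0};
    std::vector<float> expected_result{-0.5, 0.0, 0.5, 0.952381};

    ov::TensorVector result(1);
    ov::Tensor input{ov::element::f32, ov::Shape{4}, inputs};
    ASSERT_TRUE(fun->evaluate(result, ov::TensorVector{input}));

    const float* result_data = result[0].data<float>();
    for (size_t i = 0; i < result[0].get_size(); ++i)
        EXPECT_NEAR(result_data[i], expected_result[i], 0.000001);
}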

View File

@@ -87,8 +87,8 @@ TEST(GnaPwlTest, Exp) {
 TEST(GnaPwlTest, SoftSign) {
     RunTest(
-        pwl_test::CreateActivationFunction<ov::intel_gna::op::SoftSign>({1, 32}),
-        pwl_test::CreateActivationFunction<ov::intel_gna::op::SoftSign>({1, 32}),
+        pwl_test::CreateActivationFunction<ngraph::opset9::SoftSign>({1, 32}),
+        pwl_test::CreateActivationFunction<ngraph::opset9::SoftSign>({1, 32}),
         -10,
         10);
 }

View File

@@ -8,11 +8,10 @@
 #include "common_test_utils/ngraph_test_utils.hpp"
 #include <ngraph/function.hpp>
+#include <ngraph/opsets/opset9.hpp>
 #include <ngraph/opsets/opset8.hpp>
 #include <ngraph/pass/manager.hpp>
 #include <transformations/init_node_info.hpp>
-#include <ops/softsign.hpp>

 namespace testing {
 namespace {
@@ -21,7 +20,7 @@ std::shared_ptr<ngraph::Function> createSoftSignFunction() {
     auto input_params = std::make_shared<ngraph::opset8::Parameter>(ngraph::element::f32,
                                                                     ngraph::Shape{ 1, 1, 1, 64 });
-    auto softsign = std::make_shared<ov::intel_gna::op::SoftSign>(input_params);
+    auto softsign = std::make_shared<ov::op::v9::SoftSign>(input_params);
     ngraph::ResultVector results{ std::make_shared<ngraph::op::Result>(softsign) };