Remove deprecated v0::Softmax (#2917)
parent b86f908247
commit f6332c702d
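Migration note: v0::Softmax took an AxisSet (an empty set meant "all axes"), while op::v1::Softmax takes a single 0-based axis index, so only single-axis uses translate directly; the deleted opset1_upgrade op_cast below rejects zero or multiple axes for the same reason. A minimal caller-side sketch, assuming the umbrella header ngraph/ngraph.hpp and a hypothetical helper name:

    #include <memory>

    #include "ngraph/ngraph.hpp"

    using namespace ngraph;

    // Hypothetical helper: softmax over axis 1 of a 2-D input.
    std::shared_ptr<Node> make_softmax(const Output<Node>& arg)
    {
        // Before this commit (v0, now removed): axes passed as a set.
        //   return std::make_shared<op::v0::Softmax>(arg, AxisSet{1});
        // After: v1 takes one 0-based axis index.
        return std::make_shared<op::v1::Softmax>(arg, 1);
    }

Graphs that used the empty-AxisSet "softmax over all axes" form have no single-op v1 equivalent and would need an explicit Exp / ReduceSum / Divide decomposition.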
@@ -162,7 +162,6 @@ NGRAPH_OP(Sigmoid, ngraph::op::v0, 0)
 NGRAPH_OP(Sign, ngraph::op::v0, 0)
 NGRAPH_OP(Sin, ngraph::op::v0, 0)
 NGRAPH_OP(Sinh, ngraph::op::v0, 0)
-NGRAPH_OP(Softmax, ngraph::op::v0, 0)
 NGRAPH_OP(Softmax, ngraph::op::v1, 1)
 NGRAPH_OP(SpaceToBatch, ngraph::op::v1, 1)
 NGRAPH_OP(SpaceToDepth, ngraph::op::v0, 0)
@@ -22,54 +22,6 @@ namespace ngraph
 {
     namespace op
     {
-        namespace v0
-        {
-            /// \brief Softmax operation.
-            ///
-            class NGRAPH_DEPRECATED(
-                "This operation is deprecated and will be removed soon. "
-                "Use v1::Softmax instead of it.") NGRAPH_API Softmax : public Op
-            {
-                NGRAPH_SUPPRESS_DEPRECATED_START
-            public:
-                static constexpr NodeTypeInfo type_info{"Softmax", 0};
-                const NodeTypeInfo& get_type_info() const override { return type_info; }
-                Softmax() = default;
-                /// \brief Constructs a softmax operation.
-                ///
-                /// \param arg Node that produces the first input tensor.<br>
-                /// `[d0, ...]`
-                /// \param axes The axis positions (0-based) on which to calculate the softmax.
-                ///
-                /// Output `[d0, ...]`
-                ///
-                Softmax(const Output<Node>& arg, const AxisSet& axes);
-                /// \brief Constructs a softmax operation.
-                ///
-                /// \param arg Node that produces the first input tensor.<br>
-                /// `[d0, ...]`
-                /// \param axes node produces the axis positions (0-based) on which to calculate the
-                /// softmax.
-                ///
-                /// Output `[d0, ...]`
-                ///
-                Softmax(const Output<Node>& arg, const Output<Node>& axes);
-
-                void validate_and_infer_types() override;
-
-                virtual std::shared_ptr<Node>
-                    clone_with_new_inputs(const OutputVector& new_args) const override;
-
-                bool are_axes_constant() const;
-                const AxisSet get_axes() const;
-                void set_axes(const AxisSet& axes);
-
-                bool evaluate(const HostTensorVector& outputs,
-                              const HostTensorVector& inputs) const override;
-                NGRAPH_SUPPRESS_DEPRECATED_END
-            };
-        }
-
         namespace v1
         {
             class NGRAPH_API Softmax : public Op
@@ -107,10 +59,5 @@ namespace ngraph
                size_t m_axis;
            };
        }
-
-        // default opset version
-        NGRAPH_SUPPRESS_DEPRECATED_START
-        using v0::Softmax;
-        NGRAPH_SUPPRESS_DEPRECATED_END
    }
}
@@ -29,106 +29,9 @@
 #include "ngraph/runtime/reference/softmax.hpp"
 #include "ngraph/util.hpp"
 
-NGRAPH_SUPPRESS_DEPRECATED_START
-
 using namespace std;
 using namespace ngraph;
 
-// *** SOFTMAX OP SET 0 ***
-constexpr NodeTypeInfo op::v0::Softmax::type_info;
-
-op::v0::Softmax::Softmax(const Output<Node>& arg, const AxisSet& axes)
-    : Op({arg})
-{
-    set_argument(
-        1,
-        op::Constant::create(element::i64, Shape{axes.to_vector().size()}, axes.to_vector())
-            ->output(0));
-    add_provenance_group_member(input_value(1).get_node_shared_ptr());
-    constructor_validate_and_infer_types();
-}
-
-op::v0::Softmax::Softmax(const Output<Node>& arg, const Output<Node>& axes)
-    : Op({arg, axes})
-{
-    constructor_validate_and_infer_types();
-}
-
-bool op::v0::Softmax::are_axes_constant() const
-{
-    return op::is_constant(input_value(1).get_node());
-}
-
-const AxisSet op::v0::Softmax::get_axes() const
-{
-    AxisSet axes;
-    auto const_op = dynamic_pointer_cast<op::Constant>(input_value(1).get_node_shared_ptr());
-    if (const_op)
-    {
-        axes = const_op->get_axis_set_val();
-    }
-    else
-    {
-        throw ngraph_error("get_axes called on a Softmax node whose 'axes' input is not constant");
-    }
-    return axes;
-}
-
-void op::v0::Softmax::set_axes(const AxisSet& axes)
-{
-    shared_ptr<Node> current_const = input_value(1).get_node_shared_ptr();
-    shared_ptr<Node> replacement_const =
-        op::Constant::create(element::i64, Shape{axes.to_vector().size()}, axes.to_vector());
-    this->input(1).replace_source_output(replacement_const->output(0));
-    replace_provenance_group_member(current_const, replacement_const);
-}
-
-void op::v0::Softmax::validate_and_infer_types()
-{
-    const PartialShape& input_shape = get_input_partial_shape(0);
-
-    if (input_shape.is_dynamic())
-    {
-        set_output_type(0, get_input_element_type(0), input_shape);
-    }
-    else
-    {
-        set_output_type(0, get_input_element_type(0), input_shape.to_shape());
-
-        if (are_axes_constant())
-        {
-            auto m_axes = get_axes();
-            for (auto axis : m_axes)
-            {
-                NODE_VALIDATION_CHECK(this,
-                                      axis < input_shape.rank().get_length(),
-                                      "Reduction axis (",
-                                      axis,
-                                      ") is out of bounds (argument shape: ",
-                                      input_shape,
-                                      ").");
-            }
-            // empty axes == all axes
-            if (m_axes.size() == 0)
-            {
-                for (size_t i = 0; i < get_shape().size(); ++i)
-                {
-                    m_axes.insert(i);
-                }
-                set_axes(m_axes);
-            }
-        }
-    }
-
-    set_input_is_relevant_to_shape(1);
-}
-
-shared_ptr<Node> op::v0::Softmax::clone_with_new_inputs(const OutputVector& new_args) const
-{
-    check_new_args_count(this, new_args);
-    return make_shared<Softmax>(new_args.at(0), new_args.at(1));
-}
-
 namespace
 {
     template <element::Type_t ET>
@@ -152,14 +55,6 @@ namespace
     }
 }
 
-bool op::v0::Softmax::evaluate(const HostTensorVector& outputs,
-                               const HostTensorVector& inputs) const
-{
-    OV_ITT_SCOPED_TASK(itt::domains::nGraphOp, "op::v0::Softmax::evaluate");
-    outputs[0]->set_unary(inputs[0]);
-    return evaluate_softmax(inputs[0], outputs[0], get_axes());
-}
-
 // *** SOFTMAX OP SET V1 ***
 constexpr NodeTypeInfo op::v1::Softmax::type_info;
 
@@ -35,72 +35,16 @@
 #include "util/test_control.hpp"
 #include "util/test_tools.hpp"
 
-NGRAPH_SUPPRESS_DEPRECATED_START
-
 using namespace std;
 using namespace ngraph;
 
 static string s_manifest = "${MANIFEST}";
 
-NGRAPH_TEST(${BACKEND_NAME}, softmax_dynamic_axes)
-{
-    Shape shape_A{2, 3};
-    Shape shape_B{2};
-    auto A = make_shared<op::Parameter>(element::f32, shape_A);
-    auto B = make_shared<op::Parameter>(element::i64, shape_B);
-    auto f = make_shared<Function>(make_shared<op::Softmax>(A, B), ParameterVector{A, B});
-
-    auto backend = runtime::Backend::create("${BACKEND_NAME}", true);
-
-    auto a = backend->create_tensor(element::f32, shape_A);
-    auto b = backend->create_tensor(element::i64, shape_B);
-    copy_data(a, vector<float>{-3, -2, -1, 0, 1, 2});
-    copy_data(b, vector<int64_t>{0, 1});
-    auto result = backend->create_tensor(element::f32, shape_A);
-
-    auto d = expf(-3) + expf(-2) + expf(-1) + expf(0) + expf(1) + expf(2);
-
-    auto handle = backend->compile(f);
-    handle->call_with_validate({result}, {a, b});
-    vector<float> expected{
-        expf(-3) / d, expf(-2) / d, expf(-1) / d, expf(0) / d, expf(1) / d, expf(2) / d};
-    EXPECT_TRUE(test::all_close_f(expected, read_vector<float>(result)));
-}
-
-NGRAPH_TEST(${BACKEND_NAME}, softmax_all)
-{
-    Shape shape{2, 3};
-    auto A = make_shared<op::Parameter>(element::f32, shape);
-    auto f = make_shared<Function>(make_shared<op::Softmax>(A, AxisSet{0, 1}), ParameterVector{A});
-
-    auto backend = runtime::Backend::create("${BACKEND_NAME}");
-
-    auto a = backend->create_tensor(element::f32, shape);
-    copy_data(a, vector<float>{-3, -2, -1, 0, 1, 2});
-    auto result = backend->create_tensor(element::f32, shape);
-
-    auto d = expf(-3) + expf(-2) + expf(-1) + expf(0) + expf(1) + expf(2);
-
-    auto handle = backend->compile(f);
-    handle->call_with_validate({result}, {a});
-    vector<float> expected{
-        expf(-3) / d, expf(-2) / d, expf(-1) / d, expf(0) / d, expf(1) / d, expf(2) / d};
-    EXPECT_TRUE(test::all_close_f(expected, read_vector<float>(result)));
-
-    // empty AxisSet is the same as "full" AxisSet
-    f = make_shared<Function>(make_shared<op::Softmax>(A, AxisSet{}), ParameterVector{A});
-    backend = runtime::Backend::create("${BACKEND_NAME}");
-
-    auto h1 = backend->compile(f);
-    h1->call_with_validate({result}, {a});
-    EXPECT_TRUE(test::all_close_f(expected, read_vector<float>(result)));
-}
-
 NGRAPH_TEST(${BACKEND_NAME}, softmax_axis_3d)
 {
     Shape shape{2, 2, 3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
-    auto f = make_shared<Function>(make_shared<op::Softmax>(A, AxisSet{0}), ParameterVector{A});
+    auto f = make_shared<Function>(make_shared<op::v1::Softmax>(A, 0), ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -137,7 +81,7 @@ NGRAPH_TEST(${BACKEND_NAME}, softmax_axis_3d_double)
 {
     Shape shape{2, 2, 3};
     auto A = make_shared<op::Parameter>(element::f64, shape);
-    auto f = make_shared<Function>(make_shared<op::Softmax>(A, AxisSet{0}), ParameterVector{A});
+    auto f = make_shared<Function>(make_shared<op::v1::Softmax>(A, 0), ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -170,11 +114,11 @@ NGRAPH_TEST(${BACKEND_NAME}, softmax_axis_3d_double)
     EXPECT_TRUE(test::all_close(expected, read_vector<double>(result)));
 }
 
-NGRAPH_TEST(${BACKEND_NAME}, softmax_axis)
+NGRAPH_TEST(${BACKEND_NAME}, softmax_2d_axis_1)
 {
     Shape shape{2, 3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
-    auto f = make_shared<Function>(make_shared<op::Softmax>(A, AxisSet{1}), ParameterVector{A});
+    auto f = make_shared<Function>(make_shared<op::v1::Softmax>(A, 1), ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -196,11 +140,11 @@ NGRAPH_TEST(${BACKEND_NAME}, softmax_axis)
     EXPECT_TRUE(test::all_close_f(expected, read_vector<float>(result)));
 }
 
-NGRAPH_TEST(${BACKEND_NAME}, softmax_axis_2)
+NGRAPH_TEST(${BACKEND_NAME}, softmax_2d_axis_0)
 {
     Shape shape{2, 3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
-    auto f = make_shared<Function>(make_shared<op::Softmax>(A, AxisSet{0}), ParameterVector{A});
+    auto f = make_shared<Function>(make_shared<op::v1::Softmax>(A, 0), ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -227,7 +171,7 @@ NGRAPH_TEST(${BACKEND_NAME}, softmax_axis_3d_trivial)
 {
     Shape shape{1, 2, 3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
-    auto f = make_shared<Function>(make_shared<op::Softmax>(A, AxisSet{0}), ParameterVector{A});
+    auto f = make_shared<Function>(make_shared<op::v1::Softmax>(A, 0), ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -245,7 +189,7 @@ NGRAPH_TEST(${BACKEND_NAME}, softmax_underflow)
 {
     Shape shape{2, 3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
-    auto f = make_shared<Function>(make_shared<op::Softmax>(A, AxisSet{0}), ParameterVector{A});
+    auto f = make_shared<Function>(make_shared<op::v1::Softmax>(A, 0), ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -270,7 +214,7 @@ NGRAPH_TEST(${BACKEND_NAME}, softmax_overflow)
 {
     Shape shape{2, 3};
     auto A = make_shared<op::Parameter>(element::f32, shape);
-    auto f = make_shared<Function>(make_shared<op::Softmax>(A, AxisSet{0}), ParameterVector{A});
+    auto f = make_shared<Function>(make_shared<op::v1::Softmax>(A, 0), ParameterVector{A});
 
     auto backend = runtime::Backend::create("${BACKEND_NAME}");
 
@@ -769,7 +769,7 @@ namespace
 
    void op_is_Softmax()
    {
-       op::Softmax node;
+       op::v1::Softmax node;
        EXPECT_FALSE(op::is_unary_elementwise_arithmetic(&node));
        EXPECT_FALSE(op::is_binary_elementwise_arithmetic(&node));
        EXPECT_FALSE(op::is_binary_elementwise_comparison(&node));
@@ -1387,7 +1387,7 @@ protected:
        case OP_TYPEID::Round_v5:
        case OP_TYPEID::ShapeOf_v3:
        case OP_TYPEID::ShapeOf:
-       case OP_TYPEID::Softmax:
+       case OP_TYPEID::Softmax_v1:
        case OP_TYPEID::Split_v1:
        case OP_TYPEID::Squeeze:
        case OP_TYPEID::Subtract:
@@ -34,6 +34,7 @@ NGRAPH_OP(LogicalXor, op::v1)
 NGRAPH_OP(LogicalNot, op::v1)
 NGRAPH_OP(GatherTree, op::v1)
 NGRAPH_OP(OneHot, op::v1)
+NGRAPH_OP(Softmax, op::v1)
 NGRAPH_OP(Split, op::v1)
 NGRAPH_OP(Reshape, op::v1)
 NGRAPH_OP(Reverse, op::v1)
@@ -117,7 +117,6 @@ NGRAPH_OP(Sigmoid, ngraph::op)
 NGRAPH_OP(Sign, ngraph::op)
 NGRAPH_OP(Sin, ngraph::op)
 NGRAPH_OP(Sinh, ngraph::op)
-NGRAPH_OP(Softmax, ngraph::op)
 NGRAPH_OP(SpaceToDepth, ngraph::op)
 NGRAPH_OP(Sqrt, ngraph::op)
 NGRAPH_OP(SquaredDifference, ngraph::op)
@@ -284,24 +284,6 @@ namespace opset1_upgrade
         return replacement_node;
     }
 
-    shared_ptr<Node> op_cast(shared_ptr<op::Softmax> node)
-    {
-        NGRAPH_CHECK(op::is_constant(node->input_value(1).get_node()),
-                     "axes parameter is expected to be a static constant");
-
-        AxisSet axes = node->get_axes();
-
-        NGRAPH_CHECK(
-            axes.size() == 1,
-            "Unable to convert Softmax:0 to Softmax:1 with zero or more than one axis. Node: ",
-            *node);
-
-        auto replacement_node =
-            make_shared<op::v1::Softmax>(node->input_value(0), axes.to_vector()[0]);
-        replace_node(node, replacement_node);
-        return replacement_node;
-    }
-
     shared_ptr<Node> op_cast(shared_ptr<op::Subtract> node)
     {
         return op_cast_binary_elementwise_node<op::v0::Subtract, op::v1::Subtract>(node);