diff --git a/docs/ops/activation/SoftMax_1.md b/docs/ops/activation/SoftMax_1.md
index 41a28f05792..6d1d4c9818f 100644
--- a/docs/ops/activation/SoftMax_1.md
+++ b/docs/ops/activation/SoftMax_1.md
@@ -2,7 +2,7 @@
 
 **Versioned name**: *SoftMax-1*
 
-**Category**: *Activation*
+**Category**: *Activation function*
 
 **Short description**: [Reference](https://github.com/Kulbear/deep-learning-nano-foundation/wiki/ReLU-and-Softmax-Activation-Functions#softmax)
 
@@ -18,6 +18,12 @@
   * **Default value**: 1
   * **Required**: *no*
 
+**Mathematical Formulation**
+
+\f[
+y_{c} = \frac{e^{Z_{c}}}{\sum_{d=1}^{C}e^{Z_{d}}}
+\f]
+where \f$C\f$ is a size of tensor along *axis* dimension.
 
 **Inputs**:
 
@@ -27,13 +33,6 @@
 
 * **1**: The resulting tensor of the same shape and type as input tensor.
 
-**Detailed description**
-
-\f[
-y_{c} = \frac{e^{Z_{c}}}{\sum_{d=1}^{C}e^{Z_{d}}}
-\f]
-where \f$C\f$ is a size of tensor along *axis* dimension.
-
 **Example**
 
 ```xml
diff --git a/ngraph/core/include/ngraph/op/softmax.hpp b/ngraph/core/include/ngraph/op/softmax.hpp
index c5cb73e3add..e9b4c3f7e00 100644
--- a/ngraph/core/include/ngraph/op/softmax.hpp
+++ b/ngraph/core/include/ngraph/op/softmax.hpp
@@ -41,7 +41,7 @@ namespace ngraph
                 ///
                 /// Output `[d0, ...]`
                 ///
-                Softmax(const Output<Node>& arg, const size_t axis);
+                Softmax(const Output<Node>& arg, const size_t axis = 1);
 
                 bool visit_attributes(AttributeVisitor& visitor) override;
                 void validate_and_infer_types() override;
diff --git a/ngraph/test/CMakeLists.txt b/ngraph/test/CMakeLists.txt
index 3c528560930..81db61806dc 100644
--- a/ngraph/test/CMakeLists.txt
+++ b/ngraph/test/CMakeLists.txt
@@ -172,6 +172,7 @@ set(SRC
     type_prop/select.cpp
     type_prop/shape_of.cpp
     type_prop/shuffle_channels.cpp
+    type_prop/softmax.cpp
     type_prop/softplus.cpp
     type_prop/space_to_batch.cpp
     type_prop/space_to_depth.cpp
diff --git a/ngraph/test/type_prop/softmax.cpp b/ngraph/test/type_prop/softmax.cpp
new file mode 100644
index 00000000000..e76761f0618
--- /dev/null
+++ b/ngraph/test/type_prop/softmax.cpp
@@ -0,0 +1,37 @@
+//*****************************************************************************
+// Copyright 2017-2020 Intel Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//*****************************************************************************
+
+#include "gtest/gtest.h"
+#include "ngraph/ngraph.hpp"
+
+using namespace std;
+using namespace ngraph;
+
+TEST(type_prop, softmax_default_axis)
+{
+    const Shape arg_shape{2, 3};
+    auto arg = make_shared<op::Parameter>(element::f32, arg_shape);
+    auto sm = make_shared<op::v1::Softmax>(arg);
+    ASSERT_EQ(sm->get_axis(), 1);
+}
+
+TEST(type_prop, softmax_out_of_bound_axis)
+{
+    const Shape arg_shape{2, 3};
+    auto arg = make_shared<op::Parameter>(element::f32, arg_shape);
+    // axis cannot be a negative number
+    ASSERT_THROW(make_shared<op::v1::Softmax>(arg, -1), ngraph::NodeValidationFailure);
+}
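
Outside the patch itself, here is a minimal standalone sketch of the formula that the documentation hunk above moves under **Mathematical Formulation**, \f$y_{c} = \frac{e^{Z_{c}}}{\sum_{d=1}^{C}e^{Z_{d}}}\f$, assuming a plain row-major `{rows, cols}` buffer and `axis = 1`, the new constructor default in `softmax.hpp`. The helper name `softmax_axis1` and the sample values are illustrative only and are not part of nGraph.

```cpp
#include <algorithm>
#include <cmath>
#include <cstddef>
#include <iostream>
#include <vector>

// Standalone sketch of the SoftMax-1 formula documented above:
//   y_c = exp(z_c) / sum_d exp(z_d), taken along the chosen axis.
// For a row-major {rows, cols} tensor with axis = 1 (the new constructor
// default), every row is normalized independently.
std::vector<float> softmax_axis1(const std::vector<float>& z, std::size_t rows, std::size_t cols)
{
    std::vector<float> y(z.size());
    for (std::size_t r = 0; r < rows; ++r)
    {
        // Subtract the row maximum before exponentiating; this guards
        // against overflow and leaves the result of the formula unchanged.
        float max_val = z[r * cols];
        for (std::size_t c = 1; c < cols; ++c)
            max_val = std::max(max_val, z[r * cols + c]);

        float denom = 0.0f;
        for (std::size_t c = 0; c < cols; ++c)
        {
            y[r * cols + c] = std::exp(z[r * cols + c] - max_val);
            denom += y[r * cols + c];
        }
        for (std::size_t c = 0; c < cols; ++c)
            y[r * cols + c] /= denom;
    }
    return y;
}

int main()
{
    // Same {2, 3} shape as the new type_prop test; each row sums to 1.
    const std::vector<float> z = {1.0f, 2.0f, 3.0f, 1.0f, 1.0f, 1.0f};
    for (float v : softmax_axis1(z, 2, 3))
        std::cout << v << ' '; // ~0.09 0.24 0.67 0.33 0.33 0.33
    std::cout << '\n';
}
```

Subtracting the per-row maximum before exponentiating is a common numerical-stability choice; it cancels in the ratio, so the printed values still match the formula above.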