From 0c373ba79b4d695f11933eedd80b8b640abbb32a Mon Sep 17 00:00:00 2001
From: Piotr Szmelczynski
Date: Thu, 29 Oct 2020 09:37:52 +0100
Subject: [PATCH] Revise ReLU (#2863)

* remove relu_backprop

* Update ReLU spec

* change inputs and outputs subsections of ReLU spec

* Update Mathematical Formulation subsection

* Update Category of ReLU in spec

* Update Short description of ReLU in spec
---
 docs/ops/activation/ReLU_1.md                 | 18 ++++++++++++------
 .../include/ngraph/runtime/reference/relu.hpp |  9 ---------
 2 files changed, 12 insertions(+), 15 deletions(-)

diff --git a/docs/ops/activation/ReLU_1.md b/docs/ops/activation/ReLU_1.md
index 56b907648e4..f3808ca37a2 100644
--- a/docs/ops/activation/ReLU_1.md
+++ b/docs/ops/activation/ReLU_1.md
@@ -2,9 +2,9 @@
 
 **Versioned name**: *ReLU-1*
 
-**Category**: *Activation*
+**Category**: *Activation function*
 
-**Short description**: [Reference](http://caffe.berkeleyvision.org/tutorial/layers/relu.html)
+**Short description**: ReLU element-wise activation function. ([Reference](http://caffe.berkeleyvision.org/tutorial/layers/relu.html))
 
 **Detailed description**: [Reference](https://github.com/Kulbear/deep-learning-nano-foundation/wiki/ReLU-and-Softmax-Activation-Functions#rectified-linear-units)
 
@@ -12,13 +12,19 @@
 
 **Mathematical Formulation**
 
-\f[
-Y_{i}^{( l )} = max(0, Y_{i}^{( l - 1 )})
-\f]
+For each element from the input tensor, *ReLU* calculates the corresponding
+ element in the output tensor with the following formula:
+ \f[
+ Y_{i}^{( l )} = max(0, Y_{i}^{( l - 1 )})
+ \f]
 
 **Inputs**:
 
-* **1**: Multidimensional input tensor. Required.
+* **1**: Multidimensional input tensor *x* of any supported numeric type. Required.
+
+**Outputs**:
+
+* **1**: Result of the ReLU function applied to the input tensor *x*. Tensor with shape and type matching the input tensor. Required.
 
 **Example**
 
diff --git a/ngraph/core/reference/include/ngraph/runtime/reference/relu.hpp b/ngraph/core/reference/include/ngraph/runtime/reference/relu.hpp
index 0fabf4b1ffc..0d32f59f66f 100644
--- a/ngraph/core/reference/include/ngraph/runtime/reference/relu.hpp
+++ b/ngraph/core/reference/include/ngraph/runtime/reference/relu.hpp
@@ -33,15 +33,6 @@ namespace ngraph
                     out[i] = arg[i] > zero ? arg[i] : zero;
                 }
             }
-            template <typename T>
-            void relu_backprop(const T* arg, const T* delta_arg, T* out, size_t count)
-            {
-                T zero = 0;
-                for (size_t i = 0; i < count; i++)
-                {
-                    out[i] = arg[i] > zero ? delta_arg[i] : zero;
-                }
-            }
         }
     }
 }
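
Note: for context, the reference kernel this patch keeps computes the element-wise ReLU from the revised spec, Y_i = max(0, X_i). Below is a minimal, self-contained sketch of that computation; the standalone relu_sketch function and its main driver are illustrative assumptions (the arg/out/count signature is modeled on the removed relu_backprop signature above), not part of the nGraph API or of this patch.

// Minimal standalone sketch of the element-wise ReLU described in ReLU_1.md.
// relu_sketch and main are hypothetical helpers for illustration only.
#include <cstddef>
#include <iostream>
#include <vector>

template <typename T>
void relu_sketch(const T* arg, T* out, size_t count)
{
    T zero = 0;
    for (size_t i = 0; i < count; i++)
    {
        // Clamp negative values to zero; pass positive values through,
        // matching Y_i = max(0, X_i) from the spec.
        out[i] = arg[i] > zero ? arg[i] : zero;
    }
}

int main()
{
    std::vector<float> in = {-2.0f, -0.5f, 0.0f, 1.5f, 3.0f};
    std::vector<float> out(in.size());
    relu_sketch(in.data(), out.data(), in.size());
    for (float v : out)
        std::cout << v << ' '; // prints: 0 0 0 1.5 3
    std::cout << '\n';
    return 0;
}

Compiling and running the sketch prints 0 0 0 1.5 3, showing negative inputs clamped to zero while positive inputs pass through unchanged.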