[IE Myriad][IE Tests] Activation layer's constants parametrization. (#2071)

CI passed: https://gitlab-icv.inn.intel.com/inference-engine/product-configs/merge_requests/870
Nikita Kudriavtsev 2020-09-10 12:56:21 +03:00 committed by GitHub
parent d4d460101d
commit ef2581d5c6
12 changed files with 144 additions and 126 deletions

View File

@@ -22,39 +22,39 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
         InferenceEngine::Precision::FP16
 };
-const std::vector<ActivationTypes> activationTypes = {
-        Sigmoid,
-        Tanh,
-        Relu,
-        Exp,
-        Log,
-        Sign,
-        Abs,
-        Clamp,
-        Negative,
-        Acos,
-        Asin,
-        Atan,
-        Cos,
-        Cosh,
-        Floor,
-        Sin,
-        Sinh,
-        Sqrt,
-        Tan,
-        Elu,
-        Erf,
-        HardSigmoid,
-        Selu,
-        Ceiling,
-        Mish,
-        HSwish,
-        SoftPlus
+const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
+        {Sigmoid, {}},
+        {Tanh, {}},
+        {Relu, {}},
+        {Exp, {}},
+        {Log, {}},
+        {Sign, {}},
+        {Abs, {}},
+        {Clamp, {{-2.0f, 2.0f}}},
+        {Negative, {}},
+        {Acos, {}},
+        {Asin, {}},
+        {Atan, {}},
+        {Cos, {}},
+        {Cosh, {}},
+        {Floor, {}},
+        {Sin, {}},
+        {Sinh, {}},
+        {Sqrt, {}},
+        {Tan, {}},
+        {Elu, {{0.1f}}},
+        {Erf, {}},
+        {HardSigmoid, {{0.2f, 0.5f}}},
+        {Selu, {{1.6732f, 1.0507f}}},
+        {Ceiling, {}},
+        {Mish, {}},
+        {HSwish, {}},
+        {SoftPlus, {}}
 };
-const std::vector<ActivationTypes> activationParamTypes = {
-        PReLu,
-        LeakyRelu
+const std::map<ActivationTypes, std::vector<std::vector<float>>> activationParamTypes = {
+        {PReLu, {{-0.01f}}},
+        {LeakyRelu, {{0.01f}}}
 };
 std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
@@ -68,16 +68,16 @@ std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> preluBasic = {
 };
 const auto basicCases = ::testing::Combine(
-        ::testing::ValuesIn(activationTypes),
+        ::testing::ValuesIn(CommonTestUtils::combineParams(activationTypes)),
         ::testing::ValuesIn(netPrecisions),
-        ::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(basic)),
+        ::testing::ValuesIn(CommonTestUtils::combineParams(basic)),
         ::testing::Values(CommonTestUtils::DEVICE_CPU)
 );
 const auto basicPreluCases = ::testing::Combine(
-        ::testing::ValuesIn(activationParamTypes),
+        ::testing::ValuesIn(CommonTestUtils::combineParams(activationParamTypes)),
         ::testing::ValuesIn(netPrecisions),
-        ::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(preluBasic)),
+        ::testing::ValuesIn(CommonTestUtils::combineParams(preluBasic)),
         ::testing::Values(CommonTestUtils::DEVICE_CPU)
 );
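The change above replaces a flat list of activation types with a map from each type to the constant sets it needs; CommonTestUtils::combineParams then flattens that map into one (type, constants) pair per test case before it reaches ::testing::ValuesIn. A minimal standalone sketch of that expansion; the Act enum and sample values are illustrative stand-ins, not the project's real types:

// Standalone sketch of the flattening performed by combineParams: each map
// entry pairs a key with every value set it should be tested against, and
// the helper emits one (key, value) pair per test case.
#include <iostream>
#include <map>
#include <utility>
#include <vector>

enum class Act { Relu, Clamp, Elu };  // stand-in for ngraph::helpers::ActivationTypes

template <typename K, typename V>
std::vector<std::pair<K, V>> combineParams(
        const std::map<K, std::vector<V>>& keyValueSets) {
    std::vector<std::pair<K, V>> resVec;
    for (auto& keyValues : keyValueSets) {
        for (auto& item : keyValues.second) {
            resVec.push_back({keyValues.first, item});
        }
    }
    return resVec;
}

int main() {
    const std::map<Act, std::vector<std::vector<float>>> activations = {
        {Act::Relu,  {{}}},             // no constants: one case with an empty set
        {Act::Clamp, {{-2.0f, 2.0f}}},  // one case: min/max bounds
        {Act::Elu,   {{0.1f}, {1.0f}}}, // two alpha values: two cases
    };
    // Yields 4 (Act, constants) pairs; note that in this sketch a key mapped
    // to an empty list would contribute no pairs at all.
    for (auto& p : combineParams(activations)) {
        std::cout << static_cast<int>(p.first) << ": " << p.second.size()
                  << " constant(s)\n";
    }
    return 0;
}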

View File

@@ -45,7 +45,7 @@ std::vector<InferenceEngine::Precision> netPrecisions = {
 std::map<std::string, std::string> additional_config = {};
 const auto ComparisonTestParams = ::testing::Combine(
-        ::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(inputShapes)),
+        ::testing::ValuesIn(CommonTestUtils::combineParams(inputShapes)),
         ::testing::ValuesIn(inputsPrecisions),
         ::testing::ValuesIn(comparisonOpTypes),
         ::testing::ValuesIn(secondInputTypes),

View File

@@ -31,7 +31,7 @@ const std::vector<ngraph::helpers::SqueezeOpType> opTypes = {
 INSTANTIATE_TEST_CASE_P(Basic, SqueezeUnsqueezeLayerTest,
                         ::testing::Combine(
-                                ::testing::ValuesIn(CommonTestUtils::combineShapes<int>(axesVectors)),
+                                ::testing::ValuesIn(CommonTestUtils::combineParams(axesVectors)),
                                 ::testing::ValuesIn(opTypes),
                                 ::testing::ValuesIn(netPrecisions),
                                 ::testing::Values(CommonTestUtils::DEVICE_CPU)),

View File

@@ -25,14 +25,14 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
         InferenceEngine::Precision::U8
 };
-const std::vector<ActivationTypes> activationTypes = {
-        Sigmoid,
-        Tanh,
-        Relu,
-        Exp,
-        Log,
-        Sign,
-        Abs
+const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
+        {Sigmoid, {}},
+        {Tanh, {}},
+        {Relu, {}},
+        {Exp, {}},
+        {Log, {}},
+        {Sign, {}},
+        {Abs, {}}
 };
 std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
@@ -42,9 +42,9 @@ std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
 };
 const auto basicCases = ::testing::Combine(
-        ::testing::ValuesIn(activationTypes),
+        ::testing::ValuesIn(CommonTestUtils::combineParams(activationTypes)),
         ::testing::ValuesIn(netPrecisions),
-        ::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(basic)),
+        ::testing::ValuesIn(CommonTestUtils::combineParams(basic)),
         ::testing::Values(CommonTestUtils::DEVICE_GNA)
 );

View File

@@ -16,35 +16,35 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
         InferenceEngine::Precision::FP16
 };
-const std::vector<ActivationTypes> activationTypes = {
-        Sigmoid,
-        Tanh,
-        Relu,
-        Exp,
-        Log,
-        Sign,
-        Abs,
-        Gelu,
-        Clamp,
-        Negative,
-        Acos,
-        Asin,
-        Atan,
-        Cos,
-        Cosh,
-        Floor,
-        Sin,
-        Sinh,
-        Sqrt,
-        Tan,
-        Elu,
-        Erf,
-        HardSigmoid,
-        Selu,
-        Ceiling,
-        Mish,
-        HSwish,
-        SoftPlus
+const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
+        {Sigmoid, {}},
+        {Tanh, {}},
+        {Relu, {}},
+        {Exp, {}},
+        {Log, {}},
+        {Sign, {}},
+        {Abs, {}},
+        {Gelu, {}},
+        {Clamp, {{-2.0f, 2.0f}}},
+        {Negative, {}},
+        {Acos, {}},
+        {Asin, {}},
+        {Atan, {}},
+        {Cos, {}},
+        {Cosh, {}},
+        {Floor, {}},
+        {Sin, {}},
+        {Sinh, {}},
+        {Sqrt, {}},
+        {Tan, {}},
+        {Elu, {{0.1f}}},
+        {Erf, {}},
+        {HardSigmoid, {{0.2f, 0.5f}}},
+        {Selu, {{1.6732f, 1.0507f}}},
+        {Ceiling, {}},
+        {Mish, {}},
+        {HSwish, {}},
+        {SoftPlus, {}}
 };
 std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
@@ -53,9 +53,9 @@ std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
 };
 const auto basicCases = ::testing::Combine(
-        ::testing::ValuesIn(activationTypes),
+        ::testing::ValuesIn(CommonTestUtils::combineParams(activationTypes)),
         ::testing::ValuesIn(netPrecisions),
-        ::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(basic)),
+        ::testing::ValuesIn(CommonTestUtils::combineParams(basic)),
         ::testing::Values(CommonTestUtils::DEVICE_GPU)
 );

View File

@@ -45,7 +45,7 @@ std::vector<InferenceEngine::Precision> netPrecisions = {
 std::map<std::string, std::string> additional_config = {};
 const auto ComparisonTestParams = ::testing::Combine(
-        ::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(inputShapes)),
+        ::testing::ValuesIn(CommonTestUtils::combineParams(inputShapes)),
         ::testing::ValuesIn(inputsPrecisions),
         ::testing::ValuesIn(comparisonOpTypes),
         ::testing::ValuesIn(secondInputTypes),

View File

@@ -15,16 +15,16 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
         InferenceEngine::Precision::FP16
 };
-const std::vector<ActivationTypes> activationTypes = {
-        Sigmoid,
-        Tanh,
-        Relu,
-        Exp,
-        Log,
-        Gelu,
-        Mish,
-        SoftPlus,
-        Swish
+const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
+        {Sigmoid, {}},
+        {Tanh, {}},
+        {Relu, {}},
+        {Exp, {}},
+        {Log, {}},
+        {Gelu, {}},
+        {Mish, {}},
+        {SoftPlus, {}},
+        {Swish, {{0.05f}, {0.8f}, {1.0f}, {15.0f}}}
 };
 std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
@@ -33,9 +33,9 @@ std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
 };
 const auto basicCases = ::testing::Combine(
-        ::testing::ValuesIn(activationTypes),
+        ::testing::ValuesIn(CommonTestUtils::combineParams(activationTypes)),
         ::testing::ValuesIn(netPrecisions),
-        ::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(basic)),
+        ::testing::ValuesIn(CommonTestUtils::combineParams(basic)),
         ::testing::Values(CommonTestUtils::DEVICE_MYRIAD)
 );

View File

@ -74,7 +74,7 @@ static std::map<ngraph::helpers::ActivationTypes, std::string> activationNames =
}; };
typedef std::tuple< typedef std::tuple<
ngraph::helpers::ActivationTypes, std::pair<ngraph::helpers::ActivationTypes, std::vector<float>>, // Activation type and constant value
InferenceEngine::Precision, InferenceEngine::Precision,
std::pair<std::vector<size_t>, std::vector<size_t>>, std::pair<std::vector<size_t>, std::vector<size_t>>,
std::string> activationParams; std::string> activationParams;
@ -98,8 +98,12 @@ protected:
void SetUp() override; void SetUp() override;
private: private:
void generateActivationBlob(); void generateActivationBlob(std::vector<float> constantsValue);
ngraph::ParameterVector createActivationParams(ngraph::element::Type ngPrc, std::vector<size_t> inShape = {}); ngraph::ParameterVector createActivationParams(
ngraph::element::Type ngPrc, std::vector<size_t> inShape = {});
private:
std::vector<float> constantsValue;
}; };
} // namespace LayerTestsDefinitions } // namespace LayerTestsDefinitions
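Each gtest instance now receives the activation type bundled with its constants as the first tuple element. A simplified, self-contained illustration of building and unpacking one such tuple; the types here are stand-ins (Precision is replaced by std::string), and only the pair layout matches the typedef above:

#include <cstddef>
#include <string>
#include <tuple>
#include <utility>
#include <vector>

enum class ActivationTypes { Elu };       // stand-in for ngraph::helpers::ActivationTypes
using Shape = std::vector<std::size_t>;

using activationParams = std::tuple<
        std::pair<ActivationTypes, std::vector<float>>,  // activation type + constants
        std::string,                                     // net precision (simplified)
        std::pair<Shape, Shape>,                         // input / activation shapes
        std::string>;                                    // target device

int main() {
    activationParams p{
            std::make_pair(ActivationTypes::Elu, std::vector<float>{0.1f}),
            std::string("FP32"),
            std::make_pair(Shape{1, 50}, Shape{}),
            std::string("CPU")};

    // Mirrors how SetUp() unpacks the parameter in the implementation below.
    std::pair<ActivationTypes, std::vector<float>> activationDecl;
    std::string netPrecision, targetDevice;
    std::pair<Shape, Shape> shapes;
    std::tie(activationDecl, netPrecision, shapes, targetDevice) = p;

    auto activationType = activationDecl.first;   // which op to build
    auto constantsValue = activationDecl.second;  // e.g. Elu alpha = 0.1f
    bool ok = activationType == ActivationTypes::Elu && constantsValue[0] == 0.1f
              && netPrecision == "FP32" && shapes.first == Shape{1, 50}
              && targetDevice == "CPU";
    return ok ? 0 : 1;
}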

View File

@@ -23,14 +23,15 @@ std::string ActivationLayerTest::getTestCaseName(const testing::TestParamInfo<ac
     InferenceEngine::Precision netPrecision;
     std::pair<std::vector<size_t>, std::vector<size_t>> shapes;
     std::string targetDevice;
-    ngraph::helpers::ActivationTypes activationType;
-    std::tie(activationType, netPrecision, shapes, targetDevice) = obj.param;
+    std::pair<ngraph::helpers::ActivationTypes, std::vector<float>> activationDecl;
+    std::tie(activationDecl, netPrecision, shapes, targetDevice) = obj.param;
     std::ostringstream result;
     const char separator = '_';
-    result << activationNames[activationType] << separator;
-    result << "IS=" << CommonTestUtils::vec2str(shapes.first) << separator;;
-    result << "AS=" << CommonTestUtils::vec2str(shapes.second) << separator;;
+    result << activationNames[activationDecl.first] << separator;
+    result << "IS=" << CommonTestUtils::vec2str(shapes.first) << separator;
+    result << "AS=" << CommonTestUtils::vec2str(shapes.second) << separator;
+    result << "ConstantsValue=" << CommonTestUtils::vec2str(activationDecl.second) << separator;
     result << "netPRC=" << netPrecision.name() << separator;
     result << "targetDevice=" << targetDevice;
     return result.str();
@@ -39,10 +40,15 @@ std::string ActivationLayerTest::getTestCaseName(const testing::TestParamInfo<ac
 void ActivationLayerTest::SetUp() {
     InferenceEngine::Precision netPrecision;
     std::pair<std::vector<size_t>, std::vector<size_t>> shapes;
-    std::tie(activationType, netPrecision, shapes, targetDevice) = GetParam();
+    std::pair<ngraph::helpers::ActivationTypes, std::vector<float>> activationDecl;
+    std::tie(activationDecl, netPrecision, shapes, targetDevice) = GetParam();
+    activationType = activationDecl.first;
+    auto constantsValue = activationDecl.second;
     auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
     auto params = ngraph::builder::makeParams(ngPrc, {shapes.first});
-    auto activation = ngraph::builder::makeActivation(params[0], ngPrc, activationType, shapes.second);
+    auto activation = ngraph::builder::makeActivation(params[0], ngPrc, activationType, shapes.second, constantsValue);
     function = std::make_shared<ngraph::Function>(ngraph::NodeVector{activation}, params);
 }
@@ -127,29 +133,29 @@ ngraph::ParameterVector ActivationParamLayerTest::createActivationParams(ngraph:
     }
 }
-void ActivationParamLayerTest::generateActivationBlob() {
+void ActivationParamLayerTest::generateActivationBlob(std::vector<float> constantsValue) {
     switch (activationType) {
         case ngraph::helpers::ActivationTypes::PReLu: {
             auto blobNegativeSlope = inferRequest.GetBlob("negativeSlope");
-            float negativeSlope = -0.01f;
+            float negativeSlope = constantsValue[0];
             blobNegativeSlope = FuncTestUtils::createAndFillBlobWithFloatArray(blobNegativeSlope->getTensorDesc(), &negativeSlope, 1);
         }
         case ngraph::helpers::ActivationTypes::LeakyRelu: {
             auto blobLeakySlope = inferRequest.GetBlob("leakySlope");
-            float leakySlope = 0.01f;
+            float leakySlope = constantsValue[0];
             blobLeakySlope = FuncTestUtils::createAndFillBlobWithFloatArray(blobLeakySlope->getTensorDesc(), &leakySlope, 1);
         }
         case ngraph::helpers::ActivationTypes::HardSigmoid: {
             auto blobHardSigmoidAlpha = inferRequest.GetBlob("alpha");
             auto blobHardSigmoidBeta = inferRequest.GetBlob("beta");
-            float alpha = 0.2f, beta = 0.5f;
+            float alpha = constantsValue[0], beta = constantsValue[1];
             blobHardSigmoidAlpha = FuncTestUtils::createAndFillBlobWithFloatArray(blobHardSigmoidAlpha->getTensorDesc(), &alpha, 1);
             blobHardSigmoidBeta = FuncTestUtils::createAndFillBlobWithFloatArray(blobHardSigmoidBeta->getTensorDesc(), &beta, 1);
         }
         case ngraph::helpers::ActivationTypes::Selu: {
             auto blobHardSigmoidAlpha = inferRequest.GetBlob("alpha");
             auto blobHardSigmoidLambda = inferRequest.GetBlob("lambda");
-            float alpha = 1.6732f, lambda = 1.0507f;
+            float alpha = constantsValue[0], lambda = constantsValue[1];
             blobHardSigmoidAlpha = FuncTestUtils::createAndFillBlobWithFloatArray(blobHardSigmoidAlpha->getTensorDesc(), &alpha, 1);
             blobHardSigmoidLambda = FuncTestUtils::createAndFillBlobWithFloatArray(blobHardSigmoidLambda->getTensorDesc(), &lambda, 1);
         }
@@ -164,7 +170,7 @@ void ActivationParamLayerTest::Infer() {
     auto blobInput = inferRequest.GetBlob("Input");
     blobInput = FuncTestUtils::createAndFillBlobFloat(blobInput->getTensorDesc());
-    generateActivationBlob();
+    generateActivationBlob(constantsValue);
     inferRequest.Infer();
 }
@@ -173,12 +179,18 @@ void ActivationParamLayerTest::Infer() {
 void ActivationParamLayerTest::SetUp() {
     InferenceEngine::Precision netPrecision;
     std::pair<std::vector<size_t>, std::vector<size_t>> shapes;
-    std::tie(activationType, netPrecision, shapes, targetDevice) = GetParam();
+    std::pair<ngraph::helpers::ActivationTypes, std::vector<float>> activationDecl;
+    std::tie(activationDecl, netPrecision, shapes, targetDevice) = GetParam();
+    activationType = activationDecl.first;
+    constantsValue = activationDecl.second;
     auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
     auto params = ngraph::builder::makeParams(ngPrc, {shapes.first});
     auto activationParams = createActivationParams(ngPrc);
     params[0]->set_friendly_name("Input");
     params.insert(params.end(), activationParams.begin(), activationParams.end());
     auto activation = ngraph::builder::makeActivation(params, ngPrc, activationType);
     function = std::make_shared<ngraph::Function>(ngraph::NodeVector{activation}, params);
 }

View File

@@ -70,13 +70,13 @@ inline InferenceEngine::CNNLayerPtr getLayerByName(const InferenceEngine::CNNNet
     return getLayerByName(&icnnnetwork, layerName);
 }
-template <typename elementTypeVector>
-std::vector<std::pair<std::vector<size_t>, std::vector<elementTypeVector>>>
-combineShapes(const std::map<std::vector<size_t>, std::vector<std::vector<elementTypeVector>>>& inputShapes) {
-    std::vector<std::pair<std::vector<size_t>, std::vector<elementTypeVector>>> resVec;
-    for (auto& inputShape : inputShapes) {
-        for (auto& item : inputShape.second) {
-            resVec.push_back({inputShape.first, item});
+template <typename master, typename slave>
+std::vector<std::pair<master, slave>> combineParams(
+        const std::map<master, std::vector<slave>>& keyValueSets) {
+    std::vector<std::pair<master, slave>> resVec;
+    for (auto& keyValues : keyValueSets) {
+        for (auto& item : keyValues.second) {
+            resVec.push_back({keyValues.first, item});
         }
     }
     return resVec;
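The generalized helper subsumes the old shape-specific combineShapes<T>: the same flattening sketched earlier now works for any key type, so the existing shape maps keep working unchanged. A hypothetical call site, with the header path assumed rather than taken from this diff:

// Assumes common_test_utils/common_utils.hpp provides combineParams as
// defined above; the old combineShapes<size_t>(basic) call becomes
// combineParams(basic) with identical output pairs.
#include "common_test_utils/common_utils.hpp"  // assumed header location
#include <map>
#include <utility>
#include <vector>

std::vector<std::pair<std::vector<size_t>, std::vector<size_t>>>
expandShapes(const std::map<std::vector<size_t>, std::vector<std::vector<size_t>>>& basic) {
    return CommonTestUtils::combineParams(basic);  // (input shape, second shape) pairs
}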

View File

@@ -191,7 +191,8 @@ std::shared_ptr<ngraph::Node> makeVariadicSplit(const ngraph::Output<Node> &in,
 std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
                                              const element::Type &type,
                                              ngraph::helpers::ActivationTypes activationType,
-                                             std::vector<size_t> inShape = {});
+                                             std::vector<size_t> inShape = {},
+                                             std::vector<float> constantsValue = {});
 std::shared_ptr<ngraph::Node> makeActivation(const ngraph::ParameterVector &parameters,
                                              const element::Type &type,

View File

@@ -15,7 +15,8 @@ namespace builder {
 std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
                                              const element::Type &type,
                                              ngraph::helpers::ActivationTypes activationType,
-                                             std::vector<size_t> inShape) {
+                                             std::vector<size_t> inShape,
+                                             std::vector<float> constantsValue) {
     switch (activationType) {
         case ngraph::helpers::ActivationTypes::Sigmoid:
             return std::make_shared<ngraph::op::Sigmoid>(in);
@@ -27,7 +28,7 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
             auto leaky_slope = std::make_shared<ngraph::op::Constant>(
                     ngraph::element::f32,
                     inShape,
-                    std::vector<float>{0.01f});
+                    constantsValue);
             return std::make_shared<ngraph::op::PRelu>(in, leaky_slope);
         }
         case ngraph::helpers::ActivationTypes::Exp:
@@ -41,7 +42,7 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
         case ngraph::helpers::ActivationTypes::Gelu:
             return std::make_shared<ngraph::op::Gelu>(in);
         case ngraph::helpers::ActivationTypes::Clamp:
-            return std::make_shared<ngraph::op::Clamp>(in, -2.0, 2.0);
+            return std::make_shared<ngraph::op::Clamp>(in, constantsValue[0], constantsValue[1]);
         case ngraph::helpers::ActivationTypes::Negative:
             return std::make_shared<ngraph::op::Negative>(in);
         case ngraph::helpers::ActivationTypes::Acos:
@@ -65,21 +66,21 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
         case ngraph::helpers::ActivationTypes::Tan:
             return std::make_shared<ngraph::op::Tan>(in);
         case ngraph::helpers::ActivationTypes::Elu:
-            return std::make_shared<ngraph::op::Elu>(in, 0.1);
+            return std::make_shared<ngraph::op::Elu>(in, constantsValue[0]);
         case ngraph::helpers::ActivationTypes::Erf:
             return std::make_shared<ngraph::op::Erf>(in);
         case ngraph::helpers::ActivationTypes::HardSigmoid: {
             auto hard_sigmoid_alpha = std::make_shared<ngraph::op::Constant>(
-                    type, inShape, 0.2f);
+                    type, inShape, constantsValue[0]);
             auto hard_sigmoid_beta = std::make_shared<ngraph::op::Constant>(
-                    type, inShape, 0.5f);
+                    type, inShape, constantsValue[1]);
             return std::make_shared<ngraph::op::HardSigmoid>(in, hard_sigmoid_alpha, hard_sigmoid_beta);
         }
         case ngraph::helpers::ActivationTypes::Selu: {
             auto selu_alpha = std::make_shared<ngraph::op::Constant>(
-                    type, inShape, 1.6732f);
+                    type, inShape, constantsValue[0]);
             auto selu_lambda = std::make_shared<ngraph::op::Constant>(
-                    type, inShape, 1.0507f);
+                    type, inShape, constantsValue[1]);
             return std::make_shared<ngraph::op::Selu>(in, selu_alpha, selu_lambda);
         }
         case ngraph::helpers::ActivationTypes::Ceiling:
@@ -88,7 +89,7 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
             auto negative_slope = std::make_shared<ngraph::op::Constant>(
                     ngraph::element::f32,
                     inShape,
-                    std::vector<float>{-0.01f});
+                    constantsValue);
             return std::make_shared<ngraph::op::PRelu>(in, negative_slope);
         }
         case ngraph::helpers::ActivationTypes::Mish:
@@ -98,7 +99,7 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
         case ngraph::helpers::ActivationTypes::SoftPlus:
             return std::make_shared<ngraph::op::v4::SoftPlus>(in);
         case ngraph::helpers::ActivationTypes::Swish: {
-            auto beta = std::make_shared<ngraph::op::Constant>(type, inShape, 1.0f);
+            auto beta = std::make_shared<ngraph::op::Constant>(type, inShape, constantsValue[0]);
             return std::make_shared<ngraph::op::v4::Swish>(in, beta);
         }
         default:
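With the hard-coded literals removed, callers must now supply the constants for the parameterized cases: Clamp, HardSigmoid, and Selu read two entries, while Elu, Swish, PReLu, and LeakyRelu read the first. A hypothetical call site, with the header path assumed rather than taken from this diff:

// Assumed include; the declaration of makeActivation matches the header
// change above, but the file location is a guess for illustration.
#include "ngraph_functions/builders.hpp"

#include <memory>

std::shared_ptr<ngraph::Node> buildElu(const ngraph::Output<ngraph::Node>& in) {
    // Elu reads constantsValue[0] as alpha; passing an empty vector here
    // would index out of bounds in the switch above.
    return ngraph::builder::makeActivation(
            in, ngraph::element::f32, ngraph::helpers::ActivationTypes::Elu,
            /*inShape=*/{}, /*constantsValue=*/{0.1f});
}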