[IE TESTS] Added single layer tests (#1137)

This commit is contained in:
Roman Lyamin 2020-07-09 10:22:34 +03:00 committed by GitHub
parent 77dc21cbdf
commit c18c103f0f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 147 additions and 15 deletions

View File

@@ -29,13 +29,29 @@ const std::vector<ActivationTypes> activationTypes = {
 Exp,
 Log,
 Sign,
-Abs
+Abs,
+Clamp,
+Negative,
+Acos,
+Asin,
+Atan,
+Cos,
+Cosh,
+Floor,
+Sin,
+Sinh,
+Sqrt,
+Tan,
+Elu,
+Erf,
+HardSigmoid,
+Selu
 };
 const auto basicCases = ::testing::Combine(
 ::testing::ValuesIn(activationTypes),
 ::testing::ValuesIn(netPrecisions),
-::testing::Values(std::vector<size_t >({1, 50}), std::vector<size_t >({1, 128})),
+::testing::Values(std::vector<size_t>({1, 50}), std::vector<size_t>({1, 128})),
 ::testing::Values(CommonTestUtils::DEVICE_CPU)
 );

View File

@@ -24,7 +24,23 @@ const std::vector<ActivationTypes> activationTypes = {
 Log,
 Sign,
 Abs,
-Gelu
+Gelu,
+Clamp,
+Negative,
+Acos,
+Asin,
+Atan,
+Cos,
+Cosh,
+Floor,
+Sin,
+Sinh,
+Sqrt,
+Tan,
+Elu,
+Erf,
+HardSigmoid,
+Selu
 };
 const auto basicCases = ::testing::Combine(

View File

@@ -32,15 +32,31 @@
 namespace LayerTestsDefinitions {
 static std::map<ngraph::helpers::ActivationTypes, std::string> activationNames = {
-{ngraph::helpers::ActivationTypes::Sigmoid, "Sigmoid"},
-{ngraph::helpers::ActivationTypes::Tanh, "Tanh"},
-{ngraph::helpers::ActivationTypes::Relu, "Relu"},
-{ngraph::helpers::ActivationTypes::LeakyRelu, "LeakyRelu"},
-{ngraph::helpers::ActivationTypes::Exp, "Exp"},
-{ngraph::helpers::ActivationTypes::Log, "Log"},
-{ngraph::helpers::ActivationTypes::Sign, "Sign"},
-{ngraph::helpers::ActivationTypes::Abs, "Abs"},
-{ngraph::helpers::ActivationTypes::Gelu, "Gelu"}
+{ngraph::helpers::ActivationTypes::Sigmoid, "Sigmoid"},
+{ngraph::helpers::ActivationTypes::Tanh, "Tanh"},
+{ngraph::helpers::ActivationTypes::Relu, "Relu"},
+{ngraph::helpers::ActivationTypes::LeakyRelu, "LeakyRelu"},
+{ngraph::helpers::ActivationTypes::Exp, "Exp"},
+{ngraph::helpers::ActivationTypes::Log, "Log"},
+{ngraph::helpers::ActivationTypes::Sign, "Sign"},
+{ngraph::helpers::ActivationTypes::Abs, "Abs"},
+{ngraph::helpers::ActivationTypes::Gelu, "Gelu"},
+{ngraph::helpers::ActivationTypes::Clamp, "Clamp"},
+{ngraph::helpers::ActivationTypes::Negative, "Negative"},
+{ngraph::helpers::ActivationTypes::Acos, "Acos"},
+{ngraph::helpers::ActivationTypes::Asin, "Asin"},
+{ngraph::helpers::ActivationTypes::Atan, "Atan"},
+{ngraph::helpers::ActivationTypes::Cos, "Cos"},
+{ngraph::helpers::ActivationTypes::Cosh, "Cosh"},
+{ngraph::helpers::ActivationTypes::Floor, "Floor"},
+{ngraph::helpers::ActivationTypes::Sin, "Sin"},
+{ngraph::helpers::ActivationTypes::Sinh, "Sinh"},
+{ngraph::helpers::ActivationTypes::Sqrt, "Sqrt"},
+{ngraph::helpers::ActivationTypes::Tan, "Tan"},
+{ngraph::helpers::ActivationTypes::Elu, "Elu"},
+{ngraph::helpers::ActivationTypes::Erf, "Erf"},
+{ngraph::helpers::ActivationTypes::HardSigmoid, "HardSigmoid"},
+{ngraph::helpers::ActivationTypes::Selu, "Selu"}
 };
 typedef std::tuple<

View File

@@ -47,8 +47,36 @@ void ActivationLayerTest::SetUp() {
 InferenceEngine::Blob::Ptr ActivationLayerTest::GenerateInput(const InferenceEngine::InputInfo &info) const {
 bool inPrcSigned = function->get_parameters()[0]->get_element_type().is_signed();
-uint32_t data_range = 20;
-int32_t data_start_from = activationType == ngraph::helpers::ActivationTypes::Log ? 1 : -10;
+int32_t data_start_from;
+uint32_t data_range;
+switch (activationType) {
+case ngraph::helpers::ActivationTypes::Log: {
+data_start_from = 1;
+data_range = 20;
+break;
+}
+case ngraph::helpers::ActivationTypes::Sqrt: {
+data_start_from = 0;
+data_range = 20;
+break;
+}
+case ngraph::helpers::ActivationTypes::Asin: {
+data_start_from = -1;
+data_range = 2;
+break;
+}
+case ngraph::helpers::ActivationTypes::Acos: {
+data_start_from = -1;
+data_range = 2;
+break;
+}
+default: {
+data_start_from = -10;
+data_range = 20;
+break;
+}
+}
 if (!inPrcSigned) {
 data_range = 15;
 data_start_from = 0;

View File

@@ -89,7 +89,23 @@ enum ActivationTypes {
 Log,
 Sign,
 Abs,
-Gelu
+Gelu,
+Clamp,
+Negative,
+Acos,
+Asin,
+Atan,
+Cos,
+Cosh,
+Floor,
+Sin,
+Sinh,
+Sqrt,
+Tan,
+Elu,
+Erf,
+HardSigmoid,
+Selu
 };
 enum EltwiseTypes {

View File

@@ -19,6 +19,14 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
 ngraph::element::f32,
 ngraph::Shape{1},
 std::vector<float>{0.01f});
+auto selu_alpha = std::make_shared<ngraph::op::Constant>(
+type, ngraph::Shape(), 1.6732f);
+auto selu_lambda = std::make_shared<ngraph::op::Constant>(
+type, ngraph::Shape(), 1.0507f);
+auto hard_sigmoid_alpha = std::make_shared<ngraph::op::Constant>(
+type, ngraph::Shape(), 0.2f);
+auto hard_sigmoid_beta = std::make_shared<ngraph::op::Constant>(
+type, ngraph::Shape(), 0.5f);
 switch (activationType) {
 case ngraph::helpers::ActivationTypes::Sigmoid:
@@ -39,6 +47,38 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
 return std::make_shared<ngraph::op::Abs>(in);
 case ngraph::helpers::ActivationTypes::Gelu:
 return std::make_shared<ngraph::op::Gelu>(in);
+case ngraph::helpers::ActivationTypes::Clamp:
+return std::make_shared<ngraph::op::Clamp>(in, -2.0, 2.0);
+case ngraph::helpers::ActivationTypes::Negative:
+return std::make_shared<ngraph::op::Negative>(in);
+case ngraph::helpers::ActivationTypes::Acos:
+return std::make_shared<ngraph::op::Acos>(in);
+case ngraph::helpers::ActivationTypes::Asin:
+return std::make_shared<ngraph::op::Asin>(in);
+case ngraph::helpers::ActivationTypes::Atan:
+return std::make_shared<ngraph::op::Atan>(in);
+case ngraph::helpers::ActivationTypes::Cos:
+return std::make_shared<ngraph::op::Cos>(in);
+case ngraph::helpers::ActivationTypes::Cosh:
+return std::make_shared<ngraph::op::Cosh>(in);
+case ngraph::helpers::ActivationTypes::Floor:
+return std::make_shared<ngraph::op::Floor>(in);
+case ngraph::helpers::ActivationTypes::Sin:
+return std::make_shared<ngraph::op::Sin>(in);
+case ngraph::helpers::ActivationTypes::Sinh:
+return std::make_shared<ngraph::op::Sinh>(in);
+case ngraph::helpers::ActivationTypes::Sqrt:
+return std::make_shared<ngraph::op::Sqrt>(in);
+case ngraph::helpers::ActivationTypes::Tan:
+return std::make_shared<ngraph::op::Tan>(in);
+case ngraph::helpers::ActivationTypes::Elu:
+return std::make_shared<ngraph::op::Elu>(in, 0.1);
+case ngraph::helpers::ActivationTypes::Erf:
+return std::make_shared<ngraph::op::Erf>(in);
+case ngraph::helpers::ActivationTypes::HardSigmoid:
+return std::make_shared<ngraph::op::HardSigmoid>(in, hard_sigmoid_alpha, hard_sigmoid_beta);
+case ngraph::helpers::ActivationTypes::Selu:
+return std::make_shared<ngraph::op::Selu>(in, selu_alpha, selu_lambda);
 default:
 throw std::runtime_error("Can't create layer for this activation type");
 }