[IE Myriad][IE Tests] Activation layer's constants parametrization. (#2071)

CI passed: https://gitlab-icv.inn.intel.com/inference-engine/product-configs/merge_requests/870
Author: Nikita Kudriavtsev, 2020-09-10 12:56:21 +03:00, committed by GitHub
parent d4d460101d
commit ef2581d5c6
12 changed files with 144 additions and 126 deletions
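
In short: each plugin's activation test config no longer lists bare ActivationTypes values. It now maps every activation type to the constant sets it needs, and CommonTestUtils::combineParams expands that map into (type, constants) pairs for ::testing::Combine, so the constants travel with the test parameters instead of being hard-coded in ngraph::builder::makeActivation. A condensed sketch of the pattern, assuming the usual functional-test headers; the concrete values mirror the CPU config further down, while netPrecisions and basic stand for the per-plugin precision and shape containers:

// Types that need no constants map to an empty list; parametrized ones carry their constants inline.
const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
        {Relu,  {}},
        {Clamp, {{-2.0f, 2.0f}}},      // min, max
        {Elu,   {{0.1f}}},             // alpha
        {Selu,  {{1.6732f, 1.0507f}}}  // alpha, lambda
};

const auto basicCases = ::testing::Combine(
        ::testing::ValuesIn(CommonTestUtils::combineParams(activationTypes)),  // (type, constants) pairs
        ::testing::ValuesIn(netPrecisions),
        ::testing::ValuesIn(CommonTestUtils::combineParams(basic)),            // (input shape, activation shape) pairs
        ::testing::Values(CommonTestUtils::DEVICE_CPU));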

View File

@@ -22,39 +22,39 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP16
};
const std::vector<ActivationTypes> activationTypes = {
Sigmoid,
Tanh,
Relu,
Exp,
Log,
Sign,
Abs,
Clamp,
Negative,
Acos,
Asin,
Atan,
Cos,
Cosh,
Floor,
Sin,
Sinh,
Sqrt,
Tan,
Elu,
Erf,
HardSigmoid,
Selu,
Ceiling,
Mish,
HSwish,
SoftPlus
const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
{Sigmoid, {}},
{Tanh, {}},
{Relu, {}},
{Exp, {}},
{Log, {}},
{Sign, {}},
{Abs, {}},
{Clamp, {{-2.0f, 2.0f}}},
{Negative, {}},
{Acos, {}},
{Asin, {}},
{Atan, {}},
{Cos, {}},
{Cosh, {}},
{Floor, {}},
{Sin, {}},
{Sinh, {}},
{Sqrt, {}},
{Tan, {}},
{Elu, {{0.1f}}},
{Erf, {}},
{HardSigmoid, {{0.2f, 0.5f}}},
{Selu, {{1.6732f, 1.0507f}}},
{Ceiling, {}},
{Mish, {}},
{HSwish, {}},
{SoftPlus, {}}
};
const std::vector<ActivationTypes> activationParamTypes = {
PReLu,
LeakyRelu,
const std::map<ActivationTypes, std::vector<std::vector<float>>> activationParamTypes = {
{PReLu, {{-0.01f}}},
{LeakyRelu, {{0.01f}}}
};
std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
@@ -68,16 +68,16 @@ std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> preluBasic = {
};
const auto basicCases = ::testing::Combine(
::testing::ValuesIn(activationTypes),
::testing::ValuesIn(CommonTestUtils::combineParams(activationTypes)),
::testing::ValuesIn(netPrecisions),
::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(basic)),
::testing::ValuesIn(CommonTestUtils::combineParams(basic)),
::testing::Values(CommonTestUtils::DEVICE_CPU)
);
const auto basicPreluCases = ::testing::Combine(
::testing::ValuesIn(activationParamTypes),
::testing::ValuesIn(CommonTestUtils::combineParams(activationParamTypes)),
::testing::ValuesIn(netPrecisions),
::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(preluBasic)),
::testing::ValuesIn(CommonTestUtils::combineParams(preluBasic)),
::testing::Values(CommonTestUtils::DEVICE_CPU)
);
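
Each value produced by combineParams(activationTypes) above is a std::pair of the activation type and one constant vector, so every generated test instance carries its own constants and unpacks them in SetUp (see the ActivationLayerTest changes further down). A minimal sketch of one generated parameter set, assuming the functional-test headers are available; the shape values are illustrative, not taken from the basic/preluBasic maps:

// Mirrors the activationParams typedef from the shared test header below.
using ActivationDecl = std::pair<ngraph::helpers::ActivationTypes, std::vector<float>>;
std::tuple<ActivationDecl,
           InferenceEngine::Precision,
           std::pair<std::vector<size_t>, std::vector<size_t>>,  // input shape, activation shape
           std::string> oneCase{
        ActivationDecl{ngraph::helpers::ActivationTypes::Clamp, {-2.0f, 2.0f}},
        InferenceEngine::Precision::FP32,
        {{1, 50}, {}},
        CommonTestUtils::DEVICE_CPU};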

View File

@@ -45,7 +45,7 @@ std::vector<InferenceEngine::Precision> netPrecisions = {
std::map<std::string, std::string> additional_config = {};
const auto ComparisonTestParams = ::testing::Combine(
::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(inputShapes)),
::testing::ValuesIn(CommonTestUtils::combineParams(inputShapes)),
::testing::ValuesIn(inputsPrecisions),
::testing::ValuesIn(comparisonOpTypes),
::testing::ValuesIn(secondInputTypes),

View File

@@ -31,7 +31,7 @@ const std::vector<ngraph::helpers::SqueezeOpType> opTypes = {
INSTANTIATE_TEST_CASE_P(Basic, SqueezeUnsqueezeLayerTest,
::testing::Combine(
::testing::ValuesIn(CommonTestUtils::combineShapes<int>(axesVectors)),
::testing::ValuesIn(CommonTestUtils::combineParams(axesVectors)),
::testing::ValuesIn(opTypes),
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_CPU)),

View File

@@ -25,14 +25,14 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::U8
};
const std::vector<ActivationTypes> activationTypes = {
Sigmoid,
Tanh,
Relu,
Exp,
Log,
Sign,
Abs
const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
{Sigmoid, {}},
{Tanh, {}},
{Relu, {}},
{Exp, {}},
{Log, {}},
{Sign, {}},
{Abs, {}}
};
std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
@@ -42,9 +42,9 @@ std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
};
const auto basicCases = ::testing::Combine(
::testing::ValuesIn(activationTypes),
::testing::ValuesIn(CommonTestUtils::combineParams(activationTypes)),
::testing::ValuesIn(netPrecisions),
::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(basic)),
::testing::ValuesIn(CommonTestUtils::combineParams(basic)),
::testing::Values(CommonTestUtils::DEVICE_GNA)
);

View File

@@ -16,35 +16,35 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP16
};
const std::vector<ActivationTypes> activationTypes = {
Sigmoid,
Tanh,
Relu,
Exp,
Log,
Sign,
Abs,
Gelu,
Clamp,
Negative,
Acos,
Asin,
Atan,
Cos,
Cosh,
Floor,
Sin,
Sinh,
Sqrt,
Tan,
Elu,
Erf,
HardSigmoid,
Selu,
Ceiling,
Mish,
HSwish,
SoftPlus
const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
{Sigmoid, {}},
{Tanh, {}},
{Relu, {}},
{Exp, {}},
{Log, {}},
{Sign, {}},
{Abs, {}},
{Gelu, {}},
{Clamp, {{-2.0f, 2.0f}}},
{Negative, {}},
{Acos, {}},
{Asin, {}},
{Atan, {}},
{Cos, {}},
{Cosh, {}},
{Floor, {}},
{Sin, {}},
{Sinh, {}},
{Sqrt, {}},
{Tan, {}},
{Elu, {{0.1f}}},
{Erf, {}},
{HardSigmoid, {{0.2f, 0.5f}}},
{Selu, {{1.6732f, 1.0507f}}},
{Ceiling, {}},
{Mish, {}},
{HSwish, {}},
{SoftPlus, {}}
};
std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
@@ -53,9 +53,9 @@ std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
};
const auto basicCases = ::testing::Combine(
::testing::ValuesIn(activationTypes),
::testing::ValuesIn(CommonTestUtils::combineParams(activationTypes)),
::testing::ValuesIn(netPrecisions),
::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(basic)),
::testing::ValuesIn(CommonTestUtils::combineParams(basic)),
::testing::Values(CommonTestUtils::DEVICE_GPU)
);

View File

@@ -45,7 +45,7 @@ std::vector<InferenceEngine::Precision> netPrecisions = {
std::map<std::string, std::string> additional_config = {};
const auto ComparisonTestParams = ::testing::Combine(
::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(inputShapes)),
::testing::ValuesIn(CommonTestUtils::combineParams(inputShapes)),
::testing::ValuesIn(inputsPrecisions),
::testing::ValuesIn(comparisonOpTypes),
::testing::ValuesIn(secondInputTypes),

View File

@@ -15,16 +15,16 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP16
};
const std::vector<ActivationTypes> activationTypes = {
Sigmoid,
Tanh,
Relu,
Exp,
Log,
Gelu,
Mish,
SoftPlus,
Swish
const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
{Sigmoid, {}},
{Tanh, {}},
{Relu, {}},
{Exp, {}},
{Log, {}},
{Gelu, {}},
{Mish, {}},
{SoftPlus, {}},
{Swish, {{0.05f}, {0.8f}, {1.0f}, {15.0f}}}
};
std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
@@ -33,9 +33,9 @@ std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
};
const auto basicCases = ::testing::Combine(
::testing::ValuesIn(activationTypes),
::testing::ValuesIn(CommonTestUtils::combineParams(activationTypes)),
::testing::ValuesIn(netPrecisions),
::testing::ValuesIn(CommonTestUtils::combineShapes<size_t>(basic)),
::testing::ValuesIn(CommonTestUtils::combineParams(basic)),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD)
);

View File

@@ -74,7 +74,7 @@ static std::map<ngraph::helpers::ActivationTypes, std::string> activationNames =
};
typedef std::tuple<
ngraph::helpers::ActivationTypes,
std::pair<ngraph::helpers::ActivationTypes, std::vector<float>>, // Activation type and constant value
InferenceEngine::Precision,
std::pair<std::vector<size_t>, std::vector<size_t>>,
std::string> activationParams;
@@ -98,8 +98,12 @@ protected:
void SetUp() override;
private:
void generateActivationBlob();
ngraph::ParameterVector createActivationParams(ngraph::element::Type ngPrc, std::vector<size_t> inShape = {});
void generateActivationBlob(std::vector<float> constantsValue);
ngraph::ParameterVector createActivationParams(
ngraph::element::Type ngPrc, std::vector<size_t> inShape = {});
private:
std::vector<float> constantsValue;
};
} // namespace LayerTestsDefinitions

View File

@@ -23,14 +23,15 @@ std::string ActivationLayerTest::getTestCaseName(const testing::TestParamInfo<ac
InferenceEngine::Precision netPrecision;
std::pair<std::vector<size_t>, std::vector<size_t>> shapes;
std::string targetDevice;
ngraph::helpers::ActivationTypes activationType;
std::tie(activationType, netPrecision, shapes, targetDevice) = obj.param;
std::pair<ngraph::helpers::ActivationTypes, std::vector<float>> activationDecl;
std::tie(activationDecl, netPrecision, shapes, targetDevice) = obj.param;
std::ostringstream result;
const char separator = '_';
result << activationNames[activationType] << separator;
result << "IS=" << CommonTestUtils::vec2str(shapes.first) << separator;;
result << "AS=" << CommonTestUtils::vec2str(shapes.second) << separator;;
result << activationNames[activationDecl.first] << separator;
result << "IS=" << CommonTestUtils::vec2str(shapes.first) << separator;
result << "AS=" << CommonTestUtils::vec2str(shapes.second) << separator;
result << "ConstantsValue=" << CommonTestUtils::vec2str(activationDecl.second) << separator;
result << "netPRC=" << netPrecision.name() << separator;
result << "targetDevice=" << targetDevice;
return result.str();
@@ -39,10 +40,15 @@ std::string ActivationLayerTest::getTestCaseName(const testing::TestParamInfo<ac
void ActivationLayerTest::SetUp() {
InferenceEngine::Precision netPrecision;
std::pair<std::vector<size_t>, std::vector<size_t>> shapes;
std::tie(activationType, netPrecision, shapes, targetDevice) = GetParam();
std::pair<ngraph::helpers::ActivationTypes, std::vector<float>> activationDecl;
std::tie(activationDecl, netPrecision, shapes, targetDevice) = GetParam();
activationType = activationDecl.first;
auto constantsValue = activationDecl.second;
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {shapes.first});
auto activation = ngraph::builder::makeActivation(params[0], ngPrc, activationType, shapes.second);
auto activation = ngraph::builder::makeActivation(params[0], ngPrc, activationType, shapes.second, constantsValue);
function = std::make_shared<ngraph::Function>(ngraph::NodeVector{activation}, params);
}
@@ -127,29 +133,29 @@ ngraph::ParameterVector ActivationParamLayerTest::createActivationParams(ngraph:
}
}
void ActivationParamLayerTest::generateActivationBlob() {
void ActivationParamLayerTest::generateActivationBlob(std::vector<float> constantsValue) {
switch (activationType) {
case ngraph::helpers::ActivationTypes::PReLu: {
auto blobNegativeSlope = inferRequest.GetBlob("negativeSlope");
float negativeSlope = -0.01f;
float negativeSlope = constantsValue[0];
blobNegativeSlope = FuncTestUtils::createAndFillBlobWithFloatArray(blobNegativeSlope->getTensorDesc(), &negativeSlope, 1);
}
case ngraph::helpers::ActivationTypes::LeakyRelu: {
auto blobLeakySlope = inferRequest.GetBlob("leakySlope");
float leakySlope = 0.01f;
float leakySlope = constantsValue[0];
blobLeakySlope = FuncTestUtils::createAndFillBlobWithFloatArray(blobLeakySlope->getTensorDesc(), &leakySlope, 1);
}
case ngraph::helpers::ActivationTypes::HardSigmoid: {
auto blobHardSigmoidAlpha = inferRequest.GetBlob("alpha");
auto blobHardSigmoidBeta = inferRequest.GetBlob("beta");
float alpha = 0.2f, beta = 0.5f;
float alpha = constantsValue[0], beta = constantsValue[1];
blobHardSigmoidAlpha = FuncTestUtils::createAndFillBlobWithFloatArray(blobHardSigmoidAlpha->getTensorDesc(), &alpha, 1);
blobHardSigmoidBeta = FuncTestUtils::createAndFillBlobWithFloatArray(blobHardSigmoidBeta->getTensorDesc(), &beta, 1);
}
case ngraph::helpers::ActivationTypes::Selu: {
auto blobHardSigmoidAlpha = inferRequest.GetBlob("alpha");
auto blobHardSigmoidLambda = inferRequest.GetBlob("lambda");
float alpha = 1.6732f, lambda = 1.0507f;
float alpha = constantsValue[0], lambda = constantsValue[1];
blobHardSigmoidAlpha = FuncTestUtils::createAndFillBlobWithFloatArray(blobHardSigmoidAlpha->getTensorDesc(), &alpha, 1);
blobHardSigmoidLambda = FuncTestUtils::createAndFillBlobWithFloatArray(blobHardSigmoidLambda->getTensorDesc(), &lambda, 1);
}
@@ -164,7 +170,7 @@ void ActivationParamLayerTest::Infer() {
auto blobInput = inferRequest.GetBlob("Input");
blobInput = FuncTestUtils::createAndFillBlobFloat(blobInput->getTensorDesc());
generateActivationBlob();
generateActivationBlob(constantsValue);
inferRequest.Infer();
}
@@ -173,12 +179,18 @@ void ActivationParamLayerTest::Infer() {
void ActivationParamLayerTest::SetUp() {
InferenceEngine::Precision netPrecision;
std::pair<std::vector<size_t>, std::vector<size_t>> shapes;
std::tie(activationType, netPrecision, shapes, targetDevice) = GetParam();
std::pair<ngraph::helpers::ActivationTypes, std::vector<float>> activationDecl;
std::tie(activationDecl, netPrecision, shapes, targetDevice) = GetParam();
activationType = activationDecl.first;
constantsValue = activationDecl.second;
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {shapes.first});
auto activationParams = createActivationParams(ngPrc);
params[0]->set_friendly_name("Input");
params.insert(params.end(), activationParams.begin(), activationParams.end());
auto activation = ngraph::builder::makeActivation(params, ngPrc, activationType);
function = std::make_shared<ngraph::Function>(ngraph::NodeVector{activation}, params);
}

View File

@@ -70,13 +70,13 @@ inline InferenceEngine::CNNLayerPtr getLayerByName(const InferenceEngine::CNNNet
return getLayerByName(&icnnnetwork, layerName);
}
template <typename elementTypeVector>
std::vector<std::pair<std::vector<size_t>, std::vector<elementTypeVector>>>
combineShapes(const std::map<std::vector<size_t>, std::vector<std::vector<elementTypeVector>>>& inputShapes) {
std::vector<std::pair<std::vector<size_t>, std::vector<elementTypeVector>>> resVec;
for (auto& inputShape : inputShapes) {
for (auto& item : inputShape.second) {
resVec.push_back({inputShape.first, item});
template <typename master, typename slave>
std::vector<std::pair<master, slave>> combineParams(
const std::map<master, std::vector<slave>>& keyValueSets) {
std::vector<std::pair<master, slave>> resVec;
for (auto& keyValues : keyValueSets) {
for (auto& item : keyValues.second) {
resVec.push_back({keyValues.first, item});
}
}
return resVec;
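
For reference, the renamed helper generalizes the old combineShapes: it flattens any std::map<key, std::vector<value>> into a flat vector of (key, value) pairs, one per value. A standalone usage sketch (the main() wrapper and std::string keys are illustrative; the tests use the ActivationTypes enum and shape vectors as keys):

#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

// Same body as CommonTestUtils::combineParams in the hunk above.
template <typename master, typename slave>
std::vector<std::pair<master, slave>> combineParams(
    const std::map<master, std::vector<slave>>& keyValueSets) {
    std::vector<std::pair<master, slave>> resVec;
    for (auto& keyValues : keyValueSets) {
        for (auto& item : keyValues.second) {
            resVec.push_back({keyValues.first, item});
        }
    }
    return resVec;
}

int main() {
    // Same layout as the activation maps: one key, several constant sets.
    std::map<std::string, std::vector<std::vector<float>>> activations = {
        {"Clamp", {{-2.0f, 2.0f}}},                    // one set: min, max
        {"Swish", {{0.05f}, {0.8f}, {1.0f}, {15.0f}}}  // four sets: one beta each
    };
    // Produces five pairs: (Clamp,{-2,2}), (Swish,{0.05}), (Swish,{0.8}), (Swish,{1}), (Swish,{15}).
    for (const auto& p : combineParams(activations)) {
        std::cout << p.first << " -> " << p.second.size() << " constant(s)\n";
    }
    return 0;
}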

View File

@@ -191,7 +191,8 @@ std::shared_ptr<ngraph::Node> makeVariadicSplit(const ngraph::Output<Node> &in,
std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
const element::Type &type,
ngraph::helpers::ActivationTypes activationType,
std::vector<size_t> inShape = {});
std::vector<size_t> inShape = {},
std::vector<float> constantsValue = {});
std::shared_ptr<ngraph::Node> makeActivation(const ngraph::ParameterVector &parameters,
const element::Type &type,

View File

@@ -15,7 +15,8 @@ namespace builder {
std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
const element::Type &type,
ngraph::helpers::ActivationTypes activationType,
std::vector<size_t> inShape) {
std::vector<size_t> inShape,
std::vector<float> constantsValue) {
switch (activationType) {
case ngraph::helpers::ActivationTypes::Sigmoid:
return std::make_shared<ngraph::op::Sigmoid>(in);
@@ -27,7 +28,7 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
auto leaky_slope = std::make_shared<ngraph::op::Constant>(
ngraph::element::f32,
inShape,
std::vector<float>{0.01f});
constantsValue);
return std::make_shared<ngraph::op::PRelu>(in, leaky_slope);
}
case ngraph::helpers::ActivationTypes::Exp:
@@ -41,7 +42,7 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
case ngraph::helpers::ActivationTypes::Gelu:
return std::make_shared<ngraph::op::Gelu>(in);
case ngraph::helpers::ActivationTypes::Clamp:
return std::make_shared<ngraph::op::Clamp>(in, -2.0, 2.0);
return std::make_shared<ngraph::op::Clamp>(in, constantsValue[0], constantsValue[1]);
case ngraph::helpers::ActivationTypes::Negative:
return std::make_shared<ngraph::op::Negative>(in);
case ngraph::helpers::ActivationTypes::Acos:
@@ -65,21 +66,21 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
case ngraph::helpers::ActivationTypes::Tan:
return std::make_shared<ngraph::op::Tan>(in);
case ngraph::helpers::ActivationTypes::Elu:
return std::make_shared<ngraph::op::Elu>(in, 0.1);
return std::make_shared<ngraph::op::Elu>(in, constantsValue[0]);
case ngraph::helpers::ActivationTypes::Erf:
return std::make_shared<ngraph::op::Erf>(in);
case ngraph::helpers::ActivationTypes::HardSigmoid: {
auto hard_sigmoid_alpha = std::make_shared<ngraph::op::Constant>(
type, inShape, 0.2f);
type, inShape, constantsValue[0]);
auto hard_sigmoid_beta = std::make_shared<ngraph::op::Constant>(
type, inShape, 0.5f);
type, inShape, constantsValue[1]);
return std::make_shared<ngraph::op::HardSigmoid>(in, hard_sigmoid_alpha, hard_sigmoid_beta);
}
case ngraph::helpers::ActivationTypes::Selu: {
auto selu_alpha = std::make_shared<ngraph::op::Constant>(
type, inShape, 1.6732f);
type, inShape, constantsValue[0]);
auto selu_lambda = std::make_shared<ngraph::op::Constant>(
type, inShape, 1.0507f);
type, inShape, constantsValue[1]);
return std::make_shared<ngraph::op::Selu>(in, selu_alpha, selu_lambda);
}
case ngraph::helpers::ActivationTypes::Ceiling:
@@ -88,7 +89,7 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
auto negative_slope = std::make_shared<ngraph::op::Constant>(
ngraph::element::f32,
inShape,
std::vector<float>{-0.01f});
constantsValue);
return std::make_shared<ngraph::op::PRelu>(in, negative_slope);
}
case ngraph::helpers::ActivationTypes::Mish:
@@ -98,7 +99,7 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
case ngraph::helpers::ActivationTypes::SoftPlus:
return std::make_shared<ngraph::op::v4::SoftPlus>(in);
case ngraph::helpers::ActivationTypes::Swish: {
auto beta = std::make_shared<ngraph::op::Constant>(type, inShape, 1.0f);
auto beta = std::make_shared<ngraph::op::Constant>(type, inShape, constantsValue[0]);
return std::make_shared<ngraph::op::v4::Swish>(in, beta);
}
default: