[IE][VPU]: Supports I32 for some eltwise precisions + tests (#2364)

This commit is contained in:
Andrew Bakalin 2020-09-25 18:29:34 +03:00 committed by GitHub
parent eda9498b79
commit 03d184726a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 304 additions and 47 deletions

View File

@@ -19,7 +19,7 @@ set(VPU_SUPPORTED_FIRMWARES usb-ma2450 usb-ma2x8x pcie-ma248x)
# Default packages
#
set(FIRMWARE_PACKAGE_VERSION 1390)
set(FIRMWARE_PACKAGE_VERSION 1395)
set(VPU_CLC_MA2X8X_VERSION "movi-cltools-20.09.1")
#

View File

@@ -148,6 +148,9 @@ private:
StageType::Prod,
StageType::Max,
StageType::Div,
StageType::Min,
StageType::Logical_NOT,
StageType::Logical_AND
};
auto supportedDataTypesInput0 = EnumSet<DataType>{DataType::FP16};
if (stageTypesWhichSupportS32.count(operation)) {
@@ -167,7 +170,8 @@ private:
static_cast<Handle<StageNode>>(this), dataTypeInput1, supportedDataTypesInput1);
assertInputsOutputsTypes(this, {{dataTypeInput0}, {dataTypeInput1}, {dataTypeInput1}}, {{dataTypeInput1}});
} else if (operation == StageType::Greater && dataTypeInput0 != dataTypeOutput) {
} else if ((operation == StageType::Greater || operation == StageType::Less || operation == StageType::Equal)
&& dataTypeInput0 != dataTypeOutput) {
assertInputsOutputsTypes(this, {{DataType::FP16}, {DataType::FP16}, {DataType::FP16}}, {{DataType::S32}});
} else {
assertInputsOutputsTypes(this, {{dataTypeInput0}, {dataTypeInput0}, {dataTypeInput0}}, {{dataTypeInput0}});

View File

@@ -38,10 +38,6 @@ std::vector<ngraph::helpers::InputLayerType> secondInputTypes = {
ngraph::helpers::InputLayerType::PARAMETER,
};
std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
};
std::map<std::string, std::string> additional_config = {};
const auto ComparisonTestParams = ::testing::Combine(
@@ -49,7 +45,8 @@ const auto ComparisonTestParams = ::testing::Combine(
::testing::ValuesIn(inputsPrecisions),
::testing::ValuesIn(comparisonOpTypes),
::testing::ValuesIn(secondInputTypes),
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(CommonTestUtils::DEVICE_CPU),
::testing::Values(additional_config));

View File

@@ -38,10 +38,6 @@ std::vector<ngraph::helpers::InputLayerType> secondInputTypes = {
ngraph::helpers::InputLayerType::PARAMETER,
};
std::vector<InferenceEngine::Precision> netPrecisions = {
InferenceEngine::Precision::FP32,
};
std::map<std::string, std::string> additional_config = {};
const auto ComparisonTestParams = ::testing::Combine(
@@ -49,7 +45,8 @@ const auto ComparisonTestParams = ::testing::Combine(
::testing::ValuesIn(inputsPrecisions),
::testing::ValuesIn(comparisonOpTypes),
::testing::ValuesIn(secondInputTypes),
::testing::ValuesIn(netPrecisions),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(CommonTestUtils::DEVICE_GPU),
::testing::Values(additional_config));

View File

@@ -0,0 +1,79 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include "single_layer_tests/comparison.hpp"
#include "common_test_utils/test_constants.hpp"
#include "common/myriad_common_test_utils.hpp"
#include <vpu/private_plugin_config.hpp>
#include <vector>
using namespace LayerTestsDefinitions;
using namespace LayerTestsDefinitions::ComparisonParams;
namespace {
typedef std::map<std::string, std::string> Config;
std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> inputShapes = {
{{5}, {{1}}},
{{5}, {{1}, {1, 1}, {2, 5}, {1, 1, 1}, {2, 2, 5}}},
{{2, 200}, {{1}, {200}, {1, 200}, {2, 200}, {2, 2, 200}}},
{{1, 3, 20}, {{20}, {2, 1, 1}}},
{{2, 17, 3, 4}, {{4}, {1, 3, 4}, {2, 1, 3, 4}}},
{{2, 1, 1, 3, 1}, {{1}, {1, 3, 4}, {2, 1, 3, 4}, {1, 1, 1, 1, 1}}},
};
std::vector<InferenceEngine::Precision> fpTypes = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16,
};
std::vector<InferenceEngine::Precision> intTypes = {
InferenceEngine::Precision::I32,
};
std::vector<ngraph::helpers::ComparisonTypes> comparisonOpTypesFpToFp = {
ngraph::helpers::ComparisonTypes::EQUAL,
ngraph::helpers::ComparisonTypes::NOT_EQUAL,
ngraph::helpers::ComparisonTypes::GREATER,
ngraph::helpers::ComparisonTypes::GREATER_EQUAL,
ngraph::helpers::ComparisonTypes::LESS,
ngraph::helpers::ComparisonTypes::LESS_EQUAL,
};
std::vector<ngraph::helpers::ComparisonTypes> comparisonOpTypesFpToInt = {
ngraph::helpers::ComparisonTypes::EQUAL,
ngraph::helpers::ComparisonTypes::GREATER,
ngraph::helpers::ComparisonTypes::LESS,
};
INSTANTIATE_TEST_CASE_P(ComparisonFp,
ComparisonLayerTest,
::testing::Combine(
::testing::ValuesIn(CommonTestUtils::combineParams(inputShapes)),
::testing::ValuesIn(fpTypes),
::testing::ValuesIn(comparisonOpTypesFpToFp),
::testing::Values(ngraph::helpers::InputLayerType::PARAMETER),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD),
::testing::Values(Config{{InferenceEngine::MYRIAD_DETECT_NETWORK_BATCH, CONFIG_VALUE(NO)}})),
ComparisonLayerTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(ComparisonInt,
ComparisonLayerTest,
::testing::Combine(
::testing::ValuesIn(CommonTestUtils::combineParams(inputShapes)),
::testing::ValuesIn(fpTypes),
::testing::ValuesIn(comparisonOpTypesFpToInt),
::testing::Values(ngraph::helpers::InputLayerType::PARAMETER),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(InferenceEngine::Precision::I32),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD),
::testing::Values(Config{{InferenceEngine::MYRIAD_DETECT_NETWORK_BATCH, CONFIG_VALUE(NO)}})),
ComparisonLayerTest::getTestCaseName);
} // namespace

View File

@@ -25,14 +25,13 @@ std::vector<std::vector<std::vector<size_t>>> inShapes = {
{{52, 1, 52, 3, 2}, {2}}
};
std::vector<InferenceEngine::Precision> netPrecisions = {
std::vector<InferenceEngine::Precision> fpTypes = {
InferenceEngine::Precision::FP32,
InferenceEngine::Precision::FP16,
};
std::vector<ngraph::helpers::InputLayerType> secondaryInputTypes = {
ngraph::helpers::InputLayerType::CONSTANT,
ngraph::helpers::InputLayerType::PARAMETER,
std::vector<InferenceEngine::Precision> intTypes = {
InferenceEngine::Precision::I32,
};
std::vector<CommonTestUtils::OpType> opTypes = {
@@ -40,30 +39,44 @@ std::vector<CommonTestUtils::OpType> opTypes = {
CommonTestUtils::OpType::VECTOR,
};
std::vector<ngraph::helpers::EltwiseTypes> eltwiseOpTypes = {
std::vector<ngraph::helpers::EltwiseTypes> eltwiseMathTypesFP = {
ngraph::helpers::EltwiseTypes::MULTIPLY,
ngraph::helpers::EltwiseTypes::SUBTRACT,
ngraph::helpers::EltwiseTypes::ADD
ngraph::helpers::EltwiseTypes::ADD,
ngraph::helpers::EltwiseTypes::DIVIDE,
ngraph::helpers::EltwiseTypes::SQUARED_DIFF,
ngraph::helpers::EltwiseTypes::POWER,
ngraph::helpers::EltwiseTypes::FLOOR_MOD,
};
Config getConfig() {
Config config;
config[InferenceEngine::MYRIAD_DETECT_NETWORK_BATCH] = CONFIG_VALUE(NO);
if (CommonTestUtils::vpu::CheckMyriad2()) {
config[InferenceEngine::MYRIAD_DISABLE_REORDER] = CONFIG_VALUE(YES);
}
return config;
}
std::vector<ngraph::helpers::EltwiseTypes> eltwiseMathTypesINT = {
ngraph::helpers::EltwiseTypes::MULTIPLY,
ngraph::helpers::EltwiseTypes::ADD,
ngraph::helpers::EltwiseTypes::DIVIDE,
};
const auto multiply_params = ::testing::Combine(
::testing::ValuesIn(inShapes),
::testing::ValuesIn(eltwiseOpTypes),
::testing::ValuesIn(secondaryInputTypes),
::testing::ValuesIn(opTypes),
::testing::ValuesIn(netPrecisions),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD),
::testing::Values(getConfig()));
INSTANTIATE_TEST_CASE_P(EltwiseMathFP,
EltwiseLayerTest,
::testing::Combine(
::testing::ValuesIn(inShapes),
::testing::ValuesIn(eltwiseMathTypesFP),
::testing::Values(ngraph::helpers::InputLayerType::PARAMETER),
::testing::ValuesIn(opTypes),
::testing::ValuesIn(fpTypes),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD),
::testing::Values(Config{{InferenceEngine::MYRIAD_DETECT_NETWORK_BATCH, CONFIG_VALUE(NO)}})),
EltwiseLayerTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(CompareWithRefs, EltwiseLayerTest, multiply_params, EltwiseLayerTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(EltwiseMathInt,
EltwiseLayerTest,
::testing::Combine(
::testing::ValuesIn(inShapes),
::testing::ValuesIn(eltwiseMathTypesINT),
::testing::Values(ngraph::helpers::InputLayerType::PARAMETER),
::testing::ValuesIn(opTypes),
::testing::ValuesIn(intTypes),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD),
::testing::Values(Config{{InferenceEngine::MYRIAD_DETECT_NETWORK_BATCH, CONFIG_VALUE(NO)}})),
EltwiseLayerTest::getTestCaseName);
} // namespace

View File

@@ -0,0 +1,96 @@
// Copyright (C) 2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <gtest/gtest.h>
#include "single_layer_tests/logical.hpp"
#include "common_test_utils/test_constants.hpp"
#include "common/myriad_common_test_utils.hpp"
#include "ngraph_functions/builders.hpp"
#include <vpu/private_plugin_config.hpp>
#include <vector>
using namespace LayerTestsDefinitions;
namespace {
typedef std::map<std::string, std::string> Config;
class LogicalLayerTestVPU : public LogicalLayerTest {
protected:
void SetUp() override {
const auto& inputShapes = std::get<0>(GetParam());
const auto& ngInputsPrecision = std::get<1>(GetParam());
const auto& logicalOpType = std::get<2>(GetParam());
targetDevice = std::get<5>(GetParam());
const auto& additionalConfig = std::get<6>(GetParam());
configuration.insert(additionalConfig.begin(), additionalConfig.end());
outPrc = ngInputsPrecision;
auto ngInputsPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(ngInputsPrecision);
auto inputs = ngraph::builder::makeParams(ngInputsPrc, {inputShapes.first, logicalOpType != ngraph::helpers::LogicalTypes::LOGICAL_NOT ?
inputShapes.second : ngraph::Shape()});
ngraph::NodeVector convertedInputs;
for (const auto& input : inputs) {
convertedInputs.push_back(std::make_shared<ngraph::opset5::Convert>(input, ngraph::element::boolean));
}
const auto logicalNode = ngraph::builder::makeLogical(convertedInputs[0], convertedInputs[1], logicalOpType);
function = std::make_shared<ngraph::Function>(logicalNode, inputs, "Logical");
}
};
TEST_P(LogicalLayerTestVPU, LogicalTests) {
Run();
}
std::map<std::vector<size_t>, std::vector<std::vector<size_t >>> inputShapes = {
{{5}, {{1}}},
{{5}, {{1}, {1, 1}, {2, 5}, {1, 1, 1}, {2, 2, 5}}},
{{2, 200}, {{1}, {200}, {1, 200}, {2, 200}, {2, 2, 200}}},
{{1, 3, 20}, {{20}, {2, 1, 1}}},
{{2, 17, 3, 4}, {{4}, {1, 3, 4}, {2, 1, 3, 4}}},
{{2, 1, 1, 3, 1}, {{1}, {1, 3, 4}, {2, 1, 3, 4}, {1, 1, 1, 1, 1}}},
};
std::map<std::vector<size_t>, std::vector<std::vector<size_t >>> inputShapesNot = {
{{1}, {}},
{{5}, {}},
{{2, 200}, {}},
{{1, 3, 20}, {}},
{{2, 17, 3, 4}, {}},
{{2, 1, 1, 3, 1}, {}},
};
std::vector<ngraph::helpers::LogicalTypes> eltwiseLogicalTypesInt = {
ngraph::helpers::LogicalTypes::LOGICAL_AND,
};
INSTANTIATE_TEST_CASE_P(EltwiseLogicalInt,
LogicalLayerTestVPU,
::testing::Combine(
::testing::ValuesIn(LogicalLayerTest::combineShapes(inputShapes)),
::testing::Values(InferenceEngine::Precision::I32),
::testing::ValuesIn(eltwiseLogicalTypesInt),
::testing::Values(ngraph::helpers::InputLayerType::PARAMETER),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD),
::testing::Values(Config{{InferenceEngine::MYRIAD_DETECT_NETWORK_BATCH, CONFIG_VALUE(NO)}})),
LogicalLayerTest::getTestCaseName);
INSTANTIATE_TEST_CASE_P(EltwiseLogicalNotInt,
LogicalLayerTestVPU,
::testing::Combine(
::testing::ValuesIn(LogicalLayerTest::combineShapes(inputShapesNot)),
::testing::Values(InferenceEngine::Precision::BOOL),
::testing::Values(ngraph::helpers::LogicalTypes::LOGICAL_NOT),
::testing::Values(ngraph::helpers::InputLayerType::PARAMETER),
::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD),
::testing::Values(Config{{InferenceEngine::MYRIAD_DETECT_NETWORK_BATCH, CONFIG_VALUE(NO)}})),
LogicalLayerTest::getTestCaseName);
} // namespace

View File

@@ -23,5 +23,7 @@ std::vector<std::string> disabledTestPatterns() {
R"(.*(IEClassLoadNetwork).*(QueryNetworkMULTIWithHETERONoThrow_V10|QueryNetworkHETEROWithMULTINoThrow_V10).*)",
// TODO: Issue: 34348
R"(.*IEClassGetAvailableDevices.*)",
// TODO: Issue: 38643
R"(.*EltwiseLogicalNotInt.*)",
};
}

View File

@@ -19,10 +19,11 @@ using InputShapesTuple = std::pair<std::vector<size_t>, std::vector<size_t>>;
typedef std::tuple<
ComparisonParams::InputShapesTuple, // Input shapes tuple
InferenceEngine::Precision, // Inputs precision
InferenceEngine::Precision, // NG Inputs precision
ngraph::helpers::ComparisonTypes, // Comparison op type
ngraph::helpers::InputLayerType, // Second input type
InferenceEngine::Precision, // Net precision
InferenceEngine::Precision, // IE in precision
InferenceEngine::Precision, // IE out precision
std::string, // Device name
std::map<std::string, std::string> // Additional network configuration
> ComparisonTestParams;

View File

@@ -30,6 +30,7 @@ typedef std::tuple<
class EltwiseLayerTest : public testing::WithParamInterface<EltwiseTestParams>,
virtual public LayerTestsUtils::LayerTestsCommon {
protected:
InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo &info) const override;
void SetUp() override;
public:

View File

@@ -30,6 +30,7 @@ typedef std::tuple<
class LogicalLayerTest : public testing::WithParamInterface<LogicalTestParams>,
virtual public LayerTestsUtils::LayerTestsCommon {
protected:
InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo &info) const override;
void SetUp() override;
public:

View File

@@ -18,38 +18,62 @@ using namespace LayerTestsDefinitions::ComparisonParams;
namespace LayerTestsDefinitions {
std::string ComparisonLayerTest::getTestCaseName(testing::TestParamInfo<ComparisonTestParams> obj) {
InputShapesTuple inputShapes;
InferenceEngine::Precision inputsPrecision;
InferenceEngine::Precision ngInputsPrecision;
ngraph::helpers::ComparisonTypes comparisonOpType;
ngraph::helpers::InputLayerType secondInputType;
InferenceEngine::Precision netPrecision;
InferenceEngine::Precision ieInPrecision;
InferenceEngine::Precision ieOutPrecision;
std::string targetName;
std::map<std::string, std::string> additional_config;
std::tie(inputShapes, inputsPrecision, comparisonOpType, secondInputType, netPrecision, targetName, additional_config) = obj.param;
std::tie(inputShapes,
ngInputsPrecision,
comparisonOpType,
secondInputType,
ieInPrecision,
ieOutPrecision,
targetName,
additional_config) = obj.param;
std::ostringstream results;
results << "IS0=" << CommonTestUtils::vec2str(inputShapes.first) << "_";
results << "IS1=" << CommonTestUtils::vec2str(inputShapes.second) << "_";
results << "inputsPRC=" << inputsPrecision.name() << "_";
results << "inputsPRC=" << ngInputsPrecision.name() << "_";
results << "comparisonOpType=" << comparisonOpType << "_";
results << "secondInputType=" << secondInputType << "_";
results << "netPRC=" << netPrecision.name() << "_";
if (ieInPrecision != InferenceEngine::Precision::UNSPECIFIED) {
results << "IEInPRC=" << ieInPrecision.name() << "_";
}
if (ieOutPrecision != InferenceEngine::Precision::UNSPECIFIED) {
results << "IEOutPRC=" << ieOutPrecision.name() << "_";
}
results << "targetDevice=" << targetName;
return results.str();
}
void ComparisonLayerTest::SetUp() {
InputShapesTuple inputShapes;
InferenceEngine::Precision inputsPrecision;
InferenceEngine::Precision ngInputsPrecision;
ngraph::helpers::ComparisonTypes comparisonOpType;
ngraph::helpers::InputLayerType secondInputType;
InferenceEngine::Precision netPrecision;
InferenceEngine::Precision ieInPrecision;
InferenceEngine::Precision ieOutPrecision;
std::string targetName;
std::map<std::string, std::string> additional_config;
std::tie(inputShapes, inputsPrecision, comparisonOpType, secondInputType, netPrecision, targetDevice, additional_config) = this->GetParam();
std::tie(inputShapes,
ngInputsPrecision,
comparisonOpType,
secondInputType,
ieInPrecision,
ieOutPrecision,
targetDevice,
additional_config) = this->GetParam();
auto ngInputsPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(inputsPrecision);
auto ngInputsPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(ngInputsPrecision);
configuration.insert(additional_config.begin(), additional_config.end());
inPrc = ieInPrecision;
outPrc = ieOutPrecision;
auto inputs = ngraph::builder::makeParams(ngInputsPrc, {inputShapes.first});
auto secondInput = ngraph::builder::makeInputLayer(ngInputsPrc, secondInputType, inputShapes.second);

View File

@@ -35,6 +35,21 @@ std::string EltwiseLayerTest::getTestCaseName(testing::TestParamInfo<EltwiseTest
return results.str();
}
InferenceEngine::Blob::Ptr EltwiseLayerTest::GenerateInput(const InferenceEngine::InputInfo &info) const {
const auto opType = std::get<1>(GetParam());
switch (opType) {
case ngraph::helpers::EltwiseTypes::POWER:
case ngraph::helpers::EltwiseTypes::FLOOR_MOD:
return info.getPrecision().is_float() ? FuncTestUtils::createAndFillBlob(info.getTensorDesc(), 2, 2, 128):
FuncTestUtils::createAndFillBlob(info.getTensorDesc(), 4, 2);
case ngraph::helpers::EltwiseTypes::DIVIDE:
return info.getPrecision().is_float() ? FuncTestUtils::createAndFillBlob(info.getTensorDesc(), 2, 2, 128):
FuncTestUtils::createAndFillBlob(info.getTensorDesc(), 100, 101);
default:
return FuncTestUtils::createAndFillBlob(info.getTensorDesc());
}
}
void EltwiseLayerTest::SetUp() {
std::vector<std::vector<size_t>> inputShapes;
InferenceEngine::Precision netPrecision;

View File

@@ -51,6 +51,9 @@ std::vector<InputShapesTuple> LogicalLayerTest::combineShapes(const std::map<std
return resVec;
}
InferenceEngine::Blob::Ptr LogicalLayerTest::GenerateInput(const InferenceEngine::InputInfo &info) const {
return FuncTestUtils::createAndFillBlob(info.getTensorDesc(), 2, 0);
}
void LogicalLayerTest::SetUp() {
InputShapesTuple inputShapes;

View File

@@ -117,7 +117,11 @@ enum ActivationTypes {
enum EltwiseTypes {
ADD,
MULTIPLY,
SUBTRACT
SUBTRACT,
DIVIDE,
SQUARED_DIFF,
POWER,
FLOOR_MOD
};
enum ComparisonTypes {

View File

@@ -21,6 +21,14 @@ std::shared_ptr<ngraph::Node> makeEltwise(const ngraph::Output<Node> &in0,
return std::make_shared<ngraph::opset3::Subtract>(in0, in1);
case ngraph::helpers::EltwiseTypes::MULTIPLY:
return std::make_shared<ngraph::opset3::Multiply>(in0, in1);
case ngraph::helpers::EltwiseTypes::DIVIDE:
return std::make_shared<ngraph::opset3::Divide>(in0, in1);
case ngraph::helpers::EltwiseTypes::SQUARED_DIFF:
return std::make_shared<ngraph::opset3::SquaredDifference>(in0, in1);
case ngraph::helpers::EltwiseTypes::POWER:
return std::make_shared<ngraph::opset3::Power>(in0, in1);
case ngraph::helpers::EltwiseTypes::FLOOR_MOD:
return std::make_shared<ngraph::opset3::FloorMod>(in0, in1);
default: {
throw std::runtime_error("Incorrect type of Eltwise operation");
}

View File

@@ -550,6 +550,18 @@ std::ostream& operator<<(std::ostream & os, ngraph::helpers::EltwiseTypes type)
case ngraph::helpers::EltwiseTypes::ADD:
os << "Sum";
break;
case ngraph::helpers::EltwiseTypes::DIVIDE:
os << "Div";
break;
case ngraph::helpers::EltwiseTypes::SQUARED_DIFF:
os << "SqDiff";
break;
case ngraph::helpers::EltwiseTypes::POWER:
os << "Pow";
break;
case ngraph::helpers::EltwiseTypes::FLOOR_MOD:
os << "FloorMod";
break;
default:
throw std::runtime_error("NOT_SUPPORTED_OP_TYPE");
}