Remove In,OutPrecision

This commit is contained in:
Efode, Irina 2021-10-18 14:46:41 +03:00
parent 5ed1394735
commit 86f10b5c11
16 changed files with 31 additions and 107 deletions

View File

@ -79,8 +79,6 @@ const auto multiply_params = ::testing::Combine(
::testing::ValuesIn(secondaryInputTypes),
::testing::ValuesIn(opTypes),
::testing::ValuesIn(netPrecisions),
::testing::Values(ov::element::Type_t::undefined),
::testing::Values(ov::element::Type_t::undefined),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::Values(additional_config));
@ -90,8 +88,6 @@ const auto multiply_params_dynamic = ::testing::Combine(
::testing::ValuesIn(secondaryInputTypesDynamic),
::testing::ValuesIn(opTypesDynamic),
::testing::ValuesIn(netPrecisions),
::testing::Values(ov::element::Type_t::undefined),
::testing::Values(ov::element::Type_t::undefined),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::Values(additional_config));
@ -120,8 +116,6 @@ const auto single_thread_params = ::testing::Combine(
::testing::ValuesIn(secondaryInputTypes),
::testing::ValuesIn(opTypes),
::testing::ValuesIn(netPrecisions),
::testing::Values(ov::element::Type_t::undefined),
::testing::Values(ov::element::Type_t::undefined),
::testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
::testing::Values(additional_config_single_thread));

View File

@ -34,8 +34,6 @@ const std::vector<size_t> axis2D = {
const auto params2D_static = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(ov::element::undefined),
testing::Values(ov::element::undefined),
testing::ValuesIn(ov::test::static_shapes_to_test_representation(inputStaticShape2D)),
testing::ValuesIn(axis2D),
testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
@ -44,8 +42,6 @@ const auto params2D_static = testing::Combine(
const auto params2D_dynamic = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(ov::element::undefined),
testing::Values(ov::element::undefined),
testing::ValuesIn(inputDynamicShape2D),
testing::ValuesIn(axis2D),
testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
@ -82,8 +78,6 @@ const std::vector<size_t> axis4D = {0, 1, 2, 3};
const auto params4Dstatic = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(ov::element::undefined),
testing::Values(ov::element::undefined),
testing::ValuesIn(ov::test::static_shapes_to_test_representation(inputStaticShape4D)),
testing::ValuesIn(axis4D),
testing::Values(CommonTestUtils::DEVICE_TEMPLATE),
@ -92,8 +86,6 @@ const auto params4Dstatic = testing::Combine(
const auto params4Ddynamic = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(ov::element::undefined),
testing::Values(ov::element::undefined),
testing::ValuesIn(inputDynamicShape4D),
testing::ValuesIn(axis4D),
testing::Values(CommonTestUtils::DEVICE_TEMPLATE),

View File

@ -79,8 +79,6 @@ const auto multiply_params = ::testing::Combine(
::testing::ValuesIn(secondaryInputTypes),
::testing::ValuesIn(opTypes),
::testing::ValuesIn(netPrecisions),
::testing::Values(ov::element::Type_t::undefined),
::testing::Values(ov::element::Type_t::undefined),
::testing::Values(CommonTestUtils::DEVICE_CPU),
::testing::Values(additional_config));
@ -90,8 +88,6 @@ const auto multiply_params_dynamic = ::testing::Combine(
::testing::ValuesIn(secondaryInputTypesDynamic),
::testing::ValuesIn(opTypesDynamic),
::testing::ValuesIn(netPrecisions),
::testing::Values(ov::element::Type_t::undefined),
::testing::Values(ov::element::Type_t::undefined),
::testing::Values(CommonTestUtils::DEVICE_CPU),
::testing::Values(additional_config));
@ -120,8 +116,6 @@ const auto single_thread_params = ::testing::Combine(
::testing::ValuesIn(secondaryInputTypes),
::testing::ValuesIn(opTypes),
::testing::ValuesIn(netPrecisions),
::testing::Values(ov::element::Type_t::undefined),
::testing::Values(ov::element::Type_t::undefined),
::testing::Values(CommonTestUtils::DEVICE_CPU),
::testing::Values(additional_config_single_thread));

View File

@ -34,8 +34,6 @@ const std::vector<size_t> axis2D = {
const auto params2D_static = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(ov::element::Type_t::undefined),
testing::Values(ov::element::Type_t::undefined),
testing::ValuesIn(ov::test::static_shapes_to_test_representation(inputStaticShape2D)),
testing::ValuesIn(axis2D),
testing::Values(CommonTestUtils::DEVICE_CPU),
@ -44,8 +42,6 @@ const auto params2D_static = testing::Combine(
const auto params2D_dynamic = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(ov::element::Type_t::undefined),
testing::Values(ov::element::Type_t::undefined),
testing::ValuesIn(inputDynamicShape2D),
testing::ValuesIn(axis2D),
testing::Values(CommonTestUtils::DEVICE_CPU),
@ -82,8 +78,6 @@ const std::vector<size_t> axis4D = {0, 1, 2, 3};
const auto params4Dstatic = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(ov::element::Type_t::undefined),
testing::Values(ov::element::Type_t::undefined),
testing::ValuesIn(ov::test::static_shapes_to_test_representation(inputStaticShape4D)),
testing::ValuesIn(axis4D),
testing::Values(CommonTestUtils::DEVICE_CPU),
@ -92,8 +86,6 @@ const auto params4Dstatic = testing::Combine(
const auto params4Ddynamic = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(ov::element::Type_t::undefined),
testing::Values(ov::element::Type_t::undefined),
testing::ValuesIn(inputDynamicShape4D),
testing::ValuesIn(axis4D),
testing::Values(CommonTestUtils::DEVICE_CPU),

View File

@ -53,8 +53,6 @@ const auto multiply_params = ::testing::Combine(
::testing::ValuesIn(secondaryInputTypes),
::testing::ValuesIn(opTypes),
::testing::ValuesIn(netPrecisions),
::testing::Values(ov::element::undefined),
::testing::Values(ov::element::undefined),
::testing::Values(CommonTestUtils::DEVICE_GNA),
::testing::Values(additional_config));

View File

@ -60,8 +60,6 @@ const auto multiply_params = ::testing::Combine(
::testing::ValuesIn(secondaryInputTypes),
::testing::ValuesIn(opTypes),
::testing::ValuesIn(netPrecisions),
::testing::Values(ov::element::undefined),
::testing::Values(ov::element::undefined),
::testing::Values(CommonTestUtils::DEVICE_GPU),
::testing::Values(additional_config));

View File

@ -27,8 +27,6 @@ const std::vector<size_t> axis2D = {
const auto params2D = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(ov::element::undefined),
testing::Values(ov::element::undefined),
testing::ValuesIn(ov::test::static_shapes_to_test_representation(inputShapes2D)),
testing::ValuesIn(axis2D),
testing::Values(CommonTestUtils::DEVICE_GPU),
@ -52,8 +50,6 @@ const std::vector<size_t> axis4D = {0, 1, 2, 3};
const auto params4D = testing::Combine(
testing::ValuesIn(netPrecisions),
testing::Values(ov::element::undefined),
testing::Values(ov::element::undefined),
testing::ValuesIn(ov::test::static_shapes_to_test_representation(inputShapes4D)),
testing::ValuesIn(axis4D),
testing::Values(CommonTestUtils::DEVICE_GPU),

View File

@ -63,8 +63,6 @@ INSTANTIATE_TEST_SUITE_P(smoke_EltwiseMathFP,
::testing::Values(ngraph::helpers::InputLayerType::PARAMETER),
::testing::ValuesIn(opTypes),
::testing::ValuesIn(fpTypes),
::testing::Values(ov::element::undefined),
::testing::Values(ov::element::undefined),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD),
::testing::Values(Config{{InferenceEngine::MYRIAD_DETECT_NETWORK_BATCH, CONFIG_VALUE(NO)}})),
EltwiseLayerTest::getTestCaseName);
@ -77,8 +75,6 @@ INSTANTIATE_TEST_SUITE_P(smoke_EltwiseMathInt,
::testing::Values(ngraph::helpers::InputLayerType::PARAMETER),
::testing::ValuesIn(opTypes),
::testing::ValuesIn(intTypes),
::testing::Values(ov::element::undefined),
::testing::Values(ov::element::undefined),
::testing::Values(CommonTestUtils::DEVICE_MYRIAD),
::testing::Values(Config{{InferenceEngine::MYRIAD_DETECT_NETWORK_BATCH, CONFIG_VALUE(NO)}})),
EltwiseLayerTest::getTestCaseName);

View File

@ -27,7 +27,7 @@ public:
void TearDown() override {
if (!configuration.empty()) {
ov::test::PluginCache::get().core().reset();
ov::test::utils::PluginCache::get().core().reset();
}
}
@ -44,12 +44,11 @@ protected:
void init_input_shapes(const InputShapes& shapes);
void init_input_shapes(const InputShape& shapes);
std::shared_ptr<ov::runtime::Core> core = ov::test::PluginCache::get().core();
std::shared_ptr<ov::runtime::Core> core = ov::test::utils::PluginCache::get().core();
std::string targetDevice;
Config configuration;
std::shared_ptr<ov::Function> function, functionRefs = nullptr;
ElementType inType = ov::element::Type_t::undefined, outType = ov::element::Type_t::undefined;
std::map<std::string, ov::runtime::Tensor> inputs;
std::vector<ngraph::PartialShape> inputDynamicShapes;
std::vector<std::vector<ngraph::Shape>> targetStaticShapes;
@ -64,7 +63,6 @@ protected:
LayerTestsUtils::Summary& summary = LayerTestsUtils::Summary::getInstance();;
private:
void resize_function(const std::vector<ov::Shape>& targetInputStaticShapes);
std::vector<ov::runtime::Tensor> calculate_refs();
std::vector<ov::runtime::Tensor> get_plugin_outputs();
};

View File

@ -17,8 +17,6 @@ typedef std::tuple<
ngraph::helpers::InputLayerType, // secondary input type
CommonTestUtils::OpType, // op type
ElementType, // Net precision
ElementType, // Input precision
ElementType, // Output precision
TargetDevice, // Device name
Config // Additional network configuration
> EltwiseTestParams;

View File

@ -12,9 +12,7 @@ namespace subgraph {
using SoftMaxTestParams = std::tuple<
ElementType, // netPrecision
ElementType, // Input precision
ElementType, // Output precision
InputShape, // Dynamic shape + Target static shapes
InputShape, // Dynamic shape + Target static shapes
size_t, // axis
TargetDevice, // targetDevice
Config // config

View File

@ -47,7 +47,7 @@ void SubgraphBaseTest::run() {
try {
if (!inputDynamicShapes.empty()) {
// resize ngraph function according new target shape
resize_function(targetStaticShapeVec);
ngraph::helpers::resize_function(targetStaticShapeVec, functionRefs);
}
generate_inputs(targetStaticShapeVec);
infer();
@ -121,27 +121,7 @@ void SubgraphBaseTest::compare(const std::vector<ov::runtime::Tensor> &expected,
}
}
// Applies the test-requested I/O precision overrides to the model before
// compilation: every parameter/result output tensor is re-typed to
// inType/outType respectively, unless that member is still
// ov::element::Type_t::undefined (the "no override requested" sentinel).
void SubgraphBaseTest::configure_model() {
// configure input precision: force each parameter tensor to inType when set
{
auto params = function->get_parameters();
for (auto& param : params) {
if (inType != ov::element::Type_t::undefined) {
param->get_output_tensor(0).set_element_type(inType);
}
}
}
// configure output precision: force each result tensor to outType when set
{
auto results = function->get_results();
for (auto& result : results) {
if (outType != ov::element::Type_t::undefined) {
result->get_output_tensor(0).set_element_type(outType);
}
}
}
}
// Intentionally a no-op: the per-test input/output precision overrides
// (inType/outType tuple members) were removed from the test parameters, so
// there is nothing left to configure on the model before compilation. The
// hook is kept so compile_model() (which calls it) keeps a stable shape.
void SubgraphBaseTest::configure_model() {}
void SubgraphBaseTest::compile_model() {
configure_model();
@ -182,7 +162,7 @@ std::vector<ov::runtime::Tensor> SubgraphBaseTest::calculate_refs() {
std::vector<ov::runtime::Tensor> SubgraphBaseTest::get_plugin_outputs() {
auto outputs = std::vector<ov::runtime::Tensor>{};
for (const auto& output : executableNetwork.outputs()) {
const auto& name = *output.get_tensor().get_names().begin();
const auto& name = output.get_tensor().get_any_name();
outputs.push_back(inferRequest.get_tensor(name));
}
return outputs;
@ -202,17 +182,6 @@ void SubgraphBaseTest::validate() {
compare(expectedOutputs, actualOutputs);
}
// Reshapes both the model under test (`function`) and its reference copy
// (`functionRefs`) so each parameter takes the positionally matching static
// shape. The reshape map is keyed by the first tensor name of each
// parameter's output (names are presumed unique per parameter — the
// ov::Model::reshape contract relies on that).
void SubgraphBaseTest::resize_function(const std::vector<ov::Shape>& targetInputStaticShapes) {
auto params = function->get_parameters();
std::map<std::string, ov::PartialShape> shapes;
// Every parameter needs a target shape; surplus shapes are simply ignored.
ASSERT_LE(params.size(), targetInputStaticShapes.size());
for (size_t i = 0; i < params.size(); i++) {
shapes.insert({*params[i]->get_output_tensor(0).get_names().begin(), targetInputStaticShapes[i]});
}
function->reshape(shapes);
functionRefs->reshape(shapes);
}
void SubgraphBaseTest::init_input_shapes(const InputShapes& shapes) {
targetStaticShapes = shapes.second;
if (!shapes.first.empty()) {

View File

@ -12,13 +12,13 @@ namespace subgraph {
std::string EltwiseLayerTest::getTestCaseName(const testing::TestParamInfo<EltwiseTestParams>& obj) {
std::pair<std::vector<ngraph::PartialShape>, std::vector<std::vector<ngraph::Shape>>> shapes;
ElementType netType, inType, outType;
ElementType netType;
ngraph::helpers::InputLayerType secondaryInputType;
CommonTestUtils::OpType opType;
ngraph::helpers::EltwiseTypes eltwiseOpType;
std::string targetName;
std::map<std::string, std::string> additional_config;
std::tie(shapes, eltwiseOpType, secondaryInputType, opType, netType, inType, outType, targetName, additional_config) = obj.param;
std::tie(shapes, eltwiseOpType, secondaryInputType, opType, netType, targetName, additional_config) = obj.param;
std::ostringstream results;
results << "IS=" << CommonTestUtils::partialShape2str(shapes.first) << "_";
@ -34,8 +34,6 @@ std::string EltwiseLayerTest::getTestCaseName(const testing::TestParamInfo<Eltwi
results << "secondaryInputType=" << secondaryInputType << "_";
results << "opType=" << opType << "_";
results << "NetType=" << netType << "_";
results << "InType=" << inType << "_";
results << "OutType=" << outType << "_";
results << "trgDev=" << targetName;
return results.str();
}
@ -78,7 +76,7 @@ void EltwiseLayerTest::SetUp() {
CommonTestUtils::OpType opType;
ngraph::helpers::EltwiseTypes eltwiseType;
Config additional_config;
std::tie(shapes, eltwiseType, secondaryInputType, opType, netType, inType, outType, targetDevice, additional_config) =
std::tie(shapes, eltwiseType, secondaryInputType, opType, netType, targetDevice, additional_config) =
this->GetParam();
init_input_shapes(shapes);

View File

@ -13,17 +13,15 @@ namespace test {
namespace subgraph {
std::string SoftMaxLayerTest::getTestCaseName(const testing::TestParamInfo<SoftMaxTestParams>& obj) {
ElementType netType, inType, outType;
ElementType netType;
InputShape shapes;
size_t axis;
TargetDevice targetDevice;
Config config;
std::tie(netType, inType, outType, shapes, axis, targetDevice, config) = obj.param;
std::tie(netType, shapes, axis, targetDevice, config) = obj.param;
std::ostringstream result;
result << "NetType=" << netType << "_";
result << "InType=" << inType << "_";
result << "OutType=" << outType << "_";
result << "IS=" << CommonTestUtils::partialShape2str({shapes.first}) << "_";
result << "TS=";
for (const auto& item : shapes.second) {
@ -40,10 +38,9 @@ void SoftMaxLayerTest::SetUp() {
ElementType ngPrc;
size_t axis;
std::tie(ngPrc, inType, outType, shapes, axis, targetDevice, configuration) = GetParam();
std::tie(ngPrc, shapes, axis, targetDevice, configuration) = GetParam();
init_input_shapes(shapes);
// TODO: iefode: change namespace names a bit later
const auto params = ngraph::builder::makeDynamicParams(ngPrc, inputDynamicShapes);
const auto paramOuts =
ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));

View File

@ -320,5 +320,7 @@ std::ostream& operator<<(std::ostream & os, SequenceTestsMode type);
std::ostream& operator<<(std::ostream & os, MemoryTransformation type);
void resize_function(const std::vector<ov::Shape>& targetInputStaticShapes, std::shared_ptr<ov::Function> function);
} // namespace helpers
} // namespace ngraph

View File

@ -163,25 +163,20 @@ std::vector<ov::runtime::Tensor>
const auto &parameterShape = parameter->get_shape();
const auto &parameterType = parameter->get_element_type();
const auto &parameterSize = shape_size(parameterShape) * parameterType.size();
const auto &parameterIndex = function->get_parameter_index(parameter);
auto inputIt = inputs.find(parameter->get_friendly_name());
// TODO: iefode
if (inputIt == inputs.end()) {
// runtime error
throw std::runtime_error("Parameter: " + parameter->get_friendly_name() + "was nor find in input parameters");
}
auto input = inputIt->second;
const auto inType = input.get_element_type();
// TODO: iefode
// if (inType != element::undefined && inType != parameterType) {
// input = convertOutputPrecision(input, inType, parameterType, shape_size(parameterShape));
// }
// const auto &inputSize = input.size();
// NGRAPH_CHECK(parameterSize == inputSize,
// "Got parameter (", parameter->get_friendly_name(), ") of size ", parameterSize,
// " bytes, but corresponding input with index ", parameterIndex,
// " has ", inputSize, " bytes");
const auto &inputSize = input.get_size();
NGRAPH_CHECK(parameterSize == inputSize,
"Got parameter (", parameter->get_friendly_name(), ") of size ", parameterSize,
" bytes, but corresponding input with index ", parameterIndex,
" has ", inputSize, " bytes");
auto tensor = backend->create_tensor(parameterType, parameterShape);
tensor->write(input.data(), parameterSize);
@ -908,5 +903,14 @@ std::ostream& operator<<(std::ostream & os, MemoryTransformation type) {
return os;
}
// Reshapes `function` in place so that every model parameter takes the
// positionally matching entry of `targetInputStaticShapes`. The reshape map
// is keyed by each parameter's output tensor name (get_any_name()).
void resize_function(const std::vector<ov::Shape>& targetInputStaticShapes, std::shared_ptr<ov::Function> function) {
    auto params = function->get_parameters();
    // Guard the positional lookup below: the member version this helper
    // replaced asserted params.size() <= shapes.size(); without the check,
    // targetInputStaticShapes[i] reads out of bounds when too few target
    // shapes are supplied.
    NGRAPH_CHECK(params.size() <= targetInputStaticShapes.size(),
                 "resize_function: got ", targetInputStaticShapes.size(),
                 " target shapes for ", params.size(), " model parameters");
    std::map<std::string, ov::PartialShape> shapes;
    for (size_t i = 0; i < params.size(); i++) {
        shapes.insert({params[i]->get_output_tensor(0).get_any_name(), targetInputStaticShapes[i]});
    }
    function->reshape(shapes);
}
} // namespace helpers
} // namespace ngraph