Add ctest for some test targets (#13727)

* Add ctest for some test targets

* Fixed test coverage

* Replaced unit tests cmake script

* Fixed code style

* Use ov_add_test_target for FEs

* Fixed code style
Ilya Churaev 2022-11-08 10:25:16 +04:00 committed by GitHub
parent 07a0ff58f2
commit 0bb72a4949
22 changed files with 225 additions and 745 deletions

View File

@@ -34,9 +34,22 @@ And build OpenVINO as usual.
In order to generate coverage reports, the tests must be run first. The more tests are run, the better the coverage percentage that can be achieved. E.g. for the `openvino` component, `InferenceEngineUnitTests`, `ieUnitTests`, and `ieFuncTests` must be run, as well as the plugin tests.
```bash
-$ ctest -V -L IE
+$ ctest -V
```
+OpenVINO components define several common groups which allow running tests for a separate component (with the ctest argument `-L GROUP_NAME`):
+- OV - core OpenVINO tests
+- IR_FE - IR frontend tests
+- ONNX_FE - ONNX frontend tests
+- PADDLE_FE - Paddle frontend tests
+- TF_FE - TensorFlow frontend tests
+- CPU - CPU plugin tests
+- GPU - GPU plugin tests
+- GNA - GNA plugin tests
+- VPU - VPU plugin tests
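For example, once a build directory with tests has been configured, these groups can be combined with standard ctest options. This is an illustrative sketch only (not part of the documentation change above); the exact set of labels available depends on which components were built:

```bash
# Show which labels and tests are registered in this build directory (nothing is run)
ctest --print-labels
ctest -N

# Run only the core OpenVINO tests, printing output for failing tests
ctest -L OV --output-on-failure

# -L accepts a regular expression, so groups can be combined; -j runs tests in parallel
ctest -L "OV|IR_FE" -j 8
```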
After a sufficient number of tests are executed, the coverage numbers can be calculated. In order to do this, run:
```bash

View File

@@ -16,10 +16,10 @@ addIeTargetTest(
        snippetsNgraphFunctions
    ADD_CPPLINT
    LABELS
-        IE SNIPPETS
+        IE OV SNIPPETS
)

ie_faster_build(${TARGET_NAME}
    UNITY
    PCH PRIVATE "src/precomp.hpp"
)

View File

@@ -1,6 +1,7 @@
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#

+set(TARGET_NAME ov_core_unit_tests)

set(CMAKE_INTERPROCEDURAL_OPTIMIZATION_RELEASE OFF)
@@ -29,421 +30,6 @@ if(LINUX)
    set(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE)
endif()
set(SRC
aligned_buffer.cpp
all_close_f.cpp
bfloat16.cpp
build_graph.cpp
builder_autobroadcast.cpp
check.cpp
conditional_compilation/ngraph_cc_collect.cpp
conditional_compilation/ngraph_cc_off.cpp
conditional_compilation/ngraph_cc_on.cpp
constant.cpp
constant_folding.cpp
control_dependencies.cpp
convert_u1_to_string.cpp
coordinate.cpp
coordinate_range.cpp
copy.cpp
copy_runtime_info.cpp
dimension.cpp
element_type.cpp
eval.cpp
evaluate_bound/concat.cpp
evaluate_bound/transpose.cpp
extension.cpp
file_util.cpp
float16.cpp
framework_node.cpp
model.cpp
graph_rewrite.cpp
input_output_assign.cpp
int4.cpp
intervals.cpp
layout.cpp
main.cpp
matcher_pass.cpp
misc.cpp
rtti.cpp
node_input_output.cpp
rtti.cpp
op.cpp
opset.cpp
opset1.cpp
ov_default_allocator_test.cpp
ov_tensor_test.cpp
any.cpp
partial_shape.cpp
pass_config.cpp
pass_manager.cpp
pass/serialization/cleanup.cpp
pass/serialization/const_compression.cpp
pass/serialization/deterministicity.cpp
pass/serialization/serialize.cpp
pass/serialization/from_model.cpp
pattern.cpp
preprocess.cpp
replace_node.cpp
reshape_opt_kernel.cpp
shape.cpp
span.cpp
specialize_function.cpp
tensor.cpp
threading.cpp
type_info.cpp
type_prop/abs.cpp
type_prop/acos.cpp
type_prop/adaptive_avg_pool.cpp
type_prop/adaptive_max_pool.cpp
type_prop/add.cpp
type_prop/asin.cpp
type_prop/asinh.cpp
type_prop/assign.cpp
type_prop/atan.cpp
type_prop/atanh.cpp
type_prop/augru_cell.cpp
type_prop/augru_sequence.cpp
type_prop/avg_pool.cpp
type_prop/batch_norm.cpp
type_prop/batch_to_space.cpp
type_prop/binary_convolution.cpp
type_prop/binary_elementwise.cpp
type_prop/broadcast.cpp
type_prop/bucketize.cpp
type_prop/ceiling.cpp
type_prop/clamp.cpp
type_prop/concat.cpp
type_prop/constant.cpp
type_prop/convert.cpp
type_prop/convert_color_i420.cpp
type_prop/convert_color_nv12.cpp
type_prop/convolution.cpp
type_prop/convolution_backprop_data.cpp
type_prop/cos.cpp
type_prop/cosh.cpp
type_prop/ctc_greedy_decoder.cpp
type_prop/ctc_greedy_decoder_seq_len.cpp
type_prop/ctc_loss.cpp
type_prop/cum_sum.cpp
type_prop/deformable_convolution.cpp
type_prop/deformable_convolution_opset8.cpp
type_prop/deformable_psroi_pooling.cpp
type_prop/detection_output.cpp
type_prop/depth_to_space.cpp
type_prop/dft.cpp
type_prop/divide.cpp
type_prop/dyn_reshape.cpp
type_prop/einsum.cpp
type_prop/erf.cpp
type_prop/exp.cpp
type_prop/experimental_detectron_generate_proposals.cpp
type_prop/generate_proposals.cpp
type_prop/experimental_detectron_roi_feature_extractor.cpp
type_prop/experimental_detectron_topkrois.cpp
type_prop/strided_slice.cpp
type_prop/elu.cpp
type_prop/embeddingbag_offsetssum.cpp
type_prop/experimental_detectron_detection_output.cpp
type_prop/experimental_detectron_prior_grid_generator.cpp
type_prop/extractimagepatches.cpp
type_prop/embeddingbag_packedsum.cpp
type_prop/embedding_segments_sum.cpp
type_prop/eye.cpp
type_prop/fake_quantize.cpp
type_prop/floor.cpp
type_prop/floor_mod.cpp
type_prop/framework_node.cpp
type_prop/gather.cpp
type_prop/gather_elements.cpp
type_prop/gather_nd.cpp
type_prop/gather_tree.cpp
type_prop/gelu.cpp
type_prop/grid_sample.cpp
type_prop/grn.cpp
type_prop/group_convolution.cpp
type_prop/group_convolution_backprop_data.cpp
type_prop/gru_cell.cpp
type_prop/gru_sequence.cpp
type_prop/hard_sigmoid.cpp
type_prop/hsigmoid.cpp
type_prop/hswish.cpp
type_prop/idft.cpp
type_prop/if.cpp
type_prop/interpolate.cpp
type_prop/irdft.cpp
type_prop/is_finite.cpp
type_prop/is_inf.cpp
type_prop/is_nan.cpp
type_prop/logical_and.cpp
type_prop/logical_not.cpp
type_prop/logical_or.cpp
type_prop/logical_xor.cpp
type_prop/lrn.cpp
type_prop/lstm_cell.cpp
type_prop/lstm_sequence.cpp
type_prop/loop.cpp
type_prop/matmul.cpp
type_prop/matrix_nms.cpp
type_prop/maximum.cpp
type_prop/max_pool.cpp
type_prop/minimum.cpp
type_prop/mish.cpp
type_prop/mod.cpp
type_prop/multiclass_nms.cpp
type_prop/multiply.cpp
type_prop/mvn.cpp
type_prop/negative.cpp
type_prop/non_max_suppression.cpp
type_prop/non_zero.cpp
type_prop/normalize_l2.cpp
type_prop/one_hot.cpp
type_prop/pad.cpp
type_prop/parameter.cpp
type_prop/power.cpp
type_prop/prelu.cpp
type_prop/prior_box.cpp
type_prop/proposal.cpp
type_prop/psroi_pooling.cpp
type_prop/prior_box_clustered.cpp
type_prop/random_uniform.cpp
type_prop/range.cpp
type_prop/rdft.cpp
type_prop/read_value.cpp
type_prop/reduce_l1.cpp
type_prop/reduce_l2.cpp
type_prop/reduce_logical_and.cpp
type_prop/reduce_logical_or.cpp
type_prop/reduce_max.cpp
type_prop/reduce_mean.cpp
type_prop/reduce_min.cpp
type_prop/reduce_prod.cpp
type_prop/reduce_sum.cpp
type_prop/region_yolo.cpp
type_prop/relu.cpp
type_prop/reorg_yolo.cpp
type_prop/reshape.cpp
type_prop/result.cpp
type_prop/reverse.cpp
type_prop/reverse_sequence.cpp
type_prop/roi_align.cpp
type_prop/roi_pooling.cpp
type_prop/roll.cpp
type_prop/round.cpp
type_prop/rnn_cell.cpp
type_prop/rnn_sequence.cpp
type_prop/round.cpp
type_prop/scatter_elements_update.cpp
type_prop/scatter_nd_update.cpp
type_prop/scatter_update.cpp
type_prop/select.cpp
type_prop/selu.cpp
type_prop/shape_of.cpp
type_prop/shuffle_channels.cpp
type_prop/sigmoid.cpp
type_prop/sign.cpp
type_prop/sin.cpp
type_prop/sinh.cpp
type_prop/slice.cpp
type_prop/softmax.cpp
type_prop/softplus.cpp
type_prop/softsign.cpp
type_prop/space_to_batch.cpp
type_prop/space_to_depth.cpp
type_prop/split.cpp
type_prop/sqrt.cpp
type_prop/squared_difference.cpp
type_prop/squeeze.cpp
type_prop/subtract.cpp
type_prop/swish.cpp
type_prop/tan.cpp
type_prop/tanh.cpp
type_prop/tensor_iterator.cpp
type_prop/tile.cpp
type_prop/top_k.cpp
type_prop/transpose.cpp
type_prop/unary_elementwise.cpp
type_prop/unique.cpp
type_prop/unsqueeze.cpp
type_prop/variadic_split.cpp
type_prop_layers.cpp
visitors/partial_shape.cpp
visitors/dimension.cpp
visitors/user_op.cpp
visitors/value_map.cpp
visitors/op/abs.cpp
visitors/op/acos.cpp
visitors/op/acosh.cpp
visitors/op/adaptive_avg_pool.cpp
visitors/op/adaptive_max_pool.cpp
visitors/op/add.cpp
visitors/op/asin.cpp
visitors/op/asinh.cpp
visitors/op/assign.cpp
visitors/op/atan.cpp
visitors/op/atanh.cpp
visitors/op/avg_pool.cpp
visitors/op/batch_norm.cpp
visitors/op/batch_to_space.cpp
visitors/op/binary_convolution.cpp
visitors/op/broadcast.cpp
visitors/op/bucketize.cpp
visitors/op/ceiling.cpp
visitors/op/clamp.cpp
visitors/op/concat.cpp
visitors/op/constant.cpp
visitors/op/convert.cpp
visitors/op/convert_color_i420.cpp
visitors/op/convert_color_nv12.cpp
visitors/op/convolution_backprop.cpp
visitors/op/convolution.cpp
visitors/op/cos.cpp
visitors/op/cosh.cpp
visitors/op/ctc_greedy_decoder.cpp
visitors/op/ctc_greedy_decoder_seq_len.cpp
visitors/op/ctc_loss.cpp
visitors/op/cum_sum.cpp
visitors/op/deformable_convolution.cpp
visitors/op/deformable_psroi_pooling.cpp
visitors/op/depth_to_space.cpp
visitors/op/detection_output.cpp
visitors/op/dft.cpp
visitors/op/divide.cpp
visitors/op/einsum.cpp
visitors/op/elu.cpp
visitors/op/embedding_segments_sum.cpp
visitors/op/embeddingbag_offsetssum.cpp
visitors/op/embeddingbag_packedsum.cpp
visitors/op/equal.cpp
visitors/op/erf.cpp
visitors/op/exp.cpp
visitors/op/experimental_detectron_detection_output.cpp
visitors/op/experimental_detectron_generate_proposals.cpp
visitors/op/generate_proposals.cpp
visitors/op/grid_sample.cpp
visitors/op/experimental_detectron_prior_grid_generator.cpp
visitors/op/experimental_detectron_topkrois.cpp
visitors/op/eye.cpp
visitors/op/extractimagepatches.cpp
visitors/op/fake_quantize.cpp
visitors/op/floor_mod.cpp
visitors/op/floor.cpp
visitors/op/gather.cpp
visitors/op/gather_elements.cpp
visitors/op/gather_nd.cpp
visitors/op/gather_tree.cpp
visitors/op/gelu.cpp
visitors/op/greater_equal.cpp
visitors/op/greater.cpp
visitors/op/grn.cpp
visitors/op/gru_cell.cpp
visitors/op/gru_sequence.cpp
visitors/op/group_conv.cpp
visitors/op/hard_sigmoid.cpp
visitors/op/hsigmoid.cpp
visitors/op/hswish.cpp
visitors/op/interpolate.cpp
visitors/op/if.cpp
visitors/op/idft.cpp
visitors/op/irdft.cpp
visitors/op/is_inf.cpp
visitors/op/less_equal.cpp
visitors/op/less.cpp
visitors/op/log.cpp
visitors/op/log_softmax.cpp
visitors/op/logical_and.cpp
visitors/op/logical_or.cpp
visitors/op/logical_not.cpp
visitors/op/logical_xor.cpp
visitors/op/lrn.cpp
visitors/op/lstm_cell.cpp
visitors/op/lstm_sequence.cpp
visitors/op/matmul.cpp
visitors/op/matrix_nms.cpp
visitors/op/max_pool.cpp
visitors/op/maximum.cpp
visitors/op/minimum.cpp
visitors/op/mish.cpp
visitors/op/mod.cpp
visitors/op/multiclass_nms.cpp
visitors/op/multiply.cpp
visitors/op/mvn.cpp
visitors/op/negative.cpp
visitors/op/non_max_suppression.cpp
visitors/op/non_zero.cpp
visitors/op/normalize_l2.cpp
visitors/op/not_equal.cpp
visitors/op/one_hot.cpp
visitors/op/pad.cpp
visitors/op/parameter.cpp
visitors/op/power.cpp
visitors/op/prelu.cpp
visitors/op/prior_box.cpp
visitors/op/prior_box_clustered.cpp
visitors/op/proposal.cpp
visitors/op/psroi_pooling.cpp
visitors/op/random_uniform.cpp
visitors/op/range.cpp
visitors/op/rdft.cpp
visitors/op/read_value.cpp
visitors/op/reduce_l1.cpp
visitors/op/reduce_l2.cpp
visitors/op/reduce_logical_and.cpp
visitors/op/reduce_logical_or.cpp
visitors/op/reduce_max.cpp
visitors/op/reduce_mean.cpp
visitors/op/reduce_min.cpp
visitors/op/reduce_prod.cpp
visitors/op/reduce_sum.cpp
visitors/op/region_yolo.cpp
visitors/op/relu.cpp
visitors/op/reorg_yolo.cpp
visitors/op/reshape.cpp
visitors/op/result.cpp
visitors/op/reverse.cpp
visitors/op/reverse_sequence.cpp
visitors/op/rnn_cell.cpp
visitors/op/rnn_sequence.cpp
visitors/op/roi_pooling.cpp
visitors/op/roll.cpp
visitors/op/round.cpp
visitors/op/scatter_elements_update.cpp
visitors/op/scatter_update.cpp
visitors/op/select.cpp
visitors/op/space_to_depth.cpp
visitors/op/selu.cpp
visitors/op/shape_of.cpp
visitors/op/shuffle_channels.cpp
visitors/op/sigmoid.cpp
visitors/op/sign.cpp
visitors/op/sin.cpp
visitors/op/sinh.cpp
visitors/op/slice.cpp
visitors/op/softmax.cpp
visitors/op/softplus.cpp
visitors/op/softsign.cpp
visitors/op/space_to_batch.cpp
visitors/op/space_to_depth.cpp
visitors/op/split.cpp
visitors/op/sqrt.cpp
visitors/op/squared_difference.cpp
visitors/op/squeeze.cpp
visitors/op/strided_slice.cpp
visitors/op/subtract.cpp
visitors/op/swish.cpp
visitors/op/tan.cpp
visitors/op/tanh.cpp
visitors/op/tensor_iterator.cpp
visitors/op/tile.cpp
visitors/op/topk.cpp
visitors/op/transpose.cpp
visitors/op/unique.cpp
visitors/op/unsqueeze.cpp
visitors/op/variadic_split.cpp
uint4.cpp
validation_utils.cpp
)
# For type relaxed types
set_source_files_properties(${CMAKE_CURRENT_SOURCE_DIR}/threading.cpp
    PROPERTIES INCLUDE_DIRECTORIES $<TARGET_PROPERTY:inference_engine_transformations,INTERFACE_INCLUDE_DIRECTORIES>)
@@ -455,75 +41,66 @@ if(SUGGEST_OVERRIDE_SUPPORTED)
endif()

list(APPEND UNIT_TESTS_DEPENDENCIES openvino_template_extension)
+list(APPEND UNIT_TESTS_DEPENDENCIES template_extension)
+
+list(APPEND EXCLUDE_TESTS ${CMAKE_CURRENT_SOURCE_DIR}/frontend
+                          ${CMAKE_CURRENT_SOURCE_DIR}/dnnl.cpp)

if (ENABLE_TEMPLATE)
    list(APPEND UNIT_TESTS_DEPENDENCIES openvino_template_plugin)
-    set(OP_EVAL_TEST_SRC
-        # It should be a part of template plugin
-        op_eval/memory.cpp)
+else()
+    list(APPEND EXCLUDE_TESTS
+        # It should be a part of template plugin
+        ${CMAKE_CURRENT_SOURCE_DIR}/op_eval/memory.cpp)
endif()

-# SOURCE FOR FRONTEND TESTING
-file(GLOB FRONTEND_TESTS_SRC ${CMAKE_CURRENT_SOURCE_DIR}/frontend/frontend_manager.cpp
-                             ${CMAKE_CURRENT_SOURCE_DIR}/frontend/decoder_transformation_extension.cpp
-                             ${CMAKE_CURRENT_SOURCE_DIR}/frontend/progress_reporter.cpp)
-list(APPEND SRC ${FRONTEND_TESTS_SRC})
-
-foreach(src IN LISTS SRC OP_EVAL_TEST_SRC)
-    if(IS_ABSOLUTE "${src}")
-        list(APPEND full_src_names ${src})
-    else()
-        list(APPEND full_src_names "${CMAKE_CURRENT_SOURCE_DIR}/${src}")
-    endif()
-endforeach()
-add_clang_format_target(unit-test_clang FOR_SOURCES ${full_src_names})
-
-add_executable(ov_core_unit_tests ${SRC} ${OP_EVAL_TEST_SRC})
-
-add_dependencies(ov_core_unit_tests template_extension)
-
-target_include_directories(ov_core_unit_tests PRIVATE ${CMAKE_CURRENT_SOURCE_DIR})
+ov_add_test_target(
+    NAME ${TARGET_NAME}
+    ROOT ${CMAKE_CURRENT_SOURCE_DIR}
+    EXCLUDED_SOURCE_PATHS
+        ${EXCLUDE_TESTS}
+    DEPENDENCIES
+        ${UNIT_TESTS_DEPENDENCIES}
+        # process models
+        test_model_zoo
+    LINK_LIBRARIES
+        engines_test_util
+        commonTestUtils
+        ngraph_reference
+        ngraph::builder
+        openvino::util
+        ov_shape_inference
+        ${CMAKE_DL_LIBS}
+        Threads::Threads
+        openvino::conditional_compilation
+        openvino::runtime::dev
+    ADD_CLANG_FORMAT
+    LABELS
+        OV
+        IE
+        CORE
+)

get_target_property(OV_CORE_SRC_DIR ngraph_obj SOURCE_DIR)
-target_include_directories(ov_core_unit_tests PRIVATE ${OV_CORE_SRC_DIR}/src)
+target_include_directories(${TARGET_NAME} PRIVATE ${OV_CORE_SRC_DIR}/src
+                                                  ${CMAKE_CURRENT_SOURCE_DIR})

-target_compile_definitions(ov_core_unit_tests
+target_compile_definitions(${TARGET_NAME}
    PRIVATE
    SHARED_LIB_PREFIX="${CMAKE_SHARED_LIBRARY_PREFIX}"
    SHARED_LIB_SUFFIX="${IE_BUILD_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX}")

-add_dependencies(ov_core_unit_tests ${UNIT_TESTS_DEPENDENCIES})
-
-target_link_libraries(ov_core_unit_tests PRIVATE
-    engines_test_util
-    commonTestUtils
-    ngraph_reference
-    ngraph::builder
-    openvino::util
-    ov_shape_inference
-    ${CMAKE_DL_LIBS}
-    Threads::Threads
-    openvino::conditional_compilation
-    openvino::runtime::dev)
+add_dependencies(${TARGET_NAME} ${UNIT_TESTS_DEPENDENCIES})

if (ENABLE_OV_ONNX_FRONTEND)
-    target_compile_definitions(ov_core_unit_tests PRIVATE ENABLE_OV_ONNX_FRONTEND)
+    target_compile_definitions(${TARGET_NAME} PRIVATE ENABLE_OV_ONNX_FRONTEND)
endif()

if (OV_COMPILER_IS_CLANG)
-    target_compile_options(ov_core_unit_tests PRIVATE -Wno-undef -Wno-reserved-id-macro)
+    target_compile_options(${TARGET_NAME} PRIVATE -Wno-undef -Wno-reserved-id-macro)
endif()

if(ENABLE_OV_IR_FRONTEND)
-    add_dependencies(ov_core_unit_tests openvino_ir_frontend)
+    add_dependencies(${TARGET_NAME} openvino_ir_frontend)
endif()

-install(TARGETS ov_core_unit_tests
-        RUNTIME DESTINATION tests
-        COMPONENT tests
-        EXCLUDE_FROM_ALL)
-
add_subdirectory(frontend)
-
-# process models
-add_dependencies(ov_core_unit_tests test_model_zoo)
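The `ov_add_test_target()` helper used above is defined elsewhere in OpenVINO's developer CMake scripts; its implementation is not part of this commit. The sketch below is only a rough, hypothetical illustration of the kind of boilerplate such a helper wraps (collecting sources, linking, registering the binary with CTest, attaching the group labels, installing it); the function name is made up so it is not mistaken for the real helper, and the real one does more than this (e.g. clang-format integration):

```cmake
# Hypothetical sketch only; not the actual ov_add_test_target implementation.
function(sketch_add_test_target)
    # ADD_CLANG_FORMAT is accepted but ignored in this sketch.
    cmake_parse_arguments(ARG "ADD_CLANG_FORMAT" "NAME;ROOT"
        "DEPENDENCIES;LINK_LIBRARIES;LABELS;EXCLUDED_SOURCE_PATHS" ${ARGN})

    # Collect test sources under ROOT, dropping excluded paths.
    file(GLOB_RECURSE SOURCES "${ARG_ROOT}/*.cpp")
    foreach(excluded IN LISTS ARG_EXCLUDED_SOURCE_PATHS)
        list(FILTER SOURCES EXCLUDE REGEX "^${excluded}")
    endforeach()

    add_executable(${ARG_NAME} ${SOURCES})
    target_link_libraries(${ARG_NAME} PRIVATE ${ARG_LINK_LIBRARIES})
    if(ARG_DEPENDENCIES)
        add_dependencies(${ARG_NAME} ${ARG_DEPENDENCIES})
    endif()

    # Register the binary with CTest and attach the group labels;
    # this is what makes `ctest -L <GROUP>` filtering work.
    add_test(NAME ${ARG_NAME} COMMAND ${ARG_NAME})
    set_property(TEST ${ARG_NAME} PROPERTY LABELS ${ARG_LABELS})

    install(TARGETS ${ARG_NAME}
            RUNTIME DESTINATION tests
            COMPONENT tests
            EXCLUDE_FROM_ALL)
endfunction()
```

The key pieces for this PR are the `add_test`/`set_property(... LABELS ...)` pair, which the ONNX frontend target below adds explicitly instead of going through the helper.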

View File

@@ -4,9 +4,9 @@
#pragma once

-#include "openvino/frontend/manager.hpp"
#include "openvino/core/except.hpp"
#include "openvino/core/model.hpp"
+#include "openvino/frontend/manager.hpp"

namespace ov {
namespace test {

View File

@@ -4,10 +4,10 @@
#include <gtest/gtest.h>

+#include "common_test_utils/graph_comparator.hpp"
#include "openvino/opsets/opset8.hpp"
#include "openvino/pass/serialize.hpp"
#include "read_ir.hpp"
-#include "common_test_utils/graph_comparator.hpp"
#include "util/test_common.hpp"

class TensorNameSerializationTest : public ov::test::TestsCommon {

View File

@ -147,7 +147,7 @@ TYPED_TEST_P(ArithmeticOperator, shape_inference_4D_x_3D_numpy_broadcast) {
} }
TYPED_TEST_P(ArithmeticOperator, static_shape_pdpd_doc_examples) { TYPED_TEST_P(ArithmeticOperator, static_shape_pdpd_doc_examples) {
// TODO: PDPD broadcast review, ticket: 93618 // TODO: PDPD broadcast review, ticket: 93618
{ {
auto A = std::make_shared<op::Parameter>(element::f32, Shape{2, 3, 4, 5}); auto A = std::make_shared<op::Parameter>(element::f32, Shape{2, 3, 4, 5});
auto B = std::make_shared<op::Parameter>(element::f32, Shape{3, 4}); auto B = std::make_shared<op::Parameter>(element::f32, Shape{3, 4});
@ -315,14 +315,13 @@ TYPED_TEST_P(ArithmeticOperator, dynamic_shape_intervals_equal_rank_broadcast_nu
EXPECT_EQ(op->get_element_type(), element::f32); EXPECT_EQ(op->get_element_type(), element::f32);
EXPECT_EQ(op->get_output_partial_shape(0), EXPECT_EQ(op->get_output_partial_shape(0),
(PartialShape{Dimension(1, 3), Dimension(2, 7), -1, Dimension(4, 8), -1, Dimension(4, 8), -1, 1, 3})); (PartialShape{Dimension(1, 3), Dimension(2, 7), -1, Dimension(4, 8), -1, Dimension(4, 8), -1, 1, 3}));
} }
TYPED_TEST_P(ArithmeticOperator, dynamic_shape_intervals_a_rank_smaller_broadcast_numpy) { TYPED_TEST_P(ArithmeticOperator, dynamic_shape_intervals_a_rank_smaller_broadcast_numpy) {
// `A` rank smaller // `A` rank smaller
auto A = std::make_shared<op::Parameter>(element::f32, auto A =
PartialShape{Dimension(1, 3), Dimension(4, 8), -1, 1, -1, 1, 3}); std::make_shared<op::Parameter>(element::f32, PartialShape{Dimension(1, 3), Dimension(4, 8), -1, 1, -1, 1, 3});
auto B = std::make_shared<op::Parameter>( auto B = std::make_shared<op::Parameter>(
element::f32, element::f32,
PartialShape{Dimension(1, 3), Dimension(2, 7), -1, 1, Dimension(1, 3), Dimension(4, 8), -1, 1, 3}); PartialShape{Dimension(1, 3), Dimension(2, 7), -1, 1, Dimension(1, 3), Dimension(4, 8), -1, 1, 3});
@ -331,7 +330,7 @@ TYPED_TEST_P(ArithmeticOperator, dynamic_shape_intervals_a_rank_smaller_broadcas
EXPECT_EQ(op->get_element_type(), element::f32); EXPECT_EQ(op->get_element_type(), element::f32);
EXPECT_EQ(op->get_output_partial_shape(0), EXPECT_EQ(op->get_output_partial_shape(0),
(PartialShape{Dimension(1, 3), Dimension(2, 7), -1, Dimension(4, 8), -1, Dimension(4, 8), -1, 1, 3})); (PartialShape{Dimension(1, 3), Dimension(2, 7), -1, Dimension(4, 8), -1, Dimension(4, 8), -1, 1, 3}));
} }
TYPED_TEST_P(ArithmeticOperator, dynamic_shape_intervals_b_rank_smaller_broadcast_numpy) { TYPED_TEST_P(ArithmeticOperator, dynamic_shape_intervals_b_rank_smaller_broadcast_numpy) {
@ -339,18 +338,18 @@ TYPED_TEST_P(ArithmeticOperator, dynamic_shape_intervals_b_rank_smaller_broadcas
auto A = std::make_shared<op::Parameter>( auto A = std::make_shared<op::Parameter>(
element::f32, element::f32,
PartialShape{Dimension(1, 3), Dimension(2, 7), -1, 1, Dimension(1, 3), Dimension(4, 8), -1, 1, 3}); PartialShape{Dimension(1, 3), Dimension(2, 7), -1, 1, Dimension(1, 3), Dimension(4, 8), -1, 1, 3});
auto B = std::make_shared<op::Parameter>(element::f32, auto B =
PartialShape{Dimension(1, 3), Dimension(4, 8), -1, 1, -1, 1, 3}); std::make_shared<op::Parameter>(element::f32, PartialShape{Dimension(1, 3), Dimension(4, 8), -1, 1, -1, 1, 3});
const auto op = std::make_shared<TypeParam>(A, B); const auto op = std::make_shared<TypeParam>(A, B);
EXPECT_EQ(op->get_element_type(), element::f32); EXPECT_EQ(op->get_element_type(), element::f32);
EXPECT_EQ(op->get_output_partial_shape(0), EXPECT_EQ(op->get_output_partial_shape(0),
(PartialShape{Dimension(1, 3), Dimension(2, 7), -1, Dimension(4, 8), -1, Dimension(4, 8), -1, 1, 3})); (PartialShape{Dimension(1, 3), Dimension(2, 7), -1, Dimension(4, 8), -1, Dimension(4, 8), -1, 1, 3}));
} }
TYPED_TEST_P(ArithmeticOperator, dynamic_shape_intervals_broadcast_pdpd) { TYPED_TEST_P(ArithmeticOperator, dynamic_shape_intervals_broadcast_pdpd) {
// TODO: PDPD broadcast review, ticket: 93618 // TODO: PDPD broadcast review, ticket: 93618
{ // Equal rank { // Equal rank
auto A = std::make_shared<op::Parameter>( auto A = std::make_shared<op::Parameter>(
element::f32, element::f32,

View File

@@ -8,7 +8,6 @@
#include "openvino/op/op.hpp"
#include "openvino/opsets/opset8.hpp"
-
using namespace ov;

template <class T>

View File

@@ -8,7 +8,6 @@
#include "openvino/op/op.hpp"
#include "openvino/opsets/opset8.hpp"
-
using namespace ov;

template <class T>

View File

@ -9,52 +9,43 @@
using namespace std; using namespace std;
using namespace ngraph; using namespace ngraph;
TEST(type_prop, log_softmax) TEST(type_prop, log_softmax) {
{
auto data = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6}); auto data = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6});
auto log_softmax_func = make_shared<op::v5::LogSoftmax>(data, 1); auto log_softmax_func = make_shared<op::v5::LogSoftmax>(data, 1);
EXPECT_EQ(log_softmax_func->get_element_type(), element::f32); EXPECT_EQ(log_softmax_func->get_element_type(), element::f32);
EXPECT_EQ(log_softmax_func->get_shape(), (Shape{1, 3, 6})); EXPECT_EQ(log_softmax_func->get_shape(), (Shape{1, 3, 6}));
} }
TEST(type_prop, log_softmax_incorrect_axis) TEST(type_prop, log_softmax_incorrect_axis) {
{
const auto data = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6}); const auto data = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6});
try try {
{
auto log_softmax_func = make_shared<op::v5::LogSoftmax>(data, 3); auto log_softmax_func = make_shared<op::v5::LogSoftmax>(data, 3);
FAIL() << "LogSoftmax node was created with incorrect axis."; FAIL() << "LogSoftmax node was created with incorrect axis.";
} } catch (const NodeValidationFailure& error) {
catch (const NodeValidationFailure& error) EXPECT_HAS_SUBSTRING(error.what(), std::string("Reduction axis (3) is out of bounds"));
{
EXPECT_HAS_SUBSTRING(
error.what(),
std::string("Reduction axis (3) is out of bounds");
} }
} }
TEST(type_prop, log_softmax_partial) // TEST(type_prop, log_softmax_partial)
{ // {
// auto data = make_shared<op::Parameter>(element::f32, PartialShape{1, Dimension::dynamic(), 6});
// auto log_softmax_func = make_shared<op::v5::LogSoftmax>(data, 1);
// EXPECT_EQ(log_softmax_func->get_element_type(), element::f32);
// ASSERT_TRUE(log_softmax_func->get_output_partial_shape(0).same_scheme(
// (PartialShape{1, Dimension::dynamic(), 6})));
//
// // rank unknown
// auto log_softmax_partial = make_shared<op::v5::LogSoftmax>(
// make_shared<op::Parameter>(element::f32, PartialShape::dynamic()));
// ASSERT_TRUE(
// log_softmax_partial->get_output_partial_shape(0).same_scheme(PartialShape::dynamic()));
// }
TEST(type_prop, log_softmax_partial_static_rank) {
auto data = make_shared<op::Parameter>(element::f32, PartialShape{1, Dimension::dynamic(), 6}); auto data = make_shared<op::Parameter>(element::f32, PartialShape{1, Dimension::dynamic(), 6});
auto log_softmax_func = make_shared<op::v5::LogSoftmax>(data, 1); auto log_softmax_func = make_shared<op::v5::LogSoftmax>(data, 1);
EXPECT_EQ(log_softmax_func->get_element_type(), element::f32); EXPECT_EQ(log_softmax_func->get_element_type(), element::f32);
ASSERT_TRUE(log_softmax_func->get_output_partial_shape(0).same_scheme( ASSERT_TRUE(log_softmax_func->get_output_partial_shape(0).same_scheme((PartialShape{1, Dimension::dynamic(), 6})));
(PartialShape{1, Dimension::dynamic(), 6})));
// rank unknown
auto log_softmax_partial = make_shared<op::v5::LogSoftmax>(
make_shared<op::Parameter>(element::f32, PartialShape::dynamic()));
ASSERT_TRUE(
log_softmax_partial->get_output_partial_shape(0).same_scheme(PartialShape::dynamic()));
}
TEST(type_prop, log_softmax_partial_static_rank)
{
auto data = make_shared<op::Parameter>(element::f32, PartialShape{1, Dimension::dynamic(), 6});
auto log_softmax_func = make_shared<op::v5::LogSoftmax>(data, 1);
EXPECT_EQ(log_softmax_func->get_element_type(), element::f32);
ASSERT_TRUE(log_softmax_func->get_output_partial_shape(0).same_scheme(
(PartialShape{1, Dimension::dynamic(), 6})));
ASSERT_TRUE(log_softmax_func->get_output_partial_shape(0).rank().is_static()); ASSERT_TRUE(log_softmax_func->get_output_partial_shape(0).rank().is_static());
} }

View File

@ -9,24 +9,19 @@
#include "util/type_prop.hpp" #include "util/type_prop.hpp"
template <typename T, ngraph::element::Type_t ELEMENT_TYPE> template <typename T, ngraph::element::Type_t ELEMENT_TYPE>
class LogicalOperatorType class LogicalOperatorType {
{
public: public:
using op_type = T; using op_type = T;
static constexpr ngraph::element::Type_t element_type = ELEMENT_TYPE; static constexpr ngraph::element::Type_t element_type = ELEMENT_TYPE;
}; };
template <typename T> template <typename T>
class LogicalOperatorTypeProp : public testing::Test class LogicalOperatorTypeProp : public testing::Test {};
{
};
class LogicalOperatorTypeName class LogicalOperatorTypeName {
{
public: public:
template <typename T> template <typename T>
static std::string GetName(int) static std::string GetName(int) {
{
using OP_Type = typename T::op_type; using OP_Type = typename T::op_type;
const ngraph::Node::type_info_t typeinfo = OP_Type::get_type_info_static(); const ngraph::Node::type_info_t typeinfo = OP_Type::get_type_info_static();
return typeinfo.name; return typeinfo.name;
@ -35,77 +30,59 @@ public:
TYPED_TEST_SUITE_P(LogicalOperatorTypeProp); TYPED_TEST_SUITE_P(LogicalOperatorTypeProp);
namespace namespace {
{ template <typename T>
template <typename T> void incorrect_init(const ngraph::element::Type& type,
void incorrect_init(const ngraph::element::Type& type, const std::string& err,
const std::string& err, const ngraph::Shape& shape1 = {1, 3, 6},
const ngraph::Shape& shape1 = {1, 3, 6}, const ngraph::Shape& shape2 = {1, 3, 6}) {
const ngraph::Shape& shape2 = {1, 3, 6}) auto input1 = std::make_shared<ngraph::op::Parameter>(type, shape1);
{ auto input2 = std::make_shared<ngraph::op::Parameter>(type, shape2);
auto input1 = std::make_shared<ngraph::op::Parameter>(type, shape1); try {
auto input2 = std::make_shared<ngraph::op::Parameter>(type, shape2); auto op = std::make_shared<T>(input1, input2);
try } catch (const ngraph::NodeValidationFailure& error) {
{ EXPECT_HAS_SUBSTRING(error.what(), err);
auto op = std::make_shared<T>(input1, input2);
}
catch (const ngraph::NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(), err);
}
} }
} // namespace }
} // namespace
TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_type_f32) TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_type_f32) {
{
using OP_Type = typename TypeParam::op_type; using OP_Type = typename TypeParam::op_type;
incorrect_init<OP_Type>( incorrect_init<OP_Type>(ngraph::element::f32,
ngraph::element::f32, "Operands for logical operators must have boolean element type but have element type f32");
"Operands for logical operators must have boolean element type but have element type f32");
} }
TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_type_f64) TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_type_f64) {
{
using OP_Type = typename TypeParam::op_type; using OP_Type = typename TypeParam::op_type;
incorrect_init<OP_Type>( incorrect_init<OP_Type>(ngraph::element::f64,
ngraph::element::f64, "Operands for logical operators must have boolean element type but have element type f64");
"Operands for logical operators must have boolean element type but have element type f64");
} }
TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_type_i32) TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_type_i32) {
{
using OP_Type = typename TypeParam::op_type; using OP_Type = typename TypeParam::op_type;
incorrect_init<OP_Type>( incorrect_init<OP_Type>(ngraph::element::i32,
ngraph::element::i32, "Operands for logical operators must have boolean element type but have element type i32");
"Operands for logical operators must have boolean element type but have element type i32");
} }
TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_type_i64) TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_type_i64) {
{
using OP_Type = typename TypeParam::op_type; using OP_Type = typename TypeParam::op_type;
incorrect_init<OP_Type>( incorrect_init<OP_Type>(ngraph::element::i64,
ngraph::element::i64, "Operands for logical operators must have boolean element type but have element type i64");
"Operands for logical operators must have boolean element type but have element type i64");
} }
TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_type_u32) TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_type_u32) {
{
using OP_Type = typename TypeParam::op_type; using OP_Type = typename TypeParam::op_type;
incorrect_init<OP_Type>( incorrect_init<OP_Type>(ngraph::element::u32,
ngraph::element::u32, "Operands for logical operators must have boolean element type but have element type u32");
"Operands for logical operators must have boolean element type but have element type u32");
} }
TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_type_u64) TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_type_u64) {
{
using OP_Type = typename TypeParam::op_type; using OP_Type = typename TypeParam::op_type;
incorrect_init<OP_Type>( incorrect_init<OP_Type>(ngraph::element::u64,
ngraph::element::u64, "Operands for logical operators must have boolean element type but have element type u64");
"Operands for logical operators must have boolean element type but have element type u64");
} }
TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_shape) TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_shape) {
{
using OP_Type = typename TypeParam::op_type; using OP_Type = typename TypeParam::op_type;
incorrect_init<OP_Type>(ngraph::element::boolean, incorrect_init<OP_Type>(ngraph::element::boolean,
"Argument shapes are inconsistent", "Argument shapes are inconsistent",
@ -113,14 +90,11 @@ TYPED_TEST_P(LogicalOperatorTypeProp, incorrect_shape)
ngraph::Shape{1, 2, 3}); ngraph::Shape{1, 2, 3});
} }
TYPED_TEST_P(LogicalOperatorTypeProp, broadcast) TYPED_TEST_P(LogicalOperatorTypeProp, broadcast) {
{
using OP_Type = typename TypeParam::op_type; using OP_Type = typename TypeParam::op_type;
auto input1 = auto input1 = std::make_shared<ngraph::op::Parameter>(ngraph::element::boolean, ngraph::Shape{1, 1, 6});
std::make_shared<ngraph::op::Parameter>(ngraph::element::boolean, ngraph::Shape{1, 1, 6}); auto input2 = std::make_shared<ngraph::op::Parameter>(ngraph::element::boolean, ngraph::Shape{1, 3, 1});
auto input2 =
std::make_shared<ngraph::op::Parameter>(ngraph::element::boolean, ngraph::Shape{1, 3, 1});
auto logical_and = std::make_shared<OP_Type>(input1, input2); auto logical_and = std::make_shared<OP_Type>(input1, input2);

View File

@ -9,8 +9,7 @@
using namespace std; using namespace std;
using namespace ngraph; using namespace ngraph;
struct ReduceParams struct ReduceParams {
{
PartialShape data_ps; PartialShape data_ps;
element::Type data_et; element::Type data_et;
Shape axes_ps; Shape axes_ps;
@ -20,18 +19,13 @@ struct ReduceParams
}; };
template <class T> template <class T>
static std::shared_ptr<Node> makeReduceOp(const ReduceParams& p, bool axes_as_param = false) static std::shared_ptr<Node> makeReduceOp(const ReduceParams& p, bool axes_as_param = false) {
{
auto in_data = make_shared<op::Parameter>(p.data_et, p.data_ps); auto in_data = make_shared<op::Parameter>(p.data_et, p.data_ps);
shared_ptr<Node> in_axes; shared_ptr<Node> in_axes;
if (axes_as_param) if (axes_as_param) {
{
in_axes = make_shared<op::Parameter>(p.axes_et, p.axes_ps); in_axes = make_shared<op::Parameter>(p.axes_et, p.axes_ps);
} } else {
else if (shape_size(p.axes_ps) != p.axes.size()) {
{
if (shape_size(p.axes_ps) != p.axes.size())
{
throw ngraph_error("Axes shape does not match with axes elements"); throw ngraph_error("Axes shape does not match with axes elements");
} }
in_axes = make_shared<op::Constant>(p.axes_et, p.axes_ps, p.axes); in_axes = make_shared<op::Constant>(p.axes_et, p.axes_ps, p.axes);
@ -40,14 +34,11 @@ static std::shared_ptr<Node> makeReduceOp(const ReduceParams& p, bool axes_as_pa
} }
template <class T> template <class T>
class ReduceTest : public testing::Test class ReduceTest : public testing::Test {};
{
};
TYPED_TEST_SUITE_P(ReduceTest); TYPED_TEST_SUITE_P(ReduceTest);
TYPED_TEST_P(ReduceTest, reduce_basic_shape_infer) TYPED_TEST_P(ReduceTest, reduce_basic_shape_infer) {
{
PartialShape data_ps{3, 4, 5}; PartialShape data_ps{3, 4, 5};
element::Type data_et = element::dynamic; element::Type data_et = element::dynamic;
@ -64,8 +55,7 @@ TYPED_TEST_P(ReduceTest, reduce_basic_shape_infer)
ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps); ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps);
} }
TYPED_TEST_P(ReduceTest, reduce_basic_shape_infer_keep_dims) TYPED_TEST_P(ReduceTest, reduce_basic_shape_infer_keep_dims) {
{
PartialShape data_ps{3, 4, 5}; PartialShape data_ps{3, 4, 5};
element::Type data_et = element::dynamic; element::Type data_et = element::dynamic;
@ -82,8 +72,7 @@ TYPED_TEST_P(ReduceTest, reduce_basic_shape_infer_keep_dims)
ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps); ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps);
} }
TYPED_TEST_P(ReduceTest, reduce_basic_shape_infer_scalar_axis) TYPED_TEST_P(ReduceTest, reduce_basic_shape_infer_scalar_axis) {
{
PartialShape data_ps{3, 4, 5}; PartialShape data_ps{3, 4, 5};
element::Type data_et = element::dynamic; element::Type data_et = element::dynamic;
@ -100,8 +89,7 @@ TYPED_TEST_P(ReduceTest, reduce_basic_shape_infer_scalar_axis)
ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps); ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps);
} }
TYPED_TEST_P(ReduceTest, reduce_basic_shape_infer_axes_as_param) TYPED_TEST_P(ReduceTest, reduce_basic_shape_infer_axes_as_param) {
{
PartialShape data_ps{3, 4, 5}; PartialShape data_ps{3, 4, 5};
element::Type data_et = element::dynamic; element::Type data_et = element::dynamic;
@ -119,8 +107,7 @@ TYPED_TEST_P(ReduceTest, reduce_basic_shape_infer_axes_as_param)
ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps); ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps);
} }
TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_reduced_axes_static) TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_reduced_axes_static) {
{
PartialShape data_ps{3, 4, 5, Dimension::dynamic()}; PartialShape data_ps{3, 4, 5, Dimension::dynamic()};
element::Type data_et = element::dynamic; element::Type data_et = element::dynamic;
@ -137,8 +124,7 @@ TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_reduced_axes_static)
ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps); ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps);
} }
TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_reduced_axes_static_keep_dims) TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_reduced_axes_static_keep_dims) {
{
PartialShape data_ps{3, 4, 5, Dimension::dynamic()}; PartialShape data_ps{3, 4, 5, Dimension::dynamic()};
element::Type data_et = element::dynamic; element::Type data_et = element::dynamic;
@ -155,8 +141,7 @@ TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_reduced_axes_static_keep_dims)
ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps); ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps);
} }
TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_reduced_axes_not_static) TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_reduced_axes_not_static) {
{
PartialShape data_ps{Dimension::dynamic(), 4, 5, Dimension::dynamic()}; PartialShape data_ps{Dimension::dynamic(), 4, 5, Dimension::dynamic()};
element::Type data_et = element::dynamic; element::Type data_et = element::dynamic;
@ -173,8 +158,7 @@ TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_reduced_axes_not_static)
ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps); ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps);
} }
TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_reduced_axes_not_static_keep_dims) TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_reduced_axes_not_static_keep_dims) {
{
PartialShape data_ps{Dimension::dynamic(), 4, 5, Dimension::dynamic()}; PartialShape data_ps{Dimension::dynamic(), 4, 5, Dimension::dynamic()};
element::Type data_et = element::dynamic; element::Type data_et = element::dynamic;
@ -191,8 +175,7 @@ TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_reduced_axes_not_static_keep_dims)
ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps); ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps);
} }
TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_data) TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_data) {
{
PartialShape data_ps{PartialShape::dynamic()}; PartialShape data_ps{PartialShape::dynamic()};
element::Type data_et = element::dynamic; element::Type data_et = element::dynamic;
@ -209,8 +192,7 @@ TYPED_TEST_P(ReduceTest, reduce_dynamic_shape_data)
ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps); ASSERT_EQ(reduce_op->get_output_partial_shape(0), out_ps);
} }
TYPED_TEST_P(ReduceTest, reduce_invalid_axis_out_of_range) TYPED_TEST_P(ReduceTest, reduce_invalid_axis_out_of_range) {
{
PartialShape data_ps{1, 2, 3}; PartialShape data_ps{1, 2, 3};
element::Type data_et = element::dynamic; element::Type data_et = element::dynamic;
@ -221,23 +203,17 @@ TYPED_TEST_P(ReduceTest, reduce_invalid_axis_out_of_range)
bool keep_dims = false; bool keep_dims = false;
const ReduceParams params{data_ps, data_et, axes_ps, axes, axes_et, keep_dims}; const ReduceParams params{data_ps, data_et, axes_ps, axes, axes_et, keep_dims};
try try {
{
auto reduce_op = makeReduceOp<TypeParam>(params); auto reduce_op = makeReduceOp<TypeParam>(params);
FAIL() << "Invalid axes values not detected"; FAIL() << "Invalid axes values not detected";
} } catch (const NodeValidationFailure& error) {
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(), "out of the tensor rank range"); EXPECT_HAS_SUBSTRING(error.what(), "out of the tensor rank range");
} } catch (...) {
catch (...)
{
FAIL() << "Axes input values validation check failed for unexpected reason"; FAIL() << "Axes input values validation check failed for unexpected reason";
} }
} }
TYPED_TEST_P(ReduceTest, reduce_invalid_axes_shape) TYPED_TEST_P(ReduceTest, reduce_invalid_axes_shape) {
{
PartialShape data_ps{1, 2, 3}; PartialShape data_ps{1, 2, 3};
element::Type data_et = element::dynamic; element::Type data_et = element::dynamic;
@ -248,23 +224,17 @@ TYPED_TEST_P(ReduceTest, reduce_invalid_axes_shape)
bool keep_dims = true; bool keep_dims = true;
const ReduceParams params{data_ps, data_et, axes_ps, axes, axes_et, keep_dims}; const ReduceParams params{data_ps, data_et, axes_ps, axes, axes_et, keep_dims};
try try {
{
auto reduce_op = makeReduceOp<TypeParam>(params); auto reduce_op = makeReduceOp<TypeParam>(params);
FAIL() << "Invalid shape of axes input not detected"; FAIL() << "Invalid shape of axes input not detected";
} } catch (const NodeValidationFailure& error) {
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(), "Axes input must be a scalar or 1D input."); EXPECT_HAS_SUBSTRING(error.what(), "Axes input must be a scalar or 1D input.");
} } catch (...) {
catch (...)
{
FAIL() << "Axes input shape validation check failed for unexpected reason"; FAIL() << "Axes input shape validation check failed for unexpected reason";
} }
} }
TYPED_TEST_P(ReduceTest, reduce_invalid_axes_et) TYPED_TEST_P(ReduceTest, reduce_invalid_axes_et) {
{
element::Type data_et = element::dynamic; element::Type data_et = element::dynamic;
PartialShape data_ps{1, 2, 3}; PartialShape data_ps{1, 2, 3};
@ -275,17 +245,12 @@ TYPED_TEST_P(ReduceTest, reduce_invalid_axes_et)
bool keep_dims = true; bool keep_dims = true;
const ReduceParams params{data_ps, data_et, axes_ps, axes, axes_et, keep_dims}; const ReduceParams params{data_ps, data_et, axes_ps, axes, axes_et, keep_dims};
try try {
{
auto reduce_op = makeReduceOp<TypeParam>(params); auto reduce_op = makeReduceOp<TypeParam>(params);
FAIL() << "Invalid element type of axes input not detected"; FAIL() << "Invalid element type of axes input not detected";
} } catch (const NodeValidationFailure& error) {
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(), "Element type of axes input must be integer."); EXPECT_HAS_SUBSTRING(error.what(), "Element type of axes input must be integer.");
} } catch (...) {
catch (...)
{
FAIL() << "Axes input element type validation check failed for unexpected reason"; FAIL() << "Axes input element type validation check failed for unexpected reason";
} }
} }
@ -305,14 +270,11 @@ REGISTER_TYPED_TEST_SUITE_P(ReduceTest,
reduce_invalid_axes_et); reduce_invalid_axes_et);
template <class T> template <class T>
class ReduceArithmeticTest : public testing::Test class ReduceArithmeticTest : public testing::Test {};
{
};
TYPED_TEST_SUITE_P(ReduceArithmeticTest); TYPED_TEST_SUITE_P(ReduceArithmeticTest);
TYPED_TEST_P(ReduceArithmeticTest, reduce_arithmetic_invalid_data_et) TYPED_TEST_P(ReduceArithmeticTest, reduce_arithmetic_invalid_data_et) {
{
element::Type data_et = element::boolean; element::Type data_et = element::boolean;
PartialShape data_ps{1, 2, 3}; PartialShape data_ps{1, 2, 3};
@ -323,17 +285,12 @@ TYPED_TEST_P(ReduceArithmeticTest, reduce_arithmetic_invalid_data_et)
bool keep_dims = true; bool keep_dims = true;
const ReduceParams params{data_ps, data_et, axes_ps, axes, axes_et, keep_dims}; const ReduceParams params{data_ps, data_et, axes_ps, axes, axes_et, keep_dims};
try try {
{
auto reduce_op = makeReduceOp<TypeParam>(params); auto reduce_op = makeReduceOp<TypeParam>(params);
FAIL() << "Invalid element type of data input not detected"; FAIL() << "Invalid element type of data input not detected";
} } catch (const NodeValidationFailure& error) {
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(), "Element type of data input must be numeric."); EXPECT_HAS_SUBSTRING(error.what(), "Element type of data input must be numeric.");
} } catch (...) {
catch (...)
{
FAIL() << "Data input element type validation check failed for unexpected reason"; FAIL() << "Data input element type validation check failed for unexpected reason";
} }
} }
@ -341,14 +298,11 @@ TYPED_TEST_P(ReduceArithmeticTest, reduce_arithmetic_invalid_data_et)
REGISTER_TYPED_TEST_SUITE_P(ReduceArithmeticTest, reduce_arithmetic_invalid_data_et); REGISTER_TYPED_TEST_SUITE_P(ReduceArithmeticTest, reduce_arithmetic_invalid_data_et);
template <class T> template <class T>
class ReduceLogicalTest : public testing::Test class ReduceLogicalTest : public testing::Test {};
{
};
TYPED_TEST_SUITE_P(ReduceLogicalTest); TYPED_TEST_SUITE_P(ReduceLogicalTest);
TYPED_TEST_P(ReduceLogicalTest, reduce_logical_invalid_data_et) TYPED_TEST_P(ReduceLogicalTest, reduce_logical_invalid_data_et) {
{
std::vector<element::Type> element_types{element::f32, element::i32, element::u32}; std::vector<element::Type> element_types{element::f32, element::i32, element::u32};
PartialShape data_ps{1, 2, 3}; PartialShape data_ps{1, 2, 3};
@ -358,20 +312,14 @@ TYPED_TEST_P(ReduceLogicalTest, reduce_logical_invalid_data_et)
bool keep_dims = true; bool keep_dims = true;
for (const auto& data_et : element_types) for (const auto& data_et : element_types) {
{
const ReduceParams params{data_ps, data_et, axes_ps, axes, axes_et, keep_dims}; const ReduceParams params{data_ps, data_et, axes_ps, axes, axes_et, keep_dims};
try try {
{
auto reduce_op = makeReduceOp<TypeParam>(params); auto reduce_op = makeReduceOp<TypeParam>(params);
FAIL() << "Invalid element type of data input not detected"; FAIL() << "Invalid element type of data input not detected";
} } catch (const NodeValidationFailure& error) {
catch (const NodeValidationFailure& error)
{
EXPECT_HAS_SUBSTRING(error.what(), "Element type of data input must be boolean."); EXPECT_HAS_SUBSTRING(error.what(), "Element type of data input must be boolean.");
} } catch (...) {
catch (...)
{
FAIL() << "Data input element type validation check failed for unexpected reason"; FAIL() << "Data input element type validation check failed for unexpected reason";
} }
} }

View File

@@ -7,7 +7,6 @@
#include "gtest/gtest.h"
#include "ngraph/ngraph.hpp"
-
using namespace ngraph;

template <class T>

View File

@ -8,24 +8,19 @@
#include "util/visitor.hpp" #include "util/visitor.hpp"
template <typename T, ngraph::element::Type_t ELEMENT_TYPE> template <typename T, ngraph::element::Type_t ELEMENT_TYPE>
class BinaryOperatorType class BinaryOperatorType {
{
public: public:
using op_type = T; using op_type = T;
static constexpr ngraph::element::Type_t element_type = ELEMENT_TYPE; static constexpr ngraph::element::Type_t element_type = ELEMENT_TYPE;
}; };
template <typename T> template <typename T>
class BinaryOperatorVisitor : public testing::Test class BinaryOperatorVisitor : public testing::Test {};
{
};
class BinaryOperatorTypeName class BinaryOperatorTypeName {
{
public: public:
template <typename T> template <typename T>
static std::string GetName(int) static std::string GetName(int) {
{
using OP_Type = typename T::op_type; using OP_Type = typename T::op_type;
constexpr ngraph::element::Type precision(T::element_type); constexpr ngraph::element::Type precision(T::element_type);
const ngraph::Node::type_info_t typeinfo = OP_Type::get_type_info_static(); const ngraph::Node::type_info_t typeinfo = OP_Type::get_type_info_static();
@ -35,16 +30,13 @@ public:
TYPED_TEST_SUITE_P(BinaryOperatorVisitor); TYPED_TEST_SUITE_P(BinaryOperatorVisitor);
TYPED_TEST_P(BinaryOperatorVisitor, Auto_Broadcast) TYPED_TEST_P(BinaryOperatorVisitor, Auto_Broadcast) {
{
using OP_Type = typename TypeParam::op_type; using OP_Type = typename TypeParam::op_type;
const ngraph::element::Type_t element_type = TypeParam::element_type; const ngraph::element::Type_t element_type = TypeParam::element_type;
ngraph::test::NodeBuilder::get_ops().register_factory<OP_Type>(); ngraph::test::NodeBuilder::get_ops().register_factory<OP_Type>();
const auto A = const auto A = std::make_shared<ngraph::op::Parameter>(element_type, ngraph::PartialShape{1, 2, 3});
std::make_shared<ngraph::op::Parameter>(element_type, ngraph::PartialShape{1, 2, 3}); const auto B = std::make_shared<ngraph::op::Parameter>(element_type, ngraph::PartialShape{3, 2, 1});
const auto B =
std::make_shared<ngraph::op::Parameter>(element_type, ngraph::PartialShape{3, 2, 1});
auto auto_broadcast = ngraph::op::AutoBroadcastType::NUMPY; auto auto_broadcast = ngraph::op::AutoBroadcastType::NUMPY;
@ -57,16 +49,13 @@ TYPED_TEST_P(BinaryOperatorVisitor, Auto_Broadcast)
EXPECT_EQ(op_func->get_autob(), g_op_func->get_autob()); EXPECT_EQ(op_func->get_autob(), g_op_func->get_autob());
} }
TYPED_TEST_P(BinaryOperatorVisitor, No_Broadcast) TYPED_TEST_P(BinaryOperatorVisitor, No_Broadcast) {
{
using OP_Type = typename TypeParam::op_type; using OP_Type = typename TypeParam::op_type;
const ngraph::element::Type_t element_type = TypeParam::element_type; const ngraph::element::Type_t element_type = TypeParam::element_type;
ngraph::test::NodeBuilder::get_ops().register_factory<OP_Type>(); ngraph::test::NodeBuilder::get_ops().register_factory<OP_Type>();
const auto A = const auto A = std::make_shared<ngraph::op::Parameter>(element_type, ngraph::PartialShape{1, 2, 3});
std::make_shared<ngraph::op::Parameter>(element_type, ngraph::PartialShape{1, 2, 3}); const auto B = std::make_shared<ngraph::op::Parameter>(element_type, ngraph::PartialShape{1, 2, 3});
const auto B =
std::make_shared<ngraph::op::Parameter>(element_type, ngraph::PartialShape{1, 2, 3});
const auto op_func = std::make_shared<OP_Type>(A, B); const auto op_func = std::make_shared<OP_Type>(A, B);
ngraph::test::NodeBuilder builder(op_func, {A, B}); ngraph::test::NodeBuilder builder(op_func, {A, B});

View File

@@ -2,10 +2,9 @@
// SPDX-License-Identifier: Apache-2.0
//

-#include "unary_ops.hpp"
-
#include "openvino/opsets/opset10.hpp"
+#include "unary_ops.hpp"

using Types = ::testing::Types<UnaryOperatorType<ov::op::v10::IsFinite, ngraph::element::f32>>;

INSTANTIATE_TYPED_TEST_SUITE_P(visitor_without_attribute, UnaryOperatorVisitor, Types, UnaryOperatorTypeName);

View File

@ -3,10 +3,8 @@
// //
#include "gtest/gtest.h" #include "gtest/gtest.h"
#include "ngraph/ngraph.hpp" #include "ngraph/ngraph.hpp"
#include "ngraph/op/util/attr_types.hpp" #include "ngraph/op/util/attr_types.hpp"
#include "util/visitor.hpp" #include "util/visitor.hpp"
using namespace std; using namespace std;
@ -15,24 +13,19 @@ using ngraph::test::NodeBuilder;
using ngraph::test::ValueMap; using ngraph::test::ValueMap;
template <typename T, ngraph::element::Type_t ELEMENT_TYPE> template <typename T, ngraph::element::Type_t ELEMENT_TYPE>
class ReduceOperatorType class ReduceOperatorType {
{
public: public:
using op_type = T; using op_type = T;
static constexpr ngraph::element::Type_t element_type = ELEMENT_TYPE; static constexpr ngraph::element::Type_t element_type = ELEMENT_TYPE;
}; };
template <typename T> template <typename T>
class ReduceOperatorVisitor : public ::testing::Test class ReduceOperatorVisitor : public ::testing::Test {};
{
};
class ReduceOperatorTypeName class ReduceOperatorTypeName {
{
public: public:
template <typename T> template <typename T>
static std::string GetName(int) static std::string GetName(int) {
{
using OP_Type = typename T::op_type; using OP_Type = typename T::op_type;
constexpr ngraph::element::Type precision(T::element_type); constexpr ngraph::element::Type precision(T::element_type);
const ngraph::Node::type_info_t typeinfo = OP_Type::get_type_info_static(); const ngraph::Node::type_info_t typeinfo = OP_Type::get_type_info_static();
@ -42,8 +35,7 @@ public:
TYPED_TEST_SUITE_P(ReduceOperatorVisitor); TYPED_TEST_SUITE_P(ReduceOperatorVisitor);
TYPED_TEST_P(ReduceOperatorVisitor, keep_dims_3D) TYPED_TEST_P(ReduceOperatorVisitor, keep_dims_3D) {
{
using OP_Type = typename TypeParam::op_type; using OP_Type = typename TypeParam::op_type;
Shape in_shape{3, 4, 5}; Shape in_shape{3, 4, 5};
@ -66,8 +58,7 @@ TYPED_TEST_P(ReduceOperatorVisitor, keep_dims_3D)
EXPECT_EQ(g_reduce_op->get_keep_dims(), reduce_op->get_keep_dims()); EXPECT_EQ(g_reduce_op->get_keep_dims(), reduce_op->get_keep_dims());
} }
TYPED_TEST_P(ReduceOperatorVisitor, do_not_keep_dims_3D) TYPED_TEST_P(ReduceOperatorVisitor, do_not_keep_dims_3D) {
{
using OP_Type = typename TypeParam::op_type; using OP_Type = typename TypeParam::op_type;
Shape in_shape{3, 4, 5}; Shape in_shape{3, 4, 5};
@ -90,6 +81,4 @@ TYPED_TEST_P(ReduceOperatorVisitor, do_not_keep_dims_3D)
EXPECT_EQ(g_reduce_op->get_keep_dims(), reduce_op->get_keep_dims()); EXPECT_EQ(g_reduce_op->get_keep_dims(), reduce_op->get_keep_dims());
} }
REGISTER_TYPED_TEST_SUITE_P(ReduceOperatorVisitor, REGISTER_TYPED_TEST_SUITE_P(ReduceOperatorVisitor, keep_dims_3D, do_not_keep_dims_3D);
keep_dims_3D,
do_not_keep_dims_3D);

View File

@@ -18,5 +18,6 @@ ov_add_test_target(
        "${CMAKE_CURRENT_SOURCE_DIR}/../include"
    ADD_CLANG_FORMAT
    LABELS
-        IR_FRONTEND
+        OV
+        IR_FE
)

View File

@@ -122,6 +122,8 @@ foreach(BACKEND_NAME ${ACTIVE_BACKEND_LIST})
endforeach()

add_executable(ov_onnx_frontend_tests ${SRC})

+add_test(NAME ov_onnx_frontend_tests COMMAND ov_onnx_frontend_tests)
+set_property(TEST ov_onnx_frontend_tests PROPERTY LABELS OV ONNX_FE)
+
add_dependencies(ov_onnx_frontend_tests template_extension)
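Because this suite is registered with CTest directly via `add_test` rather than through `ov_add_test_target`, any other standard CTest test property could be attached in the same place. A hypothetical example, not part of this commit (the timeout value is made up):

```cmake
# Hypothetical: cap the whole suite at 15 minutes so a hung test does not stall CI.
set_tests_properties(ov_onnx_frontend_tests PROPERTIES TIMEOUT 900)
```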

View File

@@ -4,19 +4,24 @@
set(TARGET_NAME "paddle_tests")

-file(GLOB SRC ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
-
-add_executable(${TARGET_NAME} ${SRC})
-
-target_link_libraries(${TARGET_NAME} PRIVATE cnpy frontend_shared_test_classes
-        openvino_paddle_frontend openvino::runtime gtest_main_manifest)
-
-add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})
-
-install(TARGETS ${TARGET_NAME}
-        RUNTIME DESTINATION tests
-        COMPONENT tests
-        EXCLUDE_FROM_ALL)
+ov_add_test_target(
+    NAME ${TARGET_NAME}
+    ROOT ${CMAKE_CURRENT_SOURCE_DIR}
+    DEPENDENCIES
+        paddle_test_models
+        openvino_paddle_frontend
+        paddle_fe_standalone_build_test
+    LINK_LIBRARIES
+        cnpy
+        frontend_shared_test_classes
+        openvino_paddle_frontend
+        openvino::runtime
+        gtest_main_manifest
+    ADD_CLANG_FORMAT
+    LABELS
+        OV
+        PADDLE_FE
+)

# Test model generating
ov_check_pip_packages(REQUIREMENTS_FILE "${CMAKE_CURRENT_SOURCE_DIR}/requirements.txt"
@@ -72,13 +77,9 @@ else()
    add_custom_target(paddle_test_models DEPENDS unable_build_paddle_models.txt)
endif()

-add_dependencies(${TARGET_NAME} paddle_test_models)
-add_dependencies(${TARGET_NAME} openvino_paddle_frontend)
-
# Fuzzy tests for PaddlePaddle use IE_CPU engine
if(ENABLE_INTEL_CPU)
    add_dependencies(${TARGET_NAME} openvino_intel_cpu_plugin)
endif()

add_subdirectory(standalone_build)
-add_dependencies(${TARGET_NAME} paddle_fe_standalone_build_test)

View File

@@ -7,4 +7,3 @@
#include <string>
-
static const std::string PADDLE_FE = "paddle";

View File

@@ -4,20 +4,22 @@
set(TARGET_NAME "ov_tensorflow_frontend_tests")

-file(GLOB SRC ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
-
-add_executable(${TARGET_NAME} ${SRC})
-
-target_link_libraries(${TARGET_NAME} PRIVATE
-        gtest_main_manifest frontend_shared_test_classes openvino_tensorflow_frontend
-        openvino_tensorflow_frontend_static_tests)
-
-add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})
-
-install(TARGETS ${TARGET_NAME}
-        RUNTIME DESTINATION tests
-        COMPONENT tests
-        EXCLUDE_FROM_ALL)
+ov_add_test_target(
+    NAME ${TARGET_NAME}
+    ROOT ${CMAKE_CURRENT_SOURCE_DIR}
+    DEPENDENCIES
+        tensorflow_test_models
+        tensorflow_fe_standalone_build_test
+    LINK_LIBRARIES
+        gtest_main_manifest
+        frontend_shared_test_classes
+        openvino_tensorflow_frontend
+        openvino_tensorflow_frontend_static_tests
+    ADD_CLANG_FORMAT
+    LABELS
+        OV
+        TF_FE
+)

# Test model generating
ov_check_pip_package(REQUIREMENT tensorflow
@@ -67,12 +69,9 @@ else()
    add_custom_target(tensorflow_test_models DEPENDS unable_build_tensorflow_models.txt)
endif()

-add_dependencies(${TARGET_NAME} tensorflow_test_models)
-
get_target_property(TENSORFLOW_FRONTEND_SRC_DIR openvino_tensorflow_frontend SOURCE_DIR)

add_subdirectory(standalone_build)
-add_dependencies(${TARGET_NAME} tensorflow_fe_standalone_build_test)

#
# Install TensorFlow frontend for tests reasons

View File

@@ -63,6 +63,7 @@ addIeTargetTest(
        DEPENDENCIES ${DEPENDENCIES}
    LABELS
        IE
+        OV
)

target_compile_definitions(${TARGET_NAME} PRIVATE $<$<BOOL:${BUILD_SHARED_LIBS}>:"BUILD_SHARED_LIBS">)

View File

@@ -24,6 +24,7 @@ addIeTargetTest(
        mock_engine
    LABELS
        IE
+        OV
)

if(SUGGEST_OVERRIDE_SUPPORTED)