Added suggest-override flag (#6631)

This commit is contained in:
Ilya Lavrenov 2021-07-16 18:55:05 +03:00 committed by GitHub
parent 22fddb4315
commit bc36425381
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
147 changed files with 406 additions and 548 deletions

View File

@ -42,50 +42,6 @@ endforeach()
# Build # Build
# #
# Configures and adds the ngraph subproject.
#
# Sets default values for ngraph-related cache options (only when the user has
# not already defined them), adjusts per-toolchain warning suppression flags,
# enables LTO for Release when requested, and registers the ngraph CPack
# components before adding the ngraph subdirectory.
function(build_ngraph)
    # Helper: define a boolean cache option only if the user has not set it.
    # NOTE: the original used FORCE here; it is redundant because the set() is
    # guarded by "NOT DEFINED", so the user's value is never overridden anyway.
    function(ngraph_set option value)
        if(NOT DEFINED ${option})
            set(${option} ${value} CACHE BOOL "")
        endif()
    endfunction()

    # ngraph unit tests are not supported on Android.
    if(ENABLE_TESTS AND NOT ANDROID)
        ngraph_set(NGRAPH_UNIT_TEST_ENABLE ON)
    else()
        ngraph_set(NGRAPH_UNIT_TEST_ENABLE OFF)
    endif()

    # The ONNX / PaddlePaddle frontends need protoc, which is unavailable on
    # Android, UWP (WINDOWS_STORE) and MSVC ARM/AARCH64 builds.
    if(NOT (ANDROID OR WINDOWS_STORE OR (MSVC AND (ARM OR AARCH64)) ))
        ngraph_set(NGRAPH_ONNX_IMPORT_ENABLE ON)
        ngraph_set(NGRAPH_PDPD_FRONTEND_ENABLE ON)
    else()
        ngraph_set(NGRAPH_ONNX_IMPORT_ENABLE OFF)
        ngraph_set(NGRAPH_PDPD_FRONTEND_ENABLE OFF)
    endif()

    if(CMAKE_CXX_COMPILER_ID MATCHES "^(Apple)?Clang$")
        ie_add_compiler_flags(-Wno-error=uninitialized -Wno-error=literal-conversion)
    elseif(UNIX)
        # -Wno-error=return-type used to be added here as well; it is already
        # added once by the GCC 7.0 WA below, so the duplicate is removed.
        ie_add_compiler_flags(-Wno-error=maybe-uninitialized)
    endif()

    # WA for GCC 7.0
    if(UNIX)
        ie_add_compiler_flags(-Wno-error=return-type -Wno-undef)
    elseif(WIN32)
        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4308 /wd4146 /wd4703 /wd4244 /wd4819")
    endif()

    if(ENABLE_LTO)
        set(CMAKE_INTERPROCEDURAL_OPTIMIZATION_RELEASE ON)
    endif()

    ie_cpack_add_component(ngraph REQUIRED)
    ie_cpack_add_component(ngraph_dev REQUIRED DEPENDS ngraph)

    add_subdirectory(ngraph)
endfunction()
function(openvino_developer_export_targets) function(openvino_developer_export_targets)
cmake_parse_arguments(EXPORT "" "COMPONENT" "TARGETS" ${ARGN}) cmake_parse_arguments(EXPORT "" "COMPONENT" "TARGETS" ${ARGN})
@ -118,9 +74,12 @@ function(openvino_developer_export_targets)
"A list of OpenVINO exported components" FORCE) "A list of OpenVINO exported components" FORCE)
endfunction() endfunction()
ie_cpack_add_component(ngraph REQUIRED)
ie_cpack_add_component(ngraph_dev REQUIRED DEPENDS ngraph)
add_subdirectory(thirdparty) add_subdirectory(thirdparty)
add_subdirectory(openvino) add_subdirectory(openvino)
build_ngraph() add_subdirectory(ngraph)
add_subdirectory(inference-engine) add_subdirectory(inference-engine)
# for Template plugin # for Template plugin

View File

@ -3,6 +3,7 @@
# #
include(ProcessorCount) include(ProcessorCount)
include(CheckCXXCompilerFlag)
# #
# Disables deprecated warnings generation # Disables deprecated warnings generation
@ -292,9 +293,14 @@ else()
elseif(UNIX) elseif(UNIX)
ie_add_compiler_flags(-Wuninitialized -Winit-self) ie_add_compiler_flags(-Wuninitialized -Winit-self)
if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
ie_add_compiler_flags(-Wno-error=switch) ie_add_compiler_flags(-Wno-error=switch
-Winconsistent-missing-override)
else() else()
ie_add_compiler_flags(-Wmaybe-uninitialized) ie_add_compiler_flags(-Wmaybe-uninitialized)
check_cxx_compiler_flag("-Wsuggest-override" SUGGEST_OVERRIDE_SUPPORTED)
if(SUGGEST_OVERRIDE_SUPPORTED)
set(CMAKE_CXX_FLAGS "-Wsuggest-override ${CMAKE_CXX_FLAGS}")
endif()
endif() endif()
endif() endif()
@ -316,3 +322,27 @@ else()
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--gc-sections -Wl,--exclude-libs,ALL") set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--gc-sections -Wl,--exclude-libs,ALL")
endif() endif()
endif() endif()
# Links provided libraries and include their INTERFACE_INCLUDE_DIRECTORIES as SYSTEM
# Links the provided libraries to TARGET_NAME and includes their
# INTERFACE_INCLUDE_DIRECTORIES as SYSTEM, suppressing warnings coming from
# third-party headers.
#
# Usage: link_system_libraries(<target> [PRIVATE|PUBLIC|INTERFACE] <libs...>)
# A visibility keyword applies to every library that follows it; the default
# visibility is PRIVATE.
function(link_system_libraries TARGET_NAME)
    set(MODE PRIVATE)
    foreach(arg IN LISTS ARGN)
        # BUGFIX: the regex must be anchored. The original unanchored
        # MATCHES "(PRIVATE|PUBLIC|INTERFACE)" would also treat a library
        # whose *name contains* one of these words (e.g. "my_PUBLIC_lib")
        # as a visibility keyword and silently drop it from the link line.
        if(arg MATCHES "^(PRIVATE|PUBLIC|INTERFACE)$")
            set(MODE ${arg})
        else()
            # Only real CMake targets carry interface include directories;
            # plain library names/paths are just linked below.
            if(TARGET "${arg}")
                target_include_directories(${TARGET_NAME}
                    SYSTEM ${MODE}
                        $<TARGET_PROPERTY:${arg},INTERFACE_INCLUDE_DIRECTORIES>
                        $<TARGET_PROPERTY:${arg},INTERFACE_SYSTEM_INCLUDE_DIRECTORIES>
                    )
            endif()

            target_link_libraries(${TARGET_NAME}
                ${MODE}
                    ${arg}
                )
        endif()
    endforeach()
endfunction()

View File

@ -6,7 +6,7 @@ include(CheckCXXCompilerFlag)
if (ENABLE_SANITIZER) if (ENABLE_SANITIZER)
set(SANITIZER_COMPILER_FLAGS "${SANITIZER_COMPILER_FLAGS} -fsanitize=address") set(SANITIZER_COMPILER_FLAGS "${SANITIZER_COMPILER_FLAGS} -fsanitize=address")
CHECK_CXX_COMPILER_FLAG("-fsanitize-recover=address" SANITIZE_RECOVER_ADDRESS_SUPPORTED) check_cxx_compiler_flag("-fsanitize-recover=address" SANITIZE_RECOVER_ADDRESS_SUPPORTED)
if (SANITIZE_RECOVER_ADDRESS_SUPPORTED) if (SANITIZE_RECOVER_ADDRESS_SUPPORTED)
set(SANITIZER_COMPILER_FLAGS "${SANITIZER_COMPILER_FLAGS} -fsanitize-recover=address") set(SANITIZER_COMPILER_FLAGS "${SANITIZER_COMPILER_FLAGS} -fsanitize-recover=address")
endif() endif()
@ -18,7 +18,7 @@ if (ENABLE_UB_SANITIZER)
# TODO: Remove -fno-sanitize=null as thirdparty/ocl/clhpp_headers UBSAN compatibility resolved: # TODO: Remove -fno-sanitize=null as thirdparty/ocl/clhpp_headers UBSAN compatibility resolved:
# https://github.com/KhronosGroup/OpenCL-CLHPP/issues/17 # https://github.com/KhronosGroup/OpenCL-CLHPP/issues/17
set(SANITIZER_COMPILER_FLAGS "${SANITIZER_COMPILER_FLAGS} -fsanitize=undefined -fno-sanitize=null") set(SANITIZER_COMPILER_FLAGS "${SANITIZER_COMPILER_FLAGS} -fsanitize=undefined -fno-sanitize=null")
CHECK_CXX_COMPILER_FLAG("-fsanitize-recover=undefined" SANITIZE_RECOVER_UNDEFINED_SUPPORTED) check_cxx_compiler_flag("-fsanitize-recover=undefined" SANITIZE_RECOVER_UNDEFINED_SUPPORTED)
if (SANITIZE_RECOVER_UNDEFINED_SUPPORTED) if (SANITIZE_RECOVER_UNDEFINED_SUPPORTED)
set(SANITIZER_COMPILER_FLAGS "${SANITIZER_COMPILER_FLAGS} -fsanitize-recover=undefined") set(SANITIZER_COMPILER_FLAGS "${SANITIZER_COMPILER_FLAGS} -fsanitize-recover=undefined")
endif() endif()

View File

@ -114,6 +114,25 @@ ie_option (ENABLE_SYSTEM_PUGIXML "use the system copy of pugixml" OFF)
ie_option (ENABLE_CPU_DEBUG_CAPS "enable CPU debug capabilities at runtime" OFF) ie_option (ENABLE_CPU_DEBUG_CAPS "enable CPU debug capabilities at runtime" OFF)
# protoc (the protobuf compiler) cannot be built/run for these targets, so the
# frontends that depend on generated protobuf code must be disabled there.
if(ANDROID OR WINDOWS_STORE OR (MSVC AND (ARM OR AARCH64)))
set(protoc_available OFF)
else()
set(protoc_available ON)
endif()
# Frontend / test options: each is ON by default but forced to the trailing
# fallback value (OFF) when its dependency condition string does not hold.
ie_dependent_option(NGRAPH_ONNX_IMPORT_ENABLE "Enable ONNX importer" ON "protoc_available" OFF)
ie_dependent_option(NGRAPH_ONNX_EDITOR_ENABLE "Enable ONNX Editor" ON "NGRAPH_ONNX_IMPORT_ENABLE" OFF)
ie_dependent_option(NGRAPH_PDPD_FRONTEND_ENABLE "Enable PaddlePaddle FrontEnd" ON "protoc_available" OFF)
# protobuf-lite only makes sense when at least one protobuf-based frontend is on.
ie_dependent_option(NGRAPH_USE_PROTOBUF_LITE "Compiles and links with protobuf-lite" OFF
"NGRAPH_ONNX_IMPORT_ENABLE OR NGRAPH_PDPD_FRONTEND_ENABLE" OFF)
ie_dependent_option(NGRAPH_UNIT_TEST_ENABLE "Enables ngraph unit tests" ON "ENABLE_TESTS;NOT ANDROID" OFF)
ie_dependent_option(NGRAPH_UNIT_TEST_BACKENDS_ENABLE "Control the building of unit tests using backends" ON
"NGRAPH_UNIT_TEST_ENABLE" OFF)
option(NGRAPH_DEBUG_ENABLE "Enable output for NGRAPH_DEBUG statements" OFF)
# WA for ngraph python build on Windows debug: keep these two options out of
# the IE_OPTIONS list that gets exported to plugin projects.
list(REMOVE_ITEM IE_OPTIONS NGRAPH_UNIT_TEST_ENABLE NGRAPH_UNIT_TEST_BACKENDS_ENABLE)
# #
# Process features # Process features
# #

View File

@ -13,7 +13,7 @@ set_and_check(IE_MAIN_SOURCE_DIR "@IE_MAIN_SOURCE_DIR@") # HDDL
# Variables to export in plugin's projects # Variables to export in plugin's projects
set(ie_options "@IE_OPTIONS@;CMAKE_BUILD_TYPE;CMAKE_SKIP_RPATH;") set(ie_options "@IE_OPTIONS@;CMAKE_BUILD_TYPE;CMAKE_SKIP_RPATH")
list(APPEND ie_options CMAKE_CXX_COMPILER_LAUNCHER CMAKE_C_COMPILER_LAUNCHER) list(APPEND ie_options CMAKE_CXX_COMPILER_LAUNCHER CMAKE_C_COMPILER_LAUNCHER)
file(TO_CMAKE_PATH "${CMAKE_CURRENT_LIST_DIR}" cache_path) file(TO_CMAKE_PATH "${CMAKE_CURRENT_LIST_DIR}" cache_path)
@ -73,6 +73,9 @@ if(NOT MSVC)
ie_add_compiler_flags(-Wno-error=unused-variable) ie_add_compiler_flags(-Wno-error=unused-variable)
if(CMAKE_COMPILER_IS_GNUCXX) if(CMAKE_COMPILER_IS_GNUCXX)
ie_add_compiler_flags(-Wno-error=unused-but-set-variable) ie_add_compiler_flags(-Wno-error=unused-but-set-variable)
if(SUGGEST_OVERRIDE_SUPPORTED)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-suggest-override")
endif()
endif() endif()
endif() endif()

View File

@ -30,7 +30,7 @@ public:
* @param ptrNumMemoryBytes pointer to specific number of memory bytes * @param ptrNumMemoryBytes pointer to specific number of memory bytes
* @return none. * @return none.
*/ */
virtual void GetFileInfo(const char* fileName, uint32_t numArrayToFindSize, uint32_t* ptrNumArrays, uint32_t* ptrNumMemoryBytes); void GetFileInfo(const char* fileName, uint32_t numArrayToFindSize, uint32_t* ptrNumArrays, uint32_t* ptrNumMemoryBytes) override;
/** /**
* @brief Load Kaldi ARK speech feature vector file * @brief Load Kaldi ARK speech feature vector file
@ -43,8 +43,8 @@ public:
* @param ptrNumBytesPerElement pointer to number bytes per element (size of float by default) * @param ptrNumBytesPerElement pointer to number bytes per element (size of float by default)
* @return none. * @return none.
*/ */
virtual void LoadFile(const char* fileName, uint32_t arrayIndex, std::string& ptrName, std::vector<uint8_t>& memory, uint32_t* ptrNumRows, void LoadFile(const char* fileName, uint32_t arrayIndex, std::string& ptrName, std::vector<uint8_t>& memory, uint32_t* ptrNumRows, uint32_t* ptrNumColumns,
uint32_t* ptrNumColumns, uint32_t* ptrNumBytesPerElement); uint32_t* ptrNumBytesPerElement) override;
/** /**
* @brief Save Kaldi ARK speech feature vector file * @brief Save Kaldi ARK speech feature vector file
@ -56,7 +56,7 @@ public:
* @param numColumns number of columns * @param numColumns number of columns
* @return none. * @return none.
*/ */
virtual void SaveFile(const char* fileName, bool shouldAppend, std::string name, void* ptrMemory, uint32_t numRows, uint32_t numColumns); void SaveFile(const char* fileName, bool shouldAppend, std::string name, void* ptrMemory, uint32_t numRows, uint32_t numColumns) override;
}; };
/// @brief Responsible to work with .npz files /// @brief Responsible to work with .npz files
@ -70,7 +70,7 @@ public:
* @param ptrNumMemoryBytes pointer to specific number of memory bytes * @param ptrNumMemoryBytes pointer to specific number of memory bytes
* @return none. * @return none.
*/ */
virtual void GetFileInfo(const char* fileName, uint32_t numArrayToFindSize, uint32_t* ptrNumArrays, uint32_t* ptrNumMemoryBytes); void GetFileInfo(const char* fileName, uint32_t numArrayToFindSize, uint32_t* ptrNumArrays, uint32_t* ptrNumMemoryBytes) override;
/** /**
* @brief Load Numpy* uncompressed NPZ speech feature vector file * @brief Load Numpy* uncompressed NPZ speech feature vector file
@ -83,8 +83,8 @@ public:
* @param ptrNumBytesPerElement pointer to number bytes per element (size of float by default) * @param ptrNumBytesPerElement pointer to number bytes per element (size of float by default)
* @return none. * @return none.
*/ */
virtual void LoadFile(const char* fileName, uint32_t arrayIndex, std::string& ptrName, std::vector<uint8_t>& memory, uint32_t* ptrNumRows, void LoadFile(const char* fileName, uint32_t arrayIndex, std::string& ptrName, std::vector<uint8_t>& memory, uint32_t* ptrNumRows, uint32_t* ptrNumColumns,
uint32_t* ptrNumColumns, uint32_t* ptrNumBytesPerElement); uint32_t* ptrNumBytesPerElement) override;
/** /**
* @brief Save Numpy* uncompressed NPZ speech feature vector file * @brief Save Numpy* uncompressed NPZ speech feature vector file
@ -96,5 +96,5 @@ public:
* @param numColumns number of columns * @param numColumns number of columns
* @return none. * @return none.
*/ */
virtual void SaveFile(const char* fileName, bool shouldAppend, std::string name, void* ptrMemory, uint32_t numRows, uint32_t numColumns); void SaveFile(const char* fileName, bool shouldAppend, std::string name, void* ptrMemory, uint32_t numRows, uint32_t numColumns) override;
}; };

View File

@ -46,8 +46,10 @@ target_link_libraries(${TARGET_NAME} PRIVATE mkldnn
inference_engine_lp_transformations) inference_engine_lp_transformations)
target_include_directories(${TARGET_NAME} PRIVATE target_include_directories(${TARGET_NAME} PRIVATE
${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR})
$<TARGET_PROPERTY:mkldnn,INCLUDE_DIRECTORIES>)
target_include_directories(${TARGET_NAME} SYSTEM PRIVATE
$<TARGET_PROPERTY:mkldnn,INCLUDE_DIRECTORIES>)
# Cross compiled function # Cross compiled function
# TODO: The same for proposal, proposalONNX, topk # TODO: The same for proposal, proposalONNX, topk
@ -64,15 +66,16 @@ ie_add_api_validator_post_build_step(TARGET ${TARGET_NAME})
# add test object library # add test object library
add_library(${TARGET_NAME}_obj OBJECT ${SOURCES} ${HEADERS}) add_library(${TARGET_NAME}_obj OBJECT ${SOURCES} ${HEADERS})
target_link_libraries(${TARGET_NAME}_obj PUBLIC mkldnn) link_system_libraries(${TARGET_NAME}_obj PUBLIC mkldnn)
target_include_directories(${TARGET_NAME}_obj PRIVATE $<TARGET_PROPERTY:inference_engine_preproc_s,INTERFACE_INCLUDE_DIRECTORIES> target_include_directories(${TARGET_NAME}_obj PRIVATE $<TARGET_PROPERTY:inference_engine_preproc_s,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:inference_engine_transformations,INTERFACE_INCLUDE_DIRECTORIES> $<TARGET_PROPERTY:inference_engine_transformations,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:openvino::itt,INTERFACE_INCLUDE_DIRECTORIES> $<TARGET_PROPERTY:openvino::itt,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:inference_engine_lp_transformations,INTERFACE_INCLUDE_DIRECTORIES> $<TARGET_PROPERTY:inference_engine_lp_transformations,INTERFACE_INCLUDE_DIRECTORIES>
PUBLIC ${CMAKE_CURRENT_SOURCE_DIR} PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}
$<TARGET_PROPERTY:openvino::conditional_compilation,INTERFACE_INCLUDE_DIRECTORIES> $<TARGET_PROPERTY:openvino::conditional_compilation,INTERFACE_INCLUDE_DIRECTORIES>)
$<TARGET_PROPERTY:mkldnn,INCLUDE_DIRECTORIES>)
target_include_directories(${TARGET_NAME}_obj SYSTEM PUBLIC $<TARGET_PROPERTY:mkldnn,INCLUDE_DIRECTORIES>)
set_ie_threading_interface_for(${TARGET_NAME}_obj) set_ie_threading_interface_for(${TARGET_NAME}_obj)

View File

@ -13,17 +13,17 @@ constexpr size_t channelsPos = 1lu;
class PlainFormatCreator : public TensorDescCreator { class PlainFormatCreator : public TensorDescCreator {
public: public:
virtual InferenceEngine::TensorDesc createDesc(const InferenceEngine::Precision& precision, const InferenceEngine::SizeVector& srcDims) const { InferenceEngine::TensorDesc createDesc(const InferenceEngine::Precision& precision, const InferenceEngine::SizeVector& srcDims) const override {
SizeVector order(srcDims.size()); SizeVector order(srcDims.size());
std::iota(order.begin(), order.end(), 0); std::iota(order.begin(), order.end(), 0);
return TensorDesc(precision, srcDims, {srcDims, order}); return TensorDesc(precision, srcDims, {srcDims, order});
} }
virtual size_t getMinimalRank() const { return 0lu; } size_t getMinimalRank() const override { return 0lu; }
}; };
class PerChannelCreator : public TensorDescCreator { class PerChannelCreator : public TensorDescCreator {
public: public:
virtual InferenceEngine::TensorDesc createDesc(const InferenceEngine::Precision &precision, const InferenceEngine::SizeVector &srcDims) const { InferenceEngine::TensorDesc createDesc(const InferenceEngine::Precision &precision, const InferenceEngine::SizeVector &srcDims) const override {
SizeVector order(srcDims.size()); SizeVector order(srcDims.size());
std::iota(order.begin(), order.end(), 0); std::iota(order.begin(), order.end(), 0);
SizeVector blkDims = srcDims; SizeVector blkDims = srcDims;
@ -39,13 +39,13 @@ public:
return TensorDesc(precision, srcDims, {blkDims, order}); return TensorDesc(precision, srcDims, {blkDims, order});
} }
virtual size_t getMinimalRank() const { return 3lu; } size_t getMinimalRank() const override { return 3lu; }
}; };
class ChannelBlockedCreator : public TensorDescCreator { class ChannelBlockedCreator : public TensorDescCreator {
public: public:
ChannelBlockedCreator(size_t blockSize) : _blockSize(blockSize) {} ChannelBlockedCreator(size_t blockSize) : _blockSize(blockSize) {}
virtual InferenceEngine::TensorDesc createDesc(const InferenceEngine::Precision& precision, const InferenceEngine::SizeVector& srcDims) const { InferenceEngine::TensorDesc createDesc(const InferenceEngine::Precision& precision, const InferenceEngine::SizeVector& srcDims) const override {
if (srcDims.size() < 2) { if (srcDims.size() < 2) {
IE_THROW() << "Can't create blocked tensor descriptor!"; IE_THROW() << "Can't create blocked tensor descriptor!";
} }
@ -60,7 +60,7 @@ public:
return TensorDesc(precision, srcDims, {blkDims, order}); return TensorDesc(precision, srcDims, {blkDims, order});
} }
virtual size_t getMinimalRank() const { return 3lu; } size_t getMinimalRank() const override { return 3lu; }
private: private:
size_t _blockSize; size_t _blockSize;

View File

@ -149,7 +149,7 @@ struct jit_has_subnormals_base::reg<cpu_isa_t::sse41> {
template<cpu_isa_t isa> template<cpu_isa_t isa>
struct jit_has_subnormals : public jit_has_subnormals_base { struct jit_has_subnormals : public jit_has_subnormals_base {
void generate() final { void generate() override final { // NOLINT
size_t const vlen = reg<isa>::length; size_t const vlen = reg<isa>::length;
const int sh_bits = std::ilogb(vlen); const int sh_bits = std::ilogb(vlen);

View File

@ -72,6 +72,11 @@ file(GLOB_RECURSE legacy_tests
set_source_files_properties(${legacy_tests} PROPERTIES INCLUDE_DIRECTORIES set_source_files_properties(${legacy_tests} PROPERTIES INCLUDE_DIRECTORIES
$<TARGET_PROPERTY:inference_engine_legacy,INTERFACE_INCLUDE_DIRECTORIES>) $<TARGET_PROPERTY:inference_engine_legacy,INTERFACE_INCLUDE_DIRECTORIES>)
if(SUGGEST_OVERRIDE_SUPPORTED)
set_source_files_properties(${CMAKE_CURRENT_SOURCE_DIR}/caching_test.cpp
PROPERTIES COMPILE_OPTIONS -Wno-suggest-override)
endif()
include(CMakeParseArguments) include(CMakeParseArguments)
# #

View File

@ -25,9 +25,6 @@ using namespace InferenceEngine;
class NGraphReaderTests : public CommonTestUtils::TestsCommon { class NGraphReaderTests : public CommonTestUtils::TestsCommon {
protected: protected:
void TearDown() override {}
void SetUp() override {}
void compareIRs(const std::string& modelV10, const std::string& oldModel, size_t weightsSize = 0, const std::function<void(Blob::Ptr&)>& fillBlob = {}) { void compareIRs(const std::string& modelV10, const std::string& oldModel, size_t weightsSize = 0, const std::function<void(Blob::Ptr&)>& fillBlob = {}) {
Core ie; Core ie;
Blob::Ptr weights; Blob::Ptr weights;

View File

@ -17,10 +17,7 @@ class PreallocatorTests: public ::testing::Test {
protected: protected:
std::vector<float> mybuf; std::vector<float> mybuf;
virtual void TearDown() { void SetUp() override {
}
virtual void SetUp() {
mybuf.resize(10); mybuf.resize(10);
allocator = details::make_pre_allocator(&*mybuf.begin(), mybuf.size()); allocator = details::make_pre_allocator(&*mybuf.begin(), mybuf.size());
} }

View File

@ -56,7 +56,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
FakeQuantizeDecompositionBasicParams basic_params; FakeQuantizeDecompositionBasicParams basic_params;
std::pair<float, float> input_ranges_values; std::pair<float, float> input_ranges_values;
bool should_be_decompos; bool should_be_decompos;

View File

@ -4,11 +4,6 @@
set(TARGET_NAME subgraphsDumperTests) set(TARGET_NAME subgraphsDumperTests)
list(APPEND DEPENDENCIES
unitTestUtils
ngraph
)
addIeTargetTest( addIeTargetTest(
NAME ${TARGET_NAME} NAME ${TARGET_NAME}
ROOT ${CMAKE_CURRENT_SOURCE_DIR} ROOT ${CMAKE_CURRENT_SOURCE_DIR}
@ -18,7 +13,7 @@ addIeTargetTest(
$<TARGET_PROPERTY:inference_engine,INTERFACE_INCLUDE_DIRECTORIES> $<TARGET_PROPERTY:inference_engine,INTERFACE_INCLUDE_DIRECTORIES>
LINK_LIBRARIES LINK_LIBRARIES
PRIVATE PRIVATE
unitTestUtils funcTestUtils
ngraph ngraph
pugixml::static pugixml::static
ADD_CPPLINT ADD_CPPLINT

View File

@ -17,7 +17,7 @@ using ngraph::element::Type_t;
class ConvolutionMatcherTest : public ::testing::Test { class ConvolutionMatcherTest : public ::testing::Test {
protected: protected:
void SetUp() { void SetUp() override {
matcher = SubgraphsDumper::ConvolutionsMatcher(); matcher = SubgraphsDumper::ConvolutionsMatcher();
op_info = LayerTestsUtils::OPInfo(); op_info = LayerTestsUtils::OPInfo();
} }

View File

@ -13,7 +13,7 @@ using ngraph::element::Type_t;
class SingleOpMatcherTest : public ::testing::Test { class SingleOpMatcherTest : public ::testing::Test {
protected: protected:
void SetUp() { void SetUp() override {
matcher = SubgraphsDumper::SingleOpMatcher(); matcher = SubgraphsDumper::SingleOpMatcher();
op_info = LayerTestsUtils::OPInfo(); op_info = LayerTestsUtils::OPInfo();
} }

View File

@ -12,7 +12,7 @@ using ngraph::element::Type_t;
class MatcherConfigTest : public ::testing::Test { class MatcherConfigTest : public ::testing::Test {
protected: protected:
void SetUp() { void SetUp() override {
const auto const1 = std::make_shared<Constant>(Type_t::f32, Shape({5, 5}), 1); const auto const1 = std::make_shared<Constant>(Type_t::f32, Shape({5, 5}), 1);
const auto const2 = std::make_shared<Constant>(Type_t::f32, Shape({5, 5}), 2); const auto const2 = std::make_shared<Constant>(Type_t::f32, Shape({5, 5}), 2);
node = std::make_shared<v1::Add>(const1, const2); node = std::make_shared<v1::Add>(const1, const2);

View File

@ -32,7 +32,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
SizeVector ie_shape; SizeVector ie_shape;
std::tie(inPrc, ie_shape, targetDevice) = this->GetParam(); std::tie(inPrc, ie_shape, targetDevice) = this->GetParam();

View File

@ -32,7 +32,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
LayerTestsDefinitions::EltwiseTestParams basicParamsSet; LayerTestsDefinitions::EltwiseTestParams basicParamsSet;
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
std::tie(basicParamsSet, cpuParams) = this->GetParam(); std::tie(basicParamsSet, cpuParams) = this->GetParam();

View File

@ -67,7 +67,7 @@ protected:
ASSERT_TRUE(foundConv) << "Can't find Convolution node"; ASSERT_TRUE(foundConv) << "Can't find Convolution node";
} }
void SetUp() { void SetUp() override {
groupConvLayerTestParamsSet basicParamsSet; groupConvLayerTestParamsSet basicParamsSet;
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
fusingSpecificParams fusingParams; fusingSpecificParams fusingParams;

View File

@ -49,7 +49,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
groupConvBackpropDataLayerTestParamsSet basicParamsSet; groupConvBackpropDataLayerTestParamsSet basicParamsSet;
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
fusingSpecificParams fusingParams; fusingSpecificParams fusingParams;

View File

@ -41,7 +41,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
LayerTestsDefinitions::GRUCellParams basicParamsSet; LayerTestsDefinitions::GRUCellParams basicParamsSet;
std::map<std::string, std::string> additionalConfig; std::map<std::string, std::string> additionalConfig;

View File

@ -41,7 +41,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
LayerTestsDefinitions::GRUSequenceParams basicParamsSet; LayerTestsDefinitions::GRUSequenceParams basicParamsSet;
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
std::map<std::string, std::string> additionalConfig; std::map<std::string, std::string> additionalConfig;
@ -142,7 +142,7 @@ protected:
} }
} }
void GenerateInputs() { void GenerateInputs() override {
for (const auto &input : executableNetwork.GetInputsInfo()) { for (const auto &input : executableNetwork.GetInputsInfo()) {
const auto &info = input.second; const auto &info = input.second;
auto blob = GenerateInput(*info); auto blob = GenerateInput(*info);

View File

@ -46,7 +46,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
LayerTestsDefinitions::InterpolateLayerTestParams basicParamsSet; LayerTestsDefinitions::InterpolateLayerTestParams basicParamsSet;
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
fusingSpecificParams fusingParams; fusingSpecificParams fusingParams;

View File

@ -41,7 +41,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
LayerTestsDefinitions::LSTMCellParams basicParamsSet; LayerTestsDefinitions::LSTMCellParams basicParamsSet;
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
std::map<std::string, std::string> additionalConfig; std::map<std::string, std::string> additionalConfig;

View File

@ -42,7 +42,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
LayerTestsDefinitions::LSTMSequenceParams basicParamsSet; LayerTestsDefinitions::LSTMSequenceParams basicParamsSet;
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
std::map<std::string, std::string> additionalConfig; std::map<std::string, std::string> additionalConfig;
@ -149,7 +149,7 @@ protected:
} }
} }
void GenerateInputs() { void GenerateInputs() override {
for (const auto &input : executableNetwork.GetInputsInfo()) { for (const auto &input : executableNetwork.GetInputsInfo()) {
const auto &info = input.second; const auto &info = input.second;
auto blob = GenerateInput(*info); auto blob = GenerateInput(*info);

View File

@ -33,7 +33,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
LayerTestsDefinitions::padLayerTestParamsSet basicParamsSet; LayerTestsDefinitions::padLayerTestParamsSet basicParamsSet;
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
std::tie(basicParamsSet, cpuParams) = this->GetParam(); std::tie(basicParamsSet, cpuParams) = this->GetParam();

View File

@ -37,7 +37,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
poolLayerTestParamsSet basicParamsSet; poolLayerTestParamsSet basicParamsSet;
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
fusingSpecificParams fusingParams; fusingSpecificParams fusingParams;

View File

@ -41,7 +41,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
LayerTestsDefinitions::RNNCellParams basicParamsSet; LayerTestsDefinitions::RNNCellParams basicParamsSet;
std::map<std::string, std::string> additionalConfig; std::map<std::string, std::string> additionalConfig;

View File

@ -41,7 +41,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
LayerTestsDefinitions::RNNSequenceParams basicParamsSet; LayerTestsDefinitions::RNNSequenceParams basicParamsSet;
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
std::map<std::string, std::string> additionalConfig; std::map<std::string, std::string> additionalConfig;
@ -119,7 +119,7 @@ protected:
} }
} }
void GenerateInputs() { void GenerateInputs() override {
for (const auto &input : executableNetwork.GetInputsInfo()) { for (const auto &input : executableNetwork.GetInputsInfo()) {
const auto &info = input.second; const auto &info = input.second;
auto blob = GenerateInput(*info); auto blob = GenerateInput(*info);

View File

@ -34,7 +34,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
LayerTestsDefinitions::shuffleChannelsLayerTestParamsSet basicParamsSet; LayerTestsDefinitions::shuffleChannelsLayerTestParamsSet basicParamsSet;
CPUSpecificParams cpuParams; CPUSpecificParams cpuParams;
std::tie(basicParamsSet, cpuParams) = this->GetParam(); std::tie(basicParamsSet, cpuParams) = this->GetParam();

View File

@ -60,7 +60,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
threshold = 0.1f; threshold = 0.1f;
std::vector<std::vector<size_t>> inputShapes; std::vector<std::vector<size_t>> inputShapes;

View File

@ -46,7 +46,7 @@ public:
} }
protected: protected:
void SetUp() { void SetUp() override {
threshold = 0.1f; threshold = 0.1f;
Shape inputShape; Shape inputShape;

View File

@ -15,7 +15,7 @@ namespace CPULayerTestsDefinitions {
class InputNoReorderEltwiseBF16 : virtual public LayerTestsUtils::LayerTestsCommon, class InputNoReorderEltwiseBF16 : virtual public LayerTestsUtils::LayerTestsCommon,
public CPUTestsBase { public CPUTestsBase {
protected: protected:
void SetUp() { void SetUp() override {
auto netPrecision = inPrc = Precision::FP32; auto netPrecision = inPrc = Precision::FP32;
outPrc = Precision::BF16; outPrc = Precision::BF16;
targetDevice = CommonTestUtils::DEVICE_CPU; targetDevice = CommonTestUtils::DEVICE_CPU;

View File

@ -43,7 +43,7 @@ public:
return result.str(); return result.str();
} }
InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const { InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const override {
InferenceEngine::Blob::Ptr blob = make_blob_with_precision(info.getTensorDesc()); InferenceEngine::Blob::Ptr blob = make_blob_with_precision(info.getTensorDesc());
blob->allocate(); blob->allocate();

View File

@ -56,7 +56,7 @@ public:
return result.str(); return result.str();
} }
InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const { InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const override {
InferenceEngine::Blob::Ptr blob = make_blob_with_precision(info.getTensorDesc()); InferenceEngine::Blob::Ptr blob = make_blob_with_precision(info.getTensorDesc());
blob->allocate(); blob->allocate();
@ -122,7 +122,7 @@ public:
return result.str(); return result.str();
} }
InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const { InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const override {
return FuncTestUtils::createAndFillBlob(info.getTensorDesc(), inputDataMax - inputDataMin, inputDataMin, return FuncTestUtils::createAndFillBlob(info.getTensorDesc(), inputDataMax - inputDataMin, inputDataMin,
1 / inputDataResolution); 1 / inputDataResolution);
} }

View File

@ -59,7 +59,7 @@ public:
return result.str(); return result.str();
} }
InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const { InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const override {
return FuncTestUtils::createAndFillBlob(info.getTensorDesc(), inputDataMax - inputDataMin, inputDataMin, 1 / inputDataResolution); return FuncTestUtils::createAndFillBlob(info.getTensorDesc(), inputDataMax - inputDataMin, inputDataMin, 1 / inputDataResolution);
} }

View File

@ -51,7 +51,7 @@ public:
return result.str(); return result.str();
} }
InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const { InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const override {
InferenceEngine::Blob::Ptr blob = make_blob_with_precision(info.getTensorDesc()); InferenceEngine::Blob::Ptr blob = make_blob_with_precision(info.getTensorDesc());
blob->allocate(); blob->allocate();

View File

@ -48,7 +48,7 @@ public:
return result.str(); return result.str();
} }
InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const { InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const override {
InferenceEngine::Blob::Ptr blob = make_blob_with_precision(info.getTensorDesc()); InferenceEngine::Blob::Ptr blob = make_blob_with_precision(info.getTensorDesc());
blob->allocate(); blob->allocate();
@ -123,7 +123,7 @@ public:
return result.str(); return result.str();
} }
InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const { InferenceEngine::Blob::Ptr GenerateInput(const InferenceEngine::InputInfo& info) const override {
InferenceEngine::Blob::Ptr blob = make_blob_with_precision(info.getTensorDesc()); InferenceEngine::Blob::Ptr blob = make_blob_with_precision(info.getTensorDesc());
blob->allocate(); blob->allocate();

View File

@ -25,7 +25,7 @@ class RemoteBlob_Test : public CommonTestUtils::TestsCommon {
protected: protected:
std::shared_ptr<ngraph::Function> fn_ptr; std::shared_ptr<ngraph::Function> fn_ptr;
virtual void SetUp() { void SetUp() override {
fn_ptr = ngraph::builder::subgraph::makeSplitMultiConvConcat(); fn_ptr = ngraph::builder::subgraph::makeSplitMultiConvConcat();
} }
}; };

View File

@ -21,7 +21,7 @@ protected:
std::vector<std::string> outputsToAdd; std::vector<std::string> outputsToAdd;
std::string deviceName; std::string deviceName;
void SetUp(); void SetUp() override;
public: public:
static std::string getTestCaseName(const testing::TestParamInfo<addOutputsParams> &obj); static std::string getTestCaseName(const testing::TestParamInfo<addOutputsParams> &obj);
}; };

View File

@ -21,7 +21,7 @@ protected:
std::vector<std::string> statesToQuery; std::vector<std::string> statesToQuery;
std::string deviceName; std::string deviceName;
void SetUp(); void SetUp() override;
InferenceEngine::ExecutableNetwork PrepareNetwork(); InferenceEngine::ExecutableNetwork PrepareNetwork();
public: public:
static std::string getTestCaseName(const testing::TestParamInfo<memoryStateParams> &obj); static std::string getTestCaseName(const testing::TestParamInfo<memoryStateParams> &obj);

View File

@ -20,7 +20,7 @@ private:
// vector which is later used for comparison // vector which is later used for comparison
struct exec_graph_walker : pugi::xml_tree_walker { struct exec_graph_walker : pugi::xml_tree_walker {
std::vector<pugi::xml_node> nodes; std::vector<pugi::xml_node> nodes;
virtual bool for_each(pugi::xml_node &node); bool for_each(pugi::xml_node &node) override;
}; };
// compare_docs() helper // compare_docs() helper

View File

@ -51,13 +51,13 @@ function(add_common_utils ADD_TARGET_NAME)
target_include_directories(${ADD_TARGET_NAME} target_include_directories(${ADD_TARGET_NAME}
PUBLIC PUBLIC
${IE_TESTS_ROOT}/ie_test_utils
$<TARGET_PROPERTY:inference_engine,INTERFACE_INCLUDE_DIRECTORIES> $<TARGET_PROPERTY:inference_engine,INTERFACE_INCLUDE_DIRECTORIES>
PRIVATE PRIVATE
$<TARGET_PROPERTY:inference_engine_legacy,INTERFACE_INCLUDE_DIRECTORIES> $<TARGET_PROPERTY:inference_engine_legacy,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:inference_engine_plugin_api,INTERFACE_INCLUDE_DIRECTORIES> $<TARGET_PROPERTY:inference_engine_plugin_api,INTERFACE_INCLUDE_DIRECTORIES>
$<TARGET_PROPERTY:inference_engine_transformations,INTERFACE_INCLUDE_DIRECTORIES> $<TARGET_PROPERTY:inference_engine_transformations,INTERFACE_INCLUDE_DIRECTORIES>
) )
target_include_directories(${ADD_TARGET_NAME} SYSTEM PUBLIC ${IE_TESTS_ROOT}/ie_test_utils)
target_compile_definitions(${ADD_TARGET_NAME} PUBLIC ${ARGN}) target_compile_definitions(${ADD_TARGET_NAME} PUBLIC ${ARGN})
endfunction() endfunction()

View File

@ -2,6 +2,10 @@
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #
if(SUGGEST_OVERRIDE_SUPPORTED)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-suggest-override")
endif()
set(TARGET_NAME unitTestUtils) set(TARGET_NAME unitTestUtils)
add_subdirectory(mocks/mock_engine) add_subdirectory(mocks/mock_engine)

View File

@ -374,7 +374,8 @@ std::shared_ptr<ngraph::Function> ConcatFunction::getOriginalWithSplitedIntermed
Output<Node> lastOutput = intermediateOp->output(1); Output<Node> lastOutput = intermediateOp->output(1);
if (addConvolution) { if (addConvolution) {
auto weights = ngraph::opset1::Constant::create( auto weights = ngraph::opset1::Constant::create(
precision, ngraph::Shape{ inputShape[1].get_length() / numSplit, inputShape[1].get_length() / numSplit, 1, 1 }, { 1 }); precision, ngraph::Shape{ static_cast<size_t>(inputShape[1].get_length() / numSplit),
static_cast<size_t>(inputShape[1].get_length() / numSplit), 1, 1 }, { 1 });
auto convolution = std::make_shared<ngraph::opset1::Convolution>( auto convolution = std::make_shared<ngraph::opset1::Convolution>(
intermediateOp->output(1), intermediateOp->output(1),
weights, weights,
@ -1260,7 +1261,8 @@ std::shared_ptr<ngraph::Function> ConcatFunction::getReferenceWithSplitedInterme
if (addConvolution) { if (addConvolution) {
auto weights = ngraph::opset1::Constant::create( auto weights = ngraph::opset1::Constant::create(
precision, precision,
ngraph::Shape{ inputShape[1].get_length() / numSplit, inputShape[1].get_length() / numSplit, 1, 1 }, { 1 }); ngraph::Shape{ static_cast<size_t>(inputShape[1].get_length() / numSplit),
static_cast<size_t>(inputShape[1].get_length() / numSplit), 1, 1 }, { 1 });
auto convolution = std::make_shared<ngraph::opset1::Convolution>( auto convolution = std::make_shared<ngraph::opset1::Convolution>(
lastDequantization2, lastDequantization2,

View File

@ -12,10 +12,12 @@ addIeTargetTest(
OBJECT_FILES OBJECT_FILES
$<TARGET_OBJECTS:MKLDNNPlugin_obj> $<TARGET_OBJECTS:MKLDNNPlugin_obj>
LINK_LIBRARIES LINK_LIBRARIES
unitTestUtils gtest
gtest_main
mkldnn mkldnn
inference_engine_transformations inference_engine_transformations
inference_engine_lp_transformations inference_engine_lp_transformations
inference_engine_s
ADD_CPPLINT ADD_CPPLINT
LABELS LABELS
CPU CPU

View File

@ -8,11 +8,9 @@ if (NOT NGRAPH_USE_PROTOBUF_LITE)
addIeTargetTest( addIeTargetTest(
NAME ${TARGET_NAME} NAME ${TARGET_NAME}
ROOT ${CMAKE_CURRENT_SOURCE_DIR} ROOT ${CMAKE_CURRENT_SOURCE_DIR}
DEPENDENCIES
ngraph
onnx_importer
LINK_LIBRARIES LINK_LIBRARIES
unitTestUtils gtest
gtest_main
onnx_importer onnx_importer
DEFINES DEFINES
ONNX_MODELS_DIR=\"${CMAKE_CURRENT_SOURCE_DIR}/models\" ONNX_MODELS_DIR=\"${CMAKE_CURRENT_SOURCE_DIR}/models\"

View File

@ -8,9 +8,15 @@ addIeTargetTest(
NAME ${TARGET_NAME} NAME ${TARGET_NAME}
ROOT ${CMAKE_CURRENT_SOURCE_DIR} ROOT ${CMAKE_CURRENT_SOURCE_DIR}
LINK_LIBRARIES LINK_LIBRARIES
unitTestUtils gmock
commonTestUtils_s
GNAPlugin_test_static GNAPlugin_test_static
ADD_CPPLINT ADD_CPPLINT
LABELS LABELS
GNA GNA
) )
if(SUGGEST_OVERRIDE_SUPPORTED)
set_source_files_properties(gna_model_serial_test.cpp
PROPERTIES COMPILE_OPTIONS -Wno-suggest-override)
endif()

View File

@ -16,7 +16,6 @@ addIeTargetTest(
NAME ${TARGET_NAME} NAME ${TARGET_NAME}
ROOT ${CMAKE_CURRENT_SOURCE_DIR} ROOT ${CMAKE_CURRENT_SOURCE_DIR}
LINK_LIBRARIES LINK_LIBRARIES
unitTestUtils
inference_engine_lp_transformations inference_engine_lp_transformations
${OpenCV_LIBRARIES} ${OpenCV_LIBRARIES}
ADD_CPPLINT ADD_CPPLINT
@ -26,3 +25,10 @@ addIeTargetTest(
LABELS LABELS
IE IE
) )
if(SUGGEST_OVERRIDE_SUPPORTED)
set_source_files_properties(cpp_interfaces/ie_memory_state_internal_test.cpp
PROPERTIES COMPILE_OPTIONS -Wno-suggest-override)
endif()
link_system_libraries(${TARGET_NAME} PRIVATE unitTestUtils)

View File

@ -32,10 +32,7 @@ protected:
shared_ptr<IInferRequest> request; shared_ptr<IInferRequest> request;
ResponseDesc dsc; ResponseDesc dsc;
virtual void TearDown() { void SetUp() override {
}
virtual void SetUp() {
mock_impl.reset(new MockIInferRequestInternal()); mock_impl.reset(new MockIInferRequestInternal());
request = std::make_shared<InferRequestBase>(mock_impl); request = std::make_shared<InferRequestBase>(mock_impl);
} }

View File

@ -56,10 +56,10 @@ protected:
MockTaskExecutor::Ptr mockTaskExecutor; MockTaskExecutor::Ptr mockTaskExecutor;
virtual void TearDown() { void TearDown() override {
} }
virtual void SetUp() { void SetUp() override {
InputsDataMap inputsInfo; InputsDataMap inputsInfo;
OutputsDataMap outputsInfo; OutputsDataMap outputsInfo;
mockTaskExecutor = make_shared<MockTaskExecutor>(); mockTaskExecutor = make_shared<MockTaskExecutor>();

View File

@ -30,7 +30,7 @@ class VariableStateTests : public ::testing::Test {
SoExecutableNetworkInternal net; SoExecutableNetworkInternal net;
IInferRequestInternal::Ptr req; IInferRequestInternal::Ptr req;
virtual void SetUp() { void SetUp() override {
mockExeNetworkInternal = make_shared<MockIExecutableNetworkInternal>(); mockExeNetworkInternal = make_shared<MockIExecutableNetworkInternal>();
mockInferRequestInternal = make_shared<MockIInferRequestInternal>(); mockInferRequestInternal = make_shared<MockIInferRequestInternal>();
mockVariableStateInternal = make_shared<MockIVariableStateInternal>(); mockVariableStateInternal = make_shared<MockIVariableStateInternal>();
@ -199,14 +199,12 @@ TEST_F(VariableStateTests, VariableStateCanPropagateGetLastState) {
ASSERT_FLOAT_EQ(saver->cbuffer().as<const float*>()[2], 125); ASSERT_FLOAT_EQ(saver->cbuffer().as<const float*>()[2], 125);
IE_SUPPRESS_DEPRECATED_END IE_SUPPRESS_DEPRECATED_END
} }
class VariableStateInternalMockImpl : public IVariableStateInternal { class VariableStateInternalMockImpl : public IVariableStateInternal {
public: public:
VariableStateInternalMockImpl(const char* name) : IVariableStateInternal(name) {} VariableStateInternalMockImpl(const char* name) : IVariableStateInternal(name) {}
MOCK_METHOD0(Reset, void()); MOCK_METHOD0(Reset, void());
}; };
TEST_F(VariableStateTests, VariableStateInternalCanSaveName) { TEST_F(VariableStateTests, VariableStateInternalCanSaveName) {
IVariableStateInternal::Ptr pState(new VariableStateInternalMockImpl("VariableStateInternalMockImpl")); IVariableStateInternal::Ptr pState(new VariableStateInternalMockImpl("VariableStateInternalMockImpl"));
ASSERT_STREQ(pState->GetName().c_str(), "VariableStateInternalMockImpl"); ASSERT_STREQ(pState->GetName().c_str(), "VariableStateInternalMockImpl");

View File

@ -35,14 +35,14 @@ protected:
ResponseDesc dsc; ResponseDesc dsc;
StatusCode sts; StatusCode sts;
virtual void TearDown() { void TearDown() override {
EXPECT_TRUE(Mock::VerifyAndClearExpectations(mock_plugin_impl.get())); EXPECT_TRUE(Mock::VerifyAndClearExpectations(mock_plugin_impl.get()));
EXPECT_TRUE(Mock::VerifyAndClearExpectations(mockIExeNetworkInternal.get())); EXPECT_TRUE(Mock::VerifyAndClearExpectations(mockIExeNetworkInternal.get()));
EXPECT_TRUE(Mock::VerifyAndClearExpectations(mockExeNetworkTS.get())); EXPECT_TRUE(Mock::VerifyAndClearExpectations(mockExeNetworkTS.get()));
EXPECT_TRUE(Mock::VerifyAndClearExpectations(mockInferRequestInternal.get())); EXPECT_TRUE(Mock::VerifyAndClearExpectations(mockInferRequestInternal.get()));
} }
virtual void SetUp() { void SetUp() override {
pluginId = "TEST"; pluginId = "TEST";
mock_plugin_impl.reset(new MockInferencePluginInternal()); mock_plugin_impl.reset(new MockInferencePluginInternal());
mock_plugin_impl->SetName(pluginId); mock_plugin_impl->SetName(pluginId);

View File

@ -16,10 +16,6 @@
class BlobTests: public ::testing::Test { class BlobTests: public ::testing::Test {
protected: protected:
virtual void TearDown() {}
virtual void SetUp() {}
std::shared_ptr<MockAllocator> createMockAllocator() { std::shared_ptr<MockAllocator> createMockAllocator() {
return std::shared_ptr<MockAllocator>(new MockAllocator()); return std::shared_ptr<MockAllocator>(new MockAllocator());
} }

View File

@ -42,13 +42,13 @@ protected:
MockIInferencePlugin* mockIPlugin; MockIInferencePlugin* mockIPlugin;
InferencePlugin plugin; InferencePlugin plugin;
virtual void TearDown() { void TearDown() override {
mockIExeNet.reset(); mockIExeNet.reset();
exeNetwork = {}; exeNetwork = {};
plugin = {}; plugin = {};
} }
virtual void SetUp() { void SetUp() override {
mockIExeNet = std::make_shared<MockIExecutableNetworkInternal>(); mockIExeNet = std::make_shared<MockIExecutableNetworkInternal>();
auto mockIPluginPtr = std::make_shared<MockIInferencePlugin>(); auto mockIPluginPtr = std::make_shared<MockIInferencePlugin>();
ON_CALL(*mockIPluginPtr, LoadNetwork(MatcherCast<const CNNNetwork&>(_), _)).WillByDefault(Return(mockIExeNet)); ON_CALL(*mockIPluginPtr, LoadNetwork(MatcherCast<const CNNNetwork&>(_), _)).WillByDefault(Return(mockIExeNet));
@ -113,12 +113,12 @@ class ExecutableNetworkWithIInferReqTests : public ExecutableNetworkTests {
protected: protected:
std::shared_ptr<MockIInferRequestInternal> mockIInferReq_p; std::shared_ptr<MockIInferRequestInternal> mockIInferReq_p;
virtual void TearDown() { void TearDown() override {
ExecutableNetworkTests::TearDown(); ExecutableNetworkTests::TearDown();
mockIInferReq_p.reset(); mockIInferReq_p.reset();
} }
virtual void SetUp() { void SetUp() override {
ExecutableNetworkTests::SetUp(); ExecutableNetworkTests::SetUp();
mockIInferReq_p = std::make_shared<MockIInferRequestInternal>(); mockIInferReq_p = std::make_shared<MockIInferRequestInternal>();
} }
@ -143,10 +143,7 @@ protected:
std::shared_ptr<IExecutableNetwork> exeNetwork; std::shared_ptr<IExecutableNetwork> exeNetwork;
ResponseDesc dsc; ResponseDesc dsc;
virtual void TearDown() { void SetUp() override {
}
virtual void SetUp() {
mock_impl.reset(new MockIExecutableNetworkInternal()); mock_impl.reset(new MockIExecutableNetworkInternal());
exeNetwork = std::make_shared<ExecutableNetworkBase>(mock_impl); exeNetwork = std::make_shared<ExecutableNetworkBase>(mock_impl);
} }

View File

@ -25,7 +25,7 @@ protected:
return CommonTestUtils::pre + mockEngineName + IE_BUILD_POSTFIX + CommonTestUtils::ext; return CommonTestUtils::pre + mockEngineName + IE_BUILD_POSTFIX + CommonTestUtils::ext;
} }
virtual void SetUp() { void SetUp() override {
std::string libraryName = get_mock_engine_name(); std::string libraryName = get_mock_engine_name();
sharedObjectLoader.reset(new SharedObjectLoader(libraryName.c_str())); sharedObjectLoader.reset(new SharedObjectLoader(libraryName.c_str()));
createPluginEngineProxy = make_std_function<IInferencePlugin*(IInferencePlugin*)>("CreatePluginEngineProxy"); createPluginEngineProxy = make_std_function<IInferencePlugin*(IInferencePlugin*)>("CreatePluginEngineProxy");

View File

@ -19,7 +19,6 @@ addIeTargetTest(
$<TARGET_PROPERTY:ngraphFunctions,INTERFACE_INCLUDE_DIRECTORIES> $<TARGET_PROPERTY:ngraphFunctions,INTERFACE_INCLUDE_DIRECTORIES>
LINK_LIBRARIES LINK_LIBRARIES
vpu_graph_transformer_test_static vpu_graph_transformer_test_static
unitTestUtils
mvnc mvnc
ngraph ngraph
interpreter_backend interpreter_backend
@ -29,3 +28,5 @@ addIeTargetTest(
VPU VPU
MYRIAD MYRIAD
) )
link_system_libraries(${TARGET_NAME} PRIVATE unitTestUtils)

View File

@ -126,9 +126,6 @@ protected:
ie = PluginCache::get().ie(); ie = PluginCache::get().ie();
} }
void TearDown() override {
}
public: public:
std::shared_ptr<ngraph::Function> createSubgraph(const SizeVector &dims, InferenceEngine::Precision prc = InferenceEngine::Precision::FP32) { std::shared_ptr<ngraph::Function> createSubgraph(const SizeVector &dims, InferenceEngine::Precision prc = InferenceEngine::Precision::FP32) {
ngraph::element::Type type = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(prc); ngraph::element::Type type = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(prc);

View File

@ -32,7 +32,7 @@ template<typename P = std::nullptr_t>
class PlgTest : public testing::TestWithParam<PlgTestParam<P>> { class PlgTest : public testing::TestWithParam<PlgTestParam<P>> {
protected: protected:
std::map<std::string, std::string> config; std::map<std::string, std::string> config;
virtual void SetUp() { void SetUp() override {
device_name = std::get<0>(this->GetParam()); device_name = std::get<0>(this->GetParam());
std::transform(device_name.begin(), device_name.end(), std::transform(device_name.begin(), device_name.end(),
device_name.begin(), [] (char v) { return v == '_' ? ':' : v; }); device_name.begin(), [] (char v) { return v == '_' ? ':' : v; });

View File

@ -83,6 +83,5 @@ protected:
std::map <std::string, std::string> config_; std::map <std::string, std::string> config_;
//Operations //Operations
virtual void SetUp() override = 0;
virtual void InitConfig(); virtual void InitConfig();
}; };

View File

@ -49,7 +49,7 @@ public:
protected: protected:
//Operations //Operations
virtual void SetUp() override; void SetUp() override;
virtual void InitConfig() override; virtual void InitConfig() override;
template <class T> template <class T>

View File

@ -56,7 +56,7 @@ class PoolingTest : public myriadLayersTests_nightly,
pooling_layer_params, vpu::LayoutPreference, Types...>> pooling_layer_params, vpu::LayoutPreference, Types...>>
{ {
public: public:
virtual void SetUp() { void SetUp() override {
myriadLayersTests_nightly::SetUp(); myriadLayersTests_nightly::SetUp();
auto p = ::testing::WithParamInterface<std::tuple<InferenceEngine::SizeVector, pooling_layer_params, vpu::LayoutPreference, Types...>>::GetParam(); auto p = ::testing::WithParamInterface<std::tuple<InferenceEngine::SizeVector, pooling_layer_params, vpu::LayoutPreference, Types...>>::GetParam();
_input_tensor = std::get<0>(p); _input_tensor = std::get<0>(p);
@ -118,7 +118,7 @@ class GlobalPoolingTest : public myriadLayersTests_nightly,
public testing::WithParamInterface<GlobalPoolingTestParam> public testing::WithParamInterface<GlobalPoolingTestParam>
{ {
public: public:
virtual void SetUp() { void SetUp() override {
myriadLayersTests_nightly::SetUp(); myriadLayersTests_nightly::SetUp();
auto params = ::testing::WithParamInterface<GlobalPoolingTestParam>::GetParam(); auto params = ::testing::WithParamInterface<GlobalPoolingTestParam>::GetParam();
_input_tensor = std::get<0>(params); _input_tensor = std::get<0>(params);
@ -166,7 +166,7 @@ class PoolingTestPad4 : public myriadLayersTests_nightly,
public testing::WithParamInterface<std::tuple<InferenceEngine::SizeVector, param_size, param_size, paddings4, vpu::LayoutPreference, Types...>> public testing::WithParamInterface<std::tuple<InferenceEngine::SizeVector, param_size, param_size, paddings4, vpu::LayoutPreference, Types...>>
{ {
public: public:
virtual void SetUp() { void SetUp() override {
myriadLayersTests_nightly::SetUp(); myriadLayersTests_nightly::SetUp();
auto p = ::testing::WithParamInterface<std::tuple<InferenceEngine::SizeVector, param_size, param_size, paddings4, vpu::LayoutPreference, Types...>>::GetParam(); auto p = ::testing::WithParamInterface<std::tuple<InferenceEngine::SizeVector, param_size, param_size, paddings4, vpu::LayoutPreference, Types...>>::GetParam();
_input_tensor = std::get<0>(p); _input_tensor = std::get<0>(p);
@ -225,7 +225,7 @@ class ConvolutionTest : public myriadLayersTests_nightly,
public testing::WithParamInterface<std::tuple<InferenceEngine::SizeVector, param_size, param_size, param_size, uint32_t, uint32_t, Types...>> public testing::WithParamInterface<std::tuple<InferenceEngine::SizeVector, param_size, param_size, param_size, uint32_t, uint32_t, Types...>>
{ {
public: public:
virtual void SetUp() { void SetUp() override {
myriadLayersTests_nightly::SetUp(); myriadLayersTests_nightly::SetUp();
auto p = ::testing::WithParamInterface<std::tuple<InferenceEngine::SizeVector, param_size, param_size, param_size, uint32_t, uint32_t, Types...>>::GetParam(); auto p = ::testing::WithParamInterface<std::tuple<InferenceEngine::SizeVector, param_size, param_size, param_size, uint32_t, uint32_t, Types...>>::GetParam();
_input_tensor = std::get<0>(p); _input_tensor = std::get<0>(p);
@ -281,7 +281,7 @@ class FCTest : public myriadLayersTests_nightly,
public testing::WithParamInterface<std::tuple<fcon_test_params, int32_t, int32_t, Types...>> public testing::WithParamInterface<std::tuple<fcon_test_params, int32_t, int32_t, Types...>>
{ {
public: public:
virtual void SetUp() { void SetUp() override {
myriadLayersTests_nightly::SetUp(); myriadLayersTests_nightly::SetUp();
auto p = ::testing::WithParamInterface<std::tuple<fcon_test_params, int32_t, int32_t, Types...>>::GetParam(); auto p = ::testing::WithParamInterface<std::tuple<fcon_test_params, int32_t, int32_t, Types...>>::GetParam();
_par = std::get<0>(p); _par = std::get<0>(p);

View File

@ -29,6 +29,12 @@ if (ENABLE_GNA)
list(APPEND TEST_SRC ${GNA_TESTS}) list(APPEND TEST_SRC ${GNA_TESTS})
list(APPEND TEST_DEPS GNAPlugin_test_static) list(APPEND TEST_DEPS GNAPlugin_test_static)
if(SUGGEST_OVERRIDE_SUPPORTED)
set_source_files_properties(engines/gna/graph_tools/graph_copy_tests.cpp
engines/gna/graph_tools/graph_tools_test.cpp
PROPERTIES COMPILE_OPTIONS -Wno-suggest-override)
endif()
# TODO: fix GNA tests # TODO: fix GNA tests
if(OFF) if(OFF)
set(gna_stub "${CMAKE_CURRENT_SOURCE_DIR}/engines/gna/gna_api_stub.cpp") set(gna_stub "${CMAKE_CURRENT_SOURCE_DIR}/engines/gna/gna_api_stub.cpp")
@ -55,6 +61,15 @@ endif()
if (ENABLE_MYRIAD) if (ENABLE_MYRIAD)
include(${XLINK_DIR}/XLink.cmake) include(${XLINK_DIR}/XLink.cmake)
if(SUGGEST_OVERRIDE_SUPPORTED)
set_source_files_properties(engines/vpu/myriad_tests/helpers/myriad_test_case.cpp
engines/vpu/mvnc/watchdog_tests.cpp
engines/vpu/sw_conv_adaptation.cpp
engines/vpu/myriad_tests/myriad_engine_tests.cpp
engines/vpu/myriad_tests/myriad_metrics_tests.cpp
PROPERTIES COMPILE_OPTIONS -Wno-suggest-override)
endif()
file(GLOB file(GLOB
VPU_TESTS VPU_TESTS
engines/vpu/*cpp engines/vpu/*cpp

View File

@ -232,7 +232,7 @@ protected:
const int count = 10; const int count = 10;
std::vector<int> vec; std::vector<int> vec;
virtual void SetUp() override { void SetUp() override {
for (int i = 0; i < count; ++i) { for (int i = 0; i < count; ++i) {
vec.push_back(i); vec.push_back(i);
} }

View File

@ -20,12 +20,6 @@ InferenceEngine::Precision defaultPrecision{InferenceEngine::Precision::FP32};
class LayersTests : public ::testing::Test { class LayersTests : public ::testing::Test {
public: public:
virtual void TearDown() {
}
virtual void SetUp() {
}
static InferenceEngine::LayerParams getParamsForLayer(std::string name, std::string type, static InferenceEngine::LayerParams getParamsForLayer(std::string name, std::string type,
InferenceEngine::Precision precision) { InferenceEngine::Precision precision) {
InferenceEngine::LayerParams params = {}; InferenceEngine::LayerParams params = {};

View File

@ -6,14 +6,7 @@ if(ENABLE_MYRIAD)
add_subdirectory(movidius) add_subdirectory(movidius)
endif() endif()
if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") if((CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") AND (MSVC_VERSION VERSION_GREATER_EQUAL "1910"))
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=unknown-warning-option -Wno-error=inconsistent-missing-override -Wno-error=pass-failed")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-error=unknown-warning-option -Wno-error=inconsistent-missing-override -Wno-error=pass-failed")
elseif(CMAKE_COMPILER_IS_GNUCC AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 9.1)
# On g++ 9.3.0 (Ubuntu 20.04) the ADE library raises "redundant-move" warnings
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=redundant-move")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-error=redundant-move")
elseif((CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") AND (MSVC_VERSION VERSION_GREATER_EQUAL "1910"))
# 1910 version of Visual Studio 2017 # 1910 version of Visual Studio 2017
# This flagis needed for enabling SIMD vectorization with command '#pragma omp simd'. # This flagis needed for enabling SIMD vectorization with command '#pragma omp simd'.
# Compilation with '/openmp:experimental' key allow us to enable vectorizatikon capability in MSVC. # Compilation with '/openmp:experimental' key allow us to enable vectorizatikon capability in MSVC.
@ -30,19 +23,12 @@ if (ENABLE_CLDNN)
else() else()
set(CLDNN__INCLUDE_TESTS OFF CACHE BOOL "" FORCE) set(CLDNN__INCLUDE_TESTS OFF CACHE BOOL "" FORCE)
endif() endif()
if (WIN32)
set(CLDNN__ARCHITECTURE_TARGET "Windows64" CACHE STRING "" FORCE)
elseif (ANDROID)
set(CLDNN__ARCHITECTURE_TARGET "Android64" CACHE STRING "" FORCE)
else()
set(CLDNN__ARCHITECTURE_TARGET "Linux64" CACHE STRING "" FORCE)
endif()
set(CLDNN_THREADING "${THREADING}" CACHE STRING "" FORCE) set(CLDNN_THREADING "${THREADING}" CACHE STRING "" FORCE)
set(GPU_DEBUG_CONFIG OFF CACHE BOOL "Enable debug config feature") set(GPU_DEBUG_CONFIG OFF CACHE BOOL "Enable debug config feature")
add_subdirectory(clDNN) add_subdirectory(clDNN)
endif() endif()
if(ENABLE_MKL_DNN) function(ie_add_mkldnn)
set(DNNL_ENABLE_CONCURRENT_EXEC ON CACHE BOOL "" FORCE) set(DNNL_ENABLE_CONCURRENT_EXEC ON CACHE BOOL "" FORCE)
set(DNNL_ENABLE_PRIMITIVE_CACHE OFF CACHE BOOL "" FORCE) ## TODO: try it later set(DNNL_ENABLE_PRIMITIVE_CACHE OFF CACHE BOOL "" FORCE) ## TODO: try it later
set(DNNL_ENABLE_MAX_CPU_ISA OFF CACHE BOOL "" FORCE) ## TODO: try it later set(DNNL_ENABLE_MAX_CPU_ISA OFF CACHE BOOL "" FORCE) ## TODO: try it later
@ -56,6 +42,18 @@ if(ENABLE_MKL_DNN)
set(OpenMP_cmake_included ON) ## to skip "omp simd" inside a code. Lead to some crashes inside NDK LLVM.. set(OpenMP_cmake_included ON) ## to skip "omp simd" inside a code. Lead to some crashes inside NDK LLVM..
endif() endif()
if(CMAKE_COMPILER_IS_GNUCXX)
ie_add_compiler_flags(-Wno-undef)
if(SUGGEST_OVERRIDE_SUPPORTED)
# xbyak compilation fails
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-suggest-override")
endif()
endif()
add_subdirectory(mkl-dnn EXCLUDE_FROM_ALL) add_subdirectory(mkl-dnn EXCLUDE_FROM_ALL)
add_library(mkldnn ALIAS dnnl) add_library(mkldnn ALIAS dnnl)
endfunction()
if(ENABLE_MKL_DNN)
ie_add_mkldnn()
endif() endif()

View File

@ -2,12 +2,6 @@
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #
cmake_minimum_required (VERSION 3.1)
# ======================================================================================================
# ======================================================================================================
# ======================================================================================================
# Name of project (helper constant variable). # Name of project (helper constant variable).
set(CLDNN__PROJ_NAME "clDNN") set(CLDNN__PROJ_NAME "clDNN")
@ -58,18 +52,6 @@ set(CLDNN__CODEGEN_INCDIR "${CLDNN__CODEGEN_DIR}/include")
# ============================================ CMAKE OPTIONS =========================================== # ============================================ CMAKE OPTIONS ===========================================
# ====================================================================================================== # ======================================================================================================
# Include and build: Core of clDNN framework.
set(CLDNN__INCLUDE_CORE ON CACHE BOOL "Include and build: clDNN core.")
mark_as_advanced(CLDNN__INCLUDE_CORE)
# ======================================================================================================
# Include and build: Kernel selector for clDNN framework.
set(CLDNN__INCLUDE_KERNEL_SELECTOR ON CACHE BOOL "Include and build: clDNN kernel selector.")
mark_as_advanced(CLDNN__INCLUDE_KERNEL_SELECTOR)
# ======================================================================================================
# Include and build: Tests (unit tests and small acceptance tests) for clDNN framework. # Include and build: Tests (unit tests and small acceptance tests) for clDNN framework.
set(CLDNN__INCLUDE_TESTS ON CACHE BOOL "Include and build: clDNN framework's tests.") set(CLDNN__INCLUDE_TESTS ON CACHE BOOL "Include and build: clDNN framework's tests.")
mark_as_advanced(CLDNN__INCLUDE_TESTS) mark_as_advanced(CLDNN__INCLUDE_TESTS)
@ -96,10 +78,6 @@ set(CLDNN_UTILS__RAPIDJSON_INCDIRS "utils/rapidjson" CACHE INTERNAL "Paths to in
set(CLDNN_BUILD__PROJ__clDNN "clDNN_lib") set(CLDNN_BUILD__PROJ__clDNN "clDNN_lib")
set(CLDNN_BUILD__PROJ_LABEL__clDNN "clDNN") set(CLDNN_BUILD__PROJ_LABEL__clDNN "clDNN")
# ================================================ Outputs =============================================
set(CLDNN_BUILD__PROJ_OUTPUT_NAME__clDNN "clDNN${CLDNN__OUT_CPU_SUFFIX}")
# ===================================== Include/Link directories ======================================= # ===================================== Include/Link directories =======================================
include_directories( include_directories(
@ -109,13 +87,11 @@ include_directories(
) )
# =================================== Link targets and dependencies ==================================== # =================================== Link targets and dependencies ====================================
if(CLDNN__INCLUDE_CORE) add_subdirectory(src)
add_subdirectory(src) add_subdirectory(runtime)
add_subdirectory(runtime)
endif()
if(CLDNN__INCLUDE_TESTS) if(CLDNN__INCLUDE_TESTS)
add_subdirectory(tests) add_subdirectory(tests)
endif() endif()
if(CLDNN__INCLUDE_KERNEL_SELECTOR)
add_subdirectory(kernel_selector) add_subdirectory(kernel_selector)
endif()

View File

@ -6,7 +6,6 @@
set(CLDNN_BUILD__PROJ "cldnn_kernel_selector") set(CLDNN_BUILD__PROJ "cldnn_kernel_selector")
set(CLDNN_BUILD__PROJ_LABEL "${CLDNN_BUILD__PROJ}") set(CLDNN_BUILD__PROJ_LABEL "${CLDNN_BUILD__PROJ}")
set(CLDNN_BUILD__PROJ_OUTPUT_NAME "${CLDNN_BUILD__PROJ}${CLDNN__OUT_CPU_SUFFIX}")
# ========================================= Source/Header files ======================================== # ========================================= Source/Header files ========================================
@ -125,7 +124,6 @@ add_library("${CLDNN_BUILD__PROJ}" STATIC
) )
set_property(TARGET "${CLDNN_BUILD__PROJ}" PROPERTY PROJECT_LABEL "${CLDNN_BUILD__PROJ_LABEL}") set_property(TARGET "${CLDNN_BUILD__PROJ}" PROPERTY PROJECT_LABEL "${CLDNN_BUILD__PROJ_LABEL}")
set_property(TARGET "${CLDNN_BUILD__PROJ}" PROPERTY OUTPUT_NAME "${CLDNN_BUILD__PROJ_OUTPUT_NAME}")
if(COMMAND add_cpplint_target) if(COMMAND add_cpplint_target)
add_cpplint_target("${CLDNN_BUILD__PROJ}_cpplint" FOR_TARGETS "${CLDNN_BUILD__PROJ}") add_cpplint_target("${CLDNN_BUILD__PROJ}_cpplint" FOR_TARGETS "${CLDNN_BUILD__PROJ}")

View File

@ -15,7 +15,7 @@ struct activation_params : public base_params {
MultiDataTensor inputActivationParams; MultiDataTensor inputActivationParams;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
auto k = base_params::GetParamsKey(); auto k = base_params::GetParamsKey();
if (!inputActivationParams.empty()) { if (!inputActivationParams.empty()) {
k.EnableActivationAdditionalParamsAsInput(); k.EnableActivationAdditionalParamsAsInput();

View File

@ -21,7 +21,7 @@ struct arg_max_min_params : public base_params {
uint32_t outputs_num = 1; uint32_t outputs_num = 1;
bool values_first = false; bool values_first = false;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
ParamsKey k = base_params::GetParamsKey(); ParamsKey k = base_params::GetParamsKey();
k.EnableArgMaxMinAxis(argMaxMinAxis); k.EnableArgMaxMinAxis(argMaxMinAxis);

View File

@ -16,10 +16,6 @@ struct average_unpooling_params : public base_params {
uSize unpoolSize; uSize unpoolSize;
uSize unpoolStride; uSize unpoolStride;
virtual ParamsKey GetParamsKey() const {
return base_params::GetParamsKey();
}
}; };
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@ -17,7 +17,6 @@ struct batch_to_space_params : public base_params {
DimTensor<uint32_t> block_shape; DimTensor<uint32_t> block_shape;
DimTensor<uint32_t> crops_begin; DimTensor<uint32_t> crops_begin;
DimTensor<uint32_t> crops_end; DimTensor<uint32_t> crops_end;
virtual ParamsKey GetParamsKey() const { return base_params::GetParamsKey(); }
}; };
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@ -42,7 +41,7 @@ public:
struct DispatchData : public CommonDispatchData {}; struct DispatchData : public CommonDispatchData {};
protected: protected:
virtual bool Validate(const Params&, const optional_params&) const; bool Validate(const Params&, const optional_params&) const override;
virtual JitConstants GetJitConstants(const batch_to_space_params& params) const; virtual JitConstants GetJitConstants(const batch_to_space_params& params) const;
virtual CommonDispatchData SetDefault(const batch_to_space_params& params, const optional_params&) const; virtual CommonDispatchData SetDefault(const batch_to_space_params& params, const optional_params&) const;
KernelsData GetCommonKernelsData(const Params& params, const optional_params&) const; KernelsData GetCommonKernelsData(const Params& params, const optional_params&) const;

View File

@ -18,7 +18,7 @@ struct concatenation_params : public base_params {
bool isAligned = true; bool isAligned = true;
size_t misalignment = 0; size_t misalignment = 0;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
auto k = base_params::GetParamsKey(); auto k = base_params::GetParamsKey();
k.EnableConcatAxis(axis); k.EnableConcatAxis(axis);
return k; return k;
@ -32,7 +32,7 @@ struct concatenation_optional_params : optional_params {
concatenation_optional_params() : optional_params(KernelType::CONCATENATION) {} concatenation_optional_params() : optional_params(KernelType::CONCATENATION) {}
bool kernelPerInput = true; bool kernelPerInput = true;
virtual ParamsKey GetSupportedKey() const { ParamsKey GetSupportedKey() const override {
ParamsKey k = optional_params::GetSupportedKey(); ParamsKey k = optional_params::GetSupportedKey();
if (kernelPerInput) { if (kernelPerInput) {

View File

@ -17,8 +17,6 @@ struct cum_sum_params : public base_params {
CumSumAxis axis; CumSumAxis axis;
bool exclusive; bool exclusive;
bool reverse; bool reverse;
virtual ParamsKey GetParamsKey() const { return base_params::GetParamsKey(); }
}; };
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@ -18,8 +18,6 @@ struct depth_to_space_params : public base_params {
, mode(DepthToSpaceMode::DEPTH_FIRST) {} , mode(DepthToSpaceMode::DEPTH_FIRST) {}
size_t block_size; size_t block_size;
DepthToSpaceMode mode; DepthToSpaceMode mode;
virtual ParamsKey GetParamsKey() const { return base_params::GetParamsKey(); }
}; };
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@ -45,7 +43,7 @@ public:
}; };
protected: protected:
virtual bool Validate(const Params&, const optional_params&) const; bool Validate(const Params&, const optional_params&) const override;
virtual JitConstants GetJitConstants(const depth_to_space_params& params) const; virtual JitConstants GetJitConstants(const depth_to_space_params& params) const;
virtual CommonDispatchData SetDefault(const depth_to_space_params& params) const; virtual CommonDispatchData SetDefault(const depth_to_space_params& params) const;
KernelsData GetCommonKernelsData(const Params& params, const optional_params&) const; KernelsData GetCommonKernelsData(const Params& params, const optional_params&) const;

View File

@ -75,7 +75,7 @@ struct eltwise_params : public base_params {
bool int8_quantization = false; bool int8_quantization = false;
bool broadcast = false; bool broadcast = false;
virtual ParamsKey GetParamsKey() const; ParamsKey GetParamsKey() const override;
}; };
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@ -16,8 +16,6 @@ struct embedding_bag_params : public base_params {
EmbeddingBagType type; EmbeddingBagType type;
int32_t default_index; int32_t default_index;
virtual ParamsKey GetParamsKey() const { return base_params::GetParamsKey(); }
}; };
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@ -20,8 +20,6 @@ struct extract_image_patches_params : public base_params {
std::vector<unsigned int> strides; std::vector<unsigned int> strides;
std::vector<unsigned int> rates; std::vector<unsigned int> rates;
std::string auto_pad; std::string auto_pad;
virtual ParamsKey GetParamsKey() const { return base_params::GetParamsKey(); }
}; };
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@ -16,7 +16,7 @@ struct fully_connected_params : public weight_bias_params {
QuantizationType quantization = QuantizationType::NONE; QuantizationType quantization = QuantizationType::NONE;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
ParamsKey k = weight_bias_params::GetParamsKey(); ParamsKey k = weight_bias_params::GetParamsKey();
k.EnableQuantization(quantization); k.EnableQuantization(quantization);

View File

@ -16,7 +16,6 @@ struct gather_params : public base_params {
GatherAxis axis; GatherAxis axis;
int64_t batch_dim; int64_t batch_dim;
bool support_neg_ind; bool support_neg_ind;
virtual ParamsKey GetParamsKey() const { return base_params::GetParamsKey(); }
}; };
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@ -16,8 +16,6 @@ struct gather_nd_params : public base_params {
uint8_t indices_rank; uint8_t indices_rank;
uint8_t batch_dims; uint8_t batch_dims;
virtual ParamsKey GetParamsKey() const { return base_params::GetParamsKey(); }
}; };
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@ -33,7 +31,7 @@ public:
virtual ~GatherNDKernelRef() {} virtual ~GatherNDKernelRef() {}
virtual JitConstants GetJitConstants(const gather_nd_params& params) const; virtual JitConstants GetJitConstants(const gather_nd_params& params) const;
virtual CommonDispatchData SetDefault(const gather_nd_params& params, const optional_params&) const; virtual CommonDispatchData SetDefault(const gather_nd_params& params, const optional_params&) const;
KernelsData GetKernelsData(const Params& params, const optional_params& options) const; KernelsData GetKernelsData(const Params& params, const optional_params& options) const override;
ParamsKey GetSupportedKey() const override; ParamsKey GetSupportedKey() const override;
std::vector<FusedOpType> GetSupportedFusedOps() const override { std::vector<FusedOpType> GetSupportedFusedOps() const override {
return { FusedOpType::QUANTIZE, return { FusedOpType::QUANTIZE,

View File

@ -21,7 +21,7 @@ struct gemm_params : public base_params {
bool transpose_input1; bool transpose_input1;
QuantizationType quantization = QuantizationType::NONE; QuantizationType quantization = QuantizationType::NONE;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
ParamsKey k = base_params::GetParamsKey(); ParamsKey k = base_params::GetParamsKey();
k.EnableQuantization(quantization); k.EnableQuantization(quantization);
return k; return k;

View File

@ -21,7 +21,7 @@ struct lrn_params : public base_params {
float k = 0.f; float k = 0.f;
uint32_t localSize = 0; uint32_t localSize = 0;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
ParamsKey _k = base_params::GetParamsKey(); ParamsKey _k = base_params::GetParamsKey();
_k.EnableLRNMode(normMode); _k.EnableLRNMode(normMode);

View File

@ -13,8 +13,6 @@ namespace kernel_selector {
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
struct max_unpooling_params : public base_params { struct max_unpooling_params : public base_params {
max_unpooling_params() : base_params(KernelType::MAX_UNPOOLING) {} max_unpooling_params() : base_params(KernelType::MAX_UNPOOLING) {}
virtual ParamsKey GetParamsKey() const { return base_params::GetParamsKey(); }
}; };
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@ -20,7 +20,7 @@ struct mvn_params : public base_params {
float epsilon = 0.0f; float epsilon = 0.0f;
MVNEpsMode mvnEpsMode = MVNEpsMode::INSIDE_SQRT; MVNEpsMode mvnEpsMode = MVNEpsMode::INSIDE_SQRT;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
ParamsKey k = base_params::GetParamsKey(); ParamsKey k = base_params::GetParamsKey();
k.EnableMVNMode(mvnMode); k.EnableMVNMode(mvnMode);

View File

@ -18,7 +18,7 @@ struct normalize_params : public base_params {
float epsilon = 1e-10f; float epsilon = 1e-10f;
DataTensor scaleTable; DataTensor scaleTable;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
ParamsKey k = base_params::GetParamsKey(); ParamsKey k = base_params::GetParamsKey();
k.EnableNormalizeMode(normMode); k.EnableNormalizeMode(normMode);

View File

@ -15,7 +15,7 @@ public:
virtual ~PermuteKernelBase() {} virtual ~PermuteKernelBase() {}
bool Validate(const Params& p, const optional_params& o) const override; bool Validate(const Params& p, const optional_params& o) const override;
KernelsData GetKernelsData(const Params& params, const optional_params& options) const; KernelsData GetKernelsData(const Params& params, const optional_params& options) const override;
protected: protected:
virtual JitConstants GetJitConstants(const permute_params& params, const CommonDispatchData& dispatchData) const; virtual JitConstants GetJitConstants(const permute_params& params, const CommonDispatchData& dispatchData) const;
virtual CommonDispatchData SetDefault(const permute_params& params) const = 0; virtual CommonDispatchData SetDefault(const permute_params& params) const = 0;

View File

@ -19,7 +19,7 @@ public:
virtual ~PermuteKernelRef() {} virtual ~PermuteKernelRef() {}
bool Validate(const Params& p, const optional_params& o) const override; bool Validate(const Params& p, const optional_params& o) const override;
KernelsPriority GetKernelsPriority(const Params& params, const optional_params& options) const; KernelsPriority GetKernelsPriority(const Params& params, const optional_params& options) const override;
ParamsKey GetSupportedKey() const override; ParamsKey GetSupportedKey() const override;
protected: protected:

View File

@ -18,11 +18,11 @@ public:
virtual ~PermuteKernel_tile_8x8_4x4() {} virtual ~PermuteKernel_tile_8x8_4x4() {}
bool Validate(const Params& p, const optional_params& o) const override; bool Validate(const Params& p, const optional_params& o) const override;
KernelsPriority GetKernelsPriority(const Params& params, const optional_params& options) const; KernelsPriority GetKernelsPriority(const Params& params, const optional_params& options) const override;
ParamsKey GetSupportedKey() const override; ParamsKey GetSupportedKey() const override;
protected: protected:
JitConstants GetJitConstants(const permute_params& params, const CommonDispatchData& dispatchData) const; JitConstants GetJitConstants(const permute_params& params, const CommonDispatchData& dispatchData) const override;
CommonDispatchData SetDefault(const permute_params& params) const; CommonDispatchData SetDefault(const permute_params& params) const override;
std::vector<FusedOpType> GetSupportedFusedOps() const override { std::vector<FusedOpType> GetSupportedFusedOps() const override {
return { return {
FusedOpType::ACTIVATION, FusedOpType::ACTIVATION,

View File

@ -18,11 +18,11 @@ public:
virtual ~PermuteKernel_tile_8x8_4x4_fsv() {} virtual ~PermuteKernel_tile_8x8_4x4_fsv() {}
bool Validate(const Params& p, const optional_params& o) const override; bool Validate(const Params& p, const optional_params& o) const override;
KernelsPriority GetKernelsPriority(const Params& params, const optional_params& options) const; KernelsPriority GetKernelsPriority(const Params& params, const optional_params& options) const override;
ParamsKey GetSupportedKey() const override; ParamsKey GetSupportedKey() const override;
protected: protected:
JitConstants GetJitConstants(const permute_params& params, const CommonDispatchData& dispatchData) const; JitConstants GetJitConstants(const permute_params& params, const CommonDispatchData& dispatchData) const override;
CommonDispatchData SetDefault(const permute_params& params) const; CommonDispatchData SetDefault(const permute_params& params) const override;
std::vector<FusedOpType> GetSupportedFusedOps() const override { std::vector<FusedOpType> GetSupportedFusedOps() const override {
return { return {
FusedOpType::ACTIVATION, FusedOpType::ACTIVATION,

View File

@ -15,7 +15,6 @@ struct permute_params : public base_params {
permute_params() : base_params(KernelType::PERMUTE) {} permute_params() : base_params(KernelType::PERMUTE) {}
std::vector<uint16_t> order; std::vector<uint16_t> order;
virtual ParamsKey GetParamsKey() const { return base_params::GetParamsKey(); }
}; };
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@ -22,7 +22,7 @@ struct pooling_params : public base_params {
uSize poolStride; uSize poolStride;
uSize poolPad; uSize poolPad;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
ParamsKey k = base_params::GetParamsKey(); ParamsKey k = base_params::GetParamsKey();
k.EnablePoolType(poolType); k.EnablePoolType(poolType);

View File

@ -53,7 +53,7 @@ struct quantize_params : public base_params {
float out_scale; float out_scale;
float out_shift; float out_shift;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
auto k = base_params::GetParamsKey(); auto k = base_params::GetParamsKey();
if (packed_binary_output) if (packed_binary_output)
k.EnableQuantizePackedBinaryOutput(); k.EnableQuantizePackedBinaryOutput();

View File

@ -17,8 +17,6 @@ struct reduce_params : public base_params {
ReduceMode reduceMode; ReduceMode reduceMode;
std::vector<uint16_t> reduceAxes; std::vector<uint16_t> reduceAxes;
int32_t keepDims; int32_t keepDims;
virtual ParamsKey GetParamsKey() const { return base_params::GetParamsKey(); }
}; };
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@ -21,7 +21,7 @@ struct region_yolo_params : public base_params {
uint32_t mask_size; uint32_t mask_size;
bool do_softmax; bool do_softmax;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
auto k = base_params::GetParamsKey(); auto k = base_params::GetParamsKey();
return k; return k;
} }

View File

@ -16,7 +16,7 @@ public:
KernelsPriority GetKernelsPriority(const Params& params, const optional_params& options) const override; KernelsPriority GetKernelsPriority(const Params& params, const optional_params& options) const override;
ParamsKey GetSupportedKey() const override; ParamsKey GetSupportedKey() const override;
protected: protected:
JitConstants GetJitConstants(const reorder_params& params) const; JitConstants GetJitConstants(const reorder_params& params) const override;
CommonDispatchData SetDefault(const reorder_params& params) const; CommonDispatchData SetDefault(const reorder_params& params) const override;
}; };
} // namespace kernel_selector } // namespace kernel_selector

View File

@ -26,7 +26,7 @@ struct reorder_params : public base_params {
bool winograd = false; bool winograd = false;
bool has_padded_output = false; bool has_padded_output = false;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
auto k = base_params::GetParamsKey(); auto k = base_params::GetParamsKey();
if (winograd) { if (winograd) {
@ -54,7 +54,7 @@ struct reorder_weights_params : public Params {
bool winograd = false; bool winograd = false;
bool rotate_180 = false; bool rotate_180 = false;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
ParamsKey k; ParamsKey k;
k.EnableInputWeightsType(input.GetDType()); k.EnableInputWeightsType(input.GetDType());
k.EnableOutputWeightsType(output.GetDType()); k.EnableOutputWeightsType(output.GetDType());
@ -95,7 +95,7 @@ protected:
virtual JitConstants GetJitConstants(const reorder_params& params) const; virtual JitConstants GetJitConstants(const reorder_params& params) const;
virtual DispatchData SetDefault(const reorder_weights_params& params) const; virtual DispatchData SetDefault(const reorder_weights_params& params) const;
virtual DispatchData SetDefault(const reorder_params& params) const; virtual DispatchData SetDefault(const reorder_params& params) const;
virtual bool Validate(const Params&, const optional_params&) const { return true; } bool Validate(const Params&, const optional_params&) const override { return true; }
KernelsData GetCommonKernelsData(const reorder_weights_params& params, KernelsData GetCommonKernelsData(const reorder_weights_params& params,
const optional_params&) const; const optional_params&) const;
KernelsData GetCommonKernelsData(const reorder_params& params, const optional_params&) const; KernelsData GetCommonKernelsData(const reorder_params& params, const optional_params&) const;

View File

@ -16,7 +16,7 @@ public:
KernelsPriority GetKernelsPriority(const Params& params, const optional_params& options) const override; KernelsPriority GetKernelsPriority(const Params& params, const optional_params& options) const override;
ParamsKey GetSupportedKey() const override; ParamsKey GetSupportedKey() const override;
protected: protected:
JitConstants GetJitConstants(const reorder_params& params) const; JitConstants GetJitConstants(const reorder_params& params) const override;
CommonDispatchData SetDefault(const reorder_params& params) const; CommonDispatchData SetDefault(const reorder_params& params) const override;
}; };
} // namespace kernel_selector } // namespace kernel_selector

View File

@ -16,7 +16,7 @@ struct reorg_yolo_params : public base_params {
uint32_t stride; uint32_t stride;
virtual ParamsKey GetParamsKey() const { ParamsKey GetParamsKey() const override {
auto k = base_params::GetParamsKey(); auto k = base_params::GetParamsKey();
return k; return k;
} }

Some files were not shown because too many files have changed in this diff Show More