Removed CPU GEMM cmake stuff (#5642)

* Removed CPU GEMM cmake stuff

* Fixed code style
Ilya Lavrenov 2021-05-14 21:27:12 +03:00 committed by GitHub
parent c67c2f4691
commit c500f0a783
10 changed files with 18 additions and 112 deletions

View File

@@ -38,62 +38,6 @@ if (ENABLE_MYRIAD)
     include(cmake/vpu_dependencies.cmake)
 endif()
 
-## enable cblas_gemm from OpenBLAS package
-if (ENABLE_MKL_DNN AND GEMM STREQUAL "OPENBLAS")
-    if(AARCH64)
-        if(DEFINED ENV{THIRDPARTY_SERVER_PATH})
-            set(IE_PATH_TO_DEPS "$ENV{THIRDPARTY_SERVER_PATH}")
-        elseif(DEFINED THIRDPARTY_SERVER_PATH)
-            set(IE_PATH_TO_DEPS "${THIRDPARTY_SERVER_PATH}")
-        else()
-            message(WARNING "OpenBLAS is not found!")
-        endif()
-
-        if(DEFINED IE_PATH_TO_DEPS)
-            reset_deps_cache(OpenBLAS_DIR)
-
-            RESOLVE_DEPENDENCY(OpenBLAS
-                    ARCHIVE_LIN "keembay/openblas_0.3.7_yocto_kmb.tar.xz"
-                    TARGET_PATH "${TEMP}/openblas_0.3.7_yocto_kmb"
-                    ENVIRONMENT "OpenBLAS_DIR"
-                    SHA256 "c75aac901d5297d6d60a4b1f941f0335d8fd7f52e0dff8c445f644e2e45e6fba")
-
-            update_deps_cache(OpenBLAS_DIR "${OpenBLAS}/lib/cmake/openblas" "Path to OpenBLAS package folder")
-
-            find_package(OpenBLAS QUIET)
-            if(OpenBLAS_FOUND)
-                set(BLAS_FOUND TRUE)
-                set(BLAS_INCLUDE_DIRS ${OpenBLAS_INCLUDE_DIRS})
-                set(BLAS_LIBRARIES ${OpenBLAS_LIBRARIES})
-            endif()
-
-            unset(IE_PATH_TO_DEPS)
-        endif()
-    endif()
-
-    if(NOT BLAS_LIBRARIES OR NOT BLAS_INCLUDE_DIRS)
-        find_package(BLAS REQUIRED)
-        if(BLAS_FOUND)
-            find_path(BLAS_INCLUDE_DIRS cblas.h)
-        else()
-            message(ERROR "OpenBLAS not found: install OpenBLAS or set -DBLAS_INCLUDE_DIRS=<path to dir with cblas.h> and -DBLAS_LIBRARIES=<path to libopenblas.so or openblas.lib>")
-        endif()
-    endif()
-    debug_message(STATUS "openblas=" ${BLAS_LIBRARIES})
-endif ()
-
-## MKL-ML package
-if (GEMM STREQUAL "MKL")
-    if(NOT MKLROOT)
-        message(FATAL_ERROR "MKLROOT not found: install MKL and set -DMKLROOT=<path_to_MKL>")
-    endif()
-    set(MKL ${MKLROOT})
-    debug_message(STATUS "mkl_ml=" ${MKLROOT})
-endif ()
-
 ## Intel OMP package
 if (THREADING STREQUAL "OMP")
     reset_deps_cache(OMP)
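
With this block gone, nothing in the build resolves OpenBLAS automatically any more, so an AARCH64 build that wants a BLAS-backed GEMM has to be pointed at one explicitly. A minimal sketch of doing that with CMake's stock FindBLAS module follows; the BLAS_LIBRARIES/BLAS_INCLUDE_DIRS names match the removed code, everything else is illustrative and not part of this commit:

```cmake
# Illustrative only, not part of this commit: locate BLAS manually now that
# the auto-download above is gone.
cmake_minimum_required(VERSION 3.13)
project(blas_probe LANGUAGES C)

find_package(BLAS REQUIRED)           # stock CMake FindBLAS module
find_path(BLAS_INCLUDE_DIRS cblas.h)  # FindBLAS does not locate headers
if(NOT BLAS_INCLUDE_DIRS)
    message(FATAL_ERROR "cblas.h not found; pass -DBLAS_INCLUDE_DIRS=<dir with cblas.h>")
endif()
message(STATUS "Using BLAS: ${BLAS_LIBRARIES}")
```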

View File

@@ -8,23 +8,6 @@ ie_dependent_option (ENABLE_GNA "GNA support for inference engine" ON "NOT APPLE
 ie_dependent_option (ENABLE_CLDNN_TESTS "Enable clDNN unit tests" OFF "ENABLE_CLDNN" OFF)
 
-# "MKL-DNN library might use MKL-ML or OpenBLAS for gemm tasks: MKL|OPENBLAS|JIT"
-if (ENABLE_MKL_DNN)
-    if(AARCH64)
-        set(GEMM_DEFAULT "OPENBLAS")
-    else()
-        set(GEMM_DEFAULT "JIT")
-    endif()
-    set(GEMM "${GEMM_DEFAULT}" CACHE STRING "GEMM implementation")
-    set_property(CACHE GEMM PROPERTY STRINGS "MKL" "OPENBLAS" "JIT")
-    list (APPEND IE_OPTIONS GEMM)
-    if (NOT GEMM STREQUAL "MKL" AND
-        NOT GEMM STREQUAL "OPENBLAS" AND
-        NOT GEMM STREQUAL "JIT")
-        message(FATAL_ERROR "GEMM should be set to MKL, OPENBLAS or JIT. Default option is ${GEMM_DEFAULT}")
-    endif()
-endif()
-
 # "MKL-DNN library based on OMP or TBB or Sequential implementation: TBB|OMP|SEQ"
 if(X86 OR ARM OR (MSVC AND (ARM OR AARCH64)) )
     set(THREADING_DEFAULT "SEQ")
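
The removed GEMM switch was an instance of the validated cached-option pattern that the surviving THREADING option (context above) still follows. For reference, a generic sketch of that pattern, with an illustrative option name rather than any real OpenVINO variable:

```cmake
# Generic validated cache option; MY_BACKEND and its values are placeholders.
set(MY_BACKEND_DEFAULT "JIT")
set(MY_BACKEND "${MY_BACKEND_DEFAULT}" CACHE STRING "Backend implementation")
set_property(CACHE MY_BACKEND PROPERTY STRINGS "MKL" "OPENBLAS" "JIT")  # cmake-gui choices
if(NOT MY_BACKEND MATCHES "^(MKL|OPENBLAS|JIT)$")
    message(FATAL_ERROR "MY_BACKEND must be MKL, OPENBLAS or JIT (default: ${MY_BACKEND_DEFAULT})")
endif()
```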

View File

@@ -16,25 +16,16 @@ if (ENABLE_CPU_DEBUG_CAPS)
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DCPU_DEBUG_CAPS")
 endif()
 
-file(GLOB_RECURSE SOURCES
-        ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
+file(GLOB_RECURSE SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
+file(GLOB_RECURSE HEADERS ${CMAKE_CURRENT_SOURCE_DIR}/*.h
+                          ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)
 
-file(GLOB HEADERS
-        ${CMAKE_CURRENT_SOURCE_DIR}/*.h
-        ${CMAKE_CURRENT_SOURCE_DIR}/*.hpp)
 
-addVersionDefines(mkldnn_plugin.cpp CI_BUILD_NUMBER MKL_VERSION)
+addVersionDefines(mkldnn_plugin.cpp CI_BUILD_NUMBER)
 
 include_directories(
         $<TARGET_PROPERTY:inference_engine_plugin_api,INTERFACE_INCLUDE_DIRECTORIES>
-        ${CMAKE_CURRENT_SOURCE_DIR}/mkldnn
-        ${CMAKE_CURRENT_SOURCE_DIR}
-        ${CMAKE_BINARY_DIR}/include
-)
-
-if (GEMM STREQUAL "MKL")
-    log_rpath_from_dir(MKL "${MKL}/lib")
-endif()
+        ${CMAKE_CURRENT_SOURCE_DIR})
# create plugin
@@ -99,17 +90,3 @@ set_target_properties(${TARGET_NAME}_obj PROPERTIES EXCLUDE_FROM_ALL ON)
 set_target_properties(${TARGET_NAME} ${TARGET_NAME}_obj
                       PROPERTIES INTERPROCEDURAL_OPTIMIZATION_RELEASE ${ENABLE_LTO})
 
 # install
-
-if(GEMM STREQUAL "MKL")
-    install(DIRECTORY "${MKL}/include"
-            DESTINATION ${IE_CPACK_IE_DIR}/external/mkltiny_lnx
-            COMPONENT cpu)
-    install(FILES "${MKLLIB}"
-            DESTINATION ${IE_CPACK_IE_DIR}/external/mkltiny_lnx/lib
-            COMPONENT cpu)
-    install(FILES "${MKL}/version.info"
-            DESTINATION ${IE_CPACK_IE_DIR}/external/mkltiny_lnx
-            COMPONENT cpu)
-endif()
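
The deleted rules used CMake's component-based install, which is what lets CPack package a plugin's runtime dependencies separately from the rest of the product. A generic sketch of that mechanism; the directory names and dependency variables below are placeholders, not OpenVINO APIs:

```cmake
# Placeholder names throughout; shows the component-install shape only.
install(DIRECTORY "${SOME_DEP_ROOT}/include"
        DESTINATION "external/some_dep"
        COMPONENT cpu)
install(FILES "${SOME_DEP_LIBRARY}"
        DESTINATION "external/some_dep/lib"
        COMPONENT cpu)
# CPack can then emit the "cpu" component as its own package.
```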

View File

@@ -13,9 +13,9 @@ public:
     jit_emu_vcvtneps2bf16(mkldnn::impl::cpu::x64::jit_generator* host, mkldnn::impl::cpu::x64::cpu_isa_t host_isa, const MKLDNNNode* node,
                           InferenceEngine::Precision exec_prc = InferenceEngine::Precision::BF16) : jit_emitter(host, host_isa, node, exec_prc) {
         prepare_table();
-    };
+    }
 
-    size_t get_inputs_num() const override { return 1; };
+    size_t get_inputs_num() const override { return 1; }
 
 private:
     void emit_impl(const std::vector<size_t>& in_vec_idxs, const std::vector<size_t>& out_vec_idxs,
@@ -71,4 +71,4 @@ private:
     size_t aux_vecs_count() const override { return 2; }
 };
 
-} // namespace MKLDNNPlugin {
+} // namespace MKLDNNPlugin

View File

@@ -190,9 +190,11 @@ private:
 class jit_squared_difference_emitter : public jit_emitter {
 public:
-    jit_squared_difference_emitter(mkldnn::impl::cpu::x64::jit_generator *host, mkldnn::impl::cpu::x64::cpu_isa_t host_isa, const MKLDNNNode* node,
+    jit_squared_difference_emitter(mkldnn::impl::cpu::x64::jit_generator *host, mkldnn::impl::cpu::x64::cpu_isa_t host_isa,
+                                   const MKLDNNNode* node,
                                    InferenceEngine::Precision exec_prc = InferenceEngine::Precision::FP32);
 
-    jit_squared_difference_emitter(mkldnn::impl::cpu::x64::jit_generator *host, mkldnn::impl::cpu::x64::cpu_isa_t host_isa, const std::shared_ptr<ngraph::Node>& n,
+    jit_squared_difference_emitter(mkldnn::impl::cpu::x64::jit_generator *host, mkldnn::impl::cpu::x64::cpu_isa_t host_isa,
+                                   const std::shared_ptr<ngraph::Node>& n,
                                    InferenceEngine::Precision exec_prc = InferenceEngine::Precision::FP32);
 
     size_t get_inputs_num() const override;

View File

@@ -23,6 +23,7 @@ public:
     void emit_impl(const std::vector<size_t> &in_idxs, const std::vector<size_t> &out_idxs,
                    const std::vector<size_t> &pool_vec_idxs, const std::vector<size_t> &pool_gpr_idxs,
                    const emitter_context *emit_context = nullptr) const override {};
 
+protected:
     jit_mkldnn_emitter(mkldnn::impl::cpu::x64::jit_generator *host, mkldnn::impl::cpu::x64::cpu_isa_t host_isa, const MKLDNNNode* node,
                       InferenceEngine::Precision exec_prc = InferenceEngine::Precision::FP32);

@@ -34,7 +35,6 @@ protected:
     float alpha {0.f};
     float beta {0.f};
 
-protected:
     std::shared_ptr<mkldnn::impl::cpu::x64::jit_uni_eltwise_injector_f32<mkldnn::impl::cpu::x64::sse41>> eltwise_injector_sse42;
     std::shared_ptr<mkldnn::impl::cpu::x64::jit_uni_eltwise_injector_f32<mkldnn::impl::cpu::x64::avx2>> eltwise_injector_avx2;
     std::shared_ptr<mkldnn::impl::cpu::x64::jit_uni_eltwise_injector_f32<mkldnn::impl::cpu::x64::avx512_common>> eltwise_injector_avx512_common;

View File

@@ -59,7 +59,7 @@ protected:
 private:
     void addZeroPoints(mkldnn::primitive_attr& attr) const;
-    void setPostOps(mkldnn::primitive_attr &attr, bool initWeights) const ;
+    void setPostOps(mkldnn::primitive_attr &attr, bool initWeights) const;
     void filterSupportedDescriptors();
     bool isPossibleToSkipInitConfig(MKLDNNDescriptor &desc) const;

View File

@@ -86,11 +86,11 @@ public:
     inline bool getAcrossChannels() const {
         return acrossChannels_;
-    };
+    }
 
     inline bool getNormalizeVariance() const {
         return normalizeVariance_;
-    };
+    }
 
     bool canFuse(const MKLDNNNodePtr& node) const override;

View File

@@ -29,7 +29,7 @@ private:
     void rollImpl();
 
     std::vector<size_t> shape;
-    const static std::vector<size_t> supportedPrecisionSizes;
+    static const std::vector<size_t> supportedPrecisionSizes;
     std::string layerErrorPrefix;
     size_t numOfDims;

View File

@@ -18,7 +18,7 @@ inline std::string getRTInfoValue(const std::map<std::string, std::shared_ptr<ng
     } else {
         return "";
     }
-};
+}
 
 inline std::string getPrimitivesPriorityValue(const std::shared_ptr<ngraph::Node> &node) {
     const auto &rtInfo = node->get_rt_info();