Merge remote-tracking branch 'upstream/master' into itikhono/extension/json_config
commit 153bcc9f54
@@ -162,7 +162,7 @@ jobs:
$(REPO_DIR)
workingDirectory: $(BUILD_DIR)

- - script: ls -alR $(BUILD_DIR)/temp/
+ - script: ls -alR $(REPO_DIR)/temp/
displayName: 'List temp SDKs'

- script: ccache --zero-stats --max-size=1T --show-config

@@ -207,7 +207,7 @@ jobs:
set -e
mkdir $(INSTALL_DIR)/opencv/
cmake -DCMAKE_INSTALL_PREFIX=$(INSTALL_DIR) -DCOMPONENT=tests -P cmake_install.cmake
- cp -R $(BUILD_DIR)/temp/opencv_4.5.2_ubuntu20/opencv/* $(INSTALL_DIR)/opencv/
+ cp -R $(REPO_DIR)/temp/opencv_4.5.2_ubuntu20/opencv/* $(INSTALL_DIR)/opencv/
workingDirectory: $(BUILD_DIR)
displayName: 'Install tests'

@@ -232,7 +232,7 @@ jobs:
- script: |
export DATA_PATH=$(MODELS_PATH)
export MODELS_PATH=$(MODELS_PATH)
- . $(SETUPVARS) -pyver 3.8 && python3 -m pytest -s $(INSTALL_TEST_DIR)/pyngraph $(PYTHON_STATIC_ARGS) --junitxml=TEST-Pyngraph.xml --ignore=$(INSTALL_TEST_DIR)/pyngraph/tests/test_onnx/test_zoo_models.py --ignore=$(INSTALL_TEST_DIR)/pyngraph/tests/test_onnx/test_backend.py
+ . $(SETUPVARS) -pyver 3.8 && python3 -m pytest -s $(INSTALL_TEST_DIR)/pyngraph $(PYTHON_STATIC_ARGS) --junitxml=TEST-Pyngraph.xml --ignore=$(INSTALL_TEST_DIR)/pyngraph/tests/test_utils/test_utils.py --ignore=$(INSTALL_TEST_DIR)/pyngraph/tests/test_onnx/test_zoo_models.py --ignore=$(INSTALL_TEST_DIR)/pyngraph/tests/test_onnx/test_backend.py
displayName: 'nGraph Python Bindings Tests'
continueOnError: false

@@ -73,7 +73,7 @@ jobs:
$(REPO_DIR)
workingDirectory: $(BUILD_DIR)

- - script: ls -alR $(BUILD_DIR)/temp/
+ - script: ls -alR $(REPO_DIR)/temp/
displayName: 'List temp SDKs'

- script: |

@@ -126,7 +126,7 @@ jobs:
$(REPO_DIR)
workingDirectory: $(BUILD_DIR)

- - script: ls -alR $(BUILD_DIR)/temp/
+ - script: ls -alR $(REPO_DIR)/temp/
displayName: 'List temp SDKs'

- script: ninja

@@ -147,7 +147,7 @@ jobs:
set -e
mkdir $(INSTALL_DIR)/opencv/
cmake -DCMAKE_INSTALL_PREFIX=$(INSTALL_DIR) -DCOMPONENT=tests -P cmake_install.cmake
- cp -R $(BUILD_DIR)/temp/opencv_4.5.2_ubuntu20/opencv/* $(INSTALL_DIR)/opencv/
+ cp -R $(REPO_DIR)/temp/opencv_4.5.2_ubuntu20/opencv/* $(INSTALL_DIR)/opencv/
workingDirectory: $(BUILD_DIR)
displayName: 'Install tests'

@@ -1,6 +1,7 @@
trigger:
branches:
include:
- master
- releases/*
paths:
exclude:

@@ -100,7 +100,7 @@ jobs:
workingDirectory: $(BUILD_DIR)
displayName: 'CMake'

- - script: ls -alR $(BUILD_DIR)/temp/
+ - script: ls -alR $(REPO_DIR)/temp/
displayName: 'List temp SDKs'

- script: ninja

@@ -121,7 +121,7 @@ jobs:
set -e
mkdir $(INSTALL_DIR)/opencv/
cmake -DCMAKE_INSTALL_PREFIX=$(INSTALL_DIR) -DCOMPONENT=tests -P cmake_install.cmake
- cp -R $(BUILD_DIR)/temp/opencv_4.5.2_osx/opencv/* $(INSTALL_DIR)/opencv/
+ cp -R $(REPO_DIR)/temp/opencv_4.5.2_osx/opencv/* $(INSTALL_DIR)/opencv/
workingDirectory: $(BUILD_DIR)
displayName: 'Install tests'

@@ -136,7 +136,7 @@ jobs:
workingDirectory: $(BUILD_DIR)
displayName: 'CMake'

- - script: dir $(BUILD_DIR)\temp\ /s
+ - script: dir $(REPO_DIR)\temp\ /s
displayName: 'List temp SDKs'

- script: |

@@ -163,7 +163,7 @@ jobs:
workingDirectory: $(BUILD_SAMPLES_TESTS_DIR)
displayName: 'Install Samples Tests'

- - script: $(CMAKE_CMD) -DCMAKE_INSTALL_PREFIX=$(INSTALL_DIR) -DCOMPONENT=tests -P cmake_install.cmake && xcopy $(BUILD_DIR)\temp\opencv_4.5.2\opencv\* $(INSTALL_DIR)\opencv\ /e /h /y
+ - script: $(CMAKE_CMD) -DCMAKE_INSTALL_PREFIX=$(INSTALL_DIR) -DCOMPONENT=tests -P cmake_install.cmake && xcopy $(REPO_DIR)\temp\opencv_4.5.2\opencv\* $(INSTALL_DIR)\opencv\ /e /h /y
workingDirectory: $(BUILD_DIR)
displayName: 'Install tests'

@@ -70,7 +70,7 @@ jobs:
workingDirectory: $(BUILD_DIR)
displayName: 'CMake'

- - script: dir $(BUILD_DIR)\temp\ /s
+ - script: dir $(REPO_DIR)\temp\ /s
displayName: 'List temp SDKs'

- script: call "$(MSVS_VARS_PATH)" && $(WORK_DIR)\ninja-win\ninja
.gitmodules (4 changes, vendored)

@@ -59,9 +59,11 @@
[submodule "tools/pot/thirdparty/open_model_zoo"]
path = tools/pot/thirdparty/open_model_zoo
url = https://github.com/openvinotoolkit/open_model_zoo.git
- [submodule "thirdparty/nlohmann_json"]
+ [submodule "thirdparty/json/nlohmann_json"]
path = thirdparty/json/nlohmann_json
url = https://github.com/nlohmann/json.git
shallow = true
[submodule "thirdparty/json/nlohmann_json_schema_validator"]
path = thirdparty/json/nlohmann_json_schema_validator
url = https://github.com/pboettch/json-schema-validator.git
shallow = true
@@ -49,6 +49,7 @@ foreach(component IN LISTS openvino_export_components)
file(REMOVE "${CMAKE_BINARY_DIR}/${component}_dev_targets.cmake")
unset(${component} CACHE)
endforeach()
unset(openvino_export_components CACHE)

#
# Build

@@ -95,6 +96,11 @@ add_subdirectory(inference-engine/src)
add_subdirectory(src)
add_subdirectory(samples)
add_subdirectory(inference-engine)

# Enable interpreter backend
if (ENABLE_TESTS OR ENABLE_TEMPLATE)
add_subdirectory(docs/template_plugin/backend)
endif()
include(cmake/extra_modules.cmake)
add_subdirectory(docs)
add_subdirectory(tools)
CODEOWNERS (34 changes)

@@ -6,15 +6,26 @@ CODEOWNERS @openvinotoolkit/openvino-admins @openvinotoolkit/openvino-maintaine

# CI:
Jenkinsfile @openvinotoolkit/openvino-admins
azure-pipelines.yml @openvinotoolkit/openvino-admins
/.github/ @openvinotoolkit/openvino-admins
/.ci/ @openvinotoolkit/openvino-admins

# OpenVINO Samples:
/samples/ @openvinotoolkit/openvino-samples-maintainers

# OpenVINO Scripts:
/scripts/ @openvinotoolkit/openvino-scripts-maintainers

# QA Tests:
/tests/ @openvinotoolkit/openvino-tests-maintainers

# OpenVINO Scripts
/scripts/ @openvinotoolkit/openvino-scripts-maintainers
# Tools:
/tools/ @openvinotoolkit/openvino-tools-maintainers

# Model Optimizer:
/tools/mo/ @openvinotoolkit/openvino-mo-maintainers

# POT:
/tools/pot/ @openvinotoolkit/openvino-pot-maintainers

# IE Core:
/inference-engine/ @openvinotoolkit/openvino-ie-maintainers

@@ -22,6 +33,8 @@ azure-pipelines.yml @openvinotoolkit/openvino-admins
/src/common/transformations/ @GlebKazantaev @ilyachur
/src/common/legacy/ @openvinotoolkit/openvino-ngraph-maintainers
/src/common/ @openvinotoolkit/openvino-ie-maintainers
/src/core/ @openvinotoolkit/openvino-ngraph-maintainers
/src/frontends/ @openvinotoolkit/openvino-ngraph-maintainers
/inference-engine/tests_deprecated/readers/ @openvinotoolkit/openvino-ngraph-maintainers

# IE CPU:

@@ -62,20 +75,7 @@ azure-pipelines.yml @openvinotoolkit/openvino-admins
/inference-engine/tests/functional/inference_engine/ngraph_reader/ @openvinotoolkit/openvino-ie-tests-maintainers @openvinotoolkit/openvino-ngraph-maintainers
/inference-engine/tests/functional/inference_engine/transformations/ @openvinotoolkit/openvino-ie-tests-maintainers @openvinotoolkit/openvino-ngraph-maintainers

# MO:
/tools/mo/ @openvinotoolkit/openvino-mo-maintainers

# nGraph:
/src/core/ @openvinotoolkit/openvino-ngraph-maintainers
/src/frontends/ @openvinotoolkit/openvino-ngraph-maintainers

# POT Tools
/tools/pot/ @openvinotoolkit/openvino-pot-maintainers

# Tools
/tools/ @openvinotoolkit/openvino-tools-maintainers

- # Documentation
+ # Documentation:
/docs/ @openvinotoolkit/openvino-docs-maintainers
/docs/template_plugin/ @openvinotoolkit/openvino-ie-template-maintainers
*.md @openvinotoolkit/openvino-docs-maintainers
@@ -5,7 +5,7 @@
cmake_policy(SET CMP0054 NEW)

# TODO: fix it
- set_temp_directory(TEMP "${CMAKE_BINARY_DIR}")
+ set_temp_directory(TEMP "${CMAKE_SOURCE_DIR}")

if(ENABLE_SAME_BRANCH_FOR_MODELS)
branchName(MODELS_BRANCH)

@@ -287,8 +287,8 @@ if(ENABLE_INTEL_GNA)
set(GNA_HASH "cc954e67525006bf8bd353a6682e38bf208f6d74e973e0fc292850e721f17452")
endif()
if(GNA_LIBRARY_VERSION STREQUAL "GNA2")
- set(GNA_VERSION "03.00.00.1377")
- set(GNA_HASH "d45fb48994d8c2803a16e88e29ae48851066325b97c1c6c4a5bf4f4573d55c65")
+ set(GNA_VERSION "03.00.00.1455")
+ set(GNA_HASH "8ac1af18eb32777b00193f4f8c252ee4f8bd64a9069138b4a5aaeebd82ead464")
endif()

set(FILES_TO_EXTRACT_LIST gna_${GNA_VERSION}/include)

@@ -207,7 +207,7 @@ endif()
macro(ov_install_static_lib target comp)
if(NOT BUILD_SHARED_LIBS)
install(TARGETS ${target} EXPORT OpenVINOTargets
- ARCHIVE DESTINATION ${IE_CPACK_ARCHIVE_PATH} COMPONENT ${comp})
+ ARCHIVE DESTINATION ${IE_CPACK_ARCHIVE_PATH} COMPONENT ${comp} ${ARGN})
endif()
endmacro()
@@ -187,13 +187,13 @@ macro(ov_add_frontend)
if(proto_files)
if(OV_FRONTEND_PROTOBUF_LITE)
if(NOT protobuf_lite_installed)
- ov_install_static_lib(${Protobuf_LITE_LIBRARIES} ngraph)
+ ov_install_static_lib(${Protobuf_LITE_LIBRARIES} core)
set(protobuf_lite_installed ON CACHE INTERNAL "" FORCE)
endif()
link_system_libraries(${TARGET_NAME} PRIVATE ${Protobuf_LITE_LIBRARIES})
else()
if(NOT protobuf_installed)
- ov_install_static_lib(${Protobuf_LIBRARIES} ngraph)
+ ov_install_static_lib(${Protobuf_LIBRARIES} core)
set(protobuf_installed ON CACHE INTERNAL "" FORCE)
endif()
link_system_libraries(${TARGET_NAME} PRIVATE ${Protobuf_LIBRARIES})

@@ -216,18 +216,18 @@ macro(ov_add_frontend)
set(export_set EXPORT OpenVINOTargets)
endif()
install(TARGETS ${TARGET_NAME} ${export_set}
- RUNTIME DESTINATION ${IE_CPACK_RUNTIME_PATH} COMPONENT ngraph
- ARCHIVE DESTINATION ${IE_CPACK_ARCHIVE_PATH} COMPONENT ngraph
- LIBRARY DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT ngraph)
+ RUNTIME DESTINATION ${IE_CPACK_RUNTIME_PATH} COMPONENT core
+ ARCHIVE DESTINATION ${IE_CPACK_ARCHIVE_PATH} COMPONENT core
+ LIBRARY DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT core)
else()
- ov_install_static_lib(${TARGET_NAME} ngraph)
+ ov_install_static_lib(${TARGET_NAME} core)
endif()

if(OV_FRONTEND_LINKABLE_FRONTEND)
# install -dev part
install(DIRECTORY ${${TARGET_NAME}_INCLUDE_DIR}/${OV_FRONTEND_NAME}_frontend
DESTINATION ${FRONTEND_INSTALL_INCLUDE}
- COMPONENT ngraph_dev
+ COMPONENT core_dev
FILES_MATCHING PATTERN "*.hpp")

set_target_properties(${TARGET_NAME} PROPERTIES EXPORT_NAME frontend::${OV_FRONTEND_NAME})

@@ -236,6 +236,6 @@ macro(ov_add_frontend)
endif()
else()
# skipped frontend has to be installed in static libraries case
- ov_install_static_lib(${TARGET_NAME} ngraph)
+ ov_install_static_lib(${TARGET_NAME} core)
endif()
endmacro()
@@ -13,7 +13,7 @@ function(ie_generate_dev_package_config)
APPEND FILE "${CMAKE_BINARY_DIR}/${component}_dev_targets.cmake")
list(APPEND all_dev_targets ${${component}})
endforeach()
- add_custom_target(ie_dev_targets ALL DEPENDS ${all_dev_targets})
+ add_custom_target(ie_dev_targets DEPENDS ${all_dev_targets})

configure_package_config_file("${OpenVINO_SOURCE_DIR}/cmake/templates/InferenceEngineDeveloperPackageConfig.cmake.in"
"${CMAKE_BINARY_DIR}/InferenceEngineDeveloperPackageConfig.cmake"

@@ -32,8 +32,8 @@ endfunction()

function(register_extra_modules)
# post export
- openvino_developer_export_targets(COMPONENT inference_engine TARGETS inference_engine)
- openvino_developer_export_targets(COMPONENT ngraph TARGETS ngraph)
+ openvino_developer_export_targets(COMPONENT core TARGETS inference_engine)
+ openvino_developer_export_targets(COMPONENT core TARGETS ngraph)

set(InferenceEngineDeveloperPackage_DIR "${CMAKE_CURRENT_BINARY_DIR}/runtime")

@@ -93,9 +93,6 @@ endfunction()
# Extra modules support
#

# for Template plugin
openvino_developer_export_targets(COMPONENT ngraph TARGETS ngraph_backend interpreter_backend)

# this InferenceEngineDeveloperPackageConfig.cmake is not used
# during extra modules build since it's generated after modules
# are configured
@@ -63,10 +63,10 @@ Caffe*-specific parameters:
-k K Path to CustomLayersMapping.xml to register custom
layers
--mean_file MEAN_FILE, -mf MEAN_FILE
- Mean image to be used for the input. Should be a
+ [DEPRECATED] Mean image to be used for the input. Should be a
binaryproto file
--mean_file_offsets MEAN_FILE_OFFSETS, -mo MEAN_FILE_OFFSETS
- Mean image offsets to be used for the input
+ [DEPRECATED] Mean image offsets to be used for the input
binaryproto file. When the mean image is bigger than
the expected input, it is cropped. By default, centers
of the input image and the mean image are the same and

@@ -42,7 +42,7 @@ To convert a Paddle\* model:
Parameters to convert your model:

* [Framework-agnostic parameters](Converting_Model_General.md): These parameters are used to convert a model trained with any supported framework.
- > **NOTE:** `--scale`, `--scale_values`, `--mean_values`, `--mean_file` are not supported in the current version of mo_paddle.
+ > **NOTE:** `--scale`, `--scale_values`, `--mean_values` are not supported in the current version of mo_paddle.

### Example of Converting a Paddle* Model
Below is the example command to convert yolo v3 Paddle\* network to OpenVINO IR network with Model Optimizer.
@@ -6,7 +6,7 @@ mo --input_model INPUT_MODEL --output_dir <OUTPUT_MODEL_DIR>
```
You need to have write permissions for the output directory.

- > **NOTE**: Some models require using additional arguments to specify conversion parameters, such as `--input_shape`, `--scale`, `--scale_values`, `--mean_values`, `--mean_file`. To learn about when you need to use these parameters, refer to [Converting a Model Using General Conversion Parameters](Converting_Model_General.md).
+ > **NOTE**: Some models require using additional arguments to specify conversion parameters, such as `--input_shape`, `--scale`, `--scale_values`, `--mean_values`. To learn about when you need to use these parameters, refer to [Converting a Model Using General Conversion Parameters](Converting_Model_General.md).

To adjust the conversion process, you may use general parameters defined in the [Converting a Model Using General Conversion Parameters](Converting_Model_General.md) and
Framework-specific parameters for:
@@ -151,7 +151,7 @@ Usually neural network models are trained with the normalized input data. This m

In the first case, the Model Optimizer generates the IR with required pre-processing layers and Inference Engine samples may be used to infer the model.

- In the second case, information about mean/scale values should be provided to the Model Optimizer to embed it to the generated IR. Model Optimizer provides a number of command line parameters to specify them: `--scale`, `--scale_values`, `--mean_values`, `--mean_file`.
+ In the second case, information about mean/scale values should be provided to the Model Optimizer to embed it to the generated IR. Model Optimizer provides a number of command line parameters to specify them: `--scale`, `--scale_values`, `--mean_values`.

If both mean and scale values are specified, the mean is subtracted first and then scale is applied. Input values are *divided* by the scale value(s).
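As a clarification of the documented behaviour above (not part of the diff): when both mean and scale values are given, each input value x in channel c is normalized as

\[ x_{\mathrm{norm}} = \frac{x - \mathrm{mean\_values}[c]}{\mathrm{scale\_values}[c]} \]

that is, the mean is subtracted first and the result is then divided by the scale.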
@@ -12,7 +12,7 @@ OpenVINO™ toolkit is a comprehensive toolkit for quickly developing applicatio

The Intel® Distribution of OpenVINO™ toolkit\*:
- Enables CNN-based deep learning inference on the edge
- - Supports heterogeneous execution across Intel® CPU, Intel® Integrated Graphics, Intel® Neural Compute Stick 2, and Intel® Vision Accelerator Design with Intel® Movidius™ VPUs
+ - Supports heterogeneous execution across Intel® CPU and Intel® Integrated Graphics
- Speeds time-to-market via an easy-to-use library of computer vision functions and pre-optimized kernels

The **runtime package** includes the following components installed by default:
@@ -1,13 +1,13 @@
#include <ie_core.hpp>
- #include <ngraph/function.hpp>
- #include <ngraph/pass/visualize_tree.hpp>
+ #include <openvino/core/model.hpp>
+ #include <openvino/pass/visualize_tree.hpp>

int main() {
using namespace InferenceEngine;
//! [part0]
- std::shared_ptr<ngraph::Function> nGraph;
+ std::shared_ptr<ov::Model> model;
// ...
- ngraph::pass::VisualizeTree("after.png").run_on_function(nGraph); // Visualize the nGraph function to an image
+ ov::pass::VisualizeTree("after.png").run_on_model(model); // Visualize the nGraph function to an image
//! [part0]
return 0;
}
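The hunk above only shows the changed fragment. For context, below is a minimal self-contained sketch of the same 2.0-style visualization; it is an illustration rather than part of the commit, and the `openvino/opsets/opset8.hpp` header path is an assumption based on the `ov::opset8` types used elsewhere in this change.

```
// Illustrative sketch only: build a trivial ov::Model and dump it with VisualizeTree,
// mirroring the run_on_model() call introduced above.
#include <memory>

#include <openvino/core/model.hpp>
#include <openvino/opsets/opset8.hpp>
#include <openvino/pass/visualize_tree.hpp>

int main() {
    // Parameter -> Relu -> Result
    auto data = std::make_shared<ov::opset8::Parameter>(ov::element::f32, ov::Shape{1, 3, 224, 224});
    auto relu = std::make_shared<ov::opset8::Relu>(data);
    auto result = std::make_shared<ov::opset8::Result>(relu);
    auto model = std::make_shared<ov::Model>(ov::ResultVector{result}, ov::ParameterVector{data}, "toy_model");

    // Write the graph visualization to an image file, as in the updated snippet.
    ov::pass::VisualizeTree("after.png").run_on_model(model);
    return 0;
}
```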
@@ -8,7 +8,7 @@ auto network = core.ReadNetwork("sample.xml");
auto function = network.getFunction();
//! [part0]
for (auto && op : function->get_ops())
- op->get_rt_info()["affinity"] = std::make_shared<ngraph::VariantWrapper<std::string>>("CPU");
+ op->get_rt_info()["affinity"] = "CPU";
//! [part0]
return 0;
}

@@ -24,7 +24,7 @@ res.supportedLayersMap["layerName"] = "CPU";
for (auto&& node : function->get_ops()) {
auto& affinity = res.supportedLayersMap[node->get_friendly_name()];
// Store affinity mapping using node runtime information
- node->get_rt_info()["affinity"] = std::make_shared<ngraph::VariantWrapper<std::string>>(affinity);
+ node->get_rt_info()["affinity"] = affinity;
}

// load network with affinities set before
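A condensed illustration of the new convention shown in both hunks above (not part of the diff): the affinity is now stored in the node's runtime info as a plain string instead of a `ngraph::VariantWrapper<std::string>`. Here `function` is assumed to be obtained as in the snippets, e.g. via `network.getFunction()`.

```
// Tag every operation for the CPU device using the simplified rt_info API.
for (auto&& node : function->get_ops()) {
    node->get_rt_info()["affinity"] = "CPU";  // plain string, no VariantWrapper needed
}
```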
@@ -62,7 +62,7 @@ int main() {
//! [ov_api_2_0:create_core]

//! [ov_api_2_0:read_model]
- std::shared_ptr<ov::Function> network = core.read_model("model.xml");
+ std::shared_ptr<ov::Model> network = core.read_model("model.xml");
//! [ov_api_2_0:read_model]

//! [ov_api_2_0:get_inputs_outputs]

@@ -71,11 +71,11 @@ int main() {
//! [ov_api_2_0:get_inputs_outputs]

//! [ov_api_2_0:compile_model]
- ov::runtime::ExecutableNetwork exec_network = core.compile_model(network, "CPU");
+ ov::runtime::CompiledModel compiled_model = core.compile_model(network, "CPU");
//! [ov_api_2_0:compile_model]

//! [ov_api_2_0:create_infer_request]
- ov::runtime::InferRequest infer_request = exec_network.create_infer_request();
+ ov::runtime::InferRequest infer_request = compiled_model.create_infer_request();
//! [ov_api_2_0:create_infer_request]

inputs_aligned(infer_request);
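For context, a minimal end-to-end sketch of the renamed 2.0 entities above (`ov::Model`, `ov::runtime::CompiledModel`, `ov::runtime::InferRequest`). It is not part of the commit; the `ov::runtime::Core` spelling and the `infer()` call are assumptions based on this snapshot of the API, and `model.xml` is a placeholder path.

```
#include <memory>

#include <openvino/runtime/core.hpp>

int main() {
    ov::runtime::Core core;

    // Read, compile and run a model with the 2.0 names used in the hunk above.
    std::shared_ptr<ov::Model> model = core.read_model("model.xml");
    ov::runtime::CompiledModel compiled_model = core.compile_model(model, "CPU");
    ov::runtime::InferRequest infer_request = compiled_model.create_infer_request();

    // Input tensors would be set here (omitted), then:
    infer_request.infer();
    return 0;
}
```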
@@ -40,8 +40,7 @@ int main() {
auto result = std::make_shared<ov::opset8::Result>(concat);
result->set_friendly_name("result"); // operation name

- auto f =
- std::make_shared<ov::Function>(ov::ResultVector{result}, ov::ParameterVector{data1, data2}, "function_name");
+ auto f = std::make_shared<ov::Model>(ov::ResultVector{result}, ov::ParameterVector{data1, data2}, "function_name");
//! [ov:graph]
return 0;
}

docs/template_plugin/backend/CMakeLists.txt (54 lines, new file)
@@ -0,0 +1,54 @@
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#

set (SRC
backend.cpp
backend.hpp
cache.cpp
cache.hpp
executable.cpp
executable.hpp
performance_counter.hpp
pass/dyn_elimination.cpp
pass/dyn_elimination.hpp
pass/shape_relevance.cpp
pass/shape_relevance.hpp
int_backend.cpp int_executable.cpp evaluates_map.cpp
)

add_library(interpreter_backend STATIC EXCLUDE_FROM_ALL ${SRC})

if(CMAKE_COMPILER_IS_GNUCXX)
ie_add_compiler_flags(-Wno-missing-declarations)
ie_add_compiler_flags(-Wno-sign-compare)
endif()

ie_faster_build(interpreter_backend UNITY)

target_compile_definitions(interpreter_backend
PRIVATE
SHARED_LIB_PREFIX="${CMAKE_SHARED_LIBRARY_PREFIX}"
SHARED_LIB_SUFFIX="${IE_BUILD_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX}"
)
target_link_libraries(interpreter_backend PRIVATE ngraph::builder ngraph::reference openvino::util)

target_include_directories(interpreter_backend PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>)

file(GLOB_RECURSE all_backends_src "${CMAKE_CURRENT_SOURCE_DIR}/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/*.hpp")
add_clang_format_target(interpreter_backend_clang FOR_SOURCES ${all_backends_src})

# developer package
openvino_developer_export_targets(COMPONENT core TARGETS interpreter_backend)

install(TARGETS interpreter_backend
RUNTIME DESTINATION ${IE_CPACK_RUNTIME_PATH} COMPONENT tests OPTIONAL EXCLUDE_FROM_ALL
ARCHIVE DESTINATION ${IE_CPACK_RUNTIME_PATH} COMPONENT tests OPTIONAL EXCLUDE_FROM_ALL
LIBRARY DESTINATION ${IE_CPACK_LIBRARY_PATH} COMPONENT tests OPTIONAL EXCLUDE_FROM_ALL)
if(NOT BUILD_SHARED_LIBS)
install(TARGETS interpreter_backend
RUNTIME DESTINATION tests COMPONENT tests OPTIONAL EXCLUDE_FROM_ALL
ARCHIVE DESTINATION tests COMPONENT tests OPTIONAL EXCLUDE_FROM_ALL
LIBRARY DESTINATION tests COMPONENT tests OPTIONAL EXCLUDE_FROM_ALL)
endif()
docs/template_plugin/backend/backend.cpp (44 lines, new file)

@@ -0,0 +1,44 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "backend.hpp"

#include <sstream>

#include "int_backend.hpp"
#include "ngraph/file_util.hpp"
#include "ngraph/util.hpp"
#include "openvino/util/file_util.hpp"

using namespace std;
using namespace ngraph;

runtime::Backend::~Backend() {}

std::shared_ptr<runtime::Backend> runtime::Backend::create() {
auto inner_backend = make_shared<interpreter::INTBackend>();

return inner_backend;
}

std::shared_ptr<ngraph::runtime::Tensor> runtime::Backend::create_dynamic_tensor(
const ngraph::element::Type& /* element_type */,
const PartialShape& /* shape */) {
throw std::invalid_argument("This backend does not support dynamic tensors");
}

bool runtime::Backend::is_supported(const Node& /* node */) const {
// The default behavior is that a backend does not support any ops. If this is not the case
// then override this method and enhance.
return false;
}

std::shared_ptr<runtime::Executable> runtime::Backend::load(istream& /* input_stream */) {
throw runtime_error("load operation unimplemented.");
}

bool runtime::Backend::set_config(const map<string, string>& /* config */, string& error) {
error = "set_config not supported";
return false;
}
@@ -7,7 +7,6 @@
#include <memory>
#include <mutex>

#include "backend_visibility.hpp"
#include "executable.hpp"
#include "ngraph/function.hpp"
#include "ngraph/shape.hpp"

@@ -25,7 +24,7 @@ class Backend;
/// \brief Interface to a generic backend.
///
/// Backends are responsible for function execution and value allocation.
- class BACKEND_API ngraph::runtime::Backend {
+ class ngraph::runtime::Backend {
public:
virtual ~Backend();
/// \brief Create a new Backend object

@@ -38,11 +37,7 @@ public:
/// DynamicWrapperBackend. This feature is EXPERIMENTAL.
/// \returns shared_ptr to a new Backend or nullptr if the named backend
/// does not exist.
- static std::shared_ptr<Backend> create(const std::string& type, bool must_support_dynamic = false);

/// \brief Query the list of registered devices
/// \returns A vector of all registered devices.
static std::vector<std::string> get_registered_devices();
+ static std::shared_ptr<Backend> create();

/// \brief Create a tensor specific to this backend
/// This call is used when an output is dynamic and not known until execution time. When

@@ -115,17 +110,9 @@ public:
/// parameter value is valid.
virtual bool set_config(const std::map<std::string, std::string>& config, std::string& error);

static void set_backend_shared_library_search_directory(const std::string& path);
static const std::string& get_backend_shared_library_search_directory();

/// \brief Get the version of the backend
/// The default value of 0.0.0 is chosen to be a parsable version number
virtual std::string get_version() const {
return "0.0.0";
}

private:
// mutex to modify s_backend_shared_library_search_directory thread safe
static std::mutex m_mtx;
static std::string s_backend_shared_library_search_directory;
};
@@ -2133,8 +2133,7 @@ runtime::reference::custom_evaluate_function evaluate = [](const std::shared_ptr
for (size_t i = 0; i < results.size(); ++i) {
outputTensors.push_back(std::make_shared<HostTensor>());
}
- runtime::Backend::set_backend_shared_library_search_directory("");
- auto backend = runtime::Backend::create("INTERPRETER");
+ auto backend = runtime::Backend::create();
auto handle = backend->compile(function);
handle->call_with_validate(outputTensors, inputTensors);
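The hunk above also shows how callers change: with the backend registry removed, `create()` takes no backend name. A condensed usage sketch (not part of the diff; `function`, `outputTensors` and `inputTensors` are assumed to be prepared by the caller as in the surrounding test code):

```
auto backend = ngraph::runtime::Backend::create();        // always returns the interpreter backend now
auto handle = backend->compile(function);                 // compile the ngraph function
handle->call_with_validate(outputTensors, inputTensors);  // execute on HostTensor vectors
```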
@@ -3,7 +3,6 @@
//

#pragma once
#include "int_backend_visibility.hpp"
#include "ngraph/node.hpp"

namespace ngraph {
@@ -6,7 +6,6 @@

#include <memory>

#include "backend_visibility.hpp"
#include "ngraph/function.hpp"
#include "ngraph/runtime/tensor.hpp"
#include "ngraph/shape.hpp"

@@ -19,7 +18,7 @@ class Executable;
}
} // namespace ngraph

- class BACKEND_API ngraph::runtime::Executable {
+ class ngraph::runtime::Executable {
public:
Executable();
virtual ~Executable();
@@ -4,8 +4,6 @@

#include "int_backend.hpp"

#include "backend_manager.hpp"
#include "int_backend_visibility.hpp"
#include "int_executable.hpp"
#include "ngraph/except.hpp"
#include "ngraph/runtime/host_tensor.hpp"

@@ -14,12 +12,6 @@
using namespace std;
using namespace ngraph;

extern "C" INTERPRETER_BACKEND_API void ngraph_register_interpreter_backend() {
runtime::BackendManager::register_backend("INTERPRETER", [](const std::string& /* config */) {
return std::make_shared<runtime::interpreter::INTBackend>();
});
}

runtime::interpreter::INTBackend::INTBackend() {}

runtime::interpreter::INTBackend::INTBackend(const vector<string>& unsupported_op_name_list)
@@ -11,8 +11,6 @@
#include <vector>

#include "backend.hpp"
#include "backend_manager.hpp"
#include "int_backend_visibility.hpp"
#include "ngraph/runtime/tensor.hpp"

namespace ngraph {

@@ -24,7 +22,7 @@ class INTExecutable;
} // namespace runtime
} // namespace ngraph

- class INTERPRETER_BACKEND_API ngraph::runtime::interpreter::INTBackend : public Backend {
+ class ngraph::runtime::interpreter::INTBackend : public Backend {
public:
INTBackend();
INTBackend(const std::vector<std::string>& unsupported_op_name_list);

@@ -6,7 +6,6 @@

#include <cstring>

#include "backend_manager.hpp"
#include "evaluates_map.hpp"
#include "ngraph/except.hpp"
#include "ngraph/ops.hpp"
@@ -13,7 +13,6 @@
#include <vector>

#include "backend.hpp"
#include "int_backend_visibility.hpp"
#include "ngraph/ops.hpp"
#include "ngraph/runtime/aligned_buffer.hpp"
#include "ngraph/runtime/reference/hard_sigmoid.hpp"

@@ -31,7 +30,7 @@ class INTExecutable;
} // namespace runtime
} // namespace ngraph

- class INTERPRETER_BACKEND_API ngraph::runtime::interpreter::INTExecutable : public Executable {
+ class ngraph::runtime::interpreter::INTExecutable : public Executable {
friend class INTBackend;

public:
@@ -4,7 +4,6 @@

#pragma once

#include "backend_visibility.hpp"
#include "ngraph/pass/graph_rewrite.hpp"
#include "ngraph/util.hpp"

@@ -37,7 +36,7 @@ namespace pass {
/// <td> \image html dyn_broadcast_post_dyneliminate.svg </td>
/// </tr>
/// </table>
- class BACKEND_API DynElimination : public GraphRewrite {
+ class DynElimination : public GraphRewrite {
public:
DynElimination();

@@ -33,7 +33,7 @@ using namespace ngraph;
// Neither N0 nor N1 will be flagged as shape-relevant. (N1 does feed into the "shape" input of N3,
// but only via the value-irrelevant input of ShapeOf.)
//
- bool pass::ShapeRelevance::run_on_function(std::shared_ptr<Function> f) {
+ bool pass::ShapeRelevance::run_on_model(const std::shared_ptr<Function>& f) {
// TODO(amprocte): We are probably reinventing the wheel with the graph traversal here; the
// reason is that we need to cut the traversal short in cases where input values are
// irrelevant. See if there is a way to reduce this duplication.
@@ -4,15 +4,14 @@

#pragma once

#include "backend_visibility.hpp"
#include "ngraph/pass/pass.hpp"

namespace ngraph {
namespace pass {
- class BACKEND_API ShapeRelevance : public FunctionPass {
+ class ShapeRelevance : public FunctionPass {
public:
ShapeRelevance() : FunctionPass() {}
- virtual bool run_on_function(std::shared_ptr<ngraph::Function> f) override;
+ bool run_on_model(const std::shared_ptr<ngraph::Function>& m) override;
};
} // namespace pass
} // namespace ngraph
@@ -7,12 +7,11 @@
#include <cstddef>
#include <string>

#include "backend_visibility.hpp"
#include "ngraph/node.hpp"

namespace ngraph {
namespace runtime {
- class BACKEND_API PerformanceCounter {
+ class PerformanceCounter {
public:
PerformanceCounter(const std::shared_ptr<const Node>& n, size_t us, size_t calls)
: m_node(n),
@@ -27,18 +27,14 @@ target_include_directories(${TARGET_NAME} PRIVATE
target_link_libraries(${TARGET_NAME} PRIVATE
IE::inference_engine
IE::inference_engine_transformations
interpreter_backend
ngraph::reference
${NGRAPH_LIBRARIES})

# Link inference backend library to plugin. Here we use ngraph interpreter_backend as example
target_link_libraries(${TARGET_NAME} PRIVATE
IE::interpreter_backend)

set_target_properties(${TARGET_NAME} PROPERTIES INTERPROCEDURAL_OPTIMIZATION_RELEASE ${ENABLE_LTO})

# ATTENTION: uncomment to register a plugin in the plugins.xml file
# ie_register_plugins(MAIN_TARGET ${TARGET_NAME}
# POSSIBLE_PLUGINS ${TARGET_NAME})
# [cmake:plugin]

- ov_install_static_lib(ngraph_backend ngraph)
- ov_install_static_lib(interpreter_backend ngraph)
+ ov_install_static_lib(interpreter_backend tests)
@@ -221,7 +221,7 @@ void TemplatePlugin::ExecutableNetwork::Export(std::ostream& modelStream) {
OPENVINO_SUPPRESS_DEPRECATED_START
ov::pass::Serialize serializer(xmlFile, binFile, custom_opsets);
OPENVINO_SUPPRESS_DEPRECATED_END
- serializer.run_on_function(_function);
+ serializer.run_on_model(_function);

auto m_constants = binFile.str();
auto m_model = xmlFile.str();

@@ -35,8 +35,7 @@ Plugin::Plugin() {
_pluginName = "TEMPLATE";

// create ngraph backend which performs inference using ngraph reference implementations
- ngraph::runtime::Backend::set_backend_shared_library_search_directory("");
- _backend = ngraph::runtime::Backend::create("INTERPRETER");
+ _backend = ngraph::runtime::Backend::create();

// create default stream executor with a given name
_waitExecutor =
@@ -15,7 +15,7 @@ NGRAPH_RTTI_DEFINITION(ngraph::pass::AddPreprocessing, "AddPreprocessing", 0);
ngraph::pass::AddPreprocessing::AddPreprocessing(const InferenceEngine::InputsDataMap& inputInfoMap)
: m_inputInfoMap(inputInfoMap) {}

- bool ngraph::pass::AddPreprocessing::run_on_function(std::shared_ptr<ngraph::Function> f) {
+ bool ngraph::pass::AddPreprocessing::run_on_model(const std::shared_ptr<ngraph::Function>& f) {
ngraph::pass::AddMeanSubtract::MeanMap meanMap;
ngraph::pass::AddStdScale::ScaleMap scaleMap;

@@ -32,5 +32,5 @@ public:
NGRAPH_RTTI_DECLARATION;
explicit AddPreprocessing(const InferenceEngine::InputsDataMap& inputInfoMap);

- bool run_on_function(std::shared_ptr<ngraph::Function> f) override;
+ bool run_on_model(const std::shared_ptr<ngraph::Function>& m) override;
};
@@ -12,7 +12,7 @@ using namespace ngraph;
// template_function_transformation.cpp
NGRAPH_RTTI_DEFINITION(ngraph::pass::MyFunctionTransformation, "MyFunctionTransformation", 0);

- bool pass::MyFunctionTransformation::run_on_function(std::shared_ptr<ngraph::Function> f) {
+ bool pass::MyFunctionTransformation::run_on_model(const std::shared_ptr<ov::Model>& f) {
RUN_ON_FUNCTION_SCOPE(MyFunctionTransformation);
// Example transformation code
NodeVector nodes;

@@ -19,6 +19,6 @@ class MyFunctionTransformation;
class ngraph::pass::MyFunctionTransformation : public ngraph::pass::FunctionPass {
public:
NGRAPH_RTTI_DECLARATION;
- bool run_on_function(std::shared_ptr<ngraph::Function> f) override;
+ bool run_on_model(const std::shared_ptr<ov::Model>& f) override;
};
// ! [function_pass:template_transformation_hpp]
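Sketch of the updated pass interface that the hunks above migrate to (not part of the commit): a function/model pass now overrides `run_on_model()` and receives a `const std::shared_ptr<ov::Model>&`. RTTI declarations are omitted for brevity; real passes keep `NGRAPH_RTTI_DECLARATION`/`DEFINITION` as shown above, and the class name below is hypothetical.

```
#include <memory>

#include <ngraph/pass/pass.hpp>
#include <openvino/core/model.hpp>

// Hypothetical pass used only to illustrate the new signature.
class CountOpsPass : public ngraph::pass::FunctionPass {
public:
    bool run_on_model(const std::shared_ptr<ov::Model>& model) override {
        size_t op_count = 0;
        for (auto&& node : model->get_ops()) {
            (void)node;
            ++op_count;
        }
        // A real transformation would rewrite the graph here and return true if it changed anything.
        return false;
    }
};
```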
@@ -143,7 +143,7 @@ void run_matcher_with_gr(std::shared_ptr<ngraph::Function> f) {
ngraph::pass::GraphRewrite pass;
pass.add_matcher<ngraph::pass::DecomposeDivideMatcher>();
pass.add_matcher<ngraph::pass::ReluReluFusionMatcher>();
- pass.run_on_function(f);
+ pass.run_on_model(f);
// ! [matcher_pass:graph_rewrite]
}
@@ -49,12 +49,12 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const PartialShape& input_shape,
+ static std::shared_ptr<Model> CreateFunction(const PartialShape& input_shape,
const element::Type& input_type,
const element::Type& expected_output_type) {
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);
const auto log = std::make_shared<op::v0::Abs>(in);
- return std::make_shared<Function>(NodeVector{log}, ParameterVector{in});
+ return std::make_shared<Model>(NodeVector{log}, ParameterVector{in});
}
};

@@ -39,10 +39,10 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const Shape& shape, const element::Type& type) {
+ static std::shared_ptr<Model> CreateFunction(const Shape& shape, const element::Type& type) {
const auto in = std::make_shared<op::v0::Parameter>(type, shape);
const auto acos = std::make_shared<op::v0::Acos>(in);
- return std::make_shared<Function>(NodeVector {acos}, ParameterVector {in});
+ return std::make_shared<Model>(NodeVector {acos}, ParameterVector {in});
}
};

@@ -79,4 +79,4 @@ INSTANTIATE_TEST_SUITE_P(
.input({{2}, element::u64, std::vector<uint64_t> {0, 1}})
.expected({{2}, element::u64, std::vector<uint64_t> {1, 0}})),
ReferenceAcosLayerTest::getTestCaseName);
} // namespace reference_tests
} // namespace reference_tests
@@ -39,10 +39,10 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const Shape& shape, const element::Type& type) {
+ static std::shared_ptr<Model> CreateFunction(const Shape& shape, const element::Type& type) {
const auto in = std::make_shared<op::v0::Parameter>(type, shape);
const auto acosh = std::make_shared<op::v3::Acosh>(in);
- return std::make_shared<ov::Function>(NodeVector {acosh}, ParameterVector {in});
+ return std::make_shared<ov::Model>(NodeVector {acosh}, ParameterVector {in});
}
};

@@ -64,14 +64,14 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const Shape& input_shape,
+ static std::shared_ptr<Model> CreateFunction(const Shape& input_shape,
const element::Type& input_type,
const Shape& adaptive_shape,
const std::vector<int64_t> adaptive_values) {
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);
const auto out = op::v0::Constant::create<int64_t>(element::Type_t::i64, adaptive_shape, adaptive_values);
const auto adaptive_avg_pool = std::make_shared<op::v8::AdaptiveAvgPool>(in, out);
- return std::make_shared<Function>(NodeVector{adaptive_avg_pool}, ParameterVector{in});
+ return std::make_shared<Model>(NodeVector{adaptive_avg_pool}, ParameterVector{in});
}
};

@@ -178,4 +178,4 @@ INSTANTIATE_TEST_SUITE_P(
::testing::ValuesIn(generateCombinedParamsForAdaptiveAvgPool()),
ReferenceAdaptiveAvgPoolLayerTest::getTestCaseName);

} // namespace
} // namespace
@@ -67,14 +67,14 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const Shape& input_shape,
+ static std::shared_ptr<Model> CreateFunction(const Shape& input_shape,
const element::Type& input_type,
const Shape& adaptive_shape,
const std::vector<int64_t> adaptive_values) {
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);
const auto out = op::v0::Constant::create<int64_t>(element::Type_t::i64, adaptive_shape, adaptive_values);
const auto adaptive_max_pool = std::make_shared<op::v8::AdaptiveMaxPool>(in, out);
- return std::make_shared<Function>(adaptive_max_pool->outputs(), ParameterVector{in});
+ return std::make_shared<Model>(adaptive_max_pool->outputs(), ParameterVector{in});
}
};

@@ -214,4 +214,4 @@ INSTANTIATE_TEST_SUITE_P(
::testing::ValuesIn(generateCombinedParamsForAdaptiveMaxPool()),
ReferenceAdaptiveMaxPoolLayerTest::getTestCaseName);

} // namespace
} // namespace
@@ -56,14 +56,14 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const PartialShape& input_shape1,
+ static std::shared_ptr<Model> CreateFunction(const PartialShape& input_shape1,
const PartialShape& input_shape2,
const element::Type& input_type,
const element::Type& expected_output_type) {
const auto in1 = std::make_shared<op::v0::Parameter>(input_type, input_shape1);
const auto in2 = std::make_shared<op::v0::Parameter>(input_type, input_shape2);
const auto add = std::make_shared<op::v1::Add>(in1, in2);
- return std::make_shared<Function>(NodeVector{add}, ParameterVector{in1, in2});
+ return std::make_shared<Model>(NodeVector{add}, ParameterVector{in1, in2});
}
};

@@ -87,7 +87,7 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const PartialShape& input_shape1,
+ static std::shared_ptr<Model> CreateFunction(const PartialShape& input_shape1,
const PartialShape& input_shape2,
const element::Type& input_type,
const element::Type& expected_output_type) {

@@ -97,7 +97,7 @@ private:
add = std::make_shared<op::v1::Add>(add, add);
add = std::make_shared<op::v1::Add>(add, add);
add = std::make_shared<op::v1::Add>(add, add);
- return std::make_shared<Function>(NodeVector{add}, ParameterVector{in1, in2});
+ return std::make_shared<Model>(NodeVector{add}, ParameterVector{in1, in2});
}
};
@@ -40,10 +40,10 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const Shape& shape, const element::Type& type) {
+ static std::shared_ptr<Model> CreateFunction(const Shape& shape, const element::Type& type) {
const auto in = std::make_shared<op::v0::Parameter>(type, shape);
const auto Asin = std::make_shared<op::v0::Asin>(in);
- return std::make_shared<ov::Function>(NodeVector {Asin}, ParameterVector {in});
+ return std::make_shared<ov::Model>(NodeVector {Asin}, ParameterVector {in});
}
};

@@ -96,4 +96,4 @@ INSTANTIATE_TEST_SUITE_P(
.expected({{2}, element::u64, std::vector<uint64_t> {0, 1}})),

ReferenceAsinLayerTest::getTestCaseName);
} // namespace reference_tests
} // namespace reference_tests

@@ -40,10 +40,10 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const Shape& shape, const element::Type& type) {
+ static std::shared_ptr<Model> CreateFunction(const Shape& shape, const element::Type& type) {
const auto in = std::make_shared<op::v0::Parameter>(type, shape);
const auto Asinh = std::make_shared<op::v3::Asinh>(in);
- return std::make_shared<ov::Function>(NodeVector {Asinh}, ParameterVector {in});
+ return std::make_shared<ov::Model>(NodeVector {Asinh}, ParameterVector {in});
}
};

@@ -96,4 +96,4 @@ INSTANTIATE_TEST_SUITE_P(
.expected({{6}, element::u64, std::vector<uint64_t> {0, 1, 1, 2, 2, 2}})),

ReferenceAsinhLayerTest::getTestCaseName);
} // namespace reference_tests
} // namespace reference_tests
@@ -39,10 +39,10 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const Shape& shape, const element::Type& type) {
+ static std::shared_ptr<Model> CreateFunction(const Shape& shape, const element::Type& type) {
const auto in = std::make_shared<op::v0::Parameter>(type, shape);
const auto atan = std::make_shared<op::v0::Atan>(in);
- return std::make_shared<ov::Function>(NodeVector{atan}, ParameterVector{in});
+ return std::make_shared<ov::Model>(NodeVector{atan}, ParameterVector{in});
}
};

@@ -94,4 +94,4 @@ INSTANTIATE_TEST_SUITE_P(
.input({{5}, element::u64, std::vector<uint64_t>{0, 1, 2, 3, 4}})
.expected({{5}, element::u64, std::vector<uint64_t>{0, 1, 1, 1, 1}})),
ReferenceAtanLayerTest::getTestCaseName);
} // namespace reference_tests
} // namespace reference_tests

@@ -44,10 +44,10 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const Shape& shape, const element::Type& type) {
+ static std::shared_ptr<Model> CreateFunction(const Shape& shape, const element::Type& type) {
const auto in = std::make_shared<op::v0::Parameter>(type, shape);
const auto out = std::make_shared<op::v3::Atanh>(in);
- return std::make_shared<ov::Function>(NodeVector{out}, ParameterVector{in});
+ return std::make_shared<ov::Model>(NodeVector{out}, ParameterVector{in});
}
};
@@ -87,7 +87,7 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const Shape& input_shape,
+ static std::shared_ptr<Model> CreateFunction(const Shape& input_shape,
const element::Type& input_type,
const Strides& strides,
const Shape& pads_begin,

@@ -105,7 +105,7 @@ private:
exclude_pad,
rounding_type,
pad_type);
- return std::make_shared<Function>(NodeVector{avgPool}, ParameterVector{in});
+ return std::make_shared<Model>(NodeVector{avgPool}, ParameterVector{in});
}
};

@@ -268,4 +268,4 @@ INSTANTIATE_TEST_SUITE_P(
::testing::ValuesIn(generateCombinedParamsForAvgPool()),
ReferenceAvgPoolLayerTest::getTestCaseName);

} // namespace
} // namespace
@@ -71,92 +71,95 @@ void CommonReferenceTest::Validate() {

ASSERT_EQ(refOutData.size(), actualOutData.size());
for (size_t i = 0; i < refOutData.size(); i++) {
- ValidateBlobs(refOutData[i], actualOutData[i], threshold, abs_threshold);
+ ValidateBlobs(refOutData[i], actualOutData[i], threshold, abs_threshold, actual_comparision_size);
}
}

void CommonReferenceTest::ValidateBlobs(const ov::runtime::Tensor& refBlob, const ov::runtime::Tensor& outBlob,
- float threshold, float abs_threshold) {
+ float threshold, float abs_threshold, size_t actual_comparision_size) {
ASSERT_EQ(refBlob.get_element_type(), outBlob.get_element_type());
ASSERT_EQ(refBlob.get_byte_size(), outBlob.get_byte_size());

if (actual_comparision_size == 0)
actual_comparision_size = refBlob.get_size();

const auto& element_type = refBlob.get_element_type();
switch (element_type) {
case ov::element::bf16:
LayerTestsUtils::LayerTestsCommon::Compare<ov::bfloat16, ov::bfloat16>(
refBlob.data<const ov::bfloat16>(), outBlob.data<const ov::bfloat16>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::f16:
LayerTestsUtils::LayerTestsCommon::Compare<ov::float16, ov::float16>(
refBlob.data<const ov::float16>(), outBlob.data<const ov::float16>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::f32:
LayerTestsUtils::LayerTestsCommon::Compare<float, float>(
refBlob.data<const float>(), outBlob.data<const float>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::f64:
LayerTestsUtils::LayerTestsCommon::Compare<double, double>(
refBlob.data<const double>(), outBlob.data<const double>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::i8:
LayerTestsUtils::LayerTestsCommon::Compare<int8_t, int8_t>(
refBlob.data<const int8_t>(), outBlob.data<const int8_t>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::i16:
LayerTestsUtils::LayerTestsCommon::Compare<int16_t, int16_t>(
refBlob.data<const int16_t>(), outBlob.data<const int16_t>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::i32:
LayerTestsUtils::LayerTestsCommon::Compare<int32_t, int32_t>(
refBlob.data<const int32_t>(), outBlob.data<const int32_t>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::i64:
LayerTestsUtils::LayerTestsCommon::Compare<int64_t, int64_t>(
refBlob.data<const int64_t>(), outBlob.data<const int64_t>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::boolean:
LayerTestsUtils::LayerTestsCommon::Compare<bool, bool>(
refBlob.data<const bool>(), outBlob.data<const bool>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::u8:
LayerTestsUtils::LayerTestsCommon::Compare<uint8_t, uint8_t>(
refBlob.data<const uint8_t>(), outBlob.data<const uint8_t>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::u16:
LayerTestsUtils::LayerTestsCommon::Compare<uint16_t, uint16_t>(
refBlob.data<const uint16_t>(), outBlob.data<const uint16_t>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::u32:
LayerTestsUtils::LayerTestsCommon::Compare<uint32_t, uint32_t>(
refBlob.data<const uint32_t>(), outBlob.data<const uint32_t>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::u64:
LayerTestsUtils::LayerTestsCommon::Compare<uint64_t, uint64_t>(
refBlob.data<const uint64_t>(), outBlob.data<const uint64_t>(),
- refBlob.get_size(), threshold, abs_threshold);
+ actual_comparision_size, threshold, abs_threshold);
break;
case ov::element::i4:
case ov::element::u4:
LayerTestsUtils::LayerTestsCommon::Compare<int8_t, int8_t>(
static_cast<const int8_t*>(refBlob.data()), static_cast<const int8_t*>(outBlob.data()),
- refBlob.get_size() / 2, threshold, abs_threshold);
+ actual_comparision_size / 2, threshold, abs_threshold);
break;
case ov::element::u1:
LayerTestsUtils::LayerTestsCommon::Compare<int8_t, int8_t>(
static_cast<const int8_t*>(refBlob.data()), static_cast<const int8_t*>(outBlob.data()),
- refBlob.get_size() / 8, threshold, abs_threshold);
+ actual_comparision_size / 8, threshold, abs_threshold);
break;
default:
FAIL() << "Comparator for " << element_type << " element type isn't supported";
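Illustration of how the new `actual_comparision_size` knob is meant to be used (not part of the diff; the derived fixture and method names are hypothetical, and the member is assumed to be accessible to subclasses as declared in the header changes below): leaving it at 0 keeps the old behaviour of comparing every element, while a non-zero value caps the number of compared elements when the reference data covers only a prefix of the output blob.

```
// Hypothetical derived fixture that limits the comparison to the valid prefix.
class MyOpRefTest : public reference_tests::CommonReferenceTest {
public:
    void LimitComparedElements(size_t valid_elements) {
        // 0 means "compare refBlob.get_size() elements"; any other value caps the comparison.
        actual_comparision_size = valid_elements;
    }
};
```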
@@ -5,10 +5,10 @@
#pragma once

#include "openvino/core/shape.hpp"
#include "openvino/runtime/allocator.hpp"
#include "openvino/runtime/tensor.hpp"
#include "openvino/runtime/core.hpp"
#include "openvino/core/type/element_type.hpp"
#include "openvino/runtime/allocator.hpp"
#include "openvino/runtime/core.hpp"
#include "openvino/runtime/tensor.hpp"

namespace reference_tests {

@@ -22,29 +22,31 @@ public:
void Infer();
virtual void Validate();

- static void ValidateBlobs(const ov::runtime::Tensor& refBlob, const ov::runtime::Tensor& outBlob,
- float threshold, float abs_threshold);
+ static void ValidateBlobs(const ov::runtime::Tensor& refBlob,
+ const ov::runtime::Tensor& outBlob,
+ float threshold,
+ float abs_threshold,
+ size_t actual_comparision_size = 0);

protected:
const std::string targetDevice;
std::shared_ptr<ov::runtime::Core> core;
- std::shared_ptr<ov::Function> function;
+ std::shared_ptr<ov::Model> function;

- ov::runtime::ExecutableNetwork executableNetwork;
+ ov::runtime::CompiledModel executableNetwork;
ov::runtime::InferRequest inferRequest;
std::vector<ov::runtime::Tensor> inputData;
std::vector<ov::runtime::Tensor> refOutData;
std::vector<ov::runtime::Tensor> actualOutData;
float threshold = 1e-2f; // Relative diff
float abs_threshold = -1.f; // Absolute diff (not used when negative)
float threshold = 1e-2f; // Relative diff
float abs_threshold = -1.f; // Absolute diff (not used when negative)
size_t actual_comparision_size = 0; // For ref output data is smaller than output blob size
};

template <class T>
- ov::runtime::Tensor CreateTensor(const ov::element::Type& element_type,
- const std::vector<T>& values,
- size_t size = 0) {
+ ov::runtime::Tensor CreateTensor(const ov::element::Type& element_type, const std::vector<T>& values, size_t size = 0) {
size_t real_size = size ? size : values.size() * sizeof(T) / element_type.size();
- ov::runtime::Tensor tensor { element_type, {real_size} };
+ ov::runtime::Tensor tensor{element_type, {real_size}};
std::memcpy(tensor.data(), values.data(), std::min(real_size * element_type.size(), sizeof(T) * values.size()));

return tensor;

@@ -53,9 +55,9 @@ ov::runtime::Tensor CreateTensor(const ov::element::Type& element_type,
// Create blob with correct input shape (not 1-dimensional). Will be used in tests with dynamic input shapes
template <class T>
ov::runtime::Tensor CreateTensor(const ov::Shape& shape,
const ov::element::Type& element_type,
const std::vector<T>& values) {
ov::runtime::Tensor tensor { element_type, shape };
const ov::element::Type& element_type,
const std::vector<T>& values) {
ov::runtime::Tensor tensor{element_type, shape};
std::memcpy(tensor.data(), values.data(), sizeof(T) * values.size());

return tensor;

@@ -67,16 +69,19 @@ ov::runtime::Tensor CreateTensor(const ov::Shape& shape,
struct Tensor {
Tensor() = default;

- Tensor(const ov::Shape& shape, ov::element::Type type, const ov::runtime::Tensor& data): shape {shape}, type {type}, data {data} {}
+ Tensor(const ov::Shape& shape, ov::element::Type type, const ov::runtime::Tensor& data)
+ : shape{shape},
+ type{type},
+ data{data} {}

template <typename T>
Tensor(const ov::Shape& shape, ov::element::Type type, const std::vector<T>& data_elements)
- : Tensor {shape, type, CreateTensor(type, data_elements)} {}
+ : Tensor{shape, type, CreateTensor(type, data_elements)} {}

// Temporary constructor to create blob with passed input shape (not 1-dimensional)
template <typename T>
Tensor(ov::element::Type type, const ov::Shape& shape, const std::vector<T>& data_elements)
- : Tensor {shape, type, CreateTensor(shape, type, data_elements)} {}
+ : Tensor{shape, type, CreateTensor(shape, type, data_elements)} {}

ov::Shape shape;
ov::element::Type type;
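A short usage fragment for the helpers reformatted above (not part of the diff; it assumes the reference-tests header that declares them is included): the 1-D `CreateTensor` overload infers the element count from the value vector, while the shaped overload and the `Tensor(type, shape, values)` constructor keep the original shape for dynamic-shape tests.

```
std::vector<float> values = {1.0f, 2.0f, 3.0f, 4.0f};

// Flat 1-D tensor with 4 elements.
ov::runtime::Tensor flat = reference_tests::CreateTensor(ov::element::f32, values);

// Tensor that keeps the 2x2 shape.
ov::runtime::Tensor shaped = reference_tests::CreateTensor(ov::Shape{2, 2}, ov::element::f32, values);

// Wrapper struct using the shape-preserving constructor.
reference_tests::Tensor wrapped(ov::element::f32, ov::Shape{2, 2}, values);
```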
@@ -77,7 +77,7 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const Shape& input_shape,
+ static std::shared_ptr<Model> CreateFunction(const Shape& input_shape,
const element::Type_t& input_type,
const float epsilon) {
Shape channel_shape{input_shape.at(1)};

@@ -88,7 +88,7 @@ private:
auto variance = std::make_shared<op::v0::Parameter>(input_type, channel_shape);
auto batch_norm = std::make_shared<op::v0::BatchNormInference>(in, gamma, beta, mean, variance, epsilon);

- return std::make_shared<ov::Function>(batch_norm, ParameterVector{in, gamma, beta, mean, variance});
+ return std::make_shared<ov::Model>(batch_norm, ParameterVector{in, gamma, beta, mean, variance});
}
};

@@ -102,7 +102,7 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const Shape& input_shape,
+ static std::shared_ptr<Model> CreateFunction(const Shape& input_shape,
const element::Type_t& input_type,
const float epsilon) {
Shape channel_shape{input_shape.at(1)};

@@ -113,7 +113,7 @@ private:
auto variance = std::make_shared<op::v0::Parameter>(input_type, channel_shape);
auto batch_norm = std::make_shared<op::v5::BatchNormInference>(in, gamma, beta, mean, variance, epsilon);

- return std::make_shared<ov::Function>(batch_norm, ParameterVector{in, gamma, beta, mean, variance});
+ return std::make_shared<ov::Model>(batch_norm, ParameterVector{in, gamma, beta, mean, variance});
}
};

@@ -258,4 +258,4 @@ INSTANTIATE_TEST_SUITE_P(
::testing::ValuesIn(generateCombinedParamsForBatchNorm()),
ReferenceBatchNormV5LayerTest::getTestCaseName);

} // namespace
} // namespace

@@ -59,13 +59,13 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const BatchToSpaceParams& params) {
+ static std::shared_ptr<Model> CreateFunction(const BatchToSpaceParams& params) {
const auto data = std::make_shared<opset1::Parameter>(params.dataTensor.type, params.dataTensor.shape);
const auto blockShape = std::make_shared<opset1::Constant>(element::i64, params.blockShapeTensor.shape, params.blockShapeTensor.data.data());
const auto cropsBegin = std::make_shared<opset1::Constant>(element::i64, params.cropsBeginTensor.shape, params.cropsBeginTensor.data.data());
const auto cropsEnd = std::make_shared<opset1::Constant>(element::i64, params.cropsEndTensor.shape, params.cropsEndTensor.data.data());
const auto batchToSpace = std::make_shared<opset2::BatchToSpace>(data, blockShape, cropsBegin, cropsEnd);
- return std::make_shared<Function>(NodeVector {batchToSpace}, ParameterVector {data});
+ return std::make_shared<Model>(NodeVector {batchToSpace}, ParameterVector {data});
}
};

@@ -219,4 +219,4 @@ std::vector<BatchToSpaceParams> generateBatchToSpaceCombinedParams() {

INSTANTIATE_TEST_SUITE_P(smoke_BatchToSpace_With_Hardcoded_Refs, ReferenceBatchToSpaceLayerTest,
testing::ValuesIn(generateBatchToSpaceCombinedParams()), ReferenceBatchToSpaceLayerTest::getTestCaseName);
} // namespace
} // namespace

@@ -89,7 +89,7 @@ public:
}

private:
- static std::shared_ptr<Function> CreateFunction(const BinaryConvolutionParams& params, const std::vector<uint8_t>& filterData) {
+ static std::shared_ptr<Model> CreateFunction(const BinaryConvolutionParams& params, const std::vector<uint8_t>& filterData) {
const op::PadType auto_pad{op::PadType::EXPLICIT};
const auto in = std::make_shared<op::v0::Parameter>(params.inType, params.inputShape);
auto filter = std::make_shared<opset8::Constant>(ov::element::u1, params.filterShape, &filterData[0]);

@@ -102,7 +102,7 @@ private:
params.mode,
params.padValue,
auto_pad);
return std::make_shared<ov::Function>(NodeVector {BinaryConvolution}, ParameterVector {in});
|
||||
return std::make_shared<ov::Model>(NodeVector {BinaryConvolution}, ParameterVector {in});
|
||||
}
|
||||
};
|
||||
|
||||
@ -313,4 +313,4 @@ std::vector<BinaryConvolutionParams> generateBinaryConvolutionCombinedParams() {
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_BinaryConvolution_With_Hardcoded_Refs, ReferenceBinaryConvolutionLayerTest,
|
||||
testing::ValuesIn(generateBinaryConvolutionCombinedParams()), ReferenceBinaryConvolutionLayerTest::getTestCaseName);
|
||||
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -52,9 +52,9 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const BroadcastParams& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const BroadcastParams& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.dataTensor.type, params.dataTensor.shape);
|
||||
const auto f = std::make_shared<Function>(
|
||||
const auto f = std::make_shared<Model>(
|
||||
std::make_shared<opset1::Broadcast>(A, opset1::Constant::create(params.targetShapeTensor.type,
|
||||
params.targetShapeTensor.shape,
|
||||
params.targetShapeTensor.data.data())),
|
||||
@ -69,9 +69,9 @@ TEST_P(ReferenceBroadcastTest, CompareWithRefs) {
|
||||
|
||||
class ReferenceBroadcastTestV3 : public ReferenceBroadcastTest {
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const BroadcastParams& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const BroadcastParams& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.dataTensor.type, params.dataTensor.shape);
|
||||
const auto f = std::make_shared<Function>(
|
||||
const auto f = std::make_shared<Model>(
|
||||
std::make_shared<opset3::Broadcast>(A, opset1::Constant::create(params.targetShapeTensor.type,
|
||||
params.targetShapeTensor.shape,
|
||||
params.targetShapeTensor.data.data())),
|
||||
@ -123,9 +123,9 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const BroadcastParamsExplicitAxis& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const BroadcastParamsExplicitAxis& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.dataTensor.type, params.dataTensor.shape);
|
||||
const auto f = std::make_shared<Function>(
|
||||
const auto f = std::make_shared<Model>(
|
||||
std::make_shared<opset1::Broadcast>(A,
|
||||
opset1::Constant::create(params.targetShapeTensor.type,
|
||||
params.targetShapeTensor.shape,
|
||||
@ -191,7 +191,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const BroadcastParamsTestHelper& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const BroadcastParamsTestHelper& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(element::f32, params.shapeA);
|
||||
const auto shape_const = opset1::Constant::create(element::u64, Shape{params.shapeR.size()}, params.shapeR);
|
||||
std::shared_ptr<Node> broadcast;
|
||||
@ -201,7 +201,7 @@ private:
|
||||
} else {
|
||||
broadcast = std::make_shared<opset1::Broadcast>(A, shape_const);
|
||||
}
|
||||
auto f = std::make_shared<Function>(broadcast, ParameterVector{A});
|
||||
auto f = std::make_shared<Model>(broadcast, ParameterVector{A});
|
||||
return f;
|
||||
}
|
||||
|
||||
@ -226,7 +226,7 @@ TEST_P(ReferenceBroadcastTestTestHelper, CompareWithRefs) {
|
||||
|
||||
class ReferenceBroadcastTestExplicitAxisReversed : public ReferenceBroadcastTestExplicitAxis {
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const BroadcastParamsExplicitAxis& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const BroadcastParamsExplicitAxis& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.dataTensor.type, params.dataTensor.shape);
|
||||
auto broadcast = std::make_shared<opset1::Broadcast>(
|
||||
A,
|
||||
@ -239,7 +239,7 @@ private:
|
||||
auto reverse = std::make_shared<opset1::Reverse>(broadcast,
|
||||
opset1::Constant::create(element::i64, {1}, {1}),
|
||||
opset1::Reverse::Mode::INDEX);
|
||||
auto f = std::make_shared<Function>(NodeVector{reverse}, ParameterVector{A});
|
||||
auto f = std::make_shared<Model>(NodeVector{reverse}, ParameterVector{A});
|
||||
return f;
|
||||
}
|
||||
};
|
||||
@ -525,4 +525,4 @@ std::vector<BroadcastParamsExplicitAxis> generateCombinedParamsExplicitAxisRever
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_Broadcast_With_Hardcoded_Refs, ReferenceBroadcastTestExplicitAxisReversed,
|
||||
testing::ValuesIn(generateCombinedParamsExplicitAxisReversed()), ReferenceBroadcastTestExplicitAxis::getTestCaseName);
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -59,12 +59,12 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const element::Type& input_type, const PartialShape& input_pshape,
|
||||
static std::shared_ptr<Model> CreateFunction(const element::Type& input_type, const PartialShape& input_pshape,
|
||||
const element::Type& bucket_type, const PartialShape& bucket_pshape,
|
||||
const bool with_right_bound, const element::Type& output_type) {
|
||||
auto data = std::make_shared<op::v0::Parameter>(input_type, input_pshape);
|
||||
auto buckets = std::make_shared<op::v0::Parameter>(bucket_type, bucket_pshape);
|
||||
return std::make_shared<Function>(std::make_shared<op::v3::Bucketize>(data, buckets, output_type, with_right_bound),
|
||||
return std::make_shared<Model>(std::make_shared<op::v3::Bucketize>(data, buckets, output_type, with_right_bound),
|
||||
ParameterVector {data, buckets});
|
||||
}
|
||||
};
|
||||
|
@ -49,12 +49,12 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const PartialShape& input_shape,
|
||||
static std::shared_ptr<Model> CreateFunction(const PartialShape& input_shape,
|
||||
const element::Type& input_type,
|
||||
const element::Type& expected_output_type) {
|
||||
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);
|
||||
const auto ceiling = std::make_shared<op::v0::Ceiling>(in);
|
||||
return std::make_shared<Function>(NodeVector {ceiling}, ParameterVector {in});
|
||||
return std::make_shared<Model>(NodeVector {ceiling}, ParameterVector {in});
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -53,11 +53,11 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ov::PartialShape& input_shape, const ov::element::Type& input_type,
|
||||
static std::shared_ptr<Model> CreateFunction(const ov::PartialShape& input_shape, const ov::element::Type& input_type,
|
||||
const ov::element::Type& expected_output_type, const double min, const double max) {
|
||||
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);
|
||||
const auto Clamp = std::make_shared<op::v0::Clamp>(in, min, max);
|
||||
return std::make_shared<ov::Function>(NodeVector {Clamp}, ParameterVector {in});
|
||||
return std::make_shared<ov::Model>(NodeVector {Clamp}, ParameterVector {in});
|
||||
}
|
||||
};
|
||||
|
||||
@ -198,4 +198,4 @@ std::vector<ClampParams> generateClampCombinedParams() {
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_Clamp_With_Hardcoded_Refs, ReferenceClampLayerTest,
|
||||
testing::ValuesIn(generateClampCombinedParams()), ReferenceClampLayerTest::getTestCaseName);
|
||||
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -46,13 +46,13 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<ov::Function> CreateFunction(ngraph::helpers::ComparisonTypes comp_op_type, const ov::PartialShape& input_shape1,
|
||||
static std::shared_ptr<ov::Model> CreateFunction(ngraph::helpers::ComparisonTypes comp_op_type, const ov::PartialShape& input_shape1,
|
||||
const ov::PartialShape& input_shape2, const ov::element::Type& input_type,
|
||||
const ov::element::Type& expected_output_type) {
|
||||
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape1);
|
||||
const auto in2 = std::make_shared<op::v0::Parameter>(input_type, input_shape2);
|
||||
const auto comp = ngraph::builder::makeComparison(in, in2, comp_op_type);
|
||||
return std::make_shared<ov::Function>(ov::NodeVector {comp}, ov::ParameterVector {in, in2});
|
||||
return std::make_shared<ov::Model>(ov::NodeVector {comp}, ov::ParameterVector {in, in2});
|
||||
}
|
||||
};
|
||||
} // namespace ComparisonOpsRefTestDefinitions
|
||||
|
@ -58,7 +58,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParams& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParams& params) {
|
||||
std::shared_ptr<opset1::Parameter> A, B, C;
|
||||
if (params.dynamicShape.is_dynamic()) {
|
||||
A = std::make_shared<opset1::Parameter>(params.A.type, params.dynamicShape);
|
||||
@ -69,7 +69,7 @@ private:
|
||||
B = std::make_shared<opset1::Parameter>(params.B.type, params.B.shape);
|
||||
C = std::make_shared<opset1::Parameter>(params.C.type, params.C.shape);
|
||||
}
|
||||
auto f = std::make_shared<Function>(std::make_shared<opset1::Concat>(NodeVector{A, B, C}, params.axis), ParameterVector{A, B, C});
|
||||
auto f = std::make_shared<Model>(std::make_shared<opset1::Concat>(NodeVector{A, B, C}, params.axis), ParameterVector{A, B, C});
|
||||
return f;
|
||||
}
|
||||
};
|
||||
@ -183,7 +183,7 @@ public:
|
||||
inputs_param.push_back(A);
|
||||
inputs.push_back(A);
|
||||
}
|
||||
function = std::make_shared<Function>(std::make_shared<opset1::Concat>(inputs, 0), inputs_param);
|
||||
function = std::make_shared<Model>(std::make_shared<opset1::Concat>(inputs, 0), inputs_param);
|
||||
|
||||
std::vector<float> ref_result;
|
||||
for (uint32_t i = 0; i < params.numInputs; i++) {
|
||||
@ -284,7 +284,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParamsInPlace2dTensor& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParamsInPlace2dTensor& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.A.type, params.A.shape);
|
||||
const auto B = std::make_shared<opset1::Parameter>(params.B.type, params.B.shape);
|
||||
const auto add1 = std::make_shared<opset1::Add>(A, B);
|
||||
@ -292,7 +292,7 @@ private:
|
||||
const auto D = std::make_shared<opset1::Parameter>(params.D.type, params.D.shape);
|
||||
const auto add2 = std::make_shared<opset1::Add>(C, D);
|
||||
const auto subtract = std::make_shared<opset1::Subtract>(C, A);
|
||||
const auto f = std::make_shared<Function>(
|
||||
const auto f = std::make_shared<Model>(
|
||||
std::make_shared<opset1::Concat>(NodeVector{add1, add2, subtract}, params.axis),
|
||||
ParameterVector{A, B, C, D});
|
||||
return f;
|
||||
@ -389,7 +389,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParamsInPlacePropagate2dTensor& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParamsInPlacePropagate2dTensor& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.A.type, params.A.shape);
|
||||
const auto B = std::make_shared<opset1::Parameter>(params.B.type, params.B.shape);
|
||||
const auto add1 = std::make_shared<opset1::Add>(A, B);
|
||||
@ -398,7 +398,7 @@ private:
|
||||
const auto add2 = std::make_shared<opset1::Add>(C, D);
|
||||
const auto concat1 = std::make_shared<opset1::Concat>(NodeVector{add1, add2}, params.axis);
|
||||
const auto subtract = std::make_shared<opset1::Subtract>(C, A);
|
||||
const auto f = std::make_shared<Function>(
|
||||
const auto f = std::make_shared<Model>(
|
||||
std::make_shared<opset1::Concat>(NodeVector{concat1, subtract}, params.axis),
|
||||
ParameterVector{A, B, C, D});
|
||||
return f;
|
||||
@ -489,13 +489,13 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParamsInPlaceTree1& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParamsInPlaceTree1& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.A.type, params.A.shape);
|
||||
const auto B = std::make_shared<opset1::Parameter>(params.B.type, params.B.shape);
|
||||
const auto add1 = std::make_shared<opset1::Add>(A, B);
|
||||
const auto add2 = std::make_shared<opset1::Add>(A, B);
|
||||
const auto concat = std::make_shared<opset1::Concat>(NodeVector{add1, add2}, params.axis);
|
||||
const auto f = std::make_shared<Function>(std::make_shared<opset1::Add>(concat, concat),
|
||||
const auto f = std::make_shared<Model>(std::make_shared<opset1::Add>(concat, concat),
|
||||
ParameterVector{A, B});
|
||||
return f;
|
||||
}
|
||||
@ -583,7 +583,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParamsInPlaceTree2& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParamsInPlaceTree2& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.A.type, params.A.shape);
|
||||
const auto B = std::make_shared<opset1::Parameter>(params.B.type, params.B.shape);
|
||||
const auto add1 = std::make_shared<opset1::Add>(A, B);
|
||||
@ -591,7 +591,7 @@ private:
|
||||
const auto concat1 = std::make_shared<opset1::Concat>(NodeVector{add1, add2}, params.axis);
|
||||
const auto concat2 = std::make_shared<opset1::Concat>(NodeVector{add1, add2}, params.axis);
|
||||
const auto concat12 = std::make_shared<opset1::Concat>(NodeVector{concat1, concat2}, params.axis);
|
||||
const auto f = std::make_shared<Function>(std::make_shared<opset1::Add>(concat12, concat12),
|
||||
const auto f = std::make_shared<Model>(std::make_shared<opset1::Add>(concat12, concat12),
|
||||
ParameterVector{A, B});
|
||||
return f;
|
||||
}
|
||||
@ -680,7 +680,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParamsInPlaceTree3& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParamsInPlaceTree3& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.A.type, params.A.shape);
|
||||
const auto B = std::make_shared<opset1::Parameter>(params.B.type, params.B.shape);
|
||||
const auto concat1 = std::make_shared<opset1::Concat>(NodeVector{A, B}, params.axis);
|
||||
@ -690,7 +690,7 @@ private:
|
||||
const auto concat12 = std::make_shared<opset1::Concat>(NodeVector{concat1, concat2}, params.axis);
|
||||
const auto concat34 = std::make_shared<opset1::Concat>(NodeVector{concat3, concat4}, params.axis);
|
||||
const auto concat14 = std::make_shared<opset1::Concat>(NodeVector{concat12, concat34}, params.axis);
|
||||
const auto f = std::make_shared<Function>(std::make_shared<opset1::Add>(concat14, concat14),
|
||||
const auto f = std::make_shared<Model>(std::make_shared<opset1::Add>(concat14, concat14),
|
||||
ParameterVector{A, B});
|
||||
return f;
|
||||
}
|
||||
@ -781,14 +781,14 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParamsInPlaceAddConcat& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParamsInPlaceAddConcat& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.A.type, params.A.shape);
|
||||
const auto B = std::make_shared<opset1::Parameter>(params.B.type, params.B.shape);
|
||||
const auto add1 = std::make_shared<opset1::Add>(A, B);
|
||||
const auto add2 = std::make_shared<opset1::Add>(add1, add1);
|
||||
const auto concat = std::make_shared<opset1::Concat>(NodeVector{add1, add2}, params.axis);
|
||||
const auto add3 = std::make_shared<opset1::Add>(concat, concat);
|
||||
const auto f = std::make_shared<Function>(add3, ParameterVector{A, B});
|
||||
const auto f = std::make_shared<Model>(add3, ParameterVector{A, B});
|
||||
return f;
|
||||
}
|
||||
};
|
||||
@ -875,7 +875,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParamsInPlaceAddConcat2& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParamsInPlaceAddConcat2& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.A.type, params.A.shape);
|
||||
const auto B = std::make_shared<opset1::Parameter>(params.B.type, params.B.shape);
|
||||
const auto add1 = std::make_shared<opset1::Add>(A, B);
|
||||
@ -886,7 +886,7 @@ private:
|
||||
const auto concat1 = std::make_shared<opset1::Concat>(NodeVector{add1, add2, add3}, params.axis);
|
||||
const auto concat2 = std::make_shared<opset1::Concat>(NodeVector{add4, add2, add5}, params.axis);
|
||||
const auto add6 = std::make_shared<opset1::Add>(concat1, concat2);
|
||||
const auto f = std::make_shared<Function>(add6, ParameterVector{A, B});
|
||||
const auto f = std::make_shared<Model>(add6, ParameterVector{A, B});
|
||||
return f;
|
||||
}
|
||||
};
|
||||
@ -976,12 +976,12 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParams5d& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParams5d& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.A.type, params.A.shape);
|
||||
const auto B = std::make_shared<opset1::Parameter>(params.B.type, params.B.shape);
|
||||
const auto C = std::make_shared<opset1::Parameter>(params.C.type, params.C.shape);
|
||||
const auto concat = std::make_shared<opset1::Concat>(NodeVector{A, B, C}, params.axis);
|
||||
const auto f = std::make_shared<Function>(concat, ParameterVector{A, B, C});
|
||||
const auto f = std::make_shared<Model>(concat, ParameterVector{A, B, C});
|
||||
return f;
|
||||
}
|
||||
};
|
||||
@ -1103,11 +1103,11 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParamsZeroLength1dLast& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParamsZeroLength1dLast& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.A.type, params.A.shape);
|
||||
const auto B = std::make_shared<opset1::Parameter>(params.B.type, params.B.shape);
|
||||
const auto concat = std::make_shared<opset1::Concat>(NodeVector{A, B}, params.axis);
|
||||
const auto f = std::make_shared<Function>(concat, ParameterVector{A, B});
|
||||
const auto f = std::make_shared<Model>(concat, ParameterVector{A, B});
|
||||
return f;
|
||||
}
|
||||
};
|
||||
@ -1197,12 +1197,12 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParamsZeroLength1dMiddle& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParamsZeroLength1dMiddle& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.A.type, params.A.shape);
|
||||
const auto B = std::make_shared<opset1::Parameter>(params.B.type, params.B.shape);
|
||||
const auto C = std::make_shared<opset1::Parameter>(params.C.type, params.C.shape);
|
||||
const auto concat = std::make_shared<opset1::Concat>(NodeVector{A, B, C}, params.axis);
|
||||
const auto f = std::make_shared<Function>(concat, ParameterVector{A, B, C});
|
||||
const auto f = std::make_shared<Model>(concat, ParameterVector{A, B, C});
|
||||
return f;
|
||||
}
|
||||
};
|
||||
@ -1287,10 +1287,10 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParamsZeroZero& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParamsZeroZero& params) {
|
||||
const auto constant_1 = std::make_shared<opset1::Constant>(params.A.type, params.A.shape, params.A.data.data());
|
||||
const auto concat_1 = std::make_shared<opset1::Concat>(NodeVector{constant_1, constant_1}, params.axis);
|
||||
const auto f = std::make_shared<Function>(concat_1, ParameterVector{});
|
||||
const auto f = std::make_shared<Model>(concat_1, ParameterVector{});
|
||||
return f;
|
||||
}
|
||||
};
|
||||
@ -1379,12 +1379,12 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConcatParamsZeroLength4dMiddle& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConcatParamsZeroLength4dMiddle& params) {
|
||||
const auto A = std::make_shared<opset1::Parameter>(params.A.type, params.A.shape);
|
||||
const auto B = std::make_shared<opset1::Parameter>(params.B.type, params.B.shape);
|
||||
const auto C = std::make_shared<opset1::Parameter>(params.C.type, params.C.shape);
|
||||
const auto concat = std::make_shared<opset1::Concat>(NodeVector{A, B, C}, params.axis);
|
||||
const auto f = std::make_shared<Function>(concat, ParameterVector{A, B, C});
|
||||
const auto f = std::make_shared<Model>(concat, ParameterVector{A, B, C});
|
||||
return f;
|
||||
}
|
||||
};
|
||||
@ -1430,4 +1430,4 @@ INSTANTIATE_TEST_SUITE_P(smoke_Concat_With_Hardcoded_Refs, ReferenceConcatTestZe
|
||||
testing::ValuesIn(generateCombinedParamsZeroLength4dMiddle()), ReferenceConcatTestZeroLength4dMiddle::getTestCaseName);
|
||||
|
||||
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -59,9 +59,9 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ParamType& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ParamType& params) {
|
||||
auto A = op::v0::Constant::create(params.inType, params.inputShape.to_shape(), params.inputData.data());
|
||||
return std::make_shared<Function>(A, ParameterVector{});
|
||||
return std::make_shared<Model>(A, ParameterVector{});
|
||||
}
|
||||
};
|
||||
|
||||
@ -74,10 +74,10 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ParamType& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ParamType& params) {
|
||||
auto A = op::v0::Constant::create(params.inType, params.inputShape.to_shape(), params.inputData.data());
|
||||
auto B = op::v0::Constant::create(params.inType, params.inputShape.to_shape(), params.inputData.data());
|
||||
return std::make_shared<Function>(NodeVector{A, B}, ParameterVector{});
|
||||
return std::make_shared<Model>(NodeVector{A, B}, ParameterVector{});
|
||||
}
|
||||
};
|
||||
|
||||
@ -90,9 +90,9 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ParamType& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ParamType& params) {
|
||||
auto A = op::v0::Constant::create(params.inType, params.inputShape.to_shape(), params.inputData.data());
|
||||
return std::make_shared<Function>(std::make_shared<op::v0::Abs>(A), ParameterVector{});
|
||||
return std::make_shared<Model>(std::make_shared<op::v0::Abs>(A), ParameterVector{});
|
||||
}
|
||||
};
|
||||
|
||||
@ -105,12 +105,12 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ParamType& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ParamType& params) {
|
||||
const auto A = std::make_shared<op::v0::Constant>(
|
||||
params.inType,
|
||||
params.inputShape.to_shape(),
|
||||
std::vector<std::string>{std::to_string(*reinterpret_cast<int*>(params.inputData.data()))});
|
||||
return std::make_shared<Function>(A, ParameterVector{});
|
||||
return std::make_shared<Model>(A, ParameterVector{});
|
||||
}
|
||||
};
|
||||
|
||||
@ -123,10 +123,10 @@ public:
|
||||
}
|
||||
|
||||
protected:
|
||||
static std::shared_ptr<Function> CreateFunction(const ParamType& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ParamType& params) {
|
||||
auto A = op::v0::Constant::create(params.inType, params.inputShape.to_shape(), params.inputData.data());
|
||||
auto B = op::v0::Constant::create(params.inType, params.inputShape.to_shape(), {true, true, true, true});
|
||||
return std::make_shared<Function>(std::make_shared<op::v1::Equal>(A, B), ParameterVector{});
|
||||
return std::make_shared<Model>(std::make_shared<op::v1::Equal>(A, B), ParameterVector{});
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -51,12 +51,12 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<ov::Function> CreateFunction(const ov::PartialShape& input_shape, const ov::element::Type& input_type,
|
||||
static std::shared_ptr<ov::Model> CreateFunction(const ov::PartialShape& input_shape, const ov::element::Type& input_type,
|
||||
const ov::element::Type& expected_output_type,
|
||||
const ngraph::helpers::ConversionTypes& conversion_type) {
|
||||
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);
|
||||
const auto convert = ngraph::builder::makeConversion(in, expected_output_type, conversion_type);
|
||||
return std::make_shared<ov::Function>(ov::NodeVector {convert}, ov::ParameterVector {in});
|
||||
return std::make_shared<ov::Model>(ov::NodeVector {convert}, ov::ParameterVector {in});
|
||||
}
|
||||
};
|
||||
} // namespace ConversionOpsRefTestDefinitions
|
||||
|
@ -4,7 +4,7 @@
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include <openvino/core/function.hpp>
|
||||
#include <openvino/core/model.hpp>
|
||||
#include <tuple>
|
||||
#include <openvino/op/i420_to_rgb.hpp>
|
||||
#include <openvino/op/i420_to_bgr.hpp>
|
||||
@ -26,23 +26,23 @@ public:
|
||||
|
||||
public:
|
||||
template <typename T>
|
||||
static std::shared_ptr<Function> CreateFunction(const Tensor& input) {
|
||||
static std::shared_ptr<Model> CreateFunction(const Tensor& input) {
|
||||
const auto in = std::make_shared<op::v0::Parameter>(input.type, input.shape);
|
||||
std::shared_ptr<Node> conv;
|
||||
conv = std::make_shared<T>(in);
|
||||
auto res = std::make_shared<op::v0::Result>(conv);
|
||||
return std::make_shared<Function>(ResultVector{res}, ParameterVector {in});
|
||||
return std::make_shared<Model>(ResultVector{res}, ParameterVector {in});
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static std::shared_ptr<Function> CreateFunction3(const Tensor& input1, const Tensor& input2, const Tensor& input3) {
|
||||
static std::shared_ptr<Model> CreateFunction3(const Tensor& input1, const Tensor& input2, const Tensor& input3) {
|
||||
const auto in1 = std::make_shared<op::v0::Parameter>(input1.type, input1.shape);
|
||||
const auto in2 = std::make_shared<op::v0::Parameter>(input2.type, input2.shape);
|
||||
const auto in3 = std::make_shared<op::v0::Parameter>(input3.type, input3.shape);
|
||||
std::shared_ptr<Node> conv;
|
||||
conv = std::make_shared<T>(in1, in2, in3);
|
||||
auto res = std::make_shared<op::v0::Result>(conv);
|
||||
return std::make_shared<Function>(ResultVector{res}, ParameterVector {in1, in2, in3});
|
||||
return std::make_shared<Model>(ResultVector{res}, ParameterVector {in1, in2, in3});
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -4,7 +4,7 @@
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include <openvino/core/function.hpp>
|
||||
#include <openvino/core/model.hpp>
|
||||
#include <tuple>
|
||||
#include <openvino/op/nv12_to_rgb.hpp>
|
||||
#include <openvino/op/nv12_to_bgr.hpp>
|
||||
@ -26,22 +26,22 @@ public:
|
||||
|
||||
public:
|
||||
template <typename T>
|
||||
static std::shared_ptr<Function> CreateFunction(const Tensor& input) {
|
||||
static std::shared_ptr<Model> CreateFunction(const Tensor& input) {
|
||||
const auto in = std::make_shared<op::v0::Parameter>(input.type, input.shape);
|
||||
std::shared_ptr<Node> conv;
|
||||
conv = std::make_shared<T>(in);
|
||||
auto res = std::make_shared<op::v0::Result>(conv);
|
||||
return std::make_shared<Function>(ResultVector{res}, ParameterVector {in});
|
||||
return std::make_shared<Model>(ResultVector{res}, ParameterVector {in});
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static std::shared_ptr<Function> CreateFunction2(const Tensor& input1, const Tensor& input2) {
|
||||
static std::shared_ptr<Model> CreateFunction2(const Tensor& input1, const Tensor& input2) {
|
||||
const auto in1 = std::make_shared<op::v0::Parameter>(input1.type, input1.shape);
|
||||
const auto in2 = std::make_shared<op::v0::Parameter>(input2.type, input2.shape);
|
||||
std::shared_ptr<Node> conv;
|
||||
conv = std::make_shared<T>(in1, in2);
|
||||
auto res = std::make_shared<op::v0::Result>(conv);
|
||||
return std::make_shared<Function>(ResultVector{res}, ParameterVector {in1, in2});
|
||||
return std::make_shared<Model>(ResultVector{res}, ParameterVector {in1, in2});
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -94,7 +94,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConvolutionParams& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConvolutionParams& params) {
|
||||
const op::PadType auto_pad{op::PadType::EXPLICIT};
|
||||
|
||||
const auto in = std::make_shared<op::v0::Parameter>(params.inType, params.inputShape);
|
||||
@ -115,9 +115,9 @@ private:
|
||||
params.padEnd,
|
||||
params.dialations,
|
||||
auto_pad);
|
||||
return std::make_shared<ov::Function>(NodeVector {Convolution2}, ParameterVector {in, filter});
|
||||
return std::make_shared<ov::Model>(NodeVector {Convolution2}, ParameterVector {in, filter});
|
||||
} else {
|
||||
return std::make_shared<ov::Function>(NodeVector {Convolution}, ParameterVector {in, filter});
|
||||
return std::make_shared<ov::Model>(NodeVector {Convolution}, ParameterVector {in, filter});
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -1086,4 +1086,4 @@ std::vector<ConvolutionParams> generateConvolutionCombinedParams() {
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_Convolution_With_Hardcoded_Refs, ReferenceConvolutionLayerTest,
|
||||
testing::ValuesIn(generateConvolutionCombinedParams()), ReferenceConvolutionLayerTest::getTestCaseName);
|
||||
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -93,7 +93,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const ConvolutionBackpropParams& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const ConvolutionBackpropParams& params) {
|
||||
const op::PadType auto_pad{op::PadType::EXPLICIT};
|
||||
|
||||
const auto in = std::make_shared<op::v0::Parameter>(params.inType, params.inputShape);
|
||||
@ -107,7 +107,7 @@ private:
|
||||
auto_pad,
|
||||
params.outPadding);
|
||||
|
||||
return std::make_shared<ov::Function>(NodeVector {ConvolutionBackprop}, ParameterVector {in, filter});
|
||||
return std::make_shared<ov::Model>(NodeVector {ConvolutionBackprop}, ParameterVector {in, filter});
|
||||
}
|
||||
};
|
||||
|
||||
@ -1773,4 +1773,4 @@ std::vector<ConvolutionBackpropParams> generateConvolutionBackpropCombinedParams
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_ConvolutionBackprop_With_Hardcoded_Refs, ReferenceConvolutionBackpropLayerTest,
|
||||
testing::ValuesIn(generateConvolutionBackpropCombinedParams()), ReferenceConvolutionBackpropLayerTest::getTestCaseName);
|
||||
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -39,10 +39,10 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const Shape& shape, const element::Type& type) {
|
||||
static std::shared_ptr<Model> CreateFunction(const Shape& shape, const element::Type& type) {
|
||||
const auto in = std::make_shared<op::v0::Parameter>(type, shape);
|
||||
const auto Cos = std::make_shared<op::v0::Cos>(in);
|
||||
return std::make_shared<ov::Function>(NodeVector {Cos}, ParameterVector {in});
|
||||
return std::make_shared<ov::Model>(NodeVector {Cos}, ParameterVector {in});
|
||||
}
|
||||
};
|
||||
|
||||
@ -95,4 +95,4 @@ INSTANTIATE_TEST_SUITE_P(
|
||||
.expected({{3}, element::u64, std::vector<uint64_t> {1, 0, 0}})),
|
||||
|
||||
ReferenceCosLayerTest::getTestCaseName);
|
||||
} // namespace reference_tests
|
||||
} // namespace reference_tests
|
||||
|
@ -39,10 +39,10 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const Shape& shape, const element::Type& type) {
|
||||
static std::shared_ptr<Model> CreateFunction(const Shape& shape, const element::Type& type) {
|
||||
const auto in = std::make_shared<op::v0::Parameter>(type, shape);
|
||||
const auto Cosh = std::make_shared<op::v0::Cosh>(in);
|
||||
return std::make_shared<ov::Function>(NodeVector {Cosh}, ParameterVector {in});
|
||||
return std::make_shared<ov::Model>(NodeVector {Cosh}, ParameterVector {in});
|
||||
}
|
||||
};
|
||||
|
||||
@ -95,4 +95,4 @@ INSTANTIATE_TEST_SUITE_P(
|
||||
.expected({{4}, element::u64, std::vector<uint64_t> {1, 2, 4, 27}})),
|
||||
|
||||
ReferenceCoshLayerTest::getTestCaseName);
|
||||
} // namespace reference_tests
|
||||
} // namespace reference_tests
|
||||
|
@ -53,12 +53,12 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const CTCGreedyDecoderParams& params) {
|
||||
std::shared_ptr<Function> function;
|
||||
static std::shared_ptr<Model> CreateFunction(const CTCGreedyDecoderParams& params) {
|
||||
std::shared_ptr<Model> function;
|
||||
const auto data = std::make_shared<op::v0::Parameter>(params.dataTensor.type, params.dataTensor.shape);
|
||||
const auto indices = std::make_shared<op::v0::Parameter>(params.masksTensor.type, params.masksTensor.shape);
|
||||
const auto decoder = std::make_shared<op::v0::CTCGreedyDecoder>(data, indices, params.ctcMergedRepeat);
|
||||
function = std::make_shared<ov::Function>(NodeVector{decoder}, ParameterVector{data, indices});
|
||||
function = std::make_shared<ov::Model>(NodeVector{decoder}, ParameterVector{data, indices});
|
||||
return function;
|
||||
}
|
||||
};
|
||||
@ -129,4 +129,4 @@ std::vector<CTCGreedyDecoderParams> generateCombinedParams() {
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_CTCGreedyDecoder_With_Hardcoded_Refs, ReferenceCTCGreedyDecoderTest,
|
||||
testing::ValuesIn(generateCombinedParams()), ReferenceCTCGreedyDecoderTest::getTestCaseName);
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -73,14 +73,14 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const CTCGreedyDecoderSeqLenParams& params) {
|
||||
std::shared_ptr<Function> function;
|
||||
static std::shared_ptr<Model> CreateFunction(const CTCGreedyDecoderSeqLenParams& params) {
|
||||
std::shared_ptr<Model> function;
|
||||
const auto data = std::make_shared<op::v0::Parameter>(params.dataTensor.type, params.dataTensor.shape);
|
||||
const auto seq_len = std::make_shared<op::v0::Parameter>(params.seqLenTensor.type, params.seqLenTensor.shape);
|
||||
auto blank_index = std::make_shared<op::v0::Constant>(params.blankIndexTensor.type, params.blankIndexTensor.shape,
|
||||
params.blankIndexTensor.data.data());
|
||||
const auto decoder = std::make_shared<op::v6::CTCGreedyDecoderSeqLen>(data, seq_len, blank_index, params.mergeRepeated);
|
||||
function = std::make_shared<ov::Function>(decoder->outputs(), ParameterVector{data, seq_len});
|
||||
function = std::make_shared<ov::Model>(decoder->outputs(), ParameterVector{data, seq_len});
|
||||
return function;
|
||||
}
|
||||
};
|
||||
@ -114,12 +114,12 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const CTCGreedyDecoderSeqLenParamsNoOptionalInput& params) {
|
||||
std::shared_ptr<Function> function;
|
||||
static std::shared_ptr<Model> CreateFunction(const CTCGreedyDecoderSeqLenParamsNoOptionalInput& params) {
|
||||
std::shared_ptr<Model> function;
|
||||
const auto data = std::make_shared<op::v0::Parameter>(params.dataTensor.type, params.dataTensor.shape);
|
||||
const auto seq_len = std::make_shared<op::v0::Parameter>(params.seqLenTensor.type, params.seqLenTensor.shape);
|
||||
const auto decoder = std::make_shared<op::v6::CTCGreedyDecoderSeqLen>(data, seq_len, params.mergeRepeated);
|
||||
function = std::make_shared<ov::Function>(decoder->outputs(), ParameterVector{data, seq_len});
|
||||
function = std::make_shared<ov::Model>(decoder->outputs(), ParameterVector{data, seq_len});
|
||||
return function;
|
||||
}
|
||||
};
|
||||
@ -225,4 +225,4 @@ INSTANTIATE_TEST_SUITE_P(smoke_CTCGreedyDecoderSeqLen_With_Hardcoded_Refs, Refer
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_CTCGreedyDecoderSeqLen_With_Hardcoded_Refs, ReferenceCTCGreedyDecoderSeqLenTestNoOptionalInput,
|
||||
testing::ValuesIn(generateCombinedParamsNoOptionalInput()), ReferenceCTCGreedyDecoderSeqLenTestNoOptionalInput::getTestCaseName);
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -61,7 +61,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const CTCLossParams& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const CTCLossParams& params) {
|
||||
const auto A = std::make_shared<op::v0::Parameter>(params.logits.type, params.logits.shape); // logits
|
||||
const auto B = std::make_shared<op::v0::Parameter>(params.logitsLen.type, params.logitsLen.shape); // logitsLen
|
||||
const auto C = std::make_shared<op::v0::Parameter>(params.labels.type, params.labels.shape); // labels
|
||||
@ -69,7 +69,7 @@ private:
|
||||
const auto E = std::make_shared<op::v0::Parameter>(params.blankIdx.type, params.blankIdx.shape); // blankIdx
|
||||
|
||||
const auto ctcLoss = std::make_shared<op::v4::CTCLoss>(A, B, C, D, E, params.preprocessCollapseRepeated, params.ctcMergeRepeated, params.unique);
|
||||
return std::make_shared<ov::Function>(NodeVector {ctcLoss}, ParameterVector {A, B, C, D, E});
|
||||
return std::make_shared<ov::Model>(NodeVector {ctcLoss}, ParameterVector {A, B, C, D, E});
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -85,18 +85,18 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const PartialShape& data_shape, const element::Type& data_type, const PartialShape& axis_shape,
|
||||
static std::shared_ptr<Model> CreateFunction(const PartialShape& data_shape, const element::Type& data_type, const PartialShape& axis_shape,
|
||||
const element::Type& axis_type, const bool execlusive, const bool reverse) {
|
||||
const auto data_param = std::make_shared<op::v0::Parameter>(data_type, data_shape);
|
||||
const auto axis_param = std::make_shared<op::v0::Parameter>(axis_type, axis_shape);
|
||||
const auto cum_sum = std::make_shared<op::v0::CumSum>(data_param, axis_param, execlusive, reverse);
|
||||
return std::make_shared<ov::Function>(NodeVector {cum_sum}, ParameterVector {data_param, axis_param});
|
||||
return std::make_shared<ov::Model>(NodeVector {cum_sum}, ParameterVector {data_param, axis_param});
|
||||
}
|
||||
|
||||
static std::shared_ptr<Function> CreateFunction(const PartialShape& data_shape, const element::Type& data_type) {
|
||||
static std::shared_ptr<Model> CreateFunction(const PartialShape& data_shape, const element::Type& data_type) {
|
||||
const auto data_param = std::make_shared<op::v0::Parameter>(data_type, data_shape);
|
||||
const auto cum_sum = std::make_shared<op::v0::CumSum>(data_param);
|
||||
return std::make_shared<ov::Function>(NodeVector {cum_sum}, ParameterVector {data_param});
|
||||
return std::make_shared<ov::Model>(NodeVector {cum_sum}, ParameterVector {data_param});
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -133,7 +133,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const DeformableConvolutionParams& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const DeformableConvolutionParams& params) {
|
||||
const op::PadType auto_pad{op::PadType::EXPLICIT};
|
||||
|
||||
const auto in = std::make_shared<op::v0::Parameter>(params.inType, params.inputShape);
|
||||
@ -149,7 +149,7 @@ private:
|
||||
auto_pad,
|
||||
params.group,
|
||||
params.deformableGroup);
|
||||
return std::make_shared<ov::Function>(NodeVector {DeformableConvolution}, ParameterVector {in, offset, filter});
|
||||
return std::make_shared<ov::Model>(NodeVector {DeformableConvolution}, ParameterVector {in, offset, filter});
|
||||
}
|
||||
};
|
||||
|
||||
@ -191,7 +191,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const DeformableConvolutionParams& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const DeformableConvolutionParams& params) {
|
||||
const op::PadType auto_pad{op::PadType::EXPLICIT};
|
||||
|
||||
const auto in = std::make_shared<op::v0::Parameter>(params.inType, params.inputShape);
|
||||
@ -211,7 +211,7 @@ private:
|
||||
params.group,
|
||||
params.deformableGroup,
|
||||
params.use_bilinear_interpolation_padding);
|
||||
return std::make_shared<ov::Function>(NodeVector {DeformableConvolutionV8}, ParameterVector {in, offset, filter, mask});
|
||||
return std::make_shared<ov::Model>(NodeVector {DeformableConvolutionV8}, ParameterVector {in, offset, filter, mask});
|
||||
} else {
|
||||
const auto DeformableConvolutionV8 = std::make_shared<op::v8::DeformableConvolution>(in,
|
||||
offset,
|
||||
@ -224,7 +224,7 @@ private:
|
||||
params.group,
|
||||
params.deformableGroup,
|
||||
params.use_bilinear_interpolation_padding);
|
||||
return std::make_shared<ov::Function>(NodeVector {DeformableConvolutionV8}, ParameterVector {in, offset, filter});
|
||||
return std::make_shared<ov::Model>(NodeVector {DeformableConvolutionV8}, ParameterVector {in, offset, filter});
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -5795,4 +5795,4 @@ INSTANTIATE_TEST_SUITE_P(smoke_DeformableConvolution_With_Hardcoded_Refs, Refere
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_DeformableConvolutionV8_With_Hardcoded_Refs, ReferenceDeformableConvolutionV8LayerTest,
|
||||
testing::ValuesIn(generateDeformableConvolutionV8CombinedParams()), ReferenceDeformableConvolutionV8LayerTest::getTestCaseName);
|
||||
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -153,7 +153,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const DeformablePSROIPoolingParams& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const DeformablePSROIPoolingParams& params) {
|
||||
const auto input = std::make_shared<op::v0::Parameter>(params.inputType, params.inputShape);
|
||||
const auto rois = std::make_shared<op::v0::Parameter>(params.roisType, params.roisShape);
|
||||
if (params.offsetsShape.size() != 0) {
|
||||
@ -169,7 +169,7 @@ private:
|
||||
params.spatialBinsY,
|
||||
params.transStd,
|
||||
params.partSize);
|
||||
return std::make_shared<ov::Function>(NodeVector {DeformablePSROIPooling}, ParameterVector {input, rois, offsets});
|
||||
return std::make_shared<ov::Model>(NodeVector {DeformablePSROIPooling}, ParameterVector {input, rois, offsets});
|
||||
} else {
|
||||
const auto DeformablePSROIPooling = std::make_shared<opset1::DeformablePSROIPooling>(input,
|
||||
rois,
|
||||
@ -181,7 +181,7 @@ private:
|
||||
params.spatialBinsY,
|
||||
params.transStd,
|
||||
params.partSize);
|
||||
return std::make_shared<ov::Function>(NodeVector {DeformablePSROIPooling}, ParameterVector {input, rois});
|
||||
return std::make_shared<ov::Model>(NodeVector {DeformablePSROIPooling}, ParameterVector {input, rois});
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -351,4 +351,4 @@ std::vector<DeformablePSROIPoolingParams> generateDeformablePSROIPoolingCombined
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_DeformablePSROIPooling_With_Hardcoded_Refs, ReferenceDeformablePSROIPoolingLayerTest,
|
||||
testing::ValuesIn(generateDeformablePSROIPoolingCombinedParams()), ReferenceDeformablePSROIPoolingLayerTest::getTestCaseName);
|
||||
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -49,12 +49,12 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const DepthToSpaceParams& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const DepthToSpaceParams& params) {
|
||||
opset1::DepthToSpace::DepthToSpaceMode mode = params.mode == "DEPTH_FIRST" ?
|
||||
opset1::DepthToSpace::DepthToSpaceMode::DEPTH_FIRST : opset1::DepthToSpace::DepthToSpaceMode::BLOCKS_FIRST;
|
||||
const auto data = std::make_shared<opset1::Parameter>(params.dataTensor.type, params.dataTensor.shape);
|
||||
const auto depthToSpace = std::make_shared<opset1::DepthToSpace>(data, mode, params.blockSize);
|
||||
return std::make_shared<Function>(NodeVector {depthToSpace}, ParameterVector {data});
|
||||
return std::make_shared<Model>(NodeVector {depthToSpace}, ParameterVector {data});
|
||||
}
|
||||
};
|
||||
|
||||
@ -154,4 +154,4 @@ std::vector<DepthToSpaceParams> generateDepthToSpaceCombinedParams() {
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_DepthToSpace_With_Hardcoded_Refs, ReferenceDepthToSpaceLayerTest,
|
||||
testing::ValuesIn(generateDepthToSpaceCombinedParams()), ReferenceDepthToSpaceLayerTest::getTestCaseName);
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -179,7 +179,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const DetectionOutputParams& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const DetectionOutputParams& params) {
|
||||
const auto loc = std::make_shared<op::v0::Parameter>(params.inType, params.locShape);
|
||||
const auto conf = std::make_shared<op::v0::Parameter>(params.inType, params.confShape);
|
||||
const auto priorBoxes = std::make_shared<op::v0::Parameter>(params.inType, params.priorBoxesShape);
|
||||
@ -187,10 +187,10 @@ private:
|
||||
const auto auxConf = std::make_shared<op::v0::Parameter>(params.inType, params.auxConfShape);
|
||||
const auto auxLoc = std::make_shared<op::v0::Parameter>(params.inType, params.auxLocShape);
|
||||
const auto DetectionOutput = std::make_shared<op::v0::DetectionOutput>(loc, conf, priorBoxes, auxConf, auxLoc, params.attrs);
|
||||
return std::make_shared<ov::Function>(NodeVector {DetectionOutput}, ParameterVector {loc, conf, priorBoxes, auxConf, auxLoc});
|
||||
return std::make_shared<ov::Model>(NodeVector {DetectionOutput}, ParameterVector {loc, conf, priorBoxes, auxConf, auxLoc});
|
||||
} else {
|
||||
const auto DetectionOutput = std::make_shared<op::v0::DetectionOutput>(loc, conf, priorBoxes, params.attrs);
|
||||
return std::make_shared<ov::Function>(NodeVector {DetectionOutput}, ParameterVector {loc, conf, priorBoxes});
|
||||
return std::make_shared<ov::Model>(NodeVector {DetectionOutput}, ParameterVector {loc, conf, priorBoxes});
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -224,7 +224,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const DetectionOutputParams& params) {
|
||||
static std::shared_ptr<Model> CreateFunction(const DetectionOutputParams& params) {
|
||||
const auto loc = std::make_shared<op::v0::Parameter>(params.inType, params.locShape);
|
||||
const auto conf = std::make_shared<op::v0::Parameter>(params.inType, params.confShape);
|
||||
const auto priorBoxes = std::make_shared<op::v0::Parameter>(params.inType, params.priorBoxesShape);
|
||||
@ -233,11 +233,11 @@ private:
|
||||
const auto auxLoc = std::make_shared<op::v0::Parameter>(params.inType, params.auxLocShape);
|
||||
const auto DetectionOutput =
|
||||
std::make_shared<op::v8::DetectionOutput>(loc, conf, priorBoxes, auxConf, auxLoc, params.attrs_v8);
|
||||
return std::make_shared<ov::Function>(NodeVector{DetectionOutput},
|
||||
return std::make_shared<ov::Model>(NodeVector{DetectionOutput},
|
||||
ParameterVector{loc, conf, priorBoxes, auxConf, auxLoc});
|
||||
} else {
|
||||
const auto DetectionOutput = std::make_shared<op::v8::DetectionOutput>(loc, conf, priorBoxes, params.attrs_v8);
|
||||
return std::make_shared<ov::Function>(NodeVector{DetectionOutput}, ParameterVector{loc, conf, priorBoxes});
|
||||
return std::make_shared<ov::Model>(NodeVector{DetectionOutput}, ParameterVector{loc, conf, priorBoxes});
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -574,4 +574,4 @@ INSTANTIATE_TEST_SUITE_P(smoke_DetectionOutput_With_Hardcoded_Refs,
|
||||
testing::ValuesIn(generateDetectionOutputCombinedParams()),
|
||||
ReferenceDetectionOutputV8LayerTest::getTestCaseName);
|
||||
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -71,18 +71,18 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(DFTParams& p) {
|
||||
static std::shared_ptr<Model> CreateFunction(DFTParams& p) {
|
||||
auto in = std::make_shared<op::v0::Parameter>(p.m_input_type, p.m_input_shape);
|
||||
auto dft = std::make_shared<op::v7::DFT>(in, p.m_axes);
|
||||
|
||||
return std::make_shared<ov::Function>(dft, ParameterVector{in});
|
||||
return std::make_shared<ov::Model>(dft, ParameterVector{in});
|
||||
}
|
||||
|
||||
static std::shared_ptr<Function> CreateFunctionWithSignal(DFTParams& p) {
|
||||
static std::shared_ptr<Model> CreateFunctionWithSignal(DFTParams& p) {
|
||||
auto in = std::make_shared<op::v0::Parameter>(p.m_input_type, p.m_input_shape);
|
||||
auto dft = std::make_shared<op::v7::DFT>(in, p.m_axes, p.m_signal);
|
||||
|
||||
return std::make_shared<ov::Function>(dft, ParameterVector{in});
|
||||
return std::make_shared<ov::Model>(dft, ParameterVector{in});
|
||||
}
|
||||
};
|
||||
|
||||
@ -1408,4 +1408,4 @@ INSTANTIATE_TEST_SUITE_P(
|
||||
::testing::ValuesIn(generateCombinedParamsForDFT()),
|
||||
ReferenceDFTLayerTest::getTestCaseName);
|
||||
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -70,14 +70,14 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const PartialShape& input_shape1,
|
||||
static std::shared_ptr<Model> CreateFunction(const PartialShape& input_shape1,
|
||||
const PartialShape& input_shape2,
|
||||
const element::Type& input_type,
|
||||
const element::Type& expected_output_type) {
|
||||
const auto in1 = std::make_shared<op::v0::Parameter>(input_type, input_shape1);
|
||||
const auto in2 = std::make_shared<op::v0::Parameter>(input_type, input_shape2);
|
||||
const auto divide = std::make_shared<op::v1::Divide>(in1, in2);
|
||||
return std::make_shared<Function>(NodeVector{divide}, ParameterVector{in1, in2});
|
||||
return std::make_shared<Model>(NodeVector{divide}, ParameterVector{in1, in2});
|
||||
}
|
||||
};
|
||||
|
||||
@ -101,7 +101,7 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const PartialShape& input_shape1,
|
||||
static std::shared_ptr<Model> CreateFunction(const PartialShape& input_shape1,
|
||||
const PartialShape& input_shape2,
|
||||
const element::Type& input_type,
|
||||
const element::Type& expected_output_type,
|
||||
@ -109,7 +109,7 @@ private:
|
||||
const auto in1 = std::make_shared<op::v0::Parameter>(input_type, input_shape1);
|
||||
const auto in2 = std::make_shared<op::v0::Parameter>(input_type, input_shape2);
|
||||
const auto divide = std::make_shared<op::v1::Divide>(in1, in2, pythondiv);
|
||||
return std::make_shared<Function>(NodeVector{divide}, ParameterVector{in1, in2});
|
||||
return std::make_shared<Model>(NodeVector{divide}, ParameterVector{in1, in2});
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -50,11 +50,11 @@ public:
|
||||
}
|
||||
|
||||
private:
|
||||
static std::shared_ptr<Function> CreateFunction(const PartialShape& input_shape, const element::Type& input_type,
|
||||
static std::shared_ptr<Model> CreateFunction(const PartialShape& input_shape, const element::Type& input_type,
|
||||
const element::Type& expected_output_type, const double alpha) {
|
||||
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);
|
||||
const auto Elu = std::make_shared<op::v0::Elu>(in, alpha);
|
||||
return std::make_shared<ov::Function>(NodeVector {Elu}, ParameterVector {in});
|
||||
return std::make_shared<ov::Model>(NodeVector {Elu}, ParameterVector {in});
|
||||
}
|
||||
};
|
||||
|
||||
@ -143,4 +143,4 @@ std::vector<EluParams> generateEluCombinedParams() {
|
||||
INSTANTIATE_TEST_SUITE_P(smoke_Elu_With_Hardcoded_Refs, ReferenceEluLayerTest,
|
||||
testing::ValuesIn(generateEluCombinedParams()), ReferenceEluLayerTest::getTestCaseName);
|
||||
|
||||
} // namespace
|
||||
} // namespace
|
||||
|
@ -9,7 +9,7 @@
#include "base_reference_test.hpp"

using namespace reference_tests;
using namespace ngraph;
using namespace ov;
using namespace InferenceEngine;

struct EmbeddingSegmentsSumParams {
@ -79,7 +79,7 @@ public:
}

private:
static std::shared_ptr<Function> CreateFunction(
static std::shared_ptr<Model> CreateFunction(
const PartialShape& input_shape,
const element::Type& input_type,
const std::shared_ptr<ngraph::opset1::Constant> indices,
@ -87,7 +87,7 @@ private:
const std::shared_ptr<ngraph::opset1::Constant> num_segments,
const std::shared_ptr<ngraph::opset1::Constant> default_index,
const std::shared_ptr<ngraph::opset1::Constant> per_sample_weights) {
const auto in = std::make_shared<op::Parameter>(input_type, input_shape);
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);

if (default_index) {
if (per_sample_weights) {
@ -97,18 +97,18 @@ private:
num_segments,
default_index,
per_sample_weights);
return std::make_shared<Function>(NodeVector{ess}, ParameterVector{in});
return std::make_shared<Model>(NodeVector{ess}, ParameterVector{in});
} else {
const auto ess = std::make_shared<op::v3::EmbeddingSegmentsSum>(in,
indices,
segment_ids,
num_segments,
default_index);
return std::make_shared<Function>(NodeVector{ess}, ParameterVector{in});
return std::make_shared<Model>(NodeVector{ess}, ParameterVector{in});
}
} else {
const auto ess = std::make_shared<op::v3::EmbeddingSegmentsSum>(in, indices, segment_ids, num_segments);
return std::make_shared<Function>(NodeVector{ess}, ParameterVector{in});
return std::make_shared<Model>(NodeVector{ess}, ParameterVector{in});
}
}
};

@ -9,7 +9,7 @@
#include "base_reference_test.hpp"

using namespace reference_tests;
using namespace ngraph;
using namespace ov;
using namespace InferenceEngine;

struct EmbeddingBagOffsetsSumParams {
@ -74,14 +74,14 @@ public:
}

private:
static std::shared_ptr<Function> CreateFunction(
static std::shared_ptr<ov::Model> CreateFunction(
const PartialShape& input_shape,
const element::Type& input_type,
const std::shared_ptr<ngraph::opset1::Constant> indices,
const std::shared_ptr<ngraph::opset1::Constant> offsets,
const std::shared_ptr<ngraph::opset1::Constant> default_index,
const std::shared_ptr<ngraph::opset1::Constant> per_sample_weights) {
const auto in = std::make_shared<op::Parameter>(input_type, input_shape);
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);

if (default_index) {
if (per_sample_weights) {
@ -90,14 +90,14 @@ private:
offsets,
default_index,
per_sample_weights);
return std::make_shared<Function>(NodeVector{ess}, ParameterVector{in});
return std::make_shared<Model>(NodeVector{ess}, ParameterVector{in});
} else {
const auto ess = std::make_shared<op::v3::EmbeddingBagOffsetsSum>(in, indices, offsets, default_index);
return std::make_shared<Function>(NodeVector{ess}, ParameterVector{in});
return std::make_shared<Model>(NodeVector{ess}, ParameterVector{in});
}
} else {
const auto ess = std::make_shared<op::v3::EmbeddingBagOffsetsSum>(in, indices, offsets);
return std::make_shared<Function>(NodeVector{ess}, ParameterVector{in});
return std::make_shared<Model>(NodeVector{ess}, ParameterVector{in});
}
}
};

@ -9,7 +9,7 @@
#include "base_reference_test.hpp"

using namespace reference_tests;
using namespace ngraph;
using namespace ov;
using namespace InferenceEngine;

struct EmbeddingBagPackedSumParams {
@ -63,19 +63,19 @@ public:
}

private:
static std::shared_ptr<Function> CreateFunction(
static std::shared_ptr<Model> CreateFunction(
const PartialShape& input_shape,
const element::Type& input_type,
const std::shared_ptr<ngraph::opset1::Constant> indices,
const std::shared_ptr<ngraph::opset1::Constant> per_sample_weights) {
const auto in = std::make_shared<op::Parameter>(input_type, input_shape);
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);

if (per_sample_weights) {
const auto ess = std::make_shared<op::v3::EmbeddingBagPackedSum>(in, indices, per_sample_weights);
return std::make_shared<Function>(NodeVector{ess}, ParameterVector{in});
return std::make_shared<Model>(NodeVector{ess}, ParameterVector{in});
} else {
const auto ess = std::make_shared<op::v3::EmbeddingBagPackedSum>(in, indices);
return std::make_shared<Function>(NodeVector{ess}, ParameterVector{in});
return std::make_shared<Model>(NodeVector{ess}, ParameterVector{in});
}
}
};

@ -60,11 +60,11 @@ public:
}

private:
static std::shared_ptr<Function> CreateFunction(const PartialShape& input_shape, const element::Type& input_type,
static std::shared_ptr<Model> CreateFunction(const PartialShape& input_shape, const element::Type& input_type,
const element::Type& expected_output_type) {
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);
const auto erf = std::make_shared<op::v0::Erf>(in);
return std::make_shared<ov::Function>(NodeVector {erf}, ParameterVector {in});
return std::make_shared<ov::Model>(NodeVector {erf}, ParameterVector {in});
}
};

@ -49,11 +49,11 @@ public:
}

private:
static std::shared_ptr<Function> CreateFunction(const PartialShape& input_shape, const element::Type& input_type,
static std::shared_ptr<Model> CreateFunction(const PartialShape& input_shape, const element::Type& input_type,
const element::Type& expected_output_type) {
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);
const auto Exp = std::make_shared<op::v0::Exp>(in);
return std::make_shared<ov::Function>(NodeVector {Exp}, ParameterVector {in});
return std::make_shared<ov::Model>(NodeVector {Exp}, ParameterVector {in});
}
};

@ -75,12 +75,12 @@ public:
}

private:
static std::shared_ptr<Function> CreateFunction(const PartialShape& input_shape, const element::Type& input_type,
static std::shared_ptr<Model> CreateFunction(const PartialShape& input_shape, const element::Type& input_type,
const element::Type& expected_output_type) {
const auto in = std::make_shared<op::v0::Parameter>(input_type, input_shape);
const auto Exp = std::make_shared<op::v0::Exp>(in);
const auto ExpInPlace = std::make_shared<op::v0::Exp>(Exp);
return std::make_shared<ov::Function>(NodeVector {ExpInPlace}, ParameterVector {in});
return std::make_shared<ov::Model>(NodeVector {ExpInPlace}, ParameterVector {in});
}
};

@ -79,7 +79,7 @@ public:
}

private:
static std::shared_ptr<Function> CreateFunction(const ExperimentalDOParams& params) {
static std::shared_ptr<Model> CreateFunction(const ExperimentalDOParams& params) {
const auto rois = std::make_shared<op::v0::Parameter>(params.inType, params.roisShape);
const auto deltas = std::make_shared<op::v0::Parameter>(params.inType, params.deltasShape);
const auto scores = std::make_shared<op::v0::Parameter>(params.inType, params.scoresShape);
@ -89,7 +89,7 @@ private:
scores,
im_info,
params.attrs);
return std::make_shared<ov::Function>(ExperimentalDO->outputs(), ParameterVector {rois, deltas, scores, im_info});
return std::make_shared<ov::Model>(ExperimentalDO->outputs(), ParameterVector {rois, deltas, scores, im_info});
}
};

@ -173,4 +173,4 @@ std::vector<ExperimentalDOParams> generateExperimentalDOCombinedParams() {

INSTANTIATE_TEST_SUITE_P(smoke_ExperimentalDetectronDetectionOutput_With_Hardcoded_Refs, ReferenceExperimentalDOLayerTest,
testing::ValuesIn(generateExperimentalDOCombinedParams()), ReferenceExperimentalDOLayerTest::getTestCaseName);
} // namespace
} // namespace

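For operations with several outputs, such as ExperimentalDetectronDetectionOutput above, the tests pass the node's outputs() to the graph constructor instead of a NodeVector, so every output becomes a result. A minimal sketch of that constructor form, not taken from this commit; the header paths and helper name are illustrative:

// Illustrative sketch only: the OutputVector-based ov::Model constructor used by the test above.
#include <memory>
#include "openvino/core/model.hpp"     // assumed location of ov::Model
#include "openvino/op/parameter.hpp"

std::shared_ptr<ov::Model> make_model_from_all_outputs(const std::shared_ptr<ov::Node>& node,
                                                       const ov::ParameterVector& params) {
    // node->outputs() returns an ov::OutputVector; each entry becomes a Model result.
    return std::make_shared<ov::Model>(node->outputs(), params);
}
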
@ -0,0 +1,226 @@
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <gtest/gtest.h>

#include "openvino/op/experimental_detectron_prior_grid_generator.hpp"
#include "base_reference_test.hpp"

using namespace reference_tests;
using namespace ov;

using Attrs = op::v6::ExperimentalDetectronPriorGridGenerator::Attributes;

namespace {
struct ExperimentalPGGParams {
template <class IT>
ExperimentalPGGParams(const Attrs& attrs,
const PartialShape& priorsShape,
const PartialShape& featureMapShape,
const PartialShape& imageSizeInfoShape,
const Shape& outRefShape,
const element::Type& iType,
const std::vector<IT>& priorsValues,
const std::vector<IT>& refValues,
const std::string& testcaseName = "")
: attrs(attrs),
priorsShape(priorsShape),
featureMapShape(featureMapShape),
imageSizeInfoShape(imageSizeInfoShape),
outRefShape(outRefShape),
inType(iType),
outType(iType),
priorsData(CreateTensor(iType, priorsValues)),
refData(CreateTensor(outRefShape, iType, refValues)),
testcaseName(testcaseName) {
std::vector<IT> featureMapValues(shape_size(featureMapShape.get_shape()));
std::iota(featureMapValues.begin(), featureMapValues.end(), 0);
featureMapData = CreateTensor(iType, featureMapValues);

std::vector<IT> imageSizeInfoValues(shape_size(imageSizeInfoShape.get_shape()));
std::iota(imageSizeInfoValues.begin(), imageSizeInfoValues.end(), 0);
imageSizeInfoData = CreateTensor(iType, imageSizeInfoValues);

if (shape_size(outRefShape) > refValues.size())
actualComparisonSize = refValues.size();
else
actualComparisonSize = 0;
}

Attrs attrs;
PartialShape priorsShape;
PartialShape featureMapShape;
PartialShape imageSizeInfoShape;
Shape outRefShape;
size_t actualComparisonSize;
ov::element::Type inType;
ov::element::Type outType;
ov::runtime::Tensor priorsData;
ov::runtime::Tensor featureMapData;
ov::runtime::Tensor imageSizeInfoData;
ov::runtime::Tensor refData;
std::string testcaseName;
};

class ReferenceExperimentalPGGLayerTest : public testing::TestWithParam<ExperimentalPGGParams>, public CommonReferenceTest {
public:
void SetUp() override {
auto params = GetParam();
function = CreateFunction(params);
inputData = {params.priorsData, params.featureMapData, params.imageSizeInfoData};
refOutData = {params.refData};

if (params.actualComparisonSize > 0)
actual_comparision_size = params.actualComparisonSize;
}
static std::string getTestCaseName(const testing::TestParamInfo<ExperimentalPGGParams>& obj) {
auto param = obj.param;
std::ostringstream result;
result << "priorsShape=" << param.priorsShape << "_";
result << "featureMapShape=" << param.featureMapShape << "_";
result << "imageSizeInfoShape=" << param.imageSizeInfoShape << "_";
result << "iType=" << param.inType << "_";
result << "oType=" << param.outType << "_";
result << "flatten=" << param.attrs.flatten << "_";
result << "h=" << param.attrs.h << "_";
result << "w=" << param.attrs.w << "_";
result << "stride_x=" << param.attrs.stride_x << "_";
result << "stride_y=" << param.attrs.stride_y;
if (param.testcaseName != "")
result << "_" << param.testcaseName;
return result.str();
}

private:
static std::shared_ptr<Model> CreateFunction(const ExperimentalPGGParams& params) {
const auto priors = std::make_shared<op::v0::Parameter>(params.inType, params.priorsShape);
const auto featureMap = std::make_shared<op::v0::Parameter>(params.inType, params.featureMapShape);
const auto im_info = std::make_shared<op::v0::Parameter>(params.inType, params.imageSizeInfoShape);
const auto ExperimentalPGG = std::make_shared<op::v6::ExperimentalDetectronPriorGridGenerator>(priors,
featureMap,
im_info,
params.attrs);
return std::make_shared<ov::Model>(NodeVector {ExperimentalPGG}, ParameterVector {priors, featureMap, im_info});
}
};

TEST_P(ReferenceExperimentalPGGLayerTest, CompareWithRefs) {
Exec();
}

template <element::Type_t IN_ET>
std::vector<ExperimentalPGGParams> generateExperimentalPGGFloatParams() {
using T = typename element_type_traits<IN_ET>::value_type;

std::vector<ExperimentalPGGParams> experimentalPGGParams {
ExperimentalPGGParams(Attrs{true, 0, 0, 4.0f, 4.0f},
{3, 4},
{1, 16, 4, 5},
{1, 3, 100, 200},
{60, 4},
IN_ET,
std::vector<T>{-24.5, -12.5, 24.5, 12.5, -16.5, -16.5, 16.5, 16.5, -12.5, -24.5, 12.5, 24.5},
std::vector<T>{-22.5, -10.5, 26.5, 14.5, -14.5, -14.5, 18.5, 18.5, -10.5, -22.5, 14.5, 26.5, -18.5, -10.5, 30.5, 14.5,
-10.5, -14.5, 22.5, 18.5, -6.5, -22.5, 18.5, 26.5, -14.5, -10.5, 34.5, 14.5, -6.5, -14.5, 26.5, 18.5,
-2.5, -22.5, 22.5, 26.5, -10.5, -10.5, 38.5, 14.5, -2.5, -14.5, 30.5, 18.5, 1.5, -22.5, 26.5, 26.5,
-6.5, -10.5, 42.5, 14.5, 1.5, -14.5, 34.5, 18.5, 5.5, -22.5, 30.5, 26.5, -22.5, -6.5, 26.5, 18.5,
-14.5, -10.5, 18.5, 22.5, -10.5, -18.5, 14.5, 30.5, -18.5, -6.5, 30.5, 18.5, -10.5, -10.5, 22.5, 22.5,
-6.5, -18.5, 18.5, 30.5, -14.5, -6.5, 34.5, 18.5, -6.5, -10.5, 26.5, 22.5, -2.5, -18.5, 22.5, 30.5,
-10.5, -6.5, 38.5, 18.5, -2.5, -10.5, 30.5, 22.5, 1.5, -18.5, 26.5, 30.5, -6.5, -6.5, 42.5, 18.5,
1.5, -10.5, 34.5, 22.5, 5.5, -18.5, 30.5, 30.5, -22.5, -2.5, 26.5, 22.5, -14.5, -6.5, 18.5, 26.5,
-10.5, -14.5, 14.5, 34.5, -18.5, -2.5, 30.5, 22.5, -10.5, -6.5, 22.5, 26.5, -6.5, -14.5, 18.5, 34.5,
-14.5, -2.5, 34.5, 22.5, -6.5, -6.5, 26.5, 26.5, -2.5, -14.5, 22.5, 34.5, -10.5, -2.5, 38.5, 22.5,
-2.5, -6.5, 30.5, 26.5, 1.5, -14.5, 26.5, 34.5, -6.5, -2.5, 42.5, 22.5, 1.5, -6.5, 34.5, 26.5,
5.5, -14.5, 30.5, 34.5, -22.5, 1.5, 26.5, 26.5, -14.5, -2.5, 18.5, 30.5, -10.5, -10.5, 14.5, 38.5,
-18.5, 1.5, 30.5, 26.5, -10.5, -2.5, 22.5, 30.5, -6.5, -10.5, 18.5, 38.5, -14.5, 1.5, 34.5, 26.5,
-6.5, -2.5, 26.5, 30.5, -2.5, -10.5, 22.5, 38.5, -10.5, 1.5, 38.5, 26.5, -2.5, -2.5, 30.5, 30.5,
1.5, -10.5, 26.5, 38.5, -6.5, 1.5, 42.5, 26.5, 1.5, -2.5, 34.5, 30.5, 5.5, -10.5, 30.5, 38.5}),
ExperimentalPGGParams(Attrs{false, 0, 0, 8.0f, 8.0f},
{3, 4},
{1, 16, 3, 7},
{1, 3, 100, 200},
{3, 7, 3, 4},
IN_ET,
std::vector<T>{-44.5, -24.5, 44.5, 24.5, -32.5, -32.5, 32.5, 32.5, -24.5, -44.5, 24.5, 44.5},
std::vector<T>{-40.5, -20.5, 48.5, 28.5, -28.5, -28.5, 36.5, 36.5, -20.5, -40.5, 28.5, 48.5, -32.5, -20.5, 56.5, 28.5,
-20.5, -28.5, 44.5, 36.5, -12.5, -40.5, 36.5, 48.5, -24.5, -20.5, 64.5, 28.5, -12.5, -28.5, 52.5, 36.5,
-4.5, -40.5, 44.5, 48.5, -16.5, -20.5, 72.5, 28.5, -4.5, -28.5, 60.5, 36.5, 3.5, -40.5, 52.5, 48.5,
-8.5, -20.5, 80.5, 28.5, 3.5, -28.5, 68.5, 36.5, 11.5, -40.5, 60.5, 48.5, -0.5, -20.5, 88.5, 28.5,
11.5, -28.5, 76.5, 36.5, 19.5, -40.5, 68.5, 48.5, 7.5, -20.5, 96.5, 28.5, 19.5, -28.5, 84.5, 36.5,
27.5, -40.5, 76.5, 48.5, -40.5, -12.5, 48.5, 36.5, -28.5, -20.5, 36.5, 44.5, -20.5, -32.5, 28.5, 56.5,
-32.5, -12.5, 56.5, 36.5, -20.5, -20.5, 44.5, 44.5, -12.5, -32.5, 36.5, 56.5, -24.5, -12.5, 64.5, 36.5,
-12.5, -20.5, 52.5, 44.5, -4.5, -32.5, 44.5, 56.5, -16.5, -12.5, 72.5, 36.5, -4.5, -20.5, 60.5, 44.5,
3.5, -32.5, 52.5, 56.5, -8.5, -12.5, 80.5, 36.5, 3.5, -20.5, 68.5, 44.5, 11.5, -32.5, 60.5, 56.5,
-0.5, -12.5, 88.5, 36.5, 11.5, -20.5, 76.5, 44.5, 19.5, -32.5, 68.5, 56.5, 7.5, -12.5, 96.5, 36.5,
19.5, -20.5, 84.5, 44.5, 27.5, -32.5, 76.5, 56.5, -40.5, -4.5, 48.5, 44.5, -28.5, -12.5, 36.5, 52.5,
-20.5, -24.5, 28.5, 64.5, -32.5, -4.5, 56.5, 44.5, -20.5, -12.5, 44.5, 52.5, -12.5, -24.5, 36.5, 64.5,
-24.5, -4.5, 64.5, 44.5, -12.5, -12.5, 52.5, 52.5, -4.5, -24.5, 44.5, 64.5, -16.5, -4.5, 72.5, 44.5,
-4.5, -12.5, 60.5, 52.5, 3.5, -24.5, 52.5, 64.5, -8.5, -4.5, 80.5, 44.5, 3.5, -12.5, 68.5, 52.5,
11.5, -24.5, 60.5, 64.5, -0.5, -4.5, 88.5, 44.5, 11.5, -12.5, 76.5, 52.5, 19.5, -24.5, 68.5, 64.5,
7.5, -4.5, 96.5, 44.5, 19.5, -12.5, 84.5, 52.5, 27.5, -24.5, 76.5, 64.5}),
ExperimentalPGGParams(Attrs{true, 3, 6, 64.0f, 64.0f},
{3, 4},
{1, 16, 100, 100},
{1, 3, 100, 200},
{30000, 4},
IN_ET,
std::vector<T>{-364.5, -184.5, 364.5, 184.5, -256.5, -256.5, 256.5, 256.5, -180.5, -360.5, 180.5, 360.5},
std::vector<T>{-332.5, -152.5, 396.5, 216.5, -224.5, -224.5, 288.5, 288.5, -148.5, -328.5, 212.5, 392.5, -268.5, -152.5,
460.5, 216.5, -160.5, -224.5, 352.5, 288.5, -84.5, -328.5, 276.5, 392.5, -204.5, -152.5, 524.5, 216.5,
-96.5, -224.5, 416.5, 288.5, -20.5, -328.5, 340.5, 392.5, -140.5, -152.5, 588.5, 216.5, -32.5, -224.5,
480.5, 288.5, 43.5, -328.5, 404.5, 392.5, -76.5, -152.5, 652.5, 216.5, 31.5, -224.5, 544.5, 288.5,
107.5, -328.5, 468.5, 392.5, -12.5, -152.5, 716.5, 216.5, 95.5, -224.5, 608.5, 288.5, 171.5, -328.5,
532.5, 392.5, -332.5, -88.5, 396.5, 280.5, -224.5, -160.5, 288.5, 352.5, -148.5, -264.5, 212.5, 456.5,
-268.5, -88.5, 460.5, 280.5, -160.5, -160.5, 352.5, 352.5, -84.5, -264.5, 276.5, 456.5, -204.5, -88.5,
524.5, 280.5, -96.5, -160.5, 416.5, 352.5, -20.5, -264.5, 340.5, 456.5, -140.5, -88.5, 588.5, 280.5,
-32.5, -160.5, 480.5, 352.5, 43.5, -264.5, 404.5, 456.5, -76.5, -88.5, 652.5, 280.5, 31.5, -160.5,
544.5, 352.5, 107.5, -264.5, 468.5, 456.5, -12.5, -88.5, 716.5, 280.5, 95.5, -160.5, 608.5, 352.5,
171.5, -264.5, 532.5, 456.5, -332.5, -24.5, 396.5, 344.5, -224.5, -96.5, 288.5, 416.5, -148.5, -200.5,
212.5, 520.5, -268.5, -24.5, 460.5, 344.5, -160.5, -96.5, 352.5, 416.5, -84.5, -200.5, 276.5, 520.5,
-204.5, -24.5, 524.5, 344.5, -96.5, -96.5, 416.5, 416.5, -20.5, -200.5, 340.5, 520.5, -140.5, -24.5,
588.5, 344.5, -32.5, -96.5, 480.5, 416.5, 43.5, -200.5, 404.5, 520.5, -76.5, -24.5, 652.5, 344.5,
31.5, -96.5, 544.5, 416.5, 107.5, -200.5, 468.5, 520.5, -12.5, -24.5, 716.5, 344.5, 95.5, -96.5,
608.5, 416.5, 171.5, -200.5, 532.5, 520.5}),
ExperimentalPGGParams(Attrs{false, 5, 3, 32.0f, 32.0f},
{3, 4},
{1, 16, 100, 100},
{1, 3, 100, 200},
{100, 100, 3, 4},
IN_ET,
std::vector<T>{-180.5, -88.5, 180.5, 88.5, -128.5, -128.5, 128.5, 128.5, -92.5, -184.5, 92.5, 184.5},
std::vector<T>{-164.5, -72.5, 196.5, 104.5, -112.5, -112.5, 144.5, 144.5, -76.5, -168.5, 108.5, 200.5, -132.5, -72.5,
228.5, 104.5, -80.5, -112.5, 176.5, 144.5, -44.5, -168.5, 140.5, 200.5, -100.5, -72.5, 260.5, 104.5,
-48.5, -112.5, 208.5, 144.5, -12.5, -168.5, 172.5, 200.5, -164.5, -40.5, 196.5, 136.5, -112.5, -80.5,
144.5, 176.5, -76.5, -136.5, 108.5, 232.5, -132.5, -40.5, 228.5, 136.5, -80.5, -80.5, 176.5, 176.5,
-44.5, -136.5, 140.5, 232.5, -100.5, -40.5, 260.5, 136.5, -48.5, -80.5, 208.5, 176.5, -12.5, -136.5,
172.5, 232.5, -164.5, -8.5, 196.5, 168.5, -112.5, -48.5, 144.5, 208.5, -76.5, -104.5, 108.5, 264.5,
-132.5, -8.5, 228.5, 168.5, -80.5, -48.5, 176.5, 208.5, -44.5, -104.5, 140.5, 264.5, -100.5, -8.5,
260.5, 168.5, -48.5, -48.5, 208.5, 208.5, -12.5, -104.5, 172.5, 264.5, -164.5, 23.5, 196.5, 200.5,
-112.5, -16.5, 144.5, 240.5, -76.5, -72.5, 108.5, 296.5, -132.5, 23.5, 228.5, 200.5, -80.5, -16.5,
176.5, 240.5, -44.5, -72.5, 140.5, 296.5, -100.5, 23.5, 260.5, 200.5, -48.5, -16.5, 208.5, 240.5,
-12.5, -72.5, 172.5, 296.5, -164.5, 55.5, 196.5, 232.5, -112.5, 15.5, 144.5, 272.5, -76.5, -40.5,
108.5, 328.5, -132.5, 55.5, 228.5, 232.5, -80.5, 15.5, 176.5, 272.5, -44.5, -40.5, 140.5, 328.5,
-100.5, 55.5, 260.5, 232.5, -48.5, 15.5, 208.5, 272.5, -12.5, -40.5, 172.5, 328.5}),
};
return experimentalPGGParams;
}

std::vector<ExperimentalPGGParams> generateExperimentalPGGCombinedParams() {
const std::vector<std::vector<ExperimentalPGGParams>> experimentalPGGTypeParams {
generateExperimentalPGGFloatParams<element::Type_t::f64>(),
generateExperimentalPGGFloatParams<element::Type_t::f32>(),
generateExperimentalPGGFloatParams<element::Type_t::f16>(),
generateExperimentalPGGFloatParams<element::Type_t::bf16>(),
};
std::vector<ExperimentalPGGParams> combinedParams;

for (const auto& params : experimentalPGGTypeParams) {
combinedParams.insert(combinedParams.end(), params.begin(), params.end());
}
return combinedParams;
}

INSTANTIATE_TEST_SUITE_P(smoke_ExperimentalDetectronPriorGridGenerator_With_Hardcoded_Refs, ReferenceExperimentalPGGLayerTest,
testing::ValuesIn(generateExperimentalPGGCombinedParams()), ReferenceExperimentalPGGLayerTest::getTestCaseName);
} // namespace

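As a usage sketch of the operation covered by the new test file above: the attribute and shape values below are copied from its first test case, while the header paths for Model and Parameter and the function name are assumptions; this is illustrative code, not part of the commit.

// Illustrative sketch only: builds a standalone ov::Model around ExperimentalDetectronPriorGridGenerator.
#include <memory>
#include "openvino/core/model.hpp"     // assumed location of ov::Model
#include "openvino/op/experimental_detectron_prior_grid_generator.hpp"
#include "openvino/op/parameter.hpp"

std::shared_ptr<ov::Model> make_prior_grid_model() {
    using PGG = ov::op::v6::ExperimentalDetectronPriorGridGenerator;
    PGG::Attributes attrs{};
    attrs.flatten = true;   // first test case above produces a flattened {60, 4} output
    attrs.h = 0;            // 0 in that case: grid size follows the feature map (4 x 5 cells x 3 priors = 60 rows)
    attrs.w = 0;
    attrs.stride_x = 4.0f;
    attrs.stride_y = 4.0f;

    const auto priors = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::PartialShape{3, 4});
    const auto feature_map = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::PartialShape{1, 16, 4, 5});
    const auto im_info = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::PartialShape{1, 3, 100, 200});
    const auto pgg = std::make_shared<PGG>(priors, feature_map, im_info, attrs);
    return std::make_shared<ov::Model>(ov::NodeVector{pgg}, ov::ParameterVector{priors, feature_map, im_info});
}
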
@ -77,7 +77,7 @@ public:
}

private:
static std::shared_ptr<Function> CreateFunction(const ExperimentalGPParams& params) {
static std::shared_ptr<Model> CreateFunction(const ExperimentalGPParams& params) {
const auto im_info = std::make_shared<op::v0::Parameter>(params.inType, params.imageSizeInfoShape);
const auto anchors = std::make_shared<op::v0::Parameter>(params.inType, params.anchorsShape);
const auto deltas = std::make_shared<op::v0::Parameter>(params.inType, params.deltasShape);
@ -87,7 +87,7 @@ private:
deltas,
scores,
params.attrs);
return std::make_shared<ov::Function>(ExperimentalGP->outputs(), ParameterVector {im_info, anchors, deltas, scores});
return std::make_shared<ov::Model>(ExperimentalGP->outputs(), ParameterVector {im_info, anchors, deltas, scores});
}
};

@ -207,4 +207,4 @@ std::vector<ExperimentalGPParams> generateExperimentalGPCombinedParams() {

INSTANTIATE_TEST_SUITE_P(smoke_ExperimentalDetectronGenerateProposalsSingleImage_With_Hardcoded_Refs, ReferenceExperimentalGPLayerTest,
testing::ValuesIn(generateExperimentalGPCombinedParams()), ReferenceExperimentalGPLayerTest::getTestCaseName);
} // namespace
} // namespace

Some files were not shown because too many files have changed in this diff.