diff --git a/src/inference/src/dev/core_impl.cpp b/src/inference/src/dev/core_impl.cpp
index 8028e2fc553..d19e4a862c2 100644
--- a/src/inference/src/dev/core_impl.cpp
+++ b/src/inference/src/dev/core_impl.cpp
@@ -710,13 +710,11 @@ ov::SoPtr ov::CoreImpl::compile_model(const std::shared_ptr<
     auto plugin = get_plugin(parsed._deviceName);
     ov::SoPtr<ov::ICompiledModel> res;
     auto cacheManager = coreConfig.get_cache_config_for_device(plugin, parsed._config)._cacheManager;
-    if (cacheManager && device_supports_model_caching(plugin)) {
+    // Skip caching for proxy plugin. HW plugin will load network from the cache
+    if (cacheManager && device_supports_model_caching(plugin) && !is_proxy_device(plugin)) {
         CacheContent cacheContent{cacheManager};
         cacheContent.blobId = ov::ModelCache::compute_hash(model, create_compile_config(plugin, parsed._config));
-        std::unique_ptr<CacheGuard::CacheGuardEntry> lock;
-        // Proxy plugin fallback to lowlevel device
-        if (!is_proxy_device(plugin))
-            lock = cacheGuard.get_hash_lock(cacheContent.blobId);
+        std::unique_ptr<CacheGuard::CacheGuardEntry> lock = cacheGuard.get_hash_lock(cacheContent.blobId);
         res = load_model_from_cache(cacheContent, plugin, parsed._config, ov::SoPtr<ov::IRemoteContext>{}, [&]() {
             return compile_model_and_cache(model,
                                            plugin,
@@ -746,13 +744,11 @@ ov::SoPtr ov::CoreImpl::compile_model(const std::shared_ptr<
     auto plugin = get_plugin(parsed._deviceName);
     ov::SoPtr<ov::ICompiledModel> res;
     auto cacheManager = coreConfig.get_cache_config_for_device(plugin, parsed._config)._cacheManager;
-    if (cacheManager && device_supports_model_caching(plugin)) {
+    // Skip caching for proxy plugin. HW plugin will load network from the cache
+    if (cacheManager && device_supports_model_caching(plugin) && !is_proxy_device(plugin)) {
         CacheContent cacheContent{cacheManager};
         cacheContent.blobId = ov::ModelCache::compute_hash(model, create_compile_config(plugin, parsed._config));
-        std::unique_ptr<CacheGuard::CacheGuardEntry> lock;
-        // Proxy plugin fallback to lowlevel device
-        if (!is_proxy_device(plugin))
-            lock = cacheGuard.get_hash_lock(cacheContent.blobId);
+        std::unique_ptr<CacheGuard::CacheGuardEntry> lock = cacheGuard.get_hash_lock(cacheContent.blobId);
         res = load_model_from_cache(cacheContent, plugin, parsed._config, context, [&]() {
             return compile_model_and_cache(model, plugin, parsed._config, context, cacheContent);
         });
@@ -793,13 +789,11 @@ ov::SoPtr ov::CoreImpl::compile_model(const std::string& mod
     ov::SoPtr<ov::ICompiledModel> compiled_model;
     auto cacheManager = coreConfig.get_cache_config_for_device(plugin, parsed._config)._cacheManager;
-    if (cacheManager && device_supports_model_caching(plugin)) {
+    // Skip caching for proxy plugin. HW plugin will load network from the cache
+    if (cacheManager && device_supports_model_caching(plugin) && !is_proxy_device(plugin)) {
         CacheContent cacheContent{cacheManager, model_path};
         cacheContent.blobId = ov::ModelCache::compute_hash(model_path, create_compile_config(plugin, parsed._config));
-        std::unique_ptr<CacheGuard::CacheGuardEntry> lock;
-        // Proxy plugin fallback to lowlevel device
-        if (!is_proxy_device(plugin))
-            lock = cacheGuard.get_hash_lock(cacheContent.blobId);
+        std::unique_ptr<CacheGuard::CacheGuardEntry> lock = cacheGuard.get_hash_lock(cacheContent.blobId);
         compiled_model = load_model_from_cache(cacheContent, plugin, parsed._config, ov::SoPtr<ov::IRemoteContext>{}, [&]() {
             auto cnnNetwork = ReadNetwork(model_path, std::string());
@@ -830,14 +824,12 @@ ov::SoPtr ov::CoreImpl::compile_model(const std::string& mod
     ov::SoPtr<ov::ICompiledModel> compiled_model;
     auto cacheManager = coreConfig.get_cache_config_for_device(plugin, parsed._config)._cacheManager;
-    if (cacheManager && device_supports_model_caching(plugin)) {
+    // Skip caching for proxy plugin. HW plugin will load network from the cache
+    if (cacheManager && device_supports_model_caching(plugin) && !is_proxy_device(plugin)) {
         CacheContent cacheContent{cacheManager};
         cacheContent.blobId = ov::ModelCache::compute_hash(model_str, weights, create_compile_config(plugin, parsed._config));
-        std::unique_ptr<CacheGuard::CacheGuardEntry> lock;
-        // Proxy plugin fallback to lowlevel device
-        if (!is_proxy_device(plugin))
-            lock = cacheGuard.get_hash_lock(cacheContent.blobId);
+        std::unique_ptr<CacheGuard::CacheGuardEntry> lock = cacheGuard.get_hash_lock(cacheContent.blobId);
         compiled_model = load_model_from_cache(cacheContent, plugin, parsed._config, ov::SoPtr<ov::IRemoteContext>{}, [&]() {
             auto cnnNetwork = read_model(model_str, weights);
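The four hunks above make the core-level caching branch uniform: the proxy plugin is now excluded from caching inside ov::CoreImpl (the hardware plugin the proxy forwards to reaches this same path itself and uses the cache there), so the hash lock no longer needs to be conditional. Below is a rough standalone sketch of the resulting control flow; the helper names and the single mutex are illustrative stand-ins, not the actual CoreImpl/CacheGuard API.

// Illustrative sketch only: load_or_compile() mirrors the shape of the caching
// branch in ov::CoreImpl::compile_model() after this patch. The mutex stands in
// for CacheGuard's per-blob hash lock; try_load_from_cache/compile_and_cache are
// hypothetical callbacks, not OpenVINO functions.
#include <functional>
#include <mutex>

struct CompiledModel {};

CompiledModel load_or_compile(bool cache_enabled,
                              bool device_supports_caching,
                              bool is_proxy_device,
                              const std::function<CompiledModel()>& try_load_from_cache,
                              const std::function<CompiledModel()>& compile_and_cache) {
    // The proxy device is filtered out up front: the real HW plugin it forwards to
    // reaches this code path on its own and reads/writes the cache there.
    if (cache_enabled && device_supports_caching && !is_proxy_device) {
        static std::mutex blob_mutex;                  // per-blob-id lock in the real code
        std::lock_guard<std::mutex> lock(blob_mutex);  // now taken unconditionally
        try {
            return try_load_from_cache();              // import the cached blob if present
        } catch (...) {
            return compile_and_cache();                // cache miss or broken blob
        }
    }
    return compile_and_cache();                        // caching disabled or proxy device
}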
diff --git a/src/inference/src/dev/icompiled_model_wrapper.cpp b/src/inference/src/dev/icompiled_model_wrapper.cpp
index f8a7f0fa001..de5b72b1fdd 100644
--- a/src/inference/src/dev/icompiled_model_wrapper.cpp
+++ b/src/inference/src/dev/icompiled_model_wrapper.cpp
@@ -6,6 +6,7 @@
 #include "dev/converter_utils.hpp"
 #include "ie_plugin_config.hpp"
+#include "openvino/core/except.hpp"
 
 InferenceEngine::ICompiledModelWrapper::ICompiledModelWrapper(
     const std::shared_ptr<InferenceEngine::IExecutableNetworkInternal>& model)
@@ -29,7 +30,11 @@ std::shared_ptr InferenceEngine::ICompiledModelWrapper::
 }
 
 void InferenceEngine::ICompiledModelWrapper::export_model(std::ostream& model) const {
-    m_model->Export(model);
+    try {
+        m_model->Export(model);
+    } catch (const InferenceEngine::NotImplemented& ex) {
+        OPENVINO_ASSERT_HELPER(ov::NotImplemented, "", false, ex.what());
+    }
 }
 
 std::shared_ptr<const ov::Model> InferenceEngine::ICompiledModelWrapper::get_runtime_model() const {
diff --git a/src/plugins/hetero/src/compiled_model.cpp b/src/plugins/hetero/src/compiled_model.cpp
index dc62b37d4cd..519218e18b1 100644
--- a/src/plugins/hetero/src/compiled_model.cpp
+++ b/src/plugins/hetero/src/compiled_model.cpp
@@ -688,26 +688,32 @@ void ov::hetero::CompiledModel::export_model(std::ostream& model_stream) const {
     for (const auto& comp_model_desc : m_compiled_submodels) {
         if (get_plugin()->get_core()->device_supports_model_caching(comp_model_desc.device)) {
-            comp_model_desc.compiled_model->export_model(model_stream);
-        } else {
-            auto model = comp_model_desc.model;
-            if (!model)
-                OPENVINO_THROW("OpenVINO Model is empty");
-
-            std::stringstream xmlFile, binFile;
-            ov::pass::Serialize serializer(xmlFile, binFile);
-            serializer.run_on_model(model);
-
-            auto constants = binFile.str();
-            auto model_str = xmlFile.str();
-
-            auto dataSize = static_cast<std::uint64_t>(model_str.size());
-            model_stream.write(reinterpret_cast<char*>(&dataSize), sizeof(dataSize));
-            model_stream.write(model_str.c_str(), dataSize);
-
-            dataSize = static_cast<std::uint64_t>(constants.size());
-            model_stream.write(reinterpret_cast<char*>(&dataSize), sizeof(dataSize));
-            model_stream.write(reinterpret_cast<char*>(&constants[0]), dataSize);
+            try {
+                // Batch plugin reports property of low level plugin
+                // If we use Batch plugin inside hetero, we won't be able to call export
+                // Auto batch plugin will throw NOT_IMPLEMENTED
+                comp_model_desc.compiled_model->export_model(model_stream);
+                continue;
+            } catch (ov::NotImplemented&) {
+            }
         }
+        auto model = comp_model_desc.model;
+        if (!model)
+            OPENVINO_THROW("OpenVINO Model is empty");
+
+        std::stringstream xmlFile, binFile;
+        ov::pass::Serialize serializer(xmlFile, binFile);
+        serializer.run_on_model(model);
+
+        auto constants = binFile.str();
+        auto model_str = xmlFile.str();
+
+        auto dataSize = static_cast<std::uint64_t>(model_str.size());
+        model_stream.write(reinterpret_cast<char*>(&dataSize), sizeof(dataSize));
+        model_stream.write(model_str.c_str(), dataSize);
+
+        dataSize = static_cast<std::uint64_t>(constants.size());
+        model_stream.write(reinterpret_cast<char*>(&dataSize), sizeof(dataSize));
+        model_stream.write(reinterpret_cast<char*>(&constants[0]), dataSize);
     }
-}
\ No newline at end of file
+}
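The hetero change above depends on the ICompiledModelWrapper hunk rethrowing the legacy NotImplemented error as ov::NotImplemented: export is attempted first, and only when a submodel (for example one wrapped by the auto-batch plugin) refuses to export does hetero fall back to serializing the ov::Model itself as a length-prefixed XML block followed by a length-prefixed weights block. A small self-contained sketch of that framing follows; write_blob/read_blob are hypothetical helpers, not part of the hetero plugin.

// Sketch of the size-prefixed framing used by the fallback path above:
// a uint64 length, the serialized XML, another uint64 length, then the weights.
#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>

void write_blob(std::ostream& stream, const std::string& data) {
    auto size = static_cast<std::uint64_t>(data.size());
    stream.write(reinterpret_cast<const char*>(&size), sizeof(size));
    stream.write(data.data(), static_cast<std::streamsize>(size));
}

std::string read_blob(std::istream& stream) {
    std::uint64_t size = 0;
    stream.read(reinterpret_cast<char*>(&size), sizeof(size));
    std::string data(static_cast<std::size_t>(size), '\0');
    stream.read(&data[0], static_cast<std::streamsize>(size));
    return data;
}

int main() {
    std::stringstream model_stream;
    write_blob(model_stream, "<net .../>");          // serialized IR XML (placeholder)
    write_blob(model_stream, std::string("\x01\x02\x03", 3));  // weights bytes (placeholder)
    std::cout << read_blob(model_stream) << "\n";    // prints the XML part back
}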
diff --git a/src/plugins/proxy/tests/CMakeLists.txt b/src/plugins/proxy/tests/CMakeLists.txt
index fb0135e44ca..04ff0fea9ff 100644
--- a/src/plugins/proxy/tests/CMakeLists.txt
+++ b/src/plugins/proxy/tests/CMakeLists.txt
@@ -4,12 +4,34 @@
 
 set(TARGET_NAME ov_proxy_plugin_tests)
 
+set(DEPENDENCIES
+    mock_engine
+    openvino::runtime
+    func_test_utils
+)
+
+set(COMPILE_DEFINITIONS "CI_BUILD_NUMBER=\"mock_version\"")
+
+if(ENABLE_AUTO_BATCH)
+    list(APPEND DEPENDENCIES openvino_auto_batch_plugin)
+    list(APPEND COMPILE_DEFINITIONS ENABLE_AUTO_BATCH)
+endif()
+
+if(ENABLE_HETERO)
+    list(APPEND DEPENDENCIES openvino_hetero_plugin)
+    list(APPEND COMPILE_DEFINITIONS HETERO_ENABLED)
+endif()
+
+if(ENABLE_OV_IR_FRONTEND)
+    list(APPEND DEPENDENCIES openvino_ir_frontend)
+    list(APPEND COMPILE_DEFINITIONS IR_FRONTEND_ENABLED)
+endif()
+
 ov_add_test_target(
         NAME ${TARGET_NAME}
         ROOT ${CMAKE_CURRENT_SOURCE_DIR}
         DEPENDENCIES
-            openvino::runtime
-            mock_engine
+            ${DEPENDENCIES}
         LINK_LIBRARIES
             openvino::runtime::dev
             gtest
@@ -20,16 +42,5 @@ ov_add_test_target(
             PROXY_PLUGIN
 )
 
-target_compile_definitions(${TARGET_NAME} PRIVATE CI_BUILD_NUMBER=\"mock_version\")
-
+target_compile_definitions(${TARGET_NAME} PRIVATE ${COMPILE_DEFINITIONS})
 target_include_directories(${TARGET_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/../dev_api)
-
-if(ENABLE_HETERO)
-    add_dependencies(${TARGET_NAME} openvino_hetero_plugin)
-    target_compile_definitions(${TARGET_NAME} PRIVATE HETERO_ENABLED)
-endif()
-
-if(ENABLE_OV_IR_FRONTEND)
-    add_dependencies(${TARGET_NAME} openvino_ir_frontend)
-    target_compile_definitions(${TARGET_NAME} PRIVATE IR_FRONTEND_ENABLED)
-endif()
diff --git a/src/plugins/proxy/tests/properties_tests.cpp b/src/plugins/proxy/tests/properties_tests.cpp
index 72aa44301ca..c4058ec846c 100644
--- a/src/plugins/proxy/tests/properties_tests.cpp
+++ b/src/plugins/proxy/tests/properties_tests.cpp
@@ -1,6 +1,7 @@
 // Copyright (C) 2018-2023 Intel Corporation
 // SPDX-License-Identifier: Apache-2.0
 //
+#include "openvino/runtime/properties.hpp"
 #include "proxy_tests.hpp"
 
 using namespace ov::proxy::tests;
@@ -46,7 +47,7 @@ TEST_F(ProxyTests, set_property_for_primary_device_full_name) {
 TEST_F(ProxyTests, get_property_on_default_device) {
     const std::string dev_name = "MOCK";
     auto supported_properties = core.get_property(dev_name, ov::supported_properties);
-    EXPECT_EQ(8, supported_properties.size());
+    EXPECT_EQ(12, supported_properties.size());
     size_t mutable_pr(0), immutable_pr(0);
     for (auto&& property : supported_properties) {
         property.is_mutable() ? mutable_pr++ : immutable_pr++;
@@ -59,21 +60,21 @@ TEST_F(ProxyTests, get_property_on_default_device) {
             EXPECT_EQ("000102030405060708090a0b0c0d0e0f", get_string_value(core.get_property(dev_name, property)));
         } else if (property == ov::device::priorities) {
             auto value = core.get_property(dev_name, property).as<std::vector<std::string>>();
-            EXPECT_EQ(value.size(), 2);
+            ASSERT_EQ(value.size(), 2);
             EXPECT_EQ(value[0], "ABC");
             EXPECT_EQ(value[1], "BDE");
         } else {
             EXPECT_NO_THROW(core.get_property(dev_name, property));
         }
     }
-    EXPECT_EQ(5, immutable_pr);
-    EXPECT_EQ(3, mutable_pr);
+    EXPECT_EQ(6, immutable_pr);
+    EXPECT_EQ(6, mutable_pr);
 }
 
 TEST_F(ProxyTests, get_property_on_mixed_device) {
     const std::string dev_name = "MOCK.1";
     auto supported_properties = core.get_property(dev_name, ov::supported_properties);
-    EXPECT_EQ(8, supported_properties.size());
+    EXPECT_EQ(12, supported_properties.size());
     size_t mutable_pr(0), immutable_pr(0);
     for (auto&& property : supported_properties) {
         property.is_mutable() ? mutable_pr++ : immutable_pr++;
@@ -86,21 +87,21 @@ TEST_F(ProxyTests, get_property_on_mixed_device) {
             EXPECT_EQ("00020406080a0c0e10121416181a1c1e", get_string_value(core.get_property(dev_name, property)));
         } else if (property == ov::device::priorities) {
             auto value = core.get_property(dev_name, property).as<std::vector<std::string>>();
-            EXPECT_EQ(value.size(), 2);
+            ASSERT_EQ(value.size(), 2);
             EXPECT_EQ(value[0], "ABC");
             EXPECT_EQ(value[1], "BDE");
         } else {
             core.get_property(dev_name, property);
         }
     }
-    EXPECT_EQ(5, immutable_pr);
-    EXPECT_EQ(3, mutable_pr);
+    EXPECT_EQ(6, immutable_pr);
+    EXPECT_EQ(6, mutable_pr);
 }
 
 TEST_F(ProxyTests, get_property_on_specified_device) {
     const std::string dev_name = "MOCK.3";
     auto supported_properties = core.get_property(dev_name, ov::supported_properties);
-    EXPECT_EQ(7, supported_properties.size());
+    EXPECT_EQ(8, supported_properties.size());
     size_t mutable_pr(0), immutable_pr(0);
     for (auto&& property : supported_properties) {
         property.is_mutable() ? mutable_pr++ : immutable_pr++;
@@ -113,21 +114,21 @@ TEST_F(ProxyTests, get_property_on_specified_device) {
             EXPECT_EQ("0004080c1014181c2024282c3034383c", get_string_value(core.get_property(dev_name, property)));
         } else if (property == ov::device::priorities) {
             auto value = core.get_property(dev_name, property).as<std::vector<std::string>>();
-            EXPECT_EQ(value.size(), 1);
+            ASSERT_EQ(value.size(), 1);
             EXPECT_EQ(value[0], "BDE");
         } else {
             EXPECT_NO_THROW(core.get_property(dev_name, property));
         }
     }
     EXPECT_EQ(5, immutable_pr);
-    EXPECT_EQ(2, mutable_pr);
+    EXPECT_EQ(3, mutable_pr);
 }
 
 TEST_F(ProxyTests, get_property_for_changed_default_device) {
     const std::string dev_name = "MOCK";
     core.set_property(dev_name, ov::device::id(3));
     auto supported_properties = core.get_property(dev_name, ov::supported_properties);
-    EXPECT_EQ(7, supported_properties.size());
+    EXPECT_EQ(8, supported_properties.size());
     size_t mutable_pr(0), immutable_pr(0);
     for (auto&& property : supported_properties) {
         property.is_mutable() ? mutable_pr++ : immutable_pr++;
@@ -140,12 +141,205 @@ TEST_F(ProxyTests, get_property_for_changed_default_device) {
             EXPECT_EQ("0004080c1014181c2024282c3034383c", get_string_value(core.get_property(dev_name, property)));
         } else if (property == ov::device::priorities) {
             auto value = core.get_property(dev_name, property).as<std::vector<std::string>>();
-            EXPECT_EQ(value.size(), 1);
+            ASSERT_EQ(value.size(), 1);
             EXPECT_EQ(value[0], "BDE");
         } else {
             EXPECT_NO_THROW(core.get_property(dev_name, property));
         }
     }
     EXPECT_EQ(5, immutable_pr);
-    EXPECT_EQ(2, mutable_pr);
+    EXPECT_EQ(3, mutable_pr);
 }
+
+TEST_F(ProxyTests, get_property_on_loaded_default_uninit_device) {
+    core.get_available_devices();
+    const std::string dev_name = "MOCK";
+    EXPECT_EQ(0, core.get_property(dev_name, ov::num_streams));
+    core.set_property(dev_name, ov::num_streams(2));
+    EXPECT_EQ(2, core.get_property(dev_name, ov::num_streams));
+}
+
+TEST_F(ProxyTests, set_property_for_loaded_fallback_device) {
+    core.get_available_devices();
+    const std::string dev_name = "MOCK.1";
+    EXPECT_EQ(0, core.get_property(dev_name, ov::num_streams));
+    core.set_property(dev_name, ov::num_streams(2));
+    EXPECT_EQ(2, core.get_property(dev_name, ov::num_streams));
+    core.set_property(dev_name, ov::device::properties("BDE", ov::enable_profiling(true)));
+    EXPECT_EQ(false, core.get_property(dev_name, ov::enable_profiling));
+}
+
+TEST_F(ProxyTests, set_cache_dir_for_loaded_fallback_device) {
+    core.get_available_devices();
+    const std::string dev_name = "MOCK.1";
+    core.set_property(dev_name, ov::cache_dir("test_cache"));
+    auto model = create_model_with_subtract();
+    auto compiled_model = core.compile_model(model, "MOCK.1", ov::cache_dir("test_cache"));
+    auto infer_request = compiled_model.create_infer_request();
+    auto input_tensor = create_and_fill_tensor(model->input().get_element_type(), model->input().get_shape());
+    infer_request.set_input_tensor(input_tensor);
+    infer_request.infer();
+}
+
+TEST_F(ProxyTests, set_property_for_loaded_primary_device) {
+    core.get_available_devices();
+    const std::string dev_name = "MOCK.1";
+    core.set_property(dev_name, ov::device::properties("ABC", ov::enable_profiling(true)));
+    EXPECT_EQ(true, core.get_property(dev_name, ov::enable_profiling));
+}
+
+TEST_F(ProxyTests, set_property_for_loaded_primary_device_full_name) {
+    core.get_available_devices();
+    const std::string dev_name = "MOCK.1";
+    core.set_property(dev_name, ov::device::properties("ABC.abc_b", ov::enable_profiling(true)));
+    EXPECT_EQ(true, core.get_property(dev_name, ov::enable_profiling));
+}
+
+TEST_F(ProxyTests, get_property_on_loaded_default_device) {
+    core.get_available_devices();
+    const std::string dev_name = "MOCK";
+    auto supported_properties = core.get_property(dev_name, ov::supported_properties);
+    EXPECT_EQ(12, supported_properties.size());
+    size_t mutable_pr(0), immutable_pr(0);
+    for (auto&& property : supported_properties) {
+        property.is_mutable() ? mutable_pr++ : immutable_pr++;
+        if (property == ov::num_streams) {
+            EXPECT_EQ("0", get_string_value(core.get_property(dev_name, property)));
+            core.set_property(dev_name, ov::num_streams(2));
+            EXPECT_TRUE(core.get_property(dev_name, property).is());
+            EXPECT_EQ("2", get_string_value(core.get_property(dev_name, property)));
+        } else if (property == ov::device::uuid) {
+            EXPECT_EQ("000102030405060708090a0b0c0d0e0f", get_string_value(core.get_property(dev_name, property)));
+        } else if (property == ov::device::priorities) {
+            auto value = core.get_property(dev_name, property).as<std::vector<std::string>>();
+            ASSERT_EQ(value.size(), 2);
+            EXPECT_EQ(value[0], "ABC");
+            EXPECT_EQ(value[1], "BDE");
+        } else {
+            EXPECT_NO_THROW(core.get_property(dev_name, property));
+        }
+    }
+    EXPECT_EQ(6, immutable_pr);
+    EXPECT_EQ(6, mutable_pr);
+}
+
+TEST_F(ProxyTests, get_property_loaded_on_mixed_device) {
+    core.get_available_devices();
+    const std::string dev_name = "MOCK.1";
+    auto supported_properties = core.get_property(dev_name, ov::supported_properties);
+    EXPECT_EQ(12, supported_properties.size());
+    size_t mutable_pr(0), immutable_pr(0);
+    for (auto&& property : supported_properties) {
+        property.is_mutable() ? mutable_pr++ : immutable_pr++;
+        if (property == ov::num_streams) {
+            EXPECT_EQ("0", get_string_value(core.get_property(dev_name, property)));
+            core.set_property(dev_name, ov::num_streams(2));
+            EXPECT_TRUE(core.get_property(dev_name, property).is());
+            EXPECT_EQ("2", get_string_value(core.get_property(dev_name, property)));
+        } else if (property == ov::device::uuid) {
+            EXPECT_EQ("00020406080a0c0e10121416181a1c1e", get_string_value(core.get_property(dev_name, property)));
+        } else if (property == ov::device::priorities) {
+            auto value = core.get_property(dev_name, property).as<std::vector<std::string>>();
+            ASSERT_EQ(value.size(), 2);
+            EXPECT_EQ(value[0], "ABC");
+            EXPECT_EQ(value[1], "BDE");
+        } else {
+            core.get_property(dev_name, property);
+        }
+    }
+    EXPECT_EQ(6, immutable_pr);
+    EXPECT_EQ(6, mutable_pr);
+}
+
+TEST_F(ProxyTests, get_property_loaded_on_specified_device) {
+    core.get_available_devices();
+    const std::string dev_name = "MOCK.3";
+    auto supported_properties = core.get_property(dev_name, ov::supported_properties);
+    EXPECT_EQ(8, supported_properties.size());
+    size_t mutable_pr(0), immutable_pr(0);
+    for (auto&& property : supported_properties) {
+        property.is_mutable() ? mutable_pr++ : immutable_pr++;
+        if (property == ov::enable_profiling) {
+            EXPECT_EQ("NO", get_string_value(core.get_property(dev_name, property)));
+            core.set_property(dev_name, ov::enable_profiling(true));
+            EXPECT_TRUE(core.get_property(dev_name, property).is());
+            EXPECT_EQ("YES", get_string_value(core.get_property(dev_name, property)));
+        } else if (property == ov::device::uuid) {
+            EXPECT_EQ("0004080c1014181c2024282c3034383c", get_string_value(core.get_property(dev_name, property)));
+        } else if (property == ov::device::priorities) {
+            auto value = core.get_property(dev_name, property).as<std::vector<std::string>>();
+            ASSERT_EQ(value.size(), 1);
+            EXPECT_EQ(value[0], "BDE");
+        } else {
+            EXPECT_NO_THROW(core.get_property(dev_name, property));
+        }
+    }
+    EXPECT_EQ(5, immutable_pr);
+    EXPECT_EQ(3, mutable_pr);
+}
+
+TEST_F(ProxyTests, get_property_for_loaded_changed_default_device) {
+    core.get_available_devices();
+    const std::string dev_name = "MOCK";
+    core.set_property(dev_name, ov::device::id(3));
+    auto supported_properties = core.get_property(dev_name, ov::supported_properties);
+    EXPECT_EQ(8, supported_properties.size());
+    size_t mutable_pr(0), immutable_pr(0);
+    for (auto&& property : supported_properties) {
+        property.is_mutable() ? mutable_pr++ : immutable_pr++;
+        if (property == ov::enable_profiling) {
+            EXPECT_EQ("NO", get_string_value(core.get_property(dev_name, property)));
+            core.set_property(dev_name, ov::enable_profiling(true));
+            EXPECT_TRUE(core.get_property(dev_name, property).is());
+            EXPECT_EQ("YES", get_string_value(core.get_property(dev_name, property)));
+        } else if (property == ov::device::uuid) {
+            EXPECT_EQ("0004080c1014181c2024282c3034383c", get_string_value(core.get_property(dev_name, property)));
+        } else if (property == ov::device::priorities) {
+            auto value = core.get_property(dev_name, property).as<std::vector<std::string>>();
+            ASSERT_EQ(value.size(), 1);
+            EXPECT_EQ(value[0], "BDE");
+        } else {
+            EXPECT_NO_THROW(core.get_property(dev_name, property));
+        }
+    }
+    EXPECT_EQ(5, immutable_pr);
+    EXPECT_EQ(3, mutable_pr);
+}
+
+#ifdef ENABLE_AUTO_BATCH
+
+# ifdef HETERO_ENABLED
+
+TEST_F(ProxyTests, set_cache_dir_for_auto_batch_hetero_fallback_device) {
+    core.get_available_devices();
+    const std::string dev_name = "MOCK.1";
+    core.set_property(dev_name, ov::cache_dir("test_cache"));
+    auto model = create_model_with_add();
+    auto compiled_model = core.compile_model(model,
+                                             "MOCK.1",
+                                             ov::cache_dir("test_cache"),
+                                             ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT));
+    auto infer_request = compiled_model.create_infer_request();
+    auto input_tensor = create_and_fill_tensor(model->input().get_element_type(), model->input().get_shape());
+    infer_request.set_input_tensor(input_tensor);
+    infer_request.infer();
+}
+
+# endif
+
+TEST_F(ProxyTests, set_cache_dir_for_auto_batch_main_fallback_device) {
+    core.get_available_devices();
+    const std::string dev_name = "MOCK.1";
+    core.set_property(dev_name, ov::cache_dir("test_cache"));
+    auto model = create_model_with_add();
+    auto compiled_model = core.compile_model(model,
+                                             "MOCK.0",
+                                             ov::cache_dir("test_cache"),
+                                             ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT));
+    auto infer_request = compiled_model.create_infer_request();
+    auto input_tensor = create_and_fill_tensor(model->input().get_element_type(), model->input().get_shape());
+    infer_request.set_input_tensor(input_tensor);
+    infer_request.infer();
+}
+
+#endif
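The new cache_dir tests above follow the usual public-API pattern for exercising the model cache. Roughly, outside the mock environment it looks like the sketch below; "model.xml", the cache directory and the device name are placeholders, and checking ov::loaded_from_cache on the second compilation is an extra step that the tests themselves do not perform.

// Rough usage sketch, not part of the test suite: compile the same model twice
// with ov::cache_dir and report whether the second compilation was imported
// from the cache.
#include <openvino/openvino.hpp>
#include <iostream>
#include <string>

int main() {
    ov::Core core;
    auto model = core.read_model("model.xml");  // any readable model (assumption)
    const std::string device = "CPU";           // substitute the device under test
    auto first = core.compile_model(model, device, ov::cache_dir("test_cache"));
    auto second = core.compile_model(model, device, ov::cache_dir("test_cache"));
    // ov::loaded_from_cache is a read-only compiled-model property.
    std::cout << "loaded from cache: " << second.get_property(ov::loaded_from_cache) << "\n";
    return 0;
}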
diff --git a/src/plugins/proxy/tests/proxy_tests.cpp b/src/plugins/proxy/tests/proxy_tests.cpp
index 4a7db474882..d2bac8ac722 100644
--- a/src/plugins/proxy/tests/proxy_tests.cpp
+++ b/src/plugins/proxy/tests/proxy_tests.cpp
@@ -66,6 +66,10 @@ void ov::proxy::tests::ProxyTests::SetUp() {
     }
 }
 
+void ov::proxy::tests::ProxyTests::TearDown() {
+    CommonTestUtils::removeDir("test_cache");
+}
+
 ov::Tensor ov::proxy::tests::ProxyTests::create_and_fill_tensor(const ov::element::Type& type, const ov::Shape& shape) {
     switch (type) {
     case ov::element::Type_t::i64:
@@ -108,6 +112,18 @@ std::shared_ptr ov::proxy::tests::ProxyTests::create_model_with_subtr
     return std::make_shared<ov::Model>(ov::ResultVector{result}, ov::ParameterVector{param});
 }
 
+std::shared_ptr<ov::Model> ov::proxy::tests::ProxyTests::create_model_with_add() {
+    auto param = std::make_shared<ov::opset11::Parameter>(ov::element::i64, ov::Shape{1, 3, 2, 2});
+    param->set_friendly_name("input");
+    auto const_value = ov::opset11::Constant::create(ov::element::i64, ov::Shape{1, 1, 1, 1}, {1});
+    const_value->set_friendly_name("const_val");
+    auto add = std::make_shared<ov::opset11::Add>(param, const_value);
+    add->set_friendly_name("add");
+    auto result = std::make_shared<ov::opset11::Result>(add);
+    result->set_friendly_name("res");
+    return std::make_shared<ov::Model>(ov::ResultVector{result}, ov::ParameterVector{param});
+}
+
 std::shared_ptr<ov::Model> ov::proxy::tests::ProxyTests::create_model_with_subtract_reshape_relu() {
     auto param = std::make_shared<ov::opset11::Parameter>(ov::element::i64, ov::Shape{1, 3, 2, 2});
     param->set_friendly_name("input");
@@ -173,7 +189,7 @@ public:
     }
 
     std::shared_ptr<const ov::Model> get_runtime_model() const override {
-        OPENVINO_NOT_IMPLEMENTED;
+        return m_model;
     }
 
     void set_property(const ov::AnyMap& properties) override {
@@ -515,6 +531,8 @@ void ov::proxy::tests::ProxyTests::register_plugin_support_reshape(ov::Core& cor
                 m_profiling = it.second.as<bool>();
             else if (it.first == ov::device::id.name())
                 continue;
+            else if (it.first == ov::cache_dir.name())
+                continue;
             else
                 OPENVINO_THROW(get_device_name(), " set config: " + it.first);
         }
@@ -530,10 +548,14 @@ void ov::proxy::tests::ProxyTests::register_plugin_support_reshape(ov::Core& cor
            RO_property(ov::loaded_from_cache.name()),
            RO_property(ov::device::uuid.name()),
            RO_property(METRIC_KEY(IMPORT_EXPORT_SUPPORT)),
+           RO_property(ov::optimal_batch_size.name()),
+           RW_property(ov::hint::performance_mode.name()),
+           RW_property(ov::hint::num_requests.name()),
        };
        // the whole config is RW before network is loaded.
        const static std::vector<ov::PropertyName> rwProperties{
            RW_property(ov::num_streams.name()),
+           RW_property(ov::cache_dir.name()),
            RW_property(ov::enable_profiling.name()),
        };
@@ -551,6 +573,12 @@ void ov::proxy::tests::ProxyTests::register_plugin_support_reshape(ov::Core& cor
        } else if (name == ov::internal::supported_properties) {
            return decltype(ov::internal::supported_properties)::value_type(
                {ov::PropertyName{ov::internal::caching_properties.name(), ov::PropertyMutability::RO}});
+       } else if (name == ov::optimal_batch_size) {
+           return decltype(ov::optimal_batch_size)::value_type{1};
+       } else if (name == ov::hint::num_requests) {
+           return decltype(ov::hint::num_requests)::value_type{1};
+       } else if (name == ov::hint::performance_mode) {
+           return decltype(ov::hint::performance_mode)::value_type{ov::hint::PerformanceMode::LATENCY};
        } else if (name == ov::device::uuid) {
            ov::device::UUID uuid;
            for (size_t i = 0; i < uuid.MAX_UUID_SIZE; i++) {
@@ -657,6 +685,7 @@ void ov::proxy::tests::ProxyTests::register_plugin_support_subtract(ov::Core& co
        // the whole config is RW before network is loaded.
        const static std::vector<ov::PropertyName> rwProperties{
            RW_property(ov::enable_profiling.name()),
+           RW_property(ov::cache_dir.name()),
        };
        std::string device_id;
        if (arguments.find(ov::device::id.name()) != arguments.end()) {
diff --git a/src/plugins/proxy/tests/proxy_tests.hpp b/src/plugins/proxy/tests/proxy_tests.hpp
index c2f94dae0c6..014c9ba51aa 100644
--- a/src/plugins/proxy/tests/proxy_tests.hpp
+++ b/src/plugins/proxy/tests/proxy_tests.hpp
@@ -64,11 +64,13 @@ public:
     ov::Core core;
 
     void SetUp() override;
+    void TearDown() override;
 
     std::shared_ptr<ov::Model> create_model_with_subtract();
     std::shared_ptr<ov::Model> create_model_with_subtract_reshape();
     std::shared_ptr<ov::Model> create_model_with_subtract_reshape_relu();
     std::shared_ptr<ov::Model> create_model_with_reshape();
+    std::shared_ptr<ov::Model> create_model_with_add();
     ov::Tensor create_and_fill_tensor(const ov::element::Type& type, const ov::Shape& shape);
 
 protected:
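For reference, the properties added to the mock plugins above are the ones the auto-batch plugin queries when it is stacked on top of a device. A compile-only sketch of answering such a query with the public property objects is shown below; get_mock_property() is a hypothetical helper, not the mock plugin's actual get_property().

// Compile-only sketch: returning the batching-related properties that the mock
// plugin now advertises so that auto-batching can pick a batch size.
#include <openvino/core/any.hpp>
#include <openvino/runtime/properties.hpp>
#include <string>

ov::Any get_mock_property(const std::string& name) {
    if (name == ov::optimal_batch_size.name())
        return decltype(ov::optimal_batch_size)::value_type{1};  // batch size hint
    if (name == ov::hint::num_requests.name())
        return decltype(ov::hint::num_requests)::value_type{1};  // requests hint
    if (name == ov::hint::performance_mode.name())
        return ov::hint::PerformanceMode::LATENCY;               // default perf mode
    return {};  // unknown property
}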