diff --git a/inference-engine/include/gpu/gpu_ocl_wrapper.hpp b/inference-engine/include/gpu/gpu_ocl_wrapper.hpp index 282dcd84488..f51076e8fe9 100644 --- a/inference-engine/include/gpu/gpu_ocl_wrapper.hpp +++ b/inference-engine/include/gpu/gpu_ocl_wrapper.hpp @@ -13,17 +13,34 @@ /** * @brief Definitions required by Khronos headers */ -#define CL_HPP_ENABLE_EXCEPTIONS -#define CL_HPP_MINIMUM_OPENCL_VERSION 120 -#define CL_HPP_TARGET_OPENCL_VERSION 120 -#if defined __GNUC__ +#ifndef CL_HPP_ENABLE_EXCEPTIONS +# define CL_HPP_ENABLE_EXCEPTIONS +#endif + +#ifdef CL_HPP_MINIMUM_OPENCL_VERSION +# if CL_HPP_MINIMUM_OPENCL_VERSION < 120 +# error "CL_HPP_MINIMUM_OPENCL_VERSION must be >= 120" +# endif +#else +# define CL_HPP_MINIMUM_OPENCL_VERSION 120 +#endif + +#ifdef CL_HPP_TARGET_OPENCL_VERSION +# if CL_HPP_TARGET_OPENCL_VERSION < 120 +# error "CL_HPP_TARGET_OPENCL_VERSION must be >= 120" +# endif +#else +# define CL_HPP_TARGET_OPENCL_VERSION 120 +#endif + +#ifdef __GNUC__ # pragma GCC diagnostic push # pragma GCC system_header #endif #include <CL/cl2.hpp> -#if defined __GNUC__ +#ifdef __GNUC__ # pragma GCC diagnostic pop #endif diff --git a/inference-engine/src/cldnn_engine/cldnn_engine.cpp b/inference-engine/src/cldnn_engine/cldnn_engine.cpp index 743ff3c993f..b53c39d3135 100644 --- a/inference-engine/src/cldnn_engine/cldnn_engine.cpp +++ b/inference-engine/src/cldnn_engine/cldnn_engine.cpp @@ -363,9 +363,9 @@ RemoteContext::Ptr clDNNEngine::CreateContext(const ParamMap& params) { } } -RemoteContext::Ptr clDNNEngine::GetDefaultContext() { +RemoteContext::Ptr clDNNEngine::GetDefaultContext(const ParamMap& params) { if (nullptr == m_defaultContext) { - m_defaultContext.reset(new CLDNNRemoteCLContext(shared_from_this(), ParamMap(), _impl->m_config)); + m_defaultContext.reset(new CLDNNRemoteCLContext(shared_from_this(), params, _impl->m_config)); } return std::dynamic_pointer_cast<RemoteContext>(m_defaultContext); } diff --git a/inference-engine/src/cldnn_engine/cldnn_engine.h 
b/inference-engine/src/cldnn_engine/cldnn_engine.h index 84b37d53741..b5469826920 100644 --- a/inference-engine/src/cldnn_engine/cldnn_engine.h +++ b/inference-engine/src/cldnn_engine/cldnn_engine.h @@ -46,7 +46,7 @@ public: const std::map<std::string, std::string>& config) const override; InferenceEngine::RemoteContext::Ptr CreateContext(const InferenceEngine::ParamMap& params) override; - InferenceEngine::RemoteContext::Ptr GetDefaultContext() override; + InferenceEngine::RemoteContext::Ptr GetDefaultContext(const ParamMap& params) override; }; }; // namespace CLDNNPlugin diff --git a/inference-engine/src/gna_plugin/gna_plugin.hpp b/inference-engine/src/gna_plugin/gna_plugin.hpp index dbe98fd37a4..838e9046b8d 100644 --- a/inference-engine/src/gna_plugin/gna_plugin.hpp +++ b/inference-engine/src/gna_plugin/gna_plugin.hpp @@ -123,7 +123,7 @@ class GNAPlugin : public InferenceEngine::IInferencePlugin { InferenceEngine::Parameter GetMetric(const std::string& name, const std::map<std::string, InferenceEngine::Parameter> & options) const override; InferenceEngine::RemoteContext::Ptr CreateContext(const InferenceEngine::ParamMap& params) override { THROW_GNA_EXCEPTION << "Not implemented"; } - InferenceEngine::RemoteContext::Ptr GetDefaultContext() override { THROW_GNA_EXCEPTION << "Not implemented"; } + InferenceEngine::RemoteContext::Ptr GetDefaultContext(const InferenceEngine::ParamMap&) override { THROW_GNA_EXCEPTION << "Not implemented"; } void Wait(uint32_t sync, InferenceEngine::Blob &result) { THROW_GNA_EXCEPTION << "Not implemented"; } diff --git a/inference-engine/src/inference_engine/ie_core.cpp b/inference-engine/src/inference_engine/ie_core.cpp index c22eeb57454..ddce658f9c6 100644 --- a/inference-engine/src/inference_engine/ie_core.cpp +++ b/inference-engine/src/inference_engine/ie_core.cpp @@ -598,45 +598,37 @@ void Core::AddExtension(const IExtensionPtr& extension) { ExecutableNetwork Core::LoadNetwork(const CNNNetwork& network, RemoteContext::Ptr context, const std::map<std::string, std::string>& config) { OV_ITT_SCOPED_TASK(itt::domains::IE, 
"Core::LoadNetwork"); - std::map<std::string, std::string> config_ = config; if (context == nullptr) { THROW_IE_EXCEPTION << "Remote context is null"; } - std::string deviceName_ = context->getDeviceName(); - DeviceIDParser device(deviceName_); - std::string deviceName = device.getDeviceName(); - - return _impl->GetCPPPluginByName(deviceName).LoadNetwork(network, config_, context); + auto parsed = parseDeviceNameIntoConfig(context->getDeviceName(), config); + return _impl->GetCPPPluginByName(parsed._deviceName).LoadNetwork(network, parsed._config, context); } -RemoteContext::Ptr Core::CreateContext(const std::string& deviceName_, const ParamMap& params) { - if (deviceName_.find("HETERO") == 0) { - THROW_IE_EXCEPTION << "HETERO device does not support remote contexts"; +RemoteContext::Ptr Core::CreateContext(const std::string& deviceName, const ParamMap& params) { + if (deviceName.find("HETERO") == 0) { + THROW_IE_EXCEPTION << "HETERO device does not support remote context"; } - if (deviceName_.find("MULTI") == 0) { - THROW_IE_EXCEPTION << "MULTI device does not support remote contexts"; + if (deviceName.find("MULTI") == 0) { + THROW_IE_EXCEPTION << "MULTI device does not support remote context"; } - DeviceIDParser device(deviceName_); - std::string deviceName = device.getDeviceName(); - - return _impl->GetCPPPluginByName(deviceName).CreateContext(params); + auto parsed = parseDeviceNameIntoConfig(deviceName, params); + return _impl->GetCPPPluginByName(parsed._deviceName).CreateContext(parsed._config); } -RemoteContext::Ptr Core::GetDefaultContext(const std::string& deviceName_) { - if (deviceName_.find("HETERO") == 0) { - THROW_IE_EXCEPTION << "HETERO device does not support remote contexts"; +RemoteContext::Ptr Core::GetDefaultContext(const std::string& deviceName) { + if (deviceName.find("HETERO") == 0) { + THROW_IE_EXCEPTION << "HETERO device does not support remote context"; } - if (deviceName_.find("MULTI") == 0) { - THROW_IE_EXCEPTION << "MULTI device does not support remote 
contexts"; + if (deviceName.find("MULTI") == 0) { + THROW_IE_EXCEPTION << "MULTI device does not support remote context"; } - DeviceIDParser device(deviceName_); - std::string deviceName = device.getDeviceName(); - - return _impl->GetCPPPluginByName(deviceName).GetDefaultContext(); + auto parsed = parseDeviceNameIntoConfig(deviceName, ParamMap()); + return _impl->GetCPPPluginByName(parsed._deviceName).GetDefaultContext(parsed._config); } void Core::AddExtension(IExtensionPtr extension, const std::string& deviceName_) { diff --git a/inference-engine/src/inference_engine/ie_plugin_cpp.hpp b/inference-engine/src/inference_engine/ie_plugin_cpp.hpp index 9b3be1fcd58..ec8f89389d3 100644 --- a/inference-engine/src/inference_engine/ie_plugin_cpp.hpp +++ b/inference-engine/src/inference_engine/ie_plugin_cpp.hpp @@ -118,8 +118,8 @@ public: CALL_STATEMENT(return actual->CreateContext(params)); } - RemoteContext::Ptr GetDefaultContext() { - CALL_STATEMENT(return actual->GetDefaultContext()); + RemoteContext::Ptr GetDefaultContext(const ParamMap& params) { + CALL_STATEMENT(return actual->GetDefaultContext(params)); } ExecutableNetwork ImportNetwork(std::istream& networkModel, diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp index 9069132a657..dec31fa96a3 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp @@ -150,7 +150,7 @@ public: THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str; } - RemoteContext::Ptr GetDefaultContext() override { + RemoteContext::Ptr GetDefaultContext(const ParamMap& /*params*/) override { THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str; } diff --git a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp index 
67fac3d3b3d..d949e2de3b2 100644 --- a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp +++ b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp @@ -214,9 +214,10 @@ public: /** * @brief Provides a default remote context instance if supported by a plugin + * @param[in] params The map of parameters * @return The default context. */ - virtual RemoteContext::Ptr GetDefaultContext() = 0; + virtual RemoteContext::Ptr GetDefaultContext(const ParamMap& params) = 0; /** * @deprecated Use ImportNetwork(std::istream& networkModel, const std::map<std::string, std::string>& config) diff --git a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/basic_lstm.cpp b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/basic_lstm.cpp index b06cee1c4e0..ebb66d750db 100644 --- a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/basic_lstm.cpp +++ b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/basic_lstm.cpp @@ -200,6 +200,7 @@ TEST_P(Basic_LSTM_S, CompareWithRefImpl_LowLatencyTransformation) { manager.register_pass<ngraph::pass::LowLatency>(); // LowLatency enables UnrollTI manager.run_passes(function); LoadNetwork(); + IE_SUPPRESS_DEPRECATED_START auto states = executableNetwork.QueryState(); for (auto& state : states) { auto name = state.GetName(); @@ -215,6 +216,7 @@ TEST_P(Basic_LSTM_S, CompareWithRefImpl_LowLatencyTransformation) { GTEST_FAIL() << "unknown memory state"; } } + IE_SUPPRESS_DEPRECATED_END // Run and compare Infer(); const auto& actualOutputs = GetOutputs(); diff --git a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/memory_LSTMCell.cpp b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/memory_LSTMCell.cpp index 93a883741a0..4542c8fccbb 100644 --- a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/memory_LSTMCell.cpp +++ b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/memory_LSTMCell.cpp @@ -260,6 +260,7 @@ 
namespace SubgraphTestsDefinitions { void MemoryLSTMCellTest::Run() { SKIP_IF_CURRENT_TEST_IS_DISABLED() + IE_SUPPRESS_DEPRECATED_START LoadNetwork(); auto states = executableNetwork.QueryState(); for (auto& state : states) { @@ -276,6 +277,7 @@ namespace SubgraphTestsDefinitions { GTEST_FAIL() << "unknown memory state"; } } + IE_SUPPRESS_DEPRECATED_END Infer(); switchToNgraphFriendlyModel(); Validate(); @@ -297,6 +299,7 @@ namespace SubgraphTestsDefinitions { manager.run_passes(function); LoadNetwork(); } + IE_SUPPRESS_DEPRECATED_START auto states = executableNetwork.QueryState(); for (auto& state : states) { auto name = state.GetName(); @@ -312,6 +315,7 @@ namespace SubgraphTestsDefinitions { GTEST_FAIL() << "unknown memory state"; } } + IE_SUPPRESS_DEPRECATED_END Infer(); CreatePureTensorIteratorModel(); diff --git a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/memory_eltwise_reshape_concat.cpp b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/memory_eltwise_reshape_concat.cpp index 7b9a13f0b67..a1754b04157 100644 --- a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/memory_eltwise_reshape_concat.cpp +++ b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/memory_eltwise_reshape_concat.cpp @@ -135,10 +135,12 @@ void MemoryEltwiseReshapeConcatTest::Run() { InferenceEngine::SizeVector({1, inputSize * concatSize}), InferenceEngine::Layout::NC); + IE_SUPPRESS_DEPRECATED_START auto states = executableNetwork.QueryState(); auto state_values_blob = FuncTestUtils::createAndFillBlobWithFloatArray(state_description, memory_init.data(), memory_init.size()); states[0].SetState(state_values_blob); + IE_SUPPRESS_DEPRECATED_END Infer(); initNgraphFriendlyModel(); Validate(); diff --git a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/multiple_LSTMCell.cpp b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/multiple_LSTMCell.cpp index 9463031be87..0197077c3c3 100644 
--- a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/multiple_LSTMCell.cpp +++ b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/multiple_LSTMCell.cpp @@ -402,6 +402,7 @@ void MultipleLSTMCellTest::Run() { InferenceEngine::SizeVector({1, hiddenSize}), InferenceEngine::Layout::NC); LoadNetwork(); + IE_SUPPRESS_DEPRECATED_START auto states = executableNetwork.QueryState(); for (auto& state : states) { auto name = state.GetName(); @@ -425,6 +426,7 @@ void MultipleLSTMCellTest::Run() { GTEST_FAIL() << "unknown memory state"; } } + IE_SUPPRESS_DEPRECATED_END Infer(); switchToNgraphFriendlyModel(); Validate(); @@ -450,6 +452,7 @@ void MultipleLSTMCellTest::RunLowLatency(bool regular_api) { manager.run_passes(function); LoadNetwork(); } + IE_SUPPRESS_DEPRECATED_START auto states = executableNetwork.QueryState(); for (auto& state : states) { auto name = state.GetName(); @@ -473,6 +476,7 @@ void MultipleLSTMCellTest::RunLowLatency(bool regular_api) { GTEST_FAIL() << "unknown memory state"; } } + IE_SUPPRESS_DEPRECATED_END Infer(); // Calculate ref values for Unrolled TI diff --git a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/negative_memory_layer_offset.cpp b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/negative_memory_layer_offset.cpp index 781e92f0472..d1efd0eff1b 100644 --- a/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/negative_memory_layer_offset.cpp +++ b/inference-engine/tests/functional/plugin/shared/src/subgraph_tests/negative_memory_layer_offset.cpp @@ -79,6 +79,7 @@ namespace LayerTestsDefinitions { SKIP_IF_CURRENT_TEST_IS_DISABLED() LoadNetwork(); + IE_SUPPRESS_DEPRECATED_START auto states = executableNetwork.QueryState(); for (auto& state : states) { auto name = state.GetName(); @@ -90,6 +91,7 @@ namespace LayerTestsDefinitions { GTEST_FAIL() << "unknown memory state"; } } + IE_SUPPRESS_DEPRECATED_END Infer(); switchToNgraphFriendlyModel(); Validate(); 
diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp index 8e31e8e3cc3..a0cfd456ba0 100644 --- a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp +++ b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp @@ -29,7 +29,7 @@ public: const std::string&, const std::map<std::string, std::string>&)); MOCK_METHOD1(CreateContext, InferenceEngine::RemoteContext::Ptr(const InferenceEngine::ParamMap&)); - MOCK_METHOD0(GetDefaultContext, InferenceEngine::RemoteContext::Ptr(void)); + MOCK_METHOD1(GetDefaultContext, InferenceEngine::RemoteContext::Ptr(const InferenceEngine::ParamMap&)); MOCK_METHOD3(LoadNetwork, InferenceEngine::ExecutableNetwork( const InferenceEngine::ICNNNetwork&, const std::map<std::string, std::string>&, InferenceEngine::RemoteContext::Ptr));