Deprecate set batch method (#17057)

* Deprecate set batch method

* Fixed some errors

* Suppress warning in tests

* Fixed warning in GPU

* Deprecate python
This commit is contained in:
Ilya Churaev
2023-04-20 00:21:18 +04:00
committed by GitHub
parent 1ec22a3180
commit 71880aadd3
10 changed files with 24 additions and 1 deletion

View File

@@ -758,7 +758,8 @@ ie_infer_request_wait(ie_infer_request_t* infer_request, const int64_t timeout);
* @param size New batch size to be used by all the following inference calls for this request.
* @return Status code of the operation: OK(0) for success.
*/
INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode)
INFERENCE_ENGINE_C_API(OPENVINO_DEPRECATED("This function is deprecated and will be removed in 2023.1 release")
IE_NODISCARD IEStatusCode)
ie_infer_request_set_batch(ie_infer_request_t* infer_request, const size_t size);
/** @} */ // end of InferRequest

View File

@@ -1549,6 +1549,8 @@ cdef class InferRequest:
def set_batch(self, size):
"""Sets new batch size for certain infer request when dynamic batching is enabled in executable network
that created this request.
This method is deprecated and will be removed in 2023.1 release.
.. note:: Support of dynamic batch size depends on the target plugin.

View File

@@ -263,10 +263,12 @@ public:
return _syncRequest->GetPreProcess(name);
}
OPENVINO_SUPPRESS_DEPRECATED_START
// Forwards the (deprecated) dynamic-batch size to the wrapped synchronous
// request, after CheckState() verifies this async request may be mutated.
// Wrapped in SUPPRESS_DEPRECATED because _syncRequest->SetBatch is itself
// marked deprecated (scheduled for removal in the 2023.1 release).
void SetBatch(int batch) override {
    CheckState();
    _syncRequest->SetBatch(batch);
}  // fixed: removed stray ';' after the member-function body
OPENVINO_SUPPRESS_DEPRECATED_END
void SetCallback(Callback callback) override {
CheckState();

View File

@@ -147,8 +147,10 @@ public:
/**
* @brief Sets new batch size when dynamic batching is enabled in executable network that created this request.
* @deprecated
* @param batch - new batch size to be used by all the following inference calls for this request.
*/
INFERENCE_ENGINE_DEPRECATED("This method is deprecated and will be removed in 2023.1 release")
virtual void SetBatch(int batch);
/**

View File

@@ -165,9 +165,11 @@ public:
/**
* @brief Sets new batch size when dynamic batching is enabled in executable network that created this request.
* @deprecated
*
* @param batch new batch size to be used by all the following inference calls for this request.
*/
INFERENCE_ENGINE_DEPRECATED("This method is deprecated and will be removed in 2023.1 release")
void SetBatch(const int batch);
/**

View File

@@ -194,11 +194,13 @@ public:
/**
* @brief Sets new batch size when dynamic batching is enabled in executable network that created this request.
*
* @deprecated
* @param batch_size new batch size to be used by all the following inference calls for this request.
* @param resp Optional: a pointer to an already allocated object to contain extra information of a failure (if
* occurred)
* @return Enumeration of the resulted action: InferenceEngine::OK (0) for success
*/
INFERENCE_ENGINE_DEPRECATED("This method is deprecated and will be removed in 2023.1 release")
virtual InferenceEngine::StatusCode SetBatch(int batch_size, ResponseDesc* resp) noexcept = 0;
protected:

View File

@@ -346,6 +346,7 @@ DECLARE_CONFIG_KEY(PERF_COUNT);
/**
* @brief The key defines dynamic limit of batch processing.
* @deprecated
*
* Specified value is applied to all following Infer() calls. Inference Engine processes
* min(batch_limit, original_batch_size) first pictures from input blob. For example, if input
@@ -357,11 +358,14 @@ DECLARE_CONFIG_KEY(PERF_COUNT);
* -1 - Do not limit batch processing
* >0 - Direct value of limit. Batch size to process is min(new batch_limit, original_batch)
*/
INFERENCE_ENGINE_DEPRECATED("This config is deprecated and will be removed in 2023.1 release")
DECLARE_CONFIG_KEY(DYN_BATCH_LIMIT);
/**
* @brief The key checks whether dynamic batch is enabled.
* @deprecated
*/
INFERENCE_ENGINE_DEPRECATED("This config is deprecated and will be removed in 2023.1 release")
DECLARE_CONFIG_KEY(DYN_BATCH_ENABLED);
/**

View File

@@ -55,7 +55,9 @@ TEST(InferRequestCPPTests, throwsOnUninitializedSetOutput) {
// Verifies that calling the deprecated SetBatch() on a default-constructed
// (uninitialized) InferRequest throws InferenceEngine::NotAllocated rather
// than crashing. The SUPPRESS_DEPRECATED guards silence the deprecation
// warning that SetBatch now emits, so the test builds warning-clean.
TEST(InferRequestCPPTests, throwsOnUninitializedSetBatch) {
InferRequest req;
IE_SUPPRESS_DEPRECATED_START
ASSERT_THROW(req.SetBatch({}), InferenceEngine::NotAllocated);
IE_SUPPRESS_DEPRECATED_END
}
TEST(InferRequestCPPTests, throwsOnUninitializedStartAsync) {

View File

@@ -74,6 +74,7 @@ void Config::readProperties(const std::map<std::string, std::string> &prop) {
for (const auto& kvp : prop) {
const auto& key = kvp.first;
const auto& val = kvp.second;
IE_SUPPRESS_DEPRECATED_START
if (streamExecutorConfigKeys.end() !=
std::find(std::begin(streamExecutorConfigKeys), std::end(streamExecutorConfigKeys), key)) {
streamExecutorConfig.SetConfig(key, val);
@@ -246,6 +247,7 @@ void Config::readProperties(const std::map<std::string, std::string> &prop) {
} else {
IE_THROW(NotFound) << "Unsupported property " << key << " by CPU plugin";
}
IE_SUPPRESS_DEPRECATED_END
}
// apply execution mode after all the params are handled to prevent possible conflicts
// when both execution_mode and inference_precision are specified
@@ -298,12 +300,14 @@ void Config::updateProperties() {
_config.insert({ PluginConfigParams::KEY_EXCLUSIVE_ASYNC_REQUESTS, PluginConfigParams::YES });
else
_config.insert({ PluginConfigParams::KEY_EXCLUSIVE_ASYNC_REQUESTS, PluginConfigParams::NO });
IE_SUPPRESS_DEPRECATED_START
if (enableDynamicBatch == true)
_config.insert({ PluginConfigParams::KEY_DYN_BATCH_ENABLED, PluginConfigParams::YES });
else
_config.insert({ PluginConfigParams::KEY_DYN_BATCH_ENABLED, PluginConfigParams::NO });
_config.insert({ PluginConfigParams::KEY_DYN_BATCH_LIMIT, std::to_string(batchLimit) });
IE_SUPPRESS_DEPRECATED_END
_config.insert({ PluginConfigParams::KEY_CPU_THROUGHPUT_STREAMS, std::to_string(streamExecutorConfig._streams) });

View File

@@ -223,6 +223,7 @@ std::pair<std::string, ov::Any> LegacyAPIHelper::convert_to_legacy_property(cons
}
std::vector<std::string> LegacyAPIHelper::get_supported_configs() {
OPENVINO_SUPPRESS_DEPRECATED_START
static const std::vector<std::string> supported_config = {
CONFIG_KEY(MODEL_PRIORITY),
CONFIG_KEY(PERFORMANCE_HINT),
@@ -241,6 +242,7 @@ std::vector<std::string> LegacyAPIHelper::get_supported_configs() {
GPU_CONFIG_KEY(MAX_NUM_THREADS),
GPU_CONFIG_KEY(ENABLE_LOOP_UNROLLING),
};
OPENVINO_SUPPRESS_DEPRECATED_END
return supported_config;
}