Deprecate ExecutableNetwork and InferRequest API (#17801)
* Deprecate ExecutableNetwork and InferRequest API
* Fixed some warnings
* Fixed some warnings
* Try to fix documentation
* Try to skip documentation warnings
parent d53339ff67
commit 18d6ece4e4
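This change marks the Inference Engine 1.0 wrapper classes (ExecutableNetwork, InferRequest, VariableState) and the METRIC_KEY/CONFIG_KEY families as deprecated in favor of the OpenVINO 2.0 API. As orientation for the hunks below, a minimal before/after sketch of the migration path (the model path and device name are placeholders):

```cpp
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;

    // 2.0 replacement for Core::LoadNetwork + ExecutableNetwork:
    ov::CompiledModel compiled = core.compile_model("model.xml", "CPU");

    // Replacement for ExecutableNetwork::CreateInferRequest + InferRequest:
    ov::InferRequest request = compiled.create_infer_request();
    request.infer();  // synchronous inference

    ov::Tensor output = request.get_output_tensor();
    return 0;
}
```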
@@ -13,11 +13,13 @@
 #endif
 
 int main() {
+    IE_SUPPRESS_DEPRECATED_START
     //! [part0]
     ov::Core core;
     // Load GPU Extensions
     core.set_property("GPU", {{ CONFIG_KEY(CONFIG_FILE), "<path_to_the_xml_file>" }});
     //! [part0]
+    IE_SUPPRESS_DEPRECATED_END
 
     return 0;
 }
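The documentation snippet keeps exercising the 1.0 path, so the calls are wrapped in IE_SUPPRESS_DEPRECATED_START/END to build warning-free. For readers unfamiliar with these macros: a plausible expansion looks roughly like the following (an illustrative sketch, not the actual definition, which lives in ie_api.h):

```cpp
// Hypothetical expansion, for illustration only:
#if defined(_MSC_VER)
#    define IE_SUPPRESS_DEPRECATED_START __pragma(warning(push)) __pragma(warning(disable : 4996))
#    define IE_SUPPRESS_DEPRECATED_END   __pragma(warning(pop))
#else
#    define IE_SUPPRESS_DEPRECATED_START \
        _Pragma("GCC diagnostic push")   \
        _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
#    define IE_SUPPRESS_DEPRECATED_END _Pragma("GCC diagnostic pop")
#endif
```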
@@ -103,3 +103,5 @@ unexpected unindent
 failed to import object
 autosummary: stub file not found
 failed to parse name
+for undefined enum \'waitmode\' found
+internal inconsistency: scope for class inferenceengine
@@ -309,6 +309,7 @@ int main(int argc, char* argv[]) {
         slog::info << "Extensions are loaded: " << FLAGS_extensions << slog::endl;
     }
 
+    OPENVINO_SUPPRESS_DEPRECATED_START
     // Load clDNN Extensions
     if ((FLAGS_d.find("GPU") != std::string::npos) && !FLAGS_c.empty()) {
         // Override config if command line parameter is specified
@@ -321,6 +322,7 @@ int main(int argc, char* argv[]) {
         core.set_property("GPU", {{CONFIG_KEY(CONFIG_FILE), ext}});
         slog::info << "GPU extensions are loaded: " << ext << slog::endl;
     }
+    OPENVINO_SUPPRESS_DEPRECATED_END
 
     slog::info << "OpenVINO:" << slog::endl;
     slog::info << ov::get_openvino_version() << slog::endl;
@@ -842,10 +844,12 @@ int main(int argc, char* argv[]) {
         for (auto& item : devices_properties) {
             slog::info << "  " << item.first << ": " << slog::endl;
             for (auto& item2 : item.second.as<ov::AnyMap>()) {
+                OPENVINO_SUPPRESS_DEPRECATED_START
                 if (item2.first == ov::supported_properties ||
                     item2.first == METRIC_KEY(SUPPORTED_CONFIG_KEYS) ||
                     item2.first == METRIC_KEY(SUPPORTED_METRICS))
                     continue;
+                OPENVINO_SUPPRESS_DEPRECATED_END
                 slog::info << "    " << item2.first << ": " << item2.second.as<std::string>() << slog::endl;
             }
         }
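benchmark_app filters the legacy introspection keys while suppressing their deprecation warnings. In the 2.0 API the same enumeration goes through ov::supported_properties; a minimal sketch:

```cpp
#include <iostream>
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    // ov::supported_properties replaces the deprecated SUPPORTED_METRICS /
    // SUPPORTED_CONFIG_KEYS introspection keys filtered out above.
    for (const auto& name : core.get_property("CPU", ov::supported_properties)) {
        std::cout << name << std::endl;
    }
    return 0;
}
```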
@@ -15,11 +15,13 @@
 #include <vector>
 
 #include "cpp_interfaces/interface/ie_iinfer_request_internal.hpp"
+#include "ie_api.h"
 #include "threading/ie_immediate_executor.hpp"
 #include "threading/ie_istreams_executor.hpp"
 #include "threading/ie_itask_executor.hpp"
 
 namespace InferenceEngine {
+IE_SUPPRESS_DEPRECATED_START
 
 /**
  * @ingroup ie_dev_api_async_infer_request_api
@@ -459,4 +461,5 @@ private:
     Futures _futures;
     InferState _state = InferState::Idle;
 };
+IE_SUPPRESS_DEPRECATED_END
 }  // namespace InferenceEngine
@@ -10,6 +10,16 @@
 
 #pragma once
 
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+#    define IE_LEGACY_HEADER_INCLUDED
+#    ifdef _MSC_VER
+#        pragma message( \
+            "The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    else
+#        warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    endif
+#endif
+
 #include <map>
 #include <memory>
 #include <ostream>
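Each public 1.0 header now prints this banner once per translation unit unless the code is part of OpenVINO itself (IN_OV_COMPONENT) or the guard macro is already defined. A consumer who needs a quiet build during migration could predefine the guard; a workaround sketch (ie_core.hpp stands in for any legacy header):

```cpp
// Hypothetical consumer-side workaround to silence the banner during migration.
// Note: this only skips the pragma message, not -Wdeprecated-declarations warnings.
#define IE_LEGACY_HEADER_INCLUDED  // must come before any Inference Engine header
#include <ie_core.hpp>
```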
@@ -32,7 +42,7 @@ class IExecutableNetworkInternal;
 /**
  * @brief This is an interface of an executable network
  */
-class INFERENCE_ENGINE_API_CLASS(ExecutableNetwork) {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(ExecutableNetwork) {
     std::shared_ptr<IExecutableNetworkInternal> _impl;
     std::shared_ptr<void> _so;
 
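Marking the class itself (rather than individual members) makes every use site warn, which is why the per-method INFERENCE_ENGINE_DEPRECATED annotations are dropped in the hunks that follow. The INFERENCE_ENGINE_1_0_DEPRECATED macro presumably boils down to a standard deprecation attribute; an illustrative definition (the real one lives in ie_api.h):

```cpp
// Illustrative definition only, for orientation:
#if defined(_MSC_VER)
#    define INFERENCE_ENGINE_1_0_DEPRECATED \
        __declspec(deprecated("Use the OpenVINO 2.0 API instead; removal is planned for 2024.0"))
#else
#    define INFERENCE_ENGINE_1_0_DEPRECATED \
        __attribute__((deprecated("Use the OpenVINO 2.0 API instead; removal is planned for 2024.0")))
#endif
```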
@@ -180,7 +190,6 @@ public:
      */
     explicit operator bool() const noexcept;
 
-    IE_SUPPRESS_DEPRECATED_START
     /**
      * @deprecated The method Will be removed
      * @brief reset owned object to new pointer.
@@ -188,7 +197,6 @@ public:
      * Essential for cases when simultaneously loaded networks not expected.
      * @param newActual actual pointed object
      */
-    INFERENCE_ENGINE_DEPRECATED("The method will be removed")
     void reset(std::shared_ptr<IExecutableNetwork> newActual);
 
     /**
@@ -196,7 +204,6 @@ public:
      * @brief cast operator is used when this wrapper initialized by LoadNetwork
      * @return A shared pointer to IExecutableNetwork interface.
      */
-    INFERENCE_ENGINE_DEPRECATED("The method will be removed. Use operator bool")
     operator std::shared_ptr<IExecutableNetwork>();
 
     /**
@@ -206,7 +213,6 @@ public:
      * Wraps IExecutableNetwork::CreateInferRequest.
      * @return shared pointer on InferenceEngine::InferRequest object
      */
-    INFERENCE_ENGINE_DEPRECATED("Use ExecutableNetwork::CreateInferRequest instead")
     InferRequest::Ptr CreateInferRequestPtr();
 };
 
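For code still using ExecutableNetwork, the 2.0 equivalents of the members deprecated above look like this (a sketch; ov::model_name is the 2.0 property behind NETWORK_NAME):

```cpp
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    ov::CompiledModel compiled = core.compile_model("model.xml", "CPU");

    // CreateInferRequest / CreateInferRequestPtr -> create_infer_request():
    ov::InferRequest request = compiled.create_infer_request();

    // GetMetric(EXEC_NETWORK_METRIC_KEY(NETWORK_NAME)) -> typed property:
    std::string name = compiled.get_property(ov::model_name);
    return 0;
}
```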
@@ -9,6 +9,16 @@
  */
 #pragma once
 
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+#    define IE_LEGACY_HEADER_INCLUDED
+#    ifdef _MSC_VER
+#        pragma message( \
+            "The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    else
+#        warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    endif
+#endif
+
 #include <map>
 #include <memory>
 #include <string>
@@ -31,7 +41,7 @@ class ICompletionCallbackWrapper;
  * Wraps IInferRequest
  * It can throw exceptions safely for the application, where it is properly handled.
  */
-class INFERENCE_ENGINE_API_CLASS(InferRequest) {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(InferRequest) {
     std::shared_ptr<IInferRequestInternal> _impl;
     std::shared_ptr<void> _so;
 
@@ -49,7 +59,7 @@ public:
      * @enum WaitMode
      * @brief Enumeration to hold wait mode for IInferRequest
      */
-    enum WaitMode : int64_t {
+    enum INFERENCE_ENGINE_1_0_DEPRECATED WaitMode : int64_t {
         /** Wait until inference result becomes available */
        RESULT_READY = -1,
         /** IInferRequest doesn't block or interrupt current thread and immediately returns inference status */
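WaitMode's two values map onto separate methods of ov::InferRequest: wait() blocks until the result is ready, and wait_for() with a zero timeout gives the poll-without-blocking behavior. A sketch:

```cpp
#include <chrono>
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    ov::InferRequest request = core.compile_model("model.xml", "CPU").create_infer_request();

    request.start_async();
    request.wait();  // WaitMode::RESULT_READY equivalent: block until done

    request.start_async();
    // Status-only equivalent: poll without blocking the current thread
    bool done = request.wait_for(std::chrono::milliseconds(0));
    (void)done;
    return 0;
}
```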
@@ -219,6 +229,7 @@ public:
         SetCallback<F>{*this}(std::move(callbackToSet));
     }
 
+    IE_SUPPRESS_DEPRECATED_START
     /**
      * @brief Gets state control interface for given infer request.
      *
@@ -227,7 +238,6 @@ public:
      */
     std::vector<VariableState> QueryState();
 
-    IE_SUPPRESS_DEPRECATED_START
     /**
      * @brief IInferRequest pointer to be used directly in CreateInferRequest functions
      * @return A shared pointer to IInferRequest interface
@@ -261,6 +271,7 @@ public:
     bool operator==(const InferRequest&) const noexcept;
 };
 
+IE_SUPPRESS_DEPRECATED_START
 /**
  * @private
  */
@@ -272,8 +283,6 @@ struct InferRequest::SetCallback<std::function<void(InferRequest, StatusCode)>>
     InferRequest& _this;
 };
 
-IE_SUPPRESS_DEPRECATED_START
-
 /**
  * @private
  */
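On the 2.0 side, the SetCompletionCallback overloads collapse into a single ov::InferRequest::set_callback taking std::function<void(std::exception_ptr)>; errors arrive through the exception_ptr instead of a StatusCode. A sketch:

```cpp
#include <exception>
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    ov::InferRequest request = core.compile_model("model.xml", "CPU").create_infer_request();

    // 2.0 replacement for SetCompletionCallback: one signature, errors via exception_ptr.
    request.set_callback([](std::exception_ptr ex) {
        if (ex) {
            try { std::rethrow_exception(ex); } catch (const std::exception&) { /* log */ }
            return;
        }
        // inference finished successfully
    });
    request.start_async();
    request.wait();
    return 0;
}
```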
@@ -10,6 +10,16 @@
 
 #pragma once
 
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+#    define IE_LEGACY_HEADER_INCLUDED
+#    ifdef _MSC_VER
+#        pragma message( \
+            "The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    else
+#        warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    endif
+#endif
+
 #include <memory>
 #include <string>
 
@@ -19,11 +29,12 @@
 namespace InferenceEngine {
 
 class IVariableStateInternal;
+IE_SUPPRESS_DEPRECATED_START
 
 /**
  * @brief VariableState class
  */
-class INFERENCE_ENGINE_API_CLASS(VariableState) {
+class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(VariableState) {
     std::shared_ptr<IVariableStateInternal> _impl;
     std::shared_ptr<void> _so;
 
@@ -95,4 +106,6 @@ public:
  */
 using MemoryState = VariableState;
 
+IE_SUPPRESS_DEPRECATED_END
+
 }  // namespace InferenceEngine
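The deprecated VariableState (and its MemoryState alias) correspond to ov::VariableState, obtained from ov::InferRequest::query_state() in the 2.0 API. A sketch:

```cpp
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    ov::InferRequest request = core.compile_model("model.xml", "CPU").create_infer_request();

    // InferRequest::QueryState() -> ov::InferRequest::query_state()
    for (auto& state : request.query_state()) {
        state.reset();  // VariableState::Reset() -> reset()
        // get_name(), get_state(), and set_state(tensor) cover the rest of the old surface
    }
    return 0;
}
```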
@@ -9,6 +9,16 @@
  */
 #pragma once
 
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+#    define IE_LEGACY_HEADER_INCLUDED
+#    ifdef _MSC_VER
+#        pragma message( \
+            "The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    else
+#        warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    endif
+#endif
+
 #include <map>
 #include <memory>
 #include <ostream>
@@ -24,13 +34,12 @@
 
 namespace InferenceEngine {
 
-_IE_SUPPRESS_DEPRECATED_START_GCC
+IE_SUPPRESS_DEPRECATED_START
 
 /**
  * @brief This is an interface of an executable network
  */
-class INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::ExecutableNetwork instead") IExecutableNetwork
-    : public std::enable_shared_from_this<IExecutableNetwork> {
+class INFERENCE_ENGINE_1_0_DEPRECATED IExecutableNetwork : public std::enable_shared_from_this<IExecutableNetwork> {
 public:
     IE_SUPPRESS_DEPRECATED_START
     /**
@@ -162,6 +171,6 @@ protected:
     virtual ~IExecutableNetwork() = default;
 };
 
-_IE_SUPPRESS_DEPRECATED_END_GCC
+IE_SUPPRESS_DEPRECATED_END
 
 }  // namespace InferenceEngine
@@ -10,6 +10,16 @@
 
 #pragma once
 
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+#    define IE_LEGACY_HEADER_INCLUDED
+#    ifdef _MSC_VER
+#        pragma message( \
+            "The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    else
+#        warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    endif
+#endif
+
 #include <map>
 #include <memory>
 #include <string>
@@ -20,14 +30,13 @@
 
 namespace InferenceEngine {
 
-_IE_SUPPRESS_DEPRECATED_START_GCC
+IE_SUPPRESS_DEPRECATED_START
 
 /**
  * @deprecated Use InferenceEngine::InferRequest C++ wrapper
  * @brief This is an interface of asynchronous infer request
  */
-class INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::InferRequest C++ wrapper") IInferRequest
-    : public std::enable_shared_from_this<IInferRequest> {
+class INFERENCE_ENGINE_1_0_DEPRECATED IInferRequest : public std::enable_shared_from_this<IInferRequest> {
 public:
     /**
      * @enum WaitMode
@@ -207,6 +216,6 @@ protected:
     virtual ~IInferRequest() = default;
 };
 
-_IE_SUPPRESS_DEPRECATED_END_GCC
+IE_SUPPRESS_DEPRECATED_END
 
 }  // namespace InferenceEngine
@@ -14,6 +14,16 @@
 
 #pragma once
 
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+#    define IE_LEGACY_HEADER_INCLUDED
+#    ifdef _MSC_VER
+#        pragma message( \
+            "The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    else
+#        warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    endif
+#endif
+
 #include "openvino/core/parallel.hpp"
 
 #define IE_THREAD_TBB OV_THREAD_TBB
@@ -10,6 +10,16 @@
  */
 #pragma once
 
+#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
+#    define IE_LEGACY_HEADER_INCLUDED
+#    ifdef _MSC_VER
+#        pragma message( \
+            "The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    else
+#        warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
+#    endif
+#endif
+
 #include <map>
 #include <string>
 #include <tuple>
@@ -18,6 +28,7 @@
 #include "ie_precision.hpp"
 
 namespace InferenceEngine {
+IE_SUPPRESS_DEPRECATED_START
 
 /**
  * @brief %Metrics
@@ -56,6 +67,7 @@ namespace Metrics {
 /**
  * @brief Metric to get a std::vector<std::string> of available device IDs. String value is "AVAILABLE_DEVICES"
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(AVAILABLE_DEVICES, std::vector<std::string>);
 
 /**
@@ -67,6 +79,7 @@ DECLARE_METRIC_KEY(AVAILABLE_DEVICES, std::vector<std::string>);
  * can be passed to ExecutableNetwork::GetMetric.
  *
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(SUPPORTED_METRICS, std::vector<std::string>);
 
 /**
@@ -79,11 +92,13 @@ DECLARE_METRIC_KEY(SUPPORTED_METRICS, std::vector<std::string>);
  * ExecutableNetwork::GetConfig.
  *
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(SUPPORTED_CONFIG_KEYS, std::vector<std::string>);
 
 /**
  * @brief Metric to get a std::string value representing a full device name. String value is "FULL_DEVICE_NAME"
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(FULL_DEVICE_NAME, std::string);
 
 /**
@@ -99,14 +114,22 @@ DECLARE_METRIC_KEY(FULL_DEVICE_NAME, std::string);
  * - "WINOGRAD" - device can support models where convolution implemented via Winograd transformations
  * - "BATCHED_BLOB" - device can support BatchedBlob
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(OPTIMIZATION_CAPABILITIES, std::vector<std::string>);
 
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_VALUE(FP32);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_VALUE(BF16);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_VALUE(FP16);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_VALUE(INT8);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_VALUE(BIN);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_VALUE(WINOGRAD);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_VALUE(BATCHED_BLOB);
 
 /**
@@ -117,6 +140,7 @@ DECLARE_METRIC_VALUE(BATCHED_BLOB);
  * - Second value is upper bound.
  * String value for metric name is "RANGE_FOR_STREAMS".
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(RANGE_FOR_STREAMS, std::tuple<unsigned int, unsigned int>);
 /**
  * @brief Metric to query information optimal batch size for the given device and the network
@@ -129,6 +153,7 @@ DECLARE_METRIC_KEY(RANGE_FOR_STREAMS, std::tuple<unsigned int, unsigned int>);
  * so that the result (>1) governs the automatic batching (transparently to the application).
  * The automatic batching can be disabled with ALLOW_AUTO_BATCHING set to NO
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(OPTIMAL_BATCH_SIZE, unsigned int);
 
 /**
@@ -139,6 +164,7 @@ DECLARE_METRIC_KEY(OPTIMAL_BATCH_SIZE, unsigned int);
  * Also, MODEL_PTR is the required option for this metric since the available max batch size depends on the model size.
  * If the MODEL_PTR is not given, it will return 1.
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(MAX_BATCH_SIZE, unsigned int);
 
 /**
@@ -151,6 +177,7 @@ DECLARE_METRIC_KEY(MAX_BATCH_SIZE, unsigned int);
  * - Third value is step inside this range.
  * String value for metric name is "RANGE_FOR_ASYNC_INFER_REQUESTS".
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(RANGE_FOR_ASYNC_INFER_REQUESTS, std::tuple<unsigned int, unsigned int, unsigned int>);
 
 /**
@@ -158,6 +185,7 @@ DECLARE_METRIC_KEY(RANGE_FOR_ASYNC_INFER_REQUESTS, std::tuple<unsigned int, unsi
  *
  * String value is "NUMBER_OF_WAITNING_INFER_REQUESTS". This can be used as an executable network metric as well
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(NUMBER_OF_WAITING_INFER_REQUESTS, unsigned int);
 
 /**
@@ -165,23 +193,27 @@ DECLARE_METRIC_KEY(NUMBER_OF_WAITING_INFER_REQUESTS, unsigned int);
  *
  * String value is "NUMBER_OF_EXEC_INFER_REQUESTS". This can be used as an executable network metric as well
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(NUMBER_OF_EXEC_INFER_REQUESTS, unsigned int);
 
 /**
  * @brief Metric which defines the device architecture.
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(DEVICE_ARCHITECTURE, std::string);
 
 /**
  * @brief Enum to define possible device types
  */
-enum class DeviceType {
+enum class INFERENCE_ENGINE_1_0_DEPRECATED DeviceType {
     integrated = 0,
     discrete = 1,
 };
 
 /** @cond INTERNAL */
-inline std::ostream& operator<<(std::ostream& os, const InferenceEngine::Metrics::DeviceType& deviceType) {
+INFERENCE_ENGINE_1_0_DEPRECATED inline std::ostream& operator<<(
+    std::ostream& os,
+    const InferenceEngine::Metrics::DeviceType& deviceType) {
     switch (deviceType) {
     case InferenceEngine::Metrics::DeviceType::discrete:
         os << "discrete";
@@ -201,32 +233,38 @@ inline std::ostream& operator<<(std::ostream& os, const InferenceEngine::Metrics
 /**
  * @brief Metric to get a type of device. See DeviceType enum definition for possible return values
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(DEVICE_TYPE, DeviceType);
 
 /**
  * @brief Metric which defines Giga OPS per second count (GFLOPS or GIOPS) for a set of precisions supported by
  * specified device
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(DEVICE_GOPS, std::map<InferenceEngine::Precision, float>);
 
 /**
  * @brief Metric which defines support of import/export functionality by plugin
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(IMPORT_EXPORT_SUPPORT, bool);
 
 /**
  * @brief Metric to get a name of network. String value is "NETWORK_NAME".
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_EXEC_NETWORK_METRIC_KEY(NETWORK_NAME, std::string);
 
 /**
  * @brief Metric to get a float of device thermal. String value is "DEVICE_THERMAL"
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_METRIC_KEY(DEVICE_THERMAL, float);
 
 /**
  * @brief Metric to get an unsigned integer value of optimal number of executable network infer requests.
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_EXEC_NETWORK_METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS, unsigned int);
 
 }  // namespace Metrics
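Every METRIC_KEY above now carries the 1.0 deprecation marker. The 2.0 replacements are typed properties queried through ov::Core::get_property; a few representative mappings (the device name is a placeholder):

```cpp
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    // METRIC_KEY(FULL_DEVICE_NAME)          -> ov::device::full_name
    std::string full = core.get_property("GPU", ov::device::full_name);
    // METRIC_KEY(AVAILABLE_DEVICES)         -> ov::available_devices
    auto devices = core.get_property("GPU", ov::available_devices);
    // METRIC_KEY(OPTIMIZATION_CAPABILITIES) -> ov::device::capabilities
    auto caps = core.get_property("GPU", ov::device::capabilities);
    return 0;
}
```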
@@ -255,9 +293,13 @@ namespace PluginConfigParams {
  * @brief (Optional) config key that defines what model should be provided with more performant bounded resource first
  * It provides 3 types of levels: High, Medium and Low. The default value is Medium
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(MODEL_PRIORITY);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(MODEL_PRIORITY_HIGH);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(MODEL_PRIORITY_MED);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(MODEL_PRIORITY_LOW);
 
 /**
@@ -265,40 +307,52 @@ DECLARE_CONFIG_VALUE(MODEL_PRIORITY_LOW);
  * unlike low-level config keys that are individual (per-device), the hints are smth that every device accepts
  * and turns into device-specific settings
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(PERFORMANCE_HINT);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(LATENCY);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(THROUGHPUT);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(UNDEFINED);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(CUMULATIVE_THROUGHPUT);
 /**
  * @brief (Optional) config key that backs the (above) Performance Hints
  * by giving additional information on how many inference requests the application will be keeping in flight
  * usually this value comes from the actual use-case (e.g. number of video-cameras, or other sources of inputs)
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(PERFORMANCE_HINT_NUM_REQUESTS);
 /**
  * @brief (Optional) config key that governs Auto-Batching (with YES/NO values, below)
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(ALLOW_AUTO_BATCHING);
 
 /**
  * @brief generic boolean values
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(YES);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(NO);
 
 /**
  * @brief Auto-batching configuration, string for the device + batch size, e.g. "GPU(4)"
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(AUTO_BATCH_DEVICE_CONFIG);
 /**
  * @brief Auto-batching configuration: string with timeout (in ms), e.g. "100"
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(AUTO_BATCH_TIMEOUT);
 
 /**
  * @brief Limit `#threads` that are used by Inference Engine for inference on the CPU.
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(CPU_THREADS_NUM);
 
 /**
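The CONFIG_KEY family maps to typed ov:: properties. For the performance-hint keys deprecated above, the 2.0 spelling is (a sketch; the request count is illustrative):

```cpp
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    // CONFIG_KEY(PERFORMANCE_HINT)=THROUGHPUT   -> ov::hint::performance_mode
    // CONFIG_KEY(PERFORMANCE_HINT_NUM_REQUESTS) -> ov::hint::num_requests
    ov::CompiledModel compiled = core.compile_model(
        "model.xml",
        "CPU",
        ov::hint::performance_mode(ov::hint::PerformanceMode::THROUGHPUT),
        ov::hint::num_requests(4));
    return 0;
}
```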
@@ -318,8 +372,11 @@ DECLARE_CONFIG_KEY(CPU_THREADS_NUM);
  * Also, the settings are ignored, if the OpenVINO compiled with OpenMP and any affinity-related OpenMP's
  * environment variable is set (as affinity is configured explicitly)
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(CPU_BIND_THREAD);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(NUMA);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(HYBRID_AWARE);
 
 /**
@@ -332,8 +389,11 @@ DECLARE_CONFIG_VALUE(HYBRID_AWARE);
  * (and what is the optimal number of streams)
  * - finally, specifying the positive integer value creates the requested number of streams
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(CPU_THROUGHPUT_STREAMS);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(CPU_THROUGHPUT_NUMA);
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(CPU_THROUGHPUT_AUTO);
 
 /**
@@ -342,6 +402,7 @@ DECLARE_CONFIG_VALUE(CPU_THROUGHPUT_AUTO);
  * It is passed to Core::SetConfig(), this option should be used with values:
  * PluginConfigParams::YES or PluginConfigParams::NO
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(PERF_COUNT);
 
 /**
@@ -373,6 +434,7 @@ DECLARE_CONFIG_KEY(DYN_BATCH_ENABLED);
  *
  * The value should be a file name with the plugin specific configuration
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(CONFIG_FILE);
 
 /**
@@ -382,23 +444,31 @@ DECLARE_CONFIG_KEY(CONFIG_FILE);
  * PluginConfigParams::LOG_ERROR, PluginConfigParams::LOG_WARNING,
  * PluginConfigParams::LOG_INFO, PluginConfigParams::LOG_DEBUG, PluginConfigParams::LOG_TRACE
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(LOG_LEVEL);
 
-DECLARE_CONFIG_VALUE(LOG_NONE);   // turn off logging
-DECLARE_CONFIG_VALUE(LOG_ERROR);  // error events that might still allow the
-                                  // application to continue running
+INFERENCE_ENGINE_1_0_DEPRECATED
+DECLARE_CONFIG_VALUE(LOG_NONE);  // turn off logging
+INFERENCE_ENGINE_1_0_DEPRECATED
+DECLARE_CONFIG_VALUE(LOG_ERROR);  // error events that might still allow the
+                                  // application to continue running
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_VALUE(LOG_WARNING);  // potentially harmful situations which may
                                     // further lead to ERROR
-DECLARE_CONFIG_VALUE(LOG_INFO);   // informational messages that display the progress of the
-                                  // application at coarse-grained level
-DECLARE_CONFIG_VALUE(LOG_DEBUG);  // fine-grained events that are most useful to
-                                  // debug an application.
-DECLARE_CONFIG_VALUE(LOG_TRACE);  // finer-grained informational events than the DEBUG
+INFERENCE_ENGINE_1_0_DEPRECATED
+DECLARE_CONFIG_VALUE(LOG_INFO);  // informational messages that display the progress of the
+                                 // application at coarse-grained level
+INFERENCE_ENGINE_1_0_DEPRECATED
+DECLARE_CONFIG_VALUE(LOG_DEBUG);  // fine-grained events that are most useful to
+                                  // debug an application.
+INFERENCE_ENGINE_1_0_DEPRECATED
+DECLARE_CONFIG_VALUE(LOG_TRACE);  // finer-grained informational events than the DEBUG
 
 /**
  * @brief the key for setting of required device to execute on
  * values: device id starts from "0" - first device, "1" - second device, etc
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(DEVICE_ID);
 
 /**
@@ -411,6 +481,7 @@ DECLARE_CONFIG_KEY(DEVICE_ID);
  * By default, the option is set to YES for hetero cases, and to NO for conventional (single-plugin) cases
  * Notice that setting YES disables the CPU streams feature (see another config key in this file)
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS);
 
 /**
@@ -421,7 +492,7 @@ DECLARE_CONFIG_KEY(EXCLUSIVE_ASYNC_REQUESTS);
  * corresponding configuration information. Value is a name of output dot file without extension.
  * Files `<dot_file_name>_init.dot` and `<dot_file_name>_perf.dot` will be produced.
  */
-INFERENCE_ENGINE_DEPRECATED("Use InferenceEngine::ExecutableNetwork::GetExecGraphInfo::serialize method")
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(DUMP_EXEC_GRAPH_AS_DOT);
 
 /**
@@ -433,6 +504,7 @@ DECLARE_CONFIG_KEY(DUMP_EXEC_GRAPH_AS_DOT);
  * verified separately by the user and basing on performance and accuracy results it should be
  * user's decision to use this option or not to use
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(ENFORCE_BF16);
 
 /**
@@ -453,6 +525,7 @@ DECLARE_CONFIG_KEY(ENFORCE_BF16);
  * ie.SetConfig({{CONFIG_KEY(CACHE_DIR), "cache/"}}); // enables models cache
  * @endcode
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(CACHE_DIR);
 
 /**
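Likewise, CONFIG_KEY(CACHE_DIR) from the hunk above becomes the ov::cache_dir property:

```cpp
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    // CONFIG_KEY(CACHE_DIR) -> ov::cache_dir: enables the model cache, as before
    core.set_property(ov::cache_dir("cache/"));
    return 0;
}
```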
@@ -466,6 +539,7 @@ DECLARE_CONFIG_KEY(CACHE_DIR);
  * ie.SetConfig({{CONFIG_KEY(FORCE_TBB_TERMINATE), CONFIG_VALUE(YES)}}); // enable
  * @endcode
  */
+INFERENCE_ENGINE_1_0_DEPRECATED
 DECLARE_CONFIG_KEY(FORCE_TBB_TERMINATE);
 
 }  // namespace PluginConfigParams
@@ -478,6 +552,7 @@ DECLARE_CONFIG_KEY(FORCE_TBB_TERMINATE);
 
 #define DECLARE_AUTO_CONFIG_KEY(name) DECLARE_CONFIG_KEY(AUTO_##name)
 
+IE_SUPPRESS_DEPRECATED_END
 }  // namespace InferenceEngine
 
 #include "gpu/gpu_config.hpp"
@@ -19,6 +19,7 @@
 
 namespace ov {
 namespace threading {
+OPENVINO_SUPPRESS_DEPRECATED_START
 
 IStreamsExecutor::~IStreamsExecutor() {}
 
@@ -11,6 +11,8 @@ using namespace std;
 using namespace InferenceEngine;
 using namespace InferenceEngine::details;
 
+IE_SUPPRESS_DEPRECATED_START
+
 TEST(InferRequestCPPTests, throwsOnUninitializedSetBlob) {
     InferRequest req;
     ASSERT_THROW(req.SetBlob({}, {}), InferenceEngine::NotAllocated);
@@ -6,10 +6,13 @@
 
 #include <cpp/ie_infer_request.hpp>
 
+#include "openvino/core/deprecated.hpp"
+
 using namespace ::testing;
 using namespace std;
 using namespace InferenceEngine;
 using namespace InferenceEngine::details;
+OPENVINO_SUPPRESS_DEPRECATED_START
 
 TEST(VariableStateCPPTests, throwsOnUninitializedReset) {
     VariableState req;