Fix spelling errors in the API and bindings (#2781)
parent 258c51bd1f
commit ba3fc7fb8a
@@ -352,7 +352,7 @@ INFERENCE_ENGINE_C_API(void) ie_core_free(ie_core_t **core);
  * @brief Gets version information of the device specified. Use the ie_core_versions_free() method to free memory.
  * @ingroup Core
  * @param core A pointer to ie_core_t instance.
- * @param device_name Name to indentify device.
+ * @param device_name Name to identify device.
  * @param versions A pointer to versions corresponding to device_name.
  * @return Status code of the operation: OK(0) for success.
  */
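The corrected comment documents ie_core_versions(). A minimal usage sketch follows; the exact signatures of ie_core_create()/ie_core_versions(), the header path, and the num_vers field name are assumptions based on the surrounding C API rather than lines shown in this diff.

```cpp
#include <stdio.h>
#include <c_api/ie_c_api.h>  // assumed header path for the Inference Engine C API

int main() {
    ie_core_t *core = NULL;
    // Assumed signature: an empty string selects the default plugin configuration.
    if (ie_core_create("", &core) != OK)
        return 1;

    ie_core_versions_t versions = {};
    // Queries version info for the device named "CPU" (the device_name parameter above).
    if (ie_core_versions(core, "CPU", &versions) == OK) {
        // num_vers is an assumed field name holding the number of entries returned.
        printf("got %zu version entries for CPU\n", versions.num_vers);
        ie_core_versions_free(&versions);  // free as the comment requires
    }
    ie_core_free(&core);
    return 0;
}
```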
@@ -409,7 +409,7 @@ INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode) ie_core_load_network(ie_core_t
  * @ingroup Core
  * @param core A pointer to ie_core_t instance.
  * @param ie_core_config Device configuration.
- * @param device_name An optinal name of a device. If device name is not specified,
+ * @param device_name An optional name of a device. If device name is not specified,
  * the config is set for all the registered devices.
  * @return Status code of the operation: OK(0) for success.
  */
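The hunk above belongs to ie_core_set_config(). A hedged sketch of the optional device_name behaviour, assuming the usual (core, config, device_name) parameter order and a name/value/next layout for ie_config_t; neither detail is shown in this diff.

```cpp
#include <c_api/ie_c_api.h>  // assumed header path

// Sets a single configuration entry. Passing NULL as device_name is assumed to
// apply the config to all registered devices, per the comment above.
static IEStatusCode set_perf_counting(ie_core_t *core, const char *device_name) {
    ie_config_t cfg = {};                // field names below are assumptions
    cfg.name = (char *)"PERF_COUNT";
    cfg.value = (char *)"YES";
    cfg.next = NULL;
    return ie_core_set_config(core, &cfg, device_name);
}
```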
@@ -537,7 +537,7 @@ INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode) ie_exec_network_get_metric(con

 /**
  * @brief Sets configuration for current executable network. Currently, the method can be used
- * when the network run on the Multi device and the configuration paramter is only can be "MULTI_DEVICE_PRIORITIES"
+ * when the network run on the Multi device and the configuration parameter is only can be "MULTI_DEVICE_PRIORITIES"
  * @ingroup ExecutableNetwork
  * @param ie_exec_network A pointer to ie_executable_network_t instance.
  * @param param_config A pointer to device configuration..
@@ -551,7 +551,7 @@ INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode) ie_exec_network_set_config(ie_
  * @ingroup ExecutableNetwork
  * @param ie_exec_network A pointer to ie_executable_network_t instance.
  * @param metric_config A configuration parameter name to request.
- * @param param_result A configuration value corresponding to a configuration paramter name.
+ * @param param_result A configuration value corresponding to a configuration parameter name.
  * @return Status code of the operation: OK(0) for success.
  */
 INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode) ie_exec_network_get_config(const ie_executable_network_t *ie_exec_network, \
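Only the first half of the ie_exec_network_get_config() declaration is visible in the hunk, so the sketch below assumes the remaining parameters are the config name and an ie_param_t output, mirroring the @param list; treat those details as assumptions.

```cpp
#include <c_api/ie_c_api.h>  // assumed header path

// Reads back one configuration value from an executable network. The parameter
// order (network, config name, result) is inferred from the Doxygen comment
// above, not from the truncated declaration.
static bool query_exec_config(const ie_executable_network_t *exec_net) {
    ie_param_t result = {};
    IEStatusCode status = ie_exec_network_get_config(exec_net, "MULTI_DEVICE_PRIORITIES", &result);
    // The layout of ie_param_t is not shown in this diff, so the value is not
    // dereferenced here; a real caller would read it and then free it.
    return status == OK;
}
```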
@@ -653,7 +653,7 @@ INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode) ie_infer_request_set_batch(ie_
  */

 /**
- * @brief When netowrk is loaded into the Infernece Engine, it is not required anymore and should be released
+ * @brief When network is loaded into the Infernece Engine, it is not required anymore and should be released
  * @ingroup Network
  * @param network The pointer to the instance of the ie_network_t to free.
  */
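The fixed comment describes the lifetime rule for ie_network_t: once the network has been loaded into an executable network it can be released. A hedged sketch of that pattern, assuming the usual ie_core_read_network()/ie_core_load_network() signatures (they are not part of this diff):

```cpp
#include <c_api/ie_c_api.h>  // assumed header path

// Reads a model, loads it onto a device, then frees the ie_network_t, which the
// comment above says is no longer required after loading.
static IEStatusCode load_and_release(ie_core_t *core,
                                     ie_executable_network_t **exec_net) {
    ie_network_t *network = NULL;
    IEStatusCode status = ie_core_read_network(core, "model.xml", "model.bin", &network);
    if (status != OK)
        return status;

    ie_config_t config = {};  // empty device configuration (field layout assumed)
    status = ie_core_load_network(core, network, "CPU", &config, exec_net);

    ie_network_free(&network);  // safe to free whether or not loading succeeded
    return status;
}
```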
@@ -765,7 +765,7 @@ INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode) ie_network_set_input_resize_al
  * @param network A pointer to ie_network_t instance.
  * @param input_name Name of input data.
  * @param colformat_result The pointer to the color format used for input blob creation.
- * @reutrn Status code of the operation: OK(0) for success.
+ * @return Status code of the operation: OK(0) for success.
  */
 INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode) ie_network_get_color_format(const ie_network_t *network, const char *input_name, colorformat_e *colformat_result);

@@ -775,7 +775,7 @@ INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode) ie_network_get_color_format(co
  * @param network A pointer to ie_network_t instance.
  * @param input_name Name of input data.
  * @param color_format Color format of the input data.
- * @reutrn Status code of the operation: OK(0) for success.
+ * @return Status code of the operation: OK(0) for success.
  */
 INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode) ie_network_set_color_format(ie_network_t *network, const char *input_name, const colorformat_e color_format);

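Both color-format declarations appear in full in the two hunks above, so they can be exercised directly. The only assumptions in this sketch are the BGR enumerator name of colorformat_e and the hypothetical input name "data".

```cpp
#include <c_api/ie_c_api.h>  // assumed header path

// Switches the preprocessing color format of one input and reads it back.
static void configure_input_color(ie_network_t *network) {
    const char *input_name = "data";  // hypothetical input name
    if (ie_network_set_color_format(network, input_name, BGR) != OK)  // BGR enumerator assumed
        return;

    colorformat_e current;
    if (ie_network_get_color_format(network, input_name, &current) == OK) {
        // current now holds the format that will be used for input blob creation.
    }
}
```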
@@ -801,7 +801,7 @@ INFERENCE_ENGINE_C_API(IE_NODISCARD IEStatusCode) ie_network_reshape(ie_network_
 /**
  * @brief Gets number of output for the network.
  * @ingroup Network
- * @param network A pointer to the instance of the ie_network_t to get number of ouput information.
+ * @param network A pointer to the instance of the ie_network_t to get number of output information.
  * @param size_result A number of the network's output information.
  * @return Status code of the operation: OK(0) for success.
  */
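The declaration itself sits outside this hunk; the sketch below assumes the function is named ie_network_get_outputs_number and takes the network plus a size_t output, as the @param lines suggest. Both are assumptions.

```cpp
#include <stdio.h>
#include <c_api/ie_c_api.h>  // assumed header path

// Prints how many outputs the network exposes. Function name and parameter
// order are assumptions based on the Doxygen comment, not on this diff.
static void print_output_count(const ie_network_t *network) {
    size_t outputs = 0;
    if (ie_network_get_outputs_number(network, &outputs) == OK)
        printf("network has %zu outputs\n", outputs);
}
```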
@@ -145,7 +145,7 @@ std::map<std::string, IE::Parameter> config2ParamMap(const ie_config_t *config)
 }

 /**
- *@brief convert the paramter.
+ *@brief convert the parameter.
  */
 void parameter2IEparam(const IE::Parameter param, ie_param_t *ie_param) {
     if (param.is<std::string>()) {
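parameter2IEparam() dispatches on the runtime type held by an InferenceEngine::Parameter. A small standalone illustration of that is<T>()/as<T>() pattern (the umbrella header and the printing are just for the sketch):

```cpp
#include <iostream>
#include <string>
#include <inference_engine.hpp>  // umbrella header bringing in InferenceEngine::Parameter

namespace IE = InferenceEngine;

// Mirrors the dispatch style used by parameter2IEparam(): probe the stored
// type with is<T>() and extract it with as<T>().
void describe(const IE::Parameter &param) {
    if (param.is<std::string>()) {
        std::cout << "string: " << param.as<std::string>() << "\n";
    } else if (param.is<int>()) {
        std::cout << "int: " << param.as<int>() << "\n";
    } else if (param.is<unsigned int>()) {
        std::cout << "unsigned int: " << param.as<unsigned int>() << "\n";
    } else {
        std::cout << "unhandled parameter type\n";
    }
}
```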
@@ -172,8 +172,8 @@ JNIEXPORT jobject JNICALL Java_org_intel_openvino_InferRequest_GetPerformanceCou
     try
     {
         InferRequest *infer_request = (InferRequest *)addr;
-        std::map<std::string, InferenceEngine::InferenceEngineProfileInfo> perfomance;
-        perfomance = infer_request->GetPerformanceCounts();
+        std::map<std::string, InferenceEngine::InferenceEngineProfileInfo> performance;
+        performance = infer_request->GetPerformanceCounts();

         jclass hashMap_class = env->FindClass("java/util/LinkedHashMap");
         jmethodID init_method_id = env->GetMethodID(hashMap_class, "<init>", "()V");
@@ -188,7 +188,7 @@ JNIEXPORT jobject JNICALL Java_org_intel_openvino_InferRequest_GetPerformanceCou
         jclass layerStatus_enum = env->FindClass("org/intel/openvino/InferenceEngineProfileInfo$LayerStatus");
         jmethodID valueOf_method_id = env->GetStaticMethodID(layerStatus_enum,"valueOf","(I)Lorg/intel/openvino/InferenceEngineProfileInfo$LayerStatus;");

-        for (const auto& itr : perfomance) {
+        for (const auto& itr : performance) {
            InferenceEngine::InferenceEngineProfileInfo ie_prof_info = itr.second;
            auto it = layer_status_map.find(ie_prof_info.status);

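The JNI code above wraps InferRequest::GetPerformanceCounts(). For reference, the same call from plain C++ looks roughly like this; realTime_uSec and the LayerStatus values match the InferenceEngineProfileInfo struct touched elsewhere in this changeset.

```cpp
#include <iostream>
#include <map>
#include <inference_engine.hpp>

using namespace InferenceEngine;

// Dumps per-layer profiling info after an inference.
void dump_perf_counts(InferRequest &request) {
    request.Infer();
    std::map<std::string, InferenceEngineProfileInfo> performance =
        request.GetPerformanceCounts();
    for (const auto &item : performance) {
        const InferenceEngineProfileInfo &info = item.second;
        if (info.status == InferenceEngineProfileInfo::NOT_RUN)
            continue;  // skip layers that were never executed
        std::cout << item.first << ": " << info.realTime_uSec << " us\n";
    }
}
```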
@@ -74,7 +74,7 @@ PyObject *parse_parameter(const InferenceEngine::Parameter &param) {
         auto val = param.as<int>();
         return PyLong_FromLong((long)val);
     }
-    // Check for unsinged int
+    // Check for unsigned int
     else if (param.is<unsigned int>()) {
         auto val = param.as<unsigned int>();
         return PyLong_FromLong((unsigned long)val);
@@ -209,7 +209,7 @@ public:
      *
      * Wraps IExecutableNetwork::GetConfig
      * @param name - config key, can be found in ie_plugin_config.hpp
-     * @return Configuration paramater value
+     * @return Configuration parameter value
      */
     Parameter GetConfig(const std::string& name) const {
         Parameter configValue;
@@ -222,7 +222,7 @@ public:
      *
      * Wraps IExecutableNetwork::GetMetric
      * @param name - metric name to request
-     * @return Metric paramater value
+     * @return Metric parameter value
      */
     Parameter GetMetric(const std::string& name) const {
         Parameter metricValue;
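The two wrappers above return InferenceEngine::Parameter objects. A brief usage sketch; the METRIC_KEY macro comes from ie_plugin_config.hpp as the comment notes, and the specific keys chosen here are just examples.

```cpp
#include <string>
#include <inference_engine.hpp>

using namespace InferenceEngine;

// Queries a metric and a config value from an already-created ExecutableNetwork.
std::string network_name_and_priorities(const ExecutableNetwork &exec_net) {
    // Metric: the friendly name the plugin reports for the loaded network.
    std::string name = exec_net.GetMetric(METRIC_KEY(NETWORK_NAME)).as<std::string>();

    // Config: example key; which keys are valid depends on the device/plugin
    // (this one applies to networks loaded on the MULTI device, per the hunks above).
    Parameter prio = exec_net.GetConfig("MULTI_DEVICE_PRIORITIES");
    return name + " / " + (prio.is<std::string>() ? prio.as<std::string>() : std::string("<non-string>"));
}
```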
@@ -55,7 +55,7 @@ DECLARE_GNA_CONFIG_KEY(PRECISION);
 DECLARE_GNA_CONFIG_KEY(FIRMWARE_MODEL_IMAGE);

 /**
- * @brief inforamtion on GNA generation chosen for firmware model dump, can be overriden by GNA3
+ * @brief information on GNA generation chosen for firmware model dump, can be overridden by GNA3
  */
 DECLARE_GNA_CONFIG_KEY(FIRMWARE_MODEL_IMAGE_GENERATION);

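These DECLARE_GNA_CONFIG_KEY entries become string keys accessible through the GNA_CONFIG_KEY macro. A hedged sketch of passing them when loading a network on GNA; the header path, the value strings, and the generation name are assumptions based on the plugin's usual convention, not on this diff.

```cpp
#include <map>
#include <string>
#include <inference_engine.hpp>
#include <gna/gna_config.hpp>  // assumed header exposing GNA_CONFIG_KEY

using namespace InferenceEngine;

// Requests a firmware model dump when loading on the GNA plugin.
ExecutableNetwork load_on_gna(Core &ie, const CNNNetwork &network) {
    std::map<std::string, std::string> config = {
        {GNA_CONFIG_KEY(FIRMWARE_MODEL_IMAGE), "firmware_dump.bin"},        // dump path (example)
        {GNA_CONFIG_KEY(FIRMWARE_MODEL_IMAGE_GENERATION), "GNA1"},          // generation name assumed
    };
    return ie.LoadNetwork(network, "GNA", config);
}
```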
@@ -404,7 +404,7 @@ public:
      *
      * The memory been addressed in the MemoryBlob in general case can be allocated on remote device.
      * This function copies remote memory to the memory in the virtual process space and after
-     * destruction of the LockedMemory it will not upload host memory back, bacause it is expected that
+     * destruction of the LockedMemory it will not upload host memory back, because it is expected that
      * content is not changed.
      *
      * To have an ability change content, you can use rwmap() and wmap() functions.
@@ -428,10 +428,10 @@ public:
      *
      * The memory been addressed in the MemoryBlob in general case can be allocated on remote device.
      * This function does not copy of the content from the device to the memory in the virtual process
-     * space, the content of the memory just after calling of this functin is not specified. After
+     * space, the content of the memory just after calling of this function is not specified. After
      * destruction of the LockedMemory, content will be upload host memory.
      * In the same time there is no abilities to restrict reading from the memory, you need to care of
-     * reading from memory got by wmap(), it might have sence in some cases like filling of content and
+     * reading from memory got by wmap(), it might have sense in some cases like filling of content and
      * before uploading to device
      *
      * To access data stored in the blob, you can use rwmap() and rmap() functions.
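The comments above describe MemoryBlob's mapping helpers: rmap() gives a read-only view, wmap() a write-only one (contents unspecified until you fill them), and rwmap() both. A short usage sketch, assuming an FP32 blob:

```cpp
#include <inference_engine.hpp>

using namespace InferenceEngine;

// Fills a blob through wmap() and later reads it back through rmap(),
// matching the semantics spelled out in the MemoryBlob comments above.
void fill_and_inspect(const Blob::Ptr &blob) {
    MemoryBlob::Ptr mem_blob = as<MemoryBlob>(blob);
    if (!mem_blob)
        return;  // not a MemoryBlob (e.g. a remote blob wrapper)

    {
        // Write-only mapping: previous content is unspecified, so write everything.
        auto locked = mem_blob->wmap();
        float *dst = locked.as<float *>();
        for (size_t i = 0; i < mem_blob->size(); ++i)
            dst[i] = 0.0f;
    }  // upload (if any) happens when LockedMemory goes out of scope

    {
        // Read-only mapping: nothing is uploaded back on destruction.
        auto locked = mem_blob->rmap();
        const float *src = locked.as<const float *>();
        (void)src[0];
    }
}
```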
@@ -79,7 +79,7 @@ enum Layout : uint8_t {
     SCALAR = 95,  //!< A scalar layout

     // bias layouts
-    C = 96,  //!< A bias layout for opearation
+    C = 96,  //!< A bias layout for operation

     // Single image layouts
     CHW = 128,  //!< A single image layout (e.g. for mean image)
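Layout values such as NCHW, CHW, and the bias layout C fixed above are consumed through TensorDesc when creating blobs. A minimal illustration (dimensions are placeholders):

```cpp
#include <inference_engine.hpp>

using namespace InferenceEngine;

// Describes a batch of one RGB 224x224 image and a per-channel bias vector,
// then allocates host-side blobs with those descriptors.
void make_example_blobs() {
    TensorDesc image_desc(Precision::FP32, {1, 3, 224, 224}, Layout::NCHW);
    TensorDesc bias_desc(Precision::FP32, {3}, Layout::C);  // the bias layout documented above

    Blob::Ptr image = make_shared_blob<float>(image_desc);
    Blob::Ptr bias = make_shared_blob<float>(bias_desc);
    image->allocate();
    bias->allocate();
}
```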
@@ -166,7 +166,7 @@ struct InferenceEngineProfileInfo {
      * @brief Defines the general status of the layer
      */
     enum LayerStatus {
-        NOT_RUN,        //!< A layer is not exectued
+        NOT_RUN,        //!< A layer is not executed
         OPTIMIZED_OUT,  //!< A layer is optimized out during graph optimization phase
         EXECUTED        //!< A layer is executed
     };
@@ -237,7 +237,7 @@ struct ResponseDesc {


 /**
- * @brief Responce structure encapsulating information about supported layer
+ * @brief Response structure encapsulating information about supported layer
  */
 struct QueryNetworkResult {
     /**
@@ -44,7 +44,7 @@ public:
     /**
      * @brief Returns plugins version information
      *
-     * @param deviceName Device name to indentify plugin
+     * @param deviceName Device name to identify plugin
      * @return A vector of versions
      */
     std::map<std::string, Version> GetVersions(const std::string& deviceName) const;
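GetVersions() is declared right above with its full signature, so it can be used as-is; only the Version field names (description, buildNumber) in the print loop are recalled from the public struct rather than taken from this diff.

```cpp
#include <iostream>
#include <map>
#include <string>
#include <inference_engine.hpp>

using namespace InferenceEngine;

// Prints version information for every component backing a device name.
void print_device_versions(const std::string &device_name) {
    Core ie;
    std::map<std::string, Version> versions = ie.GetVersions(device_name);
    for (const auto &entry : versions) {
        // description / buildNumber field names assumed from the public Version struct.
        std::cout << entry.first << ": " << entry.second.description
                  << " (build " << entry.second.buildNumber << ")\n";
    }
}
```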
@@ -184,7 +184,7 @@ public:
     /**
      * @brief Sets configuration for device, acceptable keys can be found in ie_plugin_config.hpp
      *
-     * @param deviceName An optinal name of a device. If device name is not specified, the config is set for all the
+     * @param deviceName An optional name of a device. If device name is not specified, the config is set for all the
      * registered devices.
      *
      * @param config Map of pairs: (config parameter name, config parameter value)
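A sketch of the optional-device behaviour of Core::SetConfig(); CONFIG_KEY/CONFIG_VALUE come from ie_plugin_config.hpp as the comment says, and the parameter order (config map first, device name second) is recalled from the public API rather than shown in the hunk.

```cpp
#include <inference_engine.hpp>

using namespace InferenceEngine;

// Enables performance counters, once for a single device and once globally.
void enable_perf_counters(Core &ie) {
    // Applies only to the CPU plugin.
    ie.SetConfig({{CONFIG_KEY(PERF_COUNT), CONFIG_VALUE(YES)}}, "CPU");

    // No device name: applies to all registered devices, per the comment above.
    ie.SetConfig({{CONFIG_KEY(PERF_COUNT), CONFIG_VALUE(YES)}});
}
```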
@@ -160,7 +160,7 @@ public:
     /**
      * @brief Gets shared context used to create an executable network.
      *
-     * @param pContext Refernce to a pointer that will receive resulting shared context object ptr
+     * @param pContext Reference to a pointer that will receive resulting shared context object ptr
      * @param resp Pointer to the response message that holds a description of an error if any occurred
      * @return code of the operation. InferenceEngine::OK if succeeded
      */
@@ -194,7 +194,7 @@ public:
  * @brief A wrapper of CreateBlob method of RemoteContext to keep consistency with
  * plugin-specific wrappers.
  * @param desc Defines the layout and dims of the blob
- * @param ctx Poniter to the plugin object derived from RemoteContext.
+ * @param ctx Pointer to the plugin object derived from RemoteContext.
  * @return A pointer to plugin object that implements RemoteBlob interface.
  */
 inline RemoteBlob::Ptr make_shared_blob(const TensorDesc& desc, RemoteContext::Ptr ctx) {
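make_shared_blob() above wraps RemoteContext::CreateBlob(). A hedged sketch of using it with a context obtained from an executable network; ExecutableNetwork::GetContext() is assumed to be the matching accessor and is not part of this diff.

```cpp
#include <inference_engine.hpp>

using namespace InferenceEngine;

// Creates a device-side (remote) blob sized like a 1x3x224x224 FP32 tensor.
RemoteBlob::Ptr make_remote_input(ExecutableNetwork &exec_net) {
    RemoteContext::Ptr ctx = exec_net.GetContext();  // assumed accessor
    TensorDesc desc(Precision::FP32, {1, 3, 224, 224}, Layout::NCHW);
    return make_shared_blob(desc, ctx);  // the wrapper documented above
}
```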