[C API] support string size for char pointer (#19931)

* [C API] support string size for char pointer

* rename function name

* Add deprecated flag

* Add macro to ignore deprecated

* Fix build error in windows

---------

Co-authored-by: Ilya Churaev <ilya.churaev@intel.com>
This commit is contained in:
River Li 2023-10-02 18:24:20 +08:00 committed by GitHub
parent 850bf3d87f
commit 32b4ae7570
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 77 additions and 12 deletions

View File

@ -10,6 +10,7 @@
#pragma once
#include "openvino/c/deprecated.h"
#include "openvino/c/ov_common.h"
#include "openvino/c/ov_compiled_model.h"
#include "openvino/c/ov_model.h"
@ -173,8 +174,31 @@ ov_core_read_model_unicode(const ov_core_t* core,
/**
 * @brief Reads models from IR / ONNX / PDPD / TF / TFLite formats.
 * @ingroup ov_core_c_api
 * @deprecated Use ov_core_read_model_from_memory_buffer instead.
 * @param core A pointer to the ov_core_t instance.
 * @param model_str String with a model in IR / ONNX / PDPD / TF / TFLite format, string is null-terminated.
 * Because the length is derived from the null terminator, the model text must not contain embedded
 * null characters; use ov_core_read_model_from_memory_buffer for such inputs.
 * @param weights Shared pointer to a constant tensor with weights.
 * @param model A pointer to the newly created model.
 * Reading ONNX / PDPD / TF / TFLite models does not support loading weights from the @p weights tensors.
 * @note Created model object shares the weights with the @p weights object.
 * Thus, do not create @p weights on temporary data that can be freed later, since the model
 * constant data will point to an invalid memory.
 * @return Status code of the operation: OK(0) for success.
 */
OPENVINO_C_API(OPENVINO_DEPRECATED(
    "This API is deprecated and will be replaced by ov_core_read_model_from_memory_buffer") ov_status_e)
ov_core_read_model_from_memory(const ov_core_t* core,
                               const char* model_str,
                               const ov_tensor_t* weights,
                               ov_model_t** model);
/**
* @brief Reads models from IR / ONNX / PDPD / TF / TFLite formats with models string size.
* @ingroup ov_core_c_api
* @param core A pointer to the ov_core_t instance.
* @param model_str String with a model in IR / ONNX / PDPD / TF / TFLite format, support model string containing
* several null chars.
* @param str_len The length of model string.
* @param weights Shared pointer to a constant tensor with weights.
* @param model A pointer to the newly created model.
* Reading ONNX / PDPD / TF / TFLite models does not support loading weights from the @p weights tensors.
@ -184,10 +208,11 @@ ov_core_read_model_unicode(const ov_core_t* core,
* @return Status code of the operation: OK(0) for success.
*/
OPENVINO_C_API(ov_status_e)
ov_core_read_model_from_memory(const ov_core_t* core,
const char* model_str,
const ov_tensor_t* weights,
ov_model_t** model);
ov_core_read_model_from_memory_buffer(const ov_core_t* core,
const char* model_str,
const size_t str_len,
const ov_tensor_t* weights,
ov_model_t** model);
/**
* @brief Creates a compiled model from a source model object.

View File

@ -89,20 +89,22 @@ ov_status_e ov_core_read_model(const ov_core_t* core,
return ov_status_e::OK;
}
ov_status_e ov_core_read_model_from_memory(const ov_core_t* core,
const char* model_str,
const ov_tensor_t* weights,
ov_model_t** model) {
if (!core || !model_str || !model) {
ov_status_e ov_core_read_model_from_memory_buffer(const ov_core_t* core,
const char* model_str,
const size_t str_size,
const ov_tensor_t* weights,
ov_model_t** model) {
if (!core || !model_str || !model || !str_size) {
return ov_status_e::INVALID_C_PARAM;
}
try {
std::unique_ptr<ov_model_t> _model(new ov_model_t);
std::string model_string(model_str, str_size);
if (weights) {
_model->object = core->object->read_model(model_str, *(weights->object));
_model->object = core->object->read_model(model_string, *(weights->object));
} else {
_model->object = core->object->read_model(model_str, ov::Tensor());
_model->object = core->object->read_model(model_string, ov::Tensor());
}
*model = _model.release();
}
@ -110,6 +112,13 @@ ov_status_e ov_core_read_model_from_memory(const ov_core_t* core,
return ov_status_e::OK;
}
/**
 * @brief Reads a model from a null-terminated in-memory string (deprecated wrapper).
 * Delegates to ov_core_read_model_from_memory_buffer, deriving the string
 * length with strlen(), so the model text must not contain embedded nulls.
 * @param core A pointer to the ov_core_t instance.
 * @param model_str Null-terminated string with the model.
 * @param weights Shared pointer to a constant tensor with weights.
 * @param model A pointer to the newly created model.
 * @return Status code of the operation: OK(0) for success.
 */
ov_status_e ov_core_read_model_from_memory(const ov_core_t* core,
                                           const char* model_str,
                                           const ov_tensor_t* weights,
                                           ov_model_t** model) {
    // Validate model_str before calling strlen(): the delegate checks its own
    // arguments, but strlen(nullptr) is undefined behavior and would crash
    // before that check could ever report INVALID_C_PARAM.
    if (!model_str) {
        return ov_status_e::INVALID_C_PARAM;
    }
    return ov_core_read_model_from_memory_buffer(core, model_str, strlen(model_str), weights, model);
}
ov_status_e ov_core_compile_model(const ov_core_t* core,
const ov_model_t* model,
const char* device_name,

View File

@ -77,6 +77,7 @@ TEST_P(ov_core_test, ov_core_read_model_no_bin) {
ov_core_free(core);
}
OPENVINO_SUPPRESS_DEPRECATED_START
TEST_P(ov_core_test, ov_core_read_model_from_memory) {
ov_core_t* core = nullptr;
OV_EXPECT_OK(ov_core_create(&core));
@ -102,6 +103,36 @@ TEST_P(ov_core_test, ov_core_read_model_from_memory) {
ov_model_free(model);
ov_core_free(core);
}
OPENVINO_SUPPRESS_DEPRECATED_END
// Verify that a model whose XML is supplied as an explicit (pointer, length)
// buffer is read correctly together with a weights tensor built from the .bin file.
TEST_P(ov_core_test, ov_core_read_model_from_memory_buffer_with_size) {
    ov_core_t* core = nullptr;
    OV_EXPECT_OK(ov_core_create(&core));
    EXPECT_NE(nullptr, core);

    // Wrap the raw weights bytes in a 1-D U8 tensor of matching size.
    std::vector<uint8_t> bin_blob(content_from_file(bin_file_name.c_str(), true));
    ov_shape_t weights_shape;
    int64_t weights_dims[2] = {1, static_cast<int64_t>(bin_blob.size())};
    ov_shape_create(2, weights_dims, &weights_shape);
    ov_tensor_t* weights_tensor = nullptr;
    OV_EXPECT_OK(
        ov_tensor_create_from_host_ptr(ov_element_type_e::U8, weights_shape, bin_blob.data(), &weights_tensor));
    EXPECT_NE(nullptr, weights_tensor);

    // Read the model, passing the XML buffer length explicitly.
    std::vector<uint8_t> xml_blob(content_from_file(xml_file_name.c_str(), false));
    ov_model_t* model = nullptr;
    OV_EXPECT_OK(ov_core_read_model_from_memory_buffer(core,
                                                       reinterpret_cast<const char*>(xml_blob.data()),
                                                       xml_blob.size(),
                                                       weights_tensor,
                                                       &model));
    EXPECT_NE(nullptr, model);

    // Release everything in the same order as the other core tests.
    ov_shape_free(&weights_shape);
    ov_tensor_free(weights_tensor);
    ov_model_free(model);
    ov_core_free(core);
}
TEST_P(ov_core_test, ov_core_compile_model) {
auto device_name = GetParam();