[Python API] Fix import/export of model + update speech sample (#10103)

* Fix import/export of model

* update speech sample

* fix code-style

Co-authored-by: jiwaszki <jan.iwaszkiewicz@intel.com>
This commit is contained in:
Anastasia Kuporosova
2022-02-07 12:12:06 +03:00
committed by GitHub
parent 38f470c184
commit 3c13cea02b
4 changed files with 206 additions and 20 deletions

View File

@@ -5,6 +5,7 @@
import re
import sys
from io import BytesIO
from timeit import default_timer
from typing import Dict
@@ -148,12 +149,16 @@ def main():
if args.model:
compiled_model = core.compile_model(model, device_str, plugin_config)
else:
compiled_model = core.import_model(args.import_gna_model, device_str, plugin_config)
with open(args.import_gna_model, 'rb') as f:
buf = BytesIO(f.read())
compiled_model = core.import_model(buf, device_str, plugin_config)
# --------------------------- Exporting GNA model using InferenceEngine AOT API ---------------------------------------
if args.export_gna_model:
log.info(f'Writing GNA Model to {args.export_gna_model}')
compiled_model.export_model(args.export_gna_model)
user_stream = compiled_model.export_model()
with open(args.export_gna_model, 'wb') as f:
f.write(user_stream)
return 0
if args.export_embedded_gna_model:

View File

@@ -40,7 +40,82 @@ void regclass_CompiledModel(py::module m) {
},
py::arg("inputs"));
cls.def("export_model", &ov::CompiledModel::export_model, py::arg("model_stream"));
cls.def(
"export_model",
[](ov::CompiledModel& self) {
std::stringstream _stream;
self.export_model(_stream);
return py::bytes(_stream.str());
},
R"(
Exports the compiled model to bytes/output stream.
Parameters
----------
None
Returns
----------
export_model : bytes
Bytes object that contains this compiled model.
Examples
----------
user_stream = compiled.export_model()
with open('./my_model', 'wb') as f:
f.write(user_stream)
# ...
new_compiled = core.import_model(user_stream, "CPU")
)");
cls.def(
"export_model",
[](ov::CompiledModel& self, py::object& model_stream) {
if (!(py::isinstance(model_stream, pybind11::module::import("io").attr("BytesIO")))) {
throw py::type_error("CompiledModel.export_model(model_stream) incompatible function argument: "
"`model_stream` must be an io.BytesIO object but " +
(std::string)(py::repr(model_stream)) + "` provided");
}
std::stringstream _stream;
self.export_model(_stream);
model_stream.attr("flush")();
model_stream.attr("write")(py::bytes(_stream.str()));
model_stream.attr("seek")(0); // Always rewind stream!
},
py::arg("model_stream"),
R"(
Exports the compiled model to bytes/output stream.
Advanced version of `export_model`. It utilizes streams from the standard
Python library `io`.
Function performs flushing of the stream, writes to it and then rewinds
the stream to the beginning (using seek(0)).
Parameters
----------
model_stream : io.BytesIO
A stream object to which the model will be serialized.
Returns
----------
export_model : None
Examples
----------
user_stream = io.BytesIO()
compiled.export_model(user_stream)
with open('./my_model', 'wb') as f:
f.write(user_stream.getvalue()) # or read() if seek(0) was applied before
# ...
new_compiled = core.import_model(user_stream, "CPU")
)");
cls.def(
"set_property",

View File

@@ -170,14 +170,103 @@ void regclass_Core(py::module m) {
cls.def(
"import_model",
[](ov::Core& self,
std::istream& model_file,
const std::string& model_stream,
const std::string& device_name,
const std::map<std::string, std::string>& config) {
return self.import_model(model_file, device_name, {config.begin(), config.end()});
const std::map<std::string, std::string>& properties) {
std::stringstream _stream;
_stream << model_stream;
return self.import_model(_stream, device_name, {properties.begin(), properties.end()});
},
py::arg("model_file"),
py::arg("model_stream"),
py::arg("device_name"),
py::arg("config") = py::none());
py::arg("properties") = py::none(),
R"(
Imports a compiled model from a previously exported one.
Parameters
----------
model_stream : bytes
Input stream containing a model previously exported using export_model method.
device_name : str
Name of device to import compiled model for.
Note: if the device_name device was not used to compile the original model, an exception is thrown.
properties : dict
Optional map of pairs: (property name, property value) relevant only for this load operation.
Returns
----------
import_model : openvino.runtime.CompiledModel
Examples
----------
user_stream = compiled.export_model()
with open('./my_model', 'wb') as f:
f.write(user_stream)
# ...
new_compiled = core.import_model(user_stream, "CPU")
)");
// keep as second one to solve overload resolution problem
cls.def(
"import_model",
[](ov::Core& self,
const py::object& model_stream,
const std::string& device_name,
const std::map<std::string, std::string>& properties) {
if (!(py::isinstance(model_stream, pybind11::module::import("io").attr("BytesIO")))) {
throw py::type_error("CompiledModel.import_model(model_stream) incompatible function argument: "
"`model_stream` must be an io.BytesIO object but " +
(std::string)(py::repr(model_stream)) + "` provided");
}
model_stream.attr("seek")(0); // Always rewind stream!
std::stringstream _stream;
_stream << model_stream
.attr("read")() // alternative: model_stream.attr("get_value")()
.cast<std::string>();
return self.import_model(_stream, device_name, {properties.begin(), properties.end()});
},
py::arg("model_stream"),
py::arg("device_name"),
py::arg("properties") = py::none(),
R"(
Imports a compiled model from a previously exported one.
Advanced version of `import_model`. It utilizes streams from the standard
Python library `io`.
Parameters
----------
model_stream : bytes
Input stream containing a model previously exported using export_model method.
device_name : str
Name of device to import compiled model for.
Note: if the device_name device was not used to compile the original model, an exception is thrown.
properties : dict
Optional map of pairs: (property name, property value) relevant only for this load operation.
Returns
----------
import_model : openvino.runtime.CompiledModel
Examples
----------
user_stream = io.BytesIO()
compiled.export_model(user_stream)
with open('./my_model', 'wb') as f:
f.write(user_stream.getvalue()) # or read() if seek(0) was applied before
# ...
new_compiled = core.import_model(user_stream, "CPU")
)");
// todo: remove after Accuracy Checker migration to set/get_property API
cls.def(

View File

@@ -41,21 +41,38 @@ def test_get_runtime_model(device):
assert isinstance(runtime_func, Model)
@pytest.mark.skip(reason="After infer will be implemented")
def test_export_import():
core = Core()
func = core.read_model(model=test_net_xml, weights=test_net_bin)
exec_net = core.compile_model(func, "CPU")
exported_net_file = "exported_model.bin"
exec_net.export_model(network_model=exported_net_file)
assert os.path.exists(exported_net_file)
exec_net = core.import_network(exported_net_file, "CPU")
os.remove(exported_net_file)
model = core.read_model(model=test_net_xml, weights=test_net_bin)
compiled = core.compile_model(model, "CPU")
user_stream = compiled.export_model()
new_compiled = core.import_model(user_stream, "CPU")
img = read_image()
res = exec_net.infer({"data": img})
assert np.argmax(res["fc_out"][0]) == 3
del exec_net
del core
res = new_compiled.infer_new_request({"data": img})
assert np.argmax(res[new_compiled.outputs[0]]) == 2
def test_export_import_advanced():
import io
core = Core()
model = core.read_model(model=test_net_xml, weights=test_net_bin)
compiled = core.compile_model(model, "CPU")
user_stream = io.BytesIO()
compiled.export_model(user_stream)
new_compiled = core.import_model(user_stream, "CPU")
img = read_image()
res = new_compiled.infer_new_request({"data": img})
assert np.argmax(res[new_compiled.outputs[0]]) == 2
def test_get_input_i(device):