Python API for LoadNetwork by model file name (#4896)

This commit is contained in:
Mikhail Nosov 2021-03-23 16:58:20 +03:00 committed by GitHub
parent 1d76ab7f55
commit 86eebbdfac
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 28 additions and 4 deletions

View File

@ -68,7 +68,7 @@ cdef class ExecutableNetwork:
cdef class IECore:
cdef C.IECore impl
cpdef IENetwork read_network(self, model : [str, bytes, os.PathLike], weights : [str, bytes, os.PathLike] = ?, bool init_from_buffer = ?)
# Accepts either a parsed `IENetwork` or a model file path (.xml / .onnx).
cpdef ExecutableNetwork load_network(self, network: [IENetwork, str], str device_name, config = ?, int num_requests = ?)
cpdef ExecutableNetwork import_network(self, str model_file, str device_name, config = ?, int num_requests = ?)

View File

@ -319,7 +319,7 @@ cdef class IECore:
# and creates an `ExecutableNetwork` object of the `IENetwork` class.
# You can create as many networks as you need and use them simultaneously (up to the limitation of the hardware
# resources).
# @param network: A valid `IENetwork` instance. A model file name (.xml or .onnx) can also be passed as an argument
# @param device_name: A device name of a target plugin
# @param config: A dictionary of plugin configuration keys and their values
# @param num_requests: A positive integer value of infer requests to be created. Number of infer requests is limited
@ -333,7 +333,7 @@ cdef class IECore:
# net = ie.read_network(model=path_to_xml_file, weights=path_to_bin_file)
# exec_net = ie.load_network(network=net, device_name="CPU", num_requests=2)
# ```
cpdef ExecutableNetwork load_network(self, network: [IENetwork, str], str device_name, config=None, int num_requests=1):
cdef ExecutableNetwork exec_net = ExecutableNetwork()
cdef map[string, string] c_config
if num_requests < 0:
@ -342,7 +342,10 @@ cdef class IECore:
if config:
c_config = dict_to_c_map(config)
exec_net.ie_core_impl = self.impl
if isinstance(network, str):
    # A model file path was given: let the core read and load it in one step.
    exec_net.impl = move(self.impl.loadNetworkFromFile((<str>network).encode(), device_name.encode(), c_config, num_requests))
else:
    # An already-parsed IENetwork was given: load its wrapped impl.
    exec_net.impl = move(self.impl.loadNetwork((<IENetwork>network).impl, device_name.encode(), c_config, num_requests))
return exec_net
## Creates an executable network from a previously exported network

View File

@ -564,6 +564,17 @@ std::unique_ptr <InferenceEnginePython::IEExecNetwork> InferenceEnginePython::IE
return exec_network;
}
// Loads a model directly from a file (.xml / .onnx) onto the given device and
// wraps it in an IEExecNetwork with `num_requests` infer requests created.
// NOTE(review): assumes `actual.LoadNetwork` reads the model from disk itself,
// so no separate read-network step is performed here — confirm against core API.
std::unique_ptr<InferenceEnginePython::IEExecNetwork> InferenceEnginePython::IECore::loadNetworkFromFile(
        const std::string &modelPath, const std::string &deviceName,
        const std::map<std::string, std::string> &config, int num_requests) {
    auto exec = InferenceEnginePython::make_unique<InferenceEnginePython::IEExecNetwork>(modelPath, num_requests);
    exec->actual = actual.LoadNetwork(modelPath, deviceName, config);
    exec->createInferRequests(num_requests);
    return exec;
}
std::unique_ptr <InferenceEnginePython::IEExecNetwork> InferenceEnginePython::IECore::importNetwork(
const std::string &modelFIle, const std::string &deviceName, const std::map <std::string, std::string> &config,
int num_requests) {

View File

@ -161,6 +161,8 @@ struct IECore {
InferenceEnginePython::IENetwork readNetwork(const std::string& model, const uint8_t *bin, size_t bin_size);
std::unique_ptr<InferenceEnginePython::IEExecNetwork> loadNetwork(IENetwork network, const std::string & deviceName,
const std::map<std::string, std::string> & config, int num_requests);
std::unique_ptr<InferenceEnginePython::IEExecNetwork> loadNetworkFromFile(const std::string & modelPath,
const std::string & deviceName, const std::map<std::string, std::string> & config, int num_requests);
std::unique_ptr<InferenceEnginePython::IEExecNetwork> importNetwork(const std::string & modelFIle, const std::string & deviceName,
const std::map<std::string, std::string> & config, int num_requests);
std::map<std::string, std::string> queryNetwork(IENetwork network, const std::string & deviceName,

View File

@ -216,6 +216,8 @@ cdef extern from "ie_api_impl.hpp" namespace "InferenceEnginePython":
IENetwork readNetwork(const string& modelPath,uint8_t*bin, size_t bin_size) except +
unique_ptr[IEExecNetwork] loadNetwork(IENetwork network, const string deviceName,
const map[string, string] & config, int num_requests) except +
unique_ptr[IEExecNetwork] loadNetworkFromFile(const string & modelPath, const string & deviceName,
const map[string, string] & config, int num_requests) except +
unique_ptr[IEExecNetwork] importNetwork(const string & modelFIle, const string & deviceName,
const map[string, string] & config, int num_requests) except +
map[string, string] queryNetwork(IENetwork network, const string deviceName,

View File

@ -58,6 +58,12 @@ def test_load_network(device):
assert isinstance(exec_net, ExecutableNetwork)
def test_load_network_from_file(device):
    """load_network accepts a model file path (.xml) in place of an IENetwork."""
    core = IECore()
    executable = core.load_network(test_net_xml, device)
    assert isinstance(executable, ExecutableNetwork)
@pytest.mark.skipif(os.environ.get("TEST_DEVICE", "CPU") != "CPU", reason="Device independent test")
def test_load_network_wrong_device():
ie = IECore()