[Samples] Python: added support of onnx models (#2782)

* [Samples] Python: added support of onnx models

* [Docs] Python: added support of onnx models

Updated documentation
This commit is contained in:
Mikhail Ryzhov
2020-10-27 10:23:25 +03:00
committed by GitHub
parent 73c40722fd
commit dc52ec7880
9 changed files with 58 additions and 28 deletions

View File

@@ -58,7 +58,8 @@ Running the application with the empty list of options yields the usage message
To run the sample, you can use RMNet_SSD or other object-detection models. You can download the pre-trained models with the OpenVINO [Model Downloader](@ref omz_tools_downloader_README) or from [https://download.01.org/opencv/](https://download.01.org/opencv/).
> **NOTE**: Before running the sample with a trained model, make sure the model is converted to the Inference Engine format (\*.xml + \*.bin) using the [Model Optimizer tool](../../../../../docs/MO_DG/Deep_Learning_Model_Optimizer_DevGuide.md).
>
> The sample accepts models in ONNX format (.onnx) that do not require preprocessing.
You can perform inference on an image using a trained RMNet_SSD network on FPGA with fallback to CPU using the following command:
```

View File

@@ -29,7 +29,7 @@ def build_argparser():
parser = ArgumentParser(add_help=False)
args = parser.add_argument_group("Options")
args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Show this help message and exit.')
args.add_argument("-m", "--model", help="Required. Path to an .xml file with a trained model.",
args.add_argument("-m", "--model", help="Required. Path to an .xml or .onnx file with a trained model.",
required=True, type=str)
args.add_argument("-i", "--input", help="Required. Path to image file.",
required=True, type=str, nargs="+")
@@ -53,11 +53,17 @@ def main():
args = build_argparser().parse_args()
log.info("Loading Inference Engine")
ie = IECore()
# --------------------------- 1. Read IR Generated by ModelOptimizer (.xml and .bin files) ------------
model_xml = args.model
model_bin = os.path.splitext(model_xml)[0] + ".bin"
log.info("Loading network files:\n\t{}\n\t{}".format(model_xml, model_bin))
net = ie.read_network(model=model_xml, weights=model_bin)
# ---1. Read a model in OpenVINO Intermediate Representation (.xml and .bin files) or ONNX (.onnx file) format ---
model = args.model
model_bin = None
model_name, model_ext = os.path.splitext(model)
log.info(f"Loading network files:\n\t{model}")
if model_ext == ".xml":
# Read .bin weights for IR format only
model_bin = model_name + ".bin"
log.info(f"\n\t{model_bin}")
net = ie.read_network(model=model, weights=model_bin)
func = ng.function_from_cnn(net)
ops = func.get_ordered_ops()
# -----------------------------------------------------------------------------------------------------