From 361367393b44ce2c6c6bf0d01088998548fdeba4 Mon Sep 17 00:00:00 2001
From: Maxim Vafin
Date: Tue, 1 Aug 2023 15:00:12 +0200
Subject: [PATCH] [MO] Raise exception for older nncf models (#18890)

* [PT FE] Call strip() for older nncf models

* Raise exception on older nncf models

* Update legacy mo too
---
 .../tools/mo/moc_frontend/pytorch_frontend_utils.py  | 11 +++++++++++
 .../tools/ovc/moc_frontend/pytorch_frontend_utils.py | 11 +++++++++++
 2 files changed, 22 insertions(+)

diff --git a/tools/mo/openvino/tools/mo/moc_frontend/pytorch_frontend_utils.py b/tools/mo/openvino/tools/mo/moc_frontend/pytorch_frontend_utils.py
index aab0d0ca93d..b42fa131225 100644
--- a/tools/mo/openvino/tools/mo/moc_frontend/pytorch_frontend_utils.py
+++ b/tools/mo/openvino/tools/mo/moc_frontend/pytorch_frontend_utils.py
@@ -19,6 +19,17 @@ def get_pytorch_decoder(model, input_shape, example_inputs, args):
     except Exception as e:
         log.error("PyTorch frontend loading failed")
         raise e
+    try:
+        import nncf
+        from nncf.torch.nncf_network import NNCFNetwork
+        from packaging import version
+
+        if isinstance(model, NNCFNetwork):
+            if version.parse(nncf.__version__) < version.parse("2.6"):
+                raise RuntimeError(
+                    "NNCF models produced by nncf<2.6 are not supported directly. Please export to ONNX first.")
+    except ImportError:
+        pass
     inputs = prepare_torch_inputs(example_inputs, input_shape, args.get("input"), allow_none=True)
     decoder = TorchScriptPythonDecoder(model, example_input=inputs)
     args['input_model'] = decoder
diff --git a/tools/ovc/openvino/tools/ovc/moc_frontend/pytorch_frontend_utils.py b/tools/ovc/openvino/tools/ovc/moc_frontend/pytorch_frontend_utils.py
index 372ad8f4023..3bb6c928f3a 100644
--- a/tools/ovc/openvino/tools/ovc/moc_frontend/pytorch_frontend_utils.py
+++ b/tools/ovc/openvino/tools/ovc/moc_frontend/pytorch_frontend_utils.py
@@ -19,6 +19,17 @@ def get_pytorch_decoder(model, example_inputs, args):
     except Exception as e:
         log.error("PyTorch frontend loading failed")
         raise e
+    try:
+        import nncf
+        from nncf.torch.nncf_network import NNCFNetwork
+        from packaging import version
+
+        if isinstance(model, NNCFNetwork):
+            if version.parse(nncf.__version__) < version.parse("2.6"):
+                raise RuntimeError(
+                    "NNCF models produced by nncf<2.6 are not supported directly. Please export to ONNX first.")
+    except ImportError:
+        pass
     inputs = prepare_torch_inputs(example_inputs, args.get("input"), allow_none=True)
     decoder = TorchScriptPythonDecoder(model, example_input=inputs)
     args['input_model'] = decoder
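
For reference, both hunks apply the same version gate; a minimal standalone sketch is shown below. It relies only on the imports the patch itself uses (nncf, nncf.torch.nncf_network.NNCFNetwork, packaging.version); the helper name check_nncf_model_version is illustrative and not part of the patch.

    # Standalone sketch of the guard added in both files (helper name is illustrative).
    # If nncf (or packaging) is not installed, the model cannot be an NNCFNetwork
    # produced by nncf, so the check silently becomes a no-op.
    def check_nncf_model_version(model):
        try:
            import nncf
            from nncf.torch.nncf_network import NNCFNetwork
            from packaging import version
        except ImportError:
            return
        if isinstance(model, NNCFNetwork) and version.parse(nncf.__version__) < version.parse("2.6"):
            raise RuntimeError(
                "NNCF models produced by nncf<2.6 are not supported directly. Please export to ONNX first.")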